Feat: nested collections (#3716)
This commit is contained in:
@ -28,6 +28,7 @@ import {
|
||||
readFileMetadata,
|
||||
throwOnConflictingBranches,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import { dirname } from 'path';
|
||||
import { oneLine } from 'common-tags';
|
||||
import { parse } from 'what-the-diff';
|
||||
|
||||
@ -364,8 +365,8 @@ export default class API {
|
||||
};
|
||||
};
|
||||
|
||||
listFiles = async (path: string, depth = 1, pagelen = 20) => {
|
||||
const node = await this.branchCommitSha(this.branch);
|
||||
listFiles = async (path: string, depth = 1, pagelen: number, branch: string) => {
|
||||
const node = await this.branchCommitSha(branch);
|
||||
const result: BitBucketSrcResult = await this.requestJSON({
|
||||
url: `${this.repoURL}/src/${node}/${path}`,
|
||||
params: {
|
||||
@ -398,11 +399,12 @@ export default class API {
|
||||
})),
|
||||
])(cursor.data!.getIn(['links', action]));
|
||||
|
||||
listAllFiles = async (path: string, depth = 1) => {
|
||||
listAllFiles = async (path: string, depth: number, branch: string) => {
|
||||
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(
|
||||
path,
|
||||
depth,
|
||||
100,
|
||||
branch,
|
||||
);
|
||||
const entries = [...initialEntries];
|
||||
let currentCursor = initialCursor;
|
||||
@ -418,7 +420,7 @@ export default class API {
|
||||
};
|
||||
|
||||
async uploadFiles(
|
||||
files: (Entry | AssetProxy | DeleteEntry)[],
|
||||
files: { path: string; newPath?: string; delete?: boolean }[],
|
||||
{
|
||||
commitMessage,
|
||||
branch,
|
||||
@ -426,10 +428,14 @@ export default class API {
|
||||
}: { commitMessage: string; branch: string; parentSha?: string },
|
||||
) {
|
||||
const formData = new FormData();
|
||||
const toMove: { from: string; to: string; contentBlob: Blob }[] = [];
|
||||
files.forEach(file => {
|
||||
if ((file as DeleteEntry).delete) {
|
||||
if (file.delete) {
|
||||
// delete the file
|
||||
formData.append('files', file.path);
|
||||
} else if (file.newPath) {
|
||||
const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
|
||||
toMove.push({ from: file.path, to: file.newPath, contentBlob });
|
||||
} else {
|
||||
// add/modify the file
|
||||
const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
|
||||
@ -437,6 +443,30 @@ export default class API {
|
||||
formData.append(file.path, contentBlob, basename(file.path));
|
||||
}
|
||||
});
|
||||
for (const { from, to, contentBlob } of toMove) {
|
||||
const sourceDir = dirname(from);
|
||||
const destDir = dirname(to);
|
||||
const filesBranch = parentSha ? this.branch : branch;
|
||||
const files = await this.listAllFiles(sourceDir, 100, filesBranch);
|
||||
for (const file of files) {
|
||||
// to move a file in Bitbucket we need to delete the old path
|
||||
// and upload the file content to the new path
|
||||
// NOTE: this is very wasteful, and also the Bitbucket `diff` API
|
||||
// reports these files as deleted+added instead of renamed
|
||||
// delete current path
|
||||
formData.append('files', file.path);
|
||||
// create in new path
|
||||
const content =
|
||||
file.path === from
|
||||
? contentBlob
|
||||
: await this.readFile(file.path, null, {
|
||||
branch: filesBranch,
|
||||
parseText: false,
|
||||
});
|
||||
formData.append(file.path.replace(sourceDir, destDir), content, basename(file.path));
|
||||
}
|
||||
}
|
||||
|
||||
if (commitMessage) {
|
||||
formData.append('message', commitMessage);
|
||||
}
|
||||
@ -538,19 +568,20 @@ export default class API {
|
||||
},
|
||||
});
|
||||
|
||||
return parse(rawDiff).map(d => {
|
||||
const diffs = parse(rawDiff).map(d => {
|
||||
const oldPath = d.oldPath?.replace(/b\//, '') || '';
|
||||
const newPath = d.newPath?.replace(/b\//, '') || '';
|
||||
const path = newPath || (oldPath as string);
|
||||
return {
|
||||
oldPath,
|
||||
newPath,
|
||||
binary: d.binary || /.svg$/.test(path),
|
||||
status: d.status,
|
||||
newFile: d.status === 'added',
|
||||
path,
|
||||
binary: d.binary || /.svg$/.test(path),
|
||||
};
|
||||
});
|
||||
return diffs;
|
||||
}
|
||||
|
||||
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
|
||||
@ -573,8 +604,8 @@ export default class API {
|
||||
// mark files for deletion
|
||||
const diffs = await this.getDifferences(branch);
|
||||
const toDelete: DeleteEntry[] = [];
|
||||
for (const diff of diffs) {
|
||||
if (!files.some(file => file.path === diff.newPath)) {
|
||||
for (const diff of diffs.filter(d => d.binary && d.status !== 'deleted')) {
|
||||
if (!files.some(file => file.path === diff.path)) {
|
||||
toDelete.push({ path: diff.path, delete: true });
|
||||
}
|
||||
}
|
||||
@ -637,47 +668,6 @@ export default class API {
|
||||
return pullRequests[0];
|
||||
}
|
||||
|
||||
async retrieveMetadata(contentKey: string) {
|
||||
const { collection, slug } = parseContentKey(contentKey);
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
const pullRequest = await this.getBranchPullRequest(branch);
|
||||
const diff = await this.getDifferences(branch);
|
||||
const { newPath: path, newFile } = diff.find(d => !d.binary) as {
|
||||
newPath: string;
|
||||
newFile: boolean;
|
||||
};
|
||||
// TODO: get real file id
|
||||
const mediaFiles = await Promise.all(
|
||||
diff.filter(d => d.newPath !== path).map(d => ({ path: d.newPath, id: null })),
|
||||
);
|
||||
const label = await this.getPullRequestLabel(pullRequest.id);
|
||||
const status = labelToStatus(label);
|
||||
const timeStamp = pullRequest.updated_on;
|
||||
return { branch, collection, slug, path, status, newFile, mediaFiles, timeStamp };
|
||||
}
|
||||
|
||||
async readUnpublishedBranchFile(contentKey: string) {
|
||||
const {
|
||||
branch,
|
||||
collection,
|
||||
slug,
|
||||
path,
|
||||
status,
|
||||
newFile,
|
||||
mediaFiles,
|
||||
timeStamp,
|
||||
} = await this.retrieveMetadata(contentKey);
|
||||
|
||||
const fileData = (await this.readFile(path, null, { branch })) as string;
|
||||
|
||||
return {
|
||||
slug,
|
||||
metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status, timeStamp },
|
||||
fileData,
|
||||
isModification: !newFile,
|
||||
};
|
||||
}
|
||||
|
||||
async listUnpublishedBranches() {
|
||||
console.log(
|
||||
'%c Checking for Unpublished entries',
|
||||
@ -690,6 +680,26 @@ export default class API {
|
||||
return branches;
|
||||
}
|
||||
|
||||
async retrieveUnpublishedEntryData(contentKey: string) {
|
||||
const { collection, slug } = parseContentKey(contentKey);
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
const pullRequest = await this.getBranchPullRequest(branch);
|
||||
const diffs = await this.getDifferences(branch);
|
||||
const label = await this.getPullRequestLabel(pullRequest.id);
|
||||
const status = labelToStatus(label);
|
||||
const updatedAt = pullRequest.updated_on;
|
||||
return {
|
||||
collection,
|
||||
slug,
|
||||
status,
|
||||
// TODO: get real id
|
||||
diffs: diffs
|
||||
.filter(d => d.status !== 'deleted')
|
||||
.map(d => ({ path: d.path, newFile: d.newFile, id: '' })),
|
||||
updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
||||
const contentKey = generateContentKey(collection, slug);
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
|
@ -23,7 +23,6 @@ import {
|
||||
Config,
|
||||
ImplementationFile,
|
||||
unpublishedEntries,
|
||||
UnpublishedEntryMediaFile,
|
||||
runWithLock,
|
||||
AsyncLock,
|
||||
asyncLock,
|
||||
@ -38,6 +37,7 @@ import {
|
||||
localForage,
|
||||
allEntriesByFolder,
|
||||
AccessTokenError,
|
||||
branchFromContentKey,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
@ -299,7 +299,7 @@ export default class BitbucketBackend implements Implementation {
|
||||
let cursor: Cursor;
|
||||
|
||||
const listFiles = () =>
|
||||
this.api!.listFiles(folder, depth).then(({ entries, cursor: c }) => {
|
||||
this.api!.listFiles(folder, depth, 20, this.branch).then(({ entries, cursor: c }) => {
|
||||
cursor = c.mergeMeta({ extension });
|
||||
return entries.filter(e => filterByExtension(e, extension));
|
||||
});
|
||||
@ -323,7 +323,7 @@ export default class BitbucketBackend implements Implementation {
|
||||
}
|
||||
|
||||
async listAllFiles(folder: string, extension: string, depth: number) {
|
||||
const files = await this.api!.listAllFiles(folder, depth);
|
||||
const files = await this.api!.listAllFiles(folder, depth, this.branch);
|
||||
const filtered = files.filter(file => filterByExtension(file, extension));
|
||||
return filtered;
|
||||
}
|
||||
@ -371,7 +371,7 @@ export default class BitbucketBackend implements Implementation {
|
||||
}
|
||||
|
||||
getMedia(mediaFolder = this.mediaFolder) {
|
||||
return this.api!.listAllFiles(mediaFolder).then(files =>
|
||||
return this.api!.listAllFiles(mediaFolder, 1, this.branch).then(files =>
|
||||
files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
|
||||
);
|
||||
}
|
||||
@ -509,31 +509,26 @@ export default class BitbucketBackend implements Implementation {
|
||||
});
|
||||
}
|
||||
|
||||
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
|
||||
const readFile = (
|
||||
async loadMediaFile(path: string, id: string, { branch }: { branch: string }) {
|
||||
const readFile = async (
|
||||
path: string,
|
||||
id: string | null | undefined,
|
||||
{ parseText }: { parseText: boolean },
|
||||
) => this.api!.readFile(path, id, { branch, parseText });
|
||||
|
||||
return getMediaAsBlob(file.path, null, readFile).then(blob => {
|
||||
const name = basename(file.path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
return {
|
||||
id: file.path,
|
||||
displayURL: URL.createObjectURL(fileObj),
|
||||
path: file.path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
|
||||
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
|
||||
|
||||
return mediaFiles;
|
||||
) => {
|
||||
const content = await this.api!.readFile(path, id, { branch, parseText });
|
||||
return content;
|
||||
};
|
||||
const blob = await getMediaAsBlob(path, id, readFile);
|
||||
const name = basename(path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
return {
|
||||
id: path,
|
||||
displayURL: URL.createObjectURL(fileObj),
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
};
|
||||
}
|
||||
|
||||
async unpublishedEntries() {
|
||||
@ -542,37 +537,47 @@ export default class BitbucketBackend implements Implementation {
|
||||
branches.map(branch => contentKeyFromBranch(branch)),
|
||||
);
|
||||
|
||||
const readUnpublishedBranchFile = (contentKey: string) =>
|
||||
this.api!.readUnpublishedBranchFile(contentKey);
|
||||
|
||||
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
|
||||
const ids = await unpublishedEntries(listEntriesKeys);
|
||||
return ids;
|
||||
}
|
||||
|
||||
async unpublishedEntry(
|
||||
collection: string,
|
||||
slug: string,
|
||||
{
|
||||
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
|
||||
this.loadEntryMediaFiles(branch, files),
|
||||
} = {},
|
||||
) {
|
||||
async unpublishedEntry({
|
||||
id,
|
||||
collection,
|
||||
slug,
|
||||
}: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
}) {
|
||||
if (id) {
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(id);
|
||||
return data;
|
||||
} else if (collection && slug) {
|
||||
const entryId = generateContentKey(collection, slug);
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
|
||||
return data;
|
||||
} else {
|
||||
throw new Error('Missing unpublished entry id or collection and slug');
|
||||
}
|
||||
}
|
||||
|
||||
getBranch(collection: string, slug: string) {
|
||||
const contentKey = generateContentKey(collection, slug);
|
||||
const data = await this.api!.readUnpublishedBranchFile(contentKey);
|
||||
const mediaFiles = await loadEntryMediaFiles(
|
||||
data.metaData.branch,
|
||||
// TODO: fix this
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
|
||||
// @ts-ignore
|
||||
data.metaData.objects.entry.mediaFiles,
|
||||
);
|
||||
return {
|
||||
slug,
|
||||
file: { path: data.metaData.objects.entry.path, id: null },
|
||||
data: data.fileData as string,
|
||||
metaData: data.metaData,
|
||||
mediaFiles,
|
||||
isModification: data.isModification,
|
||||
};
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
return branch;
|
||||
}
|
||||
|
||||
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const data = (await this.api!.readFile(path, id, { branch })) as string;
|
||||
return data;
|
||||
}
|
||||
|
||||
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const mediaFile = await this.loadMediaFile(path, id, { branch });
|
||||
return mediaFile;
|
||||
}
|
||||
|
||||
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
||||
|
@ -18,7 +18,6 @@ import {
|
||||
entriesByFiles,
|
||||
Config,
|
||||
ImplementationFile,
|
||||
UnpublishedEntryMediaFile,
|
||||
parsePointerFile,
|
||||
getLargeMediaPatternsFromGitAttributesFile,
|
||||
getPointerFileForMediaFileObj,
|
||||
@ -394,34 +393,27 @@ export default class GitGateway implements Implementation {
|
||||
return this.backend!.getEntry(path);
|
||||
}
|
||||
|
||||
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
|
||||
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
||||
return this.backend!.unpublishedEntryDataFile(collection, slug, path, id);
|
||||
}
|
||||
|
||||
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
||||
const client = await this.getLargeMediaClient();
|
||||
const backend = this.backend as GitLabBackend | GitHubBackend;
|
||||
if (!client.enabled) {
|
||||
return backend!.loadEntryMediaFiles(branch, files);
|
||||
if (client.enabled && client.matchPath(path)) {
|
||||
const branch = this.backend!.getBranch(collection, slug);
|
||||
const url = await this.getLargeMediaDisplayURL({ path, id }, branch);
|
||||
return {
|
||||
id,
|
||||
name: basename(path),
|
||||
path,
|
||||
url,
|
||||
displayURL: url,
|
||||
file: new File([], name),
|
||||
size: 0,
|
||||
};
|
||||
} else {
|
||||
return this.backend!.unpublishedEntryMediaFile(collection, slug, path, id);
|
||||
}
|
||||
|
||||
const mediaFiles = await Promise.all(
|
||||
files.map(async file => {
|
||||
if (client.matchPath(file.path)) {
|
||||
const { path, id } = file;
|
||||
const url = await this.getLargeMediaDisplayURL({ path, id }, branch);
|
||||
return {
|
||||
id,
|
||||
name: basename(path),
|
||||
path,
|
||||
url,
|
||||
displayURL: url,
|
||||
file: new File([], name),
|
||||
size: 0,
|
||||
};
|
||||
} else {
|
||||
return backend!.loadMediaFile(branch, file);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
return mediaFiles;
|
||||
}
|
||||
|
||||
getMedia(mediaFolder = this.mediaFolder) {
|
||||
@ -597,10 +589,8 @@ export default class GitGateway implements Implementation {
|
||||
unpublishedEntries() {
|
||||
return this.backend!.unpublishedEntries();
|
||||
}
|
||||
unpublishedEntry(collection: string, slug: string) {
|
||||
return this.backend!.unpublishedEntry(collection, slug, {
|
||||
loadEntryMediaFiles: (branch, files) => this.loadEntryMediaFiles(branch, files),
|
||||
});
|
||||
unpublishedEntry({ id, collection, slug }: { id?: string; collection?: string; slug?: string }) {
|
||||
return this.backend!.unpublishedEntry({ id, collection, slug });
|
||||
}
|
||||
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
||||
return this.backend!.updateUnpublishedEntryStatus(collection, slug, newStatus);
|
||||
|
@ -29,6 +29,7 @@ import {
|
||||
ApiRequest,
|
||||
throwOnConflictingBranches,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import { dirname } from 'path';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
|
||||
type GitHubUser = Octokit.UsersGetAuthenticatedResponse;
|
||||
@ -154,6 +155,24 @@ const getTreeFiles = (files: GitHubCompareFiles) => {
|
||||
return treeFiles;
|
||||
};
|
||||
|
||||
type Diff = {
|
||||
path: string;
|
||||
newFile: boolean;
|
||||
sha: string;
|
||||
binary: boolean;
|
||||
};
|
||||
|
||||
const diffFromFile = (diff: Octokit.ReposCompareCommitsResponseFilesItem): Diff => {
|
||||
return {
|
||||
path: diff.filename,
|
||||
newFile: diff.status === 'added',
|
||||
sha: diff.sha,
|
||||
// media files diffs don't have a patch attribute, except svg files
|
||||
// renamed files don't have a patch attribute too
|
||||
binary: (diff.status !== 'renamed' && !diff.patch) || diff.filename.endsWith('.svg'),
|
||||
};
|
||||
};
|
||||
|
||||
let migrationNotified = false;
|
||||
|
||||
export default class API {
|
||||
@ -497,7 +516,9 @@ export default class API {
|
||||
// since the contributor doesn't have access to set labels
|
||||
// a branch without a pr (or a closed pr) means a 'draft' entry
|
||||
// a branch with an opened pr means a 'pending_review' entry
|
||||
const data = await this.getBranch(branch);
|
||||
const data = await this.getBranch(branch).catch(() => {
|
||||
throw new EditorialWorkflowError('content is not under editorial workflow', true);
|
||||
});
|
||||
// since we get all (open and closed) pull requests by branch name, make sure to filter by head sha
|
||||
const pullRequest = pullRequests.filter(pr => pr.head.sha === data.commit.sha)[0];
|
||||
// if no pull request is found for the branch we return a mocked one
|
||||
@ -552,65 +573,22 @@ export default class API {
|
||||
}
|
||||
}
|
||||
|
||||
matchingEntriesFromDiffs(diffs: Octokit.ReposCompareCommitsResponseFilesItem[]) {
|
||||
// media files don't have a patch attribute, except svg files
|
||||
const matchingEntries = diffs
|
||||
.filter(d => d.patch && !d.filename.endsWith('.svg'))
|
||||
.map(f => ({ path: f.filename, newFile: f.status === 'added' }));
|
||||
|
||||
return matchingEntries;
|
||||
}
|
||||
|
||||
async retrieveMetadata(contentKey: string) {
|
||||
async retrieveUnpublishedEntryData(contentKey: string) {
|
||||
const { collection, slug } = this.parseContentKey(contentKey);
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
const pullRequest = await this.getBranchPullRequest(branch);
|
||||
const { files: diffs } = await this.getDifferences(this.branch, pullRequest.head.sha);
|
||||
const matchingEntries = this.matchingEntriesFromDiffs(diffs);
|
||||
let entry = matchingEntries[0];
|
||||
if (matchingEntries.length <= 0) {
|
||||
// this can happen if there is an empty diff for some reason
|
||||
// we traverse the commits history to infer the entry
|
||||
const commits = await this.getPullRequestCommits(pullRequest.number);
|
||||
for (const commit of commits) {
|
||||
const { files: diffs } = await this.getDifferences(this.branch, commit.sha);
|
||||
const matchingEntries = this.matchingEntriesFromDiffs(diffs);
|
||||
entry = matchingEntries[0];
|
||||
if (entry) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!entry) {
|
||||
console.error(
|
||||
'Unable to locate entry from diff',
|
||||
JSON.stringify({ branch, pullRequest, diffs, matchingEntries }),
|
||||
);
|
||||
throw new EditorialWorkflowError('content is not under editorial workflow', true);
|
||||
}
|
||||
} else if (matchingEntries.length > 1) {
|
||||
// this only works for folder collections
|
||||
const entryBySlug = matchingEntries.filter(e => e.path.includes(slug))[0];
|
||||
entry = entryBySlug || entry;
|
||||
if (!entryBySlug) {
|
||||
console.warn(
|
||||
`Expected 1 matching entry from diff, but received '${matchingEntries.length}'. Matched '${entry.path}'`,
|
||||
JSON.stringify({ branch, pullRequest, diffs, matchingEntries }),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const { path, newFile } = entry;
|
||||
|
||||
const mediaFiles = diffs
|
||||
.filter(d => d.filename !== path)
|
||||
.map(({ filename: path, sha: id }) => ({
|
||||
path,
|
||||
id,
|
||||
}));
|
||||
const { files } = await this.getDifferences(this.branch, pullRequest.head.sha);
|
||||
const diffs = files.map(diffFromFile);
|
||||
const label = pullRequest.labels.find(l => isCMSLabel(l.name)) as { name: string };
|
||||
const status = labelToStatus(label.name);
|
||||
const timeStamp = pullRequest.updated_at;
|
||||
return { branch, collection, slug, path, status, newFile, mediaFiles, timeStamp, pullRequest };
|
||||
const updatedAt = pullRequest.updated_at;
|
||||
return {
|
||||
collection,
|
||||
slug,
|
||||
status,
|
||||
diffs: diffs.map(d => ({ path: d.path, newFile: d.newFile, id: d.sha })),
|
||||
updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
async readFile(
|
||||
@ -712,45 +690,6 @@ export default class API {
|
||||
}
|
||||
}
|
||||
|
||||
async readUnpublishedBranchFile(contentKey: string) {
|
||||
try {
|
||||
const {
|
||||
branch,
|
||||
collection,
|
||||
slug,
|
||||
path,
|
||||
status,
|
||||
newFile,
|
||||
mediaFiles,
|
||||
timeStamp,
|
||||
} = await this.retrieveMetadata(contentKey);
|
||||
|
||||
const repoURL = this.useOpenAuthoring
|
||||
? `/repos/${contentKey
|
||||
.split('/')
|
||||
.slice(0, 2)
|
||||
.join('/')}`
|
||||
: this.repoURL;
|
||||
|
||||
const fileData = (await this.readFile(path, null, { branch, repoURL })) as string;
|
||||
|
||||
return {
|
||||
slug,
|
||||
metaData: {
|
||||
branch,
|
||||
collection,
|
||||
objects: { entry: { path, mediaFiles } },
|
||||
status,
|
||||
timeStamp,
|
||||
},
|
||||
fileData,
|
||||
isModification: !newFile,
|
||||
};
|
||||
} catch (e) {
|
||||
throw new EditorialWorkflowError('content is not under editorial workflow', true);
|
||||
}
|
||||
}
|
||||
|
||||
filterOpenAuthoringBranches = async (branch: string) => {
|
||||
try {
|
||||
const pullRequest = await this.getBranchPullRequest(branch);
|
||||
@ -1044,16 +983,17 @@ export default class API {
|
||||
}
|
||||
} else {
|
||||
// Entry is already on editorial review workflow - commit to existing branch
|
||||
const { files: diffs } = await this.getDifferences(
|
||||
const { files: diffFiles } = await this.getDifferences(
|
||||
this.branch,
|
||||
await this.getHeadReference(branch),
|
||||
);
|
||||
|
||||
const diffs = diffFiles.map(diffFromFile);
|
||||
// mark media files to remove
|
||||
const mediaFilesToRemove: { path: string; sha: string | null }[] = [];
|
||||
for (const diff of diffs) {
|
||||
if (!mediaFilesList.some(file => file.path === diff.filename)) {
|
||||
mediaFilesToRemove.push({ path: diff.filename, sha: null });
|
||||
for (const diff of diffs.filter(d => d.binary)) {
|
||||
if (!mediaFilesList.some(file => file.path === diff.path)) {
|
||||
mediaFilesToRemove.push({ path: diff.path, sha: null });
|
||||
}
|
||||
}
|
||||
|
||||
@ -1414,30 +1354,67 @@ export default class API {
|
||||
return Promise.resolve(Base64.encode(str));
|
||||
}
|
||||
|
||||
uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
|
||||
const content = result(item, 'toBase64', partial(this.toBase64, item.raw as string));
|
||||
|
||||
return content.then(contentBase64 =>
|
||||
this.request(`${this.repoURL}/git/blobs`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
content: contentBase64,
|
||||
encoding: 'base64',
|
||||
}),
|
||||
}).then(response => {
|
||||
item.sha = response.sha;
|
||||
return item;
|
||||
}),
|
||||
async uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
|
||||
const contentBase64 = await result(
|
||||
item,
|
||||
'toBase64',
|
||||
partial(this.toBase64, item.raw as string),
|
||||
);
|
||||
const response = await this.request(`${this.repoURL}/git/blobs`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
content: contentBase64,
|
||||
encoding: 'base64',
|
||||
}),
|
||||
});
|
||||
item.sha = response.sha;
|
||||
return item;
|
||||
}
|
||||
|
||||
async updateTree(baseSha: string, files: { path: string; sha: string | null }[]) {
|
||||
const tree: TreeEntry[] = files.map(file => ({
|
||||
path: trimStart(file.path, '/'),
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: file.sha,
|
||||
}));
|
||||
async updateTree(
|
||||
baseSha: string,
|
||||
files: { path: string; sha: string | null; newPath?: string }[],
|
||||
branch = this.branch,
|
||||
) {
|
||||
const toMove: { from: string; to: string; sha: string }[] = [];
|
||||
const tree = files.reduce((acc, file) => {
|
||||
const entry = {
|
||||
path: trimStart(file.path, '/'),
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: file.sha,
|
||||
} as TreeEntry;
|
||||
|
||||
if (file.newPath) {
|
||||
toMove.push({ from: file.path, to: file.newPath, sha: file.sha as string });
|
||||
} else {
|
||||
acc.push(entry);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, [] as TreeEntry[]);
|
||||
|
||||
for (const { from, to, sha } of toMove) {
|
||||
const sourceDir = dirname(from);
|
||||
const destDir = dirname(to);
|
||||
const files = await this.listFiles(sourceDir, { branch, depth: 100 });
|
||||
for (const file of files) {
|
||||
// delete current path
|
||||
tree.push({
|
||||
path: file.path,
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: null,
|
||||
});
|
||||
// create in new path
|
||||
tree.push({
|
||||
path: file.path.replace(sourceDir, destDir),
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: file.path === from ? sha : file.id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const newTree = await this.createTree(baseSha, tree);
|
||||
return { ...newTree, parentSha: baseSha };
|
||||
|
@ -403,6 +403,9 @@ export default class GraphQLAPI extends API {
|
||||
...this.getBranchQuery(branch, this.repoOwner, this.repoName),
|
||||
fetchPolicy: CACHE_FIRST,
|
||||
});
|
||||
if (!data.repository.branch) {
|
||||
throw new APIError('Branch not found', 404, API_NAME);
|
||||
}
|
||||
return data.repository.branch;
|
||||
}
|
||||
|
||||
@ -539,12 +542,9 @@ export default class GraphQLAPI extends API {
|
||||
try {
|
||||
const contentKey = this.generateContentKey(collectionName, slug);
|
||||
const branchName = branchFromContentKey(contentKey);
|
||||
const metadata = await this.retrieveMetadata(contentKey);
|
||||
if (metadata.pullRequest.number !== MOCK_PULL_REQUEST) {
|
||||
const { branch, pullRequest } = await this.getPullRequestAndBranch(
|
||||
branchName,
|
||||
metadata.pullRequest.number,
|
||||
);
|
||||
const pr = await this.getBranchPullRequest(branchName);
|
||||
if (pr.number !== MOCK_PULL_REQUEST) {
|
||||
const { branch, pullRequest } = await this.getPullRequestAndBranch(branchName, pr.number);
|
||||
|
||||
const { data } = await this.mutate({
|
||||
mutation: mutations.closePullRequestAndDeleteBranch,
|
||||
|
@ -132,48 +132,16 @@ describe('github backend implementation', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadEntryMediaFiles', () => {
|
||||
const readFile = jest.fn();
|
||||
const mockAPI = {
|
||||
readFile,
|
||||
};
|
||||
|
||||
it('should return media files from meta data', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const blob = new Blob(['']);
|
||||
readFile.mockResolvedValue(blob);
|
||||
|
||||
const file = new File([blob], name);
|
||||
|
||||
await expect(
|
||||
gitHubImplementation.loadEntryMediaFiles('branch', [
|
||||
{ path: 'static/media/image.png', id: 'sha' },
|
||||
]),
|
||||
).resolves.toEqual([
|
||||
{
|
||||
id: 'sha',
|
||||
displayURL: 'displayURL',
|
||||
path: 'static/media/image.png',
|
||||
name: 'image.png',
|
||||
size: file.size,
|
||||
file,
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('unpublishedEntry', () => {
|
||||
const generateContentKey = jest.fn();
|
||||
const readUnpublishedBranchFile = jest.fn();
|
||||
const retrieveUnpublishedEntryData = jest.fn();
|
||||
|
||||
const mockAPI = {
|
||||
generateContentKey,
|
||||
readUnpublishedBranchFile,
|
||||
retrieveUnpublishedEntryData,
|
||||
};
|
||||
|
||||
it('should return unpublished entry', async () => {
|
||||
it('should return unpublished entry data', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
gitHubImplementation.loadEntryMediaFiles = jest
|
||||
@ -183,37 +151,25 @@ describe('github backend implementation', () => {
|
||||
generateContentKey.mockReturnValue('contentKey');
|
||||
|
||||
const data = {
|
||||
fileData: 'fileData',
|
||||
isModification: true,
|
||||
metaData: {
|
||||
branch: 'branch',
|
||||
objects: {
|
||||
entry: { path: 'entry-path', mediaFiles: [{ path: 'image.png', id: 'sha' }] },
|
||||
},
|
||||
},
|
||||
collection: 'collection',
|
||||
slug: 'slug',
|
||||
status: 'draft',
|
||||
diffs: [],
|
||||
updatedAt: 'updatedAt',
|
||||
};
|
||||
readUnpublishedBranchFile.mockResolvedValue(data);
|
||||
retrieveUnpublishedEntryData.mockResolvedValue(data);
|
||||
|
||||
const collection = 'posts';
|
||||
await expect(gitHubImplementation.unpublishedEntry(collection, 'slug')).resolves.toEqual({
|
||||
slug: 'slug',
|
||||
file: { path: 'entry-path', id: null },
|
||||
data: 'fileData',
|
||||
metaData: data.metaData,
|
||||
mediaFiles: [{ path: 'image.png', id: 'sha' }],
|
||||
isModification: true,
|
||||
});
|
||||
const slug = 'slug';
|
||||
await expect(gitHubImplementation.unpublishedEntry({ collection, slug })).resolves.toEqual(
|
||||
data,
|
||||
);
|
||||
|
||||
expect(generateContentKey).toHaveBeenCalledTimes(1);
|
||||
expect(generateContentKey).toHaveBeenCalledWith('posts', 'slug');
|
||||
|
||||
expect(readUnpublishedBranchFile).toHaveBeenCalledTimes(1);
|
||||
expect(readUnpublishedBranchFile).toHaveBeenCalledWith('contentKey');
|
||||
|
||||
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledTimes(1);
|
||||
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledWith('branch', [
|
||||
{ path: 'image.png', id: 'sha' },
|
||||
]);
|
||||
expect(retrieveUnpublishedEntryData).toHaveBeenCalledTimes(1);
|
||||
expect(retrieveUnpublishedEntryData).toHaveBeenCalledWith('contentKey');
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -29,6 +29,7 @@ import {
|
||||
blobToFileObj,
|
||||
contentKeyFromBranch,
|
||||
unsentRequest,
|
||||
branchFromContentKey,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
@ -546,68 +547,73 @@ export default class GitHub implements Implementation {
|
||||
};
|
||||
}
|
||||
|
||||
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
|
||||
async loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
|
||||
const readFile = (
|
||||
path: string,
|
||||
id: string | null | undefined,
|
||||
{ parseText }: { parseText: boolean },
|
||||
) => this.api!.readFile(path, id, { branch, parseText });
|
||||
|
||||
return getMediaAsBlob(file.path, file.id, readFile).then(blob => {
|
||||
const name = basename(file.path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
return {
|
||||
id: file.id,
|
||||
displayURL: URL.createObjectURL(fileObj),
|
||||
path: file.path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
};
|
||||
});
|
||||
const blob = await getMediaAsBlob(file.path, file.id, readFile);
|
||||
const name = basename(file.path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
return {
|
||||
id: file.id,
|
||||
displayURL: URL.createObjectURL(fileObj),
|
||||
path: file.path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
};
|
||||
}
|
||||
|
||||
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
|
||||
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
|
||||
|
||||
return mediaFiles;
|
||||
}
|
||||
|
||||
unpublishedEntries() {
|
||||
async unpublishedEntries() {
|
||||
const listEntriesKeys = () =>
|
||||
this.api!.listUnpublishedBranches().then(branches =>
|
||||
branches.map(branch => contentKeyFromBranch(branch)),
|
||||
);
|
||||
|
||||
const readUnpublishedBranchFile = (contentKey: string) =>
|
||||
this.api!.readUnpublishedBranchFile(contentKey);
|
||||
|
||||
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, 'GitHub');
|
||||
const ids = await unpublishedEntries(listEntriesKeys);
|
||||
return ids;
|
||||
}
|
||||
|
||||
async unpublishedEntry(
|
||||
collection: string,
|
||||
slug: string,
|
||||
{
|
||||
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
|
||||
this.loadEntryMediaFiles(branch, files),
|
||||
} = {},
|
||||
) {
|
||||
async unpublishedEntry({
|
||||
id,
|
||||
collection,
|
||||
slug,
|
||||
}: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
}) {
|
||||
if (id) {
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(id);
|
||||
return data;
|
||||
} else if (collection && slug) {
|
||||
const entryId = this.api!.generateContentKey(collection, slug);
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
|
||||
return data;
|
||||
} else {
|
||||
throw new Error('Missing unpublished entry id or collection and slug');
|
||||
}
|
||||
}
|
||||
|
||||
getBranch(collection: string, slug: string) {
|
||||
const contentKey = this.api!.generateContentKey(collection, slug);
|
||||
const data = await this.api!.readUnpublishedBranchFile(contentKey);
|
||||
const files = data.metaData.objects.entry.mediaFiles || [];
|
||||
const mediaFiles = await loadEntryMediaFiles(
|
||||
data.metaData.branch,
|
||||
files.map(({ id, path }) => ({ id, path })),
|
||||
);
|
||||
return {
|
||||
slug,
|
||||
file: { path: data.metaData.objects.entry.path, id: null },
|
||||
data: data.fileData as string,
|
||||
metaData: data.metaData,
|
||||
mediaFiles,
|
||||
isModification: data.isModification,
|
||||
};
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
return branch;
|
||||
}
|
||||
|
||||
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const data = (await this.api!.readFile(path, id, { branch })) as string;
|
||||
return data;
|
||||
}
|
||||
|
||||
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const mediaFile = await this.loadMediaFile(branch, { path, id });
|
||||
return mediaFile;
|
||||
}
|
||||
|
||||
async getDeployPreview(collection: string, slug: string) {
|
||||
|
@ -30,6 +30,7 @@ import {
|
||||
import { Base64 } from 'js-base64';
|
||||
import { Map } from 'immutable';
|
||||
import { flow, partial, result, trimStart } from 'lodash';
|
||||
import { dirname } from 'path';
|
||||
|
||||
export const API_NAME = 'GitLab';
|
||||
|
||||
@ -57,6 +58,7 @@ enum CommitAction {
|
||||
type CommitItem = {
|
||||
base64Content?: string;
|
||||
path: string;
|
||||
oldPath?: string;
|
||||
action: CommitAction;
|
||||
};
|
||||
|
||||
@ -68,6 +70,7 @@ interface CommitsParams {
|
||||
actions?: {
|
||||
action: string;
|
||||
file_path: string;
|
||||
previous_path?: string;
|
||||
content?: string;
|
||||
encoding?: string;
|
||||
}[];
|
||||
@ -386,14 +389,14 @@ export default class API {
|
||||
};
|
||||
};
|
||||
|
||||
listAllFiles = async (path: string, recursive = false) => {
|
||||
listAllFiles = async (path: string, recursive = false, branch = this.branch) => {
|
||||
const entries = [];
|
||||
// eslint-disable-next-line prefer-const
|
||||
let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
// Get the maximum number of entries per page
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
params: { path, ref: this.branch, per_page: 100, recursive },
|
||||
params: { path, ref: branch, per_page: 100, recursive },
|
||||
});
|
||||
entries.push(...initialEntries);
|
||||
while (cursor && cursor.actions!.has('next')) {
|
||||
@ -423,7 +426,11 @@ export default class API {
|
||||
action: item.action,
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
file_path: item.path,
|
||||
...(item.base64Content ? { content: item.base64Content, encoding: 'base64' } : {}),
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
...(item.oldPath ? { previous_path: item.oldPath } : {}),
|
||||
...(item.base64Content !== undefined
|
||||
? { content: item.base64Content, encoding: 'base64' }
|
||||
: {}),
|
||||
}));
|
||||
|
||||
const commitParams: CommitsParams = {
|
||||
@ -459,21 +466,49 @@ export default class API {
|
||||
}
|
||||
}
|
||||
|
||||
async getCommitItems(files: (Entry | AssetProxy)[], branch: string) {
|
||||
const items = await Promise.all(
|
||||
async getCommitItems(files: { path: string; newPath?: string }[], branch: string) {
|
||||
const items: CommitItem[] = await Promise.all(
|
||||
files.map(async file => {
|
||||
const [base64Content, fileExists] = await Promise.all([
|
||||
result(file, 'toBase64', partial(this.toBase64, (file as Entry).raw)),
|
||||
this.isFileExists(file.path, branch),
|
||||
]);
|
||||
|
||||
let action = CommitAction.CREATE;
|
||||
let path = trimStart(file.path, '/');
|
||||
let oldPath = undefined;
|
||||
if (fileExists) {
|
||||
action = file.newPath ? CommitAction.MOVE : CommitAction.UPDATE;
|
||||
oldPath = file.newPath && path;
|
||||
path = file.newPath ? trimStart(file.newPath, '/') : path;
|
||||
}
|
||||
|
||||
return {
|
||||
action: fileExists ? CommitAction.UPDATE : CommitAction.CREATE,
|
||||
action,
|
||||
base64Content,
|
||||
path: trimStart(file.path, '/'),
|
||||
path,
|
||||
oldPath,
|
||||
};
|
||||
}),
|
||||
);
|
||||
return items as CommitItem[];
|
||||
|
||||
// move children
|
||||
for (const item of items.filter(i => i.oldPath && i.action === CommitAction.MOVE)) {
|
||||
const sourceDir = dirname(item.oldPath as string);
|
||||
const destDir = dirname(item.path);
|
||||
const children = await this.listAllFiles(sourceDir, true, branch);
|
||||
children
|
||||
.filter(f => f.path !== item.oldPath)
|
||||
.forEach(file => {
|
||||
items.push({
|
||||
action: CommitAction.MOVE,
|
||||
path: file.path.replace(sourceDir, destDir),
|
||||
oldPath: file.path,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
|
||||
@ -604,54 +639,33 @@ export default class API {
|
||||
oldPath: d.old_path,
|
||||
newPath: d.new_path,
|
||||
newFile: d.new_file,
|
||||
path: d.new_path || d.old_path,
|
||||
binary: d.diff.startsWith('Binary') || /.svg$/.test(d.new_path),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
async retrieveMetadata(contentKey: string) {
|
||||
async retrieveUnpublishedEntryData(contentKey: string) {
|
||||
const { collection, slug } = parseContentKey(contentKey);
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
const mergeRequest = await this.getBranchMergeRequest(branch);
|
||||
const diff = await this.getDifferences(mergeRequest.sha);
|
||||
const { oldPath: path, newFile: newFile } = diff.find(d => !d.binary) as {
|
||||
oldPath: string;
|
||||
newFile: boolean;
|
||||
};
|
||||
const mediaFiles = await Promise.all(
|
||||
diff
|
||||
.filter(d => d.oldPath !== path)
|
||||
.map(async d => {
|
||||
const path = d.newPath;
|
||||
const id = await this.getFileId(path, branch);
|
||||
return { path, id };
|
||||
}),
|
||||
const diffs = await this.getDifferences(mergeRequest.sha);
|
||||
const diffsWithIds = await Promise.all(
|
||||
diffs.map(async d => {
|
||||
const { path, newFile } = d;
|
||||
const id = await this.getFileId(path, branch);
|
||||
return { id, path, newFile };
|
||||
}),
|
||||
);
|
||||
const label = mergeRequest.labels.find(isCMSLabel) as string;
|
||||
const status = labelToStatus(label);
|
||||
const timeStamp = mergeRequest.updated_at;
|
||||
return { branch, collection, slug, path, status, newFile, mediaFiles, timeStamp };
|
||||
}
|
||||
|
||||
async readUnpublishedBranchFile(contentKey: string) {
|
||||
const {
|
||||
branch,
|
||||
const updatedAt = mergeRequest.updated_at;
|
||||
return {
|
||||
collection,
|
||||
slug,
|
||||
path,
|
||||
status,
|
||||
newFile,
|
||||
mediaFiles,
|
||||
timeStamp,
|
||||
} = await this.retrieveMetadata(contentKey);
|
||||
|
||||
const fileData = (await this.readFile(path, null, { branch })) as string;
|
||||
|
||||
return {
|
||||
slug,
|
||||
metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status, timeStamp },
|
||||
fileData,
|
||||
isModification: !newFile,
|
||||
diffs: diffsWithIds,
|
||||
updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
@ -726,10 +740,9 @@ export default class API {
|
||||
this.getCommitItems(files, branch),
|
||||
this.getDifferences(branch),
|
||||
]);
|
||||
|
||||
// mark files for deletion
|
||||
for (const diff of diffs) {
|
||||
if (!items.some(item => item.path === diff.newPath)) {
|
||||
for (const diff of diffs.filter(d => d.binary)) {
|
||||
if (!items.some(item => item.path === diff.path)) {
|
||||
items.push({ action: CommitAction.DELETE, path: diff.newPath });
|
||||
}
|
||||
}
|
||||
|
@ -32,6 +32,7 @@ import {
|
||||
localForage,
|
||||
allEntriesByFolder,
|
||||
filterByExtension,
|
||||
branchFromContentKey,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import API, { API_NAME } from './API';
|
||||
@ -351,34 +352,47 @@ export default class GitLab implements Implementation {
|
||||
branches.map(branch => contentKeyFromBranch(branch)),
|
||||
);
|
||||
|
||||
const readUnpublishedBranchFile = (contentKey: string) =>
|
||||
this.api!.readUnpublishedBranchFile(contentKey);
|
||||
|
||||
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
|
||||
const ids = await unpublishedEntries(listEntriesKeys);
|
||||
return ids;
|
||||
}
|
||||
|
||||
async unpublishedEntry(
|
||||
collection: string,
|
||||
slug: string,
|
||||
{
|
||||
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
|
||||
this.loadEntryMediaFiles(branch, files),
|
||||
} = {},
|
||||
) {
|
||||
async unpublishedEntry({
|
||||
id,
|
||||
collection,
|
||||
slug,
|
||||
}: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
}) {
|
||||
if (id) {
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(id);
|
||||
return data;
|
||||
} else if (collection && slug) {
|
||||
const entryId = generateContentKey(collection, slug);
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
|
||||
return data;
|
||||
} else {
|
||||
throw new Error('Missing unpublished entry id or collection and slug');
|
||||
}
|
||||
}
|
||||
|
||||
getBranch(collection: string, slug: string) {
|
||||
const contentKey = generateContentKey(collection, slug);
|
||||
const data = await this.api!.readUnpublishedBranchFile(contentKey);
|
||||
const mediaFiles = await loadEntryMediaFiles(
|
||||
data.metaData.branch,
|
||||
data.metaData.objects.entry.mediaFiles,
|
||||
);
|
||||
return {
|
||||
slug,
|
||||
file: { path: data.metaData.objects.entry.path, id: null },
|
||||
data: data.fileData as string,
|
||||
metaData: data.metaData,
|
||||
mediaFiles,
|
||||
isModification: data.isModification,
|
||||
};
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
return branch;
|
||||
}
|
||||
|
||||
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const data = (await this.api!.readFile(path, id, { branch })) as string;
|
||||
return data;
|
||||
}
|
||||
|
||||
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const mediaFile = await this.loadMediaFile(branch, { path, id });
|
||||
return mediaFile;
|
||||
}
|
||||
|
||||
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
||||
|
@ -9,6 +9,7 @@ import {
|
||||
EditorialWorkflowError,
|
||||
APIError,
|
||||
unsentRequest,
|
||||
UnpublishedEntry,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
@ -131,15 +132,22 @@ export default class ProxyBackend implements Implementation {
|
||||
});
|
||||
}
|
||||
|
||||
async unpublishedEntry(collection: string, slug: string) {
|
||||
async unpublishedEntry({
|
||||
id,
|
||||
collection,
|
||||
slug,
|
||||
}: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
}) {
|
||||
try {
|
||||
const entry = await this.request({
|
||||
const entry: UnpublishedEntry = await this.request({
|
||||
action: 'unpublishedEntry',
|
||||
params: { branch: this.branch, collection, slug },
|
||||
params: { branch: this.branch, id, collection, slug },
|
||||
});
|
||||
|
||||
const mediaFiles = entry.mediaFiles.map(deserializeMediaFile);
|
||||
return { ...entry, mediaFiles };
|
||||
return entry;
|
||||
} catch (e) {
|
||||
if (e.status === 404) {
|
||||
throw new EditorialWorkflowError('content is not under editorial workflow', true);
|
||||
@ -148,6 +156,22 @@ export default class ProxyBackend implements Implementation {
|
||||
}
|
||||
}
|
||||
|
||||
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
||||
const { data } = await this.request({
|
||||
action: 'unpublishedEntryDataFile',
|
||||
params: { branch: this.branch, collection, slug, path, id },
|
||||
});
|
||||
return data;
|
||||
}
|
||||
|
||||
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
||||
const file = await this.request({
|
||||
action: 'unpublishedEntryMediaFile',
|
||||
params: { branch: this.branch, collection, slug, path, id },
|
||||
});
|
||||
return deserializeMediaFile(file);
|
||||
}
|
||||
|
||||
deleteUnpublishedEntry(collection: string, slug: string) {
|
||||
return this.request({
|
||||
action: 'deleteUnpublishedEntry',
|
||||
|
@ -1,4 +1,4 @@
|
||||
import TestBackend, { getFolderEntries } from '../implementation';
|
||||
import TestBackend, { getFolderFiles } from '../implementation';
|
||||
|
||||
describe('test backend implementation', () => {
|
||||
beforeEach(() => {
|
||||
@ -15,7 +15,7 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
await expect(backend.getEntry('posts/some-post.md')).resolves.toEqual({
|
||||
file: { path: 'posts/some-post.md', id: null },
|
||||
@ -36,7 +36,7 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
await expect(backend.getEntry('posts/dir1/dir2/some-post.md')).resolves.toEqual({
|
||||
file: { path: 'posts/dir1/dir2/some-post.md', id: null },
|
||||
@ -49,7 +49,7 @@ describe('test backend implementation', () => {
|
||||
it('should persist entry', async () => {
|
||||
window.repoFiles = {};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
const entry = { path: 'posts/some-post.md', raw: 'content', slug: 'some-post.md' };
|
||||
await backend.persistEntry(entry, [], { newEntry: true });
|
||||
@ -58,6 +58,7 @@ describe('test backend implementation', () => {
|
||||
posts: {
|
||||
'some-post.md': {
|
||||
content: 'content',
|
||||
path: 'posts/some-post.md',
|
||||
},
|
||||
},
|
||||
});
|
||||
@ -77,7 +78,7 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
const entry = { path: 'posts/new-post.md', raw: 'content', slug: 'new-post.md' };
|
||||
await backend.persistEntry(entry, [], { newEntry: true });
|
||||
@ -91,6 +92,7 @@ describe('test backend implementation', () => {
|
||||
posts: {
|
||||
'new-post.md': {
|
||||
content: 'content',
|
||||
path: 'posts/new-post.md',
|
||||
},
|
||||
'other-post.md': {
|
||||
content: 'content',
|
||||
@ -102,7 +104,7 @@ describe('test backend implementation', () => {
|
||||
it('should persist nested entry', async () => {
|
||||
window.repoFiles = {};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
const slug = 'dir1/dir2/some-post.md';
|
||||
const path = `posts/${slug}`;
|
||||
@ -115,6 +117,7 @@ describe('test backend implementation', () => {
|
||||
dir2: {
|
||||
'some-post.md': {
|
||||
content: 'content',
|
||||
path: 'posts/dir1/dir2/some-post.md',
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -136,7 +139,7 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
const slug = 'dir1/dir2/some-post.md';
|
||||
const path = `posts/${slug}`;
|
||||
@ -148,7 +151,7 @@ describe('test backend implementation', () => {
|
||||
dir1: {
|
||||
dir2: {
|
||||
'some-post.md': {
|
||||
mediaFiles: ['file1'],
|
||||
path: 'posts/dir1/dir2/some-post.md',
|
||||
content: 'new content',
|
||||
},
|
||||
},
|
||||
@ -168,7 +171,7 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
await backend.deleteFile('posts/some-post.md');
|
||||
expect(window.repoFiles).toEqual({
|
||||
@ -189,7 +192,7 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
const backend = new TestBackend();
|
||||
const backend = new TestBackend({});
|
||||
|
||||
await backend.deleteFile('posts/dir1/dir2/some-post.md');
|
||||
expect(window.repoFiles).toEqual({
|
||||
@ -202,7 +205,7 @@ describe('test backend implementation', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFolderEntries', () => {
|
||||
describe('getFolderFiles', () => {
|
||||
it('should get files by depth', () => {
|
||||
const tree = {
|
||||
pages: {
|
||||
@ -222,34 +225,34 @@ describe('test backend implementation', () => {
|
||||
},
|
||||
};
|
||||
|
||||
expect(getFolderEntries(tree, 'pages', 'md', 1)).toEqual([
|
||||
expect(getFolderFiles(tree, 'pages', 'md', 1)).toEqual([
|
||||
{
|
||||
file: { path: 'pages/root-page.md', id: null },
|
||||
data: 'root page content',
|
||||
path: 'pages/root-page.md',
|
||||
content: 'root page content',
|
||||
},
|
||||
]);
|
||||
expect(getFolderEntries(tree, 'pages', 'md', 2)).toEqual([
|
||||
expect(getFolderFiles(tree, 'pages', 'md', 2)).toEqual([
|
||||
{
|
||||
file: { path: 'pages/dir1/nested-page-1.md', id: null },
|
||||
data: 'nested page 1 content',
|
||||
path: 'pages/dir1/nested-page-1.md',
|
||||
content: 'nested page 1 content',
|
||||
},
|
||||
{
|
||||
file: { path: 'pages/root-page.md', id: null },
|
||||
data: 'root page content',
|
||||
path: 'pages/root-page.md',
|
||||
content: 'root page content',
|
||||
},
|
||||
]);
|
||||
expect(getFolderEntries(tree, 'pages', 'md', 3)).toEqual([
|
||||
expect(getFolderFiles(tree, 'pages', 'md', 3)).toEqual([
|
||||
{
|
||||
file: { path: 'pages/dir1/dir2/nested-page-2.md', id: null },
|
||||
data: 'nested page 2 content',
|
||||
path: 'pages/dir1/dir2/nested-page-2.md',
|
||||
content: 'nested page 2 content',
|
||||
},
|
||||
{
|
||||
file: { path: 'pages/dir1/nested-page-1.md', id: null },
|
||||
data: 'nested page 1 content',
|
||||
path: 'pages/dir1/nested-page-1.md',
|
||||
content: 'nested page 1 content',
|
||||
},
|
||||
{
|
||||
file: { path: 'pages/root-page.md', id: null },
|
||||
data: 'root page content',
|
||||
path: 'pages/root-page.md',
|
||||
content: 'root page content',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
@ -10,35 +10,65 @@ import {
|
||||
ImplementationEntry,
|
||||
AssetProxy,
|
||||
PersistOptions,
|
||||
ImplementationMediaFile,
|
||||
User,
|
||||
Config,
|
||||
ImplementationFile,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import { extname, dirname } from 'path';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
type RepoFile = { file?: { path: string }; content: string };
|
||||
type RepoFile = { path: string; content: string | AssetProxy };
|
||||
type RepoTree = { [key: string]: RepoFile | RepoTree };
|
||||
|
||||
type UnpublishedRepoEntry = {
|
||||
slug: string;
|
||||
collection: string;
|
||||
status: string;
|
||||
diffs: {
|
||||
id: string;
|
||||
originalPath?: string;
path: string;
newFile: boolean;
status: string;
content: string | AssetProxy;
}[];
updatedAt: string;
};

declare global {
interface Window {
repoFiles: RepoTree;
repoFilesUnpublished: ImplementationEntry[];
repoFilesUnpublished: { [key: string]: UnpublishedRepoEntry };
}
}

window.repoFiles = window.repoFiles || {};
window.repoFilesUnpublished = window.repoFilesUnpublished || [];

function getFile(path: string) {
function getFile(path: string, tree: RepoTree) {
const segments = path.split('/');
let obj: RepoTree = window.repoFiles;
let obj: RepoTree = tree;
while (obj && segments.length) {
obj = obj[segments.shift() as string] as RepoTree;
}
return ((obj as unknown) as RepoFile) || {};
}

function writeFile(path: string, content: string | AssetProxy, tree: RepoTree) {
const segments = path.split('/');
let obj = tree;
while (segments.length > 1) {
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment] as RepoTree;
}
(obj[segments.shift() as string] as RepoFile) = { content, path };
}

function deleteFile(path: string, tree: RepoTree) {
unset(tree, path.split('/'));
}

const pageSize = 10;

const getCursor = (
@ -60,12 +90,12 @@ const getCursor = (
});
};

export const getFolderEntries = (
export const getFolderFiles = (
tree: RepoTree,
folder: string,
extension: string,
depth: number,
files = [] as ImplementationEntry[],
files = [] as RepoFile[],
path = folder,
) => {
if (depth <= 0) {
@ -73,15 +103,14 @@ export const getFolderEntries = (
}

Object.keys(tree[folder] || {}).forEach(key => {
if (key.endsWith(`.${extension}`)) {
if (extname(key)) {
const file = (tree[folder] as RepoTree)[key] as RepoFile;
files.unshift({
file: { path: `${path}/${key}`, id: null },
data: file.content,
});
if (!extension || key.endsWith(`.${extension}`)) {
files.unshift({ content: file.content, path: `${path}/${key}` });
}
} else {
const subTree = tree[folder] as RepoTree;
return getFolderEntries(subTree, key, extension, depth - 1, files, `${path}/${key}`);
return getFolderFiles(subTree, key, extension, depth - 1, files, `${path}/${key}`);
}
});

@ -89,12 +118,12 @@ export const getFolderEntries = (
};

export default class TestBackend implements Implementation {
assets: ImplementationMediaFile[];
mediaFolder: string;
options: { initialWorkflowStatus?: string };

constructor(_config: Config, options = {}) {
this.assets = [];
constructor(config: Config, options = {}) {
this.options = options;
this.mediaFolder = config.media_folder;
}

isGitBackend() {
@ -149,14 +178,22 @@ export default class TestBackend implements Implementation {
return 0;
})();
// TODO: stop assuming cursors are for collections
const allEntries = getFolderEntries(window.repoFiles, folder, extension, depth);
const allFiles = getFolderFiles(window.repoFiles, folder, extension, depth);
const allEntries = allFiles.map(f => ({
data: f.content as string,
file: { path: f.path, id: f.path },
}));
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
const newCursor = getCursor(folder, extension, allEntries, newIndex, depth);
return Promise.resolve({ entries, cursor: newCursor });
}

entriesByFolder(folder: string, extension: string, depth: number) {
const entries = folder ? getFolderEntries(window.repoFiles, folder, extension, depth) : [];
const files = folder ? getFolderFiles(window.repoFiles, folder, extension, depth) : [];
const entries = files.map(f => ({
data: f.content as string,
file: { path: f.path, id: f.path },
}));
const cursor = getCursor(folder, extension, entries, 0, depth);
const ret = take(entries, pageSize);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
@ -169,7 +206,7 @@ export default class TestBackend implements Implementation {
return Promise.all(
files.map(file => ({
file,
data: getFile(file.path).content,
data: getFile(file.path, window.repoFiles).content as string,
})),
);
}
@ -177,133 +214,160 @@ export default class TestBackend implements Implementation {
getEntry(path: string) {
return Promise.resolve({
file: { path, id: null },
data: getFile(path).content,
data: getFile(path, window.repoFiles).content as string,
});
}

unpublishedEntries() {
return Promise.resolve(window.repoFilesUnpublished);
return Promise.resolve(Object.keys(window.repoFilesUnpublished));
}

getMediaFiles(entry: ImplementationEntry) {
const mediaFiles = entry.mediaFiles!.map(file => ({
...file,
...this.normalizeAsset(file),
file: file.file as File,
}));
return mediaFiles;
}

unpublishedEntry(collection: string, slug: string) {
const entry = window.repoFilesUnpublished.find(
e => e.metaData!.collection === collection && e.slug === slug,
);
unpublishedEntry({ id, collection, slug }: { id?: string; collection?: string; slug?: string }) {
if (id) {
const parts = id.split('/');
collection = parts[0];
slug = parts[1];
}
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
if (!entry) {
return Promise.reject(
new EditorialWorkflowError('content is not under editorial workflow', true),
);
}
entry.mediaFiles = this.getMediaFiles(entry);

return Promise.resolve(entry);
}

async unpublishedEntryDataFile(collection: string, slug: string, path: string) {
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
const file = entry.diffs.find(d => d.path === path);
return file?.content as string;
}

async unpublishedEntryMediaFile(collection: string, slug: string, path: string) {
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
const file = entry.diffs.find(d => d.path === path);
return this.normalizeAsset(file?.content as AssetProxy);
}

deleteUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore.splice(existingEntryIndex, 1);
delete window.repoFilesUnpublished[`${collection}/${slug}`];
return Promise.resolve();
}

async addOrUpdateUnpublishedEntry(
key: string,
path: string,
newPath: string | undefined,
raw: string,
assetProxies: AssetProxy[],
slug: string,
collection: string,
status: string,
) {
const currentDataFile = window.repoFilesUnpublished[key]?.diffs.find(d => d.path === path);
const originalPath = currentDataFile ? currentDataFile.originalPath : path;
const diffs = [];
diffs.push({
originalPath,
id: newPath || path,
path: newPath || path,
newFile: isEmpty(getFile(originalPath as string, window.repoFiles)),
status: 'added',
content: raw,
});
assetProxies.forEach(a => {
const asset = this.normalizeAsset(a);
diffs.push({
id: asset.id,
path: asset.path,
newFile: true,
status: 'added',
content: asset,
});
});
window.repoFilesUnpublished[key] = {
slug,
collection,
status,
diffs,
updatedAt: new Date().toISOString(),
};
}

async persistEntry(
{ path, raw, slug }: Entry,
{ path, raw, slug, newPath }: Entry,
assetProxies: AssetProxy[],
options: PersistOptions,
) {
if (options.useWorkflow) {
const unpubStore = window.repoFilesUnpublished;

const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = {
...unpubStore[existingEntryIndex],
data: raw,
mediaFiles: assetProxies.map(this.normalizeAsset),
};

unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
const unpubEntry = {
data: raw,
file: {
path,
id: null,
},
metaData: {
collection: options.collectionName as string,
status: (options.status || this.options.initialWorkflowStatus) as string,
},
slug,
mediaFiles: assetProxies.map(this.normalizeAsset),
isModification: !isEmpty(getFile(path)),
};
unpubStore.push(unpubEntry);
}
const key = `${options.collectionName}/${slug}`;
const currentEntry = window.repoFilesUnpublished[key];
const status =
currentEntry?.status || options.status || (this.options.initialWorkflowStatus as string);
this.addOrUpdateUnpublishedEntry(
key,
path,
newPath,
raw,
assetProxies,
slug,
options.collectionName as string,
status,
);
return Promise.resolve();
}

const newEntry = options.newEntry || false;

const segments = path.split('/');
const entry = newEntry ? { content: raw } : { ...getFile(path), content: raw };

let obj = window.repoFiles;
while (segments.length > 1) {
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment] as RepoTree;
}
(obj[segments.shift() as string] as RepoFile) = entry;

await Promise.all(assetProxies.map(file => this.persistMedia(file)));
writeFile(path, raw, window.repoFiles);
assetProxies.forEach(a => {
writeFile(a.path, raw, window.repoFiles);
});
return Promise.resolve();
}

updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const unpubStore = window.repoFilesUnpublished;
const entryIndex = unpubStore.findIndex(
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore[entryIndex]!.metaData!.status = newStatus;
window.repoFilesUnpublished[`${collection}/${slug}`].status = newStatus;
return Promise.resolve();
}

async publishUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(
e => e.metaData!.collection === collection && e.slug === slug,
);
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = {
raw: unpubEntry.data,
slug: unpubEntry.slug as string,
path: unpubEntry.file.path,
};
unpubStore.splice(unpubEntryIndex, 1);
publishUnpublishedEntry(collection: string, slug: string) {
const key = `${collection}/${slug}`;
const unpubEntry = window.repoFilesUnpublished[key];

await this.persistEntry(entry, unpubEntry.mediaFiles!, { commitMessage: '' });
delete window.repoFilesUnpublished[key];

const tree = window.repoFiles;
unpubEntry.diffs.forEach(d => {
if (d.originalPath && !d.newFile) {
const originalPath = d.originalPath;
const sourceDir = dirname(originalPath);
const destDir = dirname(d.path);
const toMove = getFolderFiles(tree, originalPath.split('/')[0], '', 100).filter(f =>
f.path.startsWith(sourceDir),
);
toMove.forEach(f => {
deleteFile(f.path, tree);
writeFile(f.path.replace(sourceDir, destDir), f.content, tree);
});
}
writeFile(d.path, d.content, tree);
});

return Promise.resolve();
}

getMedia() {
return Promise.resolve(this.assets);
getMedia(mediaFolder = this.mediaFolder) {
const files = getFolderFiles(window.repoFiles, mediaFolder.split('/')[0], '', 100).filter(f =>
f.path.startsWith(mediaFolder),
);
const assets = files.map(f => this.normalizeAsset(f.content as AssetProxy));
return Promise.resolve(assets);
}

async getMediaFile(path: string) {
const asset = this.assets.find(asset => asset.path === path) as ImplementationMediaFile;
const asset = getFile(path, window.repoFiles).content as AssetProxy;

const url = asset.url as string;
const url = asset.toString();
const name = basename(path);
const blob = await fetch(url).then(res => res.blob());
const fileObj = new File([blob], name);
@ -340,18 +404,13 @@ export default class TestBackend implements Implementation {
persistMedia(assetProxy: AssetProxy) {
const normalizedAsset = this.normalizeAsset(assetProxy);

this.assets.push(normalizedAsset);
writeFile(assetProxy.path, assetProxy, window.repoFiles);

return Promise.resolve(normalizedAsset);
}

deleteFile(path: string) {
const assetIndex = this.assets.findIndex(asset => asset.path === path);
if (assetIndex > -1) {
this.assets.splice(assetIndex, 1);
} else {
unset(window.repoFiles, path.split('/'));
}
deleteFile(path, window.repoFiles);

return Promise.resolve();
}
@ -5,7 +5,6 @@ import { Map, List, fromJS } from 'immutable';

jest.mock('Lib/registry');
jest.mock('netlify-cms-lib-util');
jest.mock('Formats/formats');
jest.mock('../lib/urlHelper');

describe('Backend', () => {
@ -179,7 +178,7 @@ describe('Backend', () => {
const slug = 'slug';

localForage.getItem.mockReturnValue({
raw: 'content',
raw: '---\ntitle: "Hello World"\n---\n',
});

const result = await backend.getLocalDraftBackup(collection, slug);
@ -192,11 +191,12 @@ describe('Backend', () => {
slug: 'slug',
path: '',
partial: false,
raw: 'content',
data: {},
raw: '---\ntitle: "Hello World"\n---\n',
data: { title: 'Hello World' },
meta: {},
label: null,
metaData: null,
isModification: null,
status: '',
updatedOn: '',
},
});
@ -218,7 +218,7 @@ describe('Backend', () => {
const slug = 'slug';

localForage.getItem.mockReturnValue({
raw: 'content',
raw: '---\ntitle: "Hello World"\n---\n',
mediaFiles: [{ id: '1' }],
});

@ -232,11 +232,12 @@ describe('Backend', () => {
slug: 'slug',
path: '',
partial: false,
raw: 'content',
data: {},
raw: '---\ntitle: "Hello World"\n---\n',
data: { title: 'Hello World' },
meta: {},
label: null,
metaData: null,
isModification: null,
status: '',
updatedOn: '',
},
});
@ -343,22 +344,24 @@ describe('Backend', () => {
describe('unpublishedEntry', () => {
it('should return unpublished entry', async () => {
const unpublishedEntryResult = {
file: { path: 'path' },
isModification: true,
metaData: {},
mediaFiles: [{ id: '1' }],
data: 'content',
diffs: [{ path: 'src/posts/index.md', newFile: false }, { path: 'netlify.png' }],
};
const implementation = {
init: jest.fn(() => implementation),
unpublishedEntry: jest.fn().mockResolvedValue(unpublishedEntryResult),
unpublishedEntryDataFile: jest
.fn()
.mockResolvedValueOnce('---\ntitle: "Hello World"\n---\n'),
unpublishedEntryMediaFile: jest.fn().mockResolvedValueOnce({ id: '1' }),
};
const config = Map({ media_folder: 'static/images' });

const backend = new Backend(implementation, { config, backendName: 'github' });

const collection = Map({
const collection = fromJS({
name: 'posts',
folder: 'src/posts',
fields: [],
});

const state = {
@ -374,14 +377,15 @@ describe('Backend', () => {
author: '',
collection: 'posts',
slug: '',
path: 'path',
path: 'src/posts/index.md',
partial: false,
raw: 'content',
data: {},
raw: '---\ntitle: "Hello World"\n---\n',
data: { title: 'Hello World' },
meta: { path: 'src/posts/index.md' },
label: null,
metaData: {},
isModification: true,
mediaFiles: [{ id: '1', draft: true }],
status: '',
updatedOn: '',
});
});
@ -5,6 +5,7 @@ import {
retrieveLocalBackup,
persistLocalBackup,
getMediaAssets,
validateMetaField,
} from '../entries';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
@ -13,6 +14,8 @@ import AssetProxy from '../../valueObjects/AssetProxy';
jest.mock('coreSrc/backend');
jest.mock('netlify-cms-lib-util');
jest.mock('../mediaLibrary');
jest.mock('../../reducers/entries');
jest.mock('../../reducers/entryDraft');

const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
@ -45,14 +48,15 @@ describe('entries', () => {
author: '',
collection: undefined,
data: {},
meta: {},
isModification: null,
label: null,
mediaFiles: [],
metaData: null,
partial: false,
path: '',
raw: '',
slug: '',
status: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
@ -76,14 +80,15 @@ describe('entries', () => {
author: '',
collection: undefined,
data: { title: 'title', boolean: true },
meta: {},
isModification: null,
label: null,
mediaFiles: [],
metaData: null,
partial: false,
path: '',
raw: '',
slug: '',
status: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
@ -109,14 +114,15 @@ describe('entries', () => {
author: '',
collection: undefined,
data: { title: '<script>alert('hello')</script>' },
meta: {},
isModification: null,
label: null,
mediaFiles: [],
metaData: null,
partial: false,
path: '',
raw: '',
slug: '',
status: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
@ -383,4 +389,170 @@ describe('entries', () => {
expect(getMediaAssets({ entry })).toEqual([new AssetProxy({ path: 'path2' })]);
});
});

describe('validateMetaField', () => {
const state = {
config: fromJS({
slug: {
encoding: 'unicode',
clean_accents: false,
sanitize_replacement: '-',
},
}),
entries: fromJS([]),
};
const collection = fromJS({
folder: 'folder',
type: 'folder_based_collection',
name: 'name',
});
const t = jest.fn((key, args) => ({ key, args }));

const { selectCustomPath } = require('../../reducers/entryDraft');
const { selectEntryByPath } = require('../../reducers/entries');

beforeEach(() => {
jest.clearAllMocks();
});

it('should not return error on non meta field', () => {
expect(validateMetaField(null, null, fromJS({}), null, t)).toEqual({ error: false });
});

it('should not return error on meta path field', () => {
expect(
validateMetaField(null, null, fromJS({ meta: true, name: 'other' }), null, t),
).toEqual({ error: false });
});

it('should return error on empty path', () => {
expect(validateMetaField(null, null, fromJS({ meta: true, name: 'path' }), null, t)).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: null },
},
type: 'CUSTOM',
},
});

expect(
validateMetaField(null, null, fromJS({ meta: true, name: 'path' }), undefined, t),
).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: undefined },
},
type: 'CUSTOM',
},
});

expect(validateMetaField(null, null, fromJS({ meta: true, name: 'path' }), '', t)).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: '' },
},
type: 'CUSTOM',
},
});
});

it('should return error on invalid path', () => {
expect(
validateMetaField(state, null, fromJS({ meta: true, name: 'path' }), 'invalid path', t),
).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: 'invalid path' },
},
type: 'CUSTOM',
},
});
});

it('should return error on existing path', () => {
selectCustomPath.mockReturnValue('existing-path');
selectEntryByPath.mockReturnValue(fromJS({ path: 'existing-path' }));
expect(
validateMetaField(
{
...state,
entryDraft: fromJS({
entry: {},
}),
},
collection,
fromJS({ meta: true, name: 'path' }),
'existing-path',
t,
),
).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.pathExists',
args: { path: 'existing-path' },
},
type: 'CUSTOM',
},
});

expect(selectCustomPath).toHaveBeenCalledTimes(1);
expect(selectCustomPath).toHaveBeenCalledWith(
collection,
fromJS({ entry: { meta: { path: 'existing-path' } } }),
);

expect(selectEntryByPath).toHaveBeenCalledTimes(1);
expect(selectEntryByPath).toHaveBeenCalledWith(
state.entries,
collection.get('name'),
'existing-path',
);
});

it('should not return error on non existing path for new entry', () => {
selectCustomPath.mockReturnValue('non-existing-path');
selectEntryByPath.mockReturnValue(undefined);
expect(
validateMetaField(
{
...state,
entryDraft: fromJS({
entry: {},
}),
},
collection,
fromJS({ meta: true, name: 'path' }),
'non-existing-path',
t,
),
).toEqual({
error: false,
});
});

it('should not return error when for existing entry', () => {
selectCustomPath.mockReturnValue('existing-path');
selectEntryByPath.mockReturnValue(fromJS({ path: 'existing-path' }));
expect(
validateMetaField(
{
...state,
entryDraft: fromJS({
entry: { path: 'existing-path' },
}),
},
collection,
fromJS({ meta: true, name: 'path' }),
'existing-path',
t,
),
).toEqual({
error: false,
});
});
});
});
@ -1,6 +1,6 @@
import yaml from 'yaml';
import { Map, fromJS } from 'immutable';
import { trimStart, get, isPlainObject } from 'lodash';
import { trimStart, trim, get, isPlainObject } from 'lodash';
import { authenticateUser } from 'Actions/auth';
import * as publishModes from 'Constants/publishModes';
import { validateConfig } from 'Constants/configSchema';
@ -82,11 +82,28 @@ export function applyDefaults(config) {
'fields',
traverseFields(collection.get('fields'), setDefaultPublicFolder),
);
collection = collection.set('folder', trimStart(folder, '/'));
collection = collection.set('folder', trim(folder, '/'));
if (collection.has('meta')) {
const fields = collection.get('fields');
const metaFields = [];
collection.get('meta').forEach((value, key) => {
const field = value.withMutations(map => {
map.set('name', key);
map.set('meta', true);
map.set('required', true);
});
metaFields.push(field);
});
collection = collection.set('fields', fromJS([]).concat(metaFields, fields));
} else {
collection = collection.set('meta', Map());
}
}

const files = collection.get('files');
if (files) {
collection = collection.delete('nested');
collection = collection.delete('meta');
collection = collection.set(
'files',
files.map(file => {
@ -5,17 +5,24 @@ import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
import { ThunkDispatch } from 'redux-thunk';
import { Map, List } from 'immutable';
import { serializeValues } from '../lib/serializeEntryValues';
import { currentBackend } from '../backend';
import { currentBackend, slugFromCustomPath } from '../backend';
import {
selectPublishedSlugs,
selectUnpublishedSlugs,
selectEntry,
selectUnpublishedEntry,
} from '../reducers';
import { selectEditingDraft } from '../reducers/entries';
import { selectFields } from '../reducers/collections';
import { EDITORIAL_WORKFLOW, status, Status } from '../constants/publishModes';
import { EDITORIAL_WORKFLOW_ERROR } from 'netlify-cms-lib-util';
import { loadEntry, entryDeleted, getMediaAssets, createDraftFromEntry } from './entries';
import {
loadEntry,
entryDeleted,
getMediaAssets,
createDraftFromEntry,
loadEntries,
} from './entries';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import { addAssets } from './media';
import { loadMedia } from './mediaLibrary';
@ -24,6 +31,7 @@ import ValidationErrorTypes from '../constants/validationErrorTypes';
import { Collection, EntryMap, State, Collections, EntryDraft, MediaFile } from '../types/redux';
import { AnyAction } from 'redux';
import { EntryValue } from '../valueObjects/Entry';
import { navigateToEntry } from '../routing/history';

const { notifSend } = notifActions;

@ -406,7 +414,10 @@ export function persistUnpublishedEntry(collection: Collection, existingUnpublis
}),
);
dispatch(unpublishedEntryPersisted(collection, transactionID, newSlug));
if (!existingUnpublishedEntry) return dispatch(loadUnpublishedEntry(collection, newSlug));
if (entry.get('slug') !== newSlug) {
dispatch(loadUnpublishedEntry(collection, newSlug));
navigateToEntry(collection.get('name'), newSlug);
}
} catch (error) {
dispatch(
notifSend({
@ -506,40 +517,47 @@ export function deleteUnpublishedEntry(collection: string, slug: string) {
};
}

export function publishUnpublishedEntry(collection: string, slug: string) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
export function publishUnpublishedEntry(collectionName: string, slug: string) {
return async (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const collections = state.collections;
const backend = currentBackend(state.config);
const transactionID = uuid();
const entry = selectUnpublishedEntry(state, collection, slug);
dispatch(unpublishedEntryPublishRequest(collection, slug, transactionID));
return backend
.publishUnpublishedEntry(entry)
.then(() => {
// re-load media after entry was published
dispatch(loadMedia());
dispatch(
notifSend({
message: { key: 'ui.toast.entryPublished' },
kind: 'success',
dismissAfter: 4000,
}),
);

dispatch(unpublishedEntryPublished(collection, slug, transactionID));
return dispatch(loadEntry(collections.get(collection), slug));
})
.catch((error: Error) => {
dispatch(
notifSend({
message: { key: 'ui.toast.onFailToPublishEntry', details: error },
kind: 'danger',
dismissAfter: 8000,
}),
);
dispatch(unpublishedEntryPublishError(collection, slug, transactionID));
});
const entry = selectUnpublishedEntry(state, collectionName, slug);
dispatch(unpublishedEntryPublishRequest(collectionName, slug, transactionID));
try {
await backend.publishUnpublishedEntry(entry);
// re-load media after entry was published
dispatch(loadMedia());
dispatch(
notifSend({
message: { key: 'ui.toast.entryPublished' },
kind: 'success',
dismissAfter: 4000,
}),
);
dispatch(unpublishedEntryPublished(collectionName, slug, transactionID));
const collection = collections.get(collectionName);
if (collection.has('nested')) {
dispatch(loadEntries(collection));
const newSlug = slugFromCustomPath(collection, entry.get('path'));
loadEntry(collection, newSlug);
if (slug !== newSlug && selectEditingDraft(state.entryDraft)) {
navigateToEntry(collection.get('name'), newSlug);
}
} else {
return dispatch(loadEntry(collection, slug));
}
} catch (error) {
dispatch(
notifSend({
message: { key: 'ui.toast.onFailToPublishEntry', details: error },
kind: 'danger',
dismissAfter: 8000,
}),
);
dispatch(unpublishedEntryPublishError(collectionName, slug, transactionID));
}
};
}
@ -26,7 +26,10 @@ import { ThunkDispatch } from 'redux-thunk';
import { AnyAction } from 'redux';
import { waitForMediaLibraryToLoad, loadMedia } from './mediaLibrary';
import { waitUntil } from './waitUntil';
import { selectIsFetching, selectEntriesSortFields } from '../reducers/entries';
import { selectIsFetching, selectEntriesSortFields, selectEntryByPath } from '../reducers/entries';
import { selectCustomPath } from '../reducers/entryDraft';
import { navigateToEntry } from '../routing/history';
import { getProcessSegment } from '../lib/formatters';

const { notifSend } = notifActions;

@ -336,7 +339,7 @@ export function discardDraft() {
}

export function changeDraftField(
field: string,
field: EntryField,
value: string,
metadata: Record<string, unknown>,
entries: EntryMap[],
@ -520,7 +523,10 @@ export function loadEntries(collection: Collection, page = 0) {
cursor: Cursor;
pagination: number;
entries: EntryValue[];
} = await provider.listEntries(collection, page);
} = await (collection.has('nested')
? // nested collections require all entries to construct the tree
provider.listAllEntries(collection).then((entries: EntryValue[]) => ({ entries }))
: provider.listEntries(collection, page));
response = {
...response,
// The only existing backend using the pagination system is the
@ -647,7 +653,8 @@ export function createEmptyDraft(collection: Collection, search: string) {
});

const fields = collection.get('fields', List());
const dataFields = createEmptyDraftData(fields);
const dataFields = createEmptyDraftData(fields.filter(f => !f!.get('meta')).toList());
const metaFields = createEmptyDraftData(fields.filter(f => f!.get('meta') === true).toList());

const state = getState();
const backend = currentBackend(state.config);
@ -659,6 +666,8 @@ export function createEmptyDraft(collection: Collection, search: string) {
let newEntry = createEntry(collection.get('name'), '', '', {
data: dataFields,
mediaFiles: [],
// eslint-disable-next-line @typescript-eslint/no-explicit-any
meta: metaFields as any,
});
newEntry = await backend.processEntry(state, collection, newEntry);
dispatch(emptyDraftCreated(newEntry));
@ -791,7 +800,7 @@ export function persistEntry(collection: Collection) {
assetProxies,
usedSlugs,
})
.then((slug: string) => {
.then((newSlug: string) => {
dispatch(
notifSend({
message: {
@ -805,8 +814,14 @@ export function persistEntry(collection: Collection) {
if (assetProxies.length > 0) {
dispatch(loadMedia());
}
dispatch(entryPersisted(collection, serializedEntry, slug));
if (serializedEntry.get('newRecord')) return dispatch(loadEntry(collection, slug));
dispatch(entryPersisted(collection, serializedEntry, newSlug));
if (collection.has('nested')) {
dispatch(loadEntries(collection));
}
if (entry.get('slug') !== newSlug) {
dispatch(loadEntry(collection, newSlug));
navigateToEntry(collection.get('name'), newSlug);
}
})
.catch((error: Error) => {
console.error(error);
@ -852,3 +867,53 @@ export function deleteEntry(collection: Collection, slug: string) {
});
};
}

const getPathError = (
path: string | undefined,
key: string,
t: (key: string, args: Record<string, unknown>) => string,
) => {
return {
error: {
type: ValidationErrorTypes.CUSTOM,
message: t(`editor.editorControlPane.widget.${key}`, {
path,
}),
},
};
};

export function validateMetaField(
state: State,
collection: Collection,
field: EntryField,
value: string | undefined,
t: (key: string, args: Record<string, unknown>) => string,
) {
if (field.get('meta') && field.get('name') === 'path') {
if (!value) {
return getPathError(value, 'invalidPath', t);
}
const sanitizedPath = (value as string)
.split('/')
.map(getProcessSegment(state.config.get('slug')))
.join('/');

if (value !== sanitizedPath) {
return getPathError(value, 'invalidPath', t);
}

const customPath = selectCustomPath(collection, fromJS({ entry: { meta: { path: value } } }));
const existingEntry = customPath
? selectEntryByPath(state.entries, collection.get('name'), customPath)
: undefined;

const existingEntryPath = existingEntry?.get('path');
const draftPath = state.entryDraft?.getIn(['entry', 'path']);

if (existingEntryPath && existingEntryPath !== draftPath) {
return getPathError(value, 'pathExists', t);
}
}
return { error: false };
}
@ -1,4 +1,4 @@
import { attempt, flatten, isError, uniq } from 'lodash';
import { attempt, flatten, isError, uniq, trim, sortBy } from 'lodash';
import { List, Map, fromJS } from 'immutable';
import * as fuzzy from 'fuzzy';
import { resolveFormat } from './formats/formats';
@ -15,6 +15,7 @@ import {
selectInferedField,
selectMediaFolders,
selectFieldsComments,
selectHasMetaPath,
} from './reducers/collections';
import { createEntry, EntryValue } from './valueObjects/Entry';
import { sanitizeChar } from './lib/urlHelper';
@ -34,6 +35,7 @@ import {
Config as ImplementationConfig,
blobToFileObj,
} from 'netlify-cms-lib-util';
import { basename, join, extname, dirname } from 'path';
import { status } from './constants/publishModes';
import { stringTemplate } from 'netlify-cms-lib-widgets';
import {
@ -49,6 +51,8 @@ import {
} from './types/redux';
import AssetProxy from './valueObjects/AssetProxy';
import { FOLDER, FILES } from './constants/collectionTypes';
import { selectCustomPath } from './reducers/entryDraft';
import { UnpublishedEntry } from 'netlify-cms-lib-util/src/implementation';

const { extractTemplateVars, dateParsers } = stringTemplate;

@ -103,6 +107,13 @@ const sortByScore = (a: fuzzy.FilterResult<EntryValue>, b: fuzzy.FilterResult<En
return 0;
};

export const slugFromCustomPath = (collection: Collection, customPath: string) => {
const folderPath = collection.get('folder', '') as string;
const entryPath = customPath.toLowerCase().replace(folderPath.toLowerCase(), '');
const slug = join(dirname(trim(entryPath, '/')), basename(entryPath, extname(customPath)));
return slug;
};
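// Illustrative example (values assumed, not taken from this change): for a
// collection whose folder is 'src/pages', a custom path of
// 'src/pages/dir1/index.md' resolves to the slug 'dir1/index' — the directory
// segments relative to the folder plus the basename, with the extension stripped.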

interface AuthStore {
retrieve: () => User;
store: (user: User) => void;
@ -153,6 +164,14 @@ type Implementation = BackendImplementation & {
init: (config: ImplementationConfig, options: ImplementationInitOptions) => Implementation;
};

const prepareMetaPath = (path: string, collection: Collection) => {
if (!selectHasMetaPath(collection)) {
return path;
}
const dir = dirname(path);
return dir.substr(collection.get('folder')!.length + 1) || '/';
};
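// Illustrative example (assuming a nested collection with folder 'src/pages'):
// an entry file at 'src/pages/dir1/index.md' gets meta.path 'dir1', while a
// file sitting directly under the collection folder resolves to '/'.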

export class Backend {
implementation: Implementation;
backendName: string;
@ -261,12 +280,14 @@ export class Backend {
async entryExist(collection: Collection, path: string, slug: string, useWorkflow: boolean) {
const unpublishedEntry =
useWorkflow &&
(await this.implementation.unpublishedEntry(collection.get('name'), slug).catch(error => {
if (error instanceof EditorialWorkflowError && error.notUnderEditorialWorkflow) {
return Promise.resolve(false);
}
return Promise.reject(error);
}));
(await this.implementation
.unpublishedEntry({ collection: collection.get('name'), slug })
.catch(error => {
if (error instanceof EditorialWorkflowError && error.notUnderEditorialWorkflow) {
return Promise.resolve(false);
}
return Promise.reject(error);
}));

if (unpublishedEntry) return unpublishedEntry;

@ -285,9 +306,15 @@ export class Backend {
entryData: Map<string, unknown>,
config: Config,
usedSlugs: List<string>,
customPath: string | undefined,
) {
const slugConfig = config.get('slug');
const slug: string = slugFormatter(collection, entryData, slugConfig);
let slug: string;
if (customPath) {
slug = slugFromCustomPath(collection, customPath);
} else {
slug = slugFormatter(collection, entryData, slugConfig);
}
let i = 1;
let uniqueSlug = slug;

@ -334,12 +361,17 @@ export class Backend {
let listMethod: () => Promise<ImplementationEntry[]>;
const collectionType = collection.get('type');
if (collectionType === FOLDER) {
listMethod = () =>
this.implementation.entriesByFolder(
listMethod = () => {
const depth =
collection.get('nested')?.get('depth') ||
getPathDepth(collection.get('path', '') as string);

return this.implementation.entriesByFolder(
collection.get('folder') as string,
extension,
getPathDepth(collection.get('path', '') as string),
depth,
);
};
} else if (collectionType === FILES) {
const files = collection
.get('files')!
@ -379,12 +411,12 @@ export class Backend {
async listAllEntries(collection: Collection) {
if (collection.get('folder') && this.implementation.allEntriesByFolder) {
const extension = selectFolderEntryExtension(collection);
const depth =
collection.get('nested')?.get('depth') ||
getPathDepth(collection.get('path', '') as string);

return this.implementation
.allEntriesByFolder(
collection.get('folder') as string,
extension,
getPathDepth(collection.get('path', '') as string),
)
.allEntriesByFolder(collection.get('folder') as string, extension, depth)
.then(entries => this.processEntries(entries, collection));
}

@ -491,7 +523,12 @@ export class Backend {

const label = selectFileEntryLabel(collection, slug);
const entry: EntryValue = this.entryWithFormat(collection)(
createEntry(collection.get('name'), slug, path, { raw, label, mediaFiles }),
createEntry(collection.get('name'), slug, path, {
raw,
label,
mediaFiles,
meta: { path: prepareMetaPath(path, collection) },
}),
);

return { entry };
@ -548,6 +585,7 @@ export class Backend {
raw: loadedEntry.data,
label,
mediaFiles: [],
meta: { path: prepareMetaPath(loadedEntry.file.path, collection) },
});

entry = this.entryWithFormat(collection)(entry);
@ -586,35 +624,93 @@ export class Backend {
};
}

unpublishedEntries(collections: Collections) {
return this.implementation.unpublishedEntries!()
.then(entries =>
entries.map(loadedEntry => {
const collectionName = loadedEntry.metaData!.collection;
async processUnpublishedEntry(
collection: Collection,
entryData: UnpublishedEntry,
withMediaFiles: boolean,
) {
const { slug } = entryData;
let extension: string;
if (collection.get('type') === FILES) {
const file = collection.get('files')!.find(f => f?.get('name') === slug);
extension = extname(file.get('file'));
} else {
extension = selectFolderEntryExtension(collection);
}
const dataFiles = sortBy(
entryData.diffs.filter(d => d.path.endsWith(extension)),
f => f.path.length,
);
// if the unpublished entry has no diffs, return the original
let data = '';
let newFile = false;
let path = slug;
if (dataFiles.length <= 0) {
const loadedEntry = await this.implementation.getEntry(
selectEntryPath(collection, slug) as string,
);
data = loadedEntry.data;
path = loadedEntry.file.path;
} else {
const entryFile = dataFiles[0];
data = await this.implementation.unpublishedEntryDataFile(
collection.get('name'),
entryData.slug,
entryFile.path,
entryFile.id,
);
newFile = entryFile.newFile;
path = entryFile.path;
}

const mediaFiles: MediaFile[] = [];
if (withMediaFiles) {
const nonDataFiles = entryData.diffs.filter(d => !d.path.endsWith(extension));
const files = await Promise.all(
nonDataFiles.map(f =>
this.implementation!.unpublishedEntryMediaFile(
collection.get('name'),
slug,
f.path,
f.id,
),
),
);
mediaFiles.push(...files.map(f => ({ ...f, draft: true })));
}
const entry = createEntry(collection.get('name'), slug, path, {
raw: data,
isModification: !newFile,
label: collection && selectFileEntryLabel(collection, slug),
mediaFiles,
updatedOn: entryData.updatedAt,
status: entryData.status,
meta: { path: prepareMetaPath(path, collection) },
});

const entryWithFormat = this.entryWithFormat(collection)(entry);
return entryWithFormat;
}

async unpublishedEntries(collections: Collections) {
const ids = await this.implementation.unpublishedEntries!();
const entries = (
await Promise.all(
ids.map(async id => {
const entryData = await this.implementation.unpublishedEntry({ id });
const collectionName = entryData.collection;
const collection = collections.find(c => c.get('name') === collectionName);
const entry = createEntry(collectionName, loadedEntry.slug, loadedEntry.file.path, {
raw: loadedEntry.data,
isModification: loadedEntry.isModification,
label: collection && selectFileEntryLabel(collection, loadedEntry.slug!),
});
entry.metaData = loadedEntry.metaData;
if (!collection) {
console.warn(`Missing collection '${collectionName}' for unpublished entry '${id}'`);
return null;
}
const entry = await this.processUnpublishedEntry(collection, entryData, false);
return entry;
}),
)
.then(entries => ({
pagination: 0,
entries: entries.reduce((acc, entry) => {
const collection = collections.get(entry.collection);
if (collection) {
acc.push(this.entryWithFormat(collection)(entry) as EntryValue);
} else {
console.warn(
`Missing collection '${entry.collection}' for entry with path '${entry.path}'`,
);
}
return acc;
}, [] as EntryValue[]),
}));
).filter(Boolean) as EntryValue[];

return { pagination: 0, entries };
}

async processEntry(state: State, collection: Collection, entry: EntryValue) {
@ -633,19 +729,12 @@ export class Backend {
}

async unpublishedEntry(state: State, collection: Collection, slug: string) {
const loadedEntry = await this.implementation!.unpublishedEntry!(
collection.get('name') as string,
const entryData = await this.implementation!.unpublishedEntry!({
collection: collection.get('name') as string,
slug,
);

let entry = createEntry(collection.get('name'), loadedEntry.slug, loadedEntry.file.path, {
raw: loadedEntry.data,
isModification: loadedEntry.isModification,
metaData: loadedEntry.metaData,
mediaFiles: loadedEntry.mediaFiles?.map(file => ({ ...file, draft: true })) || [],
});

entry = this.entryWithFormat(collection)(entry);
let entry = await this.processUnpublishedEntry(collection, entryData, true);
entry = await this.processEntry(state, collection, entry);
return entry;
}
@ -738,12 +827,17 @@ export class Backend {

const newEntry = entryDraft.getIn(['entry', 'newRecord']) || false;

const useWorkflow = selectUseWorkflow(config);

let entryObj: {
path: string;
slug: string;
raw: string;
newPath?: string;
};

const customPath = selectCustomPath(collection, entryDraft);

if (newEntry) {
if (!selectAllowNewEntries(collection)) {
throw new Error('Not allowed to create new entries in this collection');
@ -753,9 +847,9 @@ export class Backend {
entryDraft.getIn(['entry', 'data']),
config,
usedSlugs,
customPath,
);
const path = selectEntryPath(collection, slug) as string;

const path = customPath || (selectEntryPath(collection, slug) as string);
entryObj = {
path,
slug,
@ -775,12 +869,13 @@ export class Backend {
asset.path = newPath;
});
} else {
const path = entryDraft.getIn(['entry', 'path']);
const slug = entryDraft.getIn(['entry', 'slug']);
entryObj = {
path,
slug,
path: entryDraft.getIn(['entry', 'path']),
// for workflow entries we refresh the slug on publish
slug: customPath && !useWorkflow ? slugFromCustomPath(collection, customPath) : slug,
raw: this.entryToRaw(collection, entryDraft.get('entry')),
newPath: customPath,
};
}

@ -798,8 +893,6 @@ export class Backend {
user.useOpenAuthoring,
);

const useWorkflow = selectUseWorkflow(config);

const collectionName = collection.get('name');

const updatedOptions = { unpublished, status };
@ -234,6 +234,11 @@ class App extends React.Component {
collections={collections}
render={props => <Collection {...props} isSearchResults isSingleSearchResult />}
/>
<RouteInCollection
collections={collections}
path="/collections/:name/filter/:filterTerm*"
render={props => <Collection {...props} />}
/>
<Route
path="/search/:searchTerm"
render={props => <Collection {...props} isSearchResults />}
@ -33,7 +33,7 @@ const SearchResultHeading = styled.h1`
${components.cardTopHeading};
`;

class Collection extends React.Component {
export class Collection extends React.Component {
static propTypes = {
searchTerm: PropTypes.string,
collectionName: PropTypes.string,
@ -51,8 +51,14 @@ class Collection extends React.Component {
};

renderEntriesCollection = () => {
const { collection } = this.props;
return <EntriesCollection collection={collection} viewStyle={this.state.viewStyle} />;
const { collection, filterTerm } = this.props;
return (
<EntriesCollection
collection={collection}
viewStyle={this.state.viewStyle}
filterTerm={filterTerm}
/>
);
};

renderEntriesSearch = () => {
@ -83,11 +89,19 @@ class Collection extends React.Component {
onSortClick,
sort,
viewFilters,
filterTerm,
t,
onFilterClick,
filter,
} = this.props;
const newEntryUrl = collection.get('create') ? getNewEntryUrl(collectionName) : '';

let newEntryUrl = collection.get('create') ? getNewEntryUrl(collectionName) : '';
if (newEntryUrl && filterTerm) {
newEntryUrl = getNewEntryUrl(collectionName);
if (filterTerm) {
newEntryUrl = `${newEntryUrl}?path=${filterTerm}`;
}
}

const searchResultKey =
'collection.collectionTop.searchResults' + (isSingleSearchResult ? 'InCollection' : '');
@ -98,6 +112,7 @@ class Collection extends React.Component {
collections={collections}
collection={(!isSearchResults || isSingleSearchResult) && collection}
searchTerm={searchTerm}
filterTerm={filterTerm}
/>
<CollectionMain>
{isSearchResults ? (
@ -132,7 +147,7 @@ class Collection extends React.Component {
function mapStateToProps(state, ownProps) {
const { collections } = state;
const { isSearchResults, match, t } = ownProps;
const { name, searchTerm } = match.params;
const { name, searchTerm = '', filterTerm = '' } = match.params;
const collection = name ? collections.get(name) : collections.first();
const sort = selectEntriesSort(state.entries, collection.get('name'));
const sortableFields = selectSortableFields(collection, t);
@ -145,6 +160,7 @@ function mapStateToProps(state, ownProps) {
collectionName: name,
isSearchResults,
searchTerm,
filterTerm,
sort,
sortableFields,
viewFilters,
@ -12,7 +12,7 @@ import { selectEntries, selectEntriesLoaded, selectIsFetching } from '../../../r
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
import Entries from './Entries';

class EntriesCollection extends React.Component {
export class EntriesCollection extends React.Component {
static propTypes = {
collection: ImmutablePropTypes.map.isRequired,
page: PropTypes.number,
@ -62,11 +62,36 @@ class EntriesCollection extends React.Component {
}
}

export const filterNestedEntries = (path, collectionFolder, entries) => {
const filtered = entries.filter(e => {
const entryPath = e.get('path').substring(collectionFolder.length + 1);
if (!entryPath.startsWith(path)) {
return false;
}

// only show immediate children
if (path) {
// non root path
const trimmed = entryPath.substring(path.length + 1);
return trimmed.split('/').length === 2;
} else {
// root path
return entryPath.split('/').length <= 2;
}
});
return filtered;
};
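// Illustrative example, mirroring the spec added below: with a collectionFolder
// of 'src/pages', filterNestedEntries('dir3', 'src/pages', entries) keeps only
// the immediate children of the 'dir3' node, e.g. 'src/pages/dir3/dir4/index.md'.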

function mapStateToProps(state, ownProps) {
const { collection, viewStyle } = ownProps;
const { collection, viewStyle, filterTerm } = ownProps;
const page = state.entries.getIn(['pages', collection.get('name'), 'page']);

const entries = selectEntries(state.entries, collection);
let entries = selectEntries(state.entries, collection);

if (collection.has('nested')) {
const collectionFolder = collection.get('folder');
entries = filterNestedEntries(filterTerm || '', collectionFolder, entries);
}
const entriesLoaded = selectEntriesLoaded(state.entries, collection.get('name'));
const isFetching = selectIsFetching(state.entries, collection.get('name'));
@ -0,0 +1,153 @@
import React from 'react';
import ConnectedEntriesCollection, {
EntriesCollection,
filterNestedEntries,
} from '../EntriesCollection';
import { render } from '@testing-library/react';
import { fromJS } from 'immutable';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';

jest.mock('../Entries', () => 'mock-entries');

const middlewares = [];
const mockStore = configureStore(middlewares);

const renderWithRedux = (component, { store } = {}) => {
function Wrapper({ children }) {
return <Provider store={store}>{children}</Provider>;
}
return render(component, { wrapper: Wrapper });
};

const toEntriesState = (collection, entriesArray) => {
const entries = entriesArray.reduce(
(acc, entry) => {
acc.entities[`${collection.get('name')}.${entry.slug}`] = entry;
acc.pages[collection.get('name')].ids.push(entry.slug);
return acc;
},
{ pages: { [collection.get('name')]: { ids: [] } }, entities: {} },
);
return fromJS(entries);
};

describe('filterNestedEntries', () => {
it('should return only immediate children for non root path', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];
const entries = fromJS(entriesArray);
expect(filterNestedEntries('dir3', 'src/pages', entries).toJS()).toEqual([
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
]);
});

it('should return immediate children and root for root path', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];
const entries = fromJS(entriesArray);
expect(filterNestedEntries('', 'src/pages', entries).toJS()).toEqual([
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
]);
});
});

describe('EntriesCollection', () => {
const collection = fromJS({ name: 'pages', label: 'Pages', folder: 'src/pages' });
const props = {
t: jest.fn(),
loadEntries: jest.fn(),
traverseCollectionCursor: jest.fn(),
isFetching: false,
cursor: {},
collection,
};
it('should render with entries', () => {
const entries = fromJS([{ slug: 'index' }]);
const { asFragment } = render(<EntriesCollection {...props} entries={entries} />);

expect(asFragment()).toMatchSnapshot();
});

it('should render connected component', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir2/index', path: 'src/pages/dir2/index.md', data: { title: 'File 2' } },
];

const store = mockStore({
entries: toEntriesState(collection, entriesArray),
cursors: fromJS({}),
});

const { asFragment } = renderWithRedux(<ConnectedEntriesCollection collection={collection} />, {
store,
});

expect(asFragment()).toMatchSnapshot();
});

it('should render show only immediate children for nested collection', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];

const store = mockStore({
entries: toEntriesState(collection, entriesArray),
cursors: fromJS({}),
});

const { asFragment } = renderWithRedux(
<ConnectedEntriesCollection collection={collection.set('nested', fromJS({ depth: 10 }))} />,
{
store,
},
);

expect(asFragment()).toMatchSnapshot();
});

it('should render apply filter term for nested collections', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];

const store = mockStore({
entries: toEntriesState(collection, entriesArray),
cursors: fromJS({}),
});

const { asFragment } = renderWithRedux(
<ConnectedEntriesCollection
collection={collection.set('nested', fromJS({ depth: 10 }))}
filterTerm="dir3/dir4"
/>,
{
store,
},
);

expect(asFragment()).toMatchSnapshot();
});
});
@ -0,0 +1,49 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`EntriesCollection should render apply filter term for nested collections 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-entries
|
||||
collectionname="Pages"
|
||||
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\", \\"nested\\": Map { \\"depth\\": 10 } }"
|
||||
cursor="[object Object]"
|
||||
entries="List []"
|
||||
isfetching="false"
|
||||
/>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`EntriesCollection should render connected component 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-entries
|
||||
collectionname="Pages"
|
||||
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\" }"
|
||||
cursor="[object Object]"
|
||||
entries="List [ Map { \\"slug\\": \\"index\\", \\"path\\": \\"src/pages/index.md\\", \\"data\\": Map { \\"title\\": \\"Root\\" } }, Map { \\"slug\\": \\"dir1/index\\", \\"path\\": \\"src/pages/dir1/index.md\\", \\"data\\": Map { \\"title\\": \\"File 1\\" } }, Map { \\"slug\\": \\"dir2/index\\", \\"path\\": \\"src/pages/dir2/index.md\\", \\"data\\": Map { \\"title\\": \\"File 2\\" } } ]"
|
||||
isfetching="false"
|
||||
/>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`EntriesCollection should render show only immediate children for nested collection 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-entries
|
||||
collectionname="Pages"
|
||||
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\", \\"nested\\": Map { \\"depth\\": 10 } }"
|
||||
cursor="[object Object]"
|
||||
entries="List [ Map { \\"slug\\": \\"index\\", \\"path\\": \\"src/pages/index.md\\", \\"data\\": Map { \\"title\\": \\"Root\\" } }, Map { \\"slug\\": \\"dir1/index\\", \\"path\\": \\"src/pages/dir1/index.md\\", \\"data\\": Map { \\"title\\": \\"File 1\\" } }, Map { \\"slug\\": \\"dir3/index\\", \\"path\\": \\"src/pages/dir3/index.md\\", \\"data\\": Map { \\"title\\": \\"File 3\\" } } ]"
|
||||
isfetching="false"
|
||||
/>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`EntriesCollection should render with entries 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-entries
|
||||
collectionname="Pages"
|
||||
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\" }"
|
||||
cursor="[object Object]"
|
||||
entries="List [ Map { \\"slug\\": \\"index\\" } ]"
|
||||
isfetching="false"
|
||||
/>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -0,0 +1,308 @@
|
||||
import React from 'react';
|
||||
import { List } from 'immutable';
|
||||
import { css } from '@emotion/core';
|
||||
import styled from '@emotion/styled';
|
||||
import { connect } from 'react-redux';
|
||||
import { NavLink } from 'react-router-dom';
|
||||
import { dirname, sep } from 'path';
|
||||
import { stringTemplate } from 'netlify-cms-lib-widgets';
|
||||
import { selectEntryCollectionTitle } from '../../reducers/collections';
|
||||
import { selectEntries } from '../../reducers/entries';
|
||||
import { Icon, colors, components } from 'netlify-cms-ui-default';
|
||||
import PropTypes from 'prop-types';
|
||||
import ImmutablePropTypes from 'react-immutable-proptypes';
|
||||
import { sortBy } from 'lodash';
|
||||
|
||||
const { addFileTemplateFields } = stringTemplate;
|
||||
|
||||
const NodeTitleContainer = styled.div`
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
`;
|
||||
|
||||
const NodeTitle = styled.div`
|
||||
margin-right: 4px;
|
||||
`;
|
||||
|
||||
const Caret = styled.div`
|
||||
position: relative;
|
||||
top: 2px;
|
||||
`;
|
||||
|
||||
const CaretDown = styled(Caret)`
|
||||
${components.caretDown};
|
||||
color: currentColor;
|
||||
`;
|
||||
|
||||
const CaretRight = styled(Caret)`
|
||||
${components.caretRight};
|
||||
color: currentColor;
|
||||
left: 2px;
|
||||
`;
|
||||
|
||||
const TreeNavLink = styled(NavLink)`
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: ${props => props.depth * 20 + 12}px;
|
||||
border-left: 2px solid #fff;
|
||||
|
||||
${Icon} {
|
||||
margin-right: 8px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
${props => css`
|
||||
&:hover,
|
||||
&:active,
|
||||
&.${props.activeClassName} {
|
||||
color: ${colors.active};
|
||||
background-color: ${colors.activeBackground};
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
`};
|
||||
`;
|
||||
|
||||
const getNodeTitle = node => {
|
||||
const title = node.isRoot
|
||||
? node.title
|
||||
: node.children.find(c => !c.isDir && c.title)?.title || node.title;
|
||||
return title;
|
||||
};
|
||||
|
||||
const TreeNode = props => {
|
||||
const { collection, treeData, depth = 0, onToggle } = props;
|
||||
const collectionName = collection.get('name');
|
||||
|
||||
const sortedData = sortBy(treeData, getNodeTitle);
|
||||
return sortedData.map(node => {
|
||||
const leaf = node.children.length <= 1 && !node.children[0]?.isDir && depth > 0;
|
||||
if (leaf) {
|
||||
return null;
|
||||
}
|
||||
let to = `/collections/${collectionName}`;
|
||||
if (depth > 0) {
|
||||
to = `${to}/filter${node.path}`;
|
||||
}
|
||||
const title = getNodeTitle(node);
|
||||
|
||||
const hasChildren = depth === 0 || node.children.some(c => c.children.some(c => c.isDir));
|
||||
|
||||
return (
|
||||
<React.Fragment key={node.path}>
|
||||
<TreeNavLink
|
||||
exact
|
||||
to={to}
|
||||
activeClassName="sidebar-active"
|
||||
onClick={() => onToggle({ node, expanded: !node.expanded })}
|
||||
depth={depth}
|
||||
data-testid={node.path}
|
||||
>
|
||||
<Icon type="write" />
|
||||
<NodeTitleContainer>
|
||||
<NodeTitle>{title}</NodeTitle>
|
||||
{hasChildren && (node.expanded ? <CaretDown /> : <CaretRight />)}
|
||||
</NodeTitleContainer>
|
||||
</TreeNavLink>
|
||||
{node.expanded && (
|
||||
<TreeNode
|
||||
collection={collection}
|
||||
depth={depth + 1}
|
||||
treeData={node.children}
|
||||
onToggle={onToggle}
|
||||
/>
|
||||
)}
|
||||
</React.Fragment>
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
TreeNode.propTypes = {
|
||||
collection: ImmutablePropTypes.map.isRequired,
|
||||
depth: PropTypes.number,
|
||||
treeData: PropTypes.array.isRequired,
|
||||
onToggle: PropTypes.func.isRequired,
|
||||
};
|
||||
|
||||
export const walk = (treeData, callback) => {
|
||||
const traverse = children => {
|
||||
for (const child of children) {
|
||||
callback(child);
|
||||
traverse(child.children);
|
||||
}
|
||||
};
|
||||
|
||||
return traverse(treeData);
|
||||
};
|
||||
|
||||
export const getTreeData = (collection, entries) => {
|
||||
const collectionFolder = collection.get('folder');
|
||||
const rootFolder = '/';
|
||||
const entriesObj = entries
|
||||
.toJS()
|
||||
.map(e => ({ ...e, path: e.path.substring(collectionFolder.length) }));
|
||||
|
||||
const dirs = entriesObj.reduce((acc, entry) => {
|
||||
let dir = dirname(entry.path);
|
||||
while (!acc[dir] && dir && dir !== rootFolder) {
|
||||
const parts = dir.split(sep);
|
||||
acc[dir] = parts.pop();
|
||||
dir = parts.length && parts.join(sep);
|
||||
}
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
if (collection.getIn(['nested', 'summary'])) {
|
||||
collection = collection.set('summary', collection.getIn(['nested', 'summary']));
|
||||
} else {
|
||||
collection = collection.delete('summary');
|
||||
}
|
||||
|
||||
const flatData = [
|
||||
{
|
||||
title: collection.get('label'),
|
||||
path: rootFolder,
|
||||
isDir: true,
|
||||
isRoot: true,
|
||||
},
|
||||
...Object.entries(dirs).map(([key, value]) => ({
|
||||
title: value,
|
||||
path: key,
|
||||
isDir: true,
|
||||
isRoot: false,
|
||||
})),
|
||||
...entriesObj.map((e, index) => {
|
||||
let entryMap = entries.get(index);
|
||||
entryMap = entryMap.set(
|
||||
'data',
|
||||
addFileTemplateFields(entryMap.get('path'), entryMap.get('data')),
|
||||
);
|
||||
const title = selectEntryCollectionTitle(collection, entryMap);
|
||||
return {
|
||||
...e,
|
||||
title,
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
};
|
||||
}),
|
||||
];
|
||||
|
||||
const parentsToChildren = flatData.reduce((acc, node) => {
|
||||
const parent = node.path === rootFolder ? '' : dirname(node.path);
|
||||
if (acc[parent]) {
|
||||
acc[parent].push(node);
|
||||
} else {
|
||||
acc[parent] = [node];
|
||||
}
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
const reducer = (acc, value) => {
|
||||
const node = value;
|
||||
let children = [];
|
||||
if (parentsToChildren[node.path]) {
|
||||
children = parentsToChildren[node.path].reduce(reducer, []);
|
||||
}
|
||||
|
||||
acc.push({ ...node, children });
|
||||
return acc;
|
||||
};
|
||||
|
||||
const treeData = parentsToChildren[''].reduce(reducer, []);
|
||||
|
||||
return treeData;
|
||||
};
|
||||
|
||||
export const updateNode = (treeData, node, callback) => {
|
||||
let stop = false;
|
||||
|
||||
const updater = nodes => {
|
||||
if (stop) {
|
||||
return nodes;
|
||||
}
|
||||
for (let i = 0; i < nodes.length; i++) {
|
||||
if (nodes[i].path === node.path) {
|
||||
nodes[i] = callback(node);
|
||||
stop = true;
|
||||
return nodes;
|
||||
}
|
||||
}
|
||||
nodes.forEach(node => updater(node.children));
|
||||
return nodes;
|
||||
};
|
||||
|
||||
return updater([...treeData]);
|
||||
};
|
||||
|
||||
export class NestedCollection extends React.Component {
|
||||
static propTypes = {
|
||||
collection: ImmutablePropTypes.map.isRequired,
|
||||
entries: ImmutablePropTypes.list.isRequired,
|
||||
filterTerm: PropTypes.string,
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
this.state = {
|
||||
treeData: getTreeData(this.props.collection, this.props.entries),
|
||||
selected: null,
|
||||
useFilter: true,
|
||||
};
|
||||
}
|
||||
|
||||
componentDidUpdate(prevProps) {
|
||||
const { collection, entries, filterTerm } = this.props;
|
||||
if (
|
||||
collection !== prevProps.collection ||
|
||||
entries !== prevProps.entries ||
|
||||
filterTerm !== prevProps.filterTerm
|
||||
) {
|
||||
const expanded = {};
|
||||
walk(this.state.treeData, node => {
|
||||
if (node.expanded) {
|
||||
expanded[node.path] = true;
|
||||
}
|
||||
});
|
||||
const treeData = getTreeData(collection, entries);
|
||||
|
||||
const path = `/${filterTerm}`;
|
||||
walk(treeData, node => {
|
||||
if (expanded[node.path] || (this.state.useFilter && path.startsWith(node.path))) {
|
||||
node.expanded = true;
|
||||
}
|
||||
});
|
||||
this.setState({ treeData });
|
||||
}
|
||||
}
|
||||
|
||||
onToggle = ({ node, expanded }) => {
|
||||
if (!this.state.selected || this.state.selected.path === node.path || expanded) {
|
||||
const treeData = updateNode(this.state.treeData, node, node => ({
|
||||
...node,
|
||||
expanded,
|
||||
}));
|
||||
this.setState({ treeData, selected: node, useFilter: false });
|
||||
} else {
|
||||
// don't collapse non selected nodes when clicked
|
||||
this.setState({ selected: node, useFilter: false });
|
||||
}
|
||||
};
|
||||
|
||||
render() {
|
||||
const { treeData } = this.state;
|
||||
const { collection } = this.props;
|
||||
|
||||
return <TreeNode collection={collection} treeData={treeData} onToggle={this.onToggle} />;
|
||||
}
|
||||
}
|
||||
|
||||
function mapStateToProps(state, ownProps) {
|
||||
const { collection } = ownProps;
|
||||
const entries = selectEntries(state.entries, collection) || List();
|
||||
return { entries };
|
||||
}
|
||||
|
||||
export default connect(mapStateToProps, null)(NestedCollection);
|
@ -8,6 +8,7 @@ import { NavLink } from 'react-router-dom';
|
||||
import { Icon, components, colors } from 'netlify-cms-ui-default';
|
||||
import { searchCollections } from 'Actions/collections';
|
||||
import CollectionSearch from './CollectionSearch';
|
||||
import NestedCollection from './NestedCollection';
|
||||
|
||||
const styles = {
|
||||
sidebarNavLinkActive: css`
|
||||
@ -64,23 +65,35 @@ const SidebarNavLink = styled(NavLink)`
|
||||
`};
|
||||
`;
|
||||
|
||||
class Sidebar extends React.Component {
|
||||
export class Sidebar extends React.Component {
|
||||
static propTypes = {
|
||||
collections: ImmutablePropTypes.orderedMap.isRequired,
|
||||
collection: ImmutablePropTypes.map,
|
||||
searchTerm: PropTypes.string,
|
||||
filterTerm: PropTypes.string,
|
||||
t: PropTypes.func.isRequired,
|
||||
};
|
||||
|
||||
static defaultProps = {
|
||||
searchTerm: '',
|
||||
};
|
||||
|
||||
renderLink = collection => {
|
||||
renderLink = (collection, filterTerm) => {
|
||||
const collectionName = collection.get('name');
|
||||
if (collection.has('nested')) {
|
||||
return (
|
||||
<li key={collectionName}>
|
||||
<NestedCollection
|
||||
collection={collection}
|
||||
filterTerm={filterTerm}
|
||||
data-testid={collectionName}
|
||||
/>
|
||||
</li>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<li key={collectionName}>
|
||||
<SidebarNavLink to={`/collections/${collectionName}`} activeClassName="sidebar-active">
|
||||
<SidebarNavLink
|
||||
to={`/collections/${collectionName}`}
|
||||
activeClassName="sidebar-active"
|
||||
data-testid={collectionName}
|
||||
>
|
||||
<Icon type="write" />
|
||||
{collection.get('label')}
|
||||
</SidebarNavLink>
|
||||
@ -89,7 +102,8 @@ class Sidebar extends React.Component {
|
||||
};
|
||||
|
||||
render() {
|
||||
const { collections, collection, searchTerm, t } = this.props;
|
||||
const { collections, collection, searchTerm, t, filterTerm } = this.props;
|
||||
|
||||
return (
|
||||
<SidebarContainer>
|
||||
<SidebarHeading>{t('collection.sidebar.collections')}</SidebarHeading>
|
||||
@ -103,7 +117,7 @@ class Sidebar extends React.Component {
|
||||
{collections
|
||||
.toList()
|
||||
.filter(collection => collection.get('hide') !== true)
|
||||
.map(this.renderLink)}
|
||||
.map(collection => this.renderLink(collection, filterTerm))}
|
||||
</SidebarNavList>
|
||||
</SidebarContainer>
|
||||
);
|
||||
|
@ -0,0 +1,68 @@
|
||||
import React from 'react';
|
||||
import ConnectedCollection, { Collection } from '../Collection';
|
||||
import { render } from '@testing-library/react';
|
||||
import { fromJS } from 'immutable';
|
||||
import configureStore from 'redux-mock-store';
|
||||
import { Provider } from 'react-redux';
|
||||
|
||||
jest.mock('../Entries/EntriesCollection', () => 'mock-entries-collection');
|
||||
jest.mock('../CollectionTop', () => 'mock-collection-top');
|
||||
jest.mock('../CollectionControls', () => 'mock-collection-controls');
|
||||
jest.mock('../Sidebar', () => 'mock-sidebar');
|
||||
|
||||
const middlewares = [];
|
||||
const mockStore = configureStore(middlewares);
|
||||
|
||||
const renderWithRedux = (component, { store } = {}) => {
|
||||
function Wrapper({ children }) {
|
||||
return <Provider store={store}>{children}</Provider>;
|
||||
}
|
||||
return render(component, { wrapper: Wrapper });
|
||||
};
|
||||
|
||||
describe('Collection', () => {
|
||||
const collection = fromJS({ name: 'pages', sortableFields: [], view_filters: [] });
|
||||
const props = {
|
||||
collections: fromJS([collection]).toOrderedMap(),
|
||||
collection,
|
||||
collectionName: collection.get('name'),
|
||||
t: jest.fn(key => key),
|
||||
onSortClick: jest.fn(),
|
||||
};
|
||||
|
||||
it('should render with collection without create url', () => {
|
||||
const { asFragment } = render(
|
||||
<Collection {...props} collection={collection.set('create', false)} />,
|
||||
);
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
it('should render with collection with create url', () => {
|
||||
const { asFragment } = render(
|
||||
<Collection {...props} collection={collection.set('create', true)} />,
|
||||
);
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render with collection with create url and path', () => {
|
||||
const { asFragment } = render(
|
||||
<Collection {...props} collection={collection.set('create', true)} filterTerm="dir1/dir2" />,
|
||||
);
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render connected component', () => {
|
||||
const store = mockStore({
|
||||
collections: props.collections,
|
||||
entries: fromJS({}),
|
||||
});
|
||||
|
||||
const { asFragment } = renderWithRedux(<ConnectedCollection match={{ params: {} }} />, {
|
||||
store,
|
||||
});
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
});
|
@ -0,0 +1,440 @@
|
||||
import React from 'react';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import ConnectedNestedCollection, {
|
||||
NestedCollection,
|
||||
getTreeData,
|
||||
walk,
|
||||
updateNode,
|
||||
} from '../NestedCollection';
|
||||
import { render, fireEvent } from '@testing-library/react';
|
||||
import { fromJS } from 'immutable';
|
||||
import configureStore from 'redux-mock-store';
|
||||
import { Provider } from 'react-redux';
|
||||
|
||||
jest.mock('netlify-cms-ui-default', () => {
|
||||
const actual = jest.requireActual('netlify-cms-ui-default');
|
||||
return {
|
||||
...actual,
|
||||
Icon: 'mocked-icon',
|
||||
};
|
||||
});
|
||||
|
||||
const middlewares = [];
|
||||
const mockStore = configureStore(middlewares);
|
||||
|
||||
const renderWithRedux = (component, { store } = {}) => {
|
||||
function Wrapper({ children }) {
|
||||
return <Provider store={store}>{children}</Provider>;
|
||||
}
|
||||
return render(component, { wrapper: Wrapper });
|
||||
};
|
||||
|
||||
describe('NestedCollection', () => {
|
||||
const collection = fromJS({
|
||||
name: 'pages',
|
||||
label: 'Pages',
|
||||
folder: 'src/pages',
|
||||
fields: [{ name: 'title', widget: 'string' }],
|
||||
});
|
||||
|
||||
it('should render correctly with no entries', () => {
|
||||
const entries = fromJS([]);
|
||||
const { asFragment, getByTestId } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(getByTestId('/')).toHaveTextContent('Pages');
|
||||
expect(getByTestId('/')).toHaveAttribute('href', '/collections/pages');
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render correctly with nested entries', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/b/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
|
||||
{ path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
|
||||
]);
|
||||
const { asFragment, getByTestId } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
// expand the tree
|
||||
fireEvent.click(getByTestId('/'));
|
||||
|
||||
expect(getByTestId('/a')).toHaveTextContent('File 1');
|
||||
expect(getByTestId('/a')).toHaveAttribute('href', '/collections/pages/filter/a');
|
||||
|
||||
expect(getByTestId('/b')).toHaveTextContent('File 2');
|
||||
expect(getByTestId('/b')).toHaveAttribute('href', '/collections/pages/filter/b');
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should keep expanded nodes on re-render', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/b/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
|
||||
{ path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
|
||||
]);
|
||||
const { getByTestId, rerender } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
fireEvent.click(getByTestId('/'));
|
||||
fireEvent.click(getByTestId('/a'));
|
||||
|
||||
expect(getByTestId('/a')).toHaveTextContent('File 1');
|
||||
|
||||
const newEntries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/b/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
|
||||
{ path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
|
||||
{ path: 'src/pages/c/index.md', data: { title: 'File 5' } },
|
||||
{ path: 'src/pages/c/a/index.md', data: { title: 'File 6' } },
|
||||
]);
|
||||
|
||||
rerender(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={newEntries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(getByTestId('/a')).toHaveTextContent('File 1');
|
||||
});
|
||||
|
||||
it('should expand nodes based on filterTerm', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
|
||||
]);
|
||||
|
||||
const { getByTestId, queryByTestId, rerender } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(queryByTestId('/a/a')).toBeNull();
|
||||
|
||||
rerender(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} filterTerm={'a/a'} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
|
||||
});
|
||||
|
||||
it('should ignore filterTerm once a user toggles an node', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
|
||||
]);
|
||||
|
||||
const { getByTestId, queryByTestId, rerender } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
rerender(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} filterTerm={'a/a'} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
|
||||
|
||||
fireEvent.click(getByTestId('/a'));
|
||||
|
||||
rerender(
|
||||
<MemoryRouter>
|
||||
<NestedCollection
|
||||
collection={collection}
|
||||
entries={fromJS(entries.toJS())}
|
||||
filterTerm={'a/a'}
|
||||
/>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(queryByTestId('/a/a')).toBeNull();
|
||||
});
|
||||
|
||||
it('should not collapse an unselected node when clicked', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
|
||||
{ path: 'src/pages/a/a/a/a/index.md', data: { title: 'File 4' } },
|
||||
]);
|
||||
|
||||
const { getByTestId } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
fireEvent.click(getByTestId('/'));
|
||||
fireEvent.click(getByTestId('/a'));
|
||||
fireEvent.click(getByTestId('/a/a'));
|
||||
|
||||
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
|
||||
fireEvent.click(getByTestId('/a'));
|
||||
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
|
||||
});
|
||||
|
||||
it('should collapse a selected node when clicked', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
|
||||
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
|
||||
{ path: 'src/pages/a/a/a/a/index.md', data: { title: 'File 4' } },
|
||||
]);
|
||||
|
||||
const { getByTestId, queryByTestId } = render(
|
||||
<MemoryRouter>
|
||||
<NestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
fireEvent.click(getByTestId('/'));
|
||||
fireEvent.click(getByTestId('/a'));
|
||||
fireEvent.click(getByTestId('/a/a'));
|
||||
|
||||
expect(getByTestId('/a/a/a')).toHaveTextContent('File 3');
|
||||
fireEvent.click(getByTestId('/a/a'));
|
||||
expect(queryByTestId('/a/a/a')).toBeNull();
|
||||
});
|
||||
|
||||
it('should render connected component', () => {
|
||||
const entriesArray = [
|
||||
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ slug: 'a/index', path: 'src/pages/a/index.md', data: { title: 'File 1' } },
|
||||
{ slug: 'b/index', path: 'src/pages/b/index.md', data: { title: 'File 2' } },
|
||||
{ slug: 'a/a/index', path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
|
||||
{ slug: 'b/a/index', path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
|
||||
];
|
||||
const entries = entriesArray.reduce(
|
||||
(acc, entry) => {
|
||||
acc.entities[`${collection.get('name')}.${entry.slug}`] = entry;
|
||||
acc.pages[collection.get('name')].ids.push(entry.slug);
|
||||
return acc;
|
||||
},
|
||||
{ pages: { [collection.get('name')]: { ids: [] } }, entities: {} },
|
||||
);
|
||||
|
||||
const store = mockStore({ entries: fromJS(entries) });
|
||||
|
||||
const { asFragment, getByTestId } = renderWithRedux(
|
||||
<MemoryRouter>
|
||||
<ConnectedNestedCollection collection={collection} entries={entries} />
|
||||
</MemoryRouter>,
|
||||
{ store },
|
||||
);
|
||||
|
||||
// expand the root
|
||||
fireEvent.click(getByTestId('/'));
|
||||
|
||||
expect(getByTestId('/a')).toHaveTextContent('File 1');
|
||||
expect(getByTestId('/a')).toHaveAttribute('href', '/collections/pages/filter/a');
|
||||
|
||||
expect(getByTestId('/b')).toHaveTextContent('File 2');
|
||||
expect(getByTestId('/b')).toHaveAttribute('href', '/collections/pages/filter/b');
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
describe('getTreeData', () => {
|
||||
it('should return nested tree data from entries', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/intro/index.md', data: { title: 'intro index' } },
|
||||
{ path: 'src/pages/intro/category/index.md', data: { title: 'intro category index' } },
|
||||
{ path: 'src/pages/compliance/index.md', data: { title: 'compliance index' } },
|
||||
]);
|
||||
|
||||
const treeData = getTreeData(collection, entries);
|
||||
|
||||
expect(treeData).toEqual([
|
||||
{
|
||||
title: 'Pages',
|
||||
path: '/',
|
||||
isDir: true,
|
||||
isRoot: true,
|
||||
children: [
|
||||
{
|
||||
title: 'intro',
|
||||
path: '/intro',
|
||||
isDir: true,
|
||||
isRoot: false,
|
||||
children: [
|
||||
{
|
||||
title: 'category',
|
||||
path: '/intro/category',
|
||||
isDir: true,
|
||||
isRoot: false,
|
||||
children: [
|
||||
{
|
||||
path: '/intro/category/index.md',
|
||||
data: { title: 'intro category index' },
|
||||
title: 'intro category index',
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: '/intro/index.md',
|
||||
data: { title: 'intro index' },
|
||||
title: 'intro index',
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'compliance',
|
||||
path: '/compliance',
|
||||
isDir: true,
|
||||
isRoot: false,
|
||||
children: [
|
||||
{
|
||||
path: '/compliance/index.md',
|
||||
data: { title: 'compliance index' },
|
||||
title: 'compliance index',
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: '/index.md',
|
||||
data: { title: 'Root' },
|
||||
title: 'Root',
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should ignore collection summary', () => {
|
||||
const entries = fromJS([{ path: 'src/pages/index.md', data: { title: 'Root' } }]);
|
||||
|
||||
const treeData = getTreeData(collection, entries);
|
||||
|
||||
expect(treeData).toEqual([
|
||||
{
|
||||
title: 'Pages',
|
||||
path: '/',
|
||||
isDir: true,
|
||||
isRoot: true,
|
||||
children: [
|
||||
{
|
||||
path: '/index.md',
|
||||
data: { title: 'Root' },
|
||||
title: 'Root',
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should use nested collection summary for title', () => {
|
||||
const entries = fromJS([{ path: 'src/pages/index.md', data: { title: 'Root' } }]);
|
||||
|
||||
const treeData = getTreeData(
|
||||
collection.setIn(['nested', 'summary'], '{{filename}}'),
|
||||
entries,
|
||||
);
|
||||
|
||||
expect(treeData).toEqual([
|
||||
{
|
||||
title: 'Pages',
|
||||
path: '/',
|
||||
isDir: true,
|
||||
isRoot: true,
|
||||
children: [
|
||||
{
|
||||
path: '/index.md',
|
||||
data: { title: 'Root' },
|
||||
title: 'index',
|
||||
isDir: false,
|
||||
isRoot: false,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('walk', () => {
|
||||
it('should visit every tree node', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/dir1/index.md', data: { title: 'Dir1 File' } },
|
||||
{ path: 'src/pages/dir2/index.md', data: { title: 'Dir2 File' } },
|
||||
]);
|
||||
|
||||
const treeData = getTreeData(collection, entries);
|
||||
const callback = jest.fn();
|
||||
walk(treeData, callback);
|
||||
|
||||
expect(callback).toHaveBeenCalledTimes(6);
|
||||
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/' }));
|
||||
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/index.md' }));
|
||||
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir1' }));
|
||||
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir2' }));
|
||||
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir1/index.md' }));
|
||||
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir2/index.md' }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateNode', () => {
|
||||
it('should update node', () => {
|
||||
const entries = fromJS([
|
||||
{ path: 'src/pages/index.md', data: { title: 'Root' } },
|
||||
{ path: 'src/pages/dir1/index.md', data: { title: 'Dir1 File' } },
|
||||
{ path: 'src/pages/dir2/index.md', data: { title: 'Dir2 File' } },
|
||||
]);
|
||||
|
||||
const treeData = getTreeData(collection, entries);
|
||||
expect(treeData[0].children[0].children[0].expanded).toBeUndefined();
|
||||
|
||||
const callback = jest.fn(node => ({ ...node, expanded: true }));
|
||||
const node = { path: '/dir1/index.md' };
|
||||
updateNode(treeData, node, callback);
|
||||
|
||||
expect(callback).toHaveBeenCalledTimes(1);
|
||||
expect(callback).toHaveBeenCalledWith(node);
|
||||
expect(treeData[0].children[0].children[0].expanded).toEqual(true);
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,74 @@
|
||||
import React from 'react';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import { Sidebar } from '../Sidebar';
|
||||
import { render } from '@testing-library/react';
|
||||
import { fromJS } from 'immutable';
|
||||
|
||||
jest.mock('netlify-cms-ui-default', () => {
|
||||
const actual = jest.requireActual('netlify-cms-ui-default');
|
||||
return {
|
||||
...actual,
|
||||
Icon: 'mocked-icon',
|
||||
};
|
||||
});
|
||||
|
||||
jest.mock('../NestedCollection', () => 'nested-collection');
|
||||
jest.mock('../CollectionSearch', () => 'collection-search');
|
||||
jest.mock('Actions/collections');
|
||||
|
||||
describe('Sidebar', () => {
|
||||
const props = {
|
||||
searchTerm: '',
|
||||
t: jest.fn(key => key),
|
||||
};
|
||||
it('should render sidebar with a simple collection', () => {
|
||||
const collections = fromJS([{ name: 'posts', label: 'Posts' }]).toOrderedMap();
|
||||
const { asFragment, getByTestId } = render(
|
||||
<MemoryRouter>
|
||||
<Sidebar {...props} collections={collections} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(getByTestId('posts')).toHaveTextContent('Posts');
|
||||
expect(getByTestId('posts')).toHaveAttribute('href', '/collections/posts');
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should not render a hidden collection', () => {
|
||||
const collections = fromJS([{ name: 'posts', label: 'Posts', hide: true }]).toOrderedMap();
|
||||
const { queryByTestId } = render(
|
||||
<MemoryRouter>
|
||||
<Sidebar {...props} collections={collections} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(queryByTestId('posts')).toBeNull();
|
||||
});
|
||||
|
||||
it('should render sidebar with a nested collection', () => {
|
||||
const collections = fromJS([
|
||||
{ name: 'posts', label: 'Posts', nested: { depth: 10 } },
|
||||
]).toOrderedMap();
|
||||
const { asFragment } = render(
|
||||
<MemoryRouter>
|
||||
<Sidebar {...props} collections={collections} />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render nested collection with filterTerm', () => {
|
||||
const collections = fromJS([
|
||||
{ name: 'posts', label: 'Posts', nested: { depth: 10 } },
|
||||
]).toOrderedMap();
|
||||
const { asFragment } = render(
|
||||
<MemoryRouter>
|
||||
<Sidebar {...props} collections={collections} filterTerm="dir1/dir2" />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
});
|
@ -0,0 +1,153 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`Collection should render connected component 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-2 {
|
||||
margin: 28px 18px;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
padding-left: 280px;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<mock-sidebar
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] }"
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] } }"
|
||||
filterterm=""
|
||||
searchterm=""
|
||||
/>
|
||||
<main
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
<mock-collection-top
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] }"
|
||||
newentryurl=""
|
||||
/>
|
||||
<mock-collection-controls
|
||||
filter="Map {}"
|
||||
sortablefields=""
|
||||
viewfilters=""
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
<mock-entries-collection
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] }"
|
||||
filterterm=""
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
</main>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Collection should render with collection with create url 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-2 {
|
||||
margin: 28px 18px;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
padding-left: 280px;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<mock-sidebar
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": true }"
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] } }"
|
||||
/>
|
||||
<main
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
<mock-collection-top
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": true }"
|
||||
newentryurl="/collections/pages/new"
|
||||
/>
|
||||
<mock-collection-controls
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
<mock-entries-collection
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": true }"
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
</main>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Collection should render with collection with create url and path 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-2 {
|
||||
margin: 28px 18px;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
padding-left: 280px;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<mock-sidebar
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": true }"
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] } }"
|
||||
filterterm="dir1/dir2"
|
||||
/>
|
||||
<main
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
<mock-collection-top
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": true }"
|
||||
newentryurl="/collections/pages/new?path=dir1/dir2"
|
||||
/>
|
||||
<mock-collection-controls
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
<mock-entries-collection
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": true }"
|
||||
filterterm="dir1/dir2"
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
</main>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Collection should render with collection without create url 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-2 {
|
||||
margin: 28px 18px;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
padding-left: 280px;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<mock-sidebar
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": false }"
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [] } }"
|
||||
/>
|
||||
<main
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
<mock-collection-top
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": false }"
|
||||
newentryurl=""
|
||||
/>
|
||||
<mock-collection-controls
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
<mock-entries-collection
|
||||
collection="Map { \\"name\\": \\"pages\\", \\"sortableFields\\": List [], \\"view_filters\\": List [], \\"create\\": false }"
|
||||
viewstyle="VIEW_STYLE_LIST"
|
||||
/>
|
||||
</main>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -0,0 +1,549 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`NestedCollection should render connected component 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 12px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-6 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-6:hover,
|
||||
.emotion-6:active,
|
||||
.emotion-6.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
position: relative;
|
||||
top: 2px;
|
||||
color: #fff;
|
||||
width: 0;
|
||||
height: 0;
|
||||
border: 5px solid transparent;
|
||||
border-radius: 2px;
|
||||
border-top: 6px solid currentColor;
|
||||
border-bottom: 0;
|
||||
color: currentColor;
|
||||
}
|
||||
|
||||
<a
|
||||
class="emotion-6 emotion-7"
|
||||
data-testid="/"
|
||||
depth="0"
|
||||
href="/collections/pages"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
Pages
|
||||
</div>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
/>
|
||||
</div>
|
||||
</a>
|
||||
.emotion-2 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 32px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-4 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-4:hover,
|
||||
.emotion-4:active,
|
||||
.emotion-4.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
<a
|
||||
class="emotion-4 emotion-5"
|
||||
data-testid="/a"
|
||||
depth="1"
|
||||
href="/collections/pages/filter/a"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
File 1
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
.emotion-2 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 32px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-4 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-4:hover,
|
||||
.emotion-4:active,
|
||||
.emotion-4.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
<a
|
||||
class="emotion-4 emotion-5"
|
||||
data-testid="/b"
|
||||
depth="1"
|
||||
href="/collections/pages/filter/b"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
File 2
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`NestedCollection should render correctly with nested entries 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 12px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-6 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-6:hover,
|
||||
.emotion-6:active,
|
||||
.emotion-6.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
position: relative;
|
||||
top: 2px;
|
||||
color: #fff;
|
||||
width: 0;
|
||||
height: 0;
|
||||
border: 5px solid transparent;
|
||||
border-radius: 2px;
|
||||
border-top: 6px solid currentColor;
|
||||
border-bottom: 0;
|
||||
color: currentColor;
|
||||
}
|
||||
|
||||
<a
|
||||
aria-current="page"
|
||||
class="emotion-6 emotion-7 sidebar-active"
|
||||
data-testid="/"
|
||||
depth="0"
|
||||
href="/collections/pages"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
Pages
|
||||
</div>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
/>
|
||||
</div>
|
||||
</a>
|
||||
.emotion-2 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 32px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-4 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-4:hover,
|
||||
.emotion-4:active,
|
||||
.emotion-4.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
<a
|
||||
class="emotion-4 emotion-5"
|
||||
data-testid="/a"
|
||||
depth="1"
|
||||
href="/collections/pages/filter/a"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
File 1
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
.emotion-2 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 32px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-4 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-4:hover,
|
||||
.emotion-4:active,
|
||||
.emotion-4.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
<a
|
||||
class="emotion-4 emotion-5"
|
||||
data-testid="/b"
|
||||
depth="1"
|
||||
href="/collections/pages/filter/b"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
File 2
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`NestedCollection should render correctly with no entries 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px;
|
||||
padding-left: 12px;
|
||||
border-left: 2px solid #fff;
|
||||
}
|
||||
|
||||
.emotion-6 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-6:hover,
|
||||
.emotion-6:active,
|
||||
.emotion-6.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: center;
|
||||
-webkit-justify-content: center;
|
||||
-ms-flex-pack: center;
|
||||
justify-content: center;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
position: relative;
|
||||
top: 2px;
|
||||
color: #fff;
|
||||
width: 0;
|
||||
height: 0;
|
||||
border: 5px solid transparent;
|
||||
border-radius: 2px;
|
||||
border-left: 6px solid currentColor;
|
||||
border-right: 0;
|
||||
color: currentColor;
|
||||
left: 2px;
|
||||
}
|
||||
|
||||
<a
|
||||
class="emotion-6 emotion-7"
|
||||
data-testid="/"
|
||||
depth="0"
|
||||
href="/collections/pages"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
<div
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
Pages
|
||||
</div>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
/>
|
||||
</div>
|
||||
</a>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -0,0 +1,216 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`Sidebar should render nested collection with filterTerm 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-4 {
|
||||
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
|
||||
border-radius: 5px;
|
||||
background-color: #fff;
|
||||
width: 250px;
|
||||
padding: 8px 0 12px;
|
||||
position: fixed;
|
||||
max-height: calc(100vh - 112px);
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-flex-direction: column;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
font-size: 23px;
|
||||
font-weight: 600;
|
||||
padding: 0;
|
||||
margin: 18px 12px 12px;
|
||||
color: #313d3e;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
margin: 16px 0 0;
|
||||
list-style: none;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
<aside
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<h2
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
collection.sidebar.collections
|
||||
</h2>
|
||||
<collection-search
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } } }"
|
||||
searchterm=""
|
||||
/>
|
||||
<ul
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<li>
|
||||
<nested-collection
|
||||
collection="Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } }"
|
||||
data-testid="posts"
|
||||
filterterm="dir1/dir2"
|
||||
/>
|
||||
</li>
|
||||
</ul>
|
||||
</aside>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Sidebar should render sidebar with a nested collection 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-4 {
|
||||
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
|
||||
border-radius: 5px;
|
||||
background-color: #fff;
|
||||
width: 250px;
|
||||
padding: 8px 0 12px;
|
||||
position: fixed;
|
||||
max-height: calc(100vh - 112px);
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-flex-direction: column;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
font-size: 23px;
|
||||
font-weight: 600;
|
||||
padding: 0;
|
||||
margin: 18px 12px 12px;
|
||||
color: #313d3e;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
margin: 16px 0 0;
|
||||
list-style: none;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
<aside
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<h2
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
collection.sidebar.collections
|
||||
</h2>
|
||||
<collection-search
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } } }"
|
||||
searchterm=""
|
||||
/>
|
||||
<ul
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<li>
|
||||
<nested-collection
|
||||
collection="Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } }"
|
||||
data-testid="posts"
|
||||
/>
|
||||
</li>
|
||||
</ul>
|
||||
</aside>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Sidebar should render sidebar with a simple collection 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
|
||||
border-radius: 5px;
|
||||
background-color: #fff;
|
||||
width: 250px;
|
||||
padding: 8px 0 12px;
|
||||
position: fixed;
|
||||
max-height: calc(100vh - 112px);
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-flex-direction: column;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
font-size: 23px;
|
||||
font-weight: 600;
|
||||
padding: 0;
|
||||
margin: 18px 12px 12px;
|
||||
color: #313d3e;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
margin: 16px 0 0;
|
||||
list-style: none;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
display: -webkit-box;
|
||||
display: -webkit-flex;
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
-webkit-align-items: center;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding: 8px 12px;
|
||||
border-left: 2px solid #fff;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
.emotion-2 mocked-icon {
|
||||
margin-right: 8px;
|
||||
-webkit-flex-shrink: 0;
|
||||
-ms-flex-negative: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.emotion-2:hover,
|
||||
.emotion-2:active,
|
||||
.emotion-2.sidebar-active {
|
||||
color: #3a69c7;
|
||||
background-color: #e8f5fe;
|
||||
border-left-color: #4863c6;
|
||||
}
|
||||
|
||||
<aside
|
||||
class="emotion-6 emotion-7"
|
||||
>
|
||||
<h2
|
||||
class="emotion-0 emotion-1"
|
||||
>
|
||||
collection.sidebar.collections
|
||||
</h2>
|
||||
<collection-search
|
||||
collections="OrderedMap { 0: Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\" } }"
|
||||
searchterm=""
|
||||
/>
|
||||
<ul
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<li>
|
||||
<a
|
||||
class="emotion-2 emotion-3"
|
||||
data-testid="posts"
|
||||
href="/collections/posts"
|
||||
>
|
||||
<mocked-icon
|
||||
type="write"
|
||||
/>
|
||||
Posts
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</aside>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -34,12 +34,7 @@ import { selectFields } from 'Reducers/collections';
import { status, EDITORIAL_WORKFLOW } from 'Constants/publishModes';
import EditorInterface from './EditorInterface';
import withWorkflow from './withWorkflow';

const navigateCollection = collectionPath => history.push(`/collections/${collectionPath}`);
const navigateToCollection = collectionName => navigateCollection(collectionName);
const navigateToNewEntry = collectionName => navigateCollection(`${collectionName}/new`);
const navigateToEntry = (collectionName, slug) =>
  navigateCollection(`${collectionName}/entries/${slug}`);
import { navigateToCollection, navigateToNewEntry } from '../../routing/history';

export class Editor extends React.Component {
  static propTypes = {
@ -169,16 +164,6 @@ export class Editor extends React.Component {
|
||||
}
|
||||
|
||||
componentDidUpdate(prevProps) {
|
||||
/**
|
||||
* If the old slug is empty and the new slug is not, a new entry was just
|
||||
* saved, and we need to update navigation to the correct url using the
|
||||
* slug.
|
||||
*/
|
||||
const newSlug = this.props.entryDraft && this.props.entryDraft.getIn(['entry', 'slug']);
|
||||
if (!prevProps.slug && newSlug && this.props.newEntry) {
|
||||
navigateToEntry(prevProps.collection.get('name'), newSlug);
|
||||
}
|
||||
|
||||
if (!prevProps.localBackup && this.props.localBackup) {
|
||||
const confirmLoadBackup = window.confirm(this.props.t('editor.editor.confirmLoadBackup'));
|
||||
if (confirmLoadBackup) {
|
||||
@ -453,7 +438,7 @@ function mapStateToProps(state, ownProps) {
|
||||
const collectionEntriesLoaded = !!entries.getIn(['pages', collectionName]);
|
||||
const unPublishedEntry = selectUnpublishedEntry(state, collectionName, slug);
|
||||
const publishedEntry = selectEntry(state, collectionName, slug);
|
||||
const currentStatus = unPublishedEntry && unPublishedEntry.getIn(['metaData', 'status']);
|
||||
const currentStatus = unPublishedEntry && unPublishedEntry.get('status');
|
||||
const deployPreview = selectDeployPreview(state, collectionName, slug);
|
||||
const localBackup = entryDraft.get('localBackup');
|
||||
const draftKey = entryDraft.get('key');
|
||||
|
@ -20,6 +20,7 @@ import {
|
||||
removeMediaControl,
|
||||
} from 'Actions/mediaLibrary';
|
||||
import Widget from './Widget';
|
||||
import { validateMetaField } from '../../../actions/entries';
|
||||
|
||||
/**
|
||||
* This is a necessary bridge as we are still passing classnames to widgets
|
||||
@ -116,6 +117,8 @@ class EditorControl extends React.Component {
|
||||
isEditorComponent: PropTypes.bool,
|
||||
isNewEditorComponent: PropTypes.bool,
|
||||
parentIds: PropTypes.arrayOf(PropTypes.string),
|
||||
entry: ImmutablePropTypes.map.isRequired,
|
||||
collection: ImmutablePropTypes.map.isRequired,
|
||||
};
|
||||
|
||||
static defaultProps = {
|
||||
@ -171,6 +174,7 @@ class EditorControl extends React.Component {
|
||||
isNewEditorComponent,
|
||||
parentIds,
|
||||
t,
|
||||
validateMetaField,
|
||||
} = this.props;
|
||||
|
||||
const widgetName = field.get('widget');
|
||||
@ -248,7 +252,7 @@ class EditorControl extends React.Component {
|
||||
value={value}
|
||||
mediaPaths={mediaPaths}
|
||||
metadata={metadata}
|
||||
onChange={(newValue, newMetadata) => onChange(fieldName, newValue, newMetadata)}
|
||||
onChange={(newValue, newMetadata) => onChange(field, newValue, newMetadata)}
|
||||
onValidate={onValidate && partial(onValidate, this.uniqueFieldId)}
|
||||
onOpenMediaLibrary={openMediaLibrary}
|
||||
onClearMediaControl={clearMediaControl}
|
||||
@ -277,6 +281,7 @@ class EditorControl extends React.Component {
|
||||
isNewEditorComponent={isNewEditorComponent}
|
||||
parentIds={parentIds}
|
||||
t={t}
|
||||
validateMetaField={validateMetaField}
|
||||
/>
|
||||
{fieldHint && (
|
||||
<ControlHint active={isSelected || this.state.styleActive} error={hasErrors}>
|
||||
@ -311,10 +316,11 @@ const mapStateToProps = state => {
|
||||
isFetching: state.search.get('isFetching'),
|
||||
queryHits: state.search.get('queryHits'),
|
||||
config: state.config,
|
||||
collection,
|
||||
entry,
|
||||
collection,
|
||||
isLoadingAsset,
|
||||
loadEntry,
|
||||
validateMetaField: (field, value, t) => validateMetaField(state, collection, field, value, t),
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -50,21 +50,27 @@ export default class ControlPane extends React.Component {
|
||||
|
||||
return (
|
||||
<ControlPaneContainer>
|
||||
{fields.map((field, i) =>
|
||||
field.get('widget') === 'hidden' ? null : (
|
||||
{fields.map((field, i) => {
|
||||
return field.get('widget') === 'hidden' ? null : (
|
||||
<EditorControl
|
||||
key={i}
|
||||
field={field}
|
||||
value={entry.getIn(['data', field.get('name')])}
|
||||
value={
|
||||
field.get('meta')
|
||||
? entry.getIn(['meta', field.get('name')])
|
||||
: entry.getIn(['data', field.get('name')])
|
||||
}
|
||||
fieldsMetaData={fieldsMetaData}
|
||||
fieldsErrors={fieldsErrors}
|
||||
onChange={onChange}
|
||||
onValidate={onValidate}
|
||||
processControlRef={this.controlRef.bind(this)}
|
||||
controlRef={this.controlRef}
|
||||
entry={entry}
|
||||
collection={collection}
|
||||
/>
|
||||
),
|
||||
)}
|
||||
);
|
||||
})}
|
||||
</ControlPaneContainer>
|
||||
);
|
||||
}
|
||||
|
@ -59,6 +59,7 @@ export default class Widget extends Component {
|
||||
onValidateObject: PropTypes.func,
|
||||
isEditorComponent: PropTypes.bool,
|
||||
isNewEditorComponent: PropTypes.bool,
|
||||
entry: ImmutablePropTypes.map.isRequired,
|
||||
};
|
||||
|
||||
shouldComponentUpdate(nextProps) {
|
||||
@ -104,8 +105,11 @@ export default class Widget extends Component {
|
||||
const field = this.props.field;
|
||||
const errors = [];
|
||||
const validations = [this.validatePresence, this.validatePattern];
|
||||
if (field.get('meta')) {
|
||||
validations.push(this.props.validateMetaField);
|
||||
}
|
||||
validations.forEach(func => {
|
||||
const response = func(field, value);
|
||||
const response = func(field, value, this.props.t);
|
||||
if (response.error) errors.push(response.error);
|
||||
});
|
||||
if (skipWrapped) {
|
||||
@ -114,6 +118,7 @@ export default class Widget extends Component {
|
||||
const wrappedError = this.validateWrappedControl(field);
|
||||
if (wrappedError.error) errors.push(wrappedError.error);
|
||||
}
|
||||
|
||||
this.props.onValidate(errors);
|
||||
};
|
||||
|
||||
@ -211,8 +216,8 @@ export default class Widget extends Component {
|
||||
/**
|
||||
* Change handler for fields that are nested within another field.
|
||||
*/
|
||||
onChangeObject = (fieldName, newValue, newMetadata) => {
|
||||
const newObjectValue = this.getObjectValue().set(fieldName, newValue);
|
||||
onChangeObject = (field, newValue, newMetadata) => {
|
||||
const newObjectValue = this.getObjectValue().set(field.get('name'), newValue);
|
||||
return this.props.onChange(
|
||||
newObjectValue,
|
||||
newMetadata && { [this.props.field.get('name')]: newMetadata },
|
||||
|
@ -77,6 +77,9 @@ export class PreviewPane extends React.Component {
|
||||
// custom preview templates, where the field object can't be passed in.
|
||||
let field = fields && fields.find(f => f.get('name') === name);
|
||||
let value = values && values.get(field.get('name'));
|
||||
if (field.get('meta')) {
|
||||
value = this.props.entry.getIn(['meta', field.get('name')]);
|
||||
}
|
||||
const nestedFields = field.get('fields');
|
||||
const singleField = field.get('field');
|
||||
const metadata = fieldsMetaData && fieldsMetaData.get(field.get('name'), Map());
|
||||
|
@ -5,7 +5,7 @@ import { css } from '@emotion/core';
|
||||
import styled from '@emotion/styled';
|
||||
import { translate } from 'react-polyglot';
|
||||
import { Map } from 'immutable';
|
||||
import { Link } from 'react-router-dom';
|
||||
import history from 'Routing/history';
|
||||
import {
|
||||
Icon,
|
||||
Dropdown,
|
||||
@ -80,7 +80,7 @@ const ToolbarSubSectionLast = styled(ToolbarSubSectionFirst)`
|
||||
justify-content: flex-end;
|
||||
`;
|
||||
|
||||
const ToolbarSectionBackLink = styled(Link)`
|
||||
const ToolbarSectionBackLink = styled.a`
|
||||
${styles.toolbarSection};
|
||||
border-right-width: 1px;
|
||||
font-weight: normal;
|
||||
@ -568,7 +568,15 @@ class EditorToolbar extends React.Component {
|
||||
|
||||
return (
|
||||
<ToolbarContainer>
|
||||
<ToolbarSectionBackLink to={`/collections/${collection.get('name')}`}>
|
||||
<ToolbarSectionBackLink
|
||||
onClick={() => {
|
||||
if (history.length > 0) {
|
||||
history.goBack();
|
||||
} else {
|
||||
history.push(`/collections/${collection.get('name')}`);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<BackArrow>←</BackArrow>
|
||||
<div>
|
||||
<BackCollection>
|
||||
|
@ -204,13 +204,13 @@ class WorkflowList extends React.Component {
|
||||
return (
|
||||
<div>
|
||||
{entries.map(entry => {
|
||||
const timestamp = moment(entry.getIn(['metaData', 'timeStamp'])).format(
|
||||
const timestamp = moment(entry.get('updatedOn')).format(
|
||||
t('workflow.workflow.dateFormat'),
|
||||
);
|
||||
const slug = entry.get('slug');
|
||||
const editLink = `collections/${entry.getIn(['metaData', 'collection'])}/entries/${slug}`;
|
||||
const ownStatus = entry.getIn(['metaData', 'status']);
|
||||
const collectionName = entry.getIn(['metaData', 'collection']);
|
||||
const collectionName = entry.get('collection');
|
||||
const editLink = `collections/${collectionName}/entries/${slug}`;
|
||||
const ownStatus = entry.get('status');
|
||||
const collection = collections.find(
|
||||
collection => collection.get('name') === collectionName,
|
||||
);
|
||||
|
@ -316,5 +316,47 @@ describe('config', () => {
|
||||
}).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw if collection meta is not a plain object', () => {
|
||||
expect(() => {
|
||||
validateConfig(merge({}, validConfig, { collections: [{ meta: [] }] }));
|
||||
}).toThrowError("'collections[0].meta' should be object");
|
||||
});
|
||||
|
||||
it('should throw if collection meta is an empty object', () => {
|
||||
expect(() => {
|
||||
validateConfig(merge({}, validConfig, { collections: [{ meta: {} }] }));
|
||||
}).toThrowError("'collections[0].meta' should NOT have fewer than 1 properties");
|
||||
});
|
||||
|
||||
it('should throw if collection meta path is missing required properties', () => {
|
||||
expect(() => {
|
||||
validateConfig(merge({}, validConfig, { collections: [{ meta: { path: {} } }] }));
|
||||
}).toThrowError("'collections[0].meta.path' should have required property 'label'");
|
||||
expect(() => {
|
||||
validateConfig(
|
||||
merge({}, validConfig, { collections: [{ meta: { path: { label: 'Label' } } }] }),
|
||||
);
|
||||
}).toThrowError("'collections[0].meta.path' should have required property 'widget'");
|
||||
expect(() => {
|
||||
validateConfig(
|
||||
merge({}, validConfig, {
|
||||
collections: [{ meta: { path: { label: 'Label', widget: 'widget' } } }],
|
||||
}),
|
||||
);
|
||||
}).toThrowError("'collections[0].meta.path' should have required property 'index_file'");
|
||||
});
|
||||
|
||||
it('should allow collection meta to have a path configuration', () => {
|
||||
expect(() => {
|
||||
validateConfig(
|
||||
merge({}, validConfig, {
|
||||
collections: [
|
||||
{ meta: { path: { label: 'Path', widget: 'string', index_file: 'index' } } },
|
||||
],
|
||||
}),
|
||||
);
|
||||
}).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -183,6 +183,30 @@ const getConfigSchema = () => ({
        },
      },
      view_filters: viewFilters,
      nested: {
        type: 'object',
        properties: {
          depth: { type: 'number', minimum: 1, maximum: 1000 },
          summary: { type: 'string' },
        },
        required: ['depth'],
      },
      meta: {
        type: 'object',
        properties: {
          path: {
            type: 'object',
            properties: {
              label: { type: 'string' },
              widget: { type: 'string' },
              index_file: { type: 'string' },
            },
            required: ['label', 'widget', 'index_file'],
          },
        },
        additionalProperties: false,
        minProperties: 1,
      },
    },
    required: ['name', 'label'],
    oneOf: [{ required: ['files'] }, { required: ['folder', 'fields'] }],
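For reference, a minimal sketch (not part of this commit) of a collection config that exercises the new `nested` and `meta.path` schema keys above; the backend, collection name, folder, and field values are illustrative.

// Hypothetical manual-init config; names and paths are made up.
CMS.init({
  config: {
    backend: { name: 'git-gateway' },
    media_folder: 'static/media',
    collections: [
      {
        name: 'pages',
        label: 'Pages',
        folder: 'content/pages',
        // allow entries to live in subfolders (up to 100 levels deep)
        nested: { depth: 100 },
        // expose the entry path as an editable meta field
        meta: { path: { label: 'Path', widget: 'string', index_file: 'index' } },
        fields: [{ name: 'title', label: 'Title', widget: 'string' }],
      },
    ],
  },
});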
|
@ -103,7 +103,7 @@ export const prepareSlug = (slug: string) => {
|
||||
);
|
||||
};
|
||||
|
||||
const getProcessSegment = (slugConfig: SlugConfig) =>
|
||||
export const getProcessSegment = (slugConfig: SlugConfig) =>
|
||||
flow([value => String(value), prepareSlug, partialRight(sanitizeSlug, slugConfig)]);
|
||||
|
||||
export const slugFormatter = (
|
||||
|
@ -449,4 +449,13 @@ export const selectFieldsComments = (collection: Collection, entryMap: EntryMap)
  return comments;
};

export const selectHasMetaPath = (collection: Collection) => {
  return (
    collection.has('folder') &&
    collection.get('type') === FOLDER &&
    collection.has('meta') &&
    collection.get('meta')?.has('path')
  );
};

export default collections;
|
@ -98,12 +98,7 @@ const unpublishedEntries = (state = Map(), action: EditorialWorkflowAction) => {
|
||||
// Update Optimistically
|
||||
return state.withMutations(map => {
|
||||
map.setIn(
|
||||
[
|
||||
'entities',
|
||||
`${action.payload!.collection}.${action.payload!.slug}`,
|
||||
'metaData',
|
||||
'status',
|
||||
],
|
||||
['entities', `${action.payload!.collection}.${action.payload!.slug}`, 'status'],
|
||||
action.payload!.newStatus,
|
||||
);
|
||||
map.setIn(
|
||||
@ -148,7 +143,7 @@ export const selectUnpublishedEntry = (
|
||||
export const selectUnpublishedEntriesByStatus = (state: EditorialWorkflow, status: string) => {
|
||||
if (!state) return null;
|
||||
const entities = state.get('entities') as Entities;
|
||||
return entities.filter(entry => entry.getIn(['metaData', 'status']) === status).valueSeq();
|
||||
return entities.filter(entry => entry.get('status') === status).valueSeq();
|
||||
};
|
||||
|
||||
export const selectUnpublishedSlugs = (state: EditorialWorkflow, collection: string) => {
|
||||
|
@ -351,6 +351,14 @@ export const selectEntries = (state: Entries, collection: Collection) => {
|
||||
return entries;
|
||||
};
|
||||
|
||||
export const selectEntryByPath = (state: Entries, collection: string, path: string) => {
|
||||
const slugs = selectPublishedSlugs(state, collection);
|
||||
const entries =
|
||||
slugs && (slugs.map(slug => selectEntry(state, collection, slug as string)) as List<EntryMap>);
|
||||
|
||||
return entries && entries.find(e => e?.get('path') === path);
|
||||
};
|
||||
|
||||
export const selectEntriesLoaded = (state: Entries, collection: string) => {
|
||||
return !!state.getIn(['pages', collection]);
|
||||
};
|
||||
|
@ -22,6 +22,9 @@ import {
|
||||
UNPUBLISHED_ENTRY_PERSIST_SUCCESS,
|
||||
UNPUBLISHED_ENTRY_PERSIST_FAILURE,
|
||||
} from 'Actions/editorialWorkflow';
|
||||
import { get } from 'lodash';
|
||||
import { selectFolderEntryExtension, selectHasMetaPath } from './collections';
|
||||
import { join } from 'path';
|
||||
|
||||
const initialState = Map({
|
||||
entry: Map(),
|
||||
@ -87,10 +90,22 @@ const entryDraftReducer = (state = Map(), action) => {
|
||||
}
|
||||
case DRAFT_CHANGE_FIELD: {
|
||||
return state.withMutations(state => {
|
||||
state.setIn(['entry', 'data', action.payload.field], action.payload.value);
|
||||
state.mergeDeepIn(['fieldsMetaData'], fromJS(action.payload.metadata));
|
||||
const { field, value, metadata, entries } = action.payload;
|
||||
const name = field.get('name');
|
||||
const meta = field.get('meta');
|
||||
if (meta) {
|
||||
state.setIn(['entry', 'meta', name], value);
|
||||
} else {
|
||||
state.setIn(['entry', 'data', name], value);
|
||||
}
|
||||
state.mergeDeepIn(['fieldsMetaData'], fromJS(metadata));
|
||||
const newData = state.getIn(['entry', 'data']);
|
||||
state.set('hasChanged', !action.payload.entries.some(e => newData.equals(e.get('data'))));
|
||||
const newMeta = state.getIn(['entry', 'meta']);
|
||||
state.set(
|
||||
'hasChanged',
|
||||
!entries.some(e => newData.equals(e.get('data'))) ||
|
||||
!entries.some(e => newMeta.equals(e.get('meta'))),
|
||||
);
|
||||
});
|
||||
}
|
||||
case DRAFT_VALIDATION_ERRORS:
|
||||
@ -161,4 +176,16 @@ const entryDraftReducer = (state = Map(), action) => {
  }
};

export const selectCustomPath = (collection, entryDraft) => {
  if (!selectHasMetaPath(collection)) {
    return;
  }
  const meta = entryDraft.getIn(['entry', 'meta']);
  const path = meta && meta.get('path');
  const indexFile = get(collection.toJS(), ['meta', 'path', 'index_file']);
  const extension = selectFolderEntryExtension(collection);
  const customPath = path && join(collection.get('folder'), path, `${indexFile}.${extension}`);
  return customPath;
};

export default entryDraftReducer;
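As a rough illustration of the path `selectCustomPath` composes (plain values instead of the Immutable records used above; the folder, meta path, index file, and extension are made up):

const { join } = require('path');

// Mirrors the composition above: collection folder + meta path + index file + extension.
const customPath = (folder, metaPath, indexFile, extension) =>
  metaPath && join(folder, metaPath, `${indexFile}.${extension}`);

console.log(customPath('content/pages', 'dir1/dir2', 'index', 'md'));
// -> content/pages/dir1/dir2/index.md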
|
||||
|
@ -0,0 +1,44 @@
|
||||
jest.mock('history');
|
||||
|
||||
describe('history', () => {
|
||||
const { createHashHistory } = require('history');
|
||||
const history = { push: jest.fn(), replace: jest.fn() };
|
||||
createHashHistory.mockReturnValue(history);
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('navigateToCollection', () => {
|
||||
it('should push route', () => {
|
||||
const { navigateToCollection } = require('../history');
|
||||
|
||||
navigateToCollection('posts');
|
||||
|
||||
expect(history.push).toHaveBeenCalledTimes(1);
|
||||
expect(history.push).toHaveBeenCalledWith('/collections/posts');
|
||||
});
|
||||
});
|
||||
|
||||
describe('navigateToNewEntry', () => {
|
||||
it('should replace route', () => {
|
||||
const { navigateToNewEntry } = require('../history');
|
||||
|
||||
navigateToNewEntry('posts');
|
||||
|
||||
expect(history.replace).toHaveBeenCalledTimes(1);
|
||||
expect(history.replace).toHaveBeenCalledWith('/collections/posts/new');
|
||||
});
|
||||
});
|
||||
|
||||
describe('navigateToEntry', () => {
|
||||
it('should replace route', () => {
|
||||
const { navigateToEntry } = require('../history');
|
||||
|
||||
navigateToEntry('posts', 'index');
|
||||
|
||||
expect(history.replace).toHaveBeenCalledTimes(1);
|
||||
expect(history.replace).toHaveBeenCalledWith('/collections/posts/entries/index');
|
||||
});
|
||||
});
|
||||
});
|
@ -2,4 +2,11 @@ import { createHashHistory } from 'history';

const history = createHashHistory();

export const navigateToCollection = collectionName =>
  history.push(`/collections/${collectionName}`);
export const navigateToNewEntry = collectionName =>
  history.replace(`/collections/${collectionName}/new`);
export const navigateToEntry = (collectionName, slug) =>
  history.replace(`/collections/${collectionName}/entries/${slug}`);

export default history;
|
@ -93,9 +93,10 @@ export type EntryObject = {
|
||||
collection: string;
|
||||
mediaFiles: List<MediaFileMap>;
|
||||
newRecord: boolean;
|
||||
metaData: { status: string };
|
||||
author?: string;
|
||||
updatedOn?: string;
|
||||
status: string;
|
||||
meta: StaticallyTypedRecord<{ path: string }>;
|
||||
};
|
||||
|
||||
export type EntryMap = StaticallyTypedRecord<EntryObject>;
|
||||
@ -107,6 +108,7 @@ export type FieldsErrors = StaticallyTypedRecord<{ [field: string]: { type: stri
|
||||
export type EntryDraft = StaticallyTypedRecord<{
|
||||
entry: Entry;
|
||||
fieldsErrors: FieldsErrors;
|
||||
fieldsMetaData?: Map<string, Map<string, string>>;
|
||||
}>;
|
||||
|
||||
export type EntryField = StaticallyTypedRecord<{
|
||||
@ -119,6 +121,7 @@ export type EntryField = StaticallyTypedRecord<{
|
||||
media_folder?: string;
|
||||
public_folder?: string;
|
||||
comment?: string;
|
||||
meta?: boolean;
|
||||
}>;
|
||||
|
||||
export type EntryFields = List<EntryField>;
|
||||
@ -145,6 +148,17 @@ export type ViewFilter = {
|
||||
pattern: string;
|
||||
id: string;
|
||||
};
|
||||
type NestedObject = { depth: number };
|
||||
|
||||
type Nested = StaticallyTypedRecord<NestedObject>;
|
||||
|
||||
type PathObject = { label: string; widget: string; index_file: string };
|
||||
|
||||
type MetaObject = {
|
||||
path?: StaticallyTypedRecord<PathObject>;
|
||||
};
|
||||
|
||||
type Meta = StaticallyTypedRecord<MetaObject>;
|
||||
|
||||
type CollectionObject = {
|
||||
name: string;
|
||||
@ -170,6 +184,8 @@ type CollectionObject = {
|
||||
label: string;
|
||||
sortableFields: List<string>;
|
||||
view_filters: List<StaticallyTypedRecord<ViewFilter>>;
|
||||
nested?: Nested;
|
||||
meta?: Meta;
|
||||
};
|
||||
|
||||
export type Collection = StaticallyTypedRecord<CollectionObject>;
|
||||
@ -332,6 +348,10 @@ export interface EntriesFilterFailurePayload {
|
||||
error: Error;
|
||||
}
|
||||
|
||||
export interface EntriesMoveSuccessPayload extends EntryPayload {
|
||||
entries: EntryObject[];
|
||||
}
|
||||
|
||||
export interface EntriesAction extends Action<string> {
|
||||
payload:
|
||||
| EntryRequestPayload
|
||||
|
@ -26,6 +26,9 @@ export default class AssetProxy {
|
||||
|
||||
async toBase64(): Promise<string> {
|
||||
const blob = await fetch(this.url).then(response => response.blob());
|
||||
if (blob.size <= 0) {
|
||||
return '';
|
||||
}
|
||||
const result = await new Promise<string>(resolve => {
|
||||
const fr = new FileReader();
|
||||
fr.onload = (readerEvt): void => {
|
||||
|
@ -7,11 +7,12 @@ interface Options {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
data?: any;
|
||||
label?: string | null;
|
||||
metaData?: unknown | null;
|
||||
isModification?: boolean | null;
|
||||
mediaFiles?: MediaFile[] | null;
|
||||
author?: string;
|
||||
updatedOn?: string;
|
||||
status?: string;
|
||||
meta?: { path?: string };
|
||||
}
|
||||
|
||||
export interface EntryValue {
|
||||
@ -23,11 +24,12 @@ export interface EntryValue {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
data: any;
|
||||
label: string | null;
|
||||
metaData: unknown | null;
|
||||
isModification: boolean | null;
|
||||
mediaFiles: MediaFile[];
|
||||
author: string;
|
||||
updatedOn: string;
|
||||
status?: string;
|
||||
meta: { path?: string };
|
||||
}
|
||||
|
||||
export function createEntry(collection: string, slug = '', path = '', options: Options = {}) {
|
||||
@ -39,11 +41,12 @@ export function createEntry(collection: string, slug = '', path = '', options: O
|
||||
raw: options.raw || '',
|
||||
data: options.data || {},
|
||||
label: options.label || null,
|
||||
metaData: options.metaData || null,
|
||||
isModification: isBoolean(options.isModification) ? options.isModification : null,
|
||||
mediaFiles: options.mediaFiles || [],
|
||||
author: options.author || '',
|
||||
updatedOn: options.updatedOn || '',
|
||||
status: options.status || '',
|
||||
meta: options.meta || {},
|
||||
};
|
||||
|
||||
return returnObj;
|
||||
|
@ -26,13 +26,16 @@ export interface UnpublishedEntryMediaFile {
|
||||
}
|
||||
|
||||
export interface ImplementationEntry {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
data: string;
|
||||
file: { path: string; label?: string; id?: string | null; author?: string; updatedOn?: string };
|
||||
slug?: string;
|
||||
mediaFiles?: ImplementationMediaFile[];
|
||||
metaData?: { collection: string; status: string };
|
||||
isModification?: boolean;
|
||||
}
|
||||
|
||||
export interface UnpublishedEntry {
|
||||
slug: string;
|
||||
collection: string;
|
||||
status: string;
|
||||
diffs: { id: string; path: string; newFile: boolean }[];
|
||||
updatedAt: string;
|
||||
}
|
||||
|
||||
export interface Map {
|
||||
@ -48,7 +51,7 @@ export type AssetProxy = {
|
||||
toBase64?: () => Promise<string>;
|
||||
};
|
||||
|
||||
export type Entry = { path: string; slug: string; raw: string };
|
||||
export type Entry = { path: string; slug: string; raw: string; newPath?: string };
|
||||
|
||||
export type PersistOptions = {
|
||||
newEntry?: boolean;
|
||||
@ -116,8 +119,24 @@ export interface Implementation {
|
||||
persistMedia: (file: AssetProxy, opts: PersistOptions) => Promise<ImplementationMediaFile>;
|
||||
deleteFile: (path: string, commitMessage: string) => Promise<void>;
|
||||
|
||||
unpublishedEntries: () => Promise<ImplementationEntry[]>;
|
||||
unpublishedEntry: (collection: string, slug: string) => Promise<ImplementationEntry>;
|
||||
unpublishedEntries: () => Promise<string[]>;
|
||||
unpublishedEntry: (args: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
}) => Promise<UnpublishedEntry>;
|
||||
unpublishedEntryDataFile: (
|
||||
collection: string,
|
||||
slug: string,
|
||||
path: string,
|
||||
id: string,
|
||||
) => Promise<string>;
|
||||
unpublishedEntryMediaFile: (
|
||||
collection: string,
|
||||
slug: string,
|
||||
path: string,
|
||||
id: string,
|
||||
) => Promise<ImplementationMediaFile>;
|
||||
updateUnpublishedEntryStatus: (
|
||||
collection: string,
|
||||
slug: string,
|
||||
@ -155,12 +174,6 @@ export type ImplementationFile = {
|
||||
path: string;
|
||||
};
|
||||
|
||||
type Metadata = {
|
||||
objects: { entry: { path: string } };
|
||||
collection: string;
|
||||
status: string;
|
||||
};
|
||||
|
||||
type ReadFile = (
|
||||
path: string,
|
||||
id: string | null | undefined,
|
||||
@ -169,10 +182,6 @@ type ReadFile = (
|
||||
|
||||
type ReadFileMetadata = (path: string, id: string | null | undefined) => Promise<FileMetadata>;
|
||||
|
||||
type ReadUnpublishedFile = (
|
||||
key: string,
|
||||
) => Promise<{ metaData: Metadata; fileData: string; isModification: boolean; slug: string }>;
|
||||
|
||||
const fetchFiles = async (
|
||||
files: ImplementationFile[],
|
||||
readFile: ReadFile,
|
||||
@ -206,47 +215,6 @@ const fetchFiles = async (
|
||||
) as Promise<ImplementationEntry[]>;
|
||||
};
|
||||
|
||||
const fetchUnpublishedFiles = async (
|
||||
keys: string[],
|
||||
readUnpublishedFile: ReadUnpublishedFile,
|
||||
apiName: string,
|
||||
) => {
|
||||
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
|
||||
const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
|
||||
keys.forEach(key => {
|
||||
promises.push(
|
||||
new Promise(resolve =>
|
||||
sem.take(() =>
|
||||
readUnpublishedFile(key)
|
||||
.then(data => {
|
||||
if (data === null || data === undefined) {
|
||||
resolve({ error: true });
|
||||
sem.leave();
|
||||
} else {
|
||||
resolve({
|
||||
slug: data.slug,
|
||||
file: { path: data.metaData.objects.entry.path, id: null },
|
||||
data: data.fileData,
|
||||
metaData: data.metaData,
|
||||
isModification: data.isModification,
|
||||
});
|
||||
sem.leave();
|
||||
}
|
||||
})
|
||||
.catch((error = true) => {
|
||||
sem.leave();
|
||||
console.error(`failed to load file from ${apiName}: ${key}`);
|
||||
resolve({ error });
|
||||
}),
|
||||
),
|
||||
),
|
||||
);
|
||||
});
|
||||
return Promise.all(promises).then(loadedEntries =>
|
||||
loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
|
||||
) as Promise<ImplementationEntry[]>;
|
||||
};
|
||||
|
||||
export const entriesByFolder = async (
|
||||
listFiles: () => Promise<ImplementationFile[]>,
|
||||
readFile: ReadFile,
|
||||
@ -266,15 +234,10 @@ export const entriesByFiles = async (
|
||||
return fetchFiles(files, readFile, readFileMetadata, apiName);
|
||||
};
|
||||
|
||||
export const unpublishedEntries = async (
|
||||
listEntriesKeys: () => Promise<string[]>,
|
||||
readUnpublishedFile: ReadUnpublishedFile,
|
||||
apiName: string,
|
||||
) => {
|
||||
export const unpublishedEntries = async (listEntriesKeys: () => Promise<string[]>) => {
|
||||
try {
|
||||
const keys = await listEntriesKeys();
|
||||
const entries = await fetchUnpublishedFiles(keys, readUnpublishedFile, apiName);
|
||||
return entries;
|
||||
return keys;
|
||||
} catch (error) {
|
||||
if (error.message === 'Not Found') {
|
||||
return Promise.resolve([]);
|
||||
@ -392,7 +355,6 @@ type GetDiffFromLocalTreeMethods = {
|
||||
oldPath: string;
|
||||
newPath: string;
|
||||
status: string;
|
||||
binary: boolean;
|
||||
}[]
|
||||
>;
|
||||
filterFile: (file: { path: string; name: string }) => boolean;
|
||||
@ -417,7 +379,7 @@ const getDiffFromLocalTree = async ({
|
||||
}: GetDiffFromLocalTreeArgs) => {
|
||||
const diff = await getDifferences(branch.sha, localTree.head);
|
||||
const diffFiles = diff
|
||||
.filter(d => (d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder)) && !d.binary)
|
||||
.filter(d => d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder))
|
||||
.reduce((acc, d) => {
|
||||
if (d.status === 'renamed') {
|
||||
acc.push({
|
||||
|
@ -20,6 +20,7 @@ import { asyncLock, AsyncLock as AL } from './asyncLock';
|
||||
import {
|
||||
Implementation as I,
|
||||
ImplementationEntry as IE,
|
||||
UnpublishedEntry as UE,
|
||||
ImplementationMediaFile as IMF,
|
||||
ImplementationFile as IF,
|
||||
DisplayURLObject as DUO,
|
||||
@ -75,6 +76,7 @@ import {
|
||||
export type AsyncLock = AL;
|
||||
export type Implementation = I;
|
||||
export type ImplementationEntry = IE;
|
||||
export type UnpublishedEntry = UE;
|
||||
export type ImplementationMediaFile = IMF;
|
||||
export type ImplementationFile = IF;
|
||||
export type DisplayURL = DU;
|
||||
|
@ -81,6 +81,8 @@ const en = {
|
||||
rangeCountExact: '%{fieldLabel} must have exactly %{count} item(s).',
|
||||
minCount: '%{fieldLabel} must be at least %{minCount} item(s).',
|
||||
maxCount: '%{fieldLabel} must be %{maxCount} or less item(s).',
|
||||
invalidPath: `'%{path}' is not a valid path`,
|
||||
pathExists: `Path '%{path}' already exists`,
|
||||
},
|
||||
},
|
||||
editor: {
|
||||
|
@ -27,7 +27,8 @@
|
||||
"dotenv": "^8.2.0",
|
||||
"express": "^4.17.1",
|
||||
"morgan": "^1.9.1",
|
||||
"simple-git": "^2.0.0"
|
||||
"simple-git": "^2.0.0",
|
||||
"what-the-diff": "^0.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cors": "^2.8.6",
|
||||
|
@ -5,8 +5,8 @@ import Joi from '@hapi/joi';
|
||||
const assetFailure = (result: Joi.ValidationResult, expectedMessage: string) => {
|
||||
const { error } = result;
|
||||
expect(error).not.toBeNull();
|
||||
expect(error.details).toHaveLength(1);
|
||||
const message = error.details.map(({ message }) => message)[0];
|
||||
expect(error!.details).toHaveLength(1);
|
||||
const message = error!.details.map(({ message }) => message)[0];
|
||||
expect(message).toBe(expectedMessage);
|
||||
};
|
||||
|
||||
@ -26,7 +26,7 @@ describe('defaultSchema', () => {
|
||||
|
||||
assetFailure(
|
||||
schema.validate({ action: 'unknown', params: {} }),
|
||||
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, getDeployPreview]',
|
||||
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, unpublishedEntryDataFile, unpublishedEntryMediaFile, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, getDeployPreview]',
|
||||
);
|
||||
});
|
||||
|
||||
@ -157,28 +157,13 @@ describe('defaultSchema', () => {
|
||||
describe('unpublishedEntry', () => {
|
||||
it('should fail on invalid params', () => {
|
||||
const schema = defaultSchema();
|
||||
|
||||
assetFailure(
|
||||
schema.validate({ action: 'unpublishedEntry', params: { ...defaultParams } }),
|
||||
'"params.collection" is required',
|
||||
);
|
||||
assetFailure(
|
||||
schema.validate({
|
||||
action: 'unpublishedEntry',
|
||||
params: { ...defaultParams, collection: 'collection' },
|
||||
}),
|
||||
'"params.slug" is required',
|
||||
);
|
||||
assetFailure(
|
||||
schema.validate({
|
||||
action: 'unpublishedEntry',
|
||||
params: { ...defaultParams, collection: 'collection', slug: 1 },
|
||||
}),
|
||||
'"params.slug" must be a string',
|
||||
schema.validate({ action: 'unpublishedEntry', params: {} }),
|
||||
'"params.branch" is required',
|
||||
);
|
||||
});
|
||||
|
||||
it('should pass on valid params', () => {
|
||||
it('should pass on valid collection and slug', () => {
|
||||
const schema = defaultSchema();
|
||||
const { error } = schema.validate({
|
||||
action: 'unpublishedEntry',
|
||||
@ -187,6 +172,66 @@ describe('defaultSchema', () => {
|
||||
|
||||
expect(error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should pass on valid id', () => {
|
||||
const schema = defaultSchema();
|
||||
const { error } = schema.validate({
|
||||
action: 'unpublishedEntry',
|
||||
params: { ...defaultParams, id: 'id' },
|
||||
});
|
||||
|
||||
expect(error).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
['unpublishedEntryDataFile', 'unpublishedEntryMediaFile'].forEach(action => {
|
||||
describe(action, () => {
|
||||
it('should fail on invalid params', () => {
|
||||
const schema = defaultSchema();
|
||||
|
||||
assetFailure(
|
||||
schema.validate({ action, params: { ...defaultParams } }),
|
||||
'"params.collection" is required',
|
||||
);
|
||||
assetFailure(
|
||||
schema.validate({
|
||||
action,
|
||||
params: { ...defaultParams, collection: 'collection' },
|
||||
}),
|
||||
'"params.slug" is required',
|
||||
);
|
||||
assetFailure(
|
||||
schema.validate({
|
||||
action,
|
||||
params: { ...defaultParams, collection: 'collection', slug: 'slug' },
|
||||
}),
|
||||
'"params.id" is required',
|
||||
);
|
||||
assetFailure(
|
||||
schema.validate({
|
||||
action,
|
||||
params: { ...defaultParams, collection: 'collection', slug: 'slug', id: 'id' },
|
||||
}),
|
||||
'"params.path" is required',
|
||||
);
|
||||
});
|
||||
|
||||
it('should pass on valid params', () => {
|
||||
const schema = defaultSchema();
|
||||
const { error } = schema.validate({
|
||||
action,
|
||||
params: {
|
||||
...defaultParams,
|
||||
collection: 'collection',
|
||||
slug: 'slug',
|
||||
id: 'id',
|
||||
path: 'path',
|
||||
},
|
||||
});
|
||||
|
||||
expect(error).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteUnpublishedEntry', () => {
|
||||
|
@ -8,6 +8,8 @@ const allowedActions = [
|
||||
'getEntry',
|
||||
'unpublishedEntries',
|
||||
'unpublishedEntry',
|
||||
'unpublishedEntryDataFile',
|
||||
'unpublishedEntryMediaFile',
|
||||
'deleteUnpublishedEntry',
|
||||
'persistEntry',
|
||||
'updateUnpublishedEntryStatus',
|
||||
@ -75,10 +77,33 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
      },
      {
        is: 'unpublishedEntry',
        then: defaultParams
          .keys({
            id: Joi.string().optional(),
            collection: Joi.string().optional(),
            slug: Joi.string().optional(),
          })
          .required(),
      },
      {
        is: 'unpublishedEntryDataFile',
        then: defaultParams
          .keys({
            collection,
            slug,
            id: requiredString,
            path: requiredString,
          })
          .required(),
      },
      {
        is: 'unpublishedEntryMediaFile',
        then: defaultParams
          .keys({
            collection,
            slug,
            id: requiredString,
            path: requiredString,
          })
          .required(),
      },
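For orientation, hedged examples of request bodies the schema above would accept; the branch, collection, slug, and id values are illustrative, not taken from this commit.

// Look up an unpublished entry either by id or by collection + slug.
const byId = {
  action: 'unpublishedEntry',
  params: { branch: 'master', id: 'posts/dir1/dir2/index' },
};
const byCollectionAndSlug = {
  action: 'unpublishedEntry',
  params: { branch: 'master', collection: 'posts', slug: 'dir1/dir2/index' },
};
// Fetch a single data file of that entry; here id and path point at the same file.
const dataFile = {
  action: 'unpublishedEntryDataFile',
  params: {
    branch: 'master',
    collection: 'posts',
    slug: 'dir1/dir2/index',
    id: 'content/posts/dir1/dir2/index.md',
    path: 'content/posts/dir1/dir2/index.md',
  },
};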
@ -95,7 +120,12 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
|
||||
is: 'persistEntry',
|
||||
then: defaultParams
|
||||
.keys({
|
||||
entry: Joi.object({ slug: requiredString, path, raw: requiredString }).required(),
|
||||
entry: Joi.object({
|
||||
slug: requiredString,
|
||||
path,
|
||||
raw: requiredString,
|
||||
newPath: path.optional(),
|
||||
}).required(),
|
||||
assets: Joi.array()
|
||||
.items(asset)
|
||||
.required(),
|
||||
|
@ -5,8 +5,8 @@ import { getSchema } from '.';
|
||||
const assetFailure = (result: Joi.ValidationResult, expectedMessage: string) => {
|
||||
const { error } = result;
|
||||
expect(error).not.toBeNull();
|
||||
expect(error.details).toHaveLength(1);
|
||||
const message = error.details.map(({ message }) => message)[0];
|
||||
expect(error!.details).toHaveLength(1);
|
||||
const message = error!.details.map(({ message }) => message)[0];
|
||||
expect(message).toBe(expectedMessage);
|
||||
};
|
||||
|
||||
|
@ -12,7 +12,7 @@ import {
|
||||
PersistMediaParams,
|
||||
DeleteFileParams,
|
||||
} from '../types';
|
||||
import { listRepoFiles, deleteFile, writeFile } from '../utils/fs';
|
||||
import { listRepoFiles, deleteFile, writeFile, move } from '../utils/fs';
|
||||
import { entriesFromFiles, readMediaFile } from '../utils/entries';
|
||||
|
||||
type Options = {
|
||||
@ -67,6 +67,9 @@ export const localFsMiddleware = ({ repoPath }: Options) => {
|
||||
writeFile(path.join(repoPath, a.path), Buffer.from(a.content, a.encoding)),
|
||||
),
|
||||
);
|
||||
if (entry.newPath) {
|
||||
await move(path.join(repoPath, entry.path), path.join(repoPath, entry.newPath));
|
||||
}
|
||||
res.json({ message: 'entry persisted' });
|
||||
break;
|
||||
}
|
||||
|
@ -9,8 +9,8 @@ jest.mock('simple-git/promise');
|
||||
const assetFailure = (result: Joi.ValidationResult, expectedMessage: string) => {
|
||||
const { error } = result;
|
||||
expect(error).not.toBeNull();
|
||||
expect(error.details).toHaveLength(1);
|
||||
const message = error.details.map(({ message }) => message)[0];
|
||||
expect(error!.details).toHaveLength(1);
|
||||
const message = error!.details.map(({ message }) => message)[0];
|
||||
expect(message).toBe(expectedMessage);
|
||||
};
|
||||
|
||||
|
@ -2,15 +2,15 @@ import express from 'express';
|
||||
import path from 'path';
|
||||
import { promises as fs } from 'fs';
|
||||
import {
|
||||
parseContentKey,
|
||||
branchFromContentKey,
|
||||
generateContentKey,
|
||||
contentKeyFromBranch,
|
||||
CMS_BRANCH_PREFIX,
|
||||
statusToLabel,
|
||||
labelToStatus,
|
||||
parseContentKey,
|
||||
} from 'netlify-cms-lib-util/src/APIUtils';
|
||||
|
||||
import { parse } from 'what-the-diff';
|
||||
import { defaultSchema, joi } from '../joi';
|
||||
import {
|
||||
EntriesByFolderParams,
|
||||
@ -27,16 +27,19 @@ import {
|
||||
UpdateUnpublishedEntryStatusParams,
|
||||
Entry,
|
||||
GetMediaFileParams,
|
||||
DeleteEntryParams,
|
||||
UnpublishedEntryDataFileParams,
|
||||
UnpublishedEntryMediaFileParams,
|
||||
} from '../types';
|
||||
// eslint-disable-next-line import/default
|
||||
import simpleGit from 'simple-git/promise';
|
||||
import { pathTraversal } from '../joi/customValidators';
|
||||
import { listRepoFiles, writeFile } from '../utils/fs';
|
||||
import { listRepoFiles, writeFile, move } from '../utils/fs';
|
||||
import { entriesFromFiles, readMediaFile } from '../utils/entries';
|
||||
|
||||
const commit = async (git: simpleGit.SimpleGit, commitMessage: string, files: string[]) => {
|
||||
await git.add(files);
|
||||
await git.commit(commitMessage, files, {
|
||||
const commit = async (git: simpleGit.SimpleGit, commitMessage: string) => {
|
||||
await git.add('.');
|
||||
await git.commit(commitMessage, undefined, {
|
||||
'--no-verify': true,
|
||||
'--no-gpg-sign': true,
|
||||
});
|
||||
@ -62,69 +65,10 @@ const runOnBranch = async <T>(git: simpleGit.SimpleGit, branch: string, func: ()
|
||||
|
||||
const branchDescription = (branch: string) => `branch.${branch}.description`;
|
||||
|
||||
const getEntryDataFromDiff = async (git: simpleGit.SimpleGit, branch: string, diff: string[]) => {
|
||||
const contentKey = contentKeyFromBranch(branch);
|
||||
const { collection, slug } = parseContentKey(contentKey);
|
||||
const path = diff.find(d => d.includes(slug)) as string;
|
||||
const mediaFiles = diff.filter(d => d !== path);
|
||||
const label = await git.raw(['config', branchDescription(branch)]);
|
||||
const status = label && labelToStatus(label.trim());
|
||||
|
||||
return {
|
||||
slug,
|
||||
metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status },
|
||||
};
|
||||
};
|
||||
|
||||
type Options = {
|
||||
repoPath: string;
|
||||
};
|
||||
|
||||
const entriesFromDiffs = async (
|
||||
git: simpleGit.SimpleGit,
|
||||
branch: string,
|
||||
repoPath: string,
|
||||
cmsBranches: string[],
|
||||
diffs: simpleGit.DiffResult[],
|
||||
) => {
|
||||
const entries = [];
|
||||
for (let i = 0; i < diffs.length; i++) {
|
||||
const cmsBranch = cmsBranches[i];
|
||||
const diff = diffs[i];
|
||||
const data = await getEntryDataFromDiff(
|
||||
git,
|
||||
cmsBranch,
|
||||
diff.files.map(f => f.file),
|
||||
);
|
||||
const entryPath = data.metaData.objects.entry.path;
|
||||
const [entry] = await runOnBranch(git, cmsBranch, () =>
|
||||
entriesFromFiles(repoPath, [{ path: entryPath }]),
|
||||
);
|
||||
|
||||
const rawDiff = await git.diff([branch, cmsBranch, '--', entryPath]);
|
||||
entries.push({
|
||||
...data,
|
||||
...entry,
|
||||
isModification: !rawDiff.includes('new file'),
|
||||
});
|
||||
}
|
||||
|
||||
return entries;
|
||||
};
|
||||
|
||||
const getEntryMediaFiles = async (
|
||||
git: simpleGit.SimpleGit,
|
||||
repoPath: string,
|
||||
cmsBranch: string,
|
||||
files: string[],
|
||||
) => {
|
||||
const mediaFiles = await runOnBranch(git, cmsBranch, async () => {
|
||||
const serializedFiles = await Promise.all(files.map(file => readMediaFile(repoPath, file)));
|
||||
return serializedFiles;
|
||||
});
|
||||
return mediaFiles;
|
||||
};
|
||||
|
||||
const commitEntry = async (
|
||||
git: simpleGit.SimpleGit,
|
||||
repoPath: string,
|
||||
@ -138,8 +82,12 @@ const commitEntry = async (
|
||||
await Promise.all(
|
||||
assets.map(a => writeFile(path.join(repoPath, a.path), Buffer.from(a.content, a.encoding))),
|
||||
);
|
||||
if (entry.newPath) {
|
||||
await move(path.join(repoPath, entry.path), path.join(repoPath, entry.newPath));
|
||||
}
|
||||
|
||||
// commits files
|
||||
await commit(git, commitMessage, [entry.path, ...assets.map(a => a.path)]);
|
||||
await commit(git, commitMessage);
|
||||
};
|
||||
|
||||
const rebase = async (git: simpleGit.SimpleGit, branch: string) => {
|
||||
@ -175,6 +123,25 @@ const isBranchExists = async (git: simpleGit.SimpleGit, branch: string) => {
|
||||
return branchExists;
|
||||
};
|
||||
|
||||
const getDiffs = async (git: simpleGit.SimpleGit, source: string, dest: string) => {
|
||||
const rawDiff = await git.diff([source, dest]);
|
||||
const diffs = parse(rawDiff).map(d => {
|
||||
const oldPath = d.oldPath?.replace(/b\//, '') || '';
|
||||
const newPath = d.newPath?.replace(/b\//, '') || '';
|
||||
const path = newPath || (oldPath as string);
|
||||
return {
|
||||
oldPath,
|
||||
newPath,
|
||||
status: d.status,
|
||||
newFile: d.status === 'added',
|
||||
path,
|
||||
id: path,
|
||||
binary: d.binary || /.svg$/.test(path),
|
||||
};
|
||||
});
|
||||
return diffs;
|
||||
};
|
||||
|
||||
export const validateRepo = async ({ repoPath }: Options) => {
|
||||
const git = simpleGit(repoPath).silent(false);
|
||||
const isRepo = await git.checkIsRepo();
|
||||
@ -247,36 +214,53 @@ export const localGitMiddleware = ({ repoPath }: Options) => {
|
||||
const cmsBranches = await git
|
||||
.branchLocal()
|
||||
.then(result => result.all.filter(b => b.startsWith(`${CMS_BRANCH_PREFIX}/`)));
|
||||
|
||||
const diffs = await Promise.all(
|
||||
cmsBranches.map(cmsBranch => git.diffSummary([branch, cmsBranch])),
|
||||
);
|
||||
const entries = await entriesFromDiffs(git, branch, repoPath, cmsBranches, diffs);
|
||||
res.json(entries);
|
||||
res.json(cmsBranches.map(contentKeyFromBranch));
|
||||
break;
|
||||
}
|
||||
case 'unpublishedEntry': {
|
||||
const { collection, slug } = body.params as UnpublishedEntryParams;
|
||||
const contentKey = generateContentKey(collection, slug);
|
||||
let { id, collection, slug } = body.params as UnpublishedEntryParams;
|
||||
if (id) {
|
||||
({ collection, slug } = parseContentKey(id));
|
||||
}
|
||||
const contentKey = generateContentKey(collection as string, slug as string);
|
||||
const cmsBranch = branchFromContentKey(contentKey);
|
||||
const branchExists = await isBranchExists(git, cmsBranch);
|
||||
if (branchExists) {
|
||||
const diff = await git.diffSummary([branch, cmsBranch]);
|
||||
const [entry] = await entriesFromDiffs(git, branch, repoPath, [cmsBranch], [diff]);
|
||||
const mediaFiles = await getEntryMediaFiles(
|
||||
git,
|
||||
repoPath,
|
||||
cmsBranch,
|
||||
entry.metaData.objects.entry.mediaFiles,
|
||||
);
|
||||
res.json({ ...entry, mediaFiles });
|
||||
const diffs = await getDiffs(git, branch, cmsBranch);
|
||||
const label = await git.raw(['config', branchDescription(cmsBranch)]);
|
||||
const status = label && labelToStatus(label.trim());
|
||||
const unpublishedEntry = {
|
||||
collection,
|
||||
slug,
|
||||
status,
|
||||
diffs,
|
||||
};
|
||||
res.json(unpublishedEntry);
|
||||
} else {
|
||||
return res.status(404).json({ message: 'Not Found' });
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'unpublishedEntryDataFile': {
|
||||
const { path, collection, slug } = body.params as UnpublishedEntryDataFileParams;
|
||||
const contentKey = generateContentKey(collection as string, slug as string);
|
||||
const cmsBranch = branchFromContentKey(contentKey);
|
||||
const [entry] = await runOnBranch(git, cmsBranch, () =>
|
||||
entriesFromFiles(repoPath, [{ path }]),
|
||||
);
|
||||
res.json({ data: entry.data });
|
||||
break;
|
||||
}
|
||||
case 'unpublishedEntryMediaFile': {
|
||||
const { path, collection, slug } = body.params as UnpublishedEntryMediaFileParams;
|
||||
const contentKey = generateContentKey(collection as string, slug as string);
|
||||
const cmsBranch = branchFromContentKey(contentKey);
|
||||
const file = await runOnBranch(git, cmsBranch, () => readMediaFile(repoPath, path));
|
||||
res.json(file);
|
||||
break;
|
||||
}
|
||||
case 'deleteUnpublishedEntry': {
|
||||
const { collection, slug } = body.params as UnpublishedEntryParams;
|
||||
const { collection, slug } = body.params as DeleteEntryParams;
|
||||
const contentKey = generateContentKey(collection, slug);
|
||||
const cmsBranch = branchFromContentKey(contentKey);
|
||||
const currentBranch = await getCurrentBranch(git);
|
||||
@ -290,7 +274,7 @@ export const localGitMiddleware = ({ repoPath }: Options) => {
|
||||
case 'persistEntry': {
|
||||
const { entry, assets, options } = body.params as PersistEntryParams;
|
||||
if (!options.useWorkflow) {
|
||||
runOnBranch(git, branch, async () => {
|
||||
await runOnBranch(git, branch, async () => {
|
||||
await commitEntry(git, repoPath, entry, assets, options.commitMessage);
|
||||
});
|
||||
} else {
|
||||
@ -306,28 +290,19 @@ export const localGitMiddleware = ({ repoPath }: Options) => {
|
||||
await git.checkoutLocalBranch(cmsBranch);
|
||||
}
|
||||
await rebase(git, branch);
|
||||
const diff = await git.diffSummary([branch, cmsBranch]);
|
||||
const data = await getEntryDataFromDiff(
|
||||
git,
|
||||
branch,
|
||||
diff.files.map(f => f.file),
|
||||
);
|
||||
const diffs = await getDiffs(git, branch, cmsBranch);
|
||||
// delete media files that have been removed from the entry
|
||||
const toDelete = data.metaData.objects.entry.mediaFiles.filter(
|
||||
f => !assets.map(a => a.path).includes(f),
|
||||
const toDelete = diffs.filter(
|
||||
d => d.binary && !assets.map(a => a.path).includes(d.path),
|
||||
);
|
||||
await Promise.all(toDelete.map(f => fs.unlink(path.join(repoPath, f))));
|
||||
await Promise.all(toDelete.map(f => fs.unlink(path.join(repoPath, f.path))));
|
||||
await commitEntry(git, repoPath, entry, assets, options.commitMessage);
|
||||
|
||||
// add status for new entries
|
||||
if (!data.metaData.status) {
|
||||
if (!branchExists) {
|
||||
const description = statusToLabel(options.status);
|
||||
await git.addConfig(branchDescription(cmsBranch), description);
|
||||
}
|
||||
// set path for new entries
|
||||
if (!data.metaData.objects.entry.path) {
|
||||
data.metaData.objects.entry.path = entry.path;
|
||||
}
|
||||
});
|
||||
}
|
||||
res.json({ message: 'entry persisted' });
|
||||
@ -382,7 +357,7 @@ export const localGitMiddleware = ({ repoPath }: Options) => {
|
||||
path.join(repoPath, asset.path),
|
||||
Buffer.from(asset.content, asset.encoding),
|
||||
);
|
||||
await commit(git, commitMessage, [asset.path]);
|
||||
await commit(git, commitMessage);
|
||||
return readMediaFile(repoPath, asset.path);
|
||||
});
|
||||
res.json(file);
|
||||
@ -395,7 +370,7 @@ export const localGitMiddleware = ({ repoPath }: Options) => {
|
||||
} = body.params as DeleteFileParams;
|
||||
await runOnBranch(git, branch, async () => {
|
||||
await fs.unlink(path.join(repoPath, filePath));
|
||||
await commit(git, commitMessage, [filePath]);
|
||||
await commit(git, commitMessage);
|
||||
});
|
||||
res.json({ message: `deleted file ${filePath}` });
|
||||
break;
|
||||
|
@ -17,6 +17,26 @@ export type GetEntryParams = {
|
||||
};
|
||||
|
||||
export type UnpublishedEntryParams = {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
};
|
||||
|
||||
export type UnpublishedEntryDataFileParams = {
|
||||
collection: string;
|
||||
slug: string;
|
||||
id: string;
|
||||
path: string;
|
||||
};
|
||||
|
||||
export type UnpublishedEntryMediaFileParams = {
|
||||
collection: string;
|
||||
slug: string;
|
||||
id: string;
|
||||
path: string;
|
||||
};
|
||||
|
||||
export type DeleteEntryParams = {
|
||||
collection: string;
|
||||
slug: string;
|
||||
};
|
||||
@ -32,7 +52,7 @@ export type PublishUnpublishedEntryParams = {
|
||||
slug: string;
|
||||
};
|
||||
|
||||
export type Entry = { slug: string; path: string; raw: string };
|
||||
export type Entry = { slug: string; path: string; raw: string; newPath?: string };
|
||||
|
||||
export type Asset = { path: string; content: string; encoding: 'base64' };
|
||||
|
||||
|
@ -40,3 +40,19 @@ export const writeFile = async (filePath: string, content: Buffer | string) => {
export const deleteFile = async (repoPath: string, filePath: string) => {
  await fs.unlink(path.join(repoPath, filePath));
};

const moveFile = async (from: string, to: string) => {
  await fs.mkdir(path.dirname(to), { recursive: true });
  await fs.rename(from, to);
};

export const move = async (from: string, to: string) => {
  // move file
  await moveFile(from, to);

  // move children
  const sourceDir = path.dirname(from);
  const destDir = path.dirname(to);
  const allFiles = await listFiles(sourceDir, '', 100);
  await Promise.all(allFiles.map(file => moveFile(file, file.replace(sourceDir, destDir))));
};
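A usage sketch for the new `move` helper (paths are made up): because it renames the target file and then every remaining file under the source directory, renaming a nested entry carries its children along.

// Hypothetical call (e.g. from the persistEntry handler above); paths are illustrative.
// Inside an async function:
await move(
  '/repo/content/pages/old-title/index.md',
  '/repo/content/pages/new-title/index.md',
);
// index.md is renamed first, then every remaining file under content/pages/old-title/
// is moved to content/pages/new-title/ keeping its relative name.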
|
packages/netlify-cms-proxy-server/src/what-the-diff.d.ts (new file)
@ -0,0 +1,5 @@
declare module 'what-the-diff' {
  export const parse: (
    rawDiff: string,
  ) => { oldPath?: string; newPath?: string; binary: boolean; status: string }[];
}
@ -244,16 +244,25 @@ const buttons = {
|
||||
`,
|
||||
};
|
||||
|
||||
const caret = css`
|
||||
color: ${colorsRaw.white};
|
||||
width: 0;
|
||||
height: 0;
|
||||
border: 5px solid transparent;
|
||||
border-radius: 2px;
|
||||
`;
|
||||
|
||||
const components = {
|
||||
card,
|
||||
caretDown: css`
|
||||
color: ${colorsRaw.white};
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 5px solid transparent;
|
||||
border-right: 5px solid transparent;
|
||||
${caret};
|
||||
border-top: 6px solid currentColor;
|
||||
border-radius: 2px;
|
||||
border-bottom: 0;
|
||||
`,
|
||||
caretRight: css`
|
||||
${caret};
|
||||
border-left: 6px solid currentColor;
|
||||
border-right: 0;
|
||||
`,
|
||||
badge: css`
|
||||
${backgroundBadge};
|
||||
|
@ -271,7 +271,7 @@ export default class ListControl extends React.Component {
|
||||
getObjectValue = idx => this.props.value.get(idx) || Map();
|
||||
|
||||
handleChangeFor(index) {
|
||||
return (fieldName, newValue, newMetadata) => {
|
||||
return (f, newValue, newMetadata) => {
|
||||
const { value, metadata, onChange, field } = this.props;
|
||||
const collectionName = field.get('name');
|
||||
const listFieldObjectWidget = field.getIn(['field', 'widget']) === 'object';
|
||||
@ -279,7 +279,7 @@ export default class ListControl extends React.Component {
|
||||
this.getValueType() !== valueTypes.SINGLE ||
|
||||
(this.getValueType() === valueTypes.SINGLE && listFieldObjectWidget);
|
||||
const newObjectValue = withNameKey
|
||||
? this.getObjectValue(index).set(fieldName, newValue)
|
||||
? this.getObjectValue(index).set(f.get('name'), newValue)
|
||||
: newValue;
|
||||
const parsedMetadata = {
|
||||
[collectionName]: Object.assign(metadata ? metadata.toJS() : {}, newMetadata || {}),
|
||||
|