Feat: multi content authoring (#4139)

This commit is contained in:
Erez Rokah 2020-09-20 10:30:46 -07:00 committed by GitHub
parent 7968e01e29
commit cb2ad687ee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
65 changed files with 4331 additions and 1521 deletions

View File

@ -0,0 +1,59 @@
import { newPost, populateEntry, publishEntry, flushClockAndSave } from '../../utils/steps';
// Replace the contents of the first visible title field with `str`.
// Forced interactions are used because the field may be covered while panes animate.
const enterTranslation = str => {
  const titleField = () => cy.get(`[id^="title-field"]`).first();
  titleField().clear({ force: true });
  titleField().type(str, { force: true });
};
// Create a new entry, populate the default-locale fields, then fill in the
// 'de' and 'fr' translations via the preview/translation pane (.Pane2).
// Does not save — callers decide whether to save or publish afterwards.
const createAndTranslate = entryData => {
  newPost();

  // fill the main (default locale) entry; pass a no-op so nothing is saved yet
  populateEntry(entryData, () => undefined);

  // fill each translation inside the second pane
  cy.get('.Pane2').within(() => {
    enterTranslation('de');

    // switch the pane from DE to FR
    cy.contains('span', 'Writing in DE').click();
    cy.contains('span', 'fr').click();

    enterTranslation('fr');
  });
};
// Update both existing translations ('fr' -> 'fr fr', 'de' -> 'de de')
// in the translation pane, then save the entry.
export const updateTranslation = () => {
  // click from the currently-shown locale to another one
  const switchLocale = (from, to) => {
    cy.contains('span', `Writing in ${from}`).click();
    cy.contains('span', to).click();
  };

  cy.get('.Pane2').within(() => {
    enterTranslation('fr fr');
    switchLocale('FR', 'de');
    enterTranslation('de de');
  });

  flushClockAndSave();
};
// Assert that both translations persisted: the DE pane shows title 'de'
// and, after switching locales, the FR pane shows title 'fr'.
export const assertTranslation = () => {
  const expectTitle = value => cy.get(`[id^="title-field"]`).should('have.value', value);

  cy.get('.Pane2').within(() => {
    expectTitle('de');

    // switch the pane from DE to FR
    cy.contains('span', 'Writing in DE').click();
    cy.contains('span', 'fr').click();

    expectTitle('fr');
  });
};
// Create an entry with translations and publish it immediately.
export const createEntryTranslateAndPublish = entryData => {
  createAndTranslate(entryData);
  publishEntry();
};
// Create an entry with translations and save it as a draft (no publish).
export const createEntryTranslateAndSave = entryData => {
  createAndTranslate(entryData);
  flushClockAndSave();
};

View File

@ -0,0 +1,54 @@
import '../../utils/dismiss-local-backup';
import {
login,
goToWorkflow,
updateWorkflowStatus,
exitEditor,
publishWorkflowEntry,
goToEntry,
updateWorkflowStatusInEditor,
publishEntryInEditor,
assertPublishedEntryInEditor,
assertUnpublishedEntryInEditor,
assertUnpublishedChangesInEditor,
} from '../../utils/steps';
import { createEntryTranslateAndSave, assertTranslation, updateTranslation } from './i18n';
import { workflowStatus, editorStatus, publishTypes } from '../../utils/constants';
// Shared editorial-workflow i18n fixture. Runs the create/publish and
// update scenarios once per supported i18n structure.
//
// @param {object} args
// @param {object} args.entry - entry field values to populate
// @param {function} args.getUser - returns the backend user to log in as
export default function({ entry, getUser }) {
  const structures = ['multiple_folders', 'multiple_files', 'single_file'];

  structures.forEach(structure => {
    it(`can create and publish entry with translation in ${structure} mode`, () => {
      cy.task('updateConfig', { i18n: { structure } });

      login(getUser());

      createEntryTranslateAndSave(entry);
      assertUnpublishedEntryInEditor();
      exitEditor();

      goToWorkflow();
      updateWorkflowStatus(entry, workflowStatus.draft, workflowStatus.ready);
      publishWorkflowEntry(entry);

      goToEntry(entry);
      assertTranslation();
      assertPublishedEntryInEditor();
    });

    it(`can update translated entry in ${structure} mode`, () => {
      // Fix: this previously hardcoded { structure: 'multiple_folders' },
      // so the test never exercised the structure named in its title.
      cy.task('updateConfig', { i18n: { structure } });

      login(getUser());

      createEntryTranslateAndSave(entry);
      assertUnpublishedEntryInEditor();
      updateWorkflowStatusInEditor(editorStatus.ready);
      publishEntryInEditor(publishTypes.publishNow);
      exitEditor();

      goToEntry(entry);
      assertTranslation();
      assertPublishedEntryInEditor();

      updateTranslation();
      assertUnpublishedChangesInEditor();
    });
  });
}

View File

@ -0,0 +1,38 @@
import fixture from './common/i18n_editorial_workflow_spec';
const backend = 'test';
// Editorial-workflow i18n suite against the in-memory 'test' backend.
describe(`I18N Test Backend Editorial Workflow`, () => {
  const taskResult = { data: {} };

  // Editorial workflow with three locales and one i18n-enabled folder collection.
  const options = {
    publish_mode: 'editorial_workflow',
    i18n: {
      locales: ['en', 'de', 'fr'],
    },
    collections: [
      {
        folder: 'content/i18n',
        i18n: true,
        fields: [{ i18n: true }, {}, { i18n: 'duplicate' }],
      },
    ],
  };

  before(() => {
    Cypress.config('defaultCommandTimeout', 4000);
    cy.task('setupBackend', { backend, options });
  });

  after(() => {
    cy.task('teardownBackend', { backend });
  });

  const entry = {
    title: 'first title',
    body: 'first body',
  };

  fixture({ entry, getUser: () => taskResult.data.user });
});

View File

@ -0,0 +1,148 @@
import * as specUtils from './common/spec_utils';
import { login } from '../utils/steps';
import { createEntryTranslateAndPublish } from './common/i18n';
// Backend under test: the local proxy server writing to the file system.
const backend = 'proxy';
const mode = 'fs';
// Expected front matter of the default-locale (en) file.
// NOTE(review): nested YAML lines appear unindented here — indentation was
// likely stripped when this diff was extracted; confirm against the repo file.
const expectedEnContent = `---
template: post
title: first title
date: 1970-01-01T00:00:00.000Z
description: first description
category: first category
tags:
- tag1
---
`;
// Expected full content of the 'de' translation file (only i18n fields).
const expectedDeContent = `---
title: de
date: 1970-01-01T00:00:00.000Z
---
`;
// Expected full content of the 'fr' translation file (only i18n fields).
const expectedFrContent = `---
title: fr
date: 1970-01-01T00:00:00.000Z
---
`;
// Expected content when all locales are stored in a single file,
// keyed by locale code.
const contentSingleFile = `---
en:
template: post
date: 1970-01-01T00:00:00.000Z
title: first title
description: first description
category: first category
tags:
- tag1
body: first body
de:
date: 1970-01-01T00:00:00.000Z
title: de
fr:
date: 1970-01-01T00:00:00.000Z
title: fr
---
`;
// Simple-workflow i18n suite against the proxy backend: publishes entries
// and asserts the files written to disk for each i18n structure.
describe(`I18N Proxy Backend Simple Workflow - '${mode}' mode`, () => {
  const taskResult = { data: {} };

  const entry = {
    title: 'first title',
    body: 'first body',
    description: 'first description',
    category: 'first category',
    tags: 'tag1',
  };

  before(() => {
    specUtils.before(
      taskResult,
      {
        mode,
        publish_mode: 'simple',
        i18n: {
          locales: ['en', 'de', 'fr'],
        },
        collections: [{ i18n: true, fields: [{}, { i18n: true }, {}, { i18n: 'duplicate' }] }],
      },
      backend,
    );
    Cypress.config('taskTimeout', 15 * 1000);
    Cypress.config('defaultCommandTimeout', 5 * 1000);
  });

  after(() => {
    specUtils.after(taskResult, backend);
  });

  beforeEach(() => {
    specUtils.beforeEach(taskResult, backend);
  });

  afterEach(() => {
    specUtils.afterEach(taskResult, backend);
  });

  // Fix: test titles previously used stale structure names
  // ('locale_folders', 'single_file', 'locale_file_extensions') that did not
  // match the structure each test actually configures.
  it('can create entry with translation in multiple_folders mode', () => {
    cy.task('updateConfig', { i18n: { structure: 'multiple_folders' } });

    login(taskResult.data.user);
    createEntryTranslateAndPublish(entry);

    // one file per locale, in per-locale folders
    cy.readFile(`${taskResult.data.tempDir}/content/posts/en/1970-01-01-first-title.md`).should(
      'contain',
      expectedEnContent,
    );
    cy.readFile(`${taskResult.data.tempDir}/content/posts/de/1970-01-01-first-title.md`).should(
      'eq',
      expectedDeContent,
    );
    cy.readFile(`${taskResult.data.tempDir}/content/posts/fr/1970-01-01-first-title.md`).should(
      'eq',
      expectedFrContent,
    );
  });

  it('can create entry with translation in multiple_files mode', () => {
    cy.task('updateConfig', { i18n: { structure: 'multiple_files' } });

    login(taskResult.data.user);
    createEntryTranslateAndPublish(entry);

    // one file per locale, distinguished by a locale suffix in the filename
    cy.readFile(`${taskResult.data.tempDir}/content/posts/1970-01-01-first-title.en.md`).should(
      'contain',
      expectedEnContent,
    );
    cy.readFile(`${taskResult.data.tempDir}/content/posts/1970-01-01-first-title.de.md`).should(
      'eq',
      expectedDeContent,
    );
    cy.readFile(`${taskResult.data.tempDir}/content/posts/1970-01-01-first-title.fr.md`).should(
      'eq',
      expectedFrContent,
    );
  });

  it('can create entry with translation in single_file mode', () => {
    cy.task('updateConfig', { i18n: { structure: 'single_file' } });

    login(taskResult.data.user);
    createEntryTranslateAndPublish(entry);

    // all locales stored together in one file, keyed by locale
    cy.readFile(`${taskResult.data.tempDir}/content/posts/1970-01-01-first-title.md`).should(
      'eq',
      contentSingleFile,
    );
  });
});

View File

@ -6,7 +6,7 @@ const backend = 'proxy';
const mode = 'fs';
describe(`Proxy Backend Simple Workflow - '${mode}' mode`, () => {
let taskResult = { data: {} };
const taskResult = { data: {} };
before(() => {
specUtils.before(taskResult, { publish_mode: 'simple', mode }, backend);

View File

@ -302,14 +302,11 @@ async function teardownGitGatewayTest(taskData) {
transformRecordedData: (expectation, toSanitize) => {
const result = methods[taskData.provider].transformData(expectation, toSanitize);
const { httpRequest, httpResponse } = expectation;
if (httpResponse.body && httpRequest.path === '/.netlify/identity/token') {
const parsed = JSON.parse(httpResponse.body);
if (result.response && result.url === '/.netlify/identity/token') {
const parsed = JSON.parse(result.response);
parsed.access_token = 'access_token';
parsed.refresh_token = 'refresh_token';
const responseBody = JSON.stringify(parsed);
return { ...result, response: responseBody };
return { ...result, response: JSON.stringify(parsed) };
} else {
return result;
}

View File

@ -310,7 +310,11 @@ const transformRecordedData = (expectation, toSanitize) => {
const requestBodySanitizer = httpRequest => {
let body;
if (httpRequest.body && httpRequest.body.type === 'JSON' && httpRequest.body.json) {
const bodyObject = JSON.parse(httpRequest.body.json);
const bodyObject =
typeof httpRequest.body.json === 'string'
? JSON.parse(httpRequest.body.json)
: httpRequest.body.json;
if (bodyObject.encoding === 'base64') {
// sanitize encoded data
const decodedBody = Buffer.from(bodyObject.content, 'base64').toString('binary');
@ -319,10 +323,14 @@ const transformRecordedData = (expectation, toSanitize) => {
bodyObject.content = sanitizedEncodedContent;
body = JSON.stringify(bodyObject);
} else {
body = httpRequest.body.json;
body = JSON.stringify(bodyObject);
}
} else if (httpRequest.body && httpRequest.body.type === 'STRING' && httpRequest.body.string) {
body = httpRequest.body.string;
} else if (httpRequest.body) {
const str =
typeof httpRequest.body !== 'string' ? JSON.stringify(httpRequest.body) : httpRequest.body;
body = sanitizeString(str, toSanitize);
}
return body;
};
@ -340,8 +348,13 @@ const transformRecordedData = (expectation, toSanitize) => {
encoding: 'base64',
content: httpResponse.body.base64Bytes,
};
} else if (httpResponse.body) {
responseBody = httpResponse.body;
} else if (httpResponse.body && httpResponse.body.json) {
responseBody = JSON.stringify(httpResponse.body.json);
} else {
responseBody =
typeof httpResponse.body === 'string'
? httpResponse.body
: httpResponse.body && JSON.stringify(httpResponse.body);
}
// replace recorded user with fake one

View File

@ -216,7 +216,11 @@ const transformRecordedData = (expectation, toSanitize) => {
const requestBodySanitizer = httpRequest => {
let body;
if (httpRequest.body && httpRequest.body.type === 'JSON' && httpRequest.body.json) {
const bodyObject = JSON.parse(httpRequest.body.json);
const bodyObject =
typeof httpRequest.body.json === 'string'
? JSON.parse(httpRequest.body.json)
: httpRequest.body.json;
if (bodyObject.encoding === 'base64') {
// sanitize encoded data
const decodedBody = Buffer.from(bodyObject.content, 'base64').toString('binary');
@ -225,10 +229,14 @@ const transformRecordedData = (expectation, toSanitize) => {
bodyObject.content = sanitizedEncodedContent;
body = JSON.stringify(bodyObject);
} else {
body = httpRequest.body.json;
body = JSON.stringify(bodyObject);
}
} else if (httpRequest.body && httpRequest.body.type === 'STRING' && httpRequest.body.string) {
body = sanitizeString(httpRequest.body.string, toSanitize);
} else if (httpRequest.body) {
const str =
typeof httpRequest.body !== 'string' ? JSON.stringify(httpRequest.body) : httpRequest.body;
body = sanitizeString(str, toSanitize);
}
return body;
};
@ -246,8 +254,13 @@ const transformRecordedData = (expectation, toSanitize) => {
encoding: 'base64',
content: httpResponse.body.base64Bytes,
};
} else if (httpResponse.body) {
responseBody = httpResponse.body;
} else if (httpResponse.body && httpResponse.body.json) {
responseBody = JSON.stringify(httpResponse.body.json);
} else {
responseBody =
typeof httpResponse.body === 'string'
? httpResponse.body
: httpResponse.body && JSON.stringify(httpResponse.body);
}
// replace recorded user with fake one

View File

@ -91,6 +91,18 @@ function goToMediaLibrary() {
cy.contains('button', 'Media').click();
}
function assertUnpublishedEntryInEditor() {
cy.contains('button', 'Delete unpublished entry');
}
function assertPublishedEntryInEditor() {
cy.contains('button', 'Delete published entry');
}
function assertUnpublishedChangesInEditor() {
cy.contains('button', 'Delete unpublished changes');
}
function goToEntry(entry) {
goToCollections();
cy.get('a h2')
@ -252,12 +264,17 @@ function populateEntry(entry, onDone = flushClockAndSave) {
const value = entry[key];
if (key === 'body') {
cy.getMarkdownEditor()
.first()
.click()
.clear({ force: true })
.type(value, { force: true });
} else {
cy.get(`[id^="${key}-field"]`).clear({ force: true });
cy.get(`[id^="${key}-field"]`).type(value, { force: true });
cy.get(`[id^="${key}-field"]`)
.first()
.clear({ force: true });
cy.get(`[id^="${key}-field"]`)
.first()
.type(value, { force: true });
}
}
@ -305,7 +322,8 @@ function publishEntry({ createNew = false, duplicate = false } = {}) {
selectDropdownItem('Publish', publishTypes.publishNow);
}
assertNotification(notifications.saved);
// eslint-disable-next-line cypress/no-unnecessary-waiting
cy.wait(500);
});
}
@ -686,4 +704,8 @@ module.exports = {
publishAndDuplicateEntryInEditor,
assertNotification,
assertFieldValidationError,
flushClockAndSave,
assertPublishedEntryInEditor,
assertUnpublishedEntryInEditor,
assertUnpublishedChangesInEditor,
};

View File

@ -9,7 +9,6 @@ import {
APIError,
ApiRequest,
AssetProxy,
Entry,
PersistOptions,
readFile,
CMS_BRANCH_PREFIX,
@ -27,6 +26,7 @@ import {
requestWithBackoff,
readFileMetadata,
throwOnConflictingBranches,
DataFile,
} from 'netlify-cms-lib-util';
import { dirname } from 'path';
import { oneLine } from 'common-tags';
@ -437,11 +437,11 @@ export default class API {
// delete the file
formData.append('files', file.path);
} else if (file.newPath) {
const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
toMove.push({ from: file.path, to: file.newPath, contentBlob });
} else {
// add/modify the file
const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
// Third param is filename header, in case path is `message`, `branch`, etc.
formData.append(file.path, contentBlob, basename(file.path));
}
@ -502,10 +502,11 @@ export default class API {
return files;
}
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
const files = entry ? [entry, ...mediaFiles] : mediaFiles;
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
const files = [...dataFiles, ...mediaFiles];
if (options.useWorkflow) {
return this.editorialWorkflowGit(files, entry as Entry, options);
const slug = dataFiles[0].slug;
return this.editorialWorkflowGit(files, slug, options);
} else {
return this.uploadFiles(files, { commitMessage: options.commitMessage, branch: this.branch });
}
@ -587,8 +588,12 @@ export default class API {
return diffs;
}
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
const contentKey = generateContentKey(options.collectionName as string, entry.slug);
async editorialWorkflowGit(
files: (DataFile | AssetProxy)[],
slug: string,
options: PersistOptions,
) {
const contentKey = generateContentKey(options.collectionName as string, slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
@ -620,9 +625,11 @@ export default class API {
}
}
deleteFile = (path: string, message: string) => {
deleteFiles = (paths: string[], message: string) => {
const body = new FormData();
paths.forEach(path => {
body.append('files', path);
});
body.append('branch', this.branch);
if (message) {
body.append('message', message);

View File

@ -432,15 +432,17 @@ export default class BitbucketBackend implements Implementation {
};
}
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
async persistEntry(entry: Entry, options: PersistOptions) {
const client = await this.getLargeMediaClient();
// persistEntry is a transactional operation
return runWithLock(
this.lock,
async () =>
this.api!.persistFiles(
entry,
client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
entry.dataFiles,
client.enabled
? await getLargeMediaFilteredMediaFiles(client, entry.assets)
: entry.assets,
options,
),
'Failed to acquire persist entry lock',
@ -468,7 +470,7 @@ export default class BitbucketBackend implements Implementation {
const [id] = await Promise.all([
getBlobSHA(fileObj),
this.api!.persistFiles(null, [mediaFile], options),
this.api!.persistFiles([], [mediaFile], options),
]);
const url = URL.createObjectURL(fileObj);
@ -484,8 +486,8 @@ export default class BitbucketBackend implements Implementation {
};
}
deleteFile(path: string, commitMessage: string) {
return this.api!.deleteFile(path, commitMessage);
deleteFiles(paths: string[], commitMessage: string) {
return this.api!.deleteFiles(paths, commitMessage);
}
traverseCursor(cursor: Cursor, action: string) {

View File

@ -534,13 +534,14 @@ export default class GitGateway implements Implementation {
return this.backend!.getMediaFile(path);
}
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
async persistEntry(entry: Entry, options: PersistOptions) {
const client = await this.getLargeMediaClient();
return this.backend!.persistEntry(
entry,
client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
options,
);
if (client.enabled) {
const assets = await getLargeMediaFilteredMediaFiles(client, entry.assets);
return this.backend!.persistEntry({ ...entry, assets }, options);
} else {
return this.backend!.persistEntry(entry, options);
}
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
@ -558,8 +559,8 @@ export default class GitGateway implements Implementation {
displayURL,
};
}
deleteFile(path: string, commitMessage: string) {
return this.backend!.deleteFile(path, commitMessage);
deleteFiles(paths: string[], commitMessage: string) {
return this.backend!.deleteFiles(paths, commitMessage);
}
async getDeployPreview(collection: string, slug: string) {
let preview = await this.backend!.getDeployPreview(collection, slug);

View File

@ -9,7 +9,7 @@ import {
localForage,
basename,
AssetProxy,
Entry as LibEntry,
DataFile,
PersistOptions,
readFileMetadata,
CMS_BRANCH_PREFIX,
@ -62,10 +62,6 @@ interface TreeFile {
raw?: string;
}
export interface Entry extends LibEntry {
sha?: string;
}
type Override<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;
type TreeEntry = Override<GitCreateTreeParamsTree, { sha: string | null }>;
@ -877,8 +873,8 @@ export default class API {
}));
}
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
const files = entry ? mediaFiles.concat(entry) : mediaFiles;
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
const files = mediaFiles.concat(dataFiles);
const uploadPromises = files.map(file => this.uploadBlob(file));
await Promise.all(uploadPromises);
@ -896,12 +892,8 @@ export default class API {
sha,
}),
);
return this.editorialWorkflowGit(
files as TreeFile[],
entry as Entry,
mediaFilesList,
options,
);
const slug = dataFiles[0].slug;
return this.editorialWorkflowGit(files as TreeFile[], slug, mediaFilesList, options);
}
}
@ -927,29 +919,16 @@ export default class API {
}
}
deleteFile(path: string, message: string) {
async deleteFiles(paths: string[], message: string) {
if (this.useOpenAuthoring) {
return Promise.reject('Cannot delete published entries as an Open Authoring user!');
}
const branch = this.branch;
return this.getFileSha(path, { branch }).then(sha => {
const params: { sha: string; message: string; branch: string; author?: { date: string } } = {
sha,
message,
branch,
};
const opts = { method: 'DELETE', params };
if (this.commitAuthor) {
opts.params.author = {
...this.commitAuthor,
date: new Date().toISOString(),
};
}
const fileURL = `${this.repoURL}/contents/${path}`;
return this.request(fileURL, opts);
});
const branchData = await this.getDefaultBranch();
const files = paths.map(path => ({ path, sha: null }));
const changeTree = await this.updateTree(branchData.commit.sha, files);
const commit = await this.commit(message, changeTree);
await this.patchBranch(this.branch, commit.sha);
}
async createBranchAndPullRequest(branchName: string, sha: string, commitMessage: string) {
@ -966,11 +945,11 @@ export default class API {
async editorialWorkflowGit(
files: TreeFile[],
entry: Entry,
slug: string,
mediaFilesList: MediaFile[],
options: PersistOptions,
) {
const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
const contentKey = this.generateContentKey(options.collectionName as string, slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {

View File

@ -229,12 +229,17 @@ describe('github API', () => {
mockAPI(api, responses);
const entry = {
dataFiles: [
{
slug: 'entry',
sha: 'abc',
path: 'content/posts/new-post.md',
raw: 'content',
},
],
assets: [],
};
await api.persistFiles(entry, [], { commitMessage: 'commitMessage' });
await api.persistFiles(entry.dataFiles, entry.assets, { commitMessage: 'commitMessage' });
expect(api.request).toHaveBeenCalledTimes(5);
@ -242,7 +247,10 @@ describe('github API', () => {
'/repos/owner/repo/git/blobs',
{
method: 'POST',
body: JSON.stringify({ content: Base64.encode(entry.raw), encoding: 'base64' }),
body: JSON.stringify({
content: Base64.encode(entry.dataFiles[0].raw),
encoding: 'base64',
}),
},
]);
@ -297,13 +305,15 @@ describe('github API', () => {
api.editorialWorkflowGit = jest.fn();
const entry = {
dataFiles: [
{
slug: 'entry',
sha: 'abc',
path: 'content/posts/new-post.md',
raw: 'content',
};
const mediaFiles = [
},
],
assets: [
{
path: '/static/media/image-1.png',
sha: 'image-1.png',
@ -312,20 +322,21 @@ describe('github API', () => {
path: '/static/media/image-2.png',
sha: 'image-2.png',
},
];
],
};
await api.persistFiles(entry, mediaFiles, { useWorkflow: true });
await api.persistFiles(entry.dataFiles, entry.assets, { useWorkflow: true });
expect(api.uploadBlob).toHaveBeenCalledTimes(3);
expect(api.uploadBlob).toHaveBeenCalledWith(entry);
expect(api.uploadBlob).toHaveBeenCalledWith(mediaFiles[0]);
expect(api.uploadBlob).toHaveBeenCalledWith(mediaFiles[1]);
expect(api.uploadBlob).toHaveBeenCalledWith(entry.dataFiles[0]);
expect(api.uploadBlob).toHaveBeenCalledWith(entry.assets[0]);
expect(api.uploadBlob).toHaveBeenCalledWith(entry.assets[1]);
expect(api.editorialWorkflowGit).toHaveBeenCalledTimes(1);
expect(api.editorialWorkflowGit).toHaveBeenCalledWith(
mediaFiles.concat(entry),
entry,
entry.assets.concat(entry.dataFiles),
entry.dataFiles[0].slug,
[
{ path: 'static/media/image-1.png', sha: 'image-1.png' },
{ path: 'static/media/image-2.png', sha: 'image-2.png' },

View File

@ -104,7 +104,7 @@ describe('github backend implementation', () => {
});
expect(persistFiles).toHaveBeenCalledTimes(1);
expect(persistFiles).toHaveBeenCalledWith(null, [mediaFile], {});
expect(persistFiles).toHaveBeenCalledWith([], [mediaFile], {});
expect(createObjectURL).toHaveBeenCalledTimes(1);
expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
});

View File

@ -30,10 +30,11 @@ import {
contentKeyFromBranch,
unsentRequest,
branchFromContentKey,
Entry,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { Octokit } from '@octokit/rest';
import API, { Entry, API_NAME } from './API';
import API, { API_NAME } from './API';
import GraphQLAPI from './GraphQLAPI';
type GitHubUser = Octokit.UsersGetAuthenticatedResponse;
@ -473,18 +474,18 @@ export default class GitHub implements Implementation {
);
}
persistEntry(entry: Entry, mediaFiles: AssetProxy[] = [], options: PersistOptions) {
persistEntry(entry: Entry, options: PersistOptions) {
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry, mediaFiles, options),
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
try {
await this.api!.persistFiles(null, [mediaFile], options);
await this.api!.persistFiles([], [mediaFile], options);
const { sha, path, fileObj } = mediaFile as AssetProxy & { sha: string };
const displayURL = URL.createObjectURL(fileObj);
return {
@ -500,8 +501,8 @@ export default class GitHub implements Implementation {
}
}
deleteFile(path: string, commitMessage: string) {
return this.api!.deleteFile(path, commitMessage);
deleteFiles(paths: string[], commitMessage: string) {
return this.api!.deleteFiles(paths, commitMessage);
}
async traverseCursor(cursor: Cursor, action: string) {

View File

@ -6,7 +6,7 @@ import {
APIError,
Cursor,
ApiRequest,
Entry,
DataFile,
AssetProxy,
PersistOptions,
readFile,
@ -473,7 +473,7 @@ export default class API {
const items: CommitItem[] = await Promise.all(
files.map(async file => {
const [base64Content, fileExists] = await Promise.all([
result(file, 'toBase64', partial(this.toBase64, (file as Entry).raw)),
result(file, 'toBase64', partial(this.toBase64, (file as DataFile).raw)),
this.isFileExists(file.path, branch),
]);
@ -515,10 +515,11 @@ export default class API {
return items;
}
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
const files = entry ? [entry, ...mediaFiles] : mediaFiles;
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
const files = [...dataFiles, ...mediaFiles];
if (options.useWorkflow) {
return this.editorialWorkflowGit(files, entry as Entry, options);
const slug = dataFiles[0].slug;
return this.editorialWorkflowGit(files, slug, options);
} else {
const items = await this.getCommitItems(files, this.branch);
return this.uploadAndCommit(items, {
@ -527,7 +528,7 @@ export default class API {
}
}
deleteFile = (path: string, commitMessage: string) => {
deleteFiles = (paths: string[], commitMessage: string) => {
const branch = this.branch;
// eslint-disable-next-line @typescript-eslint/camelcase
const commitParams: CommitsParams = { commit_message: commitMessage, branch };
@ -538,12 +539,11 @@ export default class API {
// eslint-disable-next-line @typescript-eslint/camelcase
commitParams.author_email = email;
}
return flow([
unsentRequest.withMethod('DELETE'),
// TODO: only send author params if they are defined.
unsentRequest.withParams(commitParams),
this.request,
])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
const items = paths.map(path => ({ path, action: CommitAction.DELETE }));
return this.uploadAndCommit(items, {
commitMessage,
});
};
async getMergeRequests(sourceBranch?: string) {
@ -723,8 +723,12 @@ export default class API {
});
}
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
const contentKey = generateContentKey(options.collectionName as string, entry.slug);
async editorialWorkflowGit(
files: (DataFile | AssetProxy)[],
slug: string,
options: PersistOptions,
) {
const contentKey = generateContentKey(options.collectionName as string, slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {

View File

@ -263,11 +263,11 @@ export default class GitLab implements Implementation {
};
}
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
async persistEntry(entry: Entry, options: PersistOptions) {
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry, mediaFiles, options),
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
'Failed to acquire persist entry lock',
);
}
@ -277,7 +277,7 @@ export default class GitLab implements Implementation {
const [id] = await Promise.all([
getBlobSHA(fileObj),
this.api!.persistFiles(null, [mediaFile], options),
this.api!.persistFiles([], [mediaFile], options),
]);
const { path } = mediaFile;
@ -294,8 +294,8 @@ export default class GitLab implements Implementation {
};
}
deleteFile(path: string, commitMessage: string) {
return this.api!.deleteFile(path, commitMessage);
deleteFiles(paths: string[], commitMessage: string) {
return this.api!.deleteFiles(paths, commitMessage);
}
traverseCursor(cursor: Cursor, action: string) {

View File

@ -183,13 +183,13 @@ export default class ProxyBackend implements Implementation {
});
}
async persistEntry(entry: Entry, assetProxies: AssetProxy[], options: PersistOptions) {
const assets = await Promise.all(assetProxies.map(serializeAsset));
async persistEntry(entry: Entry, options: PersistOptions) {
const assets = await Promise.all(entry.assets.map(serializeAsset));
return this.request({
action: 'persistEntry',
params: {
branch: this.branch,
entry,
dataFiles: entry.dataFiles,
assets,
options: { ...options, status: options.status || this.options.initialWorkflowStatus },
cmsLabelPrefix: this.cmsLabelPrefix,
@ -244,10 +244,10 @@ export default class ProxyBackend implements Implementation {
return deserializeMediaFile(file);
}
deleteFile(path: string, commitMessage: string) {
deleteFiles(paths: string[], commitMessage: string) {
return this.request({
action: 'deleteFile',
params: { branch: this.branch, path, options: { commitMessage } },
action: 'deleteFiles',
params: { branch: this.branch, paths, options: { commitMessage } },
});
}

View File

@ -51,8 +51,11 @@ describe('test backend implementation', () => {
const backend = new TestBackend({});
const entry = { path: 'posts/some-post.md', raw: 'content', slug: 'some-post.md' };
await backend.persistEntry(entry, [], { newEntry: true });
const entry = {
dataFiles: [{ path: 'posts/some-post.md', raw: 'content', slug: 'some-post.md' }],
assets: [],
};
await backend.persistEntry(entry, { newEntry: true });
expect(window.repoFiles).toEqual({
posts: {
@ -80,8 +83,11 @@ describe('test backend implementation', () => {
const backend = new TestBackend({});
const entry = { path: 'posts/new-post.md', raw: 'content', slug: 'new-post.md' };
await backend.persistEntry(entry, [], { newEntry: true });
const entry = {
dataFiles: [{ path: 'posts/new-post.md', raw: 'content', slug: 'new-post.md' }],
assets: [],
};
await backend.persistEntry(entry, { newEntry: true });
expect(window.repoFiles).toEqual({
pages: {
@ -108,8 +114,8 @@ describe('test backend implementation', () => {
const slug = 'dir1/dir2/some-post.md';
const path = `posts/${slug}`;
const entry = { path, raw: 'content', slug };
await backend.persistEntry(entry, [], { newEntry: true });
const entry = { dataFiles: [{ path, raw: 'content', slug }], assets: [] };
await backend.persistEntry(entry, { newEntry: true });
expect(window.repoFiles).toEqual({
posts: {
@ -143,8 +149,8 @@ describe('test backend implementation', () => {
const slug = 'dir1/dir2/some-post.md';
const path = `posts/${slug}`;
const entry = { path, raw: 'new content', slug };
await backend.persistEntry(entry, [], { newEntry: false });
const entry = { dataFiles: [{ path, raw: 'new content', slug }], assets: [] };
await backend.persistEntry(entry, { newEntry: false });
expect(window.repoFiles).toEqual({
posts: {
@ -161,7 +167,7 @@ describe('test backend implementation', () => {
});
});
describe('deleteFile', () => {
describe('deleteFiles', () => {
it('should delete entry by path', async () => {
window.repoFiles = {
posts: {
@ -173,7 +179,7 @@ describe('test backend implementation', () => {
const backend = new TestBackend({});
await backend.deleteFile('posts/some-post.md');
await backend.deleteFiles(['posts/some-post.md']);
expect(window.repoFiles).toEqual({
posts: {},
});
@ -194,7 +200,7 @@ describe('test backend implementation', () => {
const backend = new TestBackend({});
await backend.deleteFile('posts/dir1/dir2/some-post.md');
await backend.deleteFiles(['posts/dir1/dir2/some-post.md']);
expect(window.repoFiles).toEqual({
posts: {
dir1: {

View File

@ -13,6 +13,7 @@ import {
User,
Config,
ImplementationFile,
DataFile,
} from 'netlify-cms-lib-util';
import { extname, dirname } from 'path';
import AuthenticationPage from './AuthenticationPage';
@ -20,18 +21,20 @@ import AuthenticationPage from './AuthenticationPage';
type RepoFile = { path: string; content: string | AssetProxy };
type RepoTree = { [key: string]: RepoFile | RepoTree };
type UnpublishedRepoEntry = {
slug: string;
collection: string;
status: string;
diffs: {
type Diff = {
id: string;
originalPath?: string;
path: string;
newFile: boolean;
status: string;
content: string | AssetProxy;
}[];
};
type UnpublishedRepoEntry = {
slug: string;
collection: string;
status: string;
diffs: Diff[];
updatedAt: string;
};
@ -257,17 +260,17 @@ export default class TestBackend implements Implementation {
async addOrUpdateUnpublishedEntry(
key: string,
path: string,
newPath: string | undefined,
raw: string,
dataFiles: DataFile[],
assetProxies: AssetProxy[],
slug: string,
collection: string,
status: string,
) {
const diffs: Diff[] = [];
dataFiles.forEach(dataFile => {
const { path, newPath, raw } = dataFile;
const currentDataFile = window.repoFilesUnpublished[key]?.diffs.find(d => d.path === path);
const originalPath = currentDataFile ? currentDataFile.originalPath : path;
const diffs = [];
diffs.push({
originalPath,
id: newPath || path,
@ -276,6 +279,7 @@ export default class TestBackend implements Implementation {
status: 'added',
content: raw,
});
});
assetProxies.forEach(a => {
const asset = this.normalizeAsset(a);
diffs.push({
@ -295,22 +299,18 @@ export default class TestBackend implements Implementation {
};
}
async persistEntry(
{ path, raw, slug, newPath }: Entry,
assetProxies: AssetProxy[],
options: PersistOptions,
) {
async persistEntry(entry: Entry, options: PersistOptions) {
if (options.useWorkflow) {
const slug = entry.dataFiles[0].slug;
const key = `${options.collectionName}/${slug}`;
const currentEntry = window.repoFilesUnpublished[key];
const status =
currentEntry?.status || options.status || (this.options.initialWorkflowStatus as string);
this.addOrUpdateUnpublishedEntry(
key,
path,
newPath,
raw,
assetProxies,
entry.dataFiles,
entry.assets,
slug,
options.collectionName as string,
status,
@ -318,9 +318,12 @@ export default class TestBackend implements Implementation {
return Promise.resolve();
}
entry.dataFiles.forEach(dataFile => {
const { path, raw } = dataFile;
writeFile(path, raw, window.repoFiles);
assetProxies.forEach(a => {
writeFile(a.path, raw, window.repoFiles);
});
entry.assets.forEach(a => {
writeFile(a.path, a, window.repoFiles);
});
return Promise.resolve();
}
@ -409,8 +412,10 @@ export default class TestBackend implements Implementation {
return Promise.resolve(normalizedAsset);
}
deleteFile(path: string) {
deleteFiles(paths: string[]) {
paths.forEach(path => {
deleteFile(path, window.repoFiles);
});
return Promise.resolve();
}

View File

@ -533,6 +533,182 @@ describe('config', () => {
],
});
});
describe('i18n', () => {
it('should set root i18n on collection when collection i18n is set to true', () => {
expect(
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [
{ folder: 'foo', i18n: true, fields: [{ name: 'title', widget: 'string' }] },
],
}),
)
.getIn(['collections', 0, 'i18n'])
.toJS(),
).toEqual({ structure: 'multiple_folders', locales: ['en', 'de'], default_locale: 'en' });
});
it('should not set root i18n on collection when collection i18n is not set', () => {
expect(
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [{ folder: 'foo', fields: [{ name: 'title', widget: 'string' }] }],
}),
).getIn(['collections', 0, 'i18n']),
).toBeUndefined();
});
it('should not set root i18n on collection when collection i18n is set to false', () => {
expect(
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [
{ folder: 'foo', i18n: false, fields: [{ name: 'title', widget: 'string' }] },
],
}),
).getIn(['collections', 0, 'i18n']),
).toBeUndefined();
});
it('should merge root i18n on collection when collection i18n is set to an object', () => {
expect(
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
default_locale: 'en',
},
collections: [
{
folder: 'foo',
i18n: { locales: ['en', 'fr'], default_locale: 'fr' },
fields: [{ name: 'title', widget: 'string' }],
},
],
}),
)
.getIn(['collections', 0, 'i18n'])
.toJS(),
).toEqual({ structure: 'multiple_folders', locales: ['en', 'fr'], default_locale: 'fr' });
});
it('should throw when i18n is set on files collection', () => {
expect(() =>
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [
{
files: [
{ name: 'file', file: 'file', fields: [{ name: 'title', widget: 'string' }] },
],
i18n: true,
},
],
}),
),
).toThrow('i18n configuration is not supported for files collection');
});
it('should set i18n value to translate on field when i18n=true for field', () => {
expect(
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [
{
folder: 'foo',
i18n: true,
fields: [{ name: 'title', widget: 'string', i18n: true }],
},
],
}),
).getIn(['collections', 0, 'fields', 0, 'i18n']),
).toEqual('translate');
});
it('should set i18n value to none on field when i18n=false for field', () => {
expect(
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [
{
folder: 'foo',
i18n: true,
fields: [{ name: 'title', widget: 'string', i18n: false }],
},
],
}),
).getIn(['collections', 0, 'fields', 0, 'i18n']),
).toEqual('none');
});
it('should throw is default locale is missing from root i18n config', () => {
expect(() =>
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
default_locale: 'fr',
},
collections: [
{
folder: 'foo',
fields: [{ name: 'title', widget: 'string' }],
},
],
}),
),
).toThrow("i18n locales 'en, de' are missing the default locale fr");
});
it('should throw is default locale is missing from collection i18n config', () => {
expect(() =>
applyDefaults(
fromJS({
i18n: {
structure: 'multiple_folders',
locales: ['en', 'de'],
},
collections: [
{
folder: 'foo',
i18n: {
default_locale: 'fr',
},
fields: [{ name: 'title', widget: 'string' }],
},
],
}),
),
).toThrow("i18n locales 'en, de' are missing the default locale fr");
});
});
});
describe('detectProxyServer', () => {

View File

@ -6,6 +6,7 @@ import * as publishModes from 'Constants/publishModes';
import { validateConfig } from 'Constants/configSchema';
import { selectDefaultSortableFields, traverseFields } from '../reducers/collections';
import { resolveBackend } from 'coreSrc/backend';
import { I18N, I18N_FIELD } from '../lib/i18n';
export const CONFIG_REQUEST = 'CONFIG_REQUEST';
export const CONFIG_SUCCESS = 'CONFIG_SUCCESS';
@ -58,6 +59,59 @@ const setSnakeCaseConfig = field => {
return field;
};
const setI18nField = field => {
if (field.get(I18N) === true) {
field = field.set(I18N, I18N_FIELD.TRANSLATE);
} else if (field.get(I18N) === false || !field.has(I18N)) {
field = field.set(I18N, I18N_FIELD.NONE);
}
return field;
};
const setI18nDefaults = (i18n, collection) => {
if (i18n && collection.has(I18N)) {
const collectionI18n = collection.get(I18N);
if (collectionI18n === true) {
collection = collection.set(I18N, i18n);
} else if (collectionI18n === false) {
collection = collection.delete(I18N);
} else {
const locales = collectionI18n.get('locales', i18n.get('locales'));
const defaultLocale = collectionI18n.get(
'default_locale',
collectionI18n.has('locales') ? locales.first() : i18n.get('default_locale'),
);
collection = collection.set(I18N, i18n.merge(collectionI18n));
collection = collection.setIn([I18N, 'locales'], locales);
collection = collection.setIn([I18N, 'default_locale'], defaultLocale);
throwOnMissingDefaultLocale(collection.get(I18N));
}
if (collectionI18n !== false) {
// set default values for i18n fields
collection = collection.set('fields', traverseFields(collection.get('fields'), setI18nField));
}
} else {
collection = collection.delete(I18N);
collection = collection.set(
'fields',
traverseFields(collection.get('fields'), field => field.delete(I18N)),
);
}
return collection;
};
const throwOnMissingDefaultLocale = i18n => {
if (i18n && !i18n.get('locales').includes(i18n.get('default_locale'))) {
throw new Error(
`i18n locales '${i18n.get('locales').join(', ')}' are missing the default locale ${i18n.get(
'default_locale',
)}`,
);
}
};
const defaults = {
publish_mode: publishModes.SIMPLE,
};
@ -132,6 +186,10 @@ export function applyDefaults(config) {
map.setIn(['slug', 'sanitize_replacement'], '-');
}
let i18n = config.get(I18N);
i18n = i18n?.set('default_locale', i18n.get('default_locale', i18n.get('locales').first()));
throwOnMissingDefaultLocale(i18n);
// Strip leading slash from collection folders and files
map.set(
'collections',
@ -167,10 +225,15 @@ export function applyDefaults(config) {
} else {
collection = collection.set('meta', Map());
}
collection = setI18nDefaults(i18n, collection);
}
const files = collection.get('files');
if (files) {
if (i18n && collection.has(I18N)) {
throw new Error('i18n configuration is not supported for files collection');
}
collection = collection.delete('nested');
collection = collection.delete('meta');
collection = collection.set(

View File

@ -4,7 +4,6 @@ import { actions as notifActions } from 'redux-notifications';
import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
import { ThunkDispatch } from 'redux-thunk';
import { Map, List } from 'immutable';
import { serializeValues } from '../lib/serializeEntryValues';
import { currentBackend, slugFromCustomPath } from '../backend';
import {
selectPublishedSlugs,
@ -13,7 +12,6 @@ import {
selectUnpublishedEntry,
} from '../reducers';
import { selectEditingDraft } from '../reducers/entries';
import { selectFields } from '../reducers/collections';
import { EDITORIAL_WORKFLOW, status, Status } from '../constants/publishModes';
import { EDITORIAL_WORKFLOW_ERROR } from 'netlify-cms-lib-util';
import {
@ -22,11 +20,11 @@ import {
getMediaAssets,
createDraftFromEntry,
loadEntries,
getSerializedEntry,
} from './entries';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import { addAssets } from './media';
import { loadMedia } from './mediaLibrary';
import ValidationErrorTypes from '../constants/validationErrorTypes';
import { Collection, EntryMap, State, Collections, EntryDraft, MediaFile } from '../types/redux';
import { AnyAction } from 'redux';
@ -382,13 +380,7 @@ export function persistUnpublishedEntry(collection: Collection, existingUnpublis
entry,
});
/**
* Serialize the values of any fields with registered serializers, and
* update the entry and entryDraft with the serialized values.
*/
const fields = selectFields(collection, entry.get('slug'));
const serializedData = serializeValues(entry.get('data'), fields);
const serializedEntry = entry.set('data', serializedData);
const serializedEntry = getSerializedEntry(collection, entry);
const serializedEntryDraft = entryDraft.set('entry', serializedEntry);
dispatch(unpublishedEntryPersisting(collection, serializedEntry, transactionID));

View File

@ -20,6 +20,7 @@ import {
EntryField,
SortDirection,
ViewFilter,
Entry,
} from '../types/redux';
import { ThunkDispatch } from 'redux-thunk';
@ -30,6 +31,7 @@ import { selectIsFetching, selectEntriesSortFields, selectEntryByPath } from '..
import { selectCustomPath } from '../reducers/entryDraft';
import { navigateToEntry } from '../routing/history';
import { getProcessSegment } from '../lib/formatters';
import { hasI18n, serializeI18n } from '../lib/i18n';
const { notifSend } = notifActions;
@ -349,15 +351,26 @@ export function discardDraft() {
return { type: DRAFT_DISCARD };
}
export function changeDraftField(
field: EntryField,
value: string,
metadata: Record<string, unknown>,
entries: EntryMap[],
) {
export function changeDraftField({
field,
value,
metadata,
entries,
i18n,
}: {
field: EntryField;
value: string;
metadata: Record<string, unknown>;
entries: EntryMap[];
i18n?: {
currentLocale: string;
defaultLocale: string;
locales: string[];
};
}) {
return {
type: DRAFT_CHANGE_FIELD,
payload: { field, value, metadata, entries },
payload: { field, value, metadata, entries, i18n },
};
}
@ -530,11 +543,13 @@ export function loadEntries(collection: Collection, page = 0) {
dispatch(entriesLoading(collection));
try {
const loadAllEntries = collection.has('nested') || hasI18n(collection);
let response: {
cursor: Cursor;
pagination: number;
entries: EntryValue[];
} = await (collection.has('nested')
} = await (loadAllEntries
? // nested collections require all entries to construct the tree
provider.listAllEntries(collection).then((entries: EntryValue[]) => ({ entries }))
: provider.listEntries(collection, page));
@ -760,6 +775,24 @@ export function getMediaAssets({ entry }: { entry: EntryMap }) {
return assets;
}
export const getSerializedEntry = (collection: Collection, entry: Entry) => {
/**
* Serialize the values of any fields with registered serializers, and
* update the entry and entryDraft with the serialized values.
*/
const fields = selectFields(collection, entry.get('slug'));
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const serializeData = (data: any) => {
return serializeValues(data, fields);
};
const serializedData = serializeData(entry.get('data'));
let serializedEntry = entry.set('data', serializedData);
if (hasI18n(collection)) {
serializedEntry = serializeI18n(collection, serializedEntry, serializeData);
}
return serializedEntry;
};
export function persistEntry(collection: Collection) {
return async (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
@ -794,13 +827,7 @@ export function persistEntry(collection: Collection) {
entry,
});
/**
* Serialize the values of any fields with registered serializers, and
* update the entry and entryDraft with the serialized values.
*/
const fields = selectFields(collection, entry.get('slug'));
const serializedData = serializeValues(entryDraft.getIn(['entry', 'data']), fields);
const serializedEntry = entry.set('data', serializedData);
const serializedEntry = getSerializedEntry(collection, entry);
const serializedEntryDraft = entryDraft.set('entry', serializedEntry);
dispatch(entryPersisting(collection, serializedEntry));
return backend
@ -811,7 +838,7 @@ export function persistEntry(collection: Collection) {
assetProxies,
usedSlugs,
})
.then((newSlug: string) => {
.then(async (newSlug: string) => {
dispatch(
notifSend({
message: {
@ -821,16 +848,17 @@ export function persistEntry(collection: Collection) {
dismissAfter: 4000,
}),
);
// re-load media library if entry had media files
if (assetProxies.length > 0) {
dispatch(loadMedia());
await dispatch(loadMedia());
}
dispatch(entryPersisted(collection, serializedEntry, newSlug));
if (collection.has('nested')) {
dispatch(loadEntries(collection));
await dispatch(loadEntries(collection));
}
if (entry.get('slug') !== newSlug) {
dispatch(loadEntry(collection, newSlug));
await dispatch(loadEntry(collection, newSlug));
navigateToEntry(collection.get('name'), newSlug);
}
})

View File

@ -37,6 +37,8 @@ import {
asyncLock,
AsyncLock,
UnpublishedEntry,
DataFile,
UnpublishedEntryDiff,
} from 'netlify-cms-lib-util';
import { basename, join, extname, dirname } from 'path';
import { status } from './constants/publishModes';
@ -55,9 +57,41 @@ import {
import AssetProxy from './valueObjects/AssetProxy';
import { FOLDER, FILES } from './constants/collectionTypes';
import { selectCustomPath } from './reducers/entryDraft';
import {
getI18nFilesDepth,
getI18nFiles,
hasI18n,
getFilePaths,
getI18nEntry,
groupEntries,
getI18nDataFiles,
getI18nBackup,
formatI18nBackup,
} from './lib/i18n';
const { extractTemplateVars, dateParsers, expandPath } = stringTemplate;
const updateAssetProxies = (
assetProxies: AssetProxy[],
config: Config,
collection: Collection,
entryDraft: EntryDraft,
path: string,
) => {
assetProxies.map(asset => {
// update media files path based on entry path
const oldPath = asset.path;
const newPath = selectMediaFilePath(
config,
collection,
entryDraft.get('entry').set('path', path),
oldPath,
asset.field,
);
asset.path = newPath;
});
};
export class LocalStorageAuthStore {
storageKey = 'netlify-cms-user';
@ -223,6 +257,7 @@ interface BackupEntry {
raw: string;
path: string;
mediaFiles: MediaFile[];
i18n?: Record<string, { raw: string }>;
}
interface PersistArgs {
@ -253,6 +288,18 @@ const prepareMetaPath = (path: string, collection: Collection) => {
return dir.substr(collection.get('folder')!.length + 1) || '/';
};
const collectionDepth = (collection: Collection) => {
let depth;
depth =
collection.get('nested')?.get('depth') || getPathDepth(collection.get('path', '') as string);
if (hasI18n(collection)) {
depth = getI18nFilesDepth(collection, depth);
}
return depth;
};
export class Backend {
implementation: Implementation;
backendName: string;
@ -417,7 +464,6 @@ export class Backend {
}
processEntries(loadedEntries: ImplementationEntry[], collection: Collection) {
const collectionFilter = collection.get('filter');
const entries = loadedEntries.map(loadedEntry =>
createEntry(
collection.get('name'),
@ -433,9 +479,17 @@ export class Backend {
);
const formattedEntries = entries.map(this.entryWithFormat(collection));
// If this collection has a "filter" property, filter entries accordingly
const collectionFilter = collection.get('filter');
const filteredEntries = collectionFilter
? this.filterEntries({ entries: formattedEntries }, collectionFilter)
: formattedEntries;
if (hasI18n(collection)) {
const extension = selectFolderEntryExtension(collection);
const groupedEntries = groupEntries(collection, extension, entries);
return groupedEntries;
}
return filteredEntries;
}
@ -445,10 +499,7 @@ export class Backend {
const collectionType = collection.get('type');
if (collectionType === FOLDER) {
listMethod = () => {
const depth =
collection.get('nested')?.get('depth') ||
getPathDepth(collection.get('path', '') as string);
const depth = collectionDepth(collection);
return this.implementation.entriesByFolder(
collection.get('folder') as string,
extension,
@ -493,11 +544,8 @@ export class Backend {
// for local searches and queries.
async listAllEntries(collection: Collection) {
if (collection.get('folder') && this.implementation.allEntriesByFolder) {
const depth = collectionDepth(collection);
const extension = selectFolderEntryExtension(collection);
const depth =
collection.get('nested')?.get('depth') ||
getPathDepth(collection.get('path', '') as string);
return this.implementation
.allEntriesByFolder(collection.get('folder') as string, extension, depth)
.then(entries => this.processEntries(entries, collection));
@ -640,7 +688,9 @@ export class Backend {
});
const label = selectFileEntryLabel(collection, slug);
const entry: EntryValue = this.entryWithFormat(collection)(
const formatRawData = (raw: string) => {
return this.entryWithFormat(collection)(
createEntry(collection.get('name'), slug, path, {
raw,
label,
@ -648,6 +698,13 @@ export class Backend {
meta: { path: prepareMetaPath(path, collection) },
}),
);
};
const entry: EntryValue = formatRawData(raw);
if (hasI18n(collection) && backup.i18n) {
const i18n = formatI18nBackup(backup.i18n, formatRawData);
entry.i18n = i18n;
}
return { entry };
}
@ -676,10 +733,16 @@ export class Backend {
}),
);
let i18n;
if (hasI18n(collection)) {
i18n = getI18nBackup(collection, entry, entry => this.entryToRaw(collection, entry));
}
await localForage.setItem<BackupEntry>(key, {
raw,
path: entry.get('path'),
mediaFiles,
...(i18n && { i18n }),
});
const result = await localForage.setItem(getEntryBackupKey(), raw);
return result;
@ -714,7 +777,9 @@ export class Backend {
async getEntry(state: State, collection: Collection, slug: string) {
const path = selectEntryPath(collection, slug) as string;
const label = selectFileEntryLabel(collection, slug);
const extension = selectFolderEntryExtension(collection);
const getEntryValue = async (path: string) => {
const loadedEntry = await this.implementation.getEntry(path);
let entry = createEntry(collection.get('name'), slug, loadedEntry.file.path, {
raw: loadedEntry.data,
@ -725,7 +790,18 @@ export class Backend {
entry = this.entryWithFormat(collection)(entry);
entry = await this.processEntry(state, collection, entry);
return entry;
};
let entryValue: EntryValue;
if (hasI18n(collection)) {
entryValue = await getI18nEntry(collection, extension, path, slug, getEntryValue);
} else {
entryValue = await getEntryValue(path);
}
return entryValue;
}
getMedia() {
@ -772,31 +848,6 @@ export class Backend {
} else {
extension = selectFolderEntryExtension(collection);
}
const dataFiles = sortBy(
entryData.diffs.filter(d => d.path.endsWith(extension)),
f => f.path.length,
);
// if the unpublished entry has no diffs, return the original
let data = '';
let newFile = false;
let path = slug;
if (dataFiles.length <= 0) {
const loadedEntry = await this.implementation.getEntry(
selectEntryPath(collection, slug) as string,
);
data = loadedEntry.data;
path = loadedEntry.file.path;
} else {
const entryFile = dataFiles[0];
data = await this.implementation.unpublishedEntryDataFile(
collection.get('name'),
entryData.slug,
entryFile.path,
entryFile.id,
);
newFile = entryFile.newFile;
path = entryFile.path;
}
const mediaFiles: MediaFile[] = [];
if (withMediaFiles) {
@ -813,6 +864,13 @@ export class Backend {
);
mediaFiles.push(...files.map(f => ({ ...f, draft: true })));
}
const dataFiles = sortBy(
entryData.diffs.filter(d => d.path.endsWith(extension)),
f => f.path.length,
);
const formatData = (data: string, path: string, newFile: boolean) => {
const entry = createEntry(collection.get('name'), slug, path, {
raw: data,
isModification: !newFile,
@ -825,6 +883,39 @@ export class Backend {
const entryWithFormat = this.entryWithFormat(collection)(entry);
return entryWithFormat;
};
const readAndFormatDataFile = async (dataFile: UnpublishedEntryDiff) => {
const data = await this.implementation.unpublishedEntryDataFile(
collection.get('name'),
entryData.slug,
dataFile.path,
dataFile.id,
);
const entryWithFormat = formatData(data, dataFile.path, dataFile.newFile);
return entryWithFormat;
};
// if the unpublished entry has no diffs, return the original
if (dataFiles.length <= 0) {
const loadedEntry = await this.implementation.getEntry(
selectEntryPath(collection, slug) as string,
);
return formatData(loadedEntry.data, loadedEntry.file.path, false);
} else if (hasI18n(collection)) {
// we need to read all locales files and not just the changes
const path = selectEntryPath(collection, slug) as string;
const i18nFiles = getI18nDataFiles(collection, extension, path, slug, dataFiles);
let entries = await Promise.all(
i18nFiles.map(dataFile => readAndFormatDataFile(dataFile).catch(() => null)),
);
entries = entries.filter(Boolean);
const grouped = await groupEntries(collection, extension, entries as EntryValue[]);
return grouped[0];
} else {
const entryWithFormat = await readAndFormatDataFile(dataFiles[0]);
return entryWithFormat;
}
}
async unpublishedEntries(collections: Collections) {
@ -964,15 +1055,9 @@ export class Backend {
const useWorkflow = selectUseWorkflow(config);
let entryObj: {
path: string;
slug: string;
raw: string;
newPath?: string;
};
const customPath = selectCustomPath(collection, entryDraft);
let dataFile: DataFile;
if (newEntry) {
if (!selectAllowNewEntries(collection)) {
throw new Error('Not allowed to create new entries in this collection');
@ -985,27 +1070,16 @@ export class Backend {
customPath,
);
const path = customPath || (selectEntryPath(collection, slug) as string);
entryObj = {
dataFile = {
path,
slug,
raw: this.entryToRaw(collection, entryDraft.get('entry')),
};
assetProxies.map(asset => {
// update media files path based on entry path
const oldPath = asset.path;
const newPath = selectMediaFilePath(
config,
collection,
entryDraft.get('entry').set('path', path),
oldPath,
asset.field,
);
asset.path = newPath;
});
updateAssetProxies(assetProxies, config, collection, entryDraft, path);
} else {
const slug = entryDraft.getIn(['entry', 'slug']);
entryObj = {
dataFile = {
path: entryDraft.getIn(['entry', 'path']),
// for workflow entries we refresh the slug on publish
slug: customPath && !useWorkflow ? slugFromCustomPath(collection, customPath) : slug,
@ -1014,14 +1088,30 @@ export class Backend {
};
}
const { slug, path, newPath } = dataFile;
let dataFiles = [dataFile];
if (hasI18n(collection)) {
const extension = selectFolderEntryExtension(collection);
dataFiles = getI18nFiles(
collection,
extension,
entryDraft.get('entry'),
(draftData: EntryMap) => this.entryToRaw(collection, draftData),
path,
slug,
newPath,
);
}
const user = (await this.currentUser()) as User;
const commitMessage = commitMessageFormatter(
newEntry ? 'create' : 'update',
config,
{
collection,
slug: entryObj.slug,
path: entryObj.path,
slug,
path,
authorLogin: user.login,
authorName: user.name,
},
@ -1043,7 +1133,13 @@ export class Backend {
await this.invokePrePublishEvent(entryDraft.get('entry'));
}
await this.implementation.persistEntry(entryObj, assetProxies, opts);
await this.implementation.persistEntry(
{
dataFiles,
assets: assetProxies,
},
opts,
);
await this.invokePostSaveEvent(entryDraft.get('entry'));
@ -1051,7 +1147,7 @@ export class Backend {
await this.invokePostPublishEvent(entryDraft.get('entry'));
}
return entryObj.slug;
return slug;
}
async invokeEventWithEntry(event: string, entry: EntryMap) {
@ -1101,13 +1197,14 @@ export class Backend {
}
async deleteEntry(state: State, collection: Collection, slug: string) {
const config = state.config;
const path = selectEntryPath(collection, slug) as string;
const extension = selectFolderEntryExtension(collection) as string;
if (!selectAllowDeletion(collection)) {
throw new Error('Not allowed to delete entries in this collection');
}
const config = state.config;
const user = (await this.currentUser()) as User;
const commitMessage = commitMessageFormatter(
'delete',
@ -1124,9 +1221,13 @@ export class Backend {
const entry = selectEntry(state.entries, collection.get('name'), slug);
await this.invokePreUnpublishEvent(entry);
const result = await this.implementation.deleteFile(path, commitMessage);
let paths = [path];
if (hasI18n(collection)) {
paths = getFilePaths(collection, extension, path, slug);
}
await this.implementation.deleteFiles(paths, commitMessage);
await this.invokePostUnpublishEvent(entry);
return result;
}
async deleteMedia(config: Config, path: string) {
@ -1141,7 +1242,7 @@ export class Backend {
},
user.useOpenAuthoring,
);
return this.implementation.deleteFile(path, commitMessage);
return this.implementation.deleteFiles([path], commitMessage);
}
persistUnpublishedEntry(args: PersistArgs) {

View File

@ -196,9 +196,9 @@ export class Editor extends React.Component {
this.props.persistLocalBackup(entry, collection);
}, 2000);
handleChangeDraftField = (field, value, metadata) => {
handleChangeDraftField = (field, value, metadata, i18n) => {
const entries = [this.props.unPublishedEntry, this.props.publishedEntry].filter(Boolean);
this.props.changeDraftField(field, value, metadata, entries);
this.props.changeDraftField({ field, value, metadata, entries, i18n });
};
handleChangeStatus = newStatusName => {
@ -418,6 +418,7 @@ export class Editor extends React.Component {
deployPreview={deployPreview}
loadDeployPreview={opts => loadDeployPreview(collection, slug, entry, isPublished, opts)}
editorBackLink={editorBackLink}
t={t}
/>
);
}

View File

@ -56,6 +56,14 @@ const styleStrings = {
widgetError: `
border-color: ${colors.errorText};
`,
disabled: `
pointer-events: none;
opacity: 0.5;
background: #ccc;
`,
hidden: `
visibility: hidden;
`,
};
const ControlContainer = styled.div`
@ -87,6 +95,17 @@ export const ControlHint = styled.p`
transition: color ${transitions.main};
`;
const LabelComponent = ({ field, isActive, hasErrors, uniqueFieldId, isFieldOptional, t }) => {
const label = `${field.get('label', field.get('name'))}`;
const labelComponent = (
<FieldLabel isActive={isActive} hasErrors={hasErrors} htmlFor={uniqueFieldId}>
{label} {`${isFieldOptional ? ` (${t('editor.editorControl.field.optional')})` : ''}`}
</FieldLabel>
);
return labelComponent;
};
class EditorControl extends React.Component {
static propTypes = {
value: PropTypes.oneOfType([
@ -119,6 +138,10 @@ class EditorControl extends React.Component {
parentIds: PropTypes.arrayOf(PropTypes.string),
entry: ImmutablePropTypes.map.isRequired,
collection: ImmutablePropTypes.map.isRequired,
isDisabled: PropTypes.bool,
isHidden: PropTypes.bool,
isFieldDuplicate: PropTypes.func,
isFieldHidden: PropTypes.func,
};
static defaultProps = {
@ -175,6 +198,10 @@ class EditorControl extends React.Component {
parentIds,
t,
validateMetaField,
isDisabled,
isHidden,
isFieldDuplicate,
isFieldHidden,
} = this.props;
const widgetName = field.get('widget');
@ -191,7 +218,12 @@ class EditorControl extends React.Component {
return (
<ClassNames>
{({ css, cx }) => (
<ControlContainer className={className}>
<ControlContainer
className={className}
css={css`
${isHidden && styleStrings.hidden};
`}
>
{widget.globalStyles && <Global styles={coreCss`${widget.globalStyles}`} />}
{errors && (
<ControlErrorsList>
@ -206,15 +238,14 @@ class EditorControl extends React.Component {
)}
</ControlErrorsList>
)}
<FieldLabel
<LabelComponent
field={field}
isActive={isSelected || this.state.styleActive}
hasErrors={hasErrors}
htmlFor={this.uniqueFieldId}
>
{`${field.get('label', field.get('name'))}${
isFieldOptional ? ` (${t('editor.editorControl.field.optional')})` : ''
}`}
</FieldLabel>
uniqueFieldId={this.uniqueFieldId}
isFieldOptional={isFieldOptional}
t={t}
/>
<Widget
classNameWrapper={cx(
css`
@ -230,6 +261,11 @@ class EditorControl extends React.Component {
${styleStrings.widgetError};
`]: hasErrors,
},
{
[css`
${styleStrings.disabled}
`]: isDisabled,
},
)}
classNameWidget={css`
${styleStrings.widget};
@ -282,6 +318,9 @@ class EditorControl extends React.Component {
parentIds={parentIds}
t={t}
validateMetaField={validateMetaField}
isDisabled={isDisabled}
isFieldDuplicate={isFieldDuplicate}
isFieldHidden={isFieldHidden}
/>
{fieldHint && (
<ControlHint active={isSelected || this.state.styleActive} error={hasErrors}>

View File

@ -1,8 +1,25 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { css } from '@emotion/core';
import styled from '@emotion/styled';
import EditorControl from './EditorControl';
import {
colors,
Dropdown,
DropdownItem,
StyledDropdownButton,
buttons,
text,
} from 'netlify-cms-ui-default';
import {
getI18nInfo,
isFieldTranslatable,
isFieldDuplicate,
isFieldHidden,
getLocaleDataPath,
hasI18n,
} from '../../../lib/i18n';
const ControlPaneContainer = styled.div`
max-width: 800px;
@ -11,7 +28,75 @@ const ControlPaneContainer = styled.div`
font-size: 16px;
`;
const LocaleButton = styled(StyledDropdownButton)`
${buttons.button};
${buttons.medium};
color: ${colors.controlLabel};
background: ${colors.textFieldBorder};
height: 100%;
&:after {
top: 11px;
}
`;
const LocaleButtonWrapper = styled.div`
display: flex;
`;
const StyledDropdown = styled(Dropdown)`
width: max-content;
margin-top: 20px;
margin-bottom: 20px;
`;
const LocaleDropdown = ({ locales, selectedLocale, onLocaleChange, t }) => {
return (
<StyledDropdown
renderButton={() => {
return (
<LocaleButtonWrapper>
<LocaleButton>
{t('editor.editorControlPane.i18n.writingInLocale', {
locale: selectedLocale.toUpperCase(),
})}
</LocaleButton>
</LocaleButtonWrapper>
);
}}
>
{locales.map(l => (
<DropdownItem
css={css`
${text.fieldLabel}
`}
key={l}
label={l}
onClick={() => onLocaleChange(l)}
/>
))}
</StyledDropdown>
);
};
const getFieldValue = ({ field, entry, isTranslatable, locale }) => {
if (field.get('meta')) {
return entry.getIn(['meta', field.get('name')]);
}
if (isTranslatable) {
const dataPath = getLocaleDataPath(locale);
return entry.getIn([...dataPath, field.get('name')]);
}
return entry.getIn(['data', field.get('name')]);
};
export default class ControlPane extends React.Component {
state = {
selectedLocale: this.props.locale,
};
componentValidate = {};
controlRef(field, wrappedControl) {
@ -22,23 +107,29 @@ export default class ControlPane extends React.Component {
wrappedControl.innerWrappedControl?.validate || wrappedControl.validate;
}
validate = () => {
handleLocaleChange = val => {
this.setState({ selectedLocale: val });
};
validate = async () => {
this.props.fields.forEach(field => {
if (field.get('widget') === 'hidden') return;
this.componentValidate[field.get('name')]();
});
};
switchToDefaultLocale = () => {
if (hasI18n(this.props.collection)) {
const { defaultLocale } = getI18nInfo(this.props.collection);
return new Promise(resolve => this.setState({ selectedLocale: defaultLocale }, resolve));
} else {
return Promise.resolve();
}
};
render() {
const {
collection,
fields,
entry,
fieldsMetaData,
fieldsErrors,
onChange,
onValidate,
} = this.props;
const { collection, entry, fieldsMetaData, fieldsErrors, onChange, onValidate, t } = this.props;
const fields = this.props.fields;
if (!collection || !fields) {
return null;
@ -48,26 +139,56 @@ export default class ControlPane extends React.Component {
return null;
}
const { locales, defaultLocale } = getI18nInfo(collection);
const locale = this.state.selectedLocale;
const i18n = locales && {
currentLocale: locale,
locales,
defaultLocale,
};
return (
<ControlPaneContainer>
{fields.map((field, i) => {
return field.get('widget') === 'hidden' ? null : (
{locales && (
<LocaleDropdown
locales={locales}
selectedLocale={locale}
onLocaleChange={this.handleLocaleChange}
t={t}
/>
)}
{fields
.filter(f => f.get('widget') !== 'hidden')
.map((field, i) => {
const isTranslatable = isFieldTranslatable(field, locale, defaultLocale);
const isDuplicate = isFieldDuplicate(field, locale, defaultLocale);
const isHidden = isFieldHidden(field, locale, defaultLocale);
const key = i18n ? `${locale}_${i}` : i;
return (
<EditorControl
key={i}
key={key}
field={field}
value={
field.get('meta')
? entry.getIn(['meta', field.get('name')])
: entry.getIn(['data', field.get('name')])
}
value={getFieldValue({
field,
entry,
locale,
isTranslatable,
})}
fieldsMetaData={fieldsMetaData}
fieldsErrors={fieldsErrors}
onChange={onChange}
onChange={(field, newValue, newMetadata) =>
onChange(field, newValue, newMetadata, i18n)
}
onValidate={onValidate}
processControlRef={this.controlRef.bind(this)}
controlRef={this.controlRef}
entry={entry}
collection={collection}
isDisabled={isDuplicate}
isHidden={isHidden}
isFieldDuplicate={field => isFieldDuplicate(field, locale, defaultLocale)}
isFieldHidden={field => isFieldHidden(field, locale, defaultLocale)}
/>
);
})}

View File

@ -60,6 +60,9 @@ export default class Widget extends Component {
isEditorComponent: PropTypes.bool,
isNewEditorComponent: PropTypes.bool,
entry: ImmutablePropTypes.map.isRequired,
isDisabled: PropTypes.bool,
isFieldDuplicate: PropTypes.func,
isFieldHidden: PropTypes.func,
};
shouldComponentUpdate(nextProps) {
@ -277,6 +280,9 @@ export default class Widget extends Component {
isNewEditorComponent,
parentIds,
t,
isDisabled,
isFieldDuplicate,
isFieldHidden,
} = this.props;
return React.createElement(controlComponent, {
@ -323,6 +329,9 @@ export default class Widget extends Component {
controlRef,
parentIds,
t,
isDisabled,
isFieldDuplicate,
isFieldHidden,
});
}
}

View File

@ -16,10 +16,12 @@ import { ScrollSync, ScrollSyncPane } from 'react-scroll-sync';
import EditorControlPane from './EditorControlPane/EditorControlPane';
import EditorPreviewPane from './EditorPreviewPane/EditorPreviewPane';
import EditorToolbar from './EditorToolbar';
import { hasI18n, getI18nInfo, getPreviewEntry } from '../../lib/i18n';
const PREVIEW_VISIBLE = 'cms.preview-visible';
const SCROLL_SYNC_ENABLED = 'cms.scroll-sync-enabled';
const SPLIT_PANE_POSITION = 'cms.split-pane-position';
const I18N_VISIBLE = 'cms.i18n-visible';
const styles = {
splitPane: css`
@ -100,8 +102,8 @@ const Editor = styled.div`
const PreviewPaneContainer = styled.div`
height: 100%;
overflow-y: auto;
pointer-events: ${props => (props.blockEntry ? 'none' : 'auto')};
overflow-y: ${props => (props.overFlow ? 'auto' : 'hidden')};
`;
const ControlPaneContainer = styled(PreviewPaneContainer)`
@ -117,11 +119,28 @@ const ViewControls = styled.div`
z-index: ${zIndex.zIndex299};
`;
const EditorContent = ({
i18nVisible,
previewVisible,
editor,
editorWithEditor,
editorWithPreview,
}) => {
if (i18nVisible) {
return editorWithEditor;
} else if (previewVisible) {
return editorWithPreview;
} else {
return <NoPreviewContainer>{editor}</NoPreviewContainer>;
}
};
class EditorInterface extends Component {
state = {
showEventBlocker: false,
previewVisible: localStorage.getItem(PREVIEW_VISIBLE) !== 'false',
scrollSyncEnabled: localStorage.getItem(SCROLL_SYNC_ENABLED) !== 'false',
i18nVisible: localStorage.getItem(I18N_VISIBLE) !== 'false',
};
handleSplitPaneDragStart = () => {
@ -132,14 +151,16 @@ class EditorInterface extends Component {
this.setState({ showEventBlocker: false });
};
handleOnPersist = (opts = {}) => {
handleOnPersist = async (opts = {}) => {
const { createNew = false, duplicate = false } = opts;
await this.controlPaneRef.switchToDefaultLocale();
this.controlPaneRef.validate();
this.props.onPersist({ createNew, duplicate });
};
handleOnPublish = (opts = {}) => {
handleOnPublish = async (opts = {}) => {
const { createNew = false, duplicate = false } = opts;
await this.controlPaneRef.switchToDefaultLocale();
this.controlPaneRef.validate();
this.props.onPublish({ createNew, duplicate });
};
@ -156,6 +177,16 @@ class EditorInterface extends Component {
localStorage.setItem(SCROLL_SYNC_ENABLED, newScrollSyncEnabled);
};
handleToggleI18n = () => {
const newI18nVisible = !this.state.i18nVisible;
this.setState({ i18nVisible: newI18nVisible });
localStorage.setItem(I18N_VISIBLE, newI18nVisible);
};
handleLeftPanelLocaleChange = locale => {
this.setState({ leftPanelLocale: locale });
};
render() {
const {
collection,
@ -186,27 +217,46 @@ class EditorInterface extends Component {
deployPreview,
draftKey,
editorBackLink,
t,
} = this.props;
const { previewVisible, scrollSyncEnabled, showEventBlocker } = this.state;
const { scrollSyncEnabled, showEventBlocker } = this.state;
const collectionPreviewEnabled = collection.getIn(['editor', 'preview'], true);
const collectionI18nEnabled = hasI18n(collection);
const { locales, defaultLocale } = getI18nInfo(this.props.collection);
const editorProps = {
collection,
entry,
fields,
fieldsMetaData,
fieldsErrors,
onChange,
onValidate,
};
const leftPanelLocale = this.state.leftPanelLocale || locales?.[0];
const editor = (
<ControlPaneContainer blockEntry={showEventBlocker}>
<ControlPaneContainer overFlow blockEntry={showEventBlocker}>
<EditorControlPane
collection={collection}
entry={entry}
fields={fields}
fieldsMetaData={fieldsMetaData}
fieldsErrors={fieldsErrors}
onChange={onChange}
onValidate={onValidate}
{...editorProps}
ref={c => (this.controlPaneRef = c)}
locale={leftPanelLocale}
t={t}
onLocaleChange={this.handleLeftPanelLocaleChange}
/>
</ControlPaneContainer>
);
const editor2 = (
<ControlPaneContainer overFlow={!this.state.scrollSyncEnabled} blockEntry={showEventBlocker}>
<EditorControlPane {...editorProps} locale={locales?.[1]} t={t} />
</ControlPaneContainer>
);
const previewEntry = collectionI18nEnabled
? getPreviewEntry(entry, leftPanelLocale, defaultLocale)
: entry;
const editorWithPreview = (
<ScrollSync enabled={this.state.scrollSyncEnabled}>
<div>
@ -222,7 +272,7 @@ class EditorInterface extends Component {
<PreviewPaneContainer blockEntry={showEventBlocker}>
<EditorPreviewPane
collection={collection}
entry={entry}
entry={previewEntry}
fields={fields}
fieldsMetaData={fieldsMetaData}
/>
@ -232,6 +282,27 @@ class EditorInterface extends Component {
</ScrollSync>
);
const editorWithEditor = (
<ScrollSync enabled={this.state.scrollSyncEnabled}>
<div>
<StyledSplitPane
maxSize={-100}
defaultSize={parseInt(localStorage.getItem(SPLIT_PANE_POSITION), 10) || '50%'}
onChange={size => localStorage.setItem(SPLIT_PANE_POSITION, size)}
onDragStarted={this.handleSplitPaneDragStart}
onDragFinished={this.handleSplitPaneDragFinished}
>
<ScrollSyncPane>{editor}</ScrollSyncPane>
<ScrollSyncPane>{editor2}</ScrollSyncPane>
</StyledSplitPane>
</div>
</ScrollSync>
);
const i18nVisible = collectionI18nEnabled && this.state.i18nVisible;
const previewVisible = collectionPreviewEnabled && this.state.previewVisible;
const scrollSyncVisible = i18nVisible || previewVisible;
return (
<EditorContainer>
<EditorToolbar
@ -268,6 +339,16 @@ class EditorInterface extends Component {
/>
<Editor key={draftKey}>
<ViewControls>
{collectionI18nEnabled && (
<EditorToggle
isActive={i18nVisible}
onClick={this.handleToggleI18n}
size="large"
type="page"
title="Toggle i18n"
marginTop="70px"
/>
)}
{collectionPreviewEnabled && (
<EditorToggle
isActive={previewVisible}
@ -277,7 +358,7 @@ class EditorInterface extends Component {
title="Toggle preview"
/>
)}
{collectionPreviewEnabled && previewVisible && (
{scrollSyncVisible && (
<EditorToggle
isActive={scrollSyncEnabled}
onClick={this.handleToggleScrollSync}
@ -287,11 +368,13 @@ class EditorInterface extends Component {
/>
)}
</ViewControls>
{collectionPreviewEnabled && this.state.previewVisible ? (
editorWithPreview
) : (
<NoPreviewContainer>{editor}</NoPreviewContainer>
)}
<EditorContent
i18nVisible={i18nVisible}
previewVisible={previewVisible}
editor={editor}
editorWithEditor={editorWithEditor}
editorWithPreview={editorWithPreview}
/>
</Editor>
</EditorContainer>
);
@ -327,6 +410,7 @@ EditorInterface.propTypes = {
deployPreview: ImmutablePropTypes.map,
loadDeployPreview: PropTypes.func.isRequired,
draftKey: PropTypes.string.isRequired,
t: PropTypes.func.isRequired,
};
export default EditorInterface;

View File

@ -450,5 +450,59 @@ describe('config', () => {
);
}).not.toThrow();
});
describe('i18n', () => {
it('should throw error when locale has invalid characters', () => {
expect(() => {
validateConfig(
merge({}, validConfig, {
i18n: {
structure: 'multiple_folders',
locales: ['en', 'tr.TR'],
},
}),
);
}).toThrowError(`'i18n.locales[1]' should match pattern "^[a-zA-Z-_]+$"`);
});
it('should throw error when locale is less than 2 characters', () => {
expect(() => {
validateConfig(
merge({}, validConfig, {
i18n: {
structure: 'multiple_folders',
locales: ['en', 't'],
},
}),
);
}).toThrowError(`'i18n.locales[1]' should NOT be shorter than 2 characters`);
});
it('should throw error when locale is more than 10 characters', () => {
expect(() => {
validateConfig(
merge({}, validConfig, {
i18n: {
structure: 'multiple_folders',
locales: ['en', 'a_very_long_locale'],
},
}),
);
}).toThrowError(`'i18n.locales[1]' should NOT be longer than 10 characters`);
});
it('should allow valid locales strings', () => {
expect(() => {
validateConfig(
merge({}, validConfig, {
i18n: {
structure: 'multiple_folders',
locales: ['en', 'tr-TR', 'zh_CHS'],
},
}),
);
}).not.toThrow();
});
});
});
});

View File

@ -1,8 +1,43 @@
import AJV from 'ajv';
import { select, uniqueItemProperties, instanceof as instanceOf } from 'ajv-keywords/keywords';
import {
select,
uniqueItemProperties,
instanceof as instanceOf,
prohibited,
} from 'ajv-keywords/keywords';
import ajvErrors from 'ajv-errors';
import { formatExtensions, frontmatterFormats, extensionFormatters } from 'Formats/formats';
import { getWidgets } from 'Lib/registry';
import { I18N_STRUCTURE, I18N_FIELD } from '../lib/i18n';
// A single locale code, e.g. 'en', 'tr-TR', 'zh_CHS'.
const localeType = { type: 'string', minLength: 2, maxLength: 10, pattern: '^[a-zA-Z-_]+$' };

// Shared shape of the i18n configuration object.
const i18n = {
  type: 'object',
  properties: {
    structure: { type: 'string', enum: Object.values(I18N_STRUCTURE) },
    locales: {
      type: 'array',
      minItems: 2,
      items: localeType,
      uniqueItems: true,
    },
    default_locale: localeType,
  },
};

// Top-level `i18n` config must spell out both structure and locales.
const i18nRoot = {
  ...i18n,
  required: ['structure', 'locales'],
};

// A collection may enable i18n with a plain boolean or provide overrides.
const i18nCollection = {
  oneOf: [{ type: 'boolean' }, i18n],
};

// A field takes a boolean or one of the I18N_FIELD mode strings.
const i18nField = {
  oneOf: [{ type: 'boolean' }, { type: 'string', enum: Object.values(I18N_FIELD) }],
};
/**
* Config for fields in both file and folder collections.
@ -20,6 +55,7 @@ const fieldsConfig = () => ({
label: { type: 'string' },
widget: { type: 'string' },
required: { type: 'boolean' },
i18n: i18nField,
hint: { type: 'string' },
pattern: {
type: 'array',
@ -100,6 +136,7 @@ const getConfigSchema = () => ({
],
},
locale: { type: 'string', examples: ['en', 'fr', 'de'] },
i18n: i18nRoot,
site_url: { type: 'string', examples: ['https://example.com'] },
display_url: { type: 'string', examples: ['https://example.com'] },
logo_url: { type: 'string', examples: ['https://example.com/images/logo.svg'] },
@ -219,6 +256,7 @@ const getConfigSchema = () => ({
additionalProperties: false,
minProperties: 1,
},
i18n: i18nCollection,
},
required: ['name', 'label'],
oneOf: [{ required: ['files'] }, { required: ['folder', 'fields'] }],
@ -289,6 +327,7 @@ export function validateConfig(config) {
uniqueItemProperties(ajv);
select(ajv);
instanceOf(ajv);
prohibited(ajv);
ajvErrors(ajv);
const valid = ajv.validate(getConfigSchema(), config);

View File

@ -0,0 +1,706 @@
import { fromJS } from 'immutable';
import * as i18n from '../i18n';
jest.mock('../../reducers/collections', () => {
return {
selectEntrySlug: () => 'index',
};
});
describe('i18n', () => {
describe('hasI18n', () => {
it('should return false for collection with no i18n', () => {
expect(i18n.hasI18n(fromJS({}))).toBe(false);
});
it('should return true for collection with i18n', () => {
expect(i18n.hasI18n(fromJS({ i18n: { structure: i18n.I18N_STRUCTURE.SINGLE_FILE } }))).toBe(
true,
);
});
});
describe('getI18nInfo', () => {
it('should return empty object for collection with no i18n', () => {
expect(i18n.getI18nInfo(fromJS({}))).toEqual({});
});
it('should return i18n object for collection with i18n', () => {
const i18nObject = {
locales: ['en', 'de'],
default_locale: 'en',
structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS,
};
expect(i18n.getI18nInfo(fromJS({ i18n: i18nObject }))).toEqual({
locales: ['en', 'de'],
defaultLocale: 'en',
structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS,
});
});
});
describe('getI18nFilesDepth', () => {
it('should increase depth when i18n structure is I18N_STRUCTURE.MULTIPLE_FOLDERS', () => {
expect(
i18n.getI18nFilesDepth(
fromJS({ i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS } }),
5,
),
).toBe(6);
});
it('should return current depth when i18n structure is not I18N_STRUCTURE.MULTIPLE_FOLDERS', () => {
expect(
i18n.getI18nFilesDepth(
fromJS({ i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FILES } }),
5,
),
).toBe(5);
expect(
i18n.getI18nFilesDepth(fromJS({ i18n: { structure: i18n.I18N_STRUCTURE.SINGLE_FILE } }), 5),
).toBe(5);
expect(i18n.getI18nFilesDepth(fromJS({}), 5)).toBe(5);
});
});
describe('isFieldTranslatable', () => {
it('should return true when not default locale and has I18N_FIELD.TRANSLATE', () => {
expect(
i18n.isFieldTranslatable(fromJS({ i18n: i18n.I18N_FIELD.TRANSLATE }), 'en', 'de'),
).toBe(true);
});
it('should return false when default locale and has I18N_FIELD.TRANSLATE', () => {
expect(
i18n.isFieldTranslatable(fromJS({ i18n: i18n.I18N_FIELD.TRANSLATE }), 'en', 'en'),
).toBe(false);
});
it("should return false when doesn't have i18n", () => {
expect(i18n.isFieldTranslatable(fromJS({}), 'en', 'en')).toBe(false);
});
});
describe('isFieldDuplicate', () => {
it('should return true when not default locale and has I18N_FIELD.TRANSLATE', () => {
expect(i18n.isFieldDuplicate(fromJS({ i18n: i18n.I18N_FIELD.DUPLICATE }), 'en', 'de')).toBe(
true,
);
});
it('should return false when default locale and has I18N_FIELD.TRANSLATE', () => {
expect(i18n.isFieldDuplicate(fromJS({ i18n: i18n.I18N_FIELD.DUPLICATE }), 'en', 'en')).toBe(
false,
);
});
it("should return false when doesn't have i18n", () => {
expect(i18n.isFieldDuplicate(fromJS({}), 'en', 'en')).toBe(false);
});
});
describe('isFieldHidden', () => {
it('should return true when not default locale and has I18N_FIELD.NONE', () => {
expect(i18n.isFieldHidden(fromJS({ i18n: i18n.I18N_FIELD.NONE }), 'en', 'de')).toBe(true);
});
it('should return false when default locale and has I18N_FIELD.NONE', () => {
expect(i18n.isFieldHidden(fromJS({ i18n: i18n.I18N_FIELD.NONE }), 'en', 'en')).toBe(false);
});
it("should return false when doesn't have i18n", () => {
expect(i18n.isFieldHidden(fromJS({}), 'en', 'en')).toBe(false);
});
});
describe('getLocaleDataPath', () => {
it('should return string array with locale as part of the data path', () => {
expect(i18n.getLocaleDataPath('de')).toEqual(['i18n', 'de', 'data']);
});
});
describe('getDataPath', () => {
it('should not include locale in path for default locale', () => {
expect(i18n.getDataPath('en', 'en')).toEqual(['data']);
});
it('should include locale in path for non default locale', () => {
expect(i18n.getDataPath('de', 'en')).toEqual(['i18n', 'de', 'data']);
});
});
describe('getFilePath', () => {
const args = ['md', 'src/content/index.md', 'index', 'de'];
it('should return directory path based on locale when structure is I18N_STRUCTURE.MULTIPLE_FOLDERS', () => {
expect(i18n.getFilePath(i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS, ...args)).toEqual(
'src/content/de/index.md',
);
});
it('should return file path based on locale when structure is I18N_STRUCTURE.MULTIPLE_FILES', () => {
expect(i18n.getFilePath(i18n.I18N_STRUCTURE.MULTIPLE_FILES, ...args)).toEqual(
'src/content/index.de.md',
);
});
it('should not modify path when structure is I18N_STRUCTURE.SINGLE_FILE', () => {
expect(i18n.getFilePath(i18n.I18N_STRUCTURE.SINGLE_FILE, ...args)).toEqual(
'src/content/index.md',
);
});
});
describe('getFilePaths', () => {
const args = ['md', 'src/content/index.md', 'index'];
it('should return file paths for all locales', () => {
expect(
i18n.getFilePaths(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS, locales: ['en', 'de'] },
}),
...args,
),
).toEqual(['src/content/en/index.md', 'src/content/de/index.md']);
});
});
describe('normalizeFilePath', () => {
it('should remove locale folder from path when structure is I18N_STRUCTURE.MULTIPLE_FOLDERS', () => {
expect(
i18n.normalizeFilePath(
i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS,
'src/content/en/index.md',
'en',
),
).toEqual('src/content/index.md');
});
it('should remove locale extension from path when structure is I18N_STRUCTURE.MULTIPLE_FILES', () => {
expect(
i18n.normalizeFilePath(i18n.I18N_STRUCTURE.MULTIPLE_FILES, 'src/content/index.en.md', 'en'),
).toEqual('src/content/index.md');
});
it('should not modify path when structure is I18N_STRUCTURE.SINGLE_FILE', () => {
expect(
i18n.normalizeFilePath(i18n.I18N_STRUCTURE.SINGLE_FILE, 'src/content/index.md', 'en'),
).toEqual('src/content/index.md');
});
});
describe('getI18nFiles', () => {
const locales = ['en', 'de', 'fr'];
const default_locale = 'en';
const args = [
'md',
fromJS({
data: { title: 'en_title' },
i18n: { de: { data: { title: 'de_title' } }, fr: { data: { title: 'fr_title' } } },
}),
map => map.get('data').toJS(),
'src/content/index.md',
'index',
];
it('should return a single file when structure is I18N_STRUCTURE.SINGLE_FILE', () => {
expect(
i18n.getI18nFiles(
fromJS({ i18n: { structure: i18n.I18N_STRUCTURE.SINGLE_FILE, locales, default_locale } }),
...args,
),
).toEqual([
{
path: 'src/content/index.md',
raw: {
en: { title: 'en_title' },
de: { title: 'de_title' },
fr: { title: 'fr_title' },
},
slug: 'index',
},
]);
});
it('should return a folder based files when structure is I18N_STRUCTURE.MULTIPLE_FOLDERS', () => {
expect(
i18n.getI18nFiles(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS, locales, default_locale },
}),
...args,
),
).toEqual([
{
path: 'src/content/en/index.md',
raw: { title: 'en_title' },
slug: 'index',
},
{
path: 'src/content/de/index.md',
raw: { title: 'de_title' },
slug: 'index',
},
{
path: 'src/content/fr/index.md',
raw: { title: 'fr_title' },
slug: 'index',
},
]);
});
it('should return a extension based files when structure is I18N_STRUCTURE.MULTIPLE_FILES', () => {
expect(
i18n.getI18nFiles(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FILES, locales, default_locale },
}),
...args,
),
).toEqual([
{
path: 'src/content/index.en.md',
raw: { title: 'en_title' },
slug: 'index',
},
{
path: 'src/content/index.de.md',
raw: { title: 'de_title' },
slug: 'index',
},
{
path: 'src/content/index.fr.md',
raw: { title: 'fr_title' },
slug: 'index',
},
]);
});
});
describe('getI18nEntry', () => {
const locales = ['en', 'de', 'fr', 'es'];
const default_locale = 'en';
const args = ['md', 'src/content/index.md', 'index'];
it('should return i18n entry content when structure is I18N_STRUCTURE.MULTIPLE_FOLDERS', async () => {
const data = {
'src/content/en/index.md': {
slug: 'index',
path: 'src/content/en/index.md',
data: { title: 'en_title' },
},
'src/content/de/index.md': {
slug: 'index',
path: 'src/content/de/index.md',
data: { title: 'de_title' },
},
'src/content/fr/index.md': {
slug: 'index',
path: 'src/content/fr/index.md',
data: { title: 'fr_title' },
},
};
const getEntryValue = jest.fn(path =>
data[path] ? Promise.resolve(data[path]) : Promise.reject('Not found'),
);
await expect(
i18n.getI18nEntry(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS, locales, default_locale },
}),
...args,
getEntryValue,
),
).resolves.toEqual({
slug: 'index',
path: 'src/content/index.md',
data: { title: 'en_title' },
i18n: {
de: { data: { title: 'de_title' } },
fr: { data: { title: 'fr_title' } },
},
raw: '',
});
});
it('should return i18n entry content when structure is I18N_STRUCTURE.MULTIPLE_FILES', async () => {
const data = {
'src/content/index.en.md': {
slug: 'index',
path: 'src/content/index.en.md',
data: { title: 'en_title' },
},
'src/content/index.de.md': {
slug: 'index',
path: 'src/content/index.de.md',
data: { title: 'de_title' },
},
'src/content/index.fr.md': {
slug: 'index',
path: 'src/content/index.fr.md',
data: { title: 'fr_title' },
},
};
const getEntryValue = jest.fn(path =>
data[path] ? Promise.resolve(data[path]) : Promise.reject('Not found'),
);
await expect(
i18n.getI18nEntry(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FILES, locales, default_locale },
}),
...args,
getEntryValue,
),
).resolves.toEqual({
slug: 'index',
path: 'src/content/index.md',
data: { title: 'en_title' },
i18n: {
de: { data: { title: 'de_title' } },
fr: { data: { title: 'fr_title' } },
},
raw: '',
});
});
it('should return single entry content when structure is I18N_STRUCTURE.SINGLE_FILE', async () => {
const data = {
'src/content/index.md': {
slug: 'index',
path: 'src/content/index.md',
data: {
en: { title: 'en_title' },
de: { title: 'de_title' },
fr: { title: 'fr_title' },
},
},
};
const getEntryValue = jest.fn(path => Promise.resolve(data[path]));
await expect(
i18n.getI18nEntry(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.SINGLE_FILE, locales, default_locale },
}),
...args,
getEntryValue,
),
).resolves.toEqual({
slug: 'index',
path: 'src/content/index.md',
data: {
title: 'en_title',
},
i18n: {
de: { data: { title: 'de_title' } },
fr: { data: { title: 'fr_title' } },
},
raw: '',
});
});
});
describe('groupEntries', () => {
const locales = ['en', 'de', 'fr'];
const default_locale = 'en';
const extension = 'md';
it('should group entries array when structure is I18N_STRUCTURE.MULTIPLE_FOLDERS', () => {
const entries = [
{
slug: 'index',
path: 'src/content/en/index.md',
data: { title: 'en_title' },
},
{
slug: 'index',
path: 'src/content/de/index.md',
data: { title: 'de_title' },
},
{
slug: 'index',
path: 'src/content/fr/index.md',
data: { title: 'fr_title' },
},
];
expect(
i18n.groupEntries(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS, locales, default_locale },
}),
extension,
entries,
),
).toEqual([
{
slug: 'index',
path: 'src/content/index.md',
data: { title: 'en_title' },
i18n: { de: { data: { title: 'de_title' } }, fr: { data: { title: 'fr_title' } } },
raw: '',
},
]);
});
it('should group entries array when structure is I18N_STRUCTURE.MULTIPLE_FILES', () => {
const entries = [
{
slug: 'index',
path: 'src/content/index.en.md',
data: { title: 'en_title' },
},
{
slug: 'index',
path: 'src/content/index.de.md',
data: { title: 'de_title' },
},
{
slug: 'index',
path: 'src/content/index.fr.md',
data: { title: 'fr_title' },
},
];
expect(
i18n.groupEntries(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FILES, locales, default_locale },
}),
extension,
entries,
),
).toEqual([
{
slug: 'index',
path: 'src/content/index.md',
data: { title: 'en_title' },
i18n: { de: { data: { title: 'de_title' } }, fr: { data: { title: 'fr_title' } } },
raw: '',
},
]);
});
it('should return entries array as is when structure is I18N_STRUCTURE.SINGLE_FILE', () => {
const entries = [
{
slug: 'index',
path: 'src/content/index.md',
data: {
en: { title: 'en_title' },
de: { title: 'de_title' },
fr: { title: 'fr_title' },
},
},
];
expect(
i18n.groupEntries(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.SINGLE_FILE, locales, default_locale },
}),
extension,
entries,
),
).toEqual([
{
slug: 'index',
path: 'src/content/index.md',
data: {
title: 'en_title',
},
i18n: { de: { data: { title: 'de_title' } }, fr: { data: { title: 'fr_title' } } },
raw: '',
},
]);
});
});
describe('getI18nDataFiles', () => {
const locales = ['en', 'de', 'fr'];
const default_locale = 'en';
const args = ['md', 'src/content/index.md', 'index'];
it('should add missing locale files to diff files when structure is MULTIPLE_FOLDERS', () => {
expect(
i18n.getI18nDataFiles(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FOLDERS, locales, default_locale },
}),
...args,
[{ path: 'src/content/fr/index.md', id: 'id', newFile: false }],
),
).toEqual([
{ path: 'src/content/en/index.md', id: '', newFile: false },
{ path: 'src/content/de/index.md', id: '', newFile: false },
{ path: 'src/content/fr/index.md', id: 'id', newFile: false },
]);
});
it('should add missing locale files to diff files when structure is MULTIPLE_FILES', () => {
expect(
i18n.getI18nDataFiles(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FILES, locales, default_locale },
}),
...args,
[{ path: 'src/content/index.fr.md', id: 'id', newFile: false }],
),
).toEqual([
{ path: 'src/content/index.en.md', id: '', newFile: false },
{ path: 'src/content/index.de.md', id: '', newFile: false },
{ path: 'src/content/index.fr.md', id: 'id', newFile: false },
]);
});
it('should return a single file when structure is SINGLE_FILE', () => {
expect(
i18n.getI18nDataFiles(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.SINGLE_FILE, locales, default_locale },
}),
...args,
[{ path: 'src/content/index.md', id: 'id', newFile: false }],
),
).toEqual([{ path: 'src/content/index.md', id: 'id', newFile: false }]);
});
});
describe('getI18nBackup', () => {
it('should return i18n with raw data', () => {
const locales = ['en', 'de', 'fr'];
const default_locale = 'en';
expect(
i18n.getI18nBackup(
fromJS({
i18n: { structure: i18n.I18N_STRUCTURE.MULTIPLE_FILES, locales, default_locale },
}),
fromJS({
data: 'raw_en',
i18n: {
de: { data: 'raw_de' },
fr: { data: 'raw_fr' },
},
}),
e => e.get('data'),
),
).toEqual({ de: { raw: 'raw_de' }, fr: { raw: 'raw_fr' } });
});
});
describe('formatI18nBackup', () => {
it('should return i18n with formatted data', () => {
expect(
i18n.formatI18nBackup({ de: { raw: 'raw_de' }, fr: { raw: 'raw_fr' } }, raw => ({
data: raw,
})),
).toEqual({ de: { data: 'raw_de' }, fr: { data: 'raw_fr' } });
});
});
describe('duplicateI18nFields', () => {
it('should duplicate non nested field when field i18n is DUPLICATE', () => {
const date = new Date('2020/01/01');
expect(
i18n
.duplicateI18nFields(
fromJS({ entry: { data: { date } } }),
fromJS({ name: 'date', i18n: i18n.I18N_FIELD.DUPLICATE }),
['en', 'de', 'fr'],
'en',
)
.toJS(),
).toEqual({
entry: {
data: { date },
i18n: {
de: { data: { date } },
fr: { data: { date } },
},
},
});
});
it('should not duplicate field when field i18n is not DUPLICATE', () => {
  const date = new Date('2020/01/01');
  // Cover both non-duplicating modes: 'translate' and 'none'.
  // (Was `I18N_FIELD.TRANSLATE.DUPLICATE`, a property access on the string
  // 'translate' that evaluates to `undefined`, so the second iteration
  // exercised no real field mode.)
  [i18n.I18N_FIELD.TRANSLATE, i18n.I18N_FIELD.NONE].forEach(fieldI18n => {
    expect(
      i18n
        .duplicateI18nFields(
          fromJS({ entry: { data: { date } } }),
          fromJS({ name: 'date', i18n: fieldI18n }),
          ['en', 'de', 'fr'],
          'en',
        )
        .toJS(),
    ).toEqual({
      entry: {
        data: { date },
      },
    });
  });
});
it('should duplicate nested field when nested fields i18n is DUPLICATE', () => {
const date = new Date('2020/01/01');
const value = fromJS({ title: 'title', date, boolean: true });
expect(
i18n
.duplicateI18nFields(
fromJS({ entry: { data: { object: value } } }),
fromJS({
name: 'object',
fields: [
{ name: 'string', i18n: i18n.I18N_FIELD.TRANSLATE },
{ name: 'date', i18n: i18n.I18N_FIELD.DUPLICATE },
{ name: 'boolean', i18n: i18n.I18N_FIELD.NONE },
],
i18n: i18n.I18N_FIELD.TRANSLATE,
}),
['en', 'de', 'fr'],
'en',
)
.toJS(),
).toEqual({
entry: {
data: { object: value.toJS() },
i18n: {
de: { data: { object: { date } } },
fr: { data: { object: { date } } },
},
},
});
});
});
describe('getPreviewEntry', () => {
it('should set data to i18n data when locale is not default', () => {
expect(
i18n
.getPreviewEntry(
fromJS({
data: { title: 'en', body: 'markdown' },
i18n: { de: { data: { title: 'de' } } },
}),
'de',
)
.toJS(),
).toEqual({
data: { title: 'de' },
i18n: { de: { data: { title: 'de' } } },
});
});
it('should not change entry for default locale', () => {
const entry = fromJS({
data: { title: 'en', body: 'markdown' },
i18n: { de: { data: { title: 'de' } } },
});
expect(i18n.getPreviewEntry(entry, 'en', 'en')).toBe(entry);
});
});
});

View File

@ -0,0 +1,407 @@
import { Map, List } from 'immutable';
import { set, trimEnd, groupBy } from 'lodash';
import { Collection, Entry, EntryDraft, EntryField, EntryMap } from '../types/redux';
import { selectEntrySlug } from '../reducers/collections';
import { EntryValue } from '../valueObjects/Entry';
// Key under which i18n config/data lives on collections and entries.
export const I18N = 'i18n';

// How localized content is laid out in the repository.
export enum I18N_STRUCTURE {
  MULTIPLE_FOLDERS = 'multiple_folders',
  MULTIPLE_FILES = 'multiple_files',
  SINGLE_FILE = 'single_file',
}

// Per-field i18n mode: translated per locale, copied from the default
// locale, or present only in the default locale.
export enum I18N_FIELD {
  TRANSLATE = 'translate',
  DUPLICATE = 'duplicate',
  NONE = 'none',
}
/** Whether the collection has an i18n configuration. */
export const hasI18n = (collection: Collection) => collection.has(I18N);

type I18nInfo = {
  locales: string[];
  defaultLocale: string;
  structure: I18N_STRUCTURE;
};

/**
 * Returns the collection's i18n settings with camelCased keys, or an empty
 * object when the collection has no i18n configuration.
 */
export const getI18nInfo = (collection: Collection) => {
  if (!hasI18n(collection)) {
    return {};
  }
  const config = collection.get(I18N).toJS();
  return {
    structure: config.structure,
    locales: config.locales,
    defaultLocale: config.default_locale,
  } as I18nInfo;
};
/**
 * Folder depth needed when listing i18n files: one level deeper for the
 * multiple-folders structure (each locale gets its own directory),
 * unchanged for every other structure.
 */
export const getI18nFilesDepth = (collection: Collection, depth: number) => {
  const { structure } = getI18nInfo(collection) as I18nInfo;
  return structure === I18N_STRUCTURE.MULTIPLE_FOLDERS ? depth + 1 : depth;
};
/** True when `field` should be translated for `locale` (non-default locales only). */
export const isFieldTranslatable = (field: EntryField, locale: string, defaultLocale: string) =>
  locale !== defaultLocale && field.get(I18N) === I18N_FIELD.TRANSLATE;

/** True when `field` should copy the default-locale value for `locale`. */
export const isFieldDuplicate = (field: EntryField, locale: string, defaultLocale: string) =>
  locale !== defaultLocale && field.get(I18N) === I18N_FIELD.DUPLICATE;

/** True when `field` should be hidden for `locale` (edited only in the default locale). */
export const isFieldHidden = (field: EntryField, locale: string, defaultLocale: string) =>
  locale !== defaultLocale && field.get(I18N) === I18N_FIELD.NONE;
/** Path of a non-default locale's data inside an entry: ['i18n', locale, 'data']. */
export const getLocaleDataPath = (locale: string) => [I18N, locale, 'data'];

/** Data path for `locale`: ['data'] for the default locale, locale-specific otherwise. */
export const getDataPath = (locale: string, defaultLocale: string) =>
  locale === defaultLocale ? ['data'] : getLocaleDataPath(locale);
/**
 * Maps an entry path to the on-disk path of a single locale:
 * - multiple_folders: src/content/index.md -> src/content/<locale>/index.md
 * - multiple_files:   src/content/index.md -> src/content/index.<locale>.md
 * - single_file:      path unchanged
 */
export const getFilePath = (
  structure: I18N_STRUCTURE,
  extension: string,
  path: string,
  slug: string,
  locale: string,
) => {
  if (structure === I18N_STRUCTURE.MULTIPLE_FOLDERS) {
    return path.replace(`/${slug}`, `/${locale}/${slug}`);
  }
  if (structure === I18N_STRUCTURE.MULTIPLE_FILES) {
    return path.replace(extension, `${locale}.${extension}`);
  }
  // SINGLE_FILE and any unknown structure: all locales share one file.
  return path;
};
/**
 * Extracts the locale from a localized file path, e.g.
 * 'src/content/de/index.md' (multiple_folders) or
 * 'src/content/index.de.md' (multiple_files) -> 'de'.
 * Returns '' for the single-file structure, where paths carry no locale.
 */
export const getLocaleFromPath = (structure: I18N_STRUCTURE, extension: string, path: string) => {
  switch (structure) {
    case I18N_STRUCTURE.MULTIPLE_FOLDERS: {
      const parts = path.split('/');
      // filename
      parts.pop();
      // locale is the last directory component
      return parts.pop();
    }
    case I18N_STRUCTURE.MULTIPLE_FILES: {
      // Strip the trailing `.${extension}` suffix, then take the locale
      // segment before it. The previous lodash `trimEnd(path, `.${ext}`)`
      // treated its second argument as a character *set*, so slugs ending
      // in extension characters over-trimmed (e.g. 'index.md.md' yielded
      // 'index' instead of 'md').
      const suffix = `.${extension}`;
      const base = path.endsWith(suffix) ? path.slice(0, -suffix.length) : path;
      return base.split('.').pop();
    }
    case I18N_STRUCTURE.SINGLE_FILE:
    default:
      return '';
  }
};
/**
 * Returns the locale-specific file path for every locale configured on the
 * collection (one entry per locale, in configuration order).
 */
export const getFilePaths = (
  collection: Collection,
  extension: string,
  path: string,
  slug: string,
) => {
  const { structure, locales } = getI18nInfo(collection) as I18nInfo;
  return locales.map(locale =>
    getFilePath(structure as I18N_STRUCTURE, extension, path, slug, locale),
  );
};
/**
 * Removes the locale marker from a locale-specific file path, yielding the
 * canonical (default) entry path. Inverse of getFilePath for a known locale.
 */
export const normalizeFilePath = (structure: I18N_STRUCTURE, path: string, locale: string) => {
  if (structure === I18N_STRUCTURE.MULTIPLE_FOLDERS) {
    return path.replace(`${locale}/`, '');
  }
  if (structure === I18N_STRUCTURE.MULTIPLE_FILES) {
    return path.replace(`.${locale}`, '');
  }
  // SINGLE_FILE and any unknown structure keep the original path.
  return path;
};
/**
 * Computes the data file(s) to persist for an entry of an i18n collection.
 *
 * SINGLE_FILE: produces one file whose data is a map keyed by locale.
 * Other structures: produces one file per locale, skipping locales with no
 * content. When `newPath` is given, each file also carries its renamed path.
 */
export const getI18nFiles = (
  collection: Collection,
  extension: string,
  entryDraft: EntryMap,
  entryToRaw: (entryDraft: EntryMap) => string,
  path: string,
  slug: string,
  newPath?: string,
) => {
  const { structure, defaultLocale, locales } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    // Collapse all locales into a single immutable map keyed by locale.
    let data = Map<string, unknown>({});
    for (const locale of locales) {
      data = data.set(locale, entryDraft.getIn(getDataPath(locale, defaultLocale)));
    }
    const draft = entryDraft.set('data', data);
    const singleFile = {
      path: getFilePath(structure, extension, path, slug, locales[0]),
      slug,
      raw: entryToRaw(draft),
      ...(newPath && {
        newPath: getFilePath(structure, extension, newPath, slug, locales[0]),
      }),
    };
    return [singleFile];
  }

  // One file per locale; locales without content are dropped.
  return locales
    .map(locale => {
      const draft = entryDraft.set('data', entryDraft.getIn(getDataPath(locale, defaultLocale)));
      return {
        path: getFilePath(structure, extension, path, slug, locale),
        slug,
        raw: draft.get('data') ? entryToRaw(draft) : '',
        ...(newPath && {
          newPath: getFilePath(structure, extension, newPath, slug, locale),
        }),
      };
    })
    .filter(dataFile => dataFile.raw);
};
export const getI18nBackup = (
collection: Collection,
entry: EntryMap,
entryToRaw: (entry: EntryMap) => string,
) => {
const { locales, defaultLocale } = getI18nInfo(collection) as I18nInfo;
const i18nBackup = locales
.filter(l => l !== defaultLocale)
.reduce((acc, locale) => {
const dataPath = getDataPath(locale, defaultLocale);
const data = entry.getIn(dataPath);
if (!data) {
return acc;
}
const draft = entry.set('data', data);
return { ...acc, [locale]: { raw: entryToRaw(draft) } };
}, {} as Record<string, { raw: string }>);
return i18nBackup;
};
/**
 * Parses a raw i18n backup (locale -> { raw }) back into entry data,
 * producing a map of locale -> { data }.
 */
export const formatI18nBackup = (
  i18nBackup: Record<string, { raw: string }>,
  formatRawData: (raw: string) => EntryValue,
) => {
  const i18n: Record<string, { data: unknown }> = {};
  for (const [locale, { raw }] of Object.entries(i18nBackup)) {
    i18n[locale] = { data: formatRawData(raw).data };
  }
  return i18n;
};
/**
 * Merges per-locale entry values into a single entry value: `data` comes from
 * the default locale's file, and every other locale is nested under
 * `i18n.<locale>.data`. Falls back to the first available value (with a
 * warning) when no file exists for the default locale.
 */
const mergeValues = (
  collection: Collection,
  structure: I18N_STRUCTURE,
  defaultLocale: string,
  values: { locale: string; value: EntryValue }[],
) => {
  let defaultEntry = values.find(e => e.locale === defaultLocale);
  if (!defaultEntry) {
    defaultEntry = values[0];
    // Fixed typo in the original message ("Could not locale entry").
    console.warn(`Could not locate entry for default locale '${defaultLocale}'`);
  }
  // Nest every non-default locale under i18n.<locale>.data.
  const i18n = values
    .filter(e => e.locale !== defaultEntry!.locale)
    .reduce((acc, { locale, value }) => {
      const dataPath = getLocaleDataPath(locale);
      return set(acc, dataPath, value.data);
    }, {});
  // Canonical path/slug are derived from the default locale's file path.
  const path = normalizeFilePath(structure, defaultEntry.value.path, defaultLocale);
  const slug = selectEntrySlug(collection, path) as string;
  const entryValue: EntryValue = {
    ...defaultEntry.value,
    raw: '',
    ...i18n,
    path,
    slug,
  };
  return entryValue;
};
/**
 * Converts a SINGLE_FILE entry value, whose `data` is keyed by locale, into
 * the canonical shape: default-locale content under `data` and every other
 * locale (when present) under `i18n.<locale>.data`. `raw` is cleared.
 */
const mergeSingleFileValue = (entryValue: EntryValue, defaultLocale: string, locales: string[]) => {
  const data = entryValue.data[defaultLocale];
  const i18n: Record<string, { data: unknown }> = {};
  for (const locale of locales) {
    if (locale === defaultLocale) {
      continue;
    }
    const localeData = entryValue.data[locale];
    if (localeData) {
      i18n[locale] = { data: localeData };
    }
  }
  return {
    ...entryValue,
    data,
    i18n,
    raw: '',
  };
};
/**
 * Loads an entry of an i18n collection.
 *
 * SINGLE_FILE: loads the one file and splits its per-locale data.
 * Other structures: loads every locale's file in parallel (missing files are
 * tolerated) and merges the successful ones into a single entry value.
 */
export const getI18nEntry = async (
  collection: Collection,
  extension: string,
  path: string,
  slug: string,
  getEntryValue: (path: string) => Promise<EntryValue>,
) => {
  const { structure, locales, defaultLocale } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    const value = await getEntryValue(path);
    return mergeSingleFileValue(value, defaultLocale, locales);
  }

  const perLocale = await Promise.all(
    locales.map(async locale => {
      const localePath = getFilePath(structure, extension, path, slug, locale);
      // A missing file for a locale is not an error; it is filtered out below.
      const value = await getEntryValue(localePath).catch(() => null);
      return { value, locale };
    }),
  );
  const nonNullValues = perLocale.filter(e => e.value !== null) as {
    value: EntryValue;
    locale: string;
  }[];
  return mergeValues(collection, structure, defaultLocale, nonNullValues);
};
/**
 * Groups a flat list of per-locale entry files into merged entry values.
 * For SINGLE_FILE structures each file is already a complete entry; otherwise
 * files are grouped by their normalized (locale-less) path and merged.
 */
export const groupEntries = (collection: Collection, extension: string, entries: EntryValue[]) => {
  const { structure, defaultLocale, locales } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    return entries.map(entry => mergeSingleFileValue(entry, defaultLocale, locales));
  }

  const withLocale = entries.map(entry => ({
    locale: getLocaleFromPath(structure, extension, entry.path) as string,
    value: entry,
  }));
  const grouped = groupBy(withLocale, ({ locale, value }) =>
    normalizeFilePath(structure, value.path, locale),
  );
  return Object.values(grouped).map(values =>
    mergeValues(collection, structure, defaultLocale, values),
  );
};
/**
 * Returns one diff-file descriptor per locale path of an entry. Paths present
 * in `diffFiles` keep their descriptor; missing ones get a placeholder with an
 * empty id. SINGLE_FILE structures pass `diffFiles` through unchanged.
 */
export const getI18nDataFiles = (
  collection: Collection,
  extension: string,
  path: string,
  slug: string,
  diffFiles: { path: string; id: string; newFile: boolean }[],
) => {
  const { structure } = getI18nInfo(collection) as I18nInfo;
  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    return diffFiles;
  }
  const localePaths = getFilePaths(collection, extension, path, slug);
  return localePaths.map(localePath => {
    const existing = diffFiles.find(file => file.path === localePath);
    return existing || { path: localePath, id: '', newFile: false };
  });
};
/**
 * Copies the value of an `i18n: duplicate` field from the draft's default
 * locale data into every other locale, then recurses into nested widgets
 * (a single `field` child or a `fields` list). Returns the updated draft.
 *
 * `fieldPath` is the key path of the current field inside the entry data;
 * it defaults to the top-level field's own name.
 */
export const duplicateI18nFields = (
  entryDraft: EntryDraft,
  field: EntryField,
  locales: string[],
  defaultLocale: string,
  fieldPath: string[] = [field.get('name')],
) => {
  // Current value in the default locale (the draft's main `data` map).
  const value = entryDraft.getIn(['entry', 'data', ...fieldPath]);
  if (field.get(I18N) === I18N_FIELD.DUPLICATE) {
    // Mirror the default-locale value into every non-default locale.
    locales
      .filter(l => l !== defaultLocale)
      .forEach(l => {
        entryDraft = entryDraft.setIn(
          ['entry', ...getDataPath(l, defaultLocale), ...fieldPath],
          value,
        );
      });
  }
  // Recurse into nested fields. Recursion is skipped when the current value
  // is a List, since list items are not addressable via this static path.
  if (field.has('field') && !List.isList(value)) {
    const fields = [field.get('field') as EntryField];
    fields.forEach(field => {
      entryDraft = duplicateI18nFields(entryDraft, field, locales, defaultLocale, [
        ...fieldPath,
        field.get('name'),
      ]);
    });
  } else if (field.has('fields') && !List.isList(value)) {
    const fields = field.get('fields')!.toArray() as EntryField[];
    fields.forEach(field => {
      entryDraft = duplicateI18nFields(entryDraft, field, locales, defaultLocale, [
        ...fieldPath,
        field.get('name'),
      ]);
    });
  }
  return entryDraft;
};
/**
 * Returns the entry with `data` swapped to the requested locale's content for
 * preview purposes; the default locale's entry is returned as-is.
 */
export const getPreviewEntry = (entry: EntryMap, locale: string, defaultLocale: string) => {
  return locale === defaultLocale
    ? entry
    : entry.set('data', entry.getIn([I18N, locale, 'data']));
};
/**
 * Runs `serializeValues` over the data of every non-default locale of an
 * entry, writing each result back in place. Returns the updated entry.
 */
export const serializeI18n = (
  collection: Collection,
  entry: Entry,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  serializeValues: (data: any) => any,
) => {
  const { locales, defaultLocale } = getI18nInfo(collection) as I18nInfo;
  for (const locale of locales) {
    if (locale === defaultLocale) {
      continue;
    }
    const dataPath = getLocaleDataPath(locale);
    entry = entry.setIn(dataPath, serializeValues(entry.getIn(dataPath)));
  }
  return entry;
};

View File

@ -25,6 +25,7 @@ import {
import { get } from 'lodash';
import { selectFolderEntryExtension, selectHasMetaPath } from './collections';
import { join } from 'path';
import { getDataPath, duplicateI18nFields } from '../lib/i18n';
const initialState = Map({
entry: Map(),
@ -90,20 +91,25 @@ const entryDraftReducer = (state = Map(), action) => {
}
case DRAFT_CHANGE_FIELD: {
return state.withMutations(state => {
const { field, value, metadata, entries } = action.payload;
const { field, value, metadata, entries, i18n } = action.payload;
const name = field.get('name');
const meta = field.get('meta');
const dataPath = (i18n && getDataPath(i18n.currentLocale, i18n.defaultLocale)) || ['data'];
if (meta) {
state.setIn(['entry', 'meta', name], value);
} else {
state.setIn(['entry', 'data', name], value);
state.setIn(['entry', ...dataPath, name], value);
if (i18n) {
state = duplicateI18nFields(state, field, i18n.locales, i18n.defaultLocale);
}
}
state.mergeDeepIn(['fieldsMetaData'], fromJS(metadata));
const newData = state.getIn(['entry', 'data']);
const newData = state.getIn(['entry', ...dataPath]);
const newMeta = state.getIn(['entry', 'meta']);
state.set(
'hasChanged',
!entries.some(e => newData.equals(e.get('data'))) ||
!entries.some(e => newData.equals(e.get(...dataPath))) ||
!entries.some(e => newMeta.equals(e.get('meta'))),
);
});

View File

@ -16,10 +16,12 @@ export interface StaticallyTypedRecord<T> {
keys: [K1, K2, K3],
defaultValue?: V,
): T[K1][K2][K3];
getIn(keys: string[]): unknown;
setIn<K1 extends keyof T, K2 extends keyof T[K1], V extends T[K1][K2]>(
keys: [K1, K2],
value: V,
): StaticallyTypedRecord<T>;
setIn(keys: string[], value: unknown): StaticallyTypedRecord<T> & T;
toJS(): T;
isEmpty(): boolean;
some<K extends keyof T>(predicate: (value: T[K], key: K, iter: this) => boolean): boolean;

View File

@ -123,6 +123,7 @@ export type EntryField = StaticallyTypedRecord<{
public_folder?: string;
comment?: string;
meta?: boolean;
i18n: 'translate' | 'duplicate' | 'none';
}>;
export type EntryFields = List<EntryField>;
@ -161,6 +162,12 @@ type MetaObject = {
type Meta = StaticallyTypedRecord<MetaObject>;
type i18n = StaticallyTypedRecord<{
structure: string;
locales: string[];
default_locale: string;
}>;
type CollectionObject = {
name: string;
folder?: string;
@ -187,6 +194,7 @@ type CollectionObject = {
view_filters: List<StaticallyTypedRecord<ViewFilter>>;
nested?: Nested;
meta?: Meta;
i18n: i18n;
};
export type Collection = StaticallyTypedRecord<CollectionObject>;

View File

@ -30,6 +30,10 @@ export interface EntryValue {
updatedOn: string;
status?: string;
meta: { path?: string };
i18n?: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
[locale: string]: any;
};
}
export function createEntry(collection: string, slug = '', path = '', options: Options = {}) {

View File

@ -30,11 +30,17 @@ export interface ImplementationEntry {
file: { path: string; label?: string; id?: string | null; author?: string; updatedOn?: string };
}
export interface UnpublishedEntryDiff {
id: string;
path: string;
newFile: boolean;
}
export interface UnpublishedEntry {
slug: string;
collection: string;
status: string;
diffs: { id: string; path: string; newFile: boolean }[];
diffs: UnpublishedEntryDiff[];
updatedAt: string;
}
@ -45,13 +51,23 @@ export interface Map {
set: <T>(key: string, value: T) => Map;
}
export type DataFile = {
path: string;
slug: string;
raw: string;
newPath?: string;
};
export type AssetProxy = {
path: string;
fileObj?: File;
toBase64?: () => Promise<string>;
};
export type Entry = { path: string; slug: string; raw: string; newPath?: string };
export type Entry = {
dataFiles: DataFile[];
assets: AssetProxy[];
};
export type PersistOptions = {
newEntry?: boolean;
@ -116,9 +132,9 @@ export interface Implementation {
getMedia: (folder?: string) => Promise<ImplementationMediaFile[]>;
getMediaFile: (path: string) => Promise<ImplementationMediaFile>;
persistEntry: (obj: Entry, assetProxies: AssetProxy[], opts: PersistOptions) => Promise<void>;
persistEntry: (entry: Entry, opts: PersistOptions) => Promise<void>;
persistMedia: (file: AssetProxy, opts: PersistOptions) => Promise<ImplementationMediaFile>;
deleteFile: (path: string, commitMessage: string) => Promise<void>;
deleteFiles: (paths: string[], commitMessage: string) => Promise<void>;
unpublishedEntries: () => Promise<string[]>;
unpublishedEntry: (args: {

View File

@ -20,6 +20,7 @@ import { asyncLock, AsyncLock as AL } from './asyncLock';
import {
Implementation as I,
ImplementationEntry as IE,
UnpublishedEntryDiff as UED,
UnpublishedEntry as UE,
ImplementationMediaFile as IMF,
ImplementationFile as IF,
@ -40,6 +41,7 @@ import {
UnpublishedEntryMediaFile as UEMF,
blobToFileObj,
allEntriesByFolder,
DataFile as DF,
} from './implementation';
import {
readFile,
@ -76,6 +78,7 @@ import {
export type AsyncLock = AL;
export type Implementation = I;
export type ImplementationEntry = IE;
export type UnpublishedEntryDiff = UED;
export type UnpublishedEntry = UE;
export type ImplementationMediaFile = IMF;
export type ImplementationFile = IF;
@ -91,6 +94,7 @@ export type ApiRequest = AR;
export type Config = C;
export type FetchError = FE;
export type PointerFile = PF;
export type DataFile = DF;
export const NetlifyCmsLibUtil = {
APIError,

View File

@ -84,6 +84,9 @@ const en = {
invalidPath: `'%{path}' is not a valid path`,
pathExists: `Path '%{path}' already exists`,
},
i18n: {
writingInLocale: 'Writing in %{locale}',
},
},
editor: {
onLeavePage: 'Are you sure you want to leave this page?',

View File

@ -23,6 +23,7 @@
},
"dependencies": {
"@hapi/joi": "^17.0.2",
"async-mutex": "^0.2.4",
"cors": "^2.8.5",
"dotenv": "^8.2.0",
"express": "^4.17.1",

View File

@ -26,7 +26,7 @@ describe('defaultSchema', () => {
assetFailure(
schema.validate({ action: 'unknown', params: {} }),
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, unpublishedEntryDataFile, unpublishedEntryMediaFile, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, getDeployPreview]',
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, unpublishedEntryDataFile, unpublishedEntryMediaFile, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, deleteFiles, getDeployPreview]',
);
});
@ -274,8 +274,19 @@ describe('defaultSchema', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'persistEntry', params: { ...defaultParams } }),
'"params.entry" is required',
schema.validate({
action: 'persistEntry',
params: {
...defaultParams,
assets: [],
options: {
commitMessage: 'commitMessage',
useWorkflow: true,
status: 'draft',
},
},
}),
'"params" must contain at least one of [entry, dataFiles]',
);
assetFailure(
schema.validate({
@ -309,7 +320,7 @@ describe('defaultSchema', () => {
);
});
it('should pass on valid params', () => {
it('should pass on valid params (entry argument)', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'persistEntry',
@ -327,6 +338,25 @@ describe('defaultSchema', () => {
expect(error).toBeUndefined();
});
it('should pass on valid params (dataFiles argument)', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'persistEntry',
params: {
...defaultParams,
dataFiles: [{ slug: 'slug', path: 'path', raw: 'content' }],
assets: [{ path: 'path', content: 'content', encoding: 'base64' }],
options: {
commitMessage: 'commitMessage',
useWorkflow: true,
status: 'draft',
},
},
});
expect(error).toBeUndefined();
});
});
describe('updateUnpublishedEntryStatus', () => {
@ -491,6 +521,31 @@ describe('defaultSchema', () => {
});
});
describe('deleteFiles', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'deleteFiles', params: { ...defaultParams } }),
'"params.paths" is required',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'deleteFiles',
params: {
...defaultParams,
paths: ['src/static/images/image.png'],
options: { commitMessage: 'commitMessage' },
},
});
expect(error).toBeUndefined();
});
});
describe('getDeployPreview', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();

View File

@ -18,6 +18,7 @@ const allowedActions = [
'getMediaFile',
'persistMedia',
'deleteFile',
'deleteFiles',
'getDeployPreview',
];
@ -39,6 +40,13 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
encoding: requiredString.valid('base64'),
});
const dataFile = Joi.object({
slug: requiredString,
path,
raw: requiredString,
newPath: path.optional(),
});
const params = Joi.when('action', {
switch: [
{
@ -122,12 +130,8 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
then: defaultParams
.keys({
cmsLabelPrefix: Joi.string().optional(),
entry: Joi.object({
slug: requiredString,
path,
raw: requiredString,
newPath: path.optional(),
}).required(),
entry: dataFile, // entry is kept for backwards compatibility
dataFiles: Joi.array().items(dataFile),
assets: Joi.array()
.items(asset)
.required(),
@ -138,6 +142,7 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
status: requiredString,
}).required(),
})
.xor('entry', 'dataFiles')
.required(),
},
{
@ -198,6 +203,20 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
})
.required(),
},
{
is: 'deleteFiles',
then: defaultParams
.keys({
paths: Joi.array()
.items(path)
.min(1)
.required(),
options: Joi.object({
commitMessage: requiredString,
}).required(),
})
.required(),
},
{
is: 'getDeployPreview',
then: defaultParams

View File

@ -12,6 +12,8 @@ import {
GetMediaFileParams,
PersistMediaParams,
DeleteFileParams,
DeleteFilesParams,
DataFile,
} from '../types';
import { listRepoFiles, deleteFile, writeFile, move } from '../utils/fs';
import { entriesFromFiles, readMediaFile } from '../utils/entries';
@ -61,16 +63,27 @@ export const localFsMiddleware = ({ repoPath, logger }: FsOptions) => {
break;
}
case 'persistEntry': {
const { entry, assets } = body.params as PersistEntryParams;
await writeFile(path.join(repoPath, entry.path), entry.raw);
const {
entry,
dataFiles = [entry as DataFile],
assets,
} = body.params as PersistEntryParams;
await Promise.all(
dataFiles.map(dataFile => writeFile(path.join(repoPath, dataFile.path), dataFile.raw)),
);
// save assets
await Promise.all(
assets.map(a =>
writeFile(path.join(repoPath, a.path), Buffer.from(a.content, a.encoding)),
),
);
if (entry.newPath) {
await move(path.join(repoPath, entry.path), path.join(repoPath, entry.newPath));
if (dataFiles.every(dataFile => dataFile.newPath)) {
dataFiles.forEach(async dataFile => {
await move(
path.join(repoPath, dataFile.path),
path.join(repoPath, dataFile.newPath!),
);
});
}
res.json({ message: 'entry persisted' });
break;
@ -104,6 +117,12 @@ export const localFsMiddleware = ({ repoPath, logger }: FsOptions) => {
res.json({ message: `deleted file ${filePath}` });
break;
}
case 'deleteFiles': {
const { paths } = body.params as DeleteFilesParams;
await Promise.all(paths.map(filePath => deleteFile(repoPath, filePath)));
res.json({ message: `deleted files ${paths.join(', ')}` });
break;
}
case 'getDeployPreview': {
res.json(null);
break;

View File

@ -26,16 +26,18 @@ import {
PersistMediaParams,
DeleteFileParams,
UpdateUnpublishedEntryStatusParams,
Entry,
DataFile,
GetMediaFileParams,
DeleteEntryParams,
DeleteFilesParams,
UnpublishedEntryDataFileParams,
UnpublishedEntryMediaFileParams,
} from '../types';
// eslint-disable-next-line import/default
import simpleGit from 'simple-git/promise';
import { Mutex, withTimeout } from 'async-mutex';
import { pathTraversal } from '../joi/customValidators';
import { listRepoFiles, writeFile, move } from '../utils/fs';
import { listRepoFiles, writeFile, move, deleteFile, getUpdateDate } from '../utils/fs';
import { entriesFromFiles, readMediaFile } from '../utils/entries';
const commit = async (git: simpleGit.SimpleGit, commitMessage: string) => {
@ -76,18 +78,22 @@ type GitOptions = {
const commitEntry = async (
git: simpleGit.SimpleGit,
repoPath: string,
entry: Entry,
dataFiles: DataFile[],
assets: Asset[],
commitMessage: string,
) => {
// save entry content
await writeFile(path.join(repoPath, entry.path), entry.raw);
await Promise.all(
dataFiles.map(dataFile => writeFile(path.join(repoPath, dataFile.path), dataFile.raw)),
);
// save assets
await Promise.all(
assets.map(a => writeFile(path.join(repoPath, a.path), Buffer.from(a.content, a.encoding))),
);
if (entry.newPath) {
await move(path.join(repoPath, entry.path), path.join(repoPath, entry.newPath));
if (dataFiles.every(dataFile => dataFile.newPath)) {
dataFiles.forEach(async dataFile => {
await move(path.join(repoPath, dataFile.path), path.join(repoPath, dataFile.newPath!));
});
}
// commits files
@ -162,8 +168,13 @@ export const getSchema = ({ repoPath }: { repoPath: string }) => {
export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
const git = simpleGit(repoPath).silent(false);
// we can only perform a single git operation at any given time
const mutex = withTimeout(new Mutex(), 3000, new Error('Request timed out'));
return async function(req: express.Request, res: express.Response) {
let release;
try {
release = await mutex.acquire();
const { body } = req;
if (body.action === 'info') {
res.json({
@ -233,11 +244,23 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
const diffs = await getDiffs(git, branch, cmsBranch);
const label = await git.raw(['config', branchDescription(cmsBranch)]);
const status = label && labelToStatus(label.trim(), cmsLabelPrefix || '');
const updatedAt =
diffs.length >= 0
? await runOnBranch(git, cmsBranch, async () => {
const dates = await Promise.all(
diffs.map(({ newPath }) => getUpdateDate(repoPath, newPath)),
);
return dates.reduce((a, b) => {
return a > b ? a : b;
});
})
: new Date();
const unpublishedEntry = {
collection,
slug,
status,
diffs,
updatedAt,
};
res.json(unpublishedEntry);
} else {
@ -276,13 +299,20 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
break;
}
case 'persistEntry': {
const { entry, assets, options, cmsLabelPrefix } = body.params as PersistEntryParams;
const {
cmsLabelPrefix,
entry,
dataFiles = [entry as DataFile],
assets,
options,
} = body.params as PersistEntryParams;
if (!options.useWorkflow) {
await runOnBranch(git, branch, async () => {
await commitEntry(git, repoPath, entry, assets, options.commitMessage);
await commitEntry(git, repoPath, dataFiles, assets, options.commitMessage);
});
} else {
const slug = entry.slug;
const slug = dataFiles[0].slug;
const collection = options.collectionName as string;
const contentKey = generateContentKey(collection, slug);
const cmsBranch = branchFromContentKey(contentKey);
@ -300,7 +330,7 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
d => d.binary && !assets.map(a => a.path).includes(d.path),
);
await Promise.all(toDelete.map(f => fs.unlink(path.join(repoPath, f.path))));
await commitEntry(git, repoPath, entry, assets, options.commitMessage);
await commitEntry(git, repoPath, dataFiles, assets, options.commitMessage);
// add status for new entries
if (!branchExists) {
@ -378,12 +408,24 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
options: { commitMessage },
} = body.params as DeleteFileParams;
await runOnBranch(git, branch, async () => {
await fs.unlink(path.join(repoPath, filePath));
await deleteFile(repoPath, filePath);
await commit(git, commitMessage);
});
res.json({ message: `deleted file ${filePath}` });
break;
}
case 'deleteFiles': {
const {
paths,
options: { commitMessage },
} = body.params as DeleteFilesParams;
await runOnBranch(git, branch, async () => {
await Promise.all(paths.map(filePath => deleteFile(repoPath, filePath)));
await commit(git, commitMessage);
});
res.json({ message: `deleted files ${paths.join(', ')}` });
break;
}
case 'getDeployPreview': {
res.json(null);
break;
@ -397,6 +439,8 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
} catch (e) {
logger.error(`Error handling ${JSON.stringify(req.body)}: ${e.message}`);
res.status(500).json({ error: 'Unknown error' });
} finally {
release && release();
}
};
};

View File

@ -54,13 +54,14 @@ export type PublishUnpublishedEntryParams = {
slug: string;
};
export type Entry = { slug: string; path: string; raw: string; newPath?: string };
export type DataFile = { slug: string; path: string; raw: string; newPath?: string };
export type Asset = { path: string; content: string; encoding: 'base64' };
export type PersistEntryParams = {
cmsLabelPrefix?: string;
entry: Entry;
entry?: DataFile;
dataFiles?: DataFile[];
assets: Asset[];
options: {
collectionName?: string;
@ -91,3 +92,10 @@ export type DeleteFileParams = {
commitMessage: string;
};
};
export type DeleteFilesParams = {
paths: string[];
options: {
commitMessage: string;
};
};

View File

@ -38,7 +38,7 @@ export const writeFile = async (filePath: string, content: Buffer | string) => {
};
export const deleteFile = async (repoPath: string, filePath: string) => {
await fs.unlink(path.join(repoPath, filePath));
await fs.unlink(path.join(repoPath, filePath)).catch(() => undefined);
};
const moveFile = async (from: string, to: string) => {
@ -56,3 +56,10 @@ export const move = async (from: string, to: string) => {
const allFiles = await listFiles(sourceDir, '', 100);
await Promise.all(allFiles.map(file => moveFile(file, file.replace(sourceDir, destDir))));
};
export const getUpdateDate = async (repoPath: string, filePath: string) => {
return fs
.stat(path.join(repoPath, filePath))
.then(stat => stat.mtime)
.catch(() => new Date());
};

View File

@ -373,7 +373,7 @@ const reactSelectStyles = {
: 'transparent',
paddingLeft: '22px',
}),
menu: styles => ({ ...styles, right: 0, zIndex: 300 }),
menu: styles => ({ ...styles, right: 0, zIndex: zIndex.zIndex300 }),
container: styles => ({ ...styles, padding: '0 !important' }),
indicatorSeparator: (styles, state) =>
state.hasValue && state.selectProps.isClearable

View File

@ -7,6 +7,34 @@ import DateTime from 'react-datetime';
import moment from 'moment';
import { buttons } from 'netlify-cms-ui-default';
const NowButton = ({ t, handleChange }) => {
return (
<div
css={css`
position: absolute;
right: 20px;
transform: translateY(-40px);
width: fit-content;
z-index: 1;
`}
>
<button
css={css`
${buttons.button}
${buttons.default}
${buttons.lightBlue}
${buttons.small}
`}
onClick={() => {
handleChange(moment());
}}
>
{t('editor.editorWidgets.datetime.now')}
</button>
</div>
);
};
export default class DateTimeControl extends React.Component {
static propTypes = {
field: PropTypes.object.isRequired,
@ -114,7 +142,7 @@ export default class DateTimeControl extends React.Component {
};
render() {
const { forID, value, classNameWrapper, setActiveStyle, t } = this.props;
const { forID, value, classNameWrapper, setActiveStyle, t, isDisabled } = this.props;
const { format, dateFormat, timeFormat } = this.formats;
return (
@ -134,29 +162,7 @@ export default class DateTimeControl extends React.Component {
inputProps={{ className: classNameWrapper, id: forID }}
utc={this.pickerUtc}
/>
<div
css={css`
position: absolute;
right: 20px;
transform: translateY(-40px);
width: fit-content;
z-index: 1;
`}
>
<button
css={css`
${buttons.button}
${buttons.default}
${buttons.lightBlue}
${buttons.small}
`}
onClick={() => {
this.handleChange(moment());
}}
>
{t('editor.editorWidgets.datetime.now')}
</button>
</div>
{!isDisabled && <NowButton t={t} handleChange={v => this.handleChange(v)} />}
</div>
);
}

View File

@ -5,5 +5,6 @@ export default {
summary: { type: 'string' },
minimize_collapsed: { type: 'boolean' },
label_singular: { type: 'string' },
i18n: { type: 'boolean' },
},
};

View File

@ -38,7 +38,16 @@ export default class RawEditor extends React.Component {
}
shouldComponentUpdate(nextProps, nextState) {
return !this.state.value.equals(nextState.value);
return (
!this.state.value.equals(nextState.value) ||
nextProps.value !== Plain.serialize(nextState.value)
);
}
componentDidUpdate(prevProps) {
if (prevProps.value !== this.props.value) {
this.setState({ value: Plain.deserialize(this.props.value) });
}
}
componentDidMount() {

View File

@ -121,7 +121,9 @@ export default class Editor extends React.Component {
};
shouldComponentUpdate(nextProps, nextState) {
return !this.state.value.equals(nextState.value);
const raw = nextState.value.document.toJS();
const markdown = slateToMarkdown(raw, { voidCodeBlock: this.codeBlockComponent });
return !this.state.value.equals(nextState.value) || nextProps.value !== markdown;
}
componentDidMount() {
@ -131,6 +133,14 @@ export default class Editor extends React.Component {
}
}
componentDidUpdate(prevProps) {
if (prevProps.value !== this.props.value) {
this.setState({
value: createSlateValue(this.props.value, { voidCodeBlock: !!this.codeBlockComponent }),
});
}
}
handleMarkClick = type => {
this.editor.toggleMark(type).focus();
};
@ -178,7 +188,7 @@ export default class Editor extends React.Component {
};
render() {
const { onAddAsset, getAsset, className, field, isShowModeToggle, t } = this.props;
const { onAddAsset, getAsset, className, field, isShowModeToggle, t, isDisabled } = this.props;
return (
<div
css={coreCss`
@ -202,6 +212,7 @@ export default class Editor extends React.Component {
hasBlock={this.hasBlock}
isShowModeToggle={isShowModeToggle}
t={t}
disabled={isDisabled}
/>
</EditorControlBar>
<ClassNames>

View File

@ -71,6 +71,7 @@ export default class MarkdownControl extends React.Component {
getEditorComponents,
resolveWidget,
t,
isDisabled,
} = this.props;
const { mode, pendingFocus } = this.state;
@ -90,6 +91,7 @@ export default class MarkdownControl extends React.Component {
resolveWidget={resolveWidget}
pendingFocus={pendingFocus && this.setFocusReceived}
t={t}
isDisabled={isDisabled}
/>
</div>
);

View File

@ -81,6 +81,8 @@ export default class ObjectControl extends React.Component {
editorControl: EditorControl,
controlRef,
parentIds,
isFieldDuplicate,
isFieldHidden,
} = this.props;
if (field.get('widget') === 'hidden') {
@ -89,6 +91,9 @@ export default class ObjectControl extends React.Component {
const fieldName = field.get('name');
const fieldValue = value && Map.isMap(value) ? value.get(fieldName) : value;
const isDuplicate = isFieldDuplicate && isFieldDuplicate(field);
const isHidden = isFieldHidden && isFieldHidden(field);
return (
<EditorControl
key={key}
@ -102,6 +107,10 @@ export default class ObjectControl extends React.Component {
processControlRef={controlRef && controlRef.bind(this)}
controlRef={controlRef}
parentIds={parentIds}
isDisabled={isDuplicate}
isHidden={isHidden}
isFieldDuplicate={isFieldDuplicate}
isFieldHidden={isFieldHidden}
/>
);
}

View File

@ -1,5 +1,6 @@
export default {
properties: {
collapsed: { type: 'boolean' },
i18n: { type: 'boolean' },
},
};

View File

@ -48,6 +48,110 @@ publish_mode: editorial_workflow
In order to track the statuses of unpublished entries, the GitLab implementation uses merge request labels and the BitBucket implementation uses pull request comments.
## i18n Support
The CMS can provide a side-by-side interface for authoring content in multiple languages.
Configuring the CMS for i18n support requires top-level, collection-level, and field-level configuration.
### Top level configuration
```yaml
i18n:
# Required and can be one of multiple_folders, multiple_files or single_file
# multiple_folders - persists files in `<folder>/<locale>/<slug>.<extension>`
# multiple_files - persists files in `<folder>/<slug>.<locale>.<extension>`
# single_file - persists a single file in `<folder>/<slug>.<extension>`
structure: multiple_folders
# Required - a list of locales to show in the editor UI
locales: [en, de, fr]
# Optional, defaults to the first item in locales.
# The locale to be used for fields validation and as a baseline for the entry.
default_locale: en
```
### Collection level configuration
```yaml
collections:
- name: i18n_content
    # Same as the top-level configuration, but all fields are optional and default to the top-level values.
# can also be a boolean to accept the top level defaults
i18n: true
```
### Field level configuration
```yaml
fields:
- label: Title
name: title
widget: string
# same as 'i18n: translate'. Allows translation of the title field
i18n: true
- label: Date
name: date
widget: datetime
# The date field will be duplicated from the default locale.
i18n: duplicate
- label: Body
name: body
# The markdown field will be omitted from the translation.
widget: markdown
```
Example configuration:
```yaml
i18n:
structure: multiple_folders
locales: [en, de, fr]
collections:
- name: posts
label: Posts
folder: content/posts
create: true
i18n: true
fields:
- label: Title
name: title
widget: string
i18n: true
- label: Date
name: date
widget: datetime
i18n: duplicate
- label: Body
name: body
widget: markdown
```
### Limitations
1. File collections are not supported.
2. List widgets only support `i18n: true`. `i18n` configuration on sub fields is ignored.
3. Object widgets only support `i18n: true`, and `i18n` configuration should be done per field:
```yaml
- label: 'Object'
name: 'object'
widget: 'object'
i18n: true
fields:
- { label: 'String', name: 'string', widget: 'string', i18n: true }
- { label: 'Date', name: 'date', widget: 'datetime', i18n: duplicate }
- { label: 'Boolean', name: 'boolean', widget: 'boolean', i18n: duplicate }
- {
label: 'Object',
name: 'object',
widget: 'object',
i18n: true,
field: { label: 'String', name: 'string', widget: 'string', i18n: duplicate },
}
```
## GitHub GraphQL API
Experimental support for GitHub's [GraphQL API](https://developer.github.com/v4/) is now available for the GitHub backend.

View File

@ -4395,6 +4395,13 @@ async-limiter@~1.0.0:
resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd"
integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==
async-mutex@^0.2.4:
version "0.2.4"
resolved "https://registry.yarnpkg.com/async-mutex/-/async-mutex-0.2.4.tgz#f6ea5f9cc73147f395f86fa573a2af039fe63082"
integrity sha512-fcQKOXUKMQc57JlmjBCHtkKNrfGpHyR7vu18RfuLfeTAf4hK9PgOadPR5cDrBQ682zasrLUhJFe7EKAHJOduDg==
dependencies:
tslib "^2.0.0"
async@^2.6.2:
version "2.6.3"
resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
@ -17489,7 +17496,7 @@ tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043"
integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==
tslib@^2.0.1:
tslib@^2.0.0, tslib@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.0.1.tgz#410eb0d113e5b6356490eec749603725b021b43e"
integrity sha512-SgIkNheinmEBgx1IUNirK0TUD4X9yjjBRTqqjggWCU3pUEqIk3/Uwl3yRixYKT6WjQuGiwDv4NomL3wqRCj+CQ==