chore: update prettier (#5412)
parent 46738492a0
commit 39f113715a

.gitattributes (new file, 1 line)
@@ -0,0 +1 @@
+* text=auto eol=lf

@@ -1,4 +1,5 @@
 {
+"arrowParens": "avoid",
 "trailingComma": "all",
 "singleQuote": true,
 "printWidth": 100

@@ -148,7 +148,7 @@
 "nock": "^13.0.0",
 "node-fetch": "^2.3.0",
 "npm-run-all": "^4.1.5",
-"prettier": "^1.19.1",
+"prettier": "^2.3.0",
 "react": "^16.12.0",
 "react-dom": "^16.12.0",
 "react-test-renderer": "^16.8.4",
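Note: the remaining hunks are mechanical reformatting from re-running Prettier after the 1.19 -> 2.3 upgrade. A minimal sketch (not taken from this repository) of the three patterns that account for most of the churn below:

    // Prettier 2.x drops the parentheses around chained `as unknown as` assertions:
    type User = { name: string };
    const user = JSON.parse('{"name":"cms"}') as unknown as User;

    // Prettier 2.x puts a space after the `function` keyword in anonymous functions:
    const greet = function (u: User) {
      return `hello ${u.name}`;
    };

    // Prettier 2.x collapses short method chains onto a single line:
    const slug = 'content/posts/first-post.md'.split('/').pop()?.replace('.md', '');

    console.log(greet(user), slug);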
@@ -277,9 +277,9 @@ export default class BitbucketBackend implements Implementation {
 }

 apiRequestFunction = async (req: ApiRequest) => {
-const token = (this.refreshedTokenPromise
-? await this.refreshedTokenPromise
-: this.token) as string;
+const token = (
+this.refreshedTokenPromise ? await this.refreshedTokenPromise : this.token
+) as string;

 const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
 const response: Response = await unsentRequest.performRequest(authorizedRequest);

@@ -310,10 +310,10 @@ export default class API {
 let responseStatus = 500;

 try {
-const req = (unsentRequest.fromFetchArguments(url, {
+const req = unsentRequest.fromFetchArguments(url, {
 ...options,
 headers,
-}) as unknown) as ApiRequest;
+}) as unknown as ApiRequest;
 const response = await requestWithBackoff(this, req);
 responseStatus = response.status;
 const parsedResponse = await parser(response);

@@ -366,8 +366,7 @@ export default class API {
 .catch(() => {
 // Meta ref doesn't exist
 const readme = {
-raw:
-'# Netlify CMS\n\nThis tree is used by the Netlify CMS to store metadata information for specific files and branches.',
+raw: '# Netlify CMS\n\nThis tree is used by the Netlify CMS to store metadata information for specific files and branches.',
 };

 return this.uploadBlob(readme)
@@ -808,7 +807,8 @@ export default class API {
 let branches: string[];
 if (this.useOpenAuthoring) {
 // open authoring branches can exist without a pr
-const cmsBranches: Octokit.GitListMatchingRefsResponse = await this.getOpenAuthoringBranches();
+const cmsBranches: Octokit.GitListMatchingRefsResponse =
+await this.getOpenAuthoringBranches();
 branches = cmsBranches.map(b => b.ref.substring('refs/heads/'.length));
 // filter irrelevant branches
 const branchesWithFilter = await Promise.all(

@@ -1036,7 +1036,7 @@ export default class API {
 author,
 committer,
 );
-return (newCommit as unknown) as GitHubCompareCommit;
+return newCommit as unknown as GitHubCompareCommit;
 } else {
 return commit;
 }

@@ -75,10 +75,8 @@ export default class GitHubAuthenticationPage extends React.Component {
 };
 const auth = new NetlifyAuthenticator(cfg);

-const {
-open_authoring: openAuthoring = false,
-auth_scope: authScope = '',
-} = this.props.config.backend;
+const { open_authoring: openAuthoring = false, auth_scope: authScope = '' } =
+this.props.config.backend;

 const scope = authScope || (openAuthoring ? 'public_repo' : 'repo');
 auth.authenticate({ provider: 'github', scope }, (err, data) => {
@@ -300,7 +300,7 @@ export default class GraphQLAPI extends API {

 const mapped = pullRequests.nodes.map(transformPullRequest);

-return ((mapped as unknown) as Octokit.PullsListResponseItem[]).filter(
+return (mapped as unknown as Octokit.PullsListResponseItem[]).filter(
 pr => pr.head.ref.startsWith(`${CMS_BRANCH_PREFIX}/`) && predicate(pr),
 );
 }

@@ -673,7 +673,7 @@ export default class GraphQLAPI extends API {
 },
 });
 const { pullRequest } = data!.createPullRequest;
-return (transformPullRequest(pullRequest) as unknown) as Octokit.PullsCreateResponse;
+return transformPullRequest(pullRequest) as unknown as Octokit.PullsCreateResponse;
 }

 async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {

@@ -537,9 +537,9 @@ export default class GitHub implements Implementation {
 }

 const readFile = (path: string, id: string | null | undefined) =>
-this.api!.readFile(path, id, { repoURL: this.api!.originRepoURL }).catch(() => '') as Promise<
-string
->;
+this.api!.readFile(path, id, { repoURL: this.api!.originRepoURL }).catch(
+() => '',
+) as Promise<string>;

 const entries = await entriesByFiles(
 result.files,
@@ -245,10 +245,8 @@ export default class API {
 MAINTAINER_ACCESS = 40;

 hasWriteAccess = async () => {
-const {
-shared_with_groups: sharedWithGroups,
-permissions,
-}: GitLabRepo = await this.requestJSON(this.repoURL);
+const { shared_with_groups: sharedWithGroups, permissions }: GitLabRepo =
+await this.requestJSON(this.repoURL);

 const { project_access: projectAccess, group_access: groupAccess } = permissions;
 if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {

@@ -287,10 +287,7 @@ describe('gitlab backend', () => {
 function interceptFiles(backend, path) {
 const api = mockApi(backend);
 const url = `${expectedRepoUrl}/repository/files/${encodeURIComponent(path)}/raw`;
-api
-.get(url)
-.query(true)
-.reply(200, mockRepo.files[path]);
+api.get(url).query(true).reply(200, mockRepo.files[path]);

 api
 .get(`${expectedRepoUrl}/repository/commits`)

@@ -391,10 +388,7 @@ describe('gitlab backend', () => {

 it('returns an entry from folder collection', async () => {
 const entryTree = mockRepo.tree[collectionContentConfig.folder][0];
-const slug = entryTree.path
-.split('/')
-.pop()
-.replace('.md', '');
+const slug = entryTree.path.split('/').pop().replace('.md', '');

 interceptFiles(backend, entryTree.path);
 interceptCollection(backend, collectionContentConfig);
@@ -80,7 +80,7 @@ export default class ProxyBackend implements Implementation {
 }

 authenticate() {
-return (Promise.resolve() as unknown) as Promise<User>;
+return Promise.resolve() as unknown as Promise<User>;
 }

 logout() {

@@ -54,7 +54,7 @@ function getFile(path: string, tree: RepoTree) {
 while (obj && segments.length) {
 obj = obj[segments.shift() as string] as RepoTree;
 }
-return ((obj as unknown) as RepoFile) || {};
+return (obj as unknown as RepoFile) || {};
 }

 function writeFile(path: string, content: string | AssetProxy, tree: RepoTree) {

@@ -146,7 +146,7 @@ export default class TestBackend implements Implementation {
 }

 authenticate() {
-return (Promise.resolve() as unknown) as Promise<User>;
+return Promise.resolve() as unknown as Promise<User>;
 }

 logout() {
packages/netlify-cms-core/index.d.ts
@@ -516,9 +516,12 @@ declare module 'netlify-cms-core' {
 };
 }

-type GetAssetFunction = (
-asset: string,
-) => { url: string; path: string; field?: any; fileObj: File };
+type GetAssetFunction = (asset: string) => {
+url: string;
+path: string;
+field?: any;
+fileObj: File;
+};

 export type PreviewTemplateComponentProps = {
 entry: Map<string, any>;

@@ -436,9 +436,9 @@ describe('entries', () => {
 });

 it('should not return error on meta path field', () => {
-expect(
-validateMetaField(null, null, fromJS({ meta: true, name: 'other' }), null, t),
-).toEqual({ error: false });
+expect(validateMetaField(null, null, fromJS({ meta: true, name: 'other' }), null, t)).toEqual(
+{ error: false },
+);
 });

 it('should return error on empty path', () => {

@@ -9,9 +9,8 @@ import { State } from '../../types/redux';
 import AssetProxy from '../../valueObjects/AssetProxy';

 const middlewares = [thunk];
-const mockStore = configureMockStore<Partial<State>, ThunkDispatch<State, {}, AnyAction>>(
-middlewares,
-);
+const mockStore =
+configureMockStore<Partial<State>, ThunkDispatch<State, {}, AnyAction>>(middlewares);
 const mockedSelectMediaFilePath = mocked(selectMediaFilePath);

 jest.mock('../../reducers/entries');
@@ -474,9 +474,11 @@ export async function handleLocalBackend(originalConfig: CmsConfig) {
 return originalConfig;
 }

-const { proxyUrl, publish_modes: publishModes, type: backendType } = await detectProxyServer(
-originalConfig.local_backend,
-);
+const {
+proxyUrl,
+publish_modes: publishModes,
+type: backendType,
+} = await detectProxyServer(originalConfig.local_backend);

 if (!proxyUrl) {
 return originalConfig;

@@ -519,7 +519,7 @@ export function unpublishPublishedEntry(collection: Collection, slug: string) {
 const state = getState();
 const backend = currentBackend(state.config);
 const entry = selectEntry(state, collection.get('name'), slug);
-const entryDraft = (Map().set('entry', entry) as unknown) as EntryDraft;
+const entryDraft = Map().set('entry', entry) as unknown as EntryDraft;
 dispatch(unpublishedEntryPersisting(collection, slug));
 return backend
 .deleteEntry(state, collection, slug)

@@ -1322,7 +1322,7 @@ export function resolveBackend(config: CmsConfig) {
 }
 }

-export const currentBackend = (function() {
+export const currentBackend = (function () {
 let backend: Backend;

 return (config: CmsConfig) => {
@@ -25,7 +25,7 @@ import Header from './Header';

 TopBarProgress.config({
 barColors: {
-'0': colors.active,
+0: colors.active,
 '1.0': colors.active,
 },
 shadowBlur: 0,

@@ -192,7 +192,7 @@ export class Editor extends React.Component {
 window.removeEventListener('beforeunload', this.exitBlocker);
 }

-createBackup = debounce(function(entry, collection) {
+createBackup = debounce(function (entry, collection) {
 this.props.persistLocalBackup(entry, collection);
 }, 2000);

@@ -202,14 +202,8 @@ export class Editor extends React.Component {
 };

 handleChangeStatus = newStatusName => {
-const {
-entryDraft,
-updateUnpublishedEntryStatus,
-collection,
-slug,
-currentStatus,
-t,
-} = this.props;
+const { entryDraft, updateUnpublishedEntryStatus, collection, slug, currentStatus, t } =
+this.props;
 if (entryDraft.get('hasChanged')) {
 window.alert(t('editor.editor.onUpdatingWithUnsavedChanges'));
 return;
@@ -318,15 +312,8 @@ export class Editor extends React.Component {
 };

 handleDeleteUnpublishedChanges = async () => {
-const {
-entryDraft,
-collection,
-slug,
-deleteUnpublishedEntry,
-loadEntry,
-isModification,
-t,
-} = this.props;
+const { entryDraft, collection, slug, deleteUnpublishedEntry, loadEntry, isModification, t } =
+this.props;
 if (
 entryDraft.get('hasChanged') &&
 !window.confirm(t('editor.editor.onDeleteUnpublishedChangesWithUnsavedChanges'))

@@ -20,7 +20,7 @@ export function resolveIntegrations(interationsConfig, getToken) {
 return integrationInstances;
 }

-export const getIntegrationProvider = (function() {
+export const getIntegrationProvider = (function () {
 let integrations = null;

 return (interationsConfig, getToken, provider) => {
@@ -182,7 +182,7 @@ export function previewUrlFormatter(
 let fields = entry.get('data') as Map<string, string>;
 fields = addFileTemplateFields(entry.get('path'), fields, collection.get('folder'));
 const dateFieldName = getDateField() || selectInferedField(collection, 'date');
-const date = parseDateFromEntry((entry as unknown) as Map<string, unknown>, dateFieldName);
+const date = parseDateFromEntry(entry as unknown as Map<string, unknown>, dateFieldName);

 // Prepare and sanitize slug variables only, leave the rest of the
 // `preview_path` template as is.

@@ -213,7 +213,7 @@ export function summaryFormatter(summaryTemplate: string, entry: EntryMap, colle
 let entryData = entry.get('data');
 const date =
 parseDateFromEntry(
-(entry as unknown) as Map<string, unknown>,
+entry as unknown as Map<string, unknown>,
 selectInferedField(collection, 'date'),
 ) || null;
 const identifier = entryData.getIn(keyToPathArray(selectIdentifier(collection) as string));

@@ -247,7 +247,7 @@ export function folderFormatter(

 const date =
 parseDateFromEntry(
-(entry as unknown) as Map<string, unknown>,
+entry as unknown as Map<string, unknown>,
 selectInferedField(collection, 'date'),
 ) || null;
 const identifier = fields.getIn(keyToPathArray(selectIdentifier(collection) as string));
@@ -36,7 +36,8 @@ export function stripProtocol(urlString: string) {
 * but JS stores strings as UTF-16/UCS-2 internally, so we should not normalize or re-encode.
 */
 const uriChars = /[\w\-.~]/i;
-const ucsChars = /[\xA0-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}]/u;
+const ucsChars =
+/[\xA0-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}]/u;

 function validURIChar(char: string) {
 return uriChars.test(char);

@@ -80,9 +81,7 @@ export function sanitizeURI(

 // `Array.from` must be used instead of `String.split` because
 // `split` converts things like emojis into UTF-16 surrogate pairs.
-return Array.from(str)
-.map(getCharReplacer(encoding, replacement))
-.join('');
+return Array.from(str).map(getCharReplacer(encoding, replacement)).join('');
 }

 export function sanitizeChar(char: string, options?: CmsSlug) {

@@ -95,8 +94,11 @@ export function sanitizeSlug(str: string, options?: CmsSlug) {
 throw new Error('The input slug must be a string.');
 }

-const { encoding, clean_accents: stripDiacritics, sanitize_replacement: replacement } =
-options || {};
+const {
+encoding,
+clean_accents: stripDiacritics,
+sanitize_replacement: replacement,
+} = options || {};

 const sanitizedSlug = flow([
 ...(stripDiacritics ? [diacritics.remove] : []),
@@ -25,7 +25,7 @@ function handleInsert(url: string) {
 }

 const initializeMediaLibrary = once(async function initializeMediaLibrary(name, options) {
-const lib = (getMediaLibrary(name) as unknown) as MediaLibrary | undefined;
+const lib = getMediaLibrary(name) as unknown as MediaLibrary | undefined;
 if (!lib) {
 const err = new Error(
 `Missing external media library '${name}'. Please use 'registerMediaLibrary' to register it.`,

@@ -368,30 +368,15 @@ describe('collections', () => {
 });

 expect(selectField(collection, 'en.title')).toBe(
-collection
-.get('fields')
-.get(0)
-.get('fields')
-.get(0),
+collection.get('fields').get(0).get('fields').get(0),
 );

 expect(selectField(collection, 'it.title.subTitle')).toBe(
-collection
-.get('fields')
-.get(2)
-.get('field')
-.get('fields')
-.get(0),
+collection.get('fields').get(2).get('field').get('fields').get(0),
 );

 expect(selectField(collection, 'fr.title.variableType')).toBe(
-collection
-.get('fields')
-.get(3)
-.get('fields')
-.get(0)
-.get('types')
-.get(0),
+collection.get('fields').get(3).get('fields').get(0).get('types').get(0),
 );
 });
 });
@@ -144,10 +144,7 @@ export function selectFieldsWithMediaFolders(collection: Collection, slug: strin
 const fields = collection.get('fields').toArray();
 return getFieldsWithMediaFolders(fields);
 } else if (collection.has('files')) {
-const fields =
-getFileFromSlug(collection, slug)
-?.get('fields')
-.toArray() || [];
+const fields = getFileFromSlug(collection, slug)?.get('fields').toArray() || [];
 return getFieldsWithMediaFolders(fields);
 }

@@ -317,7 +314,8 @@ export function selectInferedField(collection: Collection, fieldName: string) {
 if (fieldName === 'title' && collection.get('identifier_field')) {
 return selectIdentifier(collection);
 }
-const inferableField = (INFERABLE_FIELDS as Record<
+const inferableField = (
+INFERABLE_FIELDS as Record<
 string,
 {
 type: string;

@@ -326,7 +324,8 @@ export function selectInferedField(collection: Collection, fieldName: string) {
 fallbackToFirstField: boolean;
 showError: boolean;
 }
->)[fieldName];
+>
+)[fieldName];
 const fields = collection.get('fields');
 let field;

@@ -105,10 +105,9 @@ function persistSort(sort: Sort | undefined) {
 const storageSort: StorageSort = {};
 sort.keySeq().forEach(key => {
 const collection = key as string;
-const sortObjects = (sort
-.get(collection)
-.valueSeq()
-.toJS() as SortObject[]).map((value, index) => ({ ...value, index }));
+const sortObjects = (sort.get(collection).valueSeq().toJS() as SortObject[]).map(
+(value, index) => ({ ...value, index }),
+);

 sortObjects.forEach(value => {
 set(storageSort, [collection, value.key], value);

@@ -333,7 +332,7 @@ function entries(
 }

 case CHANGE_VIEW_STYLE: {
-const payload = (action.payload as unknown) as ChangeViewStylePayload;
+const payload = action.payload as unknown as ChangeViewStylePayload;
 const { style } = payload;
 const newState = state.withMutations(map => {
 map.setIn(['viewStyle'], style);

@@ -492,10 +491,8 @@ export function selectGroups(state: Entries, collection: Collection) {
 return [];
 }

-let groups: Record<
-string,
-{ id: string; label: string; value: string | boolean | undefined }
-> = {};
+let groups: Record<string, { id: string; label: string; value: string | boolean | undefined }> =
+{};
 const groupedEntries = groupBy(entries.toArray(), entry => {
 const group = getGroup(entry, selectedGroup);
 groups = { ...groups, [group.id]: group };
@@ -262,7 +262,7 @@ export function selectMediaFileByPath(state: State, path: string) {
 export function selectMediaDisplayURL(state: State, id: string) {
 const displayUrlState = state.mediaLibrary.getIn(
 ['displayURLs', id],
-(Map() as unknown) as DisplayURLState,
+Map() as unknown as DisplayURLState,
 );
 return displayUrlState;
 }

@@ -7,7 +7,7 @@ import { State } from '../types/redux';
 import { Reducer } from 'react';

 const store = createStore<State | undefined, AnyAction, unknown, unknown>(
-(createRootReducer() as unknown) as Reducer<State | undefined, AnyAction>,
+createRootReducer() as unknown as Reducer<State | undefined, AnyAction>,
 composeWithDevTools(applyMiddleware(thunkMiddleware as ThunkMiddleware<State>, waitUntilAction)),
 );

@@ -51,7 +51,8 @@ export const waitUntilAction: Middleware<{}, State, Dispatch> = ({
 }
 }

-return (next: Dispatch<AnyAction>) => (action: AnyAction): null | AnyAction => {
+return (next: Dispatch<AnyAction>) =>
+(action: AnyAction): null | AnyAction => {
 if (action.type === WAIT_UNTIL_ACTION) {
 pending.push(action as WaitAction);
 return null;
@@ -3,7 +3,7 @@ import { mocked } from 'ts-jest/utils';

 jest.mock('history');

-const history = ({ push: jest.fn(), replace: jest.fn() } as unknown) as History;
+const history = { push: jest.fn(), replace: jest.fn() } as unknown as History;
 const mockedCreateHashHistory = mocked(createHashHistory);
 mockedCreateHashHistory.mockReturnValue(history);

@@ -11,7 +11,7 @@ export interface StaticallyTypedRecord<T> {
 K1 extends keyof T,
 K2 extends keyof T[K1],
 K3 extends keyof T[K1][K2],
-V extends T[K1][K2][K3]
+V extends T[K1][K2][K3],
 >(
 keys: [K1, K2, K3],
 defaultValue?: V,

@@ -26,10 +26,7 @@ function generateVerifierCode() {
 async function createCodeChallenge(codeVerifier) {
 const sha = await sha256(codeVerifier);
 // https://tools.ietf.org/html/rfc7636#appendix-A
-return btoa(sha)
-.split('=')[0]
-.replace(/\+/g, '-')
-.replace(/\//g, '_');
+return btoa(sha).split('=')[0].replace(/\+/g, '-').replace(/\//g, '_');
 }

 const CODE_VERIFIER_STORAGE_KEY = 'netlify-cms-pkce-verifier-code';
@@ -3,11 +3,10 @@ import unsentRequest from '../unsentRequest';
 describe('unsentRequest', () => {
 describe('withHeaders', () => {
 it('should create new request with headers', () => {
-expect(
-unsentRequest
-.withHeaders({ Authorization: 'token' })('path')
-.toJS(),
-).toEqual({ url: 'path', headers: { Authorization: 'token' } });
+expect(unsentRequest.withHeaders({ Authorization: 'token' })('path').toJS()).toEqual({
+url: 'path',
+headers: { Authorization: 'token' },
+});
 });

 it('should add headers to existing request', () => {

@@ -7,7 +7,7 @@ export default function loadScript(url) {
 const head = document.getElementsByTagName('head')[0];
 const script = document.createElement('script');
 script.src = url;
-script.onload = script.onreadystatechange = function() {
+script.onload = script.onreadystatechange = function () {
 if (
 !done &&
 (!this.readyState || this.readyState === 'loaded' || this.readyState === 'complete')

@@ -60,13 +60,7 @@ function toURL(req) {
 }

 function toFetchArguments(req) {
-return [
-toURL(req),
-req
-.remove('url')
-.remove('params')
-.toJS(),
-];
+return [toURL(req), req.remove('url').remove('params').toJS()];
 }

 function maybeRequestArg(req) {
@@ -601,7 +601,7 @@ describe('joi', () => {
 } as express.Request;
 const json = jest.fn();
 const status = jest.fn(() => ({ json }));
-const res: express.Response = ({ status } as unknown) as express.Response;
+const res: express.Response = { status } as unknown as express.Response;

 joi(defaultSchema())(req, res, next);

@@ -132,9 +132,7 @@ export function defaultSchema({ path = requiredString } = {}) {
 cmsLabelPrefix: Joi.string().optional(),
 entry: dataFile, // entry is kept for backwards compatibility
 dataFiles: Joi.array().items(dataFile),
-assets: Joi.array()
-.items(asset)
-.required(),
+assets: Joi.array().items(asset).required(),
 options: Joi.object({
 collectionName: Joi.string(),
 commitMessage: requiredString,

@@ -207,10 +205,7 @@ export function defaultSchema({ path = requiredString } = {}) {
 is: 'deleteFiles',
 then: defaultParams
 .keys({
-paths: Joi.array()
-.items(path)
-.min(1)
-.required(),
+paths: Joi.array().items(path).min(1).required(),
 options: Joi.object({
 commitMessage: requiredString,
 }).required(),
@@ -24,7 +24,7 @@ type FsOptions = {
 };

 export function localFsMiddleware({ repoPath, logger }: FsOptions) {
-return async function(req: express.Request, res: express.Response) {
+return async function (req: express.Request, res: express.Response) {
 try {
 const { body } = req;

@@ -123,7 +123,7 @@ describe('localGitMiddleware', () => {
 describe('localGitMiddleware', () => {
 const json = jest.fn();
 const status = jest.fn(() => ({ json }));
-const res: express.Response = ({ status } as unknown) as express.Response;
+const res: express.Response = { status } as unknown as express.Response;

 const repoPath = '.';

@@ -173,7 +173,7 @@ export function localGitMiddleware({ repoPath, logger }: GitOptions) {
 // we can only perform a single git operation at any given time
 const mutex = withTimeout(new Mutex(), 3000, new Error('Request timed out'));

-return async function(req: express.Request, res: express.Response) {
+return async function (req: express.Request, res: express.Response) {
 let release;
 try {
 release = await mutex.acquire();

@@ -345,12 +345,8 @@ export function localGitMiddleware({ repoPath, logger }: GitOptions) {
 break;
 }
 case 'updateUnpublishedEntryStatus': {
-const {
-collection,
-slug,
-newStatus,
-cmsLabelPrefix,
-} = body.params as UpdateUnpublishedEntryStatusParams;
+const { collection, slug, newStatus, cmsLabelPrefix } =
+body.params as UpdateUnpublishedEntryStatusParams;
 const contentKey = generateContentKey(collection, slug);
 const cmsBranch = branchFromContentKey(contentKey);
 const description = statusToLabel(newStatus, cmsLabelPrefix || '');
@@ -3,10 +3,7 @@ import path from 'path';
 import { promises as fs } from 'fs';

 function sha256(buffer: Buffer) {
-return crypto
-.createHash('sha256')
-.update(buffer)
-.digest('hex');
+return crypto.createHash('sha256').update(buffer).digest('hex');
 }

 // normalize windows os path format

@@ -17,14 +17,8 @@ function BooleanBackground({ isActive, ...props }) {

 export default class BooleanControl extends React.Component {
 render() {
-const {
-value,
-forID,
-onChange,
-classNameWrapper,
-setActiveStyle,
-setInactiveStyle,
-} = this.props;
+const { value, forID, onChange, classNameWrapper, setActiveStyle, setInactiveStyle } =
+this.props;
 return (
 <div className={classNameWrapper}>
 <Toggle

@@ -111,15 +111,8 @@ export default class ColorControl extends React.Component {
 this.props.onChange(formattedColor);
 };
 render() {
-const {
-forID,
-value,
-field,
-onChange,
-classNameWrapper,
-setActiveStyle,
-setInactiveStyle,
-} = this.props;
+const { forID, value, field, onChange, classNameWrapper, setActiveStyle, setInactiveStyle } =
+this.props;

 const allowInput = field.get('allowInput', false);

@@ -24,7 +24,7 @@ function NowButton({ t, handleChange }) {
 ${buttons.default}
 ${buttons.lightBlue}
 ${buttons.small}
 `}
 onClick={() => {
 handleChange(moment());
 }}

@@ -40,10 +40,7 @@ describe.skip('slate', () => {
 );

 function fn(editor) {
-editor
-.deleteBackward()
-.insertText('b')
-.setBlocks('heading-one');
+editor.deleteBackward().insertText('b').setBlocks('heading-one');
 }

 const [actual, expected] = run(input, output, fn);

@@ -19,16 +19,10 @@ function ForceInsert({ defaultType }) {
 forceInsertBeforeNode(editor, node) {
 const block = { type: defaultType, object: 'block' };
 const parent = editor.value.document.getParent(node.key);
-return editor
-.insertNodeByKey(parent.key, 0, block)
-.moveToStartOfNode(parent)
-.focus();
+return editor.insertNodeByKey(parent.key, 0, block).moveToStartOfNode(parent).focus();
 },
 forceInsertAfterNode(editor, node) {
-return editor
-.moveToEndOfNode(node)
-.insertBlock(defaultType)
-.focus();
+return editor.moveToEndOfNode(node).insertBlock(defaultType).focus();
 },
 moveToEndOfDocument(editor) {
 const lastBlock = editor.value.document.nodes.last();
@@ -7,10 +7,7 @@ function LineBreak() {
 if (!isShiftEnter) {
 return next();
 }
-return editor
-.insertInline('break')
-.insertText('')
-.moveToStartOfNextText();
+return editor.insertInline('break').insertText('').moveToStartOfNextText();
 },
 };
 }

@@ -25,24 +25,7 @@ const skips = [
 { number: 507, reason: 'Remark allows a space between link alt and url' },
 {
 number: [
-511,
-516,
-525,
-528,
-529,
-530,
-532,
-533,
-534,
-540,
-541,
-542,
-543,
-546,
-548,
-560,
-565,
-567,
+511, 516, 525, 528, 529, 530, 532, 533, 534, 540, 541, 542, 543, 546, 548, 560, 565, 567,
 ],
 reason: 'we convert link references to standard links, but Remark also fails these',
 },
@@ -81,7 +64,7 @@ const parse = flow([markdownToSlate, slateToMarkdown]);
 * tests, of which we're passing about 300 as of introduction of this suite. To
 * work on improving Commonmark support, update __fixtures__/commonmarkExpected.json
 */
-describe.skip('Commonmark support', function() {
+describe.skip('Commonmark support', function () {
 const specs =
 onlys.length > 0
 ? commonmarkSpec.filter(({ number }) => onlys.includes(number))

@@ -23,8 +23,7 @@ describe('markdownToSlate', () => {
 },
 {
 object: 'text',
-text:
-'this_mark, and your charge is but a penny; tothisa penny more; and so on to the full glass—the Cape Horn measure, which you may gulp down for a shilling.\\n\\nUpon entering the place I found a number of young seamen gathered about a table, examining by a dim light divers specimens ofskrimshander',
+text: 'this_mark, and your charge is but a penny; tothisa penny more; and so on to the full glass—the Cape Horn measure, which you may gulp down for a shilling.\\n\\nUpon entering the place I found a number of young seamen gathered about a table, examining by a dim light divers specimens ofskrimshander',
 marks: [
 {
 type: 'italic',

@@ -3,10 +3,7 @@ import markdownToRemark from 'remark-parse';
 import remarkAllowHtmlEntities from '../remarkAllowHtmlEntities';

 function process(markdown) {
-const mdast = unified()
-.use(markdownToRemark)
-.use(remarkAllowHtmlEntities)
-.parse(markdown);
+const mdast = unified().use(markdownToRemark).use(remarkAllowHtmlEntities).parse(markdown);

 /**
 * The MDAST will look like:
|
@ -4,9 +4,7 @@ import remarkEscapeMarkdownEntities from '../remarkEscapeMarkdownEntities';
|
|||||||
|
|
||||||
function process(text) {
|
function process(text) {
|
||||||
const tree = u('root', [u('text', text)]);
|
const tree = u('root', [u('text', text)]);
|
||||||
const escapedMdast = unified()
|
const escapedMdast = unified().use(remarkEscapeMarkdownEntities).runSync(tree);
|
||||||
.use(remarkEscapeMarkdownEntities)
|
|
||||||
.runSync(tree);
|
|
||||||
|
|
||||||
return escapedMdast.children[0].value;
|
return escapedMdast.children[0].value;
|
||||||
}
|
}
|
||||||
|
@ -12,10 +12,7 @@ function input(markdown) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function output(markdown) {
|
function output(markdown) {
|
||||||
return unified()
|
return unified().use(markdownToRemark).use(remarkToMarkdown).processSync(markdown).contents;
|
||||||
.use(markdownToRemark)
|
|
||||||
.use(remarkToMarkdown)
|
|
||||||
.processSync(markdown).contents;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('remarkPaddedLinks', () => {
|
describe('remarkPaddedLinks', () => {
|
||||||
|
@ -4,9 +4,7 @@ import remarkStripTrailingBreaks from '../remarkStripTrailingBreaks';
|
|||||||
|
|
||||||
function process(children) {
|
function process(children) {
|
||||||
const tree = u('root', children);
|
const tree = u('root', children);
|
||||||
const strippedMdast = unified()
|
const strippedMdast = unified().use(remarkStripTrailingBreaks).runSync(tree);
|
||||||
.use(remarkStripTrailingBreaks)
|
|
||||||
.runSync(tree);
|
|
||||||
|
|
||||||
return strippedMdast.children;
|
return strippedMdast.children;
|
||||||
}
|
}
|
||||||
|
@@ -72,9 +72,7 @@ export function markdownToRemark(markdown) {
 /**
 * Further transform the MDAST with plugins.
 */
-const result = unified()
-.use(remarkSquashReferences)
-.runSync(parsed);
+const result = unified().use(remarkSquashReferences).runSync(parsed);

 return result;
 }

@@ -173,9 +171,7 @@ export function markdownToHtml(markdown, { getAsset, resolveWidget } = {}) {
 * pastes.
 */
 export function htmlToSlate(html) {
-const hast = unified()
-.use(htmlToRehype, { fragment: true })
-.parse(html);
+const hast = unified().use(htmlToRehype, { fragment: true }).parse(html);

 const mdast = unified()
 .use(rehypePaperEmoji)

@@ -233,11 +233,8 @@ export default function slateToRemark(raw, { voidCodeBlock }) {
 const node = markNodes[0];
 convertedNodes.push(convertInlineNode(node, convertInlineAndTextChildren(node.nodes)));
 } else {
-const {
-leadingWhitespace,
-trailingWhitespace,
-centerNodes,
-} = normalizeFlankingWhitespace(markNodes);
+const { leadingWhitespace, trailingWhitespace, centerNodes } =
+normalizeFlankingWhitespace(markNodes);
 const children = convertInlineAndTextChildren(centerNodes);
 const markNode = u(markMap[markType], children);

@@ -247,15 +247,8 @@ export default class RelationControl extends React.Component {
 }, 500);

 render() {
-const {
-value,
-field,
-forID,
-classNameWrapper,
-setActiveStyle,
-setInactiveStyle,
-queryHits,
-} = this.props;
+const { value, field, forID, classNameWrapper, setActiveStyle, setInactiveStyle, queryHits } =
+this.props;
 const isMultiple = this.isMultiple();
 const isClearable = !field.get('required', true) || isMultiple;

@@ -382,10 +382,10 @@ describe('Relation widget', () => {

 expect(onChangeSpy).toHaveBeenCalledTimes(2);
 expect(onChangeSpy).toHaveBeenCalledWith(1, {
-numbers: { numbers_collection: { '1': { index: 1, slug: 'post-1', title: 'post # 1' } } },
+numbers: { numbers_collection: { 1: { index: 1, slug: 'post-1', title: 'post # 1' } } },
 });
 expect(onChangeSpy).toHaveBeenCalledWith(2, {
-numbers: { numbers_collection: { '2': { index: 2, slug: 'post-2', title: 'post # 2' } } },
+numbers: { numbers_collection: { 2: { index: 2, slug: 'post-2', title: 'post # 2' } } },
 });
 });

@@ -28,14 +28,8 @@ export default class TextControl extends React.Component {
 }

 render() {
-const {
-forID,
-value,
-onChange,
-classNameWrapper,
-setActiveStyle,
-setInactiveStyle,
-} = this.props;
+const { forID, value, onChange, classNameWrapper, setActiveStyle, setInactiveStyle } =
+this.props;

 return (
 <Textarea

@@ -57,7 +57,7 @@ module.exports = {
 noInlineHighlight: true,
 },
 },
-]
+],
 },
 },
 'gatsby-transformer-yaml',
@@ -9,15 +9,8 @@ import BlogPostTemplate from '../components/blog-post-template';

 function BlogPost({ data }) {
 const { html, frontmatter } = data.markdownRemark;
-const {
-author,
-title,
-date,
-description,
-meta_description,
-twitter_image,
-canonical_url,
-} = frontmatter;
+const { author, title, date, description, meta_description, twitter_image, canonical_url } =
+frontmatter;
 const { siteUrl } = data.site.siteMetadata;
 const twitterImageUrl =
 twitter_image && `${trimEnd(siteUrl, '/')}/${trimStart(twitter_image, '/')}`;

@@ -7,10 +7,7 @@ import Layout from '../components/layout';
 import DocsTemplate from '../components/docs-template';

 function filenameFromPath(p) {
-return p
-.split('/')
-.slice(-1)[0]
-.split('.')[0];
+return p.split('/').slice(-1)[0].split('.')[0];
 }

 function toMenu(menu, nav) {
@@ -14244,10 +14244,10 @@ prettier-linter-helpers@^1.0.0:
 dependencies:
 fast-diff "^1.1.2"

-prettier@^1.19.1:
-version "1.19.1"
-resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb"
-integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==
+prettier@^2.3.0:
+version "2.3.0"
+resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18"
+integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w==

 pretty-bytes@^5.6.0:
 version "5.6.0"