feat: commit media with post (#2851)

* feat: commit media with post - initial commit
* feat: add draft media indication
* feat: sync UI media files with GitHub on entry load
* feat: bug fixes
* feat: delete media files from github when removed from library
* test: add GitHub backend tests
* test: add unit tests
* fix: meta data object files are not updated
* feat: used nested paths when update a tree instead of recursion
* feat(test-backend): update test backend to persist media file with entry
* test(e2e): re-record fixtures data
* chore: code cleanup
* chore: code cleanup
* fix: wait for library to load before adding entry media files
* chore: code cleanup
* fix: don't add media files on entry when not a draft
* fix: sync media library after draft entry was published
* feat: update media library card draft style, add tests
* test: add Editor unit tests
* chore: test code cleanup
* fix: publishing an entry from workflow tab throws an error
* fix: duplicate media files when using test backend
* refactor: fix lodash import
* chore: update translations and yarn file after rebase
* test(cypress): update recorded data
* fix(test-backend): fix mapping of media files on publish
parent 0898767fc9
commit 6515dee871
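The key structural change in this PR is that `updateTree` now receives a flat list of files with nested, repo-relative paths and builds a single Git tree update, instead of recursively composing a nested `fileTree` object; media files removed from the library are carried along with `remove: true` so they are deleted in the same commit. A minimal sketch of that flat-tree approach, mirroring the new `API.updateTree` shown in the diff below (the `api.createTree` helper is assumed to POST the tree to GitHub's `/git/trees` endpoint with a `base_tree`):

```js
// Sketch only, not a drop-in replacement for the real API class.
async function updateTree(api, baseSha, files) {
  const tree = files.map(file => ({
    path: file.path.replace(/^\/+/, ''), // the source uses lodash trimStart(path, '/')
    mode: '100644',
    type: 'blob',
    sha: file.remove ? null : file.sha, // a null sha tells Git to delete the path
  }));
  const newTree = await api.createTree(baseSha, tree); // assumed: POST /git/trees with base_tree
  newTree.parentSha = baseSha;
  return newTree;
}
```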
File diff suppressed because one or more lines are too long
@@ -78,6 +78,8 @@
"@commitlint/cli": "^8.2.0",
"@commitlint/config-conventional": "^8.2.0",
"@octokit/rest": "^16.28.7",
"@testing-library/jest-dom": "^4.2.3",
"@testing-library/react": "^9.3.2",
"all-contributors-cli": "^6.0.0",
"babel-core": "^7.0.0-bridge.0",
"babel-eslint": "^10.0.1",
@@ -119,7 +121,6 @@
"npm-run-all": "^4.1.5",
"prettier": "1.18.2",
"react-test-renderer": "^16.8.4",
"react-testing-library": "^7.0.0",
"rimraf": "^3.0.0",
"simple-git": "^1.124.0",
"start-server-and-test": "^1.7.11",

@@ -1,6 +1,17 @@
import { Base64 } from 'js-base64';
import semaphore from 'semaphore';
import { find, flow, get, hasIn, initial, last, partial, result, uniq } from 'lodash';
import {
find,
flow,
get,
hasIn,
initial,
last,
partial,
result,
differenceBy,
trimStart,
} from 'lodash';
import { map } from 'lodash/fp';
import {
getAllResponses,
@@ -195,15 +206,10 @@ export default class API {
this._metadataSemaphore.take(async () => {
try {
const branchData = await this.checkMetadataRef();
const fileTree = {
[`${key}.json`]: {
path: `${key}.json`,
raw: JSON.stringify(data),
file: true,
},
};
await this.uploadBlob(fileTree[`${key}.json`]);
const changeTree = await this.updateTree(branchData.sha, '/', fileTree);
const file = { path: `${key}.json`, raw: JSON.stringify(data) };

await this.uploadBlob(file);
const changeTree = await this.updateTree(branchData.sha, [file]);
const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
await this.patchRef('meta', '_netlify_cms', sha);
localForage.setItem(`gh.meta.${key}`, {
@@ -304,7 +310,7 @@ export default class API {
return text;
}

async getMediaDisplayURL(sha, path) {
async getMediaAsBlob(sha, path) {
const response = await this.fetchBlob(sha, this.repoURL);
let blob;
if (path.match(/.svg$/)) {
@@ -313,6 +319,11 @@ export default class API {
} else {
blob = await response.blob();
}
return blob;
}

async getMediaDisplayURL(sha, path) {
const blob = await this.getMediaAsBlob(sha, path);

return URL.createObjectURL(blob);
}
@@ -501,56 +512,23 @@ export default class API {
}
}

composeFileTree(files) {
let filename;
let part;
let parts;
let subtree;
const fileTree = {};

files.forEach(file => {
if (file.uploaded) {
return;
}
parts = file.path.split('/').filter(part => part);
filename = parts.pop();
subtree = fileTree;
while ((part = parts.shift())) {
// eslint-disable-line no-cond-assign
subtree[part] = subtree[part] || {};
subtree = subtree[part];
}
subtree[filename] = file;
file.file = true;
});

return fileTree;
}

persistFiles(entry, mediaFiles, options) {
const uploadPromises = [];
async persistFiles(entry, mediaFiles, options) {
const files = entry ? mediaFiles.concat(entry) : mediaFiles;
const uploadPromises = files.filter(file => !file.uploaded).map(file => this.uploadBlob(file));
await Promise.all(uploadPromises);

files.forEach(file => {
if (file.uploaded) {
return;
}
uploadPromises.push(this.uploadBlob(file));
});

const fileTree = this.composeFileTree(files);

return Promise.all(uploadPromises).then(() => {
if (!options.useWorkflow) {
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, '/', fileTree))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else {
const mediaFilesList = mediaFiles.map(file => ({ path: file.path, sha: file.sha }));
return this.editorialWorkflowGit(fileTree, entry, mediaFilesList, options);
}
});
if (!options.useWorkflow) {
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, files))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else {
const mediaFilesList = mediaFiles.map(({ sha, path }) => ({
path: trimStart(path, '/'),
sha,
}));
return this.editorialWorkflowGit(files, entry, mediaFilesList, options);
}
}

getFileSha(path, branch) {
@@ -597,7 +575,7 @@ export default class API {
return this.createPR(commitMessage, branchName);
}

async editorialWorkflowGit(fileTree, entry, filesList, options) {
async editorialWorkflowGit(files, entry, mediaFilesList, options) {
const contentKey = this.generateContentKey(options.collectionName, entry.slug);
const branchName = this.generateBranchName(contentKey);
const unpublished = options.unpublished || false;
@@ -605,7 +583,7 @@ export default class API {
// Open new editorial review workflow for this entry - Create new metadata and commit to new branch
const userPromise = this.user();
const branchData = await this.getBranch();
const changeTree = await this.updateTree(branchData.commit.sha, '/', fileTree);
const changeTree = await this.updateTree(branchData.commit.sha, files);
const commitResponse = await this.commit(options.commitMessage, changeTree);

let pr;
@@ -640,24 +618,30 @@ export default class API {
path: entry.path,
sha: entry.sha,
},
files: filesList,
files: mediaFilesList,
},
timeStamp: new Date().toISOString(),
});
} else {
// Entry is already on editorial review workflow - just update metadata and commit to existing branch
const metadata = await this.retrieveMetadata(contentKey);
// mark media files to remove
const metadataMediaFiles = get(metadata, 'objects.files', []);
const mediaFilesToRemove = differenceBy(metadataMediaFiles, mediaFilesList, 'path').map(
file => ({ ...file, remove: true }),
);
const branchData = await this.getBranch(branchName);
const changeTree = await this.updateTree(branchData.commit.sha, '/', fileTree);
const commitPromise = this.commit(options.commitMessage, changeTree);
const metadataPromise = this.retrieveMetadata(contentKey);
const [commit, metadata] = await Promise.all([commitPromise, metadataPromise]);
const changeTree = await this.updateTree(
branchData.commit.sha,
files.concat(mediaFilesToRemove),
);
const commit = await this.commit(options.commitMessage, changeTree);
const { title, description } = options.parsedData || {};
const metadataFiles = get(metadata.objects, 'files', []);
const files = [...metadataFiles, ...filesList];

const pr = metadata.pr ? { ...metadata.pr, head: commit.sha } : undefined;
const objects = {
entry: { path: entry.path, sha: entry.sha },
files: uniq(files),
files: mediaFilesList,
};
const updatedMetadata = { ...metadata, pr, title, description, objects };

@@ -667,7 +651,7 @@ export default class API {
}

if (pr) {
return this.rebasePullRequest(pr.number, branchName, contentKey, metadata, commit);
return this.rebasePullRequest(pr.number, branchName, contentKey, updatedMetadata, commit);
} else if (this.useOpenAuthoring) {
// if a PR hasn't been created yet for the forked repo, just patch the branch
await this.patchBranch(branchName, commit.sha, { force: true });
@@ -692,7 +676,7 @@ export default class API {
*/
const [baseBranch, commits] = await Promise.all([
this.getBranch(),
this.getPullRequestCommits(prNumber, head),
this.getPullRequestCommits(prNumber),
]);

/**
@@ -891,12 +875,14 @@ export default class API {
);
}

publishUnpublishedEntry(collectionName, slug) {
async publishUnpublishedEntry(collectionName, slug) {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = this.generateBranchName(contentKey);
return this.retrieveMetadata(contentKey)
.then(metadata => this.mergePR(metadata.pr, metadata.objects))
.then(() => this.deleteBranch(branchName));
const metadata = await this.retrieveMetadata(contentKey);
await this.mergePR(metadata.pr, metadata.objects);
await this.deleteBranch(branchName);

return metadata;
}

createRef(type, name, sha) {
@@ -1000,7 +986,6 @@ export default class API {

forceMergePR(pullrequest, objects) {
const files = objects.files.concat(objects.entry);
const fileTree = this.composeFileTree(files);
let commitMessage = 'Automatically generated. Merged on Netlify CMS\n\nForce merge of:';
files.forEach(file => {
commitMessage += `\n* "${file.path}"`;
@@ -1010,7 +995,7 @@ export default class API {
'line-height: 30px;text-align: center;font-weight: bold',
);
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, '/', fileTree))
.then(branchData => this.updateTree(branchData.commit.sha, files))
.then(changeTree => this.commit(commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
}
@@ -1062,47 +1047,17 @@ export default class API {
);
}

updateTree(sha, path, fileTree) {
return this.getTree(sha).then(tree => {
let obj;
let filename;
let fileOrDir;
const updates = [];
const added = {};
async updateTree(sha, files) {
const tree = files.map(file => ({
path: trimStart(file.path, '/'),
mode: '100644',
type: 'blob',
sha: file.remove ? null : file.sha,
}));

for (let i = 0, len = tree.tree.length; i < len; i++) {
obj = tree.tree[i];
if ((fileOrDir = fileTree[obj.path])) {
// eslint-disable-line no-cond-assign
added[obj.path] = true;
if (fileOrDir.file) {
updates.push({ path: obj.path, mode: obj.mode, type: obj.type, sha: fileOrDir.sha });
} else {
updates.push(this.updateTree(obj.sha, obj.path, fileOrDir));
}
}
}
for (filename in fileTree) {
fileOrDir = fileTree[filename];
if (added[filename]) {
continue;
}
updates.push(
fileOrDir.file
? { path: filename, mode: '100644', type: 'blob', sha: fileOrDir.sha }
: this.updateTree(null, filename, fileOrDir),
);
}
return Promise.all(updates)
.then(tree => this.createTree(sha, tree))
.then(response => ({
path,
mode: '040000',
type: 'tree',
sha: response.sha,
parentSha: sha,
}));
});
const newTree = await this.createTree(sha, tree);
newTree.parentSha = sha;
return newTree;
}

createTree(baseSha, tree) {

@ -1,40 +1,85 @@
|
||||
import { Base64 } from 'js-base64';
|
||||
import API from '../API';
|
||||
|
||||
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));
|
||||
|
||||
describe('github API', () => {
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
const mockAPI = (api, responses) => {
|
||||
api.request = (path, options = {}) => {
|
||||
api.request = jest.fn().mockImplementation((path, options = {}) => {
|
||||
const normalizedPath = path.indexOf('?') !== -1 ? path.substr(0, path.indexOf('?')) : path;
|
||||
const response = responses[normalizedPath];
|
||||
return typeof response === 'function'
|
||||
? Promise.resolve(response(options))
|
||||
: Promise.reject(new Error(`No response for path '${normalizedPath}'`));
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
it('should create PR with correct base branch name when publishing with editorial workflow', () => {
|
||||
let prBaseBranch = null;
|
||||
const api = new API({ branch: 'gh-pages', repo: 'my-repo' });
|
||||
const responses = {
|
||||
'/repos/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
|
||||
'/repos/my-repo/git/trees/def': () => ({ tree: [] }),
|
||||
'/repos/my-repo/git/trees': () => ({}),
|
||||
'/repos/my-repo/git/commits': () => ({}),
|
||||
'/repos/my-repo/git/refs': () => ({}),
|
||||
'/repos/my-repo/pulls': pullRequest => {
|
||||
prBaseBranch = JSON.parse(pullRequest.body).base;
|
||||
return { head: { sha: 'cbd' } };
|
||||
},
|
||||
'/user': () => ({}),
|
||||
'/repos/my-repo/git/blobs': () => ({}),
|
||||
'/repos/my-repo/git/refs/meta/_netlify_cms': () => ({ object: {} }),
|
||||
};
|
||||
mockAPI(api, responses);
|
||||
describe('editorialWorkflowGit', () => {
|
||||
it('should create PR with correct base branch name when publishing with editorial workflow', () => {
|
||||
let prBaseBranch = null;
|
||||
const api = new API({ branch: 'gh-pages', repo: 'my-repo' });
|
||||
const responses = {
|
||||
'/repos/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
|
||||
'/repos/my-repo/git/trees/def': () => ({ tree: [] }),
|
||||
'/repos/my-repo/git/trees': () => ({}),
|
||||
'/repos/my-repo/git/commits': () => ({}),
|
||||
'/repos/my-repo/git/refs': () => ({}),
|
||||
'/repos/my-repo/pulls': pullRequest => {
|
||||
prBaseBranch = JSON.parse(pullRequest.body).base;
|
||||
return { head: { sha: 'cbd' } };
|
||||
},
|
||||
'/user': () => ({}),
|
||||
'/repos/my-repo/git/blobs': () => ({}),
|
||||
'/repos/my-repo/git/refs/meta/_netlify_cms': () => ({ object: {} }),
|
||||
};
|
||||
mockAPI(api, responses);
|
||||
|
||||
return expect(
|
||||
api
|
||||
.editorialWorkflowGit(null, { slug: 'entry', sha: 'abc' }, null, {})
|
||||
.then(() => prBaseBranch),
|
||||
).resolves.toEqual('gh-pages');
|
||||
return expect(
|
||||
api
|
||||
.editorialWorkflowGit([], { slug: 'entry', sha: 'abc' }, null, {})
|
||||
.then(() => prBaseBranch),
|
||||
).resolves.toEqual('gh-pages');
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateTree', () => {
|
||||
it('should create tree with nested paths', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
api.createTree = jest.fn().mockImplementation(() => Promise.resolve({ sha: 'newTreeSha' }));
|
||||
|
||||
const files = [
|
||||
{ path: '/static/media/new-image.jpeg', sha: 'new-image.jpeg', remove: true },
|
||||
{ path: 'content/posts/new-post.md', sha: 'new-post.md' },
|
||||
];
|
||||
|
||||
const baseTreeSha = 'baseTreeSha';
|
||||
|
||||
await expect(api.updateTree(baseTreeSha, files)).resolves.toEqual({
|
||||
sha: 'newTreeSha',
|
||||
parentSha: baseTreeSha,
|
||||
});
|
||||
|
||||
expect(api.createTree).toHaveBeenCalledTimes(1);
|
||||
expect(api.createTree).toHaveBeenCalledWith(baseTreeSha, [
|
||||
{
|
||||
path: 'static/media/new-image.jpeg',
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: null,
|
||||
},
|
||||
{
|
||||
path: 'content/posts/new-post.md',
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: 'new-post.md',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('request', () => {
|
||||
@ -106,4 +151,191 @@ describe('github API', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMediaAsBlob', () => {
|
||||
it('should return response blob on non svg file', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
const blob = {};
|
||||
const response = { blob: jest.fn().mockResolvedValue(blob) };
|
||||
api.fetchBlob = jest.fn().mockResolvedValue(response);
|
||||
|
||||
await expect(api.getMediaAsBlob('sha', 'static/media/image.png')).resolves.toBe(blob);
|
||||
|
||||
expect(api.fetchBlob).toHaveBeenCalledTimes(1);
|
||||
expect(api.fetchBlob).toHaveBeenCalledWith('sha', '/repos/owner/repo');
|
||||
|
||||
expect(response.blob).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should return test blob on non file', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
const response = { text: jest.fn().mockResolvedValue('svg') };
|
||||
api.fetchBlob = jest.fn().mockResolvedValue(response);
|
||||
|
||||
await expect(api.getMediaAsBlob('sha', 'static/media/logo.svg')).resolves.toEqual(
|
||||
new Blob(['svg'], { type: 'image/svg+xml' }),
|
||||
);
|
||||
|
||||
expect(api.fetchBlob).toHaveBeenCalledTimes(1);
|
||||
expect(api.fetchBlob).toHaveBeenCalledWith('sha', '/repos/owner/repo');
|
||||
|
||||
expect(response.text).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMediaDisplayURL', () => {
|
||||
it('should return createObjectURL result', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
const blob = {};
|
||||
api.getMediaAsBlob = jest.fn().mockResolvedValue(blob);
|
||||
global.URL.createObjectURL = jest
|
||||
.fn()
|
||||
.mockResolvedValue('blob:http://localhost:8080/blob-id');
|
||||
|
||||
await expect(api.getMediaDisplayURL('sha', 'static/media/image.png')).resolves.toBe(
|
||||
'blob:http://localhost:8080/blob-id',
|
||||
);
|
||||
|
||||
expect(api.getMediaAsBlob).toHaveBeenCalledTimes(1);
|
||||
expect(api.getMediaAsBlob).toHaveBeenCalledWith('sha', 'static/media/image.png');
|
||||
|
||||
expect(global.URL.createObjectURL).toHaveBeenCalledTimes(1);
|
||||
expect(global.URL.createObjectURL).toHaveBeenCalledWith(blob);
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistFiles', () => {
|
||||
it('should update tree, commit and patch branch when useWorkflow is false', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
const responses = {
|
||||
// upload the file
|
||||
'/repos/owner/repo/git/blobs': () => ({ sha: 'new-file-sha' }),
|
||||
|
||||
// get the branch
|
||||
'/repos/owner/repo/branches/master': () => ({ commit: { sha: 'root' } }),
|
||||
|
||||
// create new tree
|
||||
'/repos/owner/repo/git/trees': options => {
|
||||
const data = JSON.parse(options.body);
|
||||
return { sha: data.base_tree };
|
||||
},
|
||||
|
||||
// update the commit with the tree
|
||||
'/repos/owner/repo/git/commits': () => ({ sha: 'commit-sha' }),
|
||||
|
||||
// patch the branch
|
||||
'/repos/owner/repo/git/refs/heads/master': () => ({}),
|
||||
};
|
||||
mockAPI(api, responses);
|
||||
|
||||
const entry = {
|
||||
slug: 'entry',
|
||||
sha: 'abc',
|
||||
path: 'content/posts/new-post.md',
|
||||
raw: 'content',
|
||||
};
|
||||
await api.persistFiles(entry, [], { commitMessage: 'commitMessage' });
|
||||
|
||||
expect(api.request).toHaveBeenCalledTimes(5);
|
||||
|
||||
expect(api.request.mock.calls[0]).toEqual([
|
||||
'/repos/owner/repo/git/blobs',
|
||||
{
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ content: Base64.encode(entry.raw), encoding: 'base64' }),
|
||||
},
|
||||
]);
|
||||
|
||||
expect(api.request.mock.calls[1]).toEqual(['/repos/owner/repo/branches/master']);
|
||||
|
||||
expect(api.request.mock.calls[2]).toEqual([
|
||||
'/repos/owner/repo/git/trees',
|
||||
{
|
||||
body: JSON.stringify({
|
||||
base_tree: 'root',
|
||||
tree: [
|
||||
{
|
||||
path: 'content/posts/new-post.md',
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: 'new-file-sha',
|
||||
},
|
||||
],
|
||||
}),
|
||||
method: 'POST',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(api.request.mock.calls[3]).toEqual([
|
||||
'/repos/owner/repo/git/commits',
|
||||
{
|
||||
body: JSON.stringify({
|
||||
message: 'commitMessage',
|
||||
tree: 'root',
|
||||
parents: ['root'],
|
||||
}),
|
||||
method: 'POST',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(api.request.mock.calls[4]).toEqual([
|
||||
'/repos/owner/repo/git/refs/heads/master',
|
||||
{
|
||||
body: JSON.stringify({
|
||||
sha: 'commit-sha',
|
||||
force: false,
|
||||
}),
|
||||
method: 'PATCH',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should call editorialWorkflowGit when useWorkflow is true', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
api.uploadBlob = jest.fn();
|
||||
api.editorialWorkflowGit = jest.fn();
|
||||
|
||||
const entry = {
|
||||
slug: 'entry',
|
||||
sha: 'abc',
|
||||
path: 'content/posts/new-post.md',
|
||||
raw: 'content',
|
||||
};
|
||||
|
||||
const mediaFiles = [
|
||||
{
|
||||
path: '/static/media/image-1.png',
|
||||
uploaded: true,
|
||||
sha: 'image-1.png',
|
||||
},
|
||||
{
|
||||
path: '/static/media/image-2.png',
|
||||
sha: 'image-2.png',
|
||||
},
|
||||
];
|
||||
|
||||
await api.persistFiles(entry, mediaFiles, { useWorkflow: true });
|
||||
|
||||
expect(api.uploadBlob).toHaveBeenCalledTimes(2);
|
||||
expect(api.uploadBlob).toHaveBeenCalledWith(entry);
|
||||
expect(api.uploadBlob).toHaveBeenCalledWith(mediaFiles[1]);
|
||||
|
||||
expect(api.editorialWorkflowGit).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(api.editorialWorkflowGit).toHaveBeenCalledWith(
|
||||
mediaFiles.concat(entry),
|
||||
entry,
|
||||
[
|
||||
{ path: 'static/media/image-1.png', sha: 'image-1.png' },
|
||||
{ path: 'static/media/image-2.png', sha: 'image-2.png' },
|
||||
],
|
||||
{ useWorkflow: true },
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -20,6 +20,13 @@ describe('github backend implementation', () => {
|
||||
}),
|
||||
};
|
||||
|
||||
const createObjectURL = jest.fn();
|
||||
global.URL = {
|
||||
createObjectURL,
|
||||
};
|
||||
|
||||
createObjectURL.mockReturnValue('displayURL');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
@ -72,4 +79,173 @@ describe('github backend implementation', () => {
|
||||
await expect(gitHubImplementation.forkExists({ token: 'token' })).resolves.toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistMedia', () => {
|
||||
const persistFiles = jest.fn();
|
||||
const mockAPI = {
|
||||
persistFiles,
|
||||
};
|
||||
|
||||
persistFiles.mockImplementation((_, files) => {
|
||||
files.forEach((file, index) => {
|
||||
file.sha = index;
|
||||
});
|
||||
});
|
||||
|
||||
it('should persist media file when not draft', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const mediaFile = {
|
||||
value: 'image.png',
|
||||
fileObj: { size: 100 },
|
||||
path: '/media/image.png',
|
||||
};
|
||||
|
||||
expect.assertions(5);
|
||||
await expect(gitHubImplementation.persistMedia(mediaFile)).resolves.toEqual({
|
||||
id: 0,
|
||||
name: 'image.png',
|
||||
size: 100,
|
||||
displayURL: 'displayURL',
|
||||
path: 'media/image.png',
|
||||
draft: undefined,
|
||||
});
|
||||
|
||||
expect(persistFiles).toHaveBeenCalledTimes(1);
|
||||
expect(persistFiles).toHaveBeenCalledWith(null, [mediaFile], {});
|
||||
expect(createObjectURL).toHaveBeenCalledTimes(1);
|
||||
expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
|
||||
});
|
||||
|
||||
it('should not persist media file when draft', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
createObjectURL.mockReturnValue('displayURL');
|
||||
|
||||
const mediaFile = {
|
||||
value: 'image.png',
|
||||
fileObj: { size: 100 },
|
||||
path: '/media/image.png',
|
||||
};
|
||||
|
||||
expect.assertions(4);
|
||||
await expect(gitHubImplementation.persistMedia(mediaFile, { draft: true })).resolves.toEqual({
|
||||
id: undefined,
|
||||
name: 'image.png',
|
||||
size: 100,
|
||||
displayURL: 'displayURL',
|
||||
path: 'media/image.png',
|
||||
draft: true,
|
||||
});
|
||||
|
||||
expect(persistFiles).toHaveBeenCalledTimes(0);
|
||||
expect(createObjectURL).toHaveBeenCalledTimes(1);
|
||||
expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
|
||||
});
|
||||
|
||||
it('should log and throw error on "persistFiles" error', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const error = new Error('failed to persist files');
|
||||
persistFiles.mockRejectedValue(error);
|
||||
|
||||
const mediaFile = {
|
||||
value: 'image.png',
|
||||
fileObj: { size: 100 },
|
||||
path: '/media/image.png',
|
||||
};
|
||||
|
||||
expect.assertions(5);
|
||||
await expect(gitHubImplementation.persistMedia(mediaFile)).rejects.toThrowError(error);
|
||||
|
||||
expect(persistFiles).toHaveBeenCalledTimes(1);
|
||||
expect(createObjectURL).toHaveBeenCalledTimes(0);
|
||||
expect(console.error).toHaveBeenCalledTimes(1);
|
||||
expect(console.error).toHaveBeenCalledWith(error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMediaFiles', () => {
|
||||
const getMediaAsBlob = jest.fn();
|
||||
const mockAPI = {
|
||||
getMediaAsBlob,
|
||||
};
|
||||
|
||||
it('should return media files from meta data', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const blob = new Blob(['']);
|
||||
getMediaAsBlob.mockResolvedValue(blob);
|
||||
|
||||
const file = new File([blob], name);
|
||||
|
||||
const data = {
|
||||
metaData: {
|
||||
objects: {
|
||||
files: [{ path: 'static/media/image.png', sha: 'image.png' }],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
await expect(gitHubImplementation.getMediaFiles(data)).resolves.toEqual([
|
||||
{
|
||||
id: 'image.png',
|
||||
sha: 'image.png',
|
||||
displayURL: 'displayURL',
|
||||
path: 'static/media/image.png',
|
||||
name: 'image.png',
|
||||
size: file.size,
|
||||
file,
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('unpublishedEntry', () => {
|
||||
const generateContentKey = jest.fn();
|
||||
const readUnpublishedBranchFile = jest.fn();
|
||||
|
||||
const mockAPI = {
|
||||
generateContentKey,
|
||||
readUnpublishedBranchFile,
|
||||
};
|
||||
|
||||
it('should return unpublished entry', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
gitHubImplementation.getMediaFiles = jest.fn().mockResolvedValue([{ path: 'image.png' }]);
|
||||
|
||||
generateContentKey.mockReturnValue('contentKey');
|
||||
|
||||
const data = {
|
||||
fileData: 'fileData',
|
||||
isModification: true,
|
||||
metaData: { objects: { entry: { path: 'entry-path' } } },
|
||||
};
|
||||
readUnpublishedBranchFile.mockResolvedValue(data);
|
||||
|
||||
const collection = { get: jest.fn().mockReturnValue('posts') };
|
||||
await expect(gitHubImplementation.unpublishedEntry(collection, 'slug')).resolves.toEqual({
|
||||
slug: 'slug',
|
||||
file: { path: 'entry-path' },
|
||||
data: 'fileData',
|
||||
metaData: { objects: { entry: { path: 'entry-path' } } },
|
||||
mediaFiles: [{ path: 'image.png' }],
|
||||
isModification: true,
|
||||
});
|
||||
|
||||
expect(generateContentKey).toHaveBeenCalledTimes(1);
|
||||
expect(generateContentKey).toHaveBeenCalledWith('posts', 'slug');
|
||||
|
||||
expect(readUnpublishedBranchFile).toHaveBeenCalledTimes(1);
|
||||
expect(readUnpublishedBranchFile).toHaveBeenCalledWith('contentKey');
|
||||
|
||||
expect(gitHubImplementation.getMediaFiles).toHaveBeenCalledTimes(1);
|
||||
expect(gitHubImplementation.getMediaFiles).toHaveBeenCalledWith(data);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@@ -4,6 +4,7 @@ import semaphore from 'semaphore';
import { stripIndent } from 'common-tags';
import { asyncLock } from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { get } from 'lodash';
import API from './API';
import GraphQLAPI from './GraphQLAPI';

@@ -331,7 +332,9 @@ export default class GitHub {

async persistMedia(mediaFile, options = {}) {
try {
await this.api.persistFiles(null, [mediaFile], options);
if (!options.draft) {
await this.api.persistFiles(null, [mediaFile], options);
}

const { sha, value, path, fileObj } = mediaFile;
const displayURL = URL.createObjectURL(fileObj);
@@ -341,6 +344,7 @@ export default class GitHub {
size: fileObj.size,
displayURL,
path: trimStart(path, '/'),
draft: options.draft,
};
} catch (error) {
console.error(error);
@@ -352,6 +356,29 @@ export default class GitHub {
return this.api.deleteFile(path, commitMessage, options);
}

async getMediaFiles(data) {
const files = get(data, 'metaData.objects.files', []);
const mediaFiles = await Promise.all(
files.map(file =>
this.api.getMediaAsBlob(file.sha, file.path).then(blob => {
const name = file.path.substring(file.path.lastIndexOf('/') + 1);
const fileObj = new File([blob], name);
return {
id: file.sha,
sha: file.sha,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name: name,
size: fileObj.size,
file: fileObj,
};
}),
),
);

return mediaFiles;
}

unpublishedEntries() {
return this.api
.listUnpublishedBranches()
@@ -371,10 +398,9 @@ export default class GitHub {
resolve(null);
sem.leave();
} else {
const path = data.metaData.objects.entry.path;
resolve({
slug,
file: { path },
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
@@ -400,18 +426,21 @@ export default class GitHub {
});
}

unpublishedEntry(collection, slug) {
async unpublishedEntry(collection, slug) {
const contentKey = this.api.generateContentKey(collection.get('name'), slug);
return this.api.readUnpublishedBranchFile(contentKey).then(data => {
if (!data) return null;
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
};
});
const data = await this.api.readUnpublishedBranchFile(contentKey);
if (!data) {
return null;
}
const mediaFiles = await this.getMediaFiles(data);
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}

/**
@@ -456,9 +485,10 @@ export default class GitHub {

publishUnpublishedEntry(collection, slug) {
// publishUnpublishedEntry is a transactional operation
return this.runWithLock(
() => this.api.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
return this.runWithLock(async () => {
const metaData = await this.api.publishUnpublishedEntry(collection, slug);
const mediaFiles = await this.getMediaFiles({ metaData });
return { mediaFiles };
}, 'Failed to acquire publish entry lock');
}
}

@@ -123,6 +123,15 @@ export default class TestBackend {
return Promise.resolve(window.repoFilesUnpublished);
}

getMediaFiles(entry) {
const mediaFiles = entry.mediaFiles.map(file => ({
...file,
...this.mediaFileToAsset(file),
file: file.fileObj,
}));
return mediaFiles;
}

unpublishedEntry(collection, slug) {
const entry = window.repoFilesUnpublished.find(
e => e.metaData.collection === collection.get('name') && e.slug === slug,
@@ -132,6 +141,8 @@ export default class TestBackend {
new EditorialWorkflowError('content is not under editorial workflow', true),
);
}
entry.mediaFiles = this.getMediaFiles(entry);

return Promise.resolve(entry);
}

@@ -144,14 +155,17 @@ export default class TestBackend {
return Promise.resolve();
}

persistEntry({ path, raw, slug }, mediaFiles, options = {}) {
async persistEntry({ path, raw, slug }, mediaFiles, options = {}) {
if (options.useWorkflow) {
const unpubStore = window.repoFilesUnpublished;

const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = { ...unpubStore[existingEntryIndex], data: raw };
unpubEntry.title = options.parsedData && options.parsedData.title;
unpubEntry.description = options.parsedData && options.parsedData.description;
unpubEntry.mediaFiles = mediaFiles;

unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
const unpubEntry = {
@@ -166,6 +180,7 @@ export default class TestBackend {
description: options.parsedData && options.parsedData.description,
},
slug,
mediaFiles,
};
unpubStore.push(unpubEntry);
}
@@ -182,6 +197,7 @@ export default class TestBackend {
} else {
window.repoFiles[folder][fileName].content = raw;
}
await Promise.all(mediaFiles.map(file => this.persistMedia(file)));
return Promise.resolve();
}

@@ -194,7 +210,7 @@ export default class TestBackend {
return Promise.resolve();
}

publishUnpublishedEntry(collection, slug) {
async publishUnpublishedEntry(collection, slug) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
@@ -202,19 +218,32 @@ export default class TestBackend {
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = { raw: unpubEntry.data, slug: unpubEntry.slug, path: unpubEntry.file.path };
unpubStore.splice(unpubEntryIndex, 1);
return this.persistEntry(entry);

await this.persistEntry(entry, unpubEntry.mediaFiles);
return { mediaFiles: this.getMediaFiles(unpubEntry) };
}

getMedia() {
return Promise.resolve(this.assets);
}

persistMedia({ fileObj }) {
mediaFileToAsset(mediaFile) {
const { fileObj } = mediaFile;
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = { id: uuid(), name, size, path: url, url };
const normalizedAsset = { id: uuid(), name, size, path: mediaFile.path, url };

return normalizedAsset;
}

persistMedia(mediaFile, options = {}) {
const normalizedAsset = this.mediaFileToAsset(mediaFile);

if (!options.draft) {
this.assets.push(normalizedAsset);
}

this.assets.push(normalizedAsset);
return Promise.resolve(normalizedAsset);
}

@ -1,7 +1,10 @@
|
||||
import { resolveBackend } from '../backend';
|
||||
import { resolveBackend, Backend } from '../backend';
|
||||
import registry from 'Lib/registry';
|
||||
import { Map, List } from 'immutable';
|
||||
|
||||
jest.mock('Lib/registry');
|
||||
jest.mock('netlify-cms-lib-util');
|
||||
jest.mock('Formats/formats');
|
||||
|
||||
const configWrapper = inputObject => ({
|
||||
get: prop => inputObject[prop],
|
||||
@ -108,4 +111,271 @@ describe('Backend', () => {
|
||||
expect(result.length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLocalDraftBackup', () => {
|
||||
const { localForage } = require('netlify-cms-lib-util');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should return empty object on no item', async () => {
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
const slug = 'slug';
|
||||
|
||||
localForage.getItem.mockReturnValue();
|
||||
|
||||
const result = await backend.getLocalDraftBackup(collection, slug);
|
||||
|
||||
expect(result).toEqual({});
|
||||
expect(localForage.getItem).toHaveBeenCalledTimes(1);
|
||||
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
|
||||
});
|
||||
|
||||
it('should return empty object on item with empty content', async () => {
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
const slug = 'slug';
|
||||
|
||||
localForage.getItem.mockReturnValue({ raw: '' });
|
||||
|
||||
const result = await backend.getLocalDraftBackup(collection, slug);
|
||||
|
||||
expect(result).toEqual({});
|
||||
expect(localForage.getItem).toHaveBeenCalledTimes(1);
|
||||
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
|
||||
});
|
||||
|
||||
it('should return backup entry, empty media files and assets when only raw property was saved', async () => {
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
const slug = 'slug';
|
||||
|
||||
localForage.getItem.mockReturnValue({
|
||||
raw: 'content',
|
||||
});
|
||||
|
||||
const result = await backend.getLocalDraftBackup(collection, slug);
|
||||
|
||||
expect(result).toEqual({
|
||||
assets: [],
|
||||
mediaFiles: [],
|
||||
entry: {
|
||||
collection: 'posts',
|
||||
slug: 'slug',
|
||||
path: '',
|
||||
partial: false,
|
||||
raw: 'content',
|
||||
data: {},
|
||||
label: null,
|
||||
metaData: null,
|
||||
isModification: null,
|
||||
},
|
||||
});
|
||||
expect(localForage.getItem).toHaveBeenCalledTimes(1);
|
||||
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
|
||||
});
|
||||
|
||||
it('should return backup entry, media files and assets when all were backed up', async () => {
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
const slug = 'slug';
|
||||
|
||||
localForage.getItem.mockReturnValue({
|
||||
raw: 'content',
|
||||
mediaFiles: [{ id: '1' }],
|
||||
assets: [{ public_path: 'public_path' }],
|
||||
});
|
||||
|
||||
const result = await backend.getLocalDraftBackup(collection, slug);
|
||||
|
||||
expect(result).toEqual({
|
||||
assets: [{ public_path: 'public_path' }],
|
||||
mediaFiles: [{ id: '1' }],
|
||||
entry: {
|
||||
collection: 'posts',
|
||||
slug: 'slug',
|
||||
path: '',
|
||||
partial: false,
|
||||
raw: 'content',
|
||||
data: {},
|
||||
label: null,
|
||||
metaData: null,
|
||||
isModification: null,
|
||||
},
|
||||
});
|
||||
expect(localForage.getItem).toHaveBeenCalledTimes(1);
|
||||
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistLocalDraftBackup', () => {
|
||||
const { localForage } = require('netlify-cms-lib-util');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should not persist empty entry', async () => {
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
backend.entryToRaw = jest.fn().mockReturnValue('');
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
|
||||
const slug = 'slug';
|
||||
|
||||
const entry = Map({
|
||||
slug,
|
||||
});
|
||||
|
||||
await backend.persistLocalDraftBackup(entry, collection, List(), List());
|
||||
|
||||
expect(backend.entryToRaw).toHaveBeenCalledTimes(1);
|
||||
expect(backend.entryToRaw).toHaveBeenCalledWith(collection, entry);
|
||||
expect(localForage.setItem).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('should persist non empty entry', async () => {
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
backend.entryToRaw = jest.fn().mockReturnValue('content');
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
|
||||
const slug = 'slug';
|
||||
|
||||
const entry = Map({
|
||||
slug,
|
||||
path: 'content/posts/entry.md',
|
||||
});
|
||||
|
||||
const mediaFiles = List([{ id: '1' }]);
|
||||
const assets = List([{ public_path: 'public_path' }]);
|
||||
|
||||
await backend.persistLocalDraftBackup(entry, collection, mediaFiles, assets);
|
||||
|
||||
expect(backend.entryToRaw).toHaveBeenCalledTimes(1);
|
||||
expect(backend.entryToRaw).toHaveBeenCalledWith(collection, entry);
|
||||
expect(localForage.setItem).toHaveBeenCalledTimes(2);
|
||||
expect(localForage.setItem).toHaveBeenCalledWith('backup.posts.slug', {
|
||||
assets: [{ public_path: 'public_path' }],
|
||||
mediaFiles: [{ id: '1' }],
|
||||
path: 'content/posts/entry.md',
|
||||
raw: 'content',
|
||||
});
|
||||
expect(localForage.setItem).toHaveBeenCalledWith('backup', 'content');
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistMedia', () => {
|
||||
it('should persist media', async () => {
|
||||
const persistMediaResult = {};
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
persistMedia: jest.fn().mockResolvedValue(persistMediaResult),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const user = { login: 'login', name: 'name' };
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
backend.currentUser = jest.fn().mockResolvedValue(user);
|
||||
|
||||
const file = { path: 'static/media/image.png' };
|
||||
|
||||
const result = await backend.persistMedia(config, file, true);
|
||||
expect(result).toBe(persistMediaResult);
|
||||
expect(implementation.persistMedia).toHaveBeenCalledTimes(1);
|
||||
expect(implementation.persistMedia).toHaveBeenCalledWith(
|
||||
{ path: 'static/media/image.png' },
|
||||
{ commitMessage: 'Upload “static/media/image.png”', draft: true },
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('unpublishedEntry', () => {
|
||||
it('should return unpublished entry', async () => {
|
||||
const unpublishedEntryResult = {
|
||||
file: { path: 'path' },
|
||||
isModification: true,
|
||||
metaData: {},
|
||||
mediaFiles: [{ id: '1' }],
|
||||
data: 'content',
|
||||
};
|
||||
const implementation = {
|
||||
init: jest.fn(() => implementation),
|
||||
unpublishedEntry: jest.fn().mockResolvedValue(unpublishedEntryResult),
|
||||
};
|
||||
const config = Map({});
|
||||
|
||||
const backend = new Backend(implementation, { config, backendName: 'github' });
|
||||
|
||||
const collection = Map({
|
||||
name: 'posts',
|
||||
});
|
||||
|
||||
const slug = 'slug';
|
||||
|
||||
const result = await backend.unpublishedEntry(collection, slug);
|
||||
expect(result).toEqual({
|
||||
collection: 'draft',
|
||||
slug: '',
|
||||
path: 'path',
|
||||
partial: false,
|
||||
raw: 'content',
|
||||
data: {},
|
||||
label: null,
|
||||
metaData: {},
|
||||
isModification: true,
|
||||
mediaFiles: [{ id: '1' }],
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -0,0 +1,230 @@
|
||||
import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
|
||||
import * as actions from '../editorialWorkflow';
|
||||
import { setDraftEntryMediaFiles } from '../entries';
|
||||
import { addAssets } from '../media';
|
||||
import configureMockStore from 'redux-mock-store';
|
||||
import thunk from 'redux-thunk';
|
||||
import { fromJS } from 'immutable';
|
||||
|
||||
jest.mock('coreSrc/backend');
|
||||
jest.mock('Reducers', () => {
|
||||
return {
|
||||
getAsset: jest.fn().mockReturnValue({}),
|
||||
};
|
||||
});
|
||||
jest.mock('ValueObjects/AssetProxy');
|
||||
jest.mock('netlify-cms-lib-util');
|
||||
jest.mock('uuid/v4', () => {
|
||||
return jest.fn().mockReturnValue('000000000000000000000');
|
||||
});
|
||||
jest.mock('redux-notifications', () => {
|
||||
const actual = jest.requireActual('redux-notifications');
|
||||
return {
|
||||
...actual,
|
||||
actions: {
|
||||
notifSend: jest.fn().mockImplementation(payload => ({
|
||||
type: 'NOTIF_SEND',
|
||||
...payload,
|
||||
})),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const middlewares = [thunk];
|
||||
const mockStore = configureMockStore(middlewares);
|
||||
|
||||
describe('editorialWorkflow actions', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('loadUnpublishedEntry', () => {
|
||||
it('should load unpublished entry', () => {
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
const { createAssetProxy } = require('ValueObjects/AssetProxy');
|
||||
|
||||
const assetProxy = { name: 'name', public_path: 'public_path' };
|
||||
const entry = { mediaFiles: [{ file: { name: 'name' }, id: '1' }] };
|
||||
const backend = {
|
||||
unpublishedEntry: jest.fn().mockResolvedValue(entry),
|
||||
};
|
||||
|
||||
const store = mockStore({
|
||||
config: fromJS({}),
|
||||
collections: fromJS({
|
||||
posts: { name: 'posts' },
|
||||
}),
|
||||
mediaLibrary: fromJS({
|
||||
isLoading: false,
|
||||
}),
|
||||
});
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
createAssetProxy.mockResolvedValue(assetProxy);
|
||||
|
||||
const slug = 'slug';
|
||||
const collection = store.getState().collections.get('posts');
|
||||
|
||||
return store.dispatch(actions.loadUnpublishedEntry(collection, slug)).then(() => {
|
||||
const actions = store.getActions();
|
||||
expect(actions).toHaveLength(5);
|
||||
expect(actions[0]).toEqual({
|
||||
type: 'UNPUBLISHED_ENTRY_REQUEST',
|
||||
payload: {
|
||||
collection: 'posts',
|
||||
slug,
|
||||
},
|
||||
});
|
||||
expect(actions[1]).toEqual(addAssets([assetProxy]));
|
||||
expect(actions[2]).toEqual(
|
||||
setDraftEntryMediaFiles([
|
||||
{
|
||||
file: { name: 'name' },
|
||||
name: 'name',
|
||||
id: '1',
|
||||
draft: true,
|
||||
public_path: 'public_path',
|
||||
},
|
||||
]),
|
||||
);
|
||||
expect(actions[3]).toEqual({
|
||||
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
|
||||
payload: {
|
||||
mediaFiles: [{ file: { name: 'name' }, id: '1', draft: true }],
|
||||
},
|
||||
});
|
||||
expect(actions[4]).toEqual({
|
||||
type: 'UNPUBLISHED_ENTRY_SUCCESS',
|
||||
payload: {
|
||||
collection: 'posts',
|
||||
entry,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('publishUnpublishedEntry', () => {
|
||||
it('should publish unpublished entry and report success', () => {
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
|
||||
const mediaFiles = [{ file: { name: 'name' }, id: '1' }];
|
||||
const entry = { mediaFiles };
|
||||
const backend = {
|
||||
publishUnpublishedEntry: jest.fn().mockResolvedValue({ mediaFiles }),
|
||||
getEntry: jest.fn().mockResolvedValue(entry),
|
||||
};
|
||||
|
||||
const store = mockStore({
|
||||
config: fromJS({}),
|
||||
mediaLibrary: fromJS({
|
||||
isLoading: false,
|
||||
}),
|
||||
collections: fromJS({
|
||||
posts: { name: 'posts' },
|
||||
}),
|
||||
});
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
|
||||
const slug = 'slug';
|
||||
|
||||
return store.dispatch(actions.publishUnpublishedEntry('posts', slug)).then(() => {
|
||||
const actions = store.getActions();
|
||||
expect(actions).toHaveLength(7);
|
||||
expect(actions[0]).toEqual({
|
||||
type: 'UNPUBLISHED_ENTRY_PUBLISH_REQUEST',
|
||||
payload: {
|
||||
collection: 'posts',
|
||||
slug,
|
||||
},
|
||||
optimist: { type: BEGIN, id: '000000000000000000000' },
|
||||
});
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'NOTIF_SEND',
|
||||
message: { key: 'ui.toast.entryPublished' },
|
||||
kind: 'success',
|
||||
dismissAfter: 4000,
|
||||
});
|
||||
expect(actions[2]).toEqual({
|
||||
type: 'UNPUBLISHED_ENTRY_PUBLISH_SUCCESS',
|
||||
payload: {
|
||||
collection: 'posts',
|
||||
slug,
|
||||
},
|
||||
optimist: { type: COMMIT, id: '000000000000000000000' },
|
||||
});
|
||||
expect(actions[3]).toEqual({
|
||||
type: 'ENTRY_REQUEST',
|
||||
payload: {
|
||||
slug,
|
||||
collection: 'posts',
|
||||
},
|
||||
});
|
||||
expect(actions[4]).toEqual({
|
||||
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
|
||||
payload: {
|
||||
mediaFiles: [{ file: { name: 'name' }, id: '1', draft: false }],
|
||||
},
|
||||
});
|
||||
expect(actions[5]).toEqual({
|
||||
type: 'CLEAR_DRAFT_ENTRY_MEDIA_FILES',
|
||||
});
|
||||
expect(actions[6]).toEqual({
|
||||
type: 'ENTRY_SUCCESS',
|
||||
payload: {
|
||||
entry,
|
||||
collection: 'posts',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should publish unpublished entry and report error', () => {
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
|
||||
const error = new Error('failed to publish entry');
|
||||
const backend = {
|
||||
publishUnpublishedEntry: jest.fn().mockRejectedValue(error),
|
||||
};
|
||||
|
||||
const store = mockStore({
|
||||
config: fromJS({}),
|
||||
collections: fromJS({
|
||||
posts: { name: 'posts' },
|
||||
}),
|
||||
});
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
|
||||
const slug = 'slug';
|
||||
|
||||
return store.dispatch(actions.publishUnpublishedEntry('posts', slug)).then(() => {
|
||||
const actions = store.getActions();
|
||||
expect(actions).toHaveLength(3);
|
||||
expect(actions[0]).toEqual({
|
||||
type: 'UNPUBLISHED_ENTRY_PUBLISH_REQUEST',
|
||||
payload: {
|
||||
collection: 'posts',
|
||||
slug,
|
||||
},
|
||||
optimist: { type: BEGIN, id: '000000000000000000000' },
|
||||
});
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'NOTIF_SEND',
|
||||
message: { key: 'ui.toast.onFailToPublishEntry', details: error },
|
||||
kind: 'danger',
|
||||
dismissAfter: 8000,
|
||||
});
|
||||
expect(actions[2]).toEqual({
|
||||
type: 'UNPUBLISHED_ENTRY_PUBLISH_FAILURE',
|
||||
payload: {
|
||||
collection: 'posts',
|
||||
slug,
|
||||
},
|
||||
optimist: { type: REVERT, id: '000000000000000000000' },
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
@ -1,5 +1,27 @@
|
||||
import { fromJS } from 'immutable';
|
||||
import { createEmptyDraftData } from '../entries';
|
||||
import { fromJS, List, Map } from 'immutable';
|
||||
import {
|
||||
createEmptyDraftData,
|
||||
retrieveLocalBackup,
|
||||
persistLocalBackup,
|
||||
getMediaAssets,
|
||||
discardDraft,
|
||||
loadLocalBackup,
|
||||
} from '../entries';
|
||||
import configureMockStore from 'redux-mock-store';
|
||||
import thunk from 'redux-thunk';
|
||||
|
||||
jest.mock('coreSrc/backend');
|
||||
jest.mock('Reducers', () => {
|
||||
return {
|
||||
getAsset: jest.fn().mockReturnValue({}),
|
||||
};
|
||||
});
|
||||
jest.mock('ValueObjects/AssetProxy');
|
||||
jest.mock('netlify-cms-lib-util');
|
||||
jest.mock('../mediaLibrary.js');
|
||||
|
||||
const middlewares = [thunk];
|
||||
const mockStore = configureMockStore(middlewares);
|
||||
|
||||
describe('entries', () => {
|
||||
describe('createEmptyDraftData', () => {
|
||||
@ -79,4 +101,166 @@ describe('entries', () => {
|
||||
expect(createEmptyDraftData(fields)).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('discardDraft', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should delete media files on discard draft', () => {
|
||||
const { deleteMedia } = require('../mediaLibrary');
|
||||
const mediaFiles = [{ draft: false }, { draft: true }];
|
||||
|
||||
deleteMedia.mockImplementation(file => ({ type: 'DELETE_MEDIA', payload: file }));
|
||||
|
||||
const store = mockStore({
|
||||
config: Map(),
|
||||
entryDraft: Map({
|
||||
mediaFiles: List(mediaFiles),
|
||||
}),
|
||||
});
|
||||
|
||||
store.dispatch(discardDraft());
|
||||
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(2);
|
||||
expect(actions[0]).toEqual({ type: 'DELETE_MEDIA', payload: { draft: true } });
|
||||
expect(actions[1]).toEqual({ type: 'DRAFT_DISCARD' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistLocalBackup', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should persist local backup with media files', () => {
|
||||
const getState = jest.fn();
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
const { getAsset } = require('Reducers');
|
||||
|
||||
const backend = {
|
||||
persistLocalDraftBackup: jest.fn((...args) => args),
|
||||
};
|
||||
|
||||
const state = { config: {} };
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
getAsset.mockImplementation((state, path) => path);
|
||||
getState.mockReturnValue(state);
|
||||
|
||||
const entry = Map();
|
||||
const collection = Map();
|
||||
const mediaFiles = [{ public_path: '/static/media/image.png' }];
|
||||
|
||||
const result = persistLocalBackup(entry, collection, mediaFiles)(null, getState);
|
||||
|
||||
expect(result).toEqual([entry, collection, mediaFiles, ['/static/media/image.png']]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('retrieveLocalBackup', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should retrieve media files with local backup', () => {
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
const { createAssetProxy } = require('ValueObjects/AssetProxy');
|
||||
const { addMediaFilesToLibrary } = require('../mediaLibrary');
|
||||
|
||||
addMediaFilesToLibrary.mockImplementation(mediaFiles => ({
|
||||
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
|
||||
payload: { mediaFiles },
|
||||
}));
|
||||
|
||||
const backend = {
|
||||
getLocalDraftBackup: jest.fn((...args) => args),
|
||||
};
|
||||
|
||||
const store = mockStore({
|
||||
config: Map(),
|
||||
});
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
createAssetProxy.mockImplementation((value, fileObj) => ({ value, fileObj }));
|
||||
|
||||
const collection = Map({
|
||||
name: 'collection',
|
||||
});
|
||||
const slug = 'slug';
|
||||
|
||||
const entry = {};
|
||||
const mediaFiles = [{ public_path: '/static/media/image.png' }];
|
||||
const assets = [{ value: 'image.png', fileObj: {} }];
|
||||
|
||||
backend.getLocalDraftBackup.mockReturnValue({ entry, mediaFiles, assets });
|
||||
|
||||
return store.dispatch(retrieveLocalBackup(collection, slug)).then(() => {
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(createAssetProxy).toHaveBeenCalledTimes(1);
|
||||
expect(createAssetProxy).toHaveBeenCalledWith(assets[0].value, assets[0].fileObj);
|
||||
expect(actions).toHaveLength(2);
|
||||
|
||||
expect(actions[0]).toEqual({
|
||||
type: 'ADD_ASSETS',
|
||||
payload: [{ value: 'image.png', fileObj: {} }],
|
||||
});
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'DRAFT_LOCAL_BACKUP_RETRIEVED',
|
||||
payload: { entry, mediaFiles },
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadLocalBackup', () => {
|
||||
it('should add backup media files to media library', () => {
|
||||
const store = mockStore({
|
||||
config: Map(),
|
||||
entryDraft: Map({
|
||||
mediaFiles: List([{ path: 'static/media.image.png' }]),
|
||||
}),
|
||||
mediaLibrary: Map({
|
||||
isLoading: false,
|
||||
}),
|
||||
});
|
||||
|
||||
store.dispatch(loadLocalBackup());
|
||||
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(2);
|
||||
expect(actions[0]).toEqual({
|
||||
type: 'DRAFT_CREATE_FROM_LOCAL_BACKUP',
|
||||
});
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
|
||||
payload: { mediaFiles: [{ path: 'static/media.image.png', draft: true }] },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMediaAssets', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should map mediaFiles to assets', () => {
|
||||
const { getAsset } = require('Reducers');
|
||||
const state = {};
|
||||
const mediaFiles = [{ public_path: 'public_path' }];
|
||||
|
||||
const asset = { name: 'asset1' };
|
||||
|
||||
getAsset.mockReturnValue(asset);
|
||||
|
||||
expect(getMediaAssets(state, mediaFiles)).toEqual([asset]);
|
||||
|
||||
expect(getAsset).toHaveBeenCalledTimes(1);
|
||||
expect(getAsset).toHaveBeenCalledWith(state, 'public_path');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,11 @@
|
||||
import configureMockStore from 'redux-mock-store';
|
||||
import thunk from 'redux-thunk';
|
||||
import { fromJS } from 'immutable';
|
||||
import { insertMedia } from '../mediaLibrary';
|
||||
import { fromJS, List, Map } from 'immutable';
|
||||
import { insertMedia, persistMedia, deleteMedia, addMediaFilesToLibrary } from '../mediaLibrary';
|
||||
|
||||
jest.mock('coreSrc/backend');
|
||||
jest.mock('ValueObjects/AssetProxy');
|
||||
jest.mock('../waitUntil');
|
||||
|
||||
const middlewares = [thunk];
|
||||
const mockStore = configureMockStore(middlewares);
|
||||
@ -110,4 +114,260 @@ describe('mediaLibrary', () => {
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
const { createAssetProxy } = require('ValueObjects/AssetProxy');
|
||||
|
||||
const backend = {
|
||||
persistMedia: jest.fn(() => ({ id: 'id' })),
|
||||
deleteMedia: jest.fn(),
|
||||
};
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
|
||||
describe('persistMedia', () => {
|
||||
global.URL = { createObjectURL: jest.fn().mockReturnValue('displayURL') };
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should persist media as draft in editorial workflow', () => {
|
||||
const store = mockStore({
|
||||
config: Map({
|
||||
publish_mode: 'editorial_workflow',
|
||||
}),
|
||||
integrations: Map(),
|
||||
mediaLibrary: Map({
|
||||
files: List(),
|
||||
}),
|
||||
entryDraft: Map({
|
||||
entry: Map({ isPersisting: false }),
|
||||
}),
|
||||
});
|
||||
|
||||
const file = new File([''], 'name.png');
|
||||
const assetProxy = { public_path: '/media/name.png' };
|
||||
createAssetProxy.mockReturnValue(assetProxy);
|
||||
|
||||
return store.dispatch(persistMedia(file)).then(() => {
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(4);
|
||||
expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'ADD_ASSET',
|
||||
payload: { public_path: '/media/name.png' },
|
||||
});
|
||||
expect(actions[2]).toEqual({
|
||||
type: 'ADD_DRAFT_ENTRY_MEDIA_FILE',
|
||||
payload: { draft: true, id: 'id', public_path: '/media/name.png' },
|
||||
});
|
||||
expect(actions[3]).toEqual({
|
||||
type: 'MEDIA_PERSIST_SUCCESS',
|
||||
payload: {
|
||||
file: { draft: true, id: 'id', displayURL: 'displayURL' },
|
||||
},
|
||||
});
|
||||
|
||||
expect(backend.persistMedia).toHaveBeenCalledTimes(1);
|
||||
expect(backend.persistMedia).toHaveBeenCalledWith(
|
||||
store.getState().config,
|
||||
assetProxy,
|
||||
true,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should not persist media as draft when not in editorial workflow', () => {
|
||||
const store = mockStore({
|
||||
config: Map({}),
|
||||
integrations: Map(),
|
||||
mediaLibrary: Map({
|
||||
files: List(),
|
||||
}),
|
||||
entryDraft: Map({
|
||||
entry: Map({ isPersisting: false }),
|
||||
}),
|
||||
});
|
||||
|
||||
const file = new File([''], 'name.png');
|
||||
const assetProxy = { public_path: '/media/name.png' };
|
||||
createAssetProxy.mockReturnValue(assetProxy);
|
||||
|
||||
return store.dispatch(persistMedia(file)).then(() => {
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(3);
|
||||
expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'ADD_ASSET',
|
||||
payload: { public_path: '/media/name.png' },
|
||||
});
|
||||
expect(actions[2]).toEqual({
|
||||
type: 'MEDIA_PERSIST_SUCCESS',
|
||||
payload: {
|
||||
file: { draft: false, id: 'id', displayURL: 'displayURL' },
|
||||
},
|
||||
});
|
||||
|
||||
expect(backend.persistMedia).toHaveBeenCalledTimes(1);
|
||||
expect(backend.persistMedia).toHaveBeenCalledWith(
|
||||
store.getState().config,
|
||||
assetProxy,
|
||||
false,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should not persist media as draft when draft is empty', () => {
|
||||
const store = mockStore({
|
||||
config: Map({
|
||||
publish_mode: 'editorial_workflow',
|
||||
}),
|
||||
integrations: Map(),
|
||||
mediaLibrary: Map({
|
||||
files: List(),
|
||||
}),
|
||||
entryDraft: Map({
|
||||
entry: Map(),
|
||||
}),
|
||||
});
|
||||
|
||||
const file = new File([''], 'name.png');
|
||||
const assetProxy = { public_path: '/media/name.png' };
|
||||
createAssetProxy.mockReturnValue(assetProxy);
|
||||
|
||||
return store.dispatch(persistMedia(file)).then(() => {
|
||||
expect(backend.persistMedia).toHaveBeenCalledTimes(1);
|
||||
expect(backend.persistMedia).toHaveBeenCalledWith(
|
||||
store.getState().config,
|
||||
assetProxy,
|
||||
false,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteMedia', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should delete non draft file', () => {
|
||||
const store = mockStore({
|
||||
config: Map({
|
||||
publish_mode: 'editorial_workflow',
|
||||
}),
|
||||
integrations: Map(),
|
||||
mediaLibrary: Map({
|
||||
files: List(),
|
||||
}),
|
||||
entryDraft: Map({
|
||||
entry: Map({ isPersisting: false }),
|
||||
}),
|
||||
});
|
||||
|
||||
const file = { name: 'name.png', id: 'id', path: 'static/media/name.png', draft: false };
|
||||
const assetProxy = { public_path: '/media/name.png' };
|
||||
createAssetProxy.mockReturnValue(assetProxy);
|
||||
|
||||
return store.dispatch(deleteMedia(file)).then(() => {
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(4);
|
||||
expect(actions[0]).toEqual({ type: 'MEDIA_DELETE_REQUEST' });
|
||||
expect(actions[1]).toEqual({
|
||||
type: 'REMOVE_ASSET',
|
||||
payload: '/media/name.png',
|
||||
});
|
||||
expect(actions[2]).toEqual({
|
||||
type: 'REMOVE_DRAFT_ENTRY_MEDIA_FILE',
|
||||
payload: { id: 'id' },
|
||||
});
|
||||
expect(actions[3]).toEqual({
|
||||
type: 'MEDIA_DELETE_SUCCESS',
|
||||
payload: { file },
|
||||
});
|
||||
|
||||
expect(backend.deleteMedia).toHaveBeenCalledTimes(1);
|
||||
expect(backend.deleteMedia).toHaveBeenCalledWith(
|
||||
store.getState().config,
|
||||
'static/media/name.png',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should not delete a draft file', () => {
|
||||
const store = mockStore({
|
||||
config: Map({
|
||||
publish_mode: 'editorial_workflow',
|
||||
}),
|
||||
integrations: Map(),
|
||||
mediaLibrary: Map({
|
||||
files: List(),
|
||||
}),
|
||||
entryDraft: Map({
|
||||
entry: Map({ isPersisting: false }),
|
||||
}),
|
||||
});
|
||||
|
||||
const file = { name: 'name.png', id: 'id', path: 'static/media/name.png', draft: true };
|
||||
const assetProxy = { public_path: '/media/name.png' };
|
||||
createAssetProxy.mockReturnValue(assetProxy);
|
||||
|
||||
return store.dispatch(deleteMedia(file)).then(() => {
|
||||
expect(backend.deleteMedia).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('addMediaFilesToLibrary', () => {
|
||||
it('should not wait if media library is loaded', () => {
|
||||
const store = mockStore({
|
||||
mediaLibrary: Map({
|
||||
isLoading: false,
|
||||
}),
|
||||
});
|
||||
|
||||
const mediaFiles = [{ id: '1' }];
|
||||
store.dispatch(addMediaFilesToLibrary(mediaFiles));
|
||||
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(1);
|
||||
expect(actions[0]).toEqual({
|
||||
payload: { mediaFiles: [{ id: '1' }] },
|
||||
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
|
||||
});
|
||||
});
|
||||
|
||||
it('should wait if media library is not loaded', () => {
|
||||
const { waitUntil } = require('../waitUntil');
|
||||
|
||||
waitUntil.mockImplementation(payload => ({ type: 'WAIT_UNTIL', ...payload }));
|
||||
|
||||
const store = mockStore({
|
||||
mediaLibrary: Map({}),
|
||||
});
|
||||
|
||||
const mediaFiles = [{ id: '1' }];
|
||||
store.dispatch(addMediaFilesToLibrary(mediaFiles));
|
||||
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(1);
|
||||
expect(actions[0]).toEqual({
|
||||
type: 'WAIT_UNTIL',
|
||||
predicate: expect.any(Function),
|
||||
run: expect.any(Function),
|
||||
});
|
||||
|
||||
expect(actions[0].predicate({ type: 'MEDIA_LOAD_SUCCESS' })).toBe(true);
|
||||
expect(actions[0].run(store.dispatch)).toEqual({
|
||||
payload: { mediaFiles: [{ id: '1' }] },
|
||||
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -3,11 +3,20 @@ import { actions as notifActions } from 'redux-notifications';
import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
import { serializeValues } from 'Lib/serializeEntryValues';
import { currentBackend } from 'coreSrc/backend';
import { getAsset, selectPublishedSlugs, selectUnpublishedSlugs } from 'Reducers';
import { selectPublishedSlugs, selectUnpublishedSlugs } from 'Reducers';
import { selectFields } from 'Reducers/collections';
import { EDITORIAL_WORKFLOW } from 'Constants/publishModes';
import { EDITORIAL_WORKFLOW_ERROR } from 'netlify-cms-lib-util';
import { loadEntry } from './entries';
import {
loadEntry,
getMediaAssets,
setDraftEntryMediaFiles,
clearDraftEntryMediaFiles,
} from './entries';
import { createAssetProxy } from 'ValueObjects/AssetProxy';
import { addAssets } from './media';
import { addMediaFilesToLibrary } from './mediaLibrary';

import ValidationErrorTypes from 'Constants/validationErrorTypes';

const { notifSend } = notifActions;
@ -230,30 +239,55 @@ function unpublishedEntryDeleteError(collection, slug, transactionID) {
*/

export function loadUnpublishedEntry(collection, slug) {
return (dispatch, getState) => {
return async (dispatch, getState) => {
const state = getState();
const backend = currentBackend(state.config);

dispatch(unpublishedEntryLoading(collection, slug));
backend
.unpublishedEntry(collection, slug)
.then(entry => dispatch(unpublishedEntryLoaded(collection, entry)))
.catch(error => {
if (error.name === EDITORIAL_WORKFLOW_ERROR && error.notUnderEditorialWorkflow) {
dispatch(unpublishedEntryRedirected(collection, slug));
dispatch(loadEntry(collection, slug));
} else {
dispatch(
notifSend({
message: {
key: 'ui.toast.onFailToLoadEntries',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
}
});

try {
const entry = await backend.unpublishedEntry(collection, slug);
const mediaFiles = entry.mediaFiles;
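// Recreate an asset proxy for each of the entry's media files so the assets are
// available in state and can be registered as drafts below.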
const assetProxies = await Promise.all(
mediaFiles.map(({ file }) => createAssetProxy(file.name, file)),
);
dispatch(addAssets(assetProxies));
dispatch(
setDraftEntryMediaFiles(
assetProxies.map((asset, index) => ({
...asset,
...mediaFiles[index],
draft: true,
})),
),
);
dispatch(
addMediaFilesToLibrary(
mediaFiles.map(file => ({
...file,
draft: true,
})),
),
);

dispatch(unpublishedEntryLoaded(collection, entry));
} catch (error) {
if (error.name === EDITORIAL_WORKFLOW_ERROR && error.notUnderEditorialWorkflow) {
dispatch(unpublishedEntryRedirected(collection, slug));
dispatch(loadEntry(collection, slug));
} else {
dispatch(
notifSend({
message: {
key: 'ui.toast.onFailToLoadEntries',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
}
}
};
}

@ -314,7 +348,7 @@ export function persistUnpublishedEntry(collection, existingUnpublishedEntry) {

const backend = currentBackend(state.config);
const transactionID = uuid();
const assetProxies = entryDraft.get('mediaFiles').map(path => getAsset(state, path));
const assetProxies = getMediaAssets(state, entryDraft.get('mediaFiles'));
const entry = entryDraft.get('entry');

/**
@ -455,7 +489,7 @@ export function publishUnpublishedEntry(collection, slug) {
dispatch(unpublishedEntryPublishRequest(collection, slug, transactionID));
return backend
.publishUnpublishedEntry(collection, slug)
.then(() => {
.then(({ mediaFiles }) => {
dispatch(
notifSend({
message: { key: 'ui.toast.entryPublished' },
@ -463,8 +497,12 @@ export function publishUnpublishedEntry(collection, slug) {
dismissAfter: 4000,
}),
);

dispatch(unpublishedEntryPublished(collection, slug, transactionID));
dispatch(loadEntry(collections.get(collection), slug));

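// The entry is now published, so its media files are no longer drafts: sync them into
// the media library and clear the draft entry's media file list.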
dispatch(addMediaFilesToLibrary(mediaFiles.map(file => ({ ...file, draft: false }))));
dispatch(clearDraftEntryMediaFiles());
})
.catch(error => {
dispatch(
@ -9,7 +9,10 @@ import { selectFields } from 'Reducers/collections';
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
import { Cursor } from 'netlify-cms-lib-util';
import { createEntry } from 'ValueObjects/Entry';
import { createAssetProxy } from 'ValueObjects/AssetProxy';
import ValidationErrorTypes from 'Constants/validationErrorTypes';
import { deleteMedia, addMediaFilesToLibrary } from './mediaLibrary';
import { addAssets } from './media';

const { notifSend } = notifActions;

@ -42,6 +45,11 @@ export const ENTRY_DELETE_REQUEST = 'ENTRY_DELETE_REQUEST';
export const ENTRY_DELETE_SUCCESS = 'ENTRY_DELETE_SUCCESS';
export const ENTRY_DELETE_FAILURE = 'ENTRY_DELETE_FAILURE';

export const ADD_DRAFT_ENTRY_MEDIA_FILE = 'ADD_DRAFT_ENTRY_MEDIA_FILE';
export const SET_DRAFT_ENTRY_MEDIA_FILES = 'SET_DRAFT_ENTRY_MEDIA_FILES';
export const REMOVE_DRAFT_ENTRY_MEDIA_FILE = 'REMOVE_DRAFT_ENTRY_MEDIA_FILE';
export const CLEAR_DRAFT_ENTRY_MEDIA_FILES = 'CLEAR_DRAFT_ENTRY_MEDIA_FILES';

/*
* Simple Action Creators (Internal)
* We still need to export them for tests
@ -185,16 +193,24 @@ export function emptyDraftCreated(entry) {
/*
* Exported simple Action Creators
*/
export function createDraftFromEntry(entry, metadata) {
export function createDraftFromEntry(entry, metadata, mediaFiles) {
return {
type: DRAFT_CREATE_FROM_ENTRY,
payload: { entry, metadata },
payload: { entry, metadata, mediaFiles },
};
}

export function discardDraft() {
return {
type: DRAFT_DISCARD,
return (dispatch, getState) => {
const state = getState();

const mediaDrafts = state.entryDraft.get('mediaFiles').filter(file => file.draft);

mediaDrafts.forEach(file => {
dispatch(deleteMedia(file));
});

dispatch({ type: DRAFT_DISCARD });
};
}

@ -223,24 +239,55 @@ export function clearFieldErrors() {
return { type: DRAFT_CLEAR_ERRORS };
}

export function localBackupRetrieved(entry) {
export function localBackupRetrieved(entry, mediaFiles) {
return {
type: DRAFT_LOCAL_BACKUP_RETRIEVED,
payload: { entry },
payload: { entry, mediaFiles },
};
}

export function loadLocalBackup() {
return {
type: DRAFT_CREATE_FROM_LOCAL_BACKUP,
return (dispatch, getState) => {
dispatch({
type: DRAFT_CREATE_FROM_LOCAL_BACKUP,
});

// only add media files to the library after loading from backup was approved
const state = getState();
const mediaFiles = state.entryDraft.get('mediaFiles').toJS();
const filesToAdd = mediaFiles.map(file => ({
...file,
draft: true,
}));
dispatch(addMediaFilesToLibrary(filesToAdd));
};
}

export function persistLocalBackup(entry, collection) {
export function addDraftEntryMediaFile(file) {
return { type: ADD_DRAFT_ENTRY_MEDIA_FILE, payload: file };
}

export function setDraftEntryMediaFiles(files) {
return { type: SET_DRAFT_ENTRY_MEDIA_FILES, payload: files };
}

export function removeDraftEntryMediaFile(file) {
return { type: REMOVE_DRAFT_ENTRY_MEDIA_FILE, payload: file };
}

export function clearDraftEntryMediaFiles() {
return { type: CLEAR_DRAFT_ENTRY_MEDIA_FILES };
}

export function persistLocalBackup(entry, collection, mediaFiles) {
return (dispatch, getState) => {
const state = getState();
const backend = currentBackend(state.config);
return backend.persistLocalDraftBackup(entry, collection);

// persist any pending related media files and assets
const assets = getMediaAssets(state, mediaFiles);

return backend.persistLocalDraftBackup(entry, collection, mediaFiles, assets);
};
}

@ -248,9 +295,16 @@ export function retrieveLocalBackup(collection, slug) {
return async (dispatch, getState) => {
const state = getState();
const backend = currentBackend(state.config);
const entry = await backend.getLocalDraftBackup(collection, slug);
const { entry, mediaFiles, assets } = await backend.getLocalDraftBackup(collection, slug);

if (entry) {
return dispatch(localBackupRetrieved(entry));
// load assets from backup
const assetProxies = await Promise.all(
assets.map(asset => createAssetProxy(asset.value, asset.fileObj)),
);
dispatch(addAssets(assetProxies));

return dispatch(localBackupRetrieved(entry, mediaFiles));
}
};
}
@ -462,6 +516,10 @@ export function createEmptyDraftData(fields, withNameKey = true) {
}, {});
}

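// Resolve each media file's public path to the asset proxy stored in state.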
export function getMediaAssets(state, mediaFiles) {
return mediaFiles.map(file => getAsset(state, file.public_path));
}

export function persistEntry(collection) {
return (dispatch, getState) => {
const state = getState();
@ -491,7 +549,7 @@ export function persistEntry(collection) {
}

const backend = currentBackend(state.config);
const assetProxies = entryDraft.get('mediaFiles').map(path => getAsset(state, path));
const assetProxies = getMediaAssets(state, entryDraft.get('mediaFiles'));
const entry = entryDraft.get('entry');

/**
@ -1,6 +1,11 @@
export const ADD_ASSETS = 'ADD_ASSETS';
export const ADD_ASSET = 'ADD_ASSET';
export const REMOVE_ASSET = 'REMOVE_ASSET';

export function addAssets(assets) {
return { type: ADD_ASSETS, payload: assets };
}

export function addAsset(assetProxy) {
return { type: ADD_ASSET, payload: assetProxy };
}
@ -2,11 +2,14 @@ import { Map } from 'immutable';
import { actions as notifActions } from 'redux-notifications';
import { resolveMediaFilename, getBlobSHA } from 'netlify-cms-lib-util';
import { currentBackend } from 'coreSrc/backend';
import { EDITORIAL_WORKFLOW } from 'Constants/publishModes';
import { createAssetProxy } from 'ValueObjects/AssetProxy';
import { selectIntegration } from 'Reducers';
import { getIntegrationProvider } from 'Integrations';
import { addAsset } from './media';
import { addAsset, removeAsset } from './media';
import { addDraftEntryMediaFile, removeDraftEntryMediaFile } from './entries';
import { sanitizeSlug } from 'Lib/urlHelper';
import { waitUntil } from './waitUntil';

const { notifSend } = notifActions;

@ -27,6 +30,7 @@ export const MEDIA_DELETE_FAILURE = 'MEDIA_DELETE_FAILURE';
export const MEDIA_DISPLAY_URL_REQUEST = 'MEDIA_DISPLAY_URL_REQUEST';
export const MEDIA_DISPLAY_URL_SUCCESS = 'MEDIA_DISPLAY_URL_SUCCESS';
export const MEDIA_DISPLAY_URL_FAILURE = 'MEDIA_DISPLAY_URL_FAILURE';
export const ADD_MEDIA_FILES_TO_LIBRARY = 'ADD_MEDIA_FILES_TO_LIBRARY';

export function createMediaLibrary(instance) {
const api = {
@ -195,14 +199,41 @@ export function persistMedia(file, opts = {}) {
const id = await getBlobSHA(file);
const assetProxy = await createAssetProxy(fileName, file, false, privateUpload);
dispatch(addAsset(assetProxy));

const entry = state.entryDraft.get('entry');
const useWorkflow = state.config.getIn(['publish_mode']) === EDITORIAL_WORKFLOW;
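// Media counts as a draft only when an entry draft exists and editorial workflow is enabled.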
const draft = entry && !entry.isEmpty() && useWorkflow;

if (!integration) {
const asset = await backend.persistMedia(state.config, assetProxy);
const asset = await backend.persistMedia(state.config, assetProxy, draft);

const assetId = asset.id || id;
const displayURL = asset.displayURL || URL.createObjectURL(file);
return dispatch(mediaPersisted({ id, displayURL, ...asset }));

if (draft) {
dispatch(
addDraftEntryMediaFile({
...asset,
id: assetId,
draft,
public_path: assetProxy.public_path,
}),
);
}

return dispatch(
mediaPersisted({
...asset,
id: assetId,
displayURL,
draft,
}),
);
}

return dispatch(
mediaPersisted(
{ id, displayURL: URL.createObjectURL(file), ...assetProxy.asset },
{ id, displayURL: URL.createObjectURL(file), ...assetProxy.asset, draft },
{ privateUpload },
),
);
@ -222,37 +253,18 @@ export function persistMedia(file, opts = {}) {

export function deleteMedia(file, opts = {}) {
const { privateUpload } = opts;
return (dispatch, getState) => {
return async (dispatch, getState) => {
const state = getState();
const backend = currentBackend(state.config);
const integration = selectIntegration(state, null, 'assetStore');
if (integration) {
const provider = getIntegrationProvider(state.integrations, backend.getToken, integration);
dispatch(mediaDeleting());
return provider
.delete(file.id)
.then(() => {
return dispatch(mediaDeleted(file, { privateUpload }));
})
.catch(error => {
console.error(error);
dispatch(
notifSend({
message: `Failed to delete media: ${error.message}`,
kind: 'danger',
dismissAfter: 8000,
}),
);
return dispatch(mediaDeleteFailed({ privateUpload }));
});
}
dispatch(mediaDeleting());
return backend
.deleteMedia(state.config, file.path)
.then(() => {
return dispatch(mediaDeleted(file));
})
.catch(error => {

try {
await provider.delete(file.id);
return dispatch(mediaDeleted(file, { privateUpload }));
} catch (error) {
console.error(error);
dispatch(
notifSend({
@ -261,8 +273,32 @@ export function deleteMedia(file, opts = {}) {
dismissAfter: 8000,
}),
);
return dispatch(mediaDeleteFailed());
});
return dispatch(mediaDeleteFailed({ privateUpload }));
}
}
dispatch(mediaDeleting());

try {
const assetProxy = await createAssetProxy(file.name, file);
dispatch(removeAsset(assetProxy.public_path));
dispatch(removeDraftEntryMediaFile({ id: file.id }));

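// Draft media files are not deleted from the backend here; only their local asset and
// draft entry references are removed.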
if (!file.draft) {
await backend.deleteMedia(state.config, file.path);
}

return dispatch(mediaDeleted(file));
} catch (error) {
console.error(error);
dispatch(
notifSend({
message: `Failed to delete media: ${error.message}`,
kind: 'danger',
dismissAfter: 8000,
}),
);
return dispatch(mediaDeleteFailed());
}
};
}

@ -335,6 +371,27 @@ export function mediaPersisted(asset, opts = {}) {
};
}

export function addMediaFilesToLibrary(mediaFiles) {
return (dispatch, getState) => {
const state = getState();
const action = {
type: ADD_MEDIA_FILES_TO_LIBRARY,
payload: { mediaFiles },
};
// add media files to library only after the library finished loading
if (state.mediaLibrary.get('isLoading') === false) {
dispatch(action);
} else {
dispatch(
waitUntil({
predicate: ({ type }) => type === MEDIA_LOAD_SUCCESS,
run: dispatch => dispatch(action),
}),
);
}
};
}

export function mediaPersistFailed(error, opts = {}) {
const { privateUpload } = opts;
return { type: MEDIA_PERSIST_FAILURE, payload: { privateUpload } };
packages/netlify-cms-core/src/actions/waitUntil.js
@ -0,0 +1,9 @@
import { WAIT_UNTIL_ACTION } from '../redux/middleware/waitUntilAction';

export function waitUntil({ predicate, run }) {
return {
type: WAIT_UNTIL_ACTION,
predicate,
run,
};
}
@ -402,22 +402,31 @@ export class Backend {
const key = getEntryBackupKey(collection.get('name'), slug);
const backup = await localForage.getItem(key);
if (!backup || !backup.raw.trim()) {
return;
return {};
}
const { raw, path } = backup;
const { raw, path, mediaFiles = [], assets = [] } = backup;

const label = selectFileEntryLabel(collection, slug);
return this.entryWithFormat(collection, slug)(
const entry = this.entryWithFormat(collection, slug)(
createEntry(collection.get('name'), slug, path, { raw, label }),
);

return { entry, mediaFiles, assets };
}

async persistLocalDraftBackup(entry, collection) {
async persistLocalDraftBackup(entry, collection, mediaFiles, assets) {
const key = getEntryBackupKey(collection.get('name'), entry.get('slug'));
const raw = this.entryToRaw(collection, entry);
if (!raw.trim()) {
return;
}
await localForage.setItem(key, { raw, path: entry.get('path') });

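// Store media file metadata and assets with the backup so the draft's media state can be restored.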
await localForage.setItem(key, {
raw,
path: entry.get('path'),
mediaFiles: mediaFiles.toJS(),
assets: assets.toJS(),
});
return localForage.setItem(getEntryBackupKey(), raw);
}

@ -511,6 +520,7 @@ export class Backend {
isModification: loadedEntry.isModification,
});
entry.metaData = loadedEntry.metaData;
entry.mediaFiles = loadedEntry.mediaFiles;
return entry;
})
.then(this.entryWithFormat(collection, slug));
@ -663,7 +673,7 @@ export class Backend {
return this.implementation.persistEntry(entryObj, MediaFiles, opts).then(() => entryObj.slug);
}

async persistMedia(config, file) {
async persistMedia(config, file, draft) {
const user = await this.currentUser();
const options = {
commitMessage: commitMessageFormatter(
@ -676,6 +686,7 @@
},
user.useOpenAuthoring,
),
draft,
};
return this.implementation.persistMedia(file, options);
}
@ -41,7 +41,7 @@ const navigateToNewEntry = collectionName => navigateCollection(`${collectionNam
|
||||
const navigateToEntry = (collectionName, slug) =>
|
||||
navigateCollection(`${collectionName}/entries/${slug}`);
|
||||
|
||||
class Editor extends React.Component {
|
||||
export class Editor extends React.Component {
|
||||
static propTypes = {
|
||||
boundGetAsset: PropTypes.func.isRequired,
|
||||
changeDraftField: PropTypes.func.isRequired,
|
||||
@ -79,10 +79,10 @@ class Editor extends React.Component {
|
||||
}),
|
||||
hasChanged: PropTypes.bool,
|
||||
t: PropTypes.func.isRequired,
|
||||
retrieveLocalBackup: PropTypes.func,
|
||||
localBackup: PropTypes.bool,
|
||||
retrieveLocalBackup: PropTypes.func.isRequired,
|
||||
localBackup: ImmutablePropTypes.map,
|
||||
loadLocalBackup: PropTypes.func,
|
||||
persistLocalBackup: PropTypes.func,
|
||||
persistLocalBackup: PropTypes.func.isRequired,
|
||||
deleteLocalBackup: PropTypes.func,
|
||||
};
|
||||
|
||||
@ -190,7 +190,11 @@ class Editor extends React.Component {
|
||||
}
|
||||
|
||||
if (this.props.hasChanged) {
|
||||
this.createBackup(this.props.entryDraft.get('entry'), this.props.collection);
|
||||
this.createBackup(
|
||||
this.props.entryDraft.get('entry'),
|
||||
this.props.collection,
|
||||
this.props.entryDraft.get('mediaFiles'),
|
||||
);
|
||||
}
|
||||
|
||||
if (prevProps.entry === this.props.entry) return;
|
||||
@ -205,7 +209,8 @@ class Editor extends React.Component {
|
||||
const values = deserializeValues(entry.get('data'), fields);
|
||||
const deserializedEntry = entry.set('data', values);
|
||||
const fieldsMetaData = this.props.entryDraft && this.props.entryDraft.get('fieldsMetaData');
|
||||
this.createDraft(deserializedEntry, fieldsMetaData);
|
||||
const mediaFiles = this.props.entryDraft && this.props.entryDraft.get('mediaFiles');
|
||||
this.createDraft(deserializedEntry, fieldsMetaData, mediaFiles);
|
||||
} else if (newEntry) {
|
||||
prevProps.createEmptyDraft(collection);
|
||||
}
|
||||
@ -217,12 +222,12 @@ class Editor extends React.Component {
|
||||
window.removeEventListener('beforeunload', this.exitBlocker);
|
||||
}
|
||||
|
||||
createBackup = debounce(function(entry, collection) {
|
||||
this.props.persistLocalBackup(entry, collection);
|
||||
createBackup = debounce(function(entry, collection, mediaFiles) {
|
||||
this.props.persistLocalBackup(entry, collection, mediaFiles);
|
||||
}, 2000);
|
||||
|
||||
createDraft = (entry, metadata) => {
|
||||
if (entry) this.props.createDraftFromEntry(entry, metadata);
|
||||
createDraft = (entry, metadata, mediaFiles) => {
|
||||
if (entry) this.props.createDraftFromEntry(entry, metadata, mediaFiles);
|
||||
};
|
||||
|
||||
handleChangeStatus = newStatusName => {
|
||||
|
@ -0,0 +1,247 @@
|
||||
import React from 'react';
|
||||
import { Editor } from '../Editor';
|
||||
import { render } from '@testing-library/react';
|
||||
import { fromJS } from 'immutable';
|
||||
|
||||
jest.mock('lodash/debounce', () => {
|
||||
const flush = jest.fn();
|
||||
return func => {
|
||||
func.flush = flush;
|
||||
return func;
|
||||
};
|
||||
});
|
||||
jest.mock('../EditorInterface', () => props => <mock-editor-interface {...props} />);
|
||||
jest.mock('netlify-cms-ui-default', () => {
|
||||
return {
|
||||
// eslint-disable-next-line react/display-name
|
||||
Loader: props => <mock-loader {...props} />,
|
||||
};
|
||||
});
|
||||
jest.mock('Routing/history');
|
||||
|
||||
describe('Editor', () => {
|
||||
const props = {
|
||||
boundGetAsset: jest.fn(),
|
||||
changeDraftField: jest.fn(),
|
||||
changeDraftFieldValidation: jest.fn(),
|
||||
collection: fromJS({ name: 'posts' }),
|
||||
createDraftFromEntry: jest.fn(),
|
||||
createEmptyDraft: jest.fn(),
|
||||
discardDraft: jest.fn(),
|
||||
entry: fromJS({}),
|
||||
entryDraft: fromJS({}),
|
||||
loadEntry: jest.fn(),
|
||||
persistEntry: jest.fn(),
|
||||
deleteEntry: jest.fn(),
|
||||
showDelete: true,
|
||||
fields: fromJS([]),
|
||||
slug: 'slug',
|
||||
newEntry: true,
|
||||
updateUnpublishedEntryStatus: jest.fn(),
|
||||
publishUnpublishedEntry: jest.fn(),
|
||||
deleteUnpublishedEntry: jest.fn(),
|
||||
logoutUser: jest.fn(),
|
||||
loadEntries: jest.fn(),
|
||||
deployPreview: fromJS({}),
|
||||
loadDeployPreview: jest.fn(),
|
||||
user: fromJS({}),
|
||||
t: jest.fn(key => key),
|
||||
localBackup: fromJS({}),
|
||||
retrieveLocalBackup: jest.fn(),
|
||||
persistLocalBackup: jest.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should render loader when entryDraft is null', () => {
|
||||
// suppress prop type error
|
||||
jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
const { asFragment } = render(<Editor {...props} entryDraft={null} />);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
expect(console.error).toHaveBeenCalledTimes(1);
|
||||
expect(console.error).toHaveBeenCalledWith(
|
||||
'Warning: Failed prop type: Required prop `entryDraft` was not specified in `Editor`.\n in Editor',
|
||||
);
|
||||
});
|
||||
|
||||
it('should render loader when entryDraft entry is undefined', () => {
|
||||
const { asFragment } = render(<Editor {...props} entryDraft={fromJS({})} />);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render loader when entry is fetching', () => {
|
||||
const { asFragment } = render(
|
||||
<Editor {...props} entryDraft={fromJS({ entry: {} })} entry={fromJS({ isFetching: true })} />,
|
||||
);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render editor interface when entry is not fetching', () => {
|
||||
const { asFragment } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should call retrieveLocalBackup on mount', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.retrieveLocalBackup).toHaveBeenCalledTimes(1);
|
||||
expect(props.retrieveLocalBackup).toHaveBeenCalledWith(props.collection, props.slug);
|
||||
});
|
||||
|
||||
it('should create new draft on new entry when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
newEntry={true}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.createEmptyDraft).toHaveBeenCalledTimes(1);
|
||||
expect(props.createEmptyDraft).toHaveBeenCalledWith(props.collection);
|
||||
expect(props.loadEntry).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('should load entry on existing entry when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
newEntry={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.createEmptyDraft).toHaveBeenCalledTimes(0);
|
||||
expect(props.loadEntry).toHaveBeenCalledTimes(1);
|
||||
expect(props.loadEntry).toHaveBeenCalledWith(props.collection, 'slug');
|
||||
});
|
||||
|
||||
it('should load entries when entries are not loaded when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
collectionEntriesLoaded={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.loadEntries).toHaveBeenCalledTimes(1);
|
||||
expect(props.loadEntries).toHaveBeenCalledWith(props.collection);
|
||||
});
|
||||
|
||||
it('should not load entries when entries are loaded when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
collectionEntriesLoaded={true}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.loadEntries).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('should flush debounce createBackup, discard draft and remove exit blocker on unmount', () => {
|
||||
window.removeEventListener = jest.fn();
|
||||
const debounce = require('lodash/debounce');
|
||||
|
||||
const flush = debounce({}).flush;
|
||||
const { unmount } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' }, hasChanged: true })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
jest.clearAllMocks();
|
||||
unmount();
|
||||
|
||||
expect(flush).toHaveBeenCalledTimes(1);
|
||||
expect(props.discardDraft).toHaveBeenCalledTimes(1);
|
||||
expect(window.removeEventListener).toHaveBeenCalledWith('beforeunload', expect.any(Function));
|
||||
|
||||
const callback = window.removeEventListener.mock.calls.find(
|
||||
call => call[0] === 'beforeunload',
|
||||
)[1];
|
||||
|
||||
const event = {};
|
||||
callback(event);
|
||||
expect(event).toEqual({ returnValue: 'editor.editor.onLeavePage' });
|
||||
});
|
||||
|
||||
it('should persist backup when changed', () => {
|
||||
const { rerender } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
jest.clearAllMocks();
|
||||
rerender(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' }, mediaFiles: [{ id: '1' }] })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
hasChanged={true}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.persistLocalBackup).toHaveBeenCalledTimes(1);
|
||||
expect(props.persistLocalBackup).toHaveBeenCalledWith(
|
||||
fromJS({ slug: 'slug' }),
|
||||
props.collection,
|
||||
fromJS([{ id: '1' }]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should create draft from entry when done fetching', () => {
|
||||
const { rerender } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
jest.clearAllMocks();
|
||||
rerender(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({
|
||||
entry: { slug: 'slug' },
|
||||
mediaFiles: [{ id: '1' }],
|
||||
fieldsMetaData: {},
|
||||
})}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.createDraftFromEntry).toHaveBeenCalledTimes(1);
|
||||
expect(props.createDraftFromEntry).toHaveBeenCalledWith(
|
||||
fromJS({ isFetching: false, data: {} }),
|
||||
fromJS({}),
|
||||
fromJS([{ id: '1' }]),
|
||||
);
|
||||
});
|
||||
});
|
@ -0,0 +1,45 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`Editor should render editor interface when entry is not fetching 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-editor-interface
|
||||
collection="Map { \\"name\\": \\"posts\\" }"
|
||||
deploypreview="Map {}"
|
||||
entry="Map { \\"slug\\": \\"slug\\" }"
|
||||
fields="List []"
|
||||
isnewentry="true"
|
||||
showdelete="true"
|
||||
user="Map {}"
|
||||
/>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Editor should render loader when entry is fetching 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-loader
|
||||
active="true"
|
||||
>
|
||||
editor.editor.loadingEntry
|
||||
</mock-loader>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Editor should render loader when entryDraft entry is undefined 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-loader
|
||||
active="true"
|
||||
>
|
||||
editor.editor.loadingEntry
|
||||
</mock-loader>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Editor should render loader when entryDraft is null 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-loader
|
||||
active="true"
|
||||
>
|
||||
editor.editor.loadingEntry
|
||||
</mock-loader>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -118,7 +118,7 @@ class MediaLibrary extends React.Component {
|
||||
toTableData = files => {
|
||||
const tableData =
|
||||
files &&
|
||||
files.map(({ key, name, id, size, queryOrder, url, urlIsPublicPath, displayURL }) => {
|
||||
files.map(({ key, name, id, size, queryOrder, url, urlIsPublicPath, displayURL, draft }) => {
|
||||
const ext = fileExtension(name).toLowerCase();
|
||||
return {
|
||||
key,
|
||||
@ -130,6 +130,7 @@ class MediaLibrary extends React.Component {
|
||||
url,
|
||||
urlIsPublicPath,
|
||||
displayURL,
|
||||
draft,
|
||||
isImage: IMAGE_EXTENSIONS.includes(ext),
|
||||
isViewableImage: IMAGE_EXTENSIONS_VIEWABLE.includes(ext),
|
||||
};
|
||||
|
@ -27,6 +27,7 @@ const CardImageWrapper = styled.div`
|
||||
${effects.checkerboard};
|
||||
${shadows.inset};
|
||||
border-bottom: solid ${lengths.borderWidth} ${colors.textFieldBorder};
|
||||
position: relative;
|
||||
`;
|
||||
|
||||
const CardImage = styled.img`
|
||||
@ -53,6 +54,14 @@ const CardText = styled.p`
|
||||
line-height: 1.3 !important;
|
||||
`;
|
||||
|
||||
const DraftText = styled.p`
|
||||
color: ${colors.mediaDraftText};
|
||||
background-color: ${colors.mediaDraftBackground};
|
||||
position: absolute;
|
||||
padding: 8px;
|
||||
border-radius: ${lengths.borderRadius} 0px ${lengths.borderRadius} 0;
|
||||
`;
|
||||
|
||||
class MediaLibraryCard extends React.Component {
|
||||
render() {
|
||||
const {
|
||||
@ -60,11 +69,13 @@ class MediaLibraryCard extends React.Component {
|
||||
displayURL,
|
||||
text,
|
||||
onClick,
|
||||
draftText,
|
||||
width,
|
||||
margin,
|
||||
isPrivate,
|
||||
type,
|
||||
isViewableImage,
|
||||
isDraft,
|
||||
} = this.props;
|
||||
const url = displayURL.get('url');
|
||||
return (
|
||||
@ -77,7 +88,12 @@ class MediaLibraryCard extends React.Component {
|
||||
isPrivate={isPrivate}
|
||||
>
|
||||
<CardImageWrapper>
|
||||
{url && isViewableImage ? <CardImage src={url} /> : <CardFileIcon>{type}</CardFileIcon>}
|
||||
{isDraft ? <DraftText data-testid="draft-text">{draftText}</DraftText> : null}
|
||||
{url && isViewableImage ? (
|
||||
<CardImage src={url} />
|
||||
) : (
|
||||
<CardFileIcon data-testid="card-file-icon">{type}</CardFileIcon>
|
||||
)}
|
||||
</CardImageWrapper>
|
||||
<CardText>{text}</CardText>
|
||||
</Card>
|
||||
@ -96,12 +112,14 @@ MediaLibraryCard.propTypes = {
|
||||
displayURL: ImmutablePropTypes.map.isRequired,
|
||||
text: PropTypes.string.isRequired,
|
||||
onClick: PropTypes.func.isRequired,
|
||||
draftText: PropTypes.string.isRequired,
|
||||
width: PropTypes.string.isRequired,
|
||||
margin: PropTypes.string.isRequired,
|
||||
isPrivate: PropTypes.bool,
|
||||
type: PropTypes.string,
|
||||
isViewableImage: PropTypes.bool.isRequired,
|
||||
loadDisplayURL: PropTypes.func.isRequired,
|
||||
isDraft: PropTypes.bool,
|
||||
};
|
||||
|
||||
export default MediaLibraryCard;
|
||||
|
@ -32,6 +32,7 @@ const MediaLibraryCardGrid = ({
|
||||
onLoadMore,
|
||||
isPaginating,
|
||||
paginatingMessage,
|
||||
cardDraftText,
|
||||
cardWidth,
|
||||
cardMargin,
|
||||
isPrivate,
|
||||
@ -46,6 +47,8 @@ const MediaLibraryCardGrid = ({
|
||||
isSelected={isSelectedFile(file)}
|
||||
text={file.name}
|
||||
onClick={() => onAssetClick(file)}
|
||||
isDraft={file.draft}
|
||||
draftText={cardDraftText}
|
||||
width={cardWidth}
|
||||
margin={cardMargin}
|
||||
isPrivate={isPrivate}
|
||||
@ -74,6 +77,7 @@ MediaLibraryCardGrid.propTypes = {
|
||||
type: PropTypes.string.isRequired,
|
||||
url: PropTypes.string,
|
||||
urlIsPublicPath: PropTypes.bool,
|
||||
draft: PropTypes.bool,
|
||||
}),
|
||||
).isRequired,
|
||||
isSelectedFile: PropTypes.func.isRequired,
|
||||
@ -82,6 +86,7 @@ MediaLibraryCardGrid.propTypes = {
|
||||
onLoadMore: PropTypes.func.isRequired,
|
||||
isPaginating: PropTypes.bool,
|
||||
paginatingMessage: PropTypes.string,
|
||||
cardDraftText: PropTypes.string.isRequired,
|
||||
cardWidth: PropTypes.string.isRequired,
|
||||
cardMargin: PropTypes.string.isRequired,
|
||||
loadDisplayURL: PropTypes.func.isRequired,
|
||||
|
@ -170,6 +170,7 @@ const MediaLibraryModal = ({
|
||||
onLoadMore={handleLoadMore}
|
||||
isPaginating={isPaginating}
|
||||
paginatingMessage={t('mediaLibrary.mediaLibraryModal.loading')}
|
||||
cardDraftText={t('mediaLibrary.mediaLibraryCard.draft')}
|
||||
cardWidth={cardWidth}
|
||||
cardMargin={cardMargin}
|
||||
isPrivate={privateUpload}
|
||||
|
@ -0,0 +1,47 @@
|
||||
import React from 'react';
|
||||
import { Map } from 'immutable';
|
||||
import MediaLibraryCard from '../MediaLibraryCard';
|
||||
import { render } from '@testing-library/react';
|
||||
|
||||
describe('MediaLibraryCard', () => {
|
||||
const props = {
|
||||
displayURL: Map({ url: 'url' }),
|
||||
text: 'image.png',
|
||||
onClick: jest.fn(),
|
||||
draftText: 'Draft',
|
||||
width: '100px',
|
||||
margin: '10px',
|
||||
isViewableImage: true,
|
||||
loadDisplayURL: jest.fn(),
|
||||
};
|
||||
|
||||
it('should match snapshot for non draft image', () => {
|
||||
const { asFragment, queryByTestId } = render(<MediaLibraryCard {...props} />);
|
||||
|
||||
expect(queryByTestId('draft-text')).toBeNull();
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should match snapshot for draft image', () => {
|
||||
const { asFragment, getByTestId } = render(<MediaLibraryCard {...props} isDraft={true} />);
|
||||
expect(getByTestId('draft-text')).toHaveTextContent('Draft');
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should match snapshot for non viewable image', () => {
|
||||
const { asFragment, getByTestId } = render(
|
||||
<MediaLibraryCard {...props} isViewableImage={false} type="Not Viewable" />,
|
||||
);
|
||||
expect(getByTestId('card-file-icon')).toHaveTextContent('Not Viewable');
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should call loadDisplayURL on mount when url is empty', () => {
|
||||
const loadDisplayURL = jest.fn();
|
||||
render(
|
||||
<MediaLibraryCard {...props} loadDisplayURL={loadDisplayURL} displayURL={Map({ url: '' })} />,
|
||||
);
|
||||
|
||||
expect(loadDisplayURL).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
@ -0,0 +1,211 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`MediaLibraryCard should match snapshot for draft image 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-8 {
|
||||
width: 100px;
|
||||
height: 240px;
|
||||
margin: 10px;
|
||||
border: solid 2px #dfdfe3;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.emotion-8:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
height: 162px;
|
||||
background-color: #f2f2f2;
|
||||
background-size: 16px 16px;
|
||||
background-position: 0 0,8px 8px;
|
||||
background-image: linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 ) , linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 );
|
||||
box-shadow: inset 0 0 4px rgba(68,74,87,0.3);
|
||||
border-bottom: solid 2px #dfdfe3;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
width: 100%;
|
||||
height: 160px;
|
||||
object-fit: contain;
|
||||
border-radius: 2px 2px 0 0;
|
||||
}
|
||||
|
||||
.emotion-6 {
|
||||
color: #798291;
|
||||
padding: 8px;
|
||||
margin-top: 20px;
|
||||
overflow-wrap: break-word;
|
||||
line-height: 1.3 !important;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
color: #70399f;
|
||||
background-color: #f6d8ff;
|
||||
position: absolute;
|
||||
padding: 8px;
|
||||
border-radius: 5px 0px 5px 0;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-8 emotion-9"
|
||||
tabindex="-1"
|
||||
width="100px"
|
||||
>
|
||||
<div
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<p
|
||||
class="emotion-0 emotion-1"
|
||||
data-testid="draft-text"
|
||||
>
|
||||
Draft
|
||||
</p>
|
||||
<img
|
||||
class="emotion-2 emotion-3"
|
||||
src="url"
|
||||
/>
|
||||
</div>
|
||||
<p
|
||||
class="emotion-6 emotion-7"
|
||||
>
|
||||
image.png
|
||||
</p>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`MediaLibraryCard should match snapshot for non draft image 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
width: 100px;
|
||||
height: 240px;
|
||||
margin: 10px;
|
||||
border: solid 2px #dfdfe3;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.emotion-6:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
height: 162px;
|
||||
background-color: #f2f2f2;
|
||||
background-size: 16px 16px;
|
||||
background-position: 0 0,8px 8px;
|
||||
background-image: linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 ) , linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 );
|
||||
box-shadow: inset 0 0 4px rgba(68,74,87,0.3);
|
||||
border-bottom: solid 2px #dfdfe3;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
width: 100%;
|
||||
height: 160px;
|
||||
object-fit: contain;
|
||||
border-radius: 2px 2px 0 0;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
color: #798291;
|
||||
padding: 8px;
|
||||
margin-top: 20px;
|
||||
overflow-wrap: break-word;
|
||||
line-height: 1.3 !important;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-6 emotion-7"
|
||||
tabindex="-1"
|
||||
width="100px"
|
||||
>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<img
|
||||
class="emotion-0 emotion-1"
|
||||
src="url"
|
||||
/>
|
||||
</div>
|
||||
<p
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
image.png
|
||||
</p>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`MediaLibraryCard should match snapshot for non viewable image 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
width: 100px;
|
||||
height: 240px;
|
||||
margin: 10px;
|
||||
border: solid 2px #dfdfe3;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.emotion-6:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
height: 162px;
|
||||
background-color: #f2f2f2;
|
||||
background-size: 16px 16px;
|
||||
background-position: 0 0,8px 8px;
|
||||
background-image: linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 ) , linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 );
|
||||
box-shadow: inset 0 0 4px rgba(68,74,87,0.3);
|
||||
border-bottom: solid 2px #dfdfe3;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
color: #798291;
|
||||
padding: 8px;
|
||||
margin-top: 20px;
|
||||
overflow-wrap: break-word;
|
||||
line-height: 1.3 !important;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
width: 100%;
|
||||
height: 160px;
|
||||
object-fit: cover;
|
||||
border-radius: 2px 2px 0 0;
|
||||
padding: 1em;
|
||||
font-size: 3em;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-6 emotion-7"
|
||||
tabindex="-1"
|
||||
width="100px"
|
||||
>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
data-testid="card-file-icon"
|
||||
>
|
||||
Not Viewable
|
||||
</div>
|
||||
</div>
|
||||
<p
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
image.png
|
||||
</p>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -2,7 +2,7 @@ import { Map, List, fromJS } from 'immutable';
import * as actions from 'Actions/entries';
import reducer from '../entryDraft';

let initialState = Map({
const initialState = Map({
  entry: Map(),
  mediaFiles: List(),
  fieldsMetaData: Map(),
@ -62,6 +62,8 @@ describe('entryDraft reducer', () => {
  });

  describe('persisting', () => {
    let initialState;

    beforeEach(() => {
      initialState = fromJS({
        entities: {
@ -111,4 +113,95 @@ describe('entryDraft reducer', () => {
      expect(newState.getIn(['entry', 'isPersisting'])).toBeUndefined();
    });
  });

  describe('REMOVE_DRAFT_ENTRY_MEDIA_FILE', () => {
    it('should remove a media file', () => {
      const actualState = reducer(
        initialState.set('mediaFiles', List([{ id: '1' }, { id: '2' }])),
        actions.removeDraftEntryMediaFile({ id: '1' }),
      );

      expect(actualState.toJS()).toEqual({
        entry: {},
        mediaFiles: [{ id: '2' }],
        fieldsMetaData: {},
        fieldsErrors: {},
        hasChanged: false,
      });
    });
  });

  describe('ADD_DRAFT_ENTRY_MEDIA_FILE', () => {
    it('should overwrite an existing media file', () => {
      const actualState = reducer(
        initialState.set('mediaFiles', List([{ id: '1', name: 'old' }])),
        actions.addDraftEntryMediaFile({ id: '1', name: 'new' }),
      );

      expect(actualState.toJS()).toEqual({
        entry: {},
        mediaFiles: [{ id: '1', name: 'new' }],
        fieldsMetaData: {},
        fieldsErrors: {},
        hasChanged: false,
      });
    });
  });

  describe('SET_DRAFT_ENTRY_MEDIA_FILES', () => {
    it('should set draft entry media files', () => {
      const actualState = reducer(
        initialState,
        actions.setDraftEntryMediaFiles([{ id: '1' }, { id: '2' }]),
      );

      expect(actualState.toJS()).toEqual({
        entry: {},
        mediaFiles: [{ id: '1' }, { id: '2' }],
        fieldsMetaData: {},
        fieldsErrors: {},
        hasChanged: false,
      });
    });
  });

  describe('DRAFT_CREATE_FROM_LOCAL_BACKUP', () => {
    it('should create draft from local backup', () => {
      const localBackup = Map({ entry: fromJS(entry), mediaFiles: List([{ id: '1' }]) });

      const actualState = reducer(initialState.set('localBackup', localBackup), {
        type: actions.DRAFT_CREATE_FROM_LOCAL_BACKUP,
      });
      expect(actualState.toJS()).toEqual({
        entry: {
          ...entry,
          newRecord: false,
        },
        mediaFiles: [{ id: '1' }],
        fieldsMetaData: {},
        fieldsErrors: {},
        hasChanged: true,
      });
    });
  });

  describe('DRAFT_LOCAL_BACKUP_RETRIEVED', () => {
    it('should set local backup', () => {
      const mediaFiles = [{ id: '1' }];

      const actualState = reducer(initialState, actions.localBackupRetrieved(entry, mediaFiles));

      expect(actualState.toJS()).toEqual({
        entry: {},
        mediaFiles: [],
        fieldsMetaData: {},
        fieldsErrors: {},
        hasChanged: false,
        localBackup: {
          entry,
          mediaFiles: [{ id: '1' }],
        },
      });
    });
  });
});
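The tests above exercise the new draft media-file actions against the entryDraft reducer. For orientation, here is a minimal sketch of what the corresponding action creators in Actions/entries are assumed to look like; the shapes are inferred from the payloads used in the tests and in the reducer further down, not copied from this diff.

// Assumed action-creator shapes (sketch only, inferred from usage in this PR).
export const ADD_DRAFT_ENTRY_MEDIA_FILE = 'ADD_DRAFT_ENTRY_MEDIA_FILE';
export const REMOVE_DRAFT_ENTRY_MEDIA_FILE = 'REMOVE_DRAFT_ENTRY_MEDIA_FILE';
export const SET_DRAFT_ENTRY_MEDIA_FILES = 'SET_DRAFT_ENTRY_MEDIA_FILES';

// `file` is a plain object such as { id: '1', name: 'new' }.
export const addDraftEntryMediaFile = file => ({
  type: ADD_DRAFT_ENTRY_MEDIA_FILE,
  payload: file,
});

export const removeDraftEntryMediaFile = ({ id }) => ({
  type: REMOVE_DRAFT_ENTRY_MEDIA_FILE,
  payload: { id },
});

export const setDraftEntryMediaFiles = files => ({
  type: SET_DRAFT_ENTRY_MEDIA_FILES,
  payload: files,
});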
@ -0,0 +1,67 @@
import { Map } from 'immutable';
import { ADD_MEDIA_FILES_TO_LIBRARY, mediaDeleted } from 'Actions/mediaLibrary';
import mediaLibrary from '../mediaLibrary';

jest.mock('uuid/v4');

describe('mediaLibrary', () => {
  const uuid = require('uuid/v4');

  it('should add media files to library', () => {
    uuid.mockReturnValue('newKey');

    expect(
      mediaLibrary(
        Map({
          files: [
            { sha: 'old', path: 'path', key: 'key1' },
            { sha: 'sha', path: 'some-other-pas', key: 'key2' },
          ],
        }),
        {
          type: ADD_MEDIA_FILES_TO_LIBRARY,
          payload: { mediaFiles: [{ sha: 'new', path: 'path' }] },
        },
      ),
    ).toEqual(
      Map({
        files: [
          { sha: 'new', path: 'path', key: 'newKey' },
          { sha: 'sha', path: 'some-other-pas', key: 'key2' },
        ],
      }),
    );
  });

  it('should remove media file by key', () => {
    expect(
      mediaLibrary(
        Map({
          files: [{ key: 'key1' }, { key: 'key2' }],
        }),
        mediaDeleted({ key: 'key1' }),
      ),
    ).toEqual(
      Map({
        isDeleting: false,
        files: [{ key: 'key2' }],
      }),
    );
  });

  it('should remove media file by id', () => {
    expect(
      mediaLibrary(
        Map({
          files: [{ id: 'id1' }, { id: 'id2' }],
        }),
        mediaDeleted({ id: 'id1' }),
      ),
    ).toEqual(
      Map({
        isDeleting: false,
        files: [{ id: 'id2' }],
      }),
    );
  });
});
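The first test above depends on Jest's module auto-mocking to make key generation deterministic. In isolation the pattern looks roughly like this (hypothetical standalone test, not part of the diff):

// uuid/v4 exports a function, so jest.mock() replaces the module with a jest.fn()
// whose return value can be stubbed per test.
jest.mock('uuid/v4');
const uuid = require('uuid/v4');

it('returns the stubbed key', () => {
  uuid.mockReturnValue('newKey');
  expect(uuid()).toBe('newKey');
});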
@ -0,0 +1,25 @@
import { Map } from 'immutable';
import { addAssets, addAsset, removeAsset } from 'Actions/media';
import reducer from '../medias';

jest.mock('ValueObjects/AssetProxy');

describe('medias', () => {
  it('should add assets', () => {
    expect(reducer(Map(), addAssets([{ public_path: 'public_path' }]))).toEqual(
      Map({ public_path: { public_path: 'public_path' } }),
    );
  });

  it('should add asset', () => {
    expect(reducer(Map(), addAsset({ public_path: 'public_path' }))).toEqual(
      Map({ public_path: { public_path: 'public_path' } }),
    );
  });

  it('should remove asset', () => {
    expect(
      reducer(Map({ public_path: { public_path: 'public_path' } }), removeAsset('public_path')),
    ).toEqual(Map());
  });
});
@ -12,13 +12,16 @@ import {
  ENTRY_PERSIST_SUCCESS,
  ENTRY_PERSIST_FAILURE,
  ENTRY_DELETE_SUCCESS,
  ADD_DRAFT_ENTRY_MEDIA_FILE,
  SET_DRAFT_ENTRY_MEDIA_FILES,
  REMOVE_DRAFT_ENTRY_MEDIA_FILE,
  CLEAR_DRAFT_ENTRY_MEDIA_FILES,
} from 'Actions/entries';
import {
  UNPUBLISHED_ENTRY_PERSIST_REQUEST,
  UNPUBLISHED_ENTRY_PERSIST_SUCCESS,
  UNPUBLISHED_ENTRY_PERSIST_FAILURE,
} from 'Actions/editorialWorkflow';
import { ADD_ASSET, REMOVE_ASSET } from 'Actions/media';

const initialState = Map({
  entry: Map(),
@ -35,7 +38,7 @@ const entryDraftReducer = (state = Map(), action) => {
      return state.withMutations(state => {
        state.set('entry', action.payload.entry);
        state.setIn(['entry', 'newRecord'], false);
        state.set('mediaFiles', List());
        state.set('mediaFiles', action.payload.mediaFiles || List());
        // An existing entry may already have metadata. If we surfed away and back to its
        // editor page, the metadata will have been fetched already, so we shouldn't
        // clear it as to not break relation lists.
@ -56,19 +59,26 @@ const entryDraftReducer = (state = Map(), action) => {
    case DRAFT_CREATE_FROM_LOCAL_BACKUP:
      // Local Backup
      return state.withMutations(state => {
        const backupEntry = state.get('localBackup');
        const backupDraftEntry = state.get('localBackup');
        const backupEntry = backupDraftEntry.get('entry');
        state.delete('localBackup');
        state.set('entry', backupEntry);
        state.setIn(['entry', 'newRecord'], !backupEntry.get('path'));
        state.set('mediaFiles', List());
        state.set('mediaFiles', backupDraftEntry.get('mediaFiles'));
        state.set('fieldsMetaData', Map());
        state.set('fieldsErrors', Map());
        state.set('hasChanged', true);
      });
    case DRAFT_DISCARD:
      return initialState;
    case DRAFT_LOCAL_BACKUP_RETRIEVED:
      return state.set('localBackup', fromJS(action.payload.entry));
    case DRAFT_LOCAL_BACKUP_RETRIEVED: {
      const { entry, mediaFiles } = action.payload;
      const newState = new Map({
        entry: fromJS(entry),
        mediaFiles: List(mediaFiles),
      });
      return state.set('localBackup', newState);
    }
    case DRAFT_CHANGE_FIELD:
      return state.withMutations(state => {
        state.setIn(['entry', 'data', action.payload.field], action.payload.value);
@ -113,14 +123,28 @@ const entryDraftReducer = (state = Map(), action) => {
        state.set('hasChanged', false);
      });

    case ADD_ASSET:
    case ADD_DRAFT_ENTRY_MEDIA_FILE:
      if (state.has('mediaFiles')) {
        return state.update('mediaFiles', list => list.push(action.payload.public_path));
        return state.update('mediaFiles', list =>
          list.filterNot(file => file.id === action.payload.id).push({ ...action.payload }),
        );
      }
      return state;

    case REMOVE_ASSET:
      return state.update('mediaFiles', list => list.filterNot(path => path === action.payload));
    case SET_DRAFT_ENTRY_MEDIA_FILES: {
      return state.set('mediaFiles', List(action.payload));
    }

    case REMOVE_DRAFT_ENTRY_MEDIA_FILE:
      if (state.has('mediaFiles')) {
        return state.update('mediaFiles', list =>
          list.filterNot(file => file.id === action.payload.id),
        );
      }
      return state;

    case CLEAR_DRAFT_ENTRY_MEDIA_FILES:
      return state.set('mediaFiles', List());

    default:
      return state;
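For context, the new ADD_DRAFT_ENTRY_MEDIA_FILE case de-duplicates by id before pushing, so dispatching the same file id twice keeps only the latest copy. A rough usage sketch, assuming the reducer above is imported as entryDraftReducer and the action creators are shaped like the sketch earlier in this section:

import { Map, List } from 'immutable';

let state = Map({ entry: Map(), mediaFiles: List() });
state = entryDraftReducer(state, addDraftEntryMediaFile({ id: '1', name: 'a.png' }));
state = entryDraftReducer(state, addDraftEntryMediaFile({ id: '1', name: 'b.png' }));

// Only the latest copy of id '1' survives the filterNot + push above.
state.get('mediaFiles').toJS(); // [{ id: '1', name: 'b.png' }]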
@ -1,5 +1,6 @@
import { Map } from 'immutable';
import uuid from 'uuid/v4';
import { differenceBy } from 'lodash';
import {
  MEDIA_LIBRARY_OPEN,
  MEDIA_LIBRARY_CLOSE,
@ -18,6 +19,7 @@ import {
  MEDIA_DISPLAY_URL_REQUEST,
  MEDIA_DISPLAY_URL_SUCCESS,
  MEDIA_DISPLAY_URL_FAILURE,
  ADD_MEDIA_FILES_TO_LIBRARY,
} from 'Actions/mediaLibrary';

const defaultState = {
@ -127,6 +129,12 @@ const mediaLibrary = (state = Map(defaultState), action) => {
        map.set('isPersisting', false);
      });
    }
    case ADD_MEDIA_FILES_TO_LIBRARY: {
      const { mediaFiles } = action.payload;
      let updatedFiles = differenceBy(state.get('files'), mediaFiles, 'path');
      updatedFiles = [...mediaFiles.map(file => ({ ...file, key: uuid() })), ...updatedFiles];
      return state.set('files', updatedFiles);
    }
    case MEDIA_PERSIST_FAILURE: {
      const privateUploadChanged = state.get('privateUpload') !== action.payload.privateUpload;
      if (privateUploadChanged) {
@ -143,7 +151,9 @@ const mediaLibrary = (state = Map(defaultState), action) => {
        return state;
      }
      return state.withMutations(map => {
        const updatedFiles = map.get('files').filter(file => file.key !== key);
        const updatedFiles = map
          .get('files')
          .filter(file => (key ? file.key !== key : file.id !== id));
        map.set('files', updatedFiles);
        map.deleteIn(['displayURLs', id]);
        map.set('isDeleting', false);
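The ADD_MEDIA_FILES_TO_LIBRARY case merges entry media into the library by path: incoming files replace any existing entry with the same path and receive a freshly generated key. A small standalone sketch of that merge, with sample data only:

import { differenceBy } from 'lodash';
import uuid from 'uuid/v4';

const existing = [
  { sha: 'old', path: 'a.png', key: 'key1' },
  { sha: 'sha2', path: 'b.png', key: 'key2' },
];
const incoming = [{ sha: 'new', path: 'a.png' }];

// Keep only the existing files whose path is not being replaced...
const kept = differenceBy(existing, incoming, 'path'); // [{ sha: 'sha2', path: 'b.png', key: 'key2' }]
// ...then prepend the incoming files, each tagged with a new key.
const merged = [...incoming.map(file => ({ ...file, key: uuid() })), ...kept];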
@ -1,10 +1,17 @@
import { Map } from 'immutable';
import { resolvePath } from 'netlify-cms-lib-util';
import { ADD_ASSET, REMOVE_ASSET } from 'Actions/media';
import { ADD_ASSETS, ADD_ASSET, REMOVE_ASSET } from 'Actions/media';
import AssetProxy from 'ValueObjects/AssetProxy';

const medias = (state = Map(), action) => {
  switch (action.type) {
    case ADD_ASSETS: {
      let newState = state;
      action.payload.forEach(asset => {
        newState = newState.set(asset.public_path, asset);
      });
      return newState;
    }
    case ADD_ASSET:
      return state.set(action.payload.public_path, action.payload);
    case REMOVE_ASSET:
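The new ADD_ASSETS case simply folds a batch of assets into the map, keyed by public_path. An equivalent reduce, shown with sample data only:

import { Map } from 'immutable';

const assets = [{ public_path: '/img/a.png' }, { public_path: '/img/b.png' }];
// Equivalent to the forEach/set loop in the ADD_ASSETS case above.
const next = assets.reduce((acc, asset) => acc.set(asset.public_path, asset), Map());
// next.get('/img/a.png') -> { public_path: '/img/a.png' }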
@ -59,7 +59,7 @@ export function createAssetProxy(value, fileObj, uploaded = false, privateUpload
      () => new AssetProxy(value, fileObj, false),
    );
  } else if (privateUpload) {
    throw new Error('The Private Upload option is only avaible for Asset Store Integration');
    throw new Error('The Private Upload option is only available for Asset Store Integration');
  }

  return Promise.resolve(new AssetProxy(value, fileObj, uploaded));
@ -98,6 +98,9 @@ const de = {
    },
  },
  mediaLibrary: {
    mediaLibraryCard: {
      draft: 'Entwurf',
    },
    mediaLibrary: {
      onDelete: 'Soll das ausgewählte Medium wirklich gelöscht werden?',
    },
@ -94,6 +94,9 @@ const en = {
    },
  },
  mediaLibrary: {
    mediaLibraryCard: {
      draft: 'Draft',
    },
    mediaLibrary: {
      onDelete: 'Are you sure you want to delete selected media?',
    },
@ -96,6 +96,9 @@ const fr = {
    },
  },
  mediaLibrary: {
    mediaLibraryCard: {
      draft: 'Brouillon',
    },
    mediaLibrary: {
      onDelete: 'Voulez-vous vraiment supprimer la ressource sélectionné ?',
    },
@ -79,6 +79,8 @@ const colors = {
  controlLabel: '#7a8291',
  checkerboardLight: '#f2f2f2',
  checkerboardDark: '#e6e6e6',
  mediaDraftText: colorsRaw.purple,
  mediaDraftBackground: colorsRaw.purpleLight,
};

const lengths = {
@ -1,8 +1,6 @@
import React from 'react';
import { fromJS } from 'immutable';
import { render, fireEvent } from 'react-testing-library';
import 'react-testing-library/cleanup-after-each';
import 'jest-dom/extend-expect';
import { render, fireEvent } from '@testing-library/react';
import { NetlifyCmsWidgetNumber } from '../';
import { validateMinMax } from '../NumberControl';
@ -1,9 +1,7 @@
import React from 'react';
import { fromJS, Map } from 'immutable';
import { last } from 'lodash';
import { render, fireEvent, wait } from 'react-testing-library';
import 'react-testing-library/cleanup-after-each';
import 'jest-dom/extend-expect';
import { render, fireEvent, wait } from '@testing-library/react';
import { NetlifyCmsWidgetRelation } from '../';

const RelationControl = NetlifyCmsWidgetRelation.controlComponent;
@ -1,8 +1,6 @@
import React from 'react';
import { fromJS } from 'immutable';
import { render, fireEvent } from 'react-testing-library';
import 'react-testing-library/cleanup-after-each';
import 'jest-dom/extend-expect';
import { render, fireEvent } from '@testing-library/react';
import { NetlifyCmsWidgetSelect } from '../';

const SelectControl = NetlifyCmsWidgetSelect.controlComponent;
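The three widget test files above migrate from the deprecated react-testing-library and jest-dom entry points to @testing-library/react. Roughly, the mapping is the sketch below; the cleanup-after-each import is dropped because @testing-library/react 9.x cleans up automatically after each test, and the jest-dom matchers are loaded once from the shared test setup shown next.

// Before:
// import { render, fireEvent } from 'react-testing-library';
// import 'react-testing-library/cleanup-after-each';
// import 'jest-dom/extend-expect';

// After:
import { render, fireEvent } from '@testing-library/react';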
@ -1,4 +1,5 @@
/* eslint-disable emotion/no-vanilla */
import '@testing-library/jest-dom/extend-expect';
import fetch from 'node-fetch';
import * as emotion from 'emotion';
import { createSerializer } from 'jest-emotion';
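The setup file above imports emotion and jest-emotion's createSerializer; presumably the serializer is registered right after these imports so that snapshots (such as the MediaLibraryCard ones earlier in this diff) print the resolved CSS next to the hashed class names. A minimal sketch of that registration, assuming the standard jest-emotion API:

import * as emotion from 'emotion';
import { createSerializer } from 'jest-emotion';

// Print emotion styles in snapshot output instead of bare class hashes.
expect.addSnapshotSerializer(createSerializer(emotion));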
117
yarn.lock
@ -726,7 +726,7 @@
    "@babel/plugin-transform-react-jsx-self" "^7.0.0"
    "@babel/plugin-transform-react-jsx-source" "^7.0.0"

"@babel/runtime@^7.1.2", "@babel/runtime@^7.2.0", "@babel/runtime@^7.4.2", "@babel/runtime@^7.4.3", "@babel/runtime@^7.5.5":
"@babel/runtime@^7.1.2", "@babel/runtime@^7.2.0", "@babel/runtime@^7.4.2", "@babel/runtime@^7.4.3", "@babel/runtime@^7.5.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.0", "@babel/runtime@^7.6.2":
  version "7.7.2"
  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.7.2.tgz#111a78002a5c25fc8e3361bedc9529c696b85a6a"
  integrity sha512-JONRbXbTXc9WQE2mAZd1p0Z3DZ/6vaQIkgYMSTP3KjRCyd7rCZCcfhCyX+YjwcKxcZ82UrxbRD358bpExNgrjw==
@ -2083,6 +2083,42 @@
  resolved "https://registry.yarnpkg.com/@sheerun/mutationobserver-shim/-/mutationobserver-shim-0.3.2.tgz#8013f2af54a2b7d735f71560ff360d3a8176a87b"
  integrity sha512-vTCdPp/T/Q3oSqwHmZ5Kpa9oI7iLtGl3RQaA/NyLHikvcrPxACkkKVr/XzkSPJWXHRhKGzVvb0urJsbMlRxi1Q==

"@testing-library/dom@^6.3.0":
  version "6.10.1"
  resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-6.10.1.tgz#da5bf5065d3f9e484aef4cc495f4e1a5bea6df2e"
  integrity sha512-5BPKxaO+zSJDUbVZBRNf9KrmDkm/EcjjaHSg3F9+031VZyPACKXlwLBjVzZxheunT9m72DoIq7WvyE457/Xweg==
  dependencies:
    "@babel/runtime" "^7.6.2"
    "@sheerun/mutationobserver-shim" "^0.3.2"
    "@types/testing-library__dom" "^6.0.0"
    aria-query "3.0.0"
    pretty-format "^24.9.0"
    wait-for-expect "^3.0.0"

"@testing-library/jest-dom@^4.2.3":
  version "4.2.4"
  resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-4.2.4.tgz#00dfa0cbdd837d9a3c2a7f3f0a248ea6e7b89742"
  integrity sha512-j31Bn0rQo12fhCWOUWy9fl7wtqkp7In/YP2p5ZFyRuiiB9Qs3g+hS4gAmDWONbAHcRmVooNJ5eOHQDCOmUFXHg==
  dependencies:
    "@babel/runtime" "^7.5.1"
    chalk "^2.4.1"
    css "^2.2.3"
    css.escape "^1.5.1"
    jest-diff "^24.0.0"
    jest-matcher-utils "^24.0.0"
    lodash "^4.17.11"
    pretty-format "^24.0.0"
    redent "^3.0.0"

"@testing-library/react@^9.3.2":
  version "9.3.2"
  resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-9.3.2.tgz#418000daa980dafd2d9420cc733d661daece9aa0"
  integrity sha512-J6ftWtm218tOLS175MF9eWCxGp+X+cUXCpkPIin8KAXWtyZbr9CbqJ8M8QNd6spZxJDAGlw+leLG4MJWLlqVgg==
  dependencies:
    "@babel/runtime" "^7.6.0"
    "@testing-library/dom" "^6.3.0"
    "@types/testing-library__react" "^9.1.0"

"@types/babel__core@^7.1.0":
  version "7.1.3"
  resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.3.tgz#e441ea7df63cd080dfcd02ab199e6d16a735fc30"
@ -2183,6 +2219,13 @@
  resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.3.tgz#2ab0d5da2e5815f94b0b9d4b95d1e5f243ab2ca7"
  integrity sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw==

"@types/react-dom@*":
  version "16.9.4"
  resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-16.9.4.tgz#0b58df09a60961dcb77f62d4f1832427513420df"
  integrity sha512-fya9xteU/n90tda0s+FtN5Ym4tbgxpq/hb/Af24dvs6uYnYn+fspaxw5USlw0R8apDNwxsqumdRoCoKitckQqw==
  dependencies:
    "@types/react" "*"

"@types/react@*", "@types/react@^15.0.0 || ^16.0.0":
  version "16.9.11"
  resolved "https://registry.yarnpkg.com/@types/react/-/react-16.9.11.tgz#70e0b7ad79058a7842f25ccf2999807076ada120"
@ -2206,6 +2249,21 @@
  resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e"
  integrity sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==

"@types/testing-library__dom@*", "@types/testing-library__dom@^6.0.0":
  version "6.10.0"
  resolved "https://registry.yarnpkg.com/@types/testing-library__dom/-/testing-library__dom-6.10.0.tgz#590d76e3875a7c536dc744eb530cbf51b6483404"
  integrity sha512-mL/GMlyQxiZplbUuFNwA0vAI3k3uJNSf6slr5AVve9TXmfLfyefNT0uHHnxwdYuPMxYD5gI/+dgAvc/5opW9JQ==
  dependencies:
    pretty-format "^24.3.0"

"@types/testing-library__react@^9.1.0":
  version "9.1.2"
  resolved "https://registry.yarnpkg.com/@types/testing-library__react/-/testing-library__react-9.1.2.tgz#e33af9124c60a010fc03a34eff8f8a34a75c4351"
  integrity sha512-CYaMqrswQ+cJACy268jsLAw355DZtPZGt3Jwmmotlcu8O/tkoXBI6AeZ84oZBJsIsesozPKzWzmv/0TIU+1E9Q==
  dependencies:
    "@types/react-dom" "*"
    "@types/testing-library__dom" "*"

"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2":
  version "2.0.3"
  resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e"
@ -2704,6 +2762,14 @@ argparse@^1.0.7:
  dependencies:
    sprintf-js "~1.0.2"

aria-query@3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-3.0.0.tgz#65b3fcc1ca1155a8c9ae64d6eee297f15d5133cc"
  integrity sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w=
  dependencies:
    ast-types-flow "0.0.7"
    commander "^2.11.0"

arr-diff@^4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
@ -2828,6 +2894,11 @@ assign-symbols@^1.0.0:
  resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
  integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=

ast-types-flow@0.0.7:
  version "0.0.7"
  resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad"
  integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0=

astral-regex@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9"
@ -3873,7 +3944,7 @@ commander@2.15.1:
  resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f"
  integrity sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==

commander@^2.20.0, commander@^2.8.1, commander@~2.20.3:
commander@^2.11.0, commander@^2.20.0, commander@^2.8.1, commander@~2.20.3:
  version "2.20.3"
  resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
  integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
@ -4752,7 +4823,7 @@ dom-serializer@0:
    domelementtype "^2.0.1"
    entities "^2.0.0"

dom-testing-library@^4.0.0, dom-testing-library@^4.1.0:
dom-testing-library@^4.0.0:
  version "4.1.1"
  resolved "https://registry.yarnpkg.com/dom-testing-library/-/dom-testing-library-4.1.1.tgz#615af61bee06db51bd8ecea60c113eba7cb49dda"
  integrity sha512-PUsG7aY5BJxzulDrOtkksqudRRypcVQF6d4RGAyj9xNwallOFqrNLOyg2QW2mCpFaNVPELX8hBX/wbHQtOto/A==
@ -6711,6 +6782,11 @@ indent-string@^3.0.0:
  resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289"
  integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=

indent-string@^4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
  integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==

indexes-of@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607"
@ -8577,6 +8653,11 @@ min-document@^2.19.0:
  dependencies:
    dom-walk "^0.1.0"

min-indent@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.0.tgz#cfc45c37e9ec0d8f0a0ec3dd4ef7f7c3abe39256"
  integrity sha1-z8RcN+nsDY8KDsPdTvf3w6vjklY=

minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7"
@ -10005,7 +10086,7 @@ prettier@1.18.2:
  resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.18.2.tgz#6823e7c5900017b4bd3acf46fe9ac4b4d7bda9ea"
  integrity sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw==

pretty-format@^24.0.0, pretty-format@^24.7.0, pretty-format@^24.9.0:
pretty-format@^24.0.0, pretty-format@^24.3.0, pretty-format@^24.7.0, pretty-format@^24.9.0:
  version "24.9.0"
  resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-24.9.0.tgz#12fac31b37019a4eea3c11aa9a959eb7628aa7c9"
  integrity sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==
@ -10532,14 +10613,6 @@ react-test-renderer@^16.8.4:
    react-is "^16.8.6"
    scheduler "^0.17.0"

react-testing-library@^7.0.0:
  version "7.0.1"
  resolved "https://registry.yarnpkg.com/react-testing-library/-/react-testing-library-7.0.1.tgz#0cf113bb53a78599f018378f6854e91a52dbf205"
  integrity sha512-doQkM3/xPcIm22x9jgTkGxU8xqXg4iWvM1WwbbQ7CI5/EMk3DhloYBwMyk+Ywtta3dIAIh9sC7llXoKovf3L+w==
  dependencies:
    "@babel/runtime" "^7.4.3"
    dom-testing-library "^4.1.0"

react-textarea-autosize@^7.1.0:
  version "7.1.2"
  resolved "https://registry.yarnpkg.com/react-textarea-autosize/-/react-textarea-autosize-7.1.2.tgz#70fdb333ef86bcca72717e25e623e90c336e2cda"
@ -10750,6 +10823,14 @@ redent@^2.0.0:
    indent-string "^3.0.0"
    strip-indent "^2.0.0"

redent@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f"
  integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==
  dependencies:
    indent-string "^4.0.0"
    strip-indent "^3.0.0"

redux-mock-store@^1.5.3:
  version "1.5.3"
  resolved "https://registry.yarnpkg.com/redux-mock-store/-/redux-mock-store-1.5.3.tgz#1f10528949b7ce8056c2532624f7cafa98576c6d"
@ -12162,6 +12243,13 @@ strip-indent@^2.0.0:
  resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-2.0.0.tgz#5ef8db295d01e6ed6cbf7aab96998d7822527b68"
  integrity sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=

strip-indent@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001"
  integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==
  dependencies:
    min-indent "^1.0.0"

strip-json-comments@^2.0.1, strip-json-comments@~2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
@ -13158,6 +13246,11 @@ wait-for-expect@^1.1.1:
  resolved "https://registry.yarnpkg.com/wait-for-expect/-/wait-for-expect-1.3.0.tgz#65241ce355425f907f5d127bdb5e72c412ff830c"
  integrity sha512-8fJU7jiA96HfGPt+P/UilelSAZfhMBJ52YhKzlmZQvKEZU2EcD1GQ0yqGB6liLdHjYtYAoGVigYwdxr5rktvzA==

wait-for-expect@^3.0.0:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/wait-for-expect/-/wait-for-expect-3.0.1.tgz#ec204a76b0038f17711e575720aaf28505ac7185"
  integrity sha512-3Ha7lu+zshEG/CeHdcpmQsZnnZpPj/UsG3DuKO8FskjuDbkx3jE3845H+CuwZjA2YWYDfKMU2KhnCaXMLd3wVw==

wait-on@3.3.0:
  version "3.3.0"
  resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-3.3.0.tgz#9940981d047a72a9544a97b8b5fca45b2170a082"