feat: commit media with post (#2851)
* feat: commit media with post - initial commit
* feat: add draft media indication
* feat: sync UI media files with GitHub on entry load
* feat: bug fixes
* feat: delete media files from github when removed from library
* test: add GitHub backend tests
* test: add unit tests
* fix: meta data object files are not updated
* feat: used nested paths when update a tree instead of recursion
* feat(test-backend): update test backend to persist media file with entry
* test(e2e): re-record fixtures data
* chore: code cleanup
* chore: code cleanup
* fix: wait for library to load before adding entry media files
* chore: code cleanup
* fix: don't add media files on entry when not a draft
* fix: sync media library after draft entry was published
* feat: update media library card draft style, add tests
* test: add Editor unit tests
* chore: test code cleanup
* fix: publishing an entry from workflow tab throws an error
* fix: duplicate media files when using test backend
* refactor: fix lodash import
* chore: update translations and yarn file after rebase
* test(cypress): update recorded data
* fix(test-backend): fix mapping of media files on publish
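
The key API change in this commit is that `updateTree` now builds a single flat Git tree from the files' full nested paths (a `null` blob sha marks a deletion) instead of recursively composing subtrees with `composeFileTree`. A minimal sketch of that idea, using a hypothetical `buildTreePayload` helper and file objects shaped like `{ path, sha, remove }` as they are passed around in this PR:

// Sketch only: build the flat tree payload sent to the Git trees API.
// A tree entry with sha: null asks the API to delete that path from base_tree.
function buildTreePayload(baseSha, files) {
  const tree = files.map(file => ({
    path: file.path.replace(/^\/+/, ''), // drop leading slashes, like trimStart(path, '/')
    mode: '100644',
    type: 'blob',
    sha: file.remove ? null : file.sha,
  }));
  return { base_tree: baseSha, tree };
}

// Example: remove one media file, add/update one post.
console.log(
  buildTreePayload('baseTreeSha', [
    { path: '/static/media/old-image.jpeg', sha: 'old-image-sha', remove: true },
    { path: 'content/posts/new-post.md', sha: 'new-post-sha' },
  ]),
);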
@@ -1,6 +1,17 @@
import { Base64 } from 'js-base64';
import semaphore from 'semaphore';
import { find, flow, get, hasIn, initial, last, partial, result, uniq } from 'lodash';
import {
find,
flow,
get,
hasIn,
initial,
last,
partial,
result,
differenceBy,
trimStart,
} from 'lodash';
import { map } from 'lodash/fp';
import {
getAllResponses,
@@ -195,15 +206,10 @@ export default class API {
this._metadataSemaphore.take(async () => {
try {
const branchData = await this.checkMetadataRef();
const fileTree = {
[`${key}.json`]: {
path: `${key}.json`,
raw: JSON.stringify(data),
file: true,
},
};
await this.uploadBlob(fileTree[`${key}.json`]);
const changeTree = await this.updateTree(branchData.sha, '/', fileTree);
const file = { path: `${key}.json`, raw: JSON.stringify(data) };

await this.uploadBlob(file);
const changeTree = await this.updateTree(branchData.sha, [file]);
const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
await this.patchRef('meta', '_netlify_cms', sha);
localForage.setItem(`gh.meta.${key}`, {
@@ -304,7 +310,7 @@ export default class API {
return text;
}

async getMediaDisplayURL(sha, path) {
async getMediaAsBlob(sha, path) {
const response = await this.fetchBlob(sha, this.repoURL);
let blob;
if (path.match(/.svg$/)) {
@@ -313,6 +319,11 @@ export default class API {
} else {
blob = await response.blob();
}
return blob;
}

async getMediaDisplayURL(sha, path) {
const blob = await this.getMediaAsBlob(sha, path);

return URL.createObjectURL(blob);
}
@@ -501,56 +512,23 @@ export default class API {
}
}

composeFileTree(files) {
let filename;
let part;
let parts;
let subtree;
const fileTree = {};

files.forEach(file => {
if (file.uploaded) {
return;
}
parts = file.path.split('/').filter(part => part);
filename = parts.pop();
subtree = fileTree;
while ((part = parts.shift())) {
// eslint-disable-line no-cond-assign
subtree[part] = subtree[part] || {};
subtree = subtree[part];
}
subtree[filename] = file;
file.file = true;
});

return fileTree;
}

persistFiles(entry, mediaFiles, options) {
const uploadPromises = [];
async persistFiles(entry, mediaFiles, options) {
const files = entry ? mediaFiles.concat(entry) : mediaFiles;
const uploadPromises = files.filter(file => !file.uploaded).map(file => this.uploadBlob(file));
await Promise.all(uploadPromises);

files.forEach(file => {
if (file.uploaded) {
return;
}
uploadPromises.push(this.uploadBlob(file));
});

const fileTree = this.composeFileTree(files);

return Promise.all(uploadPromises).then(() => {
if (!options.useWorkflow) {
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, '/', fileTree))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else {
const mediaFilesList = mediaFiles.map(file => ({ path: file.path, sha: file.sha }));
return this.editorialWorkflowGit(fileTree, entry, mediaFilesList, options);
}
});
if (!options.useWorkflow) {
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, files))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else {
const mediaFilesList = mediaFiles.map(({ sha, path }) => ({
path: trimStart(path, '/'),
sha,
}));
return this.editorialWorkflowGit(files, entry, mediaFilesList, options);
}
}

getFileSha(path, branch) {
@@ -597,7 +575,7 @@ export default class API {
return this.createPR(commitMessage, branchName);
}

async editorialWorkflowGit(fileTree, entry, filesList, options) {
async editorialWorkflowGit(files, entry, mediaFilesList, options) {
const contentKey = this.generateContentKey(options.collectionName, entry.slug);
const branchName = this.generateBranchName(contentKey);
const unpublished = options.unpublished || false;
@@ -605,7 +583,7 @@ export default class API {
// Open new editorial review workflow for this entry - Create new metadata and commit to new branch
const userPromise = this.user();
const branchData = await this.getBranch();
const changeTree = await this.updateTree(branchData.commit.sha, '/', fileTree);
const changeTree = await this.updateTree(branchData.commit.sha, files);
const commitResponse = await this.commit(options.commitMessage, changeTree);

let pr;
@@ -640,24 +618,30 @@ export default class API {
path: entry.path,
sha: entry.sha,
},
files: filesList,
files: mediaFilesList,
},
timeStamp: new Date().toISOString(),
});
} else {
// Entry is already on editorial review workflow - just update metadata and commit to existing branch
const metadata = await this.retrieveMetadata(contentKey);
// mark media files to remove
const metadataMediaFiles = get(metadata, 'objects.files', []);
const mediaFilesToRemove = differenceBy(metadataMediaFiles, mediaFilesList, 'path').map(
file => ({ ...file, remove: true }),
);
const branchData = await this.getBranch(branchName);
const changeTree = await this.updateTree(branchData.commit.sha, '/', fileTree);
const commitPromise = this.commit(options.commitMessage, changeTree);
const metadataPromise = this.retrieveMetadata(contentKey);
const [commit, metadata] = await Promise.all([commitPromise, metadataPromise]);
const changeTree = await this.updateTree(
branchData.commit.sha,
files.concat(mediaFilesToRemove),
);
const commit = await this.commit(options.commitMessage, changeTree);
const { title, description } = options.parsedData || {};
const metadataFiles = get(metadata.objects, 'files', []);
const files = [...metadataFiles, ...filesList];

const pr = metadata.pr ? { ...metadata.pr, head: commit.sha } : undefined;
const objects = {
entry: { path: entry.path, sha: entry.sha },
files: uniq(files),
files: mediaFilesList,
};
const updatedMetadata = { ...metadata, pr, title, description, objects };

@@ -667,7 +651,7 @@ export default class API {
}

if (pr) {
return this.rebasePullRequest(pr.number, branchName, contentKey, metadata, commit);
return this.rebasePullRequest(pr.number, branchName, contentKey, updatedMetadata, commit);
} else if (this.useOpenAuthoring) {
// if a PR hasn't been created yet for the forked repo, just patch the branch
await this.patchBranch(branchName, commit.sha, { force: true });
@@ -692,7 +676,7 @@ export default class API {
*/
const [baseBranch, commits] = await Promise.all([
this.getBranch(),
this.getPullRequestCommits(prNumber, head),
this.getPullRequestCommits(prNumber),
]);

/**
@@ -891,12 +875,14 @@ export default class API {
);
}

publishUnpublishedEntry(collectionName, slug) {
async publishUnpublishedEntry(collectionName, slug) {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = this.generateBranchName(contentKey);
return this.retrieveMetadata(contentKey)
.then(metadata => this.mergePR(metadata.pr, metadata.objects))
.then(() => this.deleteBranch(branchName));
const metadata = await this.retrieveMetadata(contentKey);
await this.mergePR(metadata.pr, metadata.objects);
await this.deleteBranch(branchName);

return metadata;
}

createRef(type, name, sha) {
@@ -1000,7 +986,6 @@ export default class API {

forceMergePR(pullrequest, objects) {
const files = objects.files.concat(objects.entry);
const fileTree = this.composeFileTree(files);
let commitMessage = 'Automatically generated. Merged on Netlify CMS\n\nForce merge of:';
files.forEach(file => {
commitMessage += `\n* "${file.path}"`;
@@ -1010,7 +995,7 @@ export default class API {
'line-height: 30px;text-align: center;font-weight: bold',
);
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, '/', fileTree))
.then(branchData => this.updateTree(branchData.commit.sha, files))
.then(changeTree => this.commit(commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
}
@@ -1062,47 +1047,17 @@ export default class API {
);
}

updateTree(sha, path, fileTree) {
return this.getTree(sha).then(tree => {
let obj;
let filename;
let fileOrDir;
const updates = [];
const added = {};
async updateTree(sha, files) {
const tree = files.map(file => ({
path: trimStart(file.path, '/'),
mode: '100644',
type: 'blob',
sha: file.remove ? null : file.sha,
}));

for (let i = 0, len = tree.tree.length; i < len; i++) {
obj = tree.tree[i];
if ((fileOrDir = fileTree[obj.path])) {
// eslint-disable-line no-cond-assign
added[obj.path] = true;
if (fileOrDir.file) {
updates.push({ path: obj.path, mode: obj.mode, type: obj.type, sha: fileOrDir.sha });
} else {
updates.push(this.updateTree(obj.sha, obj.path, fileOrDir));
}
}
}
for (filename in fileTree) {
fileOrDir = fileTree[filename];
if (added[filename]) {
continue;
}
updates.push(
fileOrDir.file
? { path: filename, mode: '100644', type: 'blob', sha: fileOrDir.sha }
: this.updateTree(null, filename, fileOrDir),
);
}
return Promise.all(updates)
.then(tree => this.createTree(sha, tree))
.then(response => ({
path,
mode: '040000',
type: 'tree',
sha: response.sha,
parentSha: sha,
}));
});
const newTree = await this.createTree(sha, tree);
newTree.parentSha = sha;
return newTree;
}

createTree(baseSha, tree) {

@@ -1,40 +1,85 @@
import { Base64 } from 'js-base64';
import API from '../API';

global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));

describe('github API', () => {
beforeEach(() => {
jest.resetAllMocks();
});

const mockAPI = (api, responses) => {
api.request = (path, options = {}) => {
api.request = jest.fn().mockImplementation((path, options = {}) => {
const normalizedPath = path.indexOf('?') !== -1 ? path.substr(0, path.indexOf('?')) : path;
const response = responses[normalizedPath];
return typeof response === 'function'
? Promise.resolve(response(options))
: Promise.reject(new Error(`No response for path '${normalizedPath}'`));
};
});
};

it('should create PR with correct base branch name when publishing with editorial workflow', () => {
let prBaseBranch = null;
const api = new API({ branch: 'gh-pages', repo: 'my-repo' });
const responses = {
'/repos/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
'/repos/my-repo/git/trees/def': () => ({ tree: [] }),
'/repos/my-repo/git/trees': () => ({}),
'/repos/my-repo/git/commits': () => ({}),
'/repos/my-repo/git/refs': () => ({}),
'/repos/my-repo/pulls': pullRequest => {
prBaseBranch = JSON.parse(pullRequest.body).base;
return { head: { sha: 'cbd' } };
},
'/user': () => ({}),
'/repos/my-repo/git/blobs': () => ({}),
'/repos/my-repo/git/refs/meta/_netlify_cms': () => ({ object: {} }),
};
mockAPI(api, responses);
describe('editorialWorkflowGit', () => {
it('should create PR with correct base branch name when publishing with editorial workflow', () => {
let prBaseBranch = null;
const api = new API({ branch: 'gh-pages', repo: 'my-repo' });
const responses = {
'/repos/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
'/repos/my-repo/git/trees/def': () => ({ tree: [] }),
'/repos/my-repo/git/trees': () => ({}),
'/repos/my-repo/git/commits': () => ({}),
'/repos/my-repo/git/refs': () => ({}),
'/repos/my-repo/pulls': pullRequest => {
prBaseBranch = JSON.parse(pullRequest.body).base;
return { head: { sha: 'cbd' } };
},
'/user': () => ({}),
'/repos/my-repo/git/blobs': () => ({}),
'/repos/my-repo/git/refs/meta/_netlify_cms': () => ({ object: {} }),
};
mockAPI(api, responses);

return expect(
api
.editorialWorkflowGit(null, { slug: 'entry', sha: 'abc' }, null, {})
.then(() => prBaseBranch),
).resolves.toEqual('gh-pages');
return expect(
api
.editorialWorkflowGit([], { slug: 'entry', sha: 'abc' }, null, {})
.then(() => prBaseBranch),
).resolves.toEqual('gh-pages');
});
});

describe('updateTree', () => {
it('should create tree with nested paths', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });

api.createTree = jest.fn().mockImplementation(() => Promise.resolve({ sha: 'newTreeSha' }));

const files = [
{ path: '/static/media/new-image.jpeg', sha: 'new-image.jpeg', remove: true },
{ path: 'content/posts/new-post.md', sha: 'new-post.md' },
];

const baseTreeSha = 'baseTreeSha';

await expect(api.updateTree(baseTreeSha, files)).resolves.toEqual({
sha: 'newTreeSha',
parentSha: baseTreeSha,
});

expect(api.createTree).toHaveBeenCalledTimes(1);
expect(api.createTree).toHaveBeenCalledWith(baseTreeSha, [
{
path: 'static/media/new-image.jpeg',
mode: '100644',
type: 'blob',
sha: null,
},
{
path: 'content/posts/new-post.md',
mode: '100644',
type: 'blob',
sha: 'new-post.md',
},
]);
});
});

describe('request', () => {
@@ -106,4 +151,191 @@ describe('github API', () => {
});
});
});

describe('getMediaAsBlob', () => {
it('should return response blob on non svg file', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });

const blob = {};
const response = { blob: jest.fn().mockResolvedValue(blob) };
api.fetchBlob = jest.fn().mockResolvedValue(response);

await expect(api.getMediaAsBlob('sha', 'static/media/image.png')).resolves.toBe(blob);

expect(api.fetchBlob).toHaveBeenCalledTimes(1);
expect(api.fetchBlob).toHaveBeenCalledWith('sha', '/repos/owner/repo');

expect(response.blob).toHaveBeenCalledTimes(1);
});

it('should return test blob on svg file', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });

const response = { text: jest.fn().mockResolvedValue('svg') };
api.fetchBlob = jest.fn().mockResolvedValue(response);

await expect(api.getMediaAsBlob('sha', 'static/media/logo.svg')).resolves.toEqual(
new Blob(['svg'], { type: 'image/svg+xml' }),
);

expect(api.fetchBlob).toHaveBeenCalledTimes(1);
expect(api.fetchBlob).toHaveBeenCalledWith('sha', '/repos/owner/repo');

expect(response.text).toHaveBeenCalledTimes(1);
});
});

describe('getMediaDisplayURL', () => {
it('should return createObjectURL result', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });

const blob = {};
api.getMediaAsBlob = jest.fn().mockResolvedValue(blob);
global.URL.createObjectURL = jest
.fn()
.mockResolvedValue('blob:http://localhost:8080/blob-id');

await expect(api.getMediaDisplayURL('sha', 'static/media/image.png')).resolves.toBe(
'blob:http://localhost:8080/blob-id',
);

expect(api.getMediaAsBlob).toHaveBeenCalledTimes(1);
expect(api.getMediaAsBlob).toHaveBeenCalledWith('sha', 'static/media/image.png');

expect(global.URL.createObjectURL).toHaveBeenCalledTimes(1);
expect(global.URL.createObjectURL).toHaveBeenCalledWith(blob);
});
});

describe('persistFiles', () => {
it('should update tree, commit and patch branch when useWorkflow is false', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });

const responses = {
// upload the file
'/repos/owner/repo/git/blobs': () => ({ sha: 'new-file-sha' }),

// get the branch
'/repos/owner/repo/branches/master': () => ({ commit: { sha: 'root' } }),

// create new tree
'/repos/owner/repo/git/trees': options => {
const data = JSON.parse(options.body);
return { sha: data.base_tree };
},

// update the commit with the tree
'/repos/owner/repo/git/commits': () => ({ sha: 'commit-sha' }),

// patch the branch
'/repos/owner/repo/git/refs/heads/master': () => ({}),
};
mockAPI(api, responses);

const entry = {
slug: 'entry',
sha: 'abc',
path: 'content/posts/new-post.md',
raw: 'content',
};
await api.persistFiles(entry, [], { commitMessage: 'commitMessage' });

expect(api.request).toHaveBeenCalledTimes(5);

expect(api.request.mock.calls[0]).toEqual([
'/repos/owner/repo/git/blobs',
{
method: 'POST',
body: JSON.stringify({ content: Base64.encode(entry.raw), encoding: 'base64' }),
},
]);

expect(api.request.mock.calls[1]).toEqual(['/repos/owner/repo/branches/master']);

expect(api.request.mock.calls[2]).toEqual([
'/repos/owner/repo/git/trees',
{
body: JSON.stringify({
base_tree: 'root',
tree: [
{
path: 'content/posts/new-post.md',
mode: '100644',
type: 'blob',
sha: 'new-file-sha',
},
],
}),
method: 'POST',
},
]);

expect(api.request.mock.calls[3]).toEqual([
'/repos/owner/repo/git/commits',
{
body: JSON.stringify({
message: 'commitMessage',
tree: 'root',
parents: ['root'],
}),
method: 'POST',
},
]);

expect(api.request.mock.calls[4]).toEqual([
'/repos/owner/repo/git/refs/heads/master',
{
body: JSON.stringify({
sha: 'commit-sha',
force: false,
}),
method: 'PATCH',
},
]);
});

it('should call editorialWorkflowGit when useWorkflow is true', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });

api.uploadBlob = jest.fn();
api.editorialWorkflowGit = jest.fn();

const entry = {
slug: 'entry',
sha: 'abc',
path: 'content/posts/new-post.md',
raw: 'content',
};

const mediaFiles = [
{
path: '/static/media/image-1.png',
uploaded: true,
sha: 'image-1.png',
},
{
path: '/static/media/image-2.png',
sha: 'image-2.png',
},
];

await api.persistFiles(entry, mediaFiles, { useWorkflow: true });

expect(api.uploadBlob).toHaveBeenCalledTimes(2);
expect(api.uploadBlob).toHaveBeenCalledWith(entry);
expect(api.uploadBlob).toHaveBeenCalledWith(mediaFiles[1]);

expect(api.editorialWorkflowGit).toHaveBeenCalledTimes(1);

expect(api.editorialWorkflowGit).toHaveBeenCalledWith(
mediaFiles.concat(entry),
entry,
[
{ path: 'static/media/image-1.png', sha: 'image-1.png' },
{ path: 'static/media/image-2.png', sha: 'image-2.png' },
],
{ useWorkflow: true },
);
});
});
});

@@ -20,6 +20,13 @@ describe('github backend implementation', () => {
}),
};

const createObjectURL = jest.fn();
global.URL = {
createObjectURL,
};

createObjectURL.mockReturnValue('displayURL');

beforeEach(() => {
jest.clearAllMocks();
});
@@ -72,4 +79,173 @@ describe('github backend implementation', () => {
await expect(gitHubImplementation.forkExists({ token: 'token' })).resolves.toBe(false);
});
});

describe('persistMedia', () => {
const persistFiles = jest.fn();
const mockAPI = {
persistFiles,
};

persistFiles.mockImplementation((_, files) => {
files.forEach((file, index) => {
file.sha = index;
});
});

it('should persist media file when not draft', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;

const mediaFile = {
value: 'image.png',
fileObj: { size: 100 },
path: '/media/image.png',
};

expect.assertions(5);
await expect(gitHubImplementation.persistMedia(mediaFile)).resolves.toEqual({
id: 0,
name: 'image.png',
size: 100,
displayURL: 'displayURL',
path: 'media/image.png',
draft: undefined,
});

expect(persistFiles).toHaveBeenCalledTimes(1);
expect(persistFiles).toHaveBeenCalledWith(null, [mediaFile], {});
expect(createObjectURL).toHaveBeenCalledTimes(1);
expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
});

it('should not persist media file when draft', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;

createObjectURL.mockReturnValue('displayURL');

const mediaFile = {
value: 'image.png',
fileObj: { size: 100 },
path: '/media/image.png',
};

expect.assertions(4);
await expect(gitHubImplementation.persistMedia(mediaFile, { draft: true })).resolves.toEqual({
id: undefined,
name: 'image.png',
size: 100,
displayURL: 'displayURL',
path: 'media/image.png',
draft: true,
});

expect(persistFiles).toHaveBeenCalledTimes(0);
expect(createObjectURL).toHaveBeenCalledTimes(1);
expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
});

it('should log and throw error on "persistFiles" error', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;

const error = new Error('failed to persist files');
persistFiles.mockRejectedValue(error);

const mediaFile = {
value: 'image.png',
fileObj: { size: 100 },
path: '/media/image.png',
};

expect.assertions(5);
await expect(gitHubImplementation.persistMedia(mediaFile)).rejects.toThrowError(error);

expect(persistFiles).toHaveBeenCalledTimes(1);
expect(createObjectURL).toHaveBeenCalledTimes(0);
expect(console.error).toHaveBeenCalledTimes(1);
expect(console.error).toHaveBeenCalledWith(error);
});
});

describe('getMediaFiles', () => {
const getMediaAsBlob = jest.fn();
const mockAPI = {
getMediaAsBlob,
};

it('should return media files from meta data', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;

const blob = new Blob(['']);
getMediaAsBlob.mockResolvedValue(blob);

const file = new File([blob], name);

const data = {
metaData: {
objects: {
files: [{ path: 'static/media/image.png', sha: 'image.png' }],
},
},
};

await expect(gitHubImplementation.getMediaFiles(data)).resolves.toEqual([
{
id: 'image.png',
sha: 'image.png',
displayURL: 'displayURL',
path: 'static/media/image.png',
name: 'image.png',
size: file.size,
file,
},
]);
});
});

describe('unpublishedEntry', () => {
const generateContentKey = jest.fn();
const readUnpublishedBranchFile = jest.fn();

const mockAPI = {
generateContentKey,
readUnpublishedBranchFile,
};

it('should return unpublished entry', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;
gitHubImplementation.getMediaFiles = jest.fn().mockResolvedValue([{ path: 'image.png' }]);

generateContentKey.mockReturnValue('contentKey');

const data = {
fileData: 'fileData',
isModification: true,
metaData: { objects: { entry: { path: 'entry-path' } } },
};
readUnpublishedBranchFile.mockResolvedValue(data);

const collection = { get: jest.fn().mockReturnValue('posts') };
await expect(gitHubImplementation.unpublishedEntry(collection, 'slug')).resolves.toEqual({
slug: 'slug',
file: { path: 'entry-path' },
data: 'fileData',
metaData: { objects: { entry: { path: 'entry-path' } } },
mediaFiles: [{ path: 'image.png' }],
isModification: true,
});

expect(generateContentKey).toHaveBeenCalledTimes(1);
expect(generateContentKey).toHaveBeenCalledWith('posts', 'slug');

expect(readUnpublishedBranchFile).toHaveBeenCalledTimes(1);
expect(readUnpublishedBranchFile).toHaveBeenCalledWith('contentKey');

expect(gitHubImplementation.getMediaFiles).toHaveBeenCalledTimes(1);
expect(gitHubImplementation.getMediaFiles).toHaveBeenCalledWith(data);
});
});
});

@@ -4,6 +4,7 @@ import semaphore from 'semaphore';
import { stripIndent } from 'common-tags';
import { asyncLock } from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { get } from 'lodash';
import API from './API';
import GraphQLAPI from './GraphQLAPI';

@@ -331,7 +332,9 @@ export default class GitHub {

async persistMedia(mediaFile, options = {}) {
try {
await this.api.persistFiles(null, [mediaFile], options);
if (!options.draft) {
await this.api.persistFiles(null, [mediaFile], options);
}

const { sha, value, path, fileObj } = mediaFile;
const displayURL = URL.createObjectURL(fileObj);
@@ -341,6 +344,7 @@ export default class GitHub {
size: fileObj.size,
displayURL,
path: trimStart(path, '/'),
draft: options.draft,
};
} catch (error) {
console.error(error);
@@ -352,6 +356,29 @@ export default class GitHub {
return this.api.deleteFile(path, commitMessage, options);
}

async getMediaFiles(data) {
const files = get(data, 'metaData.objects.files', []);
const mediaFiles = await Promise.all(
files.map(file =>
this.api.getMediaAsBlob(file.sha, file.path).then(blob => {
const name = file.path.substring(file.path.lastIndexOf('/') + 1);
const fileObj = new File([blob], name);
return {
id: file.sha,
sha: file.sha,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name: name,
size: fileObj.size,
file: fileObj,
};
}),
),
);

return mediaFiles;
}

unpublishedEntries() {
return this.api
.listUnpublishedBranches()
@@ -371,10 +398,9 @@ export default class GitHub {
resolve(null);
sem.leave();
} else {
const path = data.metaData.objects.entry.path;
resolve({
slug,
file: { path },
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
@@ -400,18 +426,21 @@ export default class GitHub {
});
}

unpublishedEntry(collection, slug) {
async unpublishedEntry(collection, slug) {
const contentKey = this.api.generateContentKey(collection.get('name'), slug);
return this.api.readUnpublishedBranchFile(contentKey).then(data => {
if (!data) return null;
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
};
});
const data = await this.api.readUnpublishedBranchFile(contentKey);
if (!data) {
return null;
}
const mediaFiles = await this.getMediaFiles(data);
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}

/**
@@ -456,9 +485,10 @@ export default class GitHub {

publishUnpublishedEntry(collection, slug) {
// publishUnpublishedEntry is a transactional operation
return this.runWithLock(
() => this.api.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
return this.runWithLock(async () => {
const metaData = await this.api.publishUnpublishedEntry(collection, slug);
const mediaFiles = await this.getMediaFiles({ metaData });
return { mediaFiles };
}, 'Failed to acquire publish entry lock');
}
}

@@ -123,6 +123,15 @@ export default class TestBackend {
return Promise.resolve(window.repoFilesUnpublished);
}

getMediaFiles(entry) {
const mediaFiles = entry.mediaFiles.map(file => ({
...file,
...this.mediaFileToAsset(file),
file: file.fileObj,
}));
return mediaFiles;
}

unpublishedEntry(collection, slug) {
const entry = window.repoFilesUnpublished.find(
e => e.metaData.collection === collection.get('name') && e.slug === slug,
@@ -132,6 +141,8 @@ export default class TestBackend {
new EditorialWorkflowError('content is not under editorial workflow', true),
);
}
entry.mediaFiles = this.getMediaFiles(entry);

return Promise.resolve(entry);
}

@@ -144,14 +155,17 @@ export default class TestBackend {
return Promise.resolve();
}

persistEntry({ path, raw, slug }, mediaFiles, options = {}) {
async persistEntry({ path, raw, slug }, mediaFiles, options = {}) {
if (options.useWorkflow) {
const unpubStore = window.repoFilesUnpublished;

const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = { ...unpubStore[existingEntryIndex], data: raw };
unpubEntry.title = options.parsedData && options.parsedData.title;
unpubEntry.description = options.parsedData && options.parsedData.description;
unpubEntry.mediaFiles = mediaFiles;

unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
const unpubEntry = {
@@ -166,6 +180,7 @@ export default class TestBackend {
description: options.parsedData && options.parsedData.description,
},
slug,
mediaFiles,
};
unpubStore.push(unpubEntry);
}
@@ -182,6 +197,7 @@ export default class TestBackend {
} else {
window.repoFiles[folder][fileName].content = raw;
}
await Promise.all(mediaFiles.map(file => this.persistMedia(file)));
return Promise.resolve();
}

@@ -194,7 +210,7 @@ export default class TestBackend {
return Promise.resolve();
}

publishUnpublishedEntry(collection, slug) {
async publishUnpublishedEntry(collection, slug) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
@@ -202,19 +218,32 @@ export default class TestBackend {
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = { raw: unpubEntry.data, slug: unpubEntry.slug, path: unpubEntry.file.path };
unpubStore.splice(unpubEntryIndex, 1);
return this.persistEntry(entry);

await this.persistEntry(entry, unpubEntry.mediaFiles);
return { mediaFiles: this.getMediaFiles(unpubEntry) };
}

getMedia() {
return Promise.resolve(this.assets);
}

persistMedia({ fileObj }) {
mediaFileToAsset(mediaFile) {
const { fileObj } = mediaFile;
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = { id: uuid(), name, size, path: url, url };
const normalizedAsset = { id: uuid(), name, size, path: mediaFile.path, url };

return normalizedAsset;
}

persistMedia(mediaFile, options = {}) {
const normalizedAsset = this.mediaFileToAsset(mediaFile);

if (!options.draft) {
this.assets.push(normalizedAsset);
}

this.assets.push(normalizedAsset);
return Promise.resolve(normalizedAsset);
}

@@ -1,7 +1,10 @@
import { resolveBackend } from '../backend';
import { resolveBackend, Backend } from '../backend';
import registry from 'Lib/registry';
import { Map, List } from 'immutable';

jest.mock('Lib/registry');
jest.mock('netlify-cms-lib-util');
jest.mock('Formats/formats');

const configWrapper = inputObject => ({
get: prop => inputObject[prop],
@@ -108,4 +111,271 @@ describe('Backend', () => {
expect(result.length).toBe(1);
});
});

describe('getLocalDraftBackup', () => {
const { localForage } = require('netlify-cms-lib-util');

beforeEach(() => {
jest.clearAllMocks();
});

it('should return empty object on no item', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

const collection = Map({
name: 'posts',
});
const slug = 'slug';

localForage.getItem.mockReturnValue();

const result = await backend.getLocalDraftBackup(collection, slug);

expect(result).toEqual({});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});

it('should return empty object on item with empty content', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

const collection = Map({
name: 'posts',
});
const slug = 'slug';

localForage.getItem.mockReturnValue({ raw: '' });

const result = await backend.getLocalDraftBackup(collection, slug);

expect(result).toEqual({});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});

it('should return backup entry, empty media files and assets when only raw property was saved', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

const collection = Map({
name: 'posts',
});
const slug = 'slug';

localForage.getItem.mockReturnValue({
raw: 'content',
});

const result = await backend.getLocalDraftBackup(collection, slug);

expect(result).toEqual({
assets: [],
mediaFiles: [],
entry: {
collection: 'posts',
slug: 'slug',
path: '',
partial: false,
raw: 'content',
data: {},
label: null,
metaData: null,
isModification: null,
},
});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});

it('should return backup entry, media files and assets when all were backed up', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

const collection = Map({
name: 'posts',
});
const slug = 'slug';

localForage.getItem.mockReturnValue({
raw: 'content',
mediaFiles: [{ id: '1' }],
assets: [{ public_path: 'public_path' }],
});

const result = await backend.getLocalDraftBackup(collection, slug);

expect(result).toEqual({
assets: [{ public_path: 'public_path' }],
mediaFiles: [{ id: '1' }],
entry: {
collection: 'posts',
slug: 'slug',
path: '',
partial: false,
raw: 'content',
data: {},
label: null,
metaData: null,
isModification: null,
},
});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});
});

describe('persistLocalDraftBackup', () => {
const { localForage } = require('netlify-cms-lib-util');

beforeEach(() => {
jest.clearAllMocks();
});

it('should not persist empty entry', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

backend.entryToRaw = jest.fn().mockReturnValue('');

const collection = Map({
name: 'posts',
});

const slug = 'slug';

const entry = Map({
slug,
});

await backend.persistLocalDraftBackup(entry, collection, List(), List());

expect(backend.entryToRaw).toHaveBeenCalledTimes(1);
expect(backend.entryToRaw).toHaveBeenCalledWith(collection, entry);
expect(localForage.setItem).toHaveBeenCalledTimes(0);
});

it('should persist non empty entry', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

backend.entryToRaw = jest.fn().mockReturnValue('content');

const collection = Map({
name: 'posts',
});

const slug = 'slug';

const entry = Map({
slug,
path: 'content/posts/entry.md',
});

const mediaFiles = List([{ id: '1' }]);
const assets = List([{ public_path: 'public_path' }]);

await backend.persistLocalDraftBackup(entry, collection, mediaFiles, assets);

expect(backend.entryToRaw).toHaveBeenCalledTimes(1);
expect(backend.entryToRaw).toHaveBeenCalledWith(collection, entry);
expect(localForage.setItem).toHaveBeenCalledTimes(2);
expect(localForage.setItem).toHaveBeenCalledWith('backup.posts.slug', {
assets: [{ public_path: 'public_path' }],
mediaFiles: [{ id: '1' }],
path: 'content/posts/entry.md',
raw: 'content',
});
expect(localForage.setItem).toHaveBeenCalledWith('backup', 'content');
});
});

describe('persistMedia', () => {
it('should persist media', async () => {
const persistMediaResult = {};
const implementation = {
init: jest.fn(() => implementation),
persistMedia: jest.fn().mockResolvedValue(persistMediaResult),
};
const config = Map({});

const user = { login: 'login', name: 'name' };
const backend = new Backend(implementation, { config, backendName: 'github' });
backend.currentUser = jest.fn().mockResolvedValue(user);

const file = { path: 'static/media/image.png' };

const result = await backend.persistMedia(config, file, true);
expect(result).toBe(persistMediaResult);
expect(implementation.persistMedia).toHaveBeenCalledTimes(1);
expect(implementation.persistMedia).toHaveBeenCalledWith(
{ path: 'static/media/image.png' },
{ commitMessage: 'Upload “static/media/image.png”', draft: true },
);
});
});

describe('unpublishedEntry', () => {
it('should return unpublished entry', async () => {
const unpublishedEntryResult = {
file: { path: 'path' },
isModification: true,
metaData: {},
mediaFiles: [{ id: '1' }],
data: 'content',
};
const implementation = {
init: jest.fn(() => implementation),
unpublishedEntry: jest.fn().mockResolvedValue(unpublishedEntryResult),
};
const config = Map({});

const backend = new Backend(implementation, { config, backendName: 'github' });

const collection = Map({
name: 'posts',
});

const slug = 'slug';

const result = await backend.unpublishedEntry(collection, slug);
expect(result).toEqual({
collection: 'draft',
slug: '',
path: 'path',
partial: false,
raw: 'content',
data: {},
label: null,
metaData: {},
isModification: true,
mediaFiles: [{ id: '1' }],
});
});
});
});

@@ -0,0 +1,230 @@
import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
import * as actions from '../editorialWorkflow';
import { setDraftEntryMediaFiles } from '../entries';
import { addAssets } from '../media';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { fromJS } from 'immutable';

jest.mock('coreSrc/backend');
jest.mock('Reducers', () => {
return {
getAsset: jest.fn().mockReturnValue({}),
};
});
jest.mock('ValueObjects/AssetProxy');
jest.mock('netlify-cms-lib-util');
jest.mock('uuid/v4', () => {
return jest.fn().mockReturnValue('000000000000000000000');
});
jest.mock('redux-notifications', () => {
const actual = jest.requireActual('redux-notifications');
return {
...actual,
actions: {
notifSend: jest.fn().mockImplementation(payload => ({
type: 'NOTIF_SEND',
...payload,
})),
},
};
});

const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);

describe('editorialWorkflow actions', () => {
beforeEach(() => {
jest.clearAllMocks();
});

describe('loadUnpublishedEntry', () => {
it('should load unpublished entry', () => {
const { currentBackend } = require('coreSrc/backend');
const { createAssetProxy } = require('ValueObjects/AssetProxy');

const assetProxy = { name: 'name', public_path: 'public_path' };
const entry = { mediaFiles: [{ file: { name: 'name' }, id: '1' }] };
const backend = {
unpublishedEntry: jest.fn().mockResolvedValue(entry),
};

const store = mockStore({
config: fromJS({}),
collections: fromJS({
posts: { name: 'posts' },
}),
mediaLibrary: fromJS({
isLoading: false,
}),
});

currentBackend.mockReturnValue(backend);
createAssetProxy.mockResolvedValue(assetProxy);

const slug = 'slug';
const collection = store.getState().collections.get('posts');

return store.dispatch(actions.loadUnpublishedEntry(collection, slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(5);
expect(actions[0]).toEqual({
type: 'UNPUBLISHED_ENTRY_REQUEST',
payload: {
collection: 'posts',
slug,
},
});
expect(actions[1]).toEqual(addAssets([assetProxy]));
expect(actions[2]).toEqual(
setDraftEntryMediaFiles([
{
file: { name: 'name' },
name: 'name',
id: '1',
draft: true,
public_path: 'public_path',
},
]),
);
expect(actions[3]).toEqual({
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
payload: {
mediaFiles: [{ file: { name: 'name' }, id: '1', draft: true }],
},
});
expect(actions[4]).toEqual({
type: 'UNPUBLISHED_ENTRY_SUCCESS',
payload: {
collection: 'posts',
entry,
},
});
});
});
});

describe('publishUnpublishedEntry', () => {
it('should publish unpublished entry and report success', () => {
const { currentBackend } = require('coreSrc/backend');

const mediaFiles = [{ file: { name: 'name' }, id: '1' }];
const entry = { mediaFiles };
const backend = {
publishUnpublishedEntry: jest.fn().mockResolvedValue({ mediaFiles }),
getEntry: jest.fn().mockResolvedValue(entry),
};

const store = mockStore({
config: fromJS({}),
mediaLibrary: fromJS({
isLoading: false,
}),
collections: fromJS({
posts: { name: 'posts' },
}),
});

currentBackend.mockReturnValue(backend);

const slug = 'slug';

return store.dispatch(actions.publishUnpublishedEntry('posts', slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(7);
expect(actions[0]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_REQUEST',
payload: {
collection: 'posts',
slug,
},
optimist: { type: BEGIN, id: '000000000000000000000' },
});
expect(actions[1]).toEqual({
type: 'NOTIF_SEND',
message: { key: 'ui.toast.entryPublished' },
kind: 'success',
dismissAfter: 4000,
});
expect(actions[2]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_SUCCESS',
payload: {
collection: 'posts',
slug,
},
optimist: { type: COMMIT, id: '000000000000000000000' },
});
expect(actions[3]).toEqual({
type: 'ENTRY_REQUEST',
payload: {
slug,
collection: 'posts',
},
});
expect(actions[4]).toEqual({
type: 'ADD_MEDIA_FILES_TO_LIBRARY',
payload: {
mediaFiles: [{ file: { name: 'name' }, id: '1', draft: false }],
},
});
expect(actions[5]).toEqual({
type: 'CLEAR_DRAFT_ENTRY_MEDIA_FILES',
});
expect(actions[6]).toEqual({
type: 'ENTRY_SUCCESS',
payload: {
entry,
collection: 'posts',
},
});
});
});

it('should publish unpublished entry and report error', () => {
const { currentBackend } = require('coreSrc/backend');

const error = new Error('failed to publish entry');
const backend = {
publishUnpublishedEntry: jest.fn().mockRejectedValue(error),
};

const store = mockStore({
config: fromJS({}),
collections: fromJS({
posts: { name: 'posts' },
}),
});

currentBackend.mockReturnValue(backend);

const slug = 'slug';

return store.dispatch(actions.publishUnpublishedEntry('posts', slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(3);
expect(actions[0]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_REQUEST',
payload: {
collection: 'posts',
slug,
},
optimist: { type: BEGIN, id: '000000000000000000000' },
});
expect(actions[1]).toEqual({
type: 'NOTIF_SEND',
message: { key: 'ui.toast.onFailToPublishEntry', details: error },
kind: 'danger',
dismissAfter: 8000,
});
expect(actions[2]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_FAILURE',
payload: {
collection: 'posts',
slug,
},
optimist: { type: REVERT, id: '000000000000000000000' },
});
});
});
});
});

@ -1,5 +1,27 @@
|
||||
import { fromJS } from 'immutable';
|
||||
import { createEmptyDraftData } from '../entries';
|
||||
import { fromJS, List, Map } from 'immutable';
|
||||
import {
|
||||
createEmptyDraftData,
|
||||
retrieveLocalBackup,
|
||||
persistLocalBackup,
|
||||
getMediaAssets,
|
||||
discardDraft,
|
||||
loadLocalBackup,
|
||||
} from '../entries';
|
||||
import configureMockStore from 'redux-mock-store';
|
||||
import thunk from 'redux-thunk';
|
||||
|
||||
jest.mock('coreSrc/backend');
|
||||
jest.mock('Reducers', () => {
|
||||
return {
|
||||
getAsset: jest.fn().mockReturnValue({}),
|
||||
};
|
||||
});
|
||||
jest.mock('ValueObjects/AssetProxy');
|
||||
jest.mock('netlify-cms-lib-util');
|
||||
jest.mock('../mediaLibrary.js');
|
||||
|
||||
const middlewares = [thunk];
|
||||
const mockStore = configureMockStore(middlewares);
|
||||
|
||||
describe('entries', () => {
|
||||
describe('createEmptyDraftData', () => {
|
||||
@ -79,4 +101,166 @@ describe('entries', () => {
|
||||
expect(createEmptyDraftData(fields)).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('discardDraft', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should delete media files on discard draft', () => {
|
||||
const { deleteMedia } = require('../mediaLibrary');
|
||||
const mediaFiles = [{ draft: false }, { draft: true }];
|
||||
|
||||
deleteMedia.mockImplementation(file => ({ type: 'DELETE_MEDIA', payload: file }));
|
||||
|
||||
const store = mockStore({
|
||||
config: Map(),
|
||||
entryDraft: Map({
|
||||
mediaFiles: List(mediaFiles),
|
||||
}),
|
||||
});
|
||||
|
||||
store.dispatch(discardDraft());
|
||||
|
||||
const actions = store.getActions();
|
||||
|
||||
expect(actions).toHaveLength(2);
|
||||
expect(actions[0]).toEqual({ type: 'DELETE_MEDIA', payload: { draft: true } });
|
||||
expect(actions[1]).toEqual({ type: 'DRAFT_DISCARD' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistLocalBackup', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should persist local backup with media files', () => {
|
||||
const getState = jest.fn();
|
||||
const { currentBackend } = require('coreSrc/backend');
|
||||
const { getAsset } = require('Reducers');
|
||||
|
||||
const backend = {
|
||||
persistLocalDraftBackup: jest.fn((...args) => args),
|
||||
};
|
||||
|
||||
const state = { config: {} };
|
||||
|
||||
currentBackend.mockReturnValue(backend);
|
||||
getAsset.mockImplementation((state, path) => path);
|
||||
getState.mockReturnValue(state);
|
||||
|
||||
const entry = Map();
|
||||
const collection = Map();
|
||||
const mediaFiles = [{ public_path: '/static/media/image.png' }];
|
||||
|
||||
const result = persistLocalBackup(entry, collection, mediaFiles)(null, getState);
|
||||
|
||||
expect(result).toEqual([entry, collection, mediaFiles, ['/static/media/image.png']]);
|
||||
});
|
||||
});
|
||||
|
||||
  describe('retrieveLocalBackup', () => {
    beforeEach(() => {
      jest.clearAllMocks();
    });

    it('should retrieve media files with local backup', () => {
      const { currentBackend } = require('coreSrc/backend');
      const { createAssetProxy } = require('ValueObjects/AssetProxy');
      const { addMediaFilesToLibrary } = require('../mediaLibrary');

      addMediaFilesToLibrary.mockImplementation(mediaFiles => ({
        type: 'ADD_MEDIA_FILES_TO_LIBRARY',
        payload: { mediaFiles },
      }));

      const backend = {
        getLocalDraftBackup: jest.fn((...args) => args),
      };

      const store = mockStore({
        config: Map(),
      });

      currentBackend.mockReturnValue(backend);
      createAssetProxy.mockImplementation((value, fileObj) => ({ value, fileObj }));

      const collection = Map({
        name: 'collection',
      });
      const slug = 'slug';

      const entry = {};
      const mediaFiles = [{ public_path: '/static/media/image.png' }];
      const assets = [{ value: 'image.png', fileObj: {} }];

      backend.getLocalDraftBackup.mockReturnValue({ entry, mediaFiles, assets });

      return store.dispatch(retrieveLocalBackup(collection, slug)).then(() => {
        const actions = store.getActions();

        expect(createAssetProxy).toHaveBeenCalledTimes(1);
        expect(createAssetProxy).toHaveBeenCalledWith(assets[0].value, assets[0].fileObj);
        expect(actions).toHaveLength(2);

        expect(actions[0]).toEqual({
          type: 'ADD_ASSETS',
          payload: [{ value: 'image.png', fileObj: {} }],
        });
        expect(actions[1]).toEqual({
          type: 'DRAFT_LOCAL_BACKUP_RETRIEVED',
          payload: { entry, mediaFiles },
        });
      });
    });
  });

  describe('loadLocalBackup', () => {
    it('should add backup media files to media library', () => {
      const store = mockStore({
        config: Map(),
        entryDraft: Map({
          mediaFiles: List([{ path: 'static/media.image.png' }]),
        }),
        mediaLibrary: Map({
          isLoading: false,
        }),
      });

      store.dispatch(loadLocalBackup());

      const actions = store.getActions();

      expect(actions).toHaveLength(2);
      expect(actions[0]).toEqual({
        type: 'DRAFT_CREATE_FROM_LOCAL_BACKUP',
      });
      expect(actions[1]).toEqual({
        type: 'ADD_MEDIA_FILES_TO_LIBRARY',
        payload: { mediaFiles: [{ path: 'static/media.image.png', draft: true }] },
      });
    });
  });

  describe('getMediaAssets', () => {
    beforeEach(() => {
      jest.clearAllMocks();
    });

    it('should map mediaFiles to assets', () => {
      const { getAsset } = require('Reducers');
      const state = {};
      const mediaFiles = [{ public_path: 'public_path' }];

      const asset = { name: 'asset1' };

      getAsset.mockReturnValue(asset);

      expect(getMediaAssets(state, mediaFiles)).toEqual([asset]);

      expect(getAsset).toHaveBeenCalledTimes(1);
      expect(getAsset).toHaveBeenCalledWith(state, 'public_path');
    });
  });
});

@ -1,7 +1,11 @@
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { fromJS } from 'immutable';
import { insertMedia } from '../mediaLibrary';
import { fromJS, List, Map } from 'immutable';
import { insertMedia, persistMedia, deleteMedia, addMediaFilesToLibrary } from '../mediaLibrary';

jest.mock('coreSrc/backend');
jest.mock('ValueObjects/AssetProxy');
jest.mock('../waitUntil');

const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);

@ -110,4 +114,260 @@ describe('mediaLibrary', () => {
      }
    });
  });

  const { currentBackend } = require('coreSrc/backend');
  const { createAssetProxy } = require('ValueObjects/AssetProxy');

  const backend = {
    persistMedia: jest.fn(() => ({ id: 'id' })),
    deleteMedia: jest.fn(),
  };

  currentBackend.mockReturnValue(backend);

  describe('persistMedia', () => {
    global.URL = { createObjectURL: jest.fn().mockReturnValue('displayURL') };

    beforeEach(() => {
      jest.clearAllMocks();
    });

    it('should persist media as draft in editorial workflow', () => {
      const store = mockStore({
        config: Map({
          publish_mode: 'editorial_workflow',
        }),
        integrations: Map(),
        mediaLibrary: Map({
          files: List(),
        }),
        entryDraft: Map({
          entry: Map({ isPersisting: false }),
        }),
      });

      const file = new File([''], 'name.png');
      const assetProxy = { public_path: '/media/name.png' };
      createAssetProxy.mockReturnValue(assetProxy);

      return store.dispatch(persistMedia(file)).then(() => {
        const actions = store.getActions();

        expect(actions).toHaveLength(4);
        expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
        expect(actions[1]).toEqual({
          type: 'ADD_ASSET',
          payload: { public_path: '/media/name.png' },
        });
        expect(actions[2]).toEqual({
          type: 'ADD_DRAFT_ENTRY_MEDIA_FILE',
          payload: { draft: true, id: 'id', public_path: '/media/name.png' },
        });
        expect(actions[3]).toEqual({
          type: 'MEDIA_PERSIST_SUCCESS',
          payload: {
            file: { draft: true, id: 'id', displayURL: 'displayURL' },
          },
        });

        expect(backend.persistMedia).toHaveBeenCalledTimes(1);
        expect(backend.persistMedia).toHaveBeenCalledWith(
          store.getState().config,
          assetProxy,
          true,
        );
      });
    });

    it('should not persist media as draft when not in editorial workflow', () => {
      const store = mockStore({
        config: Map({}),
        integrations: Map(),
        mediaLibrary: Map({
          files: List(),
        }),
        entryDraft: Map({
          entry: Map({ isPersisting: false }),
        }),
      });

      const file = new File([''], 'name.png');
      const assetProxy = { public_path: '/media/name.png' };
      createAssetProxy.mockReturnValue(assetProxy);

      return store.dispatch(persistMedia(file)).then(() => {
        const actions = store.getActions();

        expect(actions).toHaveLength(3);
        expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
        expect(actions[1]).toEqual({
          type: 'ADD_ASSET',
          payload: { public_path: '/media/name.png' },
        });
        expect(actions[2]).toEqual({
          type: 'MEDIA_PERSIST_SUCCESS',
          payload: {
            file: { draft: false, id: 'id', displayURL: 'displayURL' },
          },
        });

        expect(backend.persistMedia).toHaveBeenCalledTimes(1);
        expect(backend.persistMedia).toHaveBeenCalledWith(
          store.getState().config,
          assetProxy,
          false,
        );
      });
    });

    it('should not persist media as draft when draft is empty', () => {
      const store = mockStore({
        config: Map({
          publish_mode: 'editorial_workflow',
        }),
        integrations: Map(),
        mediaLibrary: Map({
          files: List(),
        }),
        entryDraft: Map({
          entry: Map(),
        }),
      });

      const file = new File([''], 'name.png');
      const assetProxy = { public_path: '/media/name.png' };
      createAssetProxy.mockReturnValue(assetProxy);

      return store.dispatch(persistMedia(file)).then(() => {
        expect(backend.persistMedia).toHaveBeenCalledTimes(1);
        expect(backend.persistMedia).toHaveBeenCalledWith(
          store.getState().config,
          assetProxy,
          false,
        );
      });
    });
  });

  describe('deleteMedia', () => {
    beforeEach(() => {
      jest.clearAllMocks();
    });

    it('should delete non draft file', () => {
      const store = mockStore({
        config: Map({
          publish_mode: 'editorial_workflow',
        }),
        integrations: Map(),
        mediaLibrary: Map({
          files: List(),
        }),
        entryDraft: Map({
          entry: Map({ isPersisting: false }),
        }),
      });

      const file = { name: 'name.png', id: 'id', path: 'static/media/name.png', draft: false };
      const assetProxy = { public_path: '/media/name.png' };
      createAssetProxy.mockReturnValue(assetProxy);

      return store.dispatch(deleteMedia(file)).then(() => {
        const actions = store.getActions();

        expect(actions).toHaveLength(4);
        expect(actions[0]).toEqual({ type: 'MEDIA_DELETE_REQUEST' });
        expect(actions[1]).toEqual({
          type: 'REMOVE_ASSET',
          payload: '/media/name.png',
        });
        expect(actions[2]).toEqual({
          type: 'REMOVE_DRAFT_ENTRY_MEDIA_FILE',
          payload: { id: 'id' },
        });
        expect(actions[3]).toEqual({
          type: 'MEDIA_DELETE_SUCCESS',
          payload: { file },
        });

        expect(backend.deleteMedia).toHaveBeenCalledTimes(1);
        expect(backend.deleteMedia).toHaveBeenCalledWith(
          store.getState().config,
          'static/media/name.png',
        );
      });
    });

    it('should not delete a draft file', () => {
      const store = mockStore({
        config: Map({
          publish_mode: 'editorial_workflow',
        }),
        integrations: Map(),
        mediaLibrary: Map({
          files: List(),
        }),
        entryDraft: Map({
          entry: Map({ isPersisting: false }),
        }),
      });

      const file = { name: 'name.png', id: 'id', path: 'static/media/name.png', draft: true };
      const assetProxy = { public_path: '/media/name.png' };
      createAssetProxy.mockReturnValue(assetProxy);

      return store.dispatch(deleteMedia(file)).then(() => {
        expect(backend.deleteMedia).toHaveBeenCalledTimes(0);
      });
    });
  });

  describe('addMediaFilesToLibrary', () => {
    it('should not wait if media library is loaded', () => {
      const store = mockStore({
        mediaLibrary: Map({
          isLoading: false,
        }),
      });

      const mediaFiles = [{ id: '1' }];
      store.dispatch(addMediaFilesToLibrary(mediaFiles));

      const actions = store.getActions();

      expect(actions).toHaveLength(1);
      expect(actions[0]).toEqual({
        payload: { mediaFiles: [{ id: '1' }] },
        type: 'ADD_MEDIA_FILES_TO_LIBRARY',
      });
    });

    it('should wait if media library is not loaded', () => {
      const { waitUntil } = require('../waitUntil');

      waitUntil.mockImplementation(payload => ({ type: 'WAIT_UNTIL', ...payload }));

      const store = mockStore({
        mediaLibrary: Map({}),
      });

      const mediaFiles = [{ id: '1' }];
      store.dispatch(addMediaFilesToLibrary(mediaFiles));

      const actions = store.getActions();

      expect(actions).toHaveLength(1);
      expect(actions[0]).toEqual({
        type: 'WAIT_UNTIL',
        predicate: expect.any(Function),
        run: expect.any(Function),
      });

      expect(actions[0].predicate({ type: 'MEDIA_LOAD_SUCCESS' })).toBe(true);
      expect(actions[0].run(store.dispatch)).toEqual({
        payload: { mediaFiles: [{ id: '1' }] },
        type: 'ADD_MEDIA_FILES_TO_LIBRARY',
      });
    });
  });
});

@ -3,11 +3,20 @@ import { actions as notifActions } from 'redux-notifications';
|
||||
import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
|
||||
import { serializeValues } from 'Lib/serializeEntryValues';
|
||||
import { currentBackend } from 'coreSrc/backend';
|
||||
import { getAsset, selectPublishedSlugs, selectUnpublishedSlugs } from 'Reducers';
|
||||
import { selectPublishedSlugs, selectUnpublishedSlugs } from 'Reducers';
|
||||
import { selectFields } from 'Reducers/collections';
|
||||
import { EDITORIAL_WORKFLOW } from 'Constants/publishModes';
|
||||
import { EDITORIAL_WORKFLOW_ERROR } from 'netlify-cms-lib-util';
|
||||
import { loadEntry } from './entries';
|
||||
import {
|
||||
loadEntry,
|
||||
getMediaAssets,
|
||||
setDraftEntryMediaFiles,
|
||||
clearDraftEntryMediaFiles,
|
||||
} from './entries';
|
||||
import { createAssetProxy } from 'ValueObjects/AssetProxy';
|
||||
import { addAssets } from './media';
|
||||
import { addMediaFilesToLibrary } from './mediaLibrary';
|
||||
|
||||
import ValidationErrorTypes from 'Constants/validationErrorTypes';
|
||||
|
||||
const { notifSend } = notifActions;
|
||||
@ -230,30 +239,55 @@ function unpublishedEntryDeleteError(collection, slug, transactionID) {
|
||||
*/
|
||||
|
||||
export function loadUnpublishedEntry(collection, slug) {
|
||||
return (dispatch, getState) => {
|
||||
return async (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const backend = currentBackend(state.config);
|
||||
|
||||
dispatch(unpublishedEntryLoading(collection, slug));
|
||||
backend
|
||||
.unpublishedEntry(collection, slug)
|
||||
.then(entry => dispatch(unpublishedEntryLoaded(collection, entry)))
|
||||
.catch(error => {
|
||||
if (error.name === EDITORIAL_WORKFLOW_ERROR && error.notUnderEditorialWorkflow) {
|
||||
dispatch(unpublishedEntryRedirected(collection, slug));
|
||||
dispatch(loadEntry(collection, slug));
|
||||
} else {
|
||||
dispatch(
|
||||
notifSend({
|
||||
message: {
|
||||
key: 'ui.toast.onFailToLoadEntries',
|
||||
details: error,
|
||||
},
|
||||
kind: 'danger',
|
||||
dismissAfter: 8000,
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const entry = await backend.unpublishedEntry(collection, slug);
|
||||
const mediaFiles = entry.mediaFiles;
|
||||
const assetProxies = await Promise.all(
|
||||
mediaFiles.map(({ file }) => createAssetProxy(file.name, file)),
|
||||
);
|
||||
dispatch(addAssets(assetProxies));
|
||||
dispatch(
|
||||
setDraftEntryMediaFiles(
|
||||
assetProxies.map((asset, index) => ({
|
||||
...asset,
|
||||
...mediaFiles[index],
|
||||
draft: true,
|
||||
})),
|
||||
),
|
||||
);
|
||||
dispatch(
|
||||
addMediaFilesToLibrary(
|
||||
mediaFiles.map(file => ({
|
||||
...file,
|
||||
draft: true,
|
||||
})),
|
||||
),
|
||||
);
|
||||
|
||||
dispatch(unpublishedEntryLoaded(collection, entry));
|
||||
} catch (error) {
|
||||
if (error.name === EDITORIAL_WORKFLOW_ERROR && error.notUnderEditorialWorkflow) {
|
||||
dispatch(unpublishedEntryRedirected(collection, slug));
|
||||
dispatch(loadEntry(collection, slug));
|
||||
} else {
|
||||
dispatch(
|
||||
notifSend({
|
||||
message: {
|
||||
key: 'ui.toast.onFailToLoadEntries',
|
||||
details: error,
|
||||
},
|
||||
kind: 'danger',
|
||||
dismissAfter: 8000,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@ -314,7 +348,7 @@ export function persistUnpublishedEntry(collection, existingUnpublishedEntry) {
|
||||
|
||||
const backend = currentBackend(state.config);
|
||||
const transactionID = uuid();
|
||||
const assetProxies = entryDraft.get('mediaFiles').map(path => getAsset(state, path));
|
||||
const assetProxies = getMediaAssets(state, entryDraft.get('mediaFiles'));
|
||||
const entry = entryDraft.get('entry');
|
||||
|
||||
/**
|
||||
@ -455,7 +489,7 @@ export function publishUnpublishedEntry(collection, slug) {
|
||||
dispatch(unpublishedEntryPublishRequest(collection, slug, transactionID));
|
||||
return backend
|
||||
.publishUnpublishedEntry(collection, slug)
|
||||
.then(() => {
|
||||
.then(({ mediaFiles }) => {
|
||||
dispatch(
|
||||
notifSend({
|
||||
message: { key: 'ui.toast.entryPublished' },
|
||||
@ -463,8 +497,12 @@ export function publishUnpublishedEntry(collection, slug) {
|
||||
dismissAfter: 4000,
|
||||
}),
|
||||
);
|
||||
|
||||
dispatch(unpublishedEntryPublished(collection, slug, transactionID));
|
||||
dispatch(loadEntry(collections.get(collection), slug));
|
||||
|
||||
dispatch(addMediaFilesToLibrary(mediaFiles.map(file => ({ ...file, draft: false }))));
|
||||
dispatch(clearDraftEntryMediaFiles());
|
||||
})
|
||||
.catch(error => {
|
||||
dispatch(
|
||||
|
@ -9,7 +9,10 @@ import { selectFields } from 'Reducers/collections';
|
||||
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
|
||||
import { Cursor } from 'netlify-cms-lib-util';
|
||||
import { createEntry } from 'ValueObjects/Entry';
|
||||
import { createAssetProxy } from 'ValueObjects/AssetProxy';
|
||||
import ValidationErrorTypes from 'Constants/validationErrorTypes';
|
||||
import { deleteMedia, addMediaFilesToLibrary } from './mediaLibrary';
|
||||
import { addAssets } from './media';
|
||||
|
||||
const { notifSend } = notifActions;
|
||||
|
||||
@ -42,6 +45,11 @@ export const ENTRY_DELETE_REQUEST = 'ENTRY_DELETE_REQUEST';
|
||||
export const ENTRY_DELETE_SUCCESS = 'ENTRY_DELETE_SUCCESS';
|
||||
export const ENTRY_DELETE_FAILURE = 'ENTRY_DELETE_FAILURE';
|
||||
|
||||
export const ADD_DRAFT_ENTRY_MEDIA_FILE = 'ADD_DRAFT_ENTRY_MEDIA_FILE';
|
||||
export const SET_DRAFT_ENTRY_MEDIA_FILES = 'SET_DRAFT_ENTRY_MEDIA_FILES';
|
||||
export const REMOVE_DRAFT_ENTRY_MEDIA_FILE = 'REMOVE_DRAFT_ENTRY_MEDIA_FILE';
|
||||
export const CLEAR_DRAFT_ENTRY_MEDIA_FILES = 'CLEAR_DRAFT_ENTRY_MEDIA_FILES';
|
||||
|
||||
/*
|
||||
* Simple Action Creators (Internal)
|
||||
* We still need to export them for tests
|
||||
@ -185,16 +193,24 @@ export function emptyDraftCreated(entry) {
|
||||
/*
|
||||
* Exported simple Action Creators
|
||||
*/
|
||||
export function createDraftFromEntry(entry, metadata) {
|
||||
export function createDraftFromEntry(entry, metadata, mediaFiles) {
|
||||
return {
|
||||
type: DRAFT_CREATE_FROM_ENTRY,
|
||||
payload: { entry, metadata },
|
||||
payload: { entry, metadata, mediaFiles },
|
||||
};
|
||||
}
|
||||
|
||||
export function discardDraft() {
|
||||
return {
|
||||
type: DRAFT_DISCARD,
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
|
||||
const mediaDrafts = state.entryDraft.get('mediaFiles').filter(file => file.draft);
|
||||
|
||||
mediaDrafts.forEach(file => {
|
||||
dispatch(deleteMedia(file));
|
||||
});
|
||||
|
||||
dispatch({ type: DRAFT_DISCARD });
|
||||
};
|
||||
}
|
||||
|
||||
@ -223,24 +239,55 @@ export function clearFieldErrors() {
|
||||
return { type: DRAFT_CLEAR_ERRORS };
|
||||
}
|
||||
|
||||
export function localBackupRetrieved(entry) {
|
||||
export function localBackupRetrieved(entry, mediaFiles) {
|
||||
return {
|
||||
type: DRAFT_LOCAL_BACKUP_RETRIEVED,
|
||||
payload: { entry },
|
||||
payload: { entry, mediaFiles },
|
||||
};
|
||||
}
|
||||
|
||||
export function loadLocalBackup() {
|
||||
return {
|
||||
type: DRAFT_CREATE_FROM_LOCAL_BACKUP,
|
||||
return (dispatch, getState) => {
|
||||
dispatch({
|
||||
type: DRAFT_CREATE_FROM_LOCAL_BACKUP,
|
||||
});
|
||||
|
||||
// only add media files to the library after loading from backup was approved
|
||||
const state = getState();
|
||||
const mediaFiles = state.entryDraft.get('mediaFiles').toJS();
|
||||
const filesToAdd = mediaFiles.map(file => ({
|
||||
...file,
|
||||
draft: true,
|
||||
}));
|
||||
dispatch(addMediaFilesToLibrary(filesToAdd));
|
||||
};
|
||||
}
|
||||
|
||||
export function persistLocalBackup(entry, collection) {
|
||||
export function addDraftEntryMediaFile(file) {
|
||||
return { type: ADD_DRAFT_ENTRY_MEDIA_FILE, payload: file };
|
||||
}
|
||||
|
||||
export function setDraftEntryMediaFiles(files) {
|
||||
return { type: SET_DRAFT_ENTRY_MEDIA_FILES, payload: files };
|
||||
}
|
||||
|
||||
export function removeDraftEntryMediaFile(file) {
|
||||
return { type: REMOVE_DRAFT_ENTRY_MEDIA_FILE, payload: file };
|
||||
}
|
||||
|
||||
export function clearDraftEntryMediaFiles() {
|
||||
return { type: CLEAR_DRAFT_ENTRY_MEDIA_FILES };
|
||||
}
|
||||
|
||||
export function persistLocalBackup(entry, collection, mediaFiles) {
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const backend = currentBackend(state.config);
|
||||
return backend.persistLocalDraftBackup(entry, collection);
|
||||
|
||||
// persist any pending related media files and assets
|
||||
const assets = getMediaAssets(state, mediaFiles);
|
||||
|
||||
return backend.persistLocalDraftBackup(entry, collection, mediaFiles, assets);
|
||||
};
|
||||
}
|
||||
|
||||
@ -248,9 +295,16 @@ export function retrieveLocalBackup(collection, slug) {
|
||||
return async (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const backend = currentBackend(state.config);
|
||||
const entry = await backend.getLocalDraftBackup(collection, slug);
|
||||
const { entry, mediaFiles, assets } = await backend.getLocalDraftBackup(collection, slug);
|
||||
|
||||
if (entry) {
|
||||
return dispatch(localBackupRetrieved(entry));
|
||||
// load assets from backup
|
||||
const assetProxies = await Promise.all(
|
||||
assets.map(asset => createAssetProxy(asset.value, asset.fileObj)),
|
||||
);
|
||||
dispatch(addAssets(assetProxies));
|
||||
|
||||
return dispatch(localBackupRetrieved(entry, mediaFiles));
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -462,6 +516,10 @@ export function createEmptyDraftData(fields, withNameKey = true) {
|
||||
}, {});
|
||||
}
|
||||
|
||||
export function getMediaAssets(state, mediaFiles) {
|
||||
return mediaFiles.map(file => getAsset(state, file.public_path));
|
||||
}
|
||||
|
||||
export function persistEntry(collection) {
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
@ -491,7 +549,7 @@ export function persistEntry(collection) {
|
||||
}
|
||||
|
||||
const backend = currentBackend(state.config);
|
||||
const assetProxies = entryDraft.get('mediaFiles').map(path => getAsset(state, path));
|
||||
const assetProxies = getMediaAssets(state, entryDraft.get('mediaFiles'));
|
||||
const entry = entryDraft.get('entry');
|
||||
|
||||
/**
|
||||
|
@ -1,6 +1,11 @@
|
||||
export const ADD_ASSETS = 'ADD_ASSETS';
|
||||
export const ADD_ASSET = 'ADD_ASSET';
|
||||
export const REMOVE_ASSET = 'REMOVE_ASSET';
|
||||
|
||||
export function addAssets(assets) {
|
||||
return { type: ADD_ASSETS, payload: assets };
|
||||
}
|
||||
|
||||
export function addAsset(assetProxy) {
|
||||
return { type: ADD_ASSET, payload: assetProxy };
|
||||
}
|
||||
|
@ -2,11 +2,14 @@ import { Map } from 'immutable';
|
||||
import { actions as notifActions } from 'redux-notifications';
|
||||
import { resolveMediaFilename, getBlobSHA } from 'netlify-cms-lib-util';
|
||||
import { currentBackend } from 'coreSrc/backend';
|
||||
import { EDITORIAL_WORKFLOW } from 'Constants/publishModes';
|
||||
import { createAssetProxy } from 'ValueObjects/AssetProxy';
|
||||
import { selectIntegration } from 'Reducers';
|
||||
import { getIntegrationProvider } from 'Integrations';
|
||||
import { addAsset } from './media';
|
||||
import { addAsset, removeAsset } from './media';
|
||||
import { addDraftEntryMediaFile, removeDraftEntryMediaFile } from './entries';
|
||||
import { sanitizeSlug } from 'Lib/urlHelper';
|
||||
import { waitUntil } from './waitUntil';
|
||||
|
||||
const { notifSend } = notifActions;
|
||||
|
||||
@ -27,6 +30,7 @@ export const MEDIA_DELETE_FAILURE = 'MEDIA_DELETE_FAILURE';
|
||||
export const MEDIA_DISPLAY_URL_REQUEST = 'MEDIA_DISPLAY_URL_REQUEST';
|
||||
export const MEDIA_DISPLAY_URL_SUCCESS = 'MEDIA_DISPLAY_URL_SUCCESS';
|
||||
export const MEDIA_DISPLAY_URL_FAILURE = 'MEDIA_DISPLAY_URL_FAILURE';
|
||||
export const ADD_MEDIA_FILES_TO_LIBRARY = 'ADD_MEDIA_FILES_TO_LIBRARY';
|
||||
|
||||
export function createMediaLibrary(instance) {
|
||||
const api = {
|
||||
@ -195,14 +199,41 @@ export function persistMedia(file, opts = {}) {
|
||||
const id = await getBlobSHA(file);
|
||||
const assetProxy = await createAssetProxy(fileName, file, false, privateUpload);
|
||||
dispatch(addAsset(assetProxy));
|
||||
|
||||
const entry = state.entryDraft.get('entry');
|
||||
const useWorkflow = state.config.getIn(['publish_mode']) === EDITORIAL_WORKFLOW;
|
||||
const draft = entry && !entry.isEmpty() && useWorkflow;
|
||||
|
||||
if (!integration) {
|
||||
const asset = await backend.persistMedia(state.config, assetProxy);
|
||||
const asset = await backend.persistMedia(state.config, assetProxy, draft);
|
||||
|
||||
const assetId = asset.id || id;
|
||||
const displayURL = asset.displayURL || URL.createObjectURL(file);
|
||||
return dispatch(mediaPersisted({ id, displayURL, ...asset }));
|
||||
|
||||
if (draft) {
|
||||
dispatch(
|
||||
addDraftEntryMediaFile({
|
||||
...asset,
|
||||
id: assetId,
|
||||
draft,
|
||||
public_path: assetProxy.public_path,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
return dispatch(
|
||||
mediaPersisted({
|
||||
...asset,
|
||||
id: assetId,
|
||||
displayURL,
|
||||
draft,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
return dispatch(
|
||||
mediaPersisted(
|
||||
{ id, displayURL: URL.createObjectURL(file), ...assetProxy.asset },
|
||||
{ id, displayURL: URL.createObjectURL(file), ...assetProxy.asset, draft },
|
||||
{ privateUpload },
|
||||
),
|
||||
);
|
||||
@ -222,37 +253,18 @@ export function persistMedia(file, opts = {}) {
|
||||
|
||||
export function deleteMedia(file, opts = {}) {
|
||||
const { privateUpload } = opts;
|
||||
return (dispatch, getState) => {
|
||||
return async (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const backend = currentBackend(state.config);
|
||||
const integration = selectIntegration(state, null, 'assetStore');
|
||||
if (integration) {
|
||||
const provider = getIntegrationProvider(state.integrations, backend.getToken, integration);
|
||||
dispatch(mediaDeleting());
|
||||
return provider
|
||||
.delete(file.id)
|
||||
.then(() => {
|
||||
return dispatch(mediaDeleted(file, { privateUpload }));
|
||||
})
|
||||
.catch(error => {
|
||||
console.error(error);
|
||||
dispatch(
|
||||
notifSend({
|
||||
message: `Failed to delete media: ${error.message}`,
|
||||
kind: 'danger',
|
||||
dismissAfter: 8000,
|
||||
}),
|
||||
);
|
||||
return dispatch(mediaDeleteFailed({ privateUpload }));
|
||||
});
|
||||
}
|
||||
dispatch(mediaDeleting());
|
||||
return backend
|
||||
.deleteMedia(state.config, file.path)
|
||||
.then(() => {
|
||||
return dispatch(mediaDeleted(file));
|
||||
})
|
||||
.catch(error => {
|
||||
|
||||
try {
|
||||
await provider.delete(file.id);
|
||||
return dispatch(mediaDeleted(file, { privateUpload }));
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
dispatch(
|
||||
notifSend({
|
||||
@ -261,8 +273,32 @@ export function deleteMedia(file, opts = {}) {
|
||||
dismissAfter: 8000,
|
||||
}),
|
||||
);
|
||||
return dispatch(mediaDeleteFailed());
|
||||
});
|
||||
return dispatch(mediaDeleteFailed({ privateUpload }));
|
||||
}
|
||||
}
|
||||
dispatch(mediaDeleting());
|
||||
|
||||
try {
|
||||
const assetProxy = await createAssetProxy(file.name, file);
|
||||
dispatch(removeAsset(assetProxy.public_path));
|
||||
dispatch(removeDraftEntryMediaFile({ id: file.id }));
|
||||
|
||||
if (!file.draft) {
|
||||
await backend.deleteMedia(state.config, file.path);
|
||||
}
|
||||
|
||||
return dispatch(mediaDeleted(file));
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
dispatch(
|
||||
notifSend({
|
||||
message: `Failed to delete media: ${error.message}`,
|
||||
kind: 'danger',
|
||||
dismissAfter: 8000,
|
||||
}),
|
||||
);
|
||||
return dispatch(mediaDeleteFailed());
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@ -335,6 +371,27 @@ export function mediaPersisted(asset, opts = {}) {
|
||||
};
|
||||
}
|
||||
|
||||
export function addMediaFilesToLibrary(mediaFiles) {
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const action = {
|
||||
type: ADD_MEDIA_FILES_TO_LIBRARY,
|
||||
payload: { mediaFiles },
|
||||
};
|
||||
// add media files to library only after the library finished loading
|
||||
if (state.mediaLibrary.get('isLoading') === false) {
|
||||
dispatch(action);
|
||||
} else {
|
||||
dispatch(
|
||||
waitUntil({
|
||||
predicate: ({ type }) => type === MEDIA_LOAD_SUCCESS,
|
||||
run: dispatch => dispatch(action),
|
||||
}),
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function mediaPersistFailed(error, opts = {}) {
|
||||
const { privateUpload } = opts;
|
||||
return { type: MEDIA_PERSIST_FAILURE, payload: { privateUpload } };
|
||||
|
packages/netlify-cms-core/src/actions/waitUntil.js
@ -0,0 +1,9 @@
import { WAIT_UNTIL_ACTION } from '../redux/middleware/waitUntilAction';

export function waitUntil({ predicate, run }) {
  return {
    type: WAIT_UNTIL_ACTION,
    predicate,
    run,
  };
}

@ -402,22 +402,31 @@ export class Backend {
|
||||
const key = getEntryBackupKey(collection.get('name'), slug);
|
||||
const backup = await localForage.getItem(key);
|
||||
if (!backup || !backup.raw.trim()) {
|
||||
return;
|
||||
return {};
|
||||
}
|
||||
const { raw, path } = backup;
|
||||
const { raw, path, mediaFiles = [], assets = [] } = backup;
|
||||
|
||||
const label = selectFileEntryLabel(collection, slug);
|
||||
return this.entryWithFormat(collection, slug)(
|
||||
const entry = this.entryWithFormat(collection, slug)(
|
||||
createEntry(collection.get('name'), slug, path, { raw, label }),
|
||||
);
|
||||
|
||||
return { entry, mediaFiles, assets };
|
||||
}
|
||||
|
||||
async persistLocalDraftBackup(entry, collection) {
|
||||
async persistLocalDraftBackup(entry, collection, mediaFiles, assets) {
|
||||
const key = getEntryBackupKey(collection.get('name'), entry.get('slug'));
|
||||
const raw = this.entryToRaw(collection, entry);
|
||||
if (!raw.trim()) {
|
||||
return;
|
||||
}
|
||||
await localForage.setItem(key, { raw, path: entry.get('path') });
|
||||
|
||||
await localForage.setItem(key, {
|
||||
raw,
|
||||
path: entry.get('path'),
|
||||
mediaFiles: mediaFiles.toJS(),
|
||||
assets: assets.toJS(),
|
||||
});
|
||||
return localForage.setItem(getEntryBackupKey(), raw);
|
||||
}
|
||||
|
||||
@ -511,6 +520,7 @@ export class Backend {
|
||||
isModification: loadedEntry.isModification,
|
||||
});
|
||||
entry.metaData = loadedEntry.metaData;
|
||||
entry.mediaFiles = loadedEntry.mediaFiles;
|
||||
return entry;
|
||||
})
|
||||
.then(this.entryWithFormat(collection, slug));
|
||||
@ -663,7 +673,7 @@ export class Backend {
|
||||
return this.implementation.persistEntry(entryObj, MediaFiles, opts).then(() => entryObj.slug);
|
||||
}
|
||||
|
||||
async persistMedia(config, file) {
|
||||
async persistMedia(config, file, draft) {
|
||||
const user = await this.currentUser();
|
||||
const options = {
|
||||
commitMessage: commitMessageFormatter(
|
||||
@ -676,6 +686,7 @@ export class Backend {
|
||||
},
|
||||
user.useOpenAuthoring,
|
||||
),
|
||||
draft,
|
||||
};
|
||||
return this.implementation.persistMedia(file, options);
|
||||
}
|
||||
|
@ -41,7 +41,7 @@ const navigateToNewEntry = collectionName => navigateCollection(`${collectionNam
|
||||
const navigateToEntry = (collectionName, slug) =>
|
||||
navigateCollection(`${collectionName}/entries/${slug}`);
|
||||
|
||||
class Editor extends React.Component {
|
||||
export class Editor extends React.Component {
|
||||
static propTypes = {
|
||||
boundGetAsset: PropTypes.func.isRequired,
|
||||
changeDraftField: PropTypes.func.isRequired,
|
||||
@ -79,10 +79,10 @@ class Editor extends React.Component {
|
||||
}),
|
||||
hasChanged: PropTypes.bool,
|
||||
t: PropTypes.func.isRequired,
|
||||
retrieveLocalBackup: PropTypes.func,
|
||||
localBackup: PropTypes.bool,
|
||||
retrieveLocalBackup: PropTypes.func.isRequired,
|
||||
localBackup: ImmutablePropTypes.map,
|
||||
loadLocalBackup: PropTypes.func,
|
||||
persistLocalBackup: PropTypes.func,
|
||||
persistLocalBackup: PropTypes.func.isRequired,
|
||||
deleteLocalBackup: PropTypes.func,
|
||||
};
|
||||
|
||||
@ -190,7 +190,11 @@ class Editor extends React.Component {
|
||||
}
|
||||
|
||||
if (this.props.hasChanged) {
|
||||
this.createBackup(this.props.entryDraft.get('entry'), this.props.collection);
|
||||
this.createBackup(
|
||||
this.props.entryDraft.get('entry'),
|
||||
this.props.collection,
|
||||
this.props.entryDraft.get('mediaFiles'),
|
||||
);
|
||||
}
|
||||
|
||||
if (prevProps.entry === this.props.entry) return;
|
||||
@ -205,7 +209,8 @@ class Editor extends React.Component {
|
||||
const values = deserializeValues(entry.get('data'), fields);
|
||||
const deserializedEntry = entry.set('data', values);
|
||||
const fieldsMetaData = this.props.entryDraft && this.props.entryDraft.get('fieldsMetaData');
|
||||
this.createDraft(deserializedEntry, fieldsMetaData);
|
||||
const mediaFiles = this.props.entryDraft && this.props.entryDraft.get('mediaFiles');
|
||||
this.createDraft(deserializedEntry, fieldsMetaData, mediaFiles);
|
||||
} else if (newEntry) {
|
||||
prevProps.createEmptyDraft(collection);
|
||||
}
|
||||
@ -217,12 +222,12 @@ class Editor extends React.Component {
|
||||
window.removeEventListener('beforeunload', this.exitBlocker);
|
||||
}
|
||||
|
||||
createBackup = debounce(function(entry, collection) {
|
||||
this.props.persistLocalBackup(entry, collection);
|
||||
createBackup = debounce(function(entry, collection, mediaFiles) {
|
||||
this.props.persistLocalBackup(entry, collection, mediaFiles);
|
||||
}, 2000);
|
||||
|
||||
createDraft = (entry, metadata) => {
|
||||
if (entry) this.props.createDraftFromEntry(entry, metadata);
|
||||
createDraft = (entry, metadata, mediaFiles) => {
|
||||
if (entry) this.props.createDraftFromEntry(entry, metadata, mediaFiles);
|
||||
};
|
||||
|
||||
handleChangeStatus = newStatusName => {
|
||||
|
@ -0,0 +1,247 @@
|
||||
import React from 'react';
|
||||
import { Editor } from '../Editor';
|
||||
import { render } from '@testing-library/react';
|
||||
import { fromJS } from 'immutable';
|
||||
|
||||
jest.mock('lodash/debounce', () => {
|
||||
const flush = jest.fn();
|
||||
return func => {
|
||||
func.flush = flush;
|
||||
return func;
|
||||
};
|
||||
});
|
||||
jest.mock('../EditorInterface', () => props => <mock-editor-interface {...props} />);
|
||||
jest.mock('netlify-cms-ui-default', () => {
|
||||
return {
|
||||
// eslint-disable-next-line react/display-name
|
||||
Loader: props => <mock-loader {...props} />,
|
||||
};
|
||||
});
|
||||
jest.mock('Routing/history');
|
||||
|
||||
describe('Editor', () => {
|
||||
const props = {
|
||||
boundGetAsset: jest.fn(),
|
||||
changeDraftField: jest.fn(),
|
||||
changeDraftFieldValidation: jest.fn(),
|
||||
collection: fromJS({ name: 'posts' }),
|
||||
createDraftFromEntry: jest.fn(),
|
||||
createEmptyDraft: jest.fn(),
|
||||
discardDraft: jest.fn(),
|
||||
entry: fromJS({}),
|
||||
entryDraft: fromJS({}),
|
||||
loadEntry: jest.fn(),
|
||||
persistEntry: jest.fn(),
|
||||
deleteEntry: jest.fn(),
|
||||
showDelete: true,
|
||||
fields: fromJS([]),
|
||||
slug: 'slug',
|
||||
newEntry: true,
|
||||
updateUnpublishedEntryStatus: jest.fn(),
|
||||
publishUnpublishedEntry: jest.fn(),
|
||||
deleteUnpublishedEntry: jest.fn(),
|
||||
logoutUser: jest.fn(),
|
||||
loadEntries: jest.fn(),
|
||||
deployPreview: fromJS({}),
|
||||
loadDeployPreview: jest.fn(),
|
||||
user: fromJS({}),
|
||||
t: jest.fn(key => key),
|
||||
localBackup: fromJS({}),
|
||||
retrieveLocalBackup: jest.fn(),
|
||||
persistLocalBackup: jest.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should render loader when entryDraft is null', () => {
|
||||
// suppress prop type error
|
||||
jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
const { asFragment } = render(<Editor {...props} entryDraft={null} />);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
expect(console.error).toHaveBeenCalledTimes(1);
|
||||
expect(console.error).toHaveBeenCalledWith(
|
||||
'Warning: Failed prop type: Required prop `entryDraft` was not specified in `Editor`.\n in Editor',
|
||||
);
|
||||
});
|
||||
|
||||
it('should render loader when entryDraft entry is undefined', () => {
|
||||
const { asFragment } = render(<Editor {...props} entryDraft={fromJS({})} />);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render loader when entry is fetching', () => {
|
||||
const { asFragment } = render(
|
||||
<Editor {...props} entryDraft={fromJS({ entry: {} })} entry={fromJS({ isFetching: true })} />,
|
||||
);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should render editor interface when entry is not fetching', () => {
|
||||
const { asFragment } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should call retrieveLocalBackup on mount', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.retrieveLocalBackup).toHaveBeenCalledTimes(1);
|
||||
expect(props.retrieveLocalBackup).toHaveBeenCalledWith(props.collection, props.slug);
|
||||
});
|
||||
|
||||
it('should create new draft on new entry when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
newEntry={true}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.createEmptyDraft).toHaveBeenCalledTimes(1);
|
||||
expect(props.createEmptyDraft).toHaveBeenCalledWith(props.collection);
|
||||
expect(props.loadEntry).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('should load entry on existing entry when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
newEntry={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.createEmptyDraft).toHaveBeenCalledTimes(0);
|
||||
expect(props.loadEntry).toHaveBeenCalledTimes(1);
|
||||
expect(props.loadEntry).toHaveBeenCalledWith(props.collection, 'slug');
|
||||
});
|
||||
|
||||
it('should load entries when entries are not loaded when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
collectionEntriesLoaded={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.loadEntries).toHaveBeenCalledTimes(1);
|
||||
expect(props.loadEntries).toHaveBeenCalledWith(props.collection);
|
||||
});
|
||||
|
||||
it('should not load entries when entries are loaded when mounting', () => {
|
||||
render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
collectionEntriesLoaded={true}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.loadEntries).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('should flush debounced createBackup, discard draft and remove exit blocker on unmount', () => {
|
||||
window.removeEventListener = jest.fn();
|
||||
const debounce = require('lodash/debounce');
|
||||
|
||||
const flush = debounce({}).flush;
|
||||
const { unmount } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' }, hasChanged: true })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
jest.clearAllMocks();
|
||||
unmount();
|
||||
|
||||
expect(flush).toHaveBeenCalledTimes(1);
|
||||
expect(props.discardDraft).toHaveBeenCalledTimes(1);
|
||||
expect(window.removeEventListener).toHaveBeenCalledWith('beforeunload', expect.any(Function));
|
||||
|
||||
const callback = window.removeEventListener.mock.calls.find(
|
||||
call => call[0] === 'beforeunload',
|
||||
)[1];
|
||||
|
||||
const event = {};
|
||||
callback(event);
|
||||
expect(event).toEqual({ returnValue: 'editor.editor.onLeavePage' });
|
||||
});
|
||||
|
||||
it('should persist backup when changed', () => {
|
||||
const { rerender } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
jest.clearAllMocks();
|
||||
rerender(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' }, mediaFiles: [{ id: '1' }] })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
hasChanged={true}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.persistLocalBackup).toHaveBeenCalledTimes(1);
|
||||
expect(props.persistLocalBackup).toHaveBeenCalledWith(
|
||||
fromJS({ slug: 'slug' }),
|
||||
props.collection,
|
||||
fromJS([{ id: '1' }]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should create draft from entry when done fetching', () => {
|
||||
const { rerender } = render(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({ entry: { slug: 'slug' } })}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
jest.clearAllMocks();
|
||||
rerender(
|
||||
<Editor
|
||||
{...props}
|
||||
entryDraft={fromJS({
|
||||
entry: { slug: 'slug' },
|
||||
mediaFiles: [{ id: '1' }],
|
||||
fieldsMetaData: {},
|
||||
})}
|
||||
entry={fromJS({ isFetching: false })}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(props.createDraftFromEntry).toHaveBeenCalledTimes(1);
|
||||
expect(props.createDraftFromEntry).toHaveBeenCalledWith(
|
||||
fromJS({ isFetching: false, data: {} }),
|
||||
fromJS({}),
|
||||
fromJS([{ id: '1' }]),
|
||||
);
|
||||
});
|
||||
});
|
@ -0,0 +1,45 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`Editor should render editor interface when entry is not fetching 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-editor-interface
|
||||
collection="Map { \\"name\\": \\"posts\\" }"
|
||||
deploypreview="Map {}"
|
||||
entry="Map { \\"slug\\": \\"slug\\" }"
|
||||
fields="List []"
|
||||
isnewentry="true"
|
||||
showdelete="true"
|
||||
user="Map {}"
|
||||
/>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Editor should render loader when entry is fetching 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-loader
|
||||
active="true"
|
||||
>
|
||||
editor.editor.loadingEntry
|
||||
</mock-loader>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Editor should render loader when entryDraft entry is undefined 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-loader
|
||||
active="true"
|
||||
>
|
||||
editor.editor.loadingEntry
|
||||
</mock-loader>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`Editor should render loader when entryDraft is null 1`] = `
|
||||
<DocumentFragment>
|
||||
<mock-loader
|
||||
active="true"
|
||||
>
|
||||
editor.editor.loadingEntry
|
||||
</mock-loader>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -118,7 +118,7 @@ class MediaLibrary extends React.Component {
|
||||
toTableData = files => {
|
||||
const tableData =
|
||||
files &&
|
||||
files.map(({ key, name, id, size, queryOrder, url, urlIsPublicPath, displayURL }) => {
|
||||
files.map(({ key, name, id, size, queryOrder, url, urlIsPublicPath, displayURL, draft }) => {
|
||||
const ext = fileExtension(name).toLowerCase();
|
||||
return {
|
||||
key,
|
||||
@ -130,6 +130,7 @@ class MediaLibrary extends React.Component {
|
||||
url,
|
||||
urlIsPublicPath,
|
||||
displayURL,
|
||||
draft,
|
||||
isImage: IMAGE_EXTENSIONS.includes(ext),
|
||||
isViewableImage: IMAGE_EXTENSIONS_VIEWABLE.includes(ext),
|
||||
};
|
||||
|
@ -27,6 +27,7 @@ const CardImageWrapper = styled.div`
|
||||
${effects.checkerboard};
|
||||
${shadows.inset};
|
||||
border-bottom: solid ${lengths.borderWidth} ${colors.textFieldBorder};
|
||||
position: relative;
|
||||
`;
|
||||
|
||||
const CardImage = styled.img`
|
||||
@ -53,6 +54,14 @@ const CardText = styled.p`
|
||||
line-height: 1.3 !important;
|
||||
`;
|
||||
|
||||
const DraftText = styled.p`
|
||||
color: ${colors.mediaDraftText};
|
||||
background-color: ${colors.mediaDraftBackground};
|
||||
position: absolute;
|
||||
padding: 8px;
|
||||
border-radius: ${lengths.borderRadius} 0px ${lengths.borderRadius} 0;
|
||||
`;
|
||||
|
||||
class MediaLibraryCard extends React.Component {
|
||||
render() {
|
||||
const {
|
||||
@ -60,11 +69,13 @@ class MediaLibraryCard extends React.Component {
|
||||
displayURL,
|
||||
text,
|
||||
onClick,
|
||||
draftText,
|
||||
width,
|
||||
margin,
|
||||
isPrivate,
|
||||
type,
|
||||
isViewableImage,
|
||||
isDraft,
|
||||
} = this.props;
|
||||
const url = displayURL.get('url');
|
||||
return (
|
||||
@ -77,7 +88,12 @@ class MediaLibraryCard extends React.Component {
|
||||
isPrivate={isPrivate}
|
||||
>
|
||||
<CardImageWrapper>
|
||||
{url && isViewableImage ? <CardImage src={url} /> : <CardFileIcon>{type}</CardFileIcon>}
|
||||
{isDraft ? <DraftText data-testid="draft-text">{draftText}</DraftText> : null}
|
||||
{url && isViewableImage ? (
|
||||
<CardImage src={url} />
|
||||
) : (
|
||||
<CardFileIcon data-testid="card-file-icon">{type}</CardFileIcon>
|
||||
)}
|
||||
</CardImageWrapper>
|
||||
<CardText>{text}</CardText>
|
||||
</Card>
|
||||
@ -96,12 +112,14 @@ MediaLibraryCard.propTypes = {
|
||||
displayURL: ImmutablePropTypes.map.isRequired,
|
||||
text: PropTypes.string.isRequired,
|
||||
onClick: PropTypes.func.isRequired,
|
||||
draftText: PropTypes.string.isRequired,
|
||||
width: PropTypes.string.isRequired,
|
||||
margin: PropTypes.string.isRequired,
|
||||
isPrivate: PropTypes.bool,
|
||||
type: PropTypes.string,
|
||||
isViewableImage: PropTypes.bool.isRequired,
|
||||
loadDisplayURL: PropTypes.func.isRequired,
|
||||
isDraft: PropTypes.bool,
|
||||
};
|
||||
|
||||
export default MediaLibraryCard;
|
||||
|
@ -32,6 +32,7 @@ const MediaLibraryCardGrid = ({
|
||||
onLoadMore,
|
||||
isPaginating,
|
||||
paginatingMessage,
|
||||
cardDraftText,
|
||||
cardWidth,
|
||||
cardMargin,
|
||||
isPrivate,
|
||||
@ -46,6 +47,8 @@ const MediaLibraryCardGrid = ({
|
||||
isSelected={isSelectedFile(file)}
|
||||
text={file.name}
|
||||
onClick={() => onAssetClick(file)}
|
||||
isDraft={file.draft}
|
||||
draftText={cardDraftText}
|
||||
width={cardWidth}
|
||||
margin={cardMargin}
|
||||
isPrivate={isPrivate}
|
||||
@ -74,6 +77,7 @@ MediaLibraryCardGrid.propTypes = {
|
||||
type: PropTypes.string.isRequired,
|
||||
url: PropTypes.string,
|
||||
urlIsPublicPath: PropTypes.bool,
|
||||
draft: PropTypes.bool,
|
||||
}),
|
||||
).isRequired,
|
||||
isSelectedFile: PropTypes.func.isRequired,
|
||||
@ -82,6 +86,7 @@ MediaLibraryCardGrid.propTypes = {
|
||||
onLoadMore: PropTypes.func.isRequired,
|
||||
isPaginating: PropTypes.bool,
|
||||
paginatingMessage: PropTypes.string,
|
||||
cardDraftText: PropTypes.string.isRequired,
|
||||
cardWidth: PropTypes.string.isRequired,
|
||||
cardMargin: PropTypes.string.isRequired,
|
||||
loadDisplayURL: PropTypes.func.isRequired,
|
||||
|
@ -170,6 +170,7 @@ const MediaLibraryModal = ({
|
||||
onLoadMore={handleLoadMore}
|
||||
isPaginating={isPaginating}
|
||||
paginatingMessage={t('mediaLibrary.mediaLibraryModal.loading')}
|
||||
cardDraftText={t('mediaLibrary.mediaLibraryCard.draft')}
|
||||
cardWidth={cardWidth}
|
||||
cardMargin={cardMargin}
|
||||
isPrivate={privateUpload}
|
||||
|
@ -0,0 +1,47 @@
|
||||
import React from 'react';
|
||||
import { Map } from 'immutable';
|
||||
import MediaLibraryCard from '../MediaLibraryCard';
|
||||
import { render } from '@testing-library/react';
|
||||
|
||||
describe('MediaLibraryCard', () => {
|
||||
const props = {
|
||||
displayURL: Map({ url: 'url' }),
|
||||
text: 'image.png',
|
||||
onClick: jest.fn(),
|
||||
draftText: 'Draft',
|
||||
width: '100px',
|
||||
margin: '10px',
|
||||
isViewableImage: true,
|
||||
loadDisplayURL: jest.fn(),
|
||||
};
|
||||
|
||||
it('should match snapshot for non draft image', () => {
|
||||
const { asFragment, queryByTestId } = render(<MediaLibraryCard {...props} />);
|
||||
|
||||
expect(queryByTestId('draft-text')).toBeNull();
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should match snapshot for draft image', () => {
|
||||
const { asFragment, getByTestId } = render(<MediaLibraryCard {...props} isDraft={true} />);
|
||||
expect(getByTestId('draft-text')).toHaveTextContent('Draft');
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should match snapshot for non viewable image', () => {
|
||||
const { asFragment, getByTestId } = render(
|
||||
<MediaLibraryCard {...props} isViewableImage={false} type="Not Viewable" />,
|
||||
);
|
||||
expect(getByTestId('card-file-icon')).toHaveTextContent('Not Viewable');
|
||||
expect(asFragment()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should call loadDisplayURL on mount when url is empty', () => {
|
||||
const loadDisplayURL = jest.fn();
|
||||
render(
|
||||
<MediaLibraryCard {...props} loadDisplayURL={loadDisplayURL} displayURL={Map({ url: '' })} />,
|
||||
);
|
||||
|
||||
expect(loadDisplayURL).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
@ -0,0 +1,211 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`MediaLibraryCard should match snapshot for draft image 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-8 {
|
||||
width: 100px;
|
||||
height: 240px;
|
||||
margin: 10px;
|
||||
border: solid 2px #dfdfe3;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.emotion-8:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
height: 162px;
|
||||
background-color: #f2f2f2;
|
||||
background-size: 16px 16px;
|
||||
background-position: 0 0,8px 8px;
|
||||
background-image: linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 ) , linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 );
|
||||
box-shadow: inset 0 0 4px rgba(68,74,87,0.3);
|
||||
border-bottom: solid 2px #dfdfe3;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
width: 100%;
|
||||
height: 160px;
|
||||
object-fit: contain;
|
||||
border-radius: 2px 2px 0 0;
|
||||
}
|
||||
|
||||
.emotion-6 {
|
||||
color: #798291;
|
||||
padding: 8px;
|
||||
margin-top: 20px;
|
||||
overflow-wrap: break-word;
|
||||
line-height: 1.3 !important;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
color: #70399f;
|
||||
background-color: #f6d8ff;
|
||||
position: absolute;
|
||||
padding: 8px;
|
||||
border-radius: 5px 0px 5px 0;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-8 emotion-9"
|
||||
tabindex="-1"
|
||||
width="100px"
|
||||
>
|
||||
<div
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
<p
|
||||
class="emotion-0 emotion-1"
|
||||
data-testid="draft-text"
|
||||
>
|
||||
Draft
|
||||
</p>
|
||||
<img
|
||||
class="emotion-2 emotion-3"
|
||||
src="url"
|
||||
/>
|
||||
</div>
|
||||
<p
|
||||
class="emotion-6 emotion-7"
|
||||
>
|
||||
image.png
|
||||
</p>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`MediaLibraryCard should match snapshot for non draft image 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
width: 100px;
|
||||
height: 240px;
|
||||
margin: 10px;
|
||||
border: solid 2px #dfdfe3;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.emotion-6:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
height: 162px;
|
||||
background-color: #f2f2f2;
|
||||
background-size: 16px 16px;
|
||||
background-position: 0 0,8px 8px;
|
||||
background-image: linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 ) , linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 );
|
||||
box-shadow: inset 0 0 4px rgba(68,74,87,0.3);
|
||||
border-bottom: solid 2px #dfdfe3;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
width: 100%;
|
||||
height: 160px;
|
||||
object-fit: contain;
|
||||
border-radius: 2px 2px 0 0;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
color: #798291;
|
||||
padding: 8px;
|
||||
margin-top: 20px;
|
||||
overflow-wrap: break-word;
|
||||
line-height: 1.3 !important;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-6 emotion-7"
|
||||
tabindex="-1"
|
||||
width="100px"
|
||||
>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<img
|
||||
class="emotion-0 emotion-1"
|
||||
src="url"
|
||||
/>
|
||||
</div>
|
||||
<p
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
image.png
|
||||
</p>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
||||
|
||||
exports[`MediaLibraryCard should match snapshot for non viewable image 1`] = `
|
||||
<DocumentFragment>
|
||||
.emotion-6 {
|
||||
width: 100px;
|
||||
height: 240px;
|
||||
margin: 10px;
|
||||
border: solid 2px #dfdfe3;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.emotion-6:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.emotion-2 {
|
||||
height: 162px;
|
||||
background-color: #f2f2f2;
|
||||
background-size: 16px 16px;
|
||||
background-position: 0 0,8px 8px;
|
||||
background-image: linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 ) , linear-gradient( 45deg, #e6e6e6 25%, transparent 25%, transparent 75%, #e6e6e6 75%, #e6e6e6 );
|
||||
box-shadow: inset 0 0 4px rgba(68,74,87,0.3);
|
||||
border-bottom: solid 2px #dfdfe3;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.emotion-4 {
|
||||
color: #798291;
|
||||
padding: 8px;
|
||||
margin-top: 20px;
|
||||
overflow-wrap: break-word;
|
||||
line-height: 1.3 !important;
|
||||
}
|
||||
|
||||
.emotion-0 {
|
||||
width: 100%;
|
||||
height: 160px;
|
||||
object-fit: cover;
|
||||
border-radius: 2px 2px 0 0;
|
||||
padding: 1em;
|
||||
font-size: 3em;
|
||||
}
|
||||
|
||||
<div
|
||||
class="emotion-6 emotion-7"
|
||||
tabindex="-1"
|
||||
width="100px"
|
||||
>
|
||||
<div
|
||||
class="emotion-2 emotion-3"
|
||||
>
|
||||
<div
|
||||
class="emotion-0 emotion-1"
|
||||
data-testid="card-file-icon"
|
||||
>
|
||||
Not Viewable
|
||||
</div>
|
||||
</div>
|
||||
<p
|
||||
class="emotion-4 emotion-5"
|
||||
>
|
||||
image.png
|
||||
</p>
|
||||
</div>
|
||||
</DocumentFragment>
|
||||
`;
|
@ -2,7 +2,7 @@ import { Map, List, fromJS } from 'immutable';
|
||||
import * as actions from 'Actions/entries';
|
||||
import reducer from '../entryDraft';
|
||||
|
||||
let initialState = Map({
|
||||
const initialState = Map({
|
||||
entry: Map(),
|
||||
mediaFiles: List(),
|
||||
fieldsMetaData: Map(),
|
||||
@ -62,6 +62,8 @@ describe('entryDraft reducer', () => {
|
||||
});
|
||||
|
||||
describe('persisting', () => {
|
||||
let initialState;
|
||||
|
||||
beforeEach(() => {
|
||||
initialState = fromJS({
|
||||
entities: {
|
||||
@ -111,4 +113,95 @@ describe('entryDraft reducer', () => {
|
||||
expect(newState.getIn(['entry', 'isPersisting'])).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('REMOVE_DRAFT_ENTRY_MEDIA_FILE', () => {
|
||||
it('should remove a media file', () => {
|
||||
const actualState = reducer(
|
||||
initialState.set('mediaFiles', List([{ id: '1' }, { id: '2' }])),
|
||||
actions.removeDraftEntryMediaFile({ id: '1' }),
|
||||
);
|
||||
|
||||
expect(actualState.toJS()).toEqual({
|
||||
entry: {},
|
||||
mediaFiles: [{ id: '2' }],
|
||||
fieldsMetaData: {},
|
||||
fieldsErrors: {},
|
||||
hasChanged: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('ADD_DRAFT_ENTRY_MEDIA_FILE', () => {
|
||||
it('should overwrite an existing media file', () => {
|
||||
const actualState = reducer(
|
||||
initialState.set('mediaFiles', List([{ id: '1', name: 'old' }])),
|
||||
actions.addDraftEntryMediaFile({ id: '1', name: 'new' }),
|
||||
);
|
||||
|
||||
expect(actualState.toJS()).toEqual({
|
||||
entry: {},
|
||||
mediaFiles: [{ id: '1', name: 'new' }],
|
||||
fieldsMetaData: {},
|
||||
fieldsErrors: {},
|
||||
hasChanged: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('SET_DRAFT_ENTRY_MEDIA_FILES', () => {
|
||||
it('should overwrite an existing media file', () => {
|
||||
const actualState = reducer(
|
||||
initialState,
|
||||
actions.setDraftEntryMediaFiles([{ id: '1' }, { id: '2' }]),
|
||||
);
|
||||
|
||||
expect(actualState.toJS()).toEqual({
|
||||
entry: {},
|
||||
mediaFiles: [{ id: '1' }, { id: '2' }],
|
||||
fieldsMetaData: {},
|
||||
fieldsErrors: {},
|
||||
hasChanged: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('DRAFT_CREATE_FROM_LOCAL_BACKUP', () => {
|
||||
it('should create draft from local backup', () => {
|
||||
const localBackup = Map({ entry: fromJS(entry), mediaFiles: List([{ id: '1' }]) });
|
||||
|
||||
const actualState = reducer(initialState.set('localBackup', localBackup), {
|
||||
type: actions.DRAFT_CREATE_FROM_LOCAL_BACKUP,
|
||||
});
|
||||
expect(actualState.toJS()).toEqual({
|
||||
entry: {
|
||||
...entry,
|
||||
newRecord: false,
|
||||
},
|
||||
mediaFiles: [{ id: '1' }],
|
||||
fieldsMetaData: {},
|
||||
fieldsErrors: {},
|
||||
hasChanged: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('DRAFT_LOCAL_BACKUP_RETRIEVED', () => {
|
||||
it('should set local backup', () => {
|
||||
const mediaFiles = [{ id: '1' }];
|
||||
|
||||
const actualState = reducer(initialState, actions.localBackupRetrieved(entry, mediaFiles));
|
||||
|
||||
expect(actualState.toJS()).toEqual({
|
||||
entry: {},
|
||||
mediaFiles: [],
|
||||
fieldsMetaData: {},
|
||||
fieldsErrors: {},
|
||||
hasChanged: false,
|
||||
localBackup: {
|
||||
entry,
|
||||
mediaFiles: [{ id: '1' }],
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -0,0 +1,67 @@
import { Map } from 'immutable';
import { ADD_MEDIA_FILES_TO_LIBRARY, mediaDeleted } from 'Actions/mediaLibrary';
import mediaLibrary from '../mediaLibrary';

jest.mock('uuid/v4');

describe('mediaLibrary', () => {
  const uuid = require('uuid/v4');

  it('should add media files to library', () => {
    uuid.mockReturnValue('newKey');

    expect(
      mediaLibrary(
        Map({
          files: [
            { sha: 'old', path: 'path', key: 'key1' },
            { sha: 'sha', path: 'some-other-pas', key: 'key2' },
          ],
        }),
        {
          type: ADD_MEDIA_FILES_TO_LIBRARY,
          payload: { mediaFiles: [{ sha: 'new', path: 'path' }] },
        },
      ),
    ).toEqual(
      Map({
        files: [
          { sha: 'new', path: 'path', key: 'newKey' },
          { sha: 'sha', path: 'some-other-pas', key: 'key2' },
        ],
      }),
    );
  });

  it('should remove media file by key', () => {
    expect(
      mediaLibrary(
        Map({
          files: [{ key: 'key1' }, { key: 'key2' }],
        }),
        mediaDeleted({ key: 'key1' }),
      ),
    ).toEqual(
      Map({
        isDeleting: false,
        files: [{ key: 'key2' }],
      }),
    );
  });

  it('should remove media file by id', () => {
    expect(
      mediaLibrary(
        Map({
          files: [{ id: 'id1' }, { id: 'id2' }],
        }),
        mediaDeleted({ id: 'id1' }),
      ),
    ).toEqual(
      Map({
        isDeleting: false,
        files: [{ id: 'id2' }],
      }),
    );
  });
});
@ -0,0 +1,25 @@
import { Map } from 'immutable';
import { addAssets, addAsset, removeAsset } from 'Actions/media';
import reducer from '../medias';

jest.mock('ValueObjects/AssetProxy');

describe('medias', () => {
  it('should add assets', () => {
    expect(reducer(Map(), addAssets([{ public_path: 'public_path' }]))).toEqual(
      Map({ public_path: { public_path: 'public_path' } }),
    );
  });

  it('should add asset', () => {
    expect(reducer(Map(), addAsset({ public_path: 'public_path' }))).toEqual(
      Map({ public_path: { public_path: 'public_path' } }),
    );
  });

  it('should remove asset', () => {
    expect(
      reducer(Map({ public_path: { public_path: 'public_path' } }), removeAsset('public_path')),
    ).toEqual(Map());
  });
});
@ -12,13 +12,16 @@ import {
  ENTRY_PERSIST_SUCCESS,
  ENTRY_PERSIST_FAILURE,
  ENTRY_DELETE_SUCCESS,
  ADD_DRAFT_ENTRY_MEDIA_FILE,
  SET_DRAFT_ENTRY_MEDIA_FILES,
  REMOVE_DRAFT_ENTRY_MEDIA_FILE,
  CLEAR_DRAFT_ENTRY_MEDIA_FILES,
} from 'Actions/entries';
import {
  UNPUBLISHED_ENTRY_PERSIST_REQUEST,
  UNPUBLISHED_ENTRY_PERSIST_SUCCESS,
  UNPUBLISHED_ENTRY_PERSIST_FAILURE,
} from 'Actions/editorialWorkflow';
import { ADD_ASSET, REMOVE_ASSET } from 'Actions/media';

const initialState = Map({
  entry: Map(),
@ -35,7 +38,7 @@ const entryDraftReducer = (state = Map(), action) => {
      return state.withMutations(state => {
        state.set('entry', action.payload.entry);
        state.setIn(['entry', 'newRecord'], false);
        state.set('mediaFiles', List());
        state.set('mediaFiles', action.payload.mediaFiles || List());
        // An existing entry may already have metadata. If we surfed away and back to its
        // editor page, the metadata will have been fetched already, so we shouldn't
        // clear it as to not break relation lists.
@ -56,19 +59,26 @@ const entryDraftReducer = (state = Map(), action) => {
    case DRAFT_CREATE_FROM_LOCAL_BACKUP:
      // Local Backup
      return state.withMutations(state => {
        const backupEntry = state.get('localBackup');
        const backupDraftEntry = state.get('localBackup');
        const backupEntry = backupDraftEntry.get('entry');
        state.delete('localBackup');
        state.set('entry', backupEntry);
        state.setIn(['entry', 'newRecord'], !backupEntry.get('path'));
        state.set('mediaFiles', List());
        state.set('mediaFiles', backupDraftEntry.get('mediaFiles'));
        state.set('fieldsMetaData', Map());
        state.set('fieldsErrors', Map());
        state.set('hasChanged', true);
      });
    case DRAFT_DISCARD:
      return initialState;
    case DRAFT_LOCAL_BACKUP_RETRIEVED:
      return state.set('localBackup', fromJS(action.payload.entry));
    case DRAFT_LOCAL_BACKUP_RETRIEVED: {
      const { entry, mediaFiles } = action.payload;
      const newState = new Map({
        entry: fromJS(entry),
        mediaFiles: List(mediaFiles),
      });
      return state.set('localBackup', newState);
    }
    case DRAFT_CHANGE_FIELD:
      return state.withMutations(state => {
        state.setIn(['entry', 'data', action.payload.field], action.payload.value);
@ -113,14 +123,28 @@ const entryDraftReducer = (state = Map(), action) => {
        state.set('hasChanged', false);
      });

    case ADD_ASSET:
    case ADD_DRAFT_ENTRY_MEDIA_FILE:
      if (state.has('mediaFiles')) {
        return state.update('mediaFiles', list => list.push(action.payload.public_path));
        return state.update('mediaFiles', list =>
          list.filterNot(file => file.id === action.payload.id).push({ ...action.payload }),
        );
      }
      return state;

    case REMOVE_ASSET:
      return state.update('mediaFiles', list => list.filterNot(path => path === action.payload));
    case SET_DRAFT_ENTRY_MEDIA_FILES: {
      return state.set('mediaFiles', List(action.payload));
    }

    case REMOVE_DRAFT_ENTRY_MEDIA_FILE:
      if (state.has('mediaFiles')) {
        return state.update('mediaFiles', list =>
          list.filterNot(file => file.id === action.payload.id),
        );
      }
      return state;

    case CLEAR_DRAFT_ENTRY_MEDIA_FILES:
      return state.set('mediaFiles', List());

    default:
      return state;
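
For illustration, a minimal sketch of the overwrite-by-id behaviour introduced by the ADD_DRAFT_ENTRY_MEDIA_FILE case above; the helper name and sample data are invented for this sketch, and only Immutable.js is assumed:

const { List } = require('immutable');

// A file with the same id replaces the draft's existing entry instead of duplicating it.
const addDraftMediaFile = (mediaFiles, payload) =>
  mediaFiles.filterNot(file => file.id === payload.id).push({ ...payload });

const before = List([{ id: '1', name: 'old' }, { id: '2', name: 'other' }]);
const after = addDraftMediaFile(before, { id: '1', name: 'new' });

console.log(after.toJS());
// [ { id: '2', name: 'other' }, { id: '1', name: 'new' } ]
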
@ -1,5 +1,6 @@
import { Map } from 'immutable';
import uuid from 'uuid/v4';
import { differenceBy } from 'lodash';
import {
  MEDIA_LIBRARY_OPEN,
  MEDIA_LIBRARY_CLOSE,
@ -18,6 +19,7 @@ import {
  MEDIA_DISPLAY_URL_REQUEST,
  MEDIA_DISPLAY_URL_SUCCESS,
  MEDIA_DISPLAY_URL_FAILURE,
  ADD_MEDIA_FILES_TO_LIBRARY,
} from 'Actions/mediaLibrary';

const defaultState = {
@ -127,6 +129,12 @@ const mediaLibrary = (state = Map(defaultState), action) => {
        map.set('isPersisting', false);
      });
    }
    case ADD_MEDIA_FILES_TO_LIBRARY: {
      const { mediaFiles } = action.payload;
      let updatedFiles = differenceBy(state.get('files'), mediaFiles, 'path');
      updatedFiles = [...mediaFiles.map(file => ({ ...file, key: uuid() })), ...updatedFiles];
      return state.set('files', updatedFiles);
    }
    case MEDIA_PERSIST_FAILURE: {
      const privateUploadChanged = state.get('privateUpload') !== action.payload.privateUpload;
      if (privateUploadChanged) {
@ -143,7 +151,9 @@ const mediaLibrary = (state = Map(defaultState), action) => {
      return state;
    }
    return state.withMutations(map => {
      const updatedFiles = map.get('files').filter(file => file.key !== key);
      const updatedFiles = map
        .get('files')
        .filter(file => (key ? file.key !== key : file.id !== id));
      map.set('files', updatedFiles);
      map.deleteIn(['displayURLs', id]);
      map.set('isDeleting', false);
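
To make the ADD_MEDIA_FILES_TO_LIBRARY merge above concrete, a minimal sketch of how differenceBy drops library entries whose path is overridden by incoming entry media; the file paths and the fixed 'newKey' value are made-up sample data, and only lodash is assumed:

const { differenceBy } = require('lodash');

const libraryFiles = [
  { sha: 'old', path: 'static/img/a.png', key: 'key1' },
  { sha: 'sha', path: 'static/img/b.png', key: 'key2' },
];
const entryMediaFiles = [{ sha: 'new', path: 'static/img/a.png' }];

// Library files are kept only if no incoming file claims the same path...
const kept = differenceBy(libraryFiles, entryMediaFiles, 'path');
// ...and the incoming files are prepended (the reducer also assigns each a fresh uuid key).
const merged = [...entryMediaFiles.map(file => ({ ...file, key: 'newKey' })), ...kept];

console.log(merged);
// [ { sha: 'new', path: 'static/img/a.png', key: 'newKey' },
//   { sha: 'sha', path: 'static/img/b.png', key: 'key2' } ]
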
@ -1,10 +1,17 @@
import { Map } from 'immutable';
import { resolvePath } from 'netlify-cms-lib-util';
import { ADD_ASSET, REMOVE_ASSET } from 'Actions/media';
import { ADD_ASSETS, ADD_ASSET, REMOVE_ASSET } from 'Actions/media';
import AssetProxy from 'ValueObjects/AssetProxy';

const medias = (state = Map(), action) => {
  switch (action.type) {
    case ADD_ASSETS: {
      let newState = state;
      action.payload.forEach(asset => {
        newState = newState.set(asset.public_path, asset);
      });
      return newState;
    }
    case ADD_ASSET:
      return state.set(action.payload.public_path, action.payload);
    case REMOVE_ASSET:
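
As a rough sketch of what the new ADD_ASSETS case does, the loop above can be read as indexing a batch of assets by public_path; the helper name and paths below are invented, only Immutable.js is assumed, and reduce is used here as an equivalent of the forEach in the hunk:

const { Map } = require('immutable');

// Index each asset under its public_path, mirroring the ADD_ASSETS case.
const indexAssets = (state, assets) =>
  assets.reduce((acc, asset) => acc.set(asset.public_path, asset), state);

const next = indexAssets(Map(), [{ public_path: '/img/a.png' }, { public_path: '/img/b.png' }]);

console.log(next.toJS());
// { '/img/a.png': { public_path: '/img/a.png' },
//   '/img/b.png': { public_path: '/img/b.png' } }
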
@ -59,7 +59,7 @@ export function createAssetProxy(value, fileObj, uploaded = false, privateUpload
      () => new AssetProxy(value, fileObj, false),
    );
  } else if (privateUpload) {
    throw new Error('The Private Upload option is only avaible for Asset Store Integration');
    throw new Error('The Private Upload option is only available for Asset Store Integration');
  }

  return Promise.resolve(new AssetProxy(value, fileObj, uploaded));
@ -98,6 +98,9 @@ const de = {
    },
  },
  mediaLibrary: {
    mediaLibraryCard: {
      draft: 'Entwurf',
    },
    mediaLibrary: {
      onDelete: 'Soll das ausgewählte Medium wirklich gelöscht werden?',
    },
@ -94,6 +94,9 @@ const en = {
    },
  },
  mediaLibrary: {
    mediaLibraryCard: {
      draft: 'Draft',
    },
    mediaLibrary: {
      onDelete: 'Are you sure you want to delete selected media?',
    },
@ -96,6 +96,9 @@ const fr = {
    },
  },
  mediaLibrary: {
    mediaLibraryCard: {
      draft: 'Brouillon',
    },
    mediaLibrary: {
      onDelete: 'Voulez-vous vraiment supprimer la ressource sélectionné ?',
    },
@ -79,6 +79,8 @@ const colors = {
  controlLabel: '#7a8291',
  checkerboardLight: '#f2f2f2',
  checkerboardDark: '#e6e6e6',
  mediaDraftText: colorsRaw.purple,
  mediaDraftBackground: colorsRaw.purpleLight,
};

const lengths = {
@ -1,8 +1,6 @@
import React from 'react';
import { fromJS } from 'immutable';
import { render, fireEvent } from 'react-testing-library';
import 'react-testing-library/cleanup-after-each';
import 'jest-dom/extend-expect';
import { render, fireEvent } from '@testing-library/react';
import { NetlifyCmsWidgetNumber } from '../';
import { validateMinMax } from '../NumberControl';

@ -1,9 +1,7 @@
import React from 'react';
import { fromJS, Map } from 'immutable';
import { last } from 'lodash';
import { render, fireEvent, wait } from 'react-testing-library';
import 'react-testing-library/cleanup-after-each';
import 'jest-dom/extend-expect';
import { render, fireEvent, wait } from '@testing-library/react';
import { NetlifyCmsWidgetRelation } from '../';

const RelationControl = NetlifyCmsWidgetRelation.controlComponent;
@ -1,8 +1,6 @@
import React from 'react';
import { fromJS } from 'immutable';
import { render, fireEvent } from 'react-testing-library';
import 'react-testing-library/cleanup-after-each';
import 'jest-dom/extend-expect';
import { render, fireEvent } from '@testing-library/react';
import { NetlifyCmsWidgetSelect } from '../';

const SelectControl = NetlifyCmsWidgetSelect.controlComponent;