fix build, migrate test backend

Shawn Erquhart
2018-07-17 18:09:59 -04:00
parent 2e7406862e
commit 040dd6859c
26 changed files with 174 additions and 82 deletions

View File

@@ -16,7 +16,7 @@
],
"scripts": {
"watch": "cross-env NETLIFY_CMS_VERSION=$npm_package_version parcel example/index.html --no-cache --open",
"build": "cross-env NETLIFY_CMS_VERSION=$npm_package_version parcel build example/index.html --no-cache"
"build": "cross-env NETLIFY_CMS_VERSION=$npm_package_version parcel build example/index.html --no-cache "
},
"keywords": [
"netlify",

View File

@@ -6,7 +6,7 @@ import { getIntegrationProvider } from 'Integrations';
import { getAsset, selectIntegration } from 'Reducers';
import { selectFields } from 'Reducers/collections';
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
import Cursor from 'ValueObjects/Cursor';
import Cursor from 'netlify-cms-lib-util/Cursor';
import { createEntry } from 'ValueObjects/Entry';
import ValidationErrorTypes from 'Constants/validationErrorTypes';
import isArray from 'lodash/isArray';

View File

@@ -1,6 +1,8 @@
import { attempt, flatten, isError } from 'lodash';
import { fromJS, Map } from 'immutable';
import fuzzy from 'fuzzy';
import GitHubBackend from "netlify-cms-backend-github";
import TestRepoBackend from "netlify-cms-backend-test";
import { resolveFormat } from "Formats/formats";
import { selectIntegration } from 'Reducers/integrations';
import {
@@ -15,13 +17,12 @@ import {
} from "Reducers/collections";
import { createEntry } from "ValueObjects/Entry";
import { sanitizeSlug } from "Lib/urlHelper";
import TestRepoBackend from "./test-repo/implementation";
import GitHubBackend from "./github/implementation";
import GitLabBackend from "./gitlab/implementation";
import BitBucketBackend from "./bitbucket/implementation";
import GitGatewayBackend from "./git-gateway/implementation";
import { registerBackend, getBackend } from 'Lib/registry';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from '../valueObjects/Cursor';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from 'netlify-cms-lib-util/Cursor';
import { EDITORIAL_WORKFLOW, status } from 'Constants/publishModes';
/**
* Register internal backends
@@ -395,7 +396,7 @@ class Backend {
const commitMessage = commitMessageFormatter(newEntry ? 'create' : 'update', config, { collection, slug: entryObj.slug, path: entryObj.path });
const mode = config.get("publish_mode");
const useWorkflow = config.get("publish_mode") === EDITORIAL_WORKFLOW;
const collectionName = collection.get("name");
@@ -404,7 +405,15 @@
*/
const hasAssetStore = integrations && !!selectIntegration(integrations, null, 'assetStore');
const updatedOptions = { ...options, hasAssetStore };
const opts = { newEntry, parsedData, commitMessage, collectionName, mode, ...updatedOptions };
const opts = {
newEntry,
parsedData,
commitMessage,
collectionName,
useWorkflow,
initialStatus: status.first(),
...updatedOptions
};
return this.implementation.persistEntry(entryObj, MediaFiles, opts)
.then(() => entryObj.slug);

View File

@@ -1,4 +1,4 @@
import GithubAPI from "Backends/github/API";
import GithubAPI from "netlify-cms-backend-github/API";
import APIError from "netlify-cms-lib-util/APIError";
export default class API extends GithubAPI {

View File

@@ -1,4 +1,4 @@
import GithubAPI from "Backends/github/API";
import GithubAPI from "netlify-cms-backend-github/API";
import APIError from "netlify-cms-lib-util/APIError";
export default class API extends GithubAPI {

View File

@@ -2,8 +2,8 @@ import GoTrue from "gotrue-js";
import jwtDecode from 'jwt-decode';
import {List} from 'immutable';
import { get, pick, intersection } from "lodash";
import unsentRequest from "Lib/unsentRequest";
import GitHubBackend from "Backends/github/implementation";
import { unsentRequest } from "netlify-cms-lib-util";
import GitHubBackend from "netlify-cms-backend-github";
import GitLabBackend from "Backends/gitlab/implementation";
import BitBucketBackend from "Backends/bitbucket/implementation";
import GitHubAPI from "./GitHubAPI";

View File

@@ -1,798 +0,0 @@
import localForage from "netlify-cms-lib-util/localForage";
import { Base64 } from "js-base64";
import { uniq, initial, last, get, find, hasIn, partial } from "lodash";
import { filterPromises, resolvePromiseProperties } from "netlify-cms-lib-util/promise";
import AssetProxy from "ValueObjects/AssetProxy";
import { SIMPLE, EDITORIAL_WORKFLOW, status } from "Constants/publishModes";
import APIError from "netlify-cms-lib-util/APIError";
import EditorialWorkflowError from "netlify-cms-lib-util/EditorialWorkflowError";
const CMS_BRANCH_PREFIX = 'cms/';
export default class API {
constructor(config) {
this.api_root = config.api_root || "https://api.github.com";
this.token = config.token || false;
this.branch = config.branch || "master";
this.repo = config.repo || "";
this.repoURL = `/repos/${ this.repo }`;
this.merge_method = config.squash_merges ? "squash" : "merge";
}
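// Illustrative sketch (hypothetical values): new API({ repo: "owner/repo", branch: "gh-pages", squash_merges: true })
// yields repoURL "/repos/owner/repo" and merge_method "squash".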
user() {
return this.request("/user");
}
hasWriteAccess() {
return this.request(this.repoURL)
.then(repo => repo.permissions.push)
.catch(error => {
console.error("Problem fetching repo data from GitHub");
throw error;
});
}
requestHeaders(headers = {}) {
const baseHeader = {
"Content-Type": "application/json",
...headers,
};
if (this.token) {
baseHeader.Authorization = `token ${ this.token }`;
return baseHeader;
}
return baseHeader;
}
parseJsonResponse(response) {
return response.json().then((json) => {
if (!response.ok) {
return Promise.reject(json);
}
return json;
});
}
urlFor(path, options) {
const cacheBuster = new Date().getTime();
const params = [`ts=${cacheBuster}`];
if (options.params) {
for (const key in options.params) {
params.push(`${ key }=${ encodeURIComponent(options.params[key]) }`);
}
}
if (params.length) {
path += `?${ params.join("&") }`;
}
return this.api_root + path;
}
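// Illustrative: urlFor("/user", {}) -> "https://api.github.com/user?ts=<cache-busting timestamp>".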
request(path, options = {}) {
const headers = this.requestHeaders(options.headers || {});
const url = this.urlFor(path, options);
let responseStatus;
return fetch(url, { ...options, headers }).then((response) => {
responseStatus = response.status;
const contentType = response.headers.get("Content-Type");
if (contentType && contentType.match(/json/)) {
return this.parseJsonResponse(response);
}
const text = response.text();
if (!response.ok) {
return Promise.reject(text);
}
return text;
})
.catch((error) => {
throw new APIError(error.message, responseStatus, 'GitHub');
});
}
generateBranchName(basename) {
return `${CMS_BRANCH_PREFIX}${basename}`;
}
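// e.g. generateBranchName("my-post") -> "cms/my-post"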
checkMetadataRef() {
return this.request(`${ this.repoURL }/git/refs/meta/_netlify_cms?${ Date.now() }`, {
cache: "no-store",
})
.then(response => response.object)
.catch((error) => {
// Meta ref doesn't exist
const readme = {
raw: "# Netlify CMS\n\nThis tree is used by the Netlify CMS to store metadata information for specific files and branches.",
};
return this.uploadBlob(readme)
.then(item => this.request(`${ this.repoURL }/git/trees`, {
method: "POST",
body: JSON.stringify({ tree: [{ path: "README.md", mode: "100644", type: "blob", sha: item.sha }] }),
}))
.then(tree => this.commit("First Commit", tree))
.then(response => this.createRef("meta", "_netlify_cms", response.sha))
.then(response => response.object);
});
}
storeMetadata(key, data) {
return this.checkMetadataRef()
.then((branchData) => {
const fileTree = {
[`${ key }.json`]: {
path: `${ key }.json`,
raw: JSON.stringify(data),
file: true,
},
};
return this.uploadBlob(fileTree[`${ key }.json`])
.then(item => this.updateTree(branchData.sha, "/", fileTree))
.then(changeTree => this.commit(`Updating “${ key }” metadata`, changeTree))
.then(response => this.patchRef("meta", "_netlify_cms", response.sha))
.then(() => {
localForage.setItem(`gh.meta.${ key }`, {
expires: Date.now() + 300000, // In 5 minutes
data,
});
});
});
}
retrieveMetadata(key) {
const cache = localForage.getItem(`gh.meta.${ key }`);
return cache.then((cached) => {
if (cached && cached.expires > Date.now()) { return cached.data; }
console.log("%c Checking for MetaData files", "line-height: 30px;text-align: center;font-weight: bold"); // eslint-disable-line
return this.request(`${ this.repoURL }/contents/${ key }.json`, {
params: { ref: "refs/meta/_netlify_cms" },
headers: { Accept: "application/vnd.github.VERSION.raw" },
cache: "no-store",
})
.then(response => JSON.parse(response))
.catch(error => console.log("%c %s does not have metadata", "line-height: 30px;text-align: center;font-weight: bold", key)); // eslint-disable-line
});
}
readFile(path, sha, branch = this.branch) {
if (sha) {
return this.getBlob(sha);
} else {
return this.request(`${ this.repoURL }/contents/${ path }`, {
headers: { Accept: "application/vnd.github.VERSION.raw" },
params: { ref: branch },
cache: "no-store",
}).catch(error => {
if (hasIn(error, 'message.errors') && find(error.message.errors, { code: "too_large" })) {
const dir = path.split('/').slice(0, -1).join('/');
return this.listFiles(dir)
.then(files => files.find(file => file.path === path))
.then(file => this.getBlob(file.sha));
}
throw error;
});
}
}
getBlob(sha) {
return localForage.getItem(`gh.${sha}`).then(cached => {
if (cached) { return cached; }
return this.request(`${this.repoURL}/git/blobs/${sha}`, {
headers: { Accept: "application/vnd.github.VERSION.raw" },
}).then(result => {
localForage.setItem(`gh.${sha}`, result);
return result;
});
});
}
listFiles(path) {
return this.request(`${ this.repoURL }/contents/${ path.replace(/\/$/, '') }`, {
params: { ref: this.branch },
})
.then(files => {
if (!Array.isArray(files)) {
throw new Error(`Cannot list files, path ${path} is not a directory but a ${files.type}`);
}
return files;
})
.then(files => files.filter(file => file.type === "file"));
}
readUnpublishedBranchFile(contentKey) {
const metaDataPromise = this.retrieveMetadata(contentKey)
.then(data => (data.objects.entry.path ? data : Promise.reject(null)));
return resolvePromiseProperties({
metaData: metaDataPromise,
fileData: metaDataPromise.then(
data => this.readFile(data.objects.entry.path, null, data.branch)),
isModification: metaDataPromise.then(
data => this.isUnpublishedEntryModification(data.objects.entry.path, this.branch)),
})
.catch(() => {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
});
}
isUnpublishedEntryModification(path, branch) {
return this.readFile(path, null, branch)
.then(data => true)
.catch((err) => {
if (err.message && err.message === "Not Found") {
return false;
}
throw err;
});
}
listUnpublishedBranches() {
console.log("%c Checking for Unpublished entries", "line-height: 30px;text-align: center;font-weight: bold"); // eslint-disable-line
return this.request(`${ this.repoURL }/git/refs/heads/cms`)
.then(branches => filterPromises(branches, (branch) => {
const branchName = branch.ref.substring("/refs/heads/".length - 1);
// Get PRs with a `head` of `branchName`. Note that this is a
// substring match, so we need to check that the `head.ref` of
// at least one of the returned objects matches `branchName`.
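// Illustrative (hypothetical branch names): head=cms/my-post can also return
// a PR whose head.ref is cms/my-post-draft, so pr.head.ref is compared exactly below.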
return this.request(`${ this.repoURL }/pulls`, {
params: {
head: branchName,
state: 'open',
base: this.branch,
},
})
.then(prs => prs.some(pr => pr.head.ref === branchName));
}))
.catch((error) => {
console.log("%c No Unpublished entries", "line-height: 30px;text-align: center;font-weight: bold"); // eslint-disable-line
throw error;
});
}
composeFileTree(files) {
let filename;
let part;
let parts;
let subtree;
const fileTree = {};
files.forEach((file) => {
if (file.uploaded) { return; }
parts = file.path.split("/").filter(part => part);
filename = parts.pop();
subtree = fileTree;
while (part = parts.shift()) {
subtree[part] = subtree[part] || {};
subtree = subtree[part];
}
subtree[filename] = file;
file.file = true;
});
return fileTree;
}
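// Illustrative: files at "static/img/a.png" and "post.md" compose to
// { static: { img: { "a.png": <file> } }, "post.md": <file> }, each file
// object having been marked with `file: true`.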
persistFiles(entry, mediaFiles, options) {
const uploadPromises = [];
const files = entry ? mediaFiles.concat(entry) : mediaFiles;
files.forEach((file) => {
if (file.uploaded) { return; }
uploadPromises.push(this.uploadBlob(file));
});
const fileTree = this.composeFileTree(files);
return Promise.all(uploadPromises).then(() => {
if (!options.mode || (options.mode && options.mode === SIMPLE)) {
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, "/", fileTree))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else if (options.mode && options.mode === EDITORIAL_WORKFLOW) {
const mediaFilesList = mediaFiles.map(file => ({ path: file.path, sha: file.sha }));
return this.editorialWorkflowGit(fileTree, entry, mediaFilesList, options);
}
});
}
deleteFile(path, message, options={}) {
const branch = options.branch || this.branch;
const pathArray = path.split('/');
const filename = last(pathArray);
const directory = initial(pathArray).join('/');
const fileDataPath = encodeURIComponent(directory);
const fileDataURL = `${this.repoURL}/git/trees/${branch}:${fileDataPath}`;
const fileURL = `${ this.repoURL }/contents/${ path }`;
/**
* We need to request the tree first to get the SHA. We use extended SHA-1
* syntax (<rev>:<path>) to get a blob from a tree without having to recurse
* through the tree.
*/
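// Illustrative (hypothetical repo and path): deleting "static/img/logo.png" on
// branch "master" requests `${this.repoURL}/git/trees/master:static%2Fimg`,
// whose tree entries include "logo.png" with its blob SHA.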
return this.request(fileDataURL, { cache: 'no-store' })
.then(resp => {
const { sha } = resp.tree.find(file => file.path === filename);
const opts = { method: 'DELETE', params: { sha, message, branch } };
return this.request(fileURL, opts);
});
}
editorialWorkflowGit(fileTree, entry, filesList, options) {
const contentKey = entry.slug;
const branchName = this.generateBranchName(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
// Open new editorial review workflow for this entry - Create new metadata and commit to new branch
let prResponse;
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, "/", fileTree))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(commitResponse => this.createBranch(branchName, commitResponse.sha))
.then(branchResponse => this.createPR(options.commitMessage, branchName))
.then(pr => {
prResponse = pr;
return this.user();
})
.then(user => {
return this.storeMetadata(contentKey, {
type: "PR",
pr: {
number: prResponse.number,
head: prResponse.head && prResponse.head.sha,
},
user: user.name || user.login,
status: status.first(),
branch: branchName,
collection: options.collectionName,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
objects: {
entry: {
path: entry.path,
sha: entry.sha,
},
files: filesList,
},
timeStamp: new Date().toISOString(),
});
});
} else {
// Entry is already on editorial review workflow - just update metadata and commit to existing branch
let newHead;
return this.getBranch(branchName)
.then(branchData => this.updateTree(branchData.commit.sha, "/", fileTree))
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(commit => {
newHead = commit;
return this.retrieveMetadata(contentKey);
})
.then(metadata => {
const { title, description } = options.parsedData || {};
const metadataFiles = get(metadata.objects, 'files', []);
const files = [ ...metadataFiles, ...filesList ];
const pr = { ...metadata.pr, head: newHead.sha };
const objects = {
entry: { path: entry.path, sha: entry.sha },
files: uniq(files),
};
const updatedMetadata = { ...metadata, pr, title, description, objects };
/**
* If an asset store is in use, assets are always accessible, so we
* can just finish the persist operation here.
*/
if (options.hasAssetStore) {
return this.storeMetadata(contentKey, updatedMetadata)
.then(() => this.patchBranch(branchName, newHead.sha));
}
/**
* If no asset store is in use, assets are being stored in the content
* repo, which means pull requests opened for editorial workflow
* entries must be rebased if assets have been added or removed.
*/
return this.rebasePullRequest(pr.number, branchName, contentKey, metadata, newHead);
});
}
}
/**
* Rebase a pull request onto the latest HEAD of its target base branch
* (should generally be the configured backend branch). Only rebases changes
* in the entry file.
*/
async rebasePullRequest(prNumber, branchName, contentKey, metadata, head) {
const { path } = metadata.objects.entry;
try {
/**
* Get the published branch and create new commits over it. If the pull
* request is up to date, no rebase will occur.
*/
const baseBranch = await this.getBranch();
const commits = await this.getPullRequestCommits(prNumber, head);
/**
* Sometimes the list of commits for a pull request isn't updated
* immediately after the PR branch is patched. There's also the possibility
* that the branch has changed unexpectedly. We account for both by adding
* the head if it's missing, or else throwing an error if the PR head is
* neither the head we expect nor its parent.
*/
const finalCommits = this.assertHead(commits, head);
const rebasedHead = await this.rebaseSingleBlobCommits(baseBranch.commit, finalCommits, path);
/**
* Update metadata, then force update the pull request branch head.
*/
const pr = { ...metadata.pr, head: rebasedHead.sha };
const timeStamp = new Date().toISOString();
const updatedMetadata = { ...metadata, pr, timeStamp };
await this.storeMetadata(contentKey, updatedMetadata);
return this.patchBranch(branchName, rebasedHead.sha, { force: true });
}
catch(error) {
console.error(error);
throw error;
}
}
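// Flow sketch with hypothetical values: base branch head X, PR commits [C1, C2]
// touching "posts/a.md"; each Ci is re-created over X (same message and author,
// new parent and tree), metadata.pr.head is updated, and the PR branch is
// force-updated to the rebased head.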
/**
* Rebase an array of commits one-by-one, starting from a given base SHA. Can
* accept an array of commits as received from the GitHub API. All commits are
* expected to change the same, single blob.
*/
rebaseSingleBlobCommits(baseCommit, commits, pathToBlob) {
/**
* If the parent of the first commit already matches the target base,
* return commits as is.
*/
if (commits.length === 0 || commits[0].parents[0].sha === baseCommit.sha) {
return Promise.resolve(last(commits));
}
/**
* Re-create each commit over the new base, applying each to the previous,
* changing only the parent SHA and tree for each, but retaining all other
* info, such as the author/committer data.
*/
const newHeadPromise = commits.reduce((lastCommitPromise, commit, idx) => {
return lastCommitPromise.then(newParent => {
/**
* Normalize commit data to ensure it's not nested in `commit.commit`.
*/
const parent = this.normalizeCommit(newParent);
const commitToRebase = this.normalizeCommit(commit);
return this.rebaseSingleBlobCommit(parent, commitToRebase, pathToBlob);
});
}, Promise.resolve(baseCommit));
/**
* Return a promise that resolves when all commits have been created.
*/
return newHeadPromise;
}
/**
* Rebase a commit that changes a single blob. Also handles updating the tree.
*/
rebaseSingleBlobCommit(baseCommit, commit, pathToBlob) {
/**
* Retain original commit metadata.
*/
const { message, author, committer } = commit;
/**
* Set the base commit as the parent.
*/
const parent = [ baseCommit.sha ];
/**
* Get the blob data by path.
*/
return this.getBlobInTree(commit.tree.sha, pathToBlob)
/**
* Create a new tree consisting of the base tree and the single updated
* blob. Use the full path to indicate nesting, GitHub will take care of
* subtree creation.
*/
.then(blob => this.createTree(baseCommit.tree.sha, [{ ...blob, path: pathToBlob }]))
/**
* Create a new commit with the updated tree and original commit metadata.
*/
.then(tree => this.createCommit(message, tree.sha, parent, author, committer));
}
/**
* Get a pull request by PR number.
*/
getPullRequest(prNumber) {
return this.request(`${ this.repoURL }/pulls/${prNumber}`);
}
/**
* Get the list of commits for a given pull request.
*/
getPullRequestCommits(prNumber) {
return this.request(`${ this.repoURL }/pulls/${prNumber}/commits`);
}
/**
* Returns `commits` with `headToAssert` appended if it's the child of the
* last commit in `commits`. Returns `commits` unaltered if `headToAssert` is
* already the last commit in `commits`. Otherwise throws an error.
*/
assertHead(commits, headToAssert) {
const headIsMissing = headToAssert.parents[0].sha === last(commits).sha;
const headIsNotMissing = headToAssert.sha === last(commits).sha;
if (headIsMissing) {
return commits.concat(headToAssert);
} else if (headIsNotMissing) {
return commits;
}
throw Error('Editorial workflow branch changed unexpectedly.');
}
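// Worked example with hypothetical SHAs, given commits = [A, B]:
// head.parents[0].sha === B.sha -> returns [A, B, head] (head was missing);
// head.sha === B.sha -> returns [A, B] (head already present);
// anything else -> throws (branch changed unexpectedly).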
updateUnpublishedEntryStatus(collection, slug, status) {
const contentKey = slug;
return this.retrieveMetadata(contentKey)
.then(metadata => ({
...metadata,
status,
}))
.then(updatedMetadata => this.storeMetadata(contentKey, updatedMetadata));
}
deleteUnpublishedEntry(collection, slug) {
const contentKey = slug;
const branchName = this.generateBranchName(contentKey);
return this.retrieveMetadata(contentKey)
.then(metadata => this.closePR(metadata.pr, metadata.objects))
.then(() => this.deleteBranch(branchName))
// If the PR doesn't exist, then this has already been deleted -
// deletion should be idempotent, so we can consider this a
// success.
.catch((err) => {
if (err.message === "Reference does not exist") {
return Promise.resolve();
}
return Promise.reject(err);
});
}
publishUnpublishedEntry(collection, slug) {
const contentKey = slug;
const branchName = this.generateBranchName(contentKey);
return this.retrieveMetadata(contentKey)
.then(metadata => this.mergePR(metadata.pr, metadata.objects))
.then(() => this.deleteBranch(branchName));
}
createRef(type, name, sha) {
return this.request(`${ this.repoURL }/git/refs`, {
method: "POST",
body: JSON.stringify({ ref: `refs/${ type }/${ name }`, sha }),
});
}
patchRef(type, name, sha, opts = {}) {
const force = opts.force || false;
return this.request(`${ this.repoURL }/git/refs/${ type }/${ encodeURIComponent(name) }`, {
method: "PATCH",
body: JSON.stringify({ sha, force }),
});
}
deleteRef(type, name, sha) {
return this.request(`${ this.repoURL }/git/refs/${ type }/${ encodeURIComponent(name) }`, {
method: 'DELETE',
});
}
getBranch(branch = this.branch) {
return this.request(`${ this.repoURL }/branches/${ encodeURIComponent(branch) }`);
}
createBranch(branchName, sha) {
return this.createRef("heads", branchName, sha);
}
assertCmsBranch(branchName) {
return branchName.startsWith(CMS_BRANCH_PREFIX);
}
patchBranch(branchName, sha, opts = {}) {
const force = opts.force || false;
if (force && !this.assertCmsBranch(branchName)) {
throw Error(`Only CMS branches can be force updated, cannot force update ${branchName}`);
}
return this.patchRef("heads", branchName, sha, { force });
}
deleteBranch(branchName) {
return this.deleteRef("heads", branchName);
}
createPR(title, head, base = this.branch) {
const body = "Automatically generated by Netlify CMS";
return this.request(`${ this.repoURL }/pulls`, {
method: "POST",
body: JSON.stringify({ title, body, head, base }),
});
}
closePR(pullrequest, objects) {
const headSha = pullrequest.head;
const prNumber = pullrequest.number;
console.log("%c Deleting PR", "line-height: 30px;text-align: center;font-weight: bold"); // eslint-disable-line
return this.request(`${ this.repoURL }/pulls/${ prNumber }`, {
method: "PATCH",
body: JSON.stringify({
state: "closed",
}),
});
}
mergePR(pullrequest, objects) {
const headSha = pullrequest.head;
const prNumber = pullrequest.number;
console.log("%c Merging PR", "line-height: 30px;text-align: center;font-weight: bold"); // eslint-disable-line
return this.request(`${ this.repoURL }/pulls/${ prNumber }/merge`, {
method: "PUT",
body: JSON.stringify({
commit_message: "Automatically generated. Merged on Netlify CMS.",
sha: headSha,
merge_method: this.merge_method,
}),
})
.catch((error) => {
if (error instanceof APIError && error.status === 405) {
return this.forceMergePR(pullrequest, objects);
} else {
throw error;
}
});
}
forceMergePR(pullrequest, objects) {
const files = objects.files.concat(objects.entry);
const fileTree = this.composeFileTree(files);
let commitMessage = "Automatically generated. Merged on Netlify CMS\n\nForce merge of:";
files.forEach((file) => {
commitMessage += `\n* "${ file.path }"`;
});
console.log("%c Automatic merge not possible - Forcing merge.", "line-height: 30px;text-align: center;font-weight: bold"); // eslint-disable-line
return this.getBranch()
.then(branchData => this.updateTree(branchData.commit.sha, "/", fileTree))
.then(changeTree => this.commit(commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
}
getTree(sha) {
if (sha) {
return this.request(`${this.repoURL}/git/trees/${sha}`);
}
return Promise.resolve({ tree: [] });
}
/**
* Get a blob from a tree. Requests individual subtrees recursively if blob is
* nested within one or more directories.
*/
getBlobInTree(treeSha, pathToBlob) {
const pathSegments = pathToBlob.split('/').filter(val => val);
const directories = pathSegments.slice(0, -1);
const filename = pathSegments.slice(-1)[0];
const baseTree = this.getTree(treeSha);
const subTreePromise = directories.reduce((treePromise, segment) => {
return treePromise.then(tree => {
const subTreeSha = find(tree.tree, { path: segment }).sha;
return this.getTree(subTreeSha);
});
}, baseTree);
return subTreePromise.then(subTree => find(subTree.tree, { path: filename }));
}
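// Illustrative: pathToBlob "content/posts/post.md" walks the "content" and
// "posts" subtrees in turn, then returns the tree entry for "post.md".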
toBase64(str) {
return Promise.resolve(
Base64.encode(str)
);
}
uploadBlob(item) {
const content = get(item, 'toBase64', partial(this.toBase64, item.raw))();
return content.then(contentBase64 => this.request(`${ this.repoURL }/git/blobs`, {
method: "POST",
body: JSON.stringify({
content: contentBase64,
encoding: "base64",
}),
}).then((response) => {
item.sha = response.sha;
item.uploaded = true;
return item;
}));
}
updateTree(sha, path, fileTree) {
return this.getTree(sha)
.then((tree) => {
let obj;
let filename;
let fileOrDir;
const updates = [];
const added = {};
for (let i = 0, len = tree.tree.length; i < len; i++) {
obj = tree.tree[i];
if (fileOrDir = fileTree[obj.path]) {
added[obj.path] = true;
if (fileOrDir.file) {
updates.push({ path: obj.path, mode: obj.mode, type: obj.type, sha: fileOrDir.sha });
} else {
updates.push(this.updateTree(obj.sha, obj.path, fileOrDir));
}
}
}
for (filename in fileTree) {
fileOrDir = fileTree[filename];
if (added[filename]) { continue; }
updates.push(
fileOrDir.file ?
{ path: filename, mode: "100644", type: "blob", sha: fileOrDir.sha } :
this.updateTree(null, filename, fileOrDir)
);
}
return Promise.all(updates)
.then(tree => this.createTree(sha, tree))
.then(response => ({ path, mode: "040000", type: "tree", sha: response.sha, parentSha: sha }));
});
}
createTree(baseSha, tree) {
return this.request(`${ this.repoURL }/git/trees`, {
method: "POST",
body: JSON.stringify({ base_tree: baseSha, tree }),
});
}
/**
* Some GitHub API calls return commit data in a nested `commit` property,
* with the SHA outside of the nested property, while others return a
* flatter object with no nested `commit` property. This normalizes a commit
* to resemble the latter.
*/
normalizeCommit(commit) {
if (commit.commit) {
return { ...commit.commit, sha: commit.sha };
}
return commit;
}
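// Illustrative (hypothetical SHA): a "list commits" item
// { sha: "abc123", commit: { message, tree, author } } normalizes to
// { message, tree, author, sha: "abc123" }.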
commit(message, changeTree) {
const parents = changeTree.parentSha ? [changeTree.parentSha] : [];
return this.createCommit(message, changeTree.sha, parents);
}
createCommit(message, treeSha, parents, author, committer) {
return this.request(`${ this.repoURL }/git/commits`, {
method: "POST",
body: JSON.stringify({ message, tree: treeSha, parents, author, committer }),
});
}
}

View File

@@ -1,29 +0,0 @@
.nc-githubAuthenticationPage-root {
display: flex;
flex-flow: column nowrap;
align-items: center;
justify-content: center;
height: 100vh;
}
.nc-githubAuthenticationPage-logo {
color: #c4c6d2;
margin-top: -300px;
}
.nc-githubAuthenticationPage-button {
@apply(--button);
@apply(--dropShadowDeep);
@apply(--buttonDefault);
@apply(--buttonGray);
padding: 0 30px;
margin-top: -80px;
display: flex;
align-items: center;
position: relative;
& .nc-icon {
margin-right: 18px;
}
}

View File

@@ -1,53 +0,0 @@
import PropTypes from 'prop-types';
import React from 'react';
import Authenticator from 'netlify-cms-lib-auth/netlify-auth';
import Icon from 'netlify-cms-ui-default/Icon';
export default class AuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
};
state = {};
handleLogin = (e) => {
e.preventDefault();
const cfg = {
base_url: this.props.base_url,
site_id: (document.location.host.split(':')[0] === 'localhost') ? 'cms.netlify.com' : this.props.siteId,
auth_endpoint: this.props.authEndpoint,
};
const auth = new Authenticator(cfg);
auth.authenticate({ provider: 'github', scope: 'repo' }, (err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
this.props.onLogin(data);
});
};
render() {
const { loginError } = this.state;
const { inProgress } = this.props;
return (
<section className="nc-githubAuthenticationPage-root">
<Icon className="nc-githubAuthenticationPage-logo" size="500px" type="netlify-cms"/>
{loginError && <p>{loginError}</p>}
<button
className="nc-githubAuthenticationPage-button"
disabled={inProgress}
onClick={this.handleLogin}
>
<Icon type="github" /> {inProgress ? "Logging in..." : "Login with GitHub"}
</button>
</section>
);
}
}

View File

@@ -1,39 +0,0 @@
import AssetProxy from "ValueObjects/AssetProxy";
import API from "../API";
describe('github API', () => {
const mockAPI = (api, responses) => {
api.request = (path, options = {}) => {
const normalizedPath = path.indexOf('?') !== -1 ? path.substr(0, path.indexOf('?')) : path;
const response = responses[normalizedPath];
return typeof response === 'function'
? Promise.resolve(response(options))
: Promise.reject(new Error(`No response for path '${normalizedPath}'`));
};
};
it('should create PR with correct base branch name when publishing with editorial workflow', () => {
let prBaseBranch = null;
const api = new API({ branch: 'gh-pages', repo: 'my-repo' });
const responses = {
'/repos/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
'/repos/my-repo/git/trees/def': () => ({ tree: [] }),
'/repos/my-repo/git/trees': () => ({}),
'/repos/my-repo/git/commits': () => ({}),
'/repos/my-repo/git/refs': () => ({}),
'/repos/my-repo/pulls': (pullRequest) => {
prBaseBranch = JSON.parse(pullRequest.body).base;
return { head: { sha: 'cbd' } };
},
'/user': () => ({}),
'/repos/my-repo/git/blobs': () => ({}),
'/repos/my-repo/git/refs/meta/_netlify_cms': () => ({ 'object': {} })
};
mockAPI(api, responses);
return expect(
api.editorialWorkflowGit(null, { slug: 'entry', sha: 'abc' }, null, {})
.then(() => prBaseBranch)
).resolves.toEqual('gh-pages');
});
});

View File

@@ -1,197 +0,0 @@
import trimStart from 'lodash/trimStart';
import semaphore from "semaphore";
import AuthenticationPage from "./AuthenticationPage";
import API from "./API";
const MAX_CONCURRENT_DOWNLOADS = 10;
export default class GitHub {
constructor(config, options={}) {
this.config = config;
this.options = {
proxied: false,
API: null,
...options,
};
if (!this.options.proxied && config.getIn(["backend", "repo"]) == null) {
throw new Error("The GitHub backend needs a \"repo\" in the backend configuration.");
}
this.api = this.options.API || null;
this.repo = config.getIn(["backend", "repo"], "");
this.branch = config.getIn(["backend", "branch"], "master").trim();
this.api_root = config.getIn(["backend", "api_root"], "https://api.github.com");
this.token = '';
this.squash_merges = config.getIn(["backend", "squash_merges"]);
}
authComponent() {
return AuthenticationPage;
}
restoreUser(user) {
return this.authenticate(user);
}
authenticate(state) {
this.token = state.token;
this.api = new API({ token: this.token, branch: this.branch, repo: this.repo, api_root: this.api_root, squash_merges: this.squash_merges });
return this.api.user().then(user =>
this.api.hasWriteAccess().then((isCollab) => {
// Unauthorized user
if (!isCollab) throw new Error("Your GitHub user account does not have access to this repo.");
// Authorized user
user.token = state.token;
return user;
})
);
}
logout() {
this.token = null;
return;
}
getToken() {
return Promise.resolve(this.token);
}
entriesByFolder(collection, extension) {
return this.api.listFiles(collection.get("folder"))
.then(files => files.filter(file => file.name.endsWith('.' + extension)))
.then(this.fetchFiles);
}
entriesByFiles(collection) {
const files = collection.get("files").map(collectionFile => ({
path: collectionFile.get("file"),
label: collectionFile.get("label"),
}));
return this.fetchFiles(files);
}
fetchFiles = (files) => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
files.forEach((file) => {
promises.push(new Promise((resolve, reject) => (
sem.take(() => this.api.readFile(file.path, file.sha).then((data) => {
resolve({ file, data });
sem.leave();
}).catch((err = true) => {
sem.leave();
console.error(`failed to load file from GitHub: ${file.path}`);
resolve({ error: err });
}))
)));
});
return Promise.all(promises)
.then(loadedEntries => loadedEntries.filter(loadedEntry => !loadedEntry.error));
};
// Fetches a single entry.
getEntry(collection, slug, path) {
return this.api.readFile(path).then(data => ({
file: { path },
data,
}));
}
getMedia() {
return this.api.listFiles(this.config.get('media_folder'))
.then(files => files.map(({ sha, name, size, download_url, path }) => {
const url = new URL(download_url);
if (url.pathname.match(/.svg$/)) {
url.search += (url.search.slice(1) === '' ? '?' : '&') + 'sanitize=true';
}
return { id: sha, name, size, url: url.href, path };
}));
}
persistEntry(entry, mediaFiles = [], options = {}) {
return this.api.persistFiles(entry, mediaFiles, options);
}
async persistMedia(mediaFile, options = {}) {
try {
const response = await this.api.persistFiles(null, [mediaFile], options);
const { sha, value, size, path, fileObj } = mediaFile;
const url = URL.createObjectURL(fileObj);
return { id: sha, name: value, size: fileObj.size, url, path: trimStart(path, '/') };
}
catch(error) {
console.error(error);
throw error;
}
}
deleteFile(path, commitMessage, options) {
return this.api.deleteFile(path, commitMessage, options);
}
unpublishedEntries() {
return this.api.listUnpublishedBranches().then((branches) => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
branches.forEach((branch) => {
promises.push(new Promise((resolve, reject) => {
const slug = branch.ref.split("refs/heads/cms/").pop();
return sem.take(() => this.api.readUnpublishedBranchFile(slug).then((data) => {
if (data === null || data === undefined) {
resolve(null);
sem.leave();
} else {
const path = data.metaData.objects.entry.path;
resolve({
slug,
file: { path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
});
sem.leave();
}
}).catch((err) => {
sem.leave();
resolve(null);
}));
}));
});
return Promise.all(promises);
})
.catch((error) => {
if (error.message === "Not Found") {
return Promise.resolve([]);
}
throw error;
});
}
unpublishedEntry(collection, slug) {
return this.api.readUnpublishedBranchFile(slug)
.then((data) => {
if (!data) return null;
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
};
});
}
updateUnpublishedEntryStatus(collection, slug, newStatus) {
return this.api.updateUnpublishedEntryStatus(collection, slug, newStatus);
}
deleteUnpublishedEntry(collection, slug) {
return this.api.deleteUnpublishedEntry(collection, slug);
}
publishUnpublishedEntry(collection, slug) {
return this.api.publishUnpublishedEntry(collection, slug);
}
}

View File

@@ -5,8 +5,8 @@ import { cond, flow, isString, partial, partialRight, pick, omit, set, update, g
import unsentRequest from "netlify-cms-lib-util/unsentRequest";
import { then } from "netlify-cms-lib-util/promise";
import APIError from "netlify-cms-lib-util/APIError";
import Cursor from 'netlify-cms-lib-util/Cursor';
import AssetProxy from "ValueObjects/AssetProxy";
import Cursor from "ValueObjects/Cursor"
export default class API {
constructor(config) {

View File

@@ -1,8 +1,9 @@
import trimStart from 'lodash/trimStart';
import semaphore from "semaphore";
import { fileExtension } from 'netlify-cms-lib-util/path';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from 'netlify-cms-lib-util/Cursor';
import AuthenticationPage from "./AuthenticationPage";
import API from "./API";
import { CURSOR_COMPATIBILITY_SYMBOL } from 'ValueObjects/Cursor';
import { EDITORIAL_WORKFLOW } from "Constants/publishModes";
const MAX_CONCURRENT_DOWNLOADS = 10;

View File

@@ -1,44 +0,0 @@
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import React from 'react';
import Icon from 'netlify-cms-ui-default/Icon';
export default class AuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
config: ImmutablePropTypes.map.isRequired,
};
componentWillMount() {
/**
* Allow login screen to be skipped for demo purposes.
*/
const skipLogin = this.props.config.getIn(['backend', 'login']) === false;
if (skipLogin) {
this.props.onLogin(this.state);
}
}
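// Hypothetical config.yml sketch that triggers the skip:
//   backend:
//     name: test-repo
//     login: false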
handleLogin = (e) => {
e.preventDefault();
this.props.onLogin(this.state);
};
render() {
const { inProgress } = this.props;
return (
<section className="nc-githubAuthenticationPage-root">
<Icon className="nc-githubAuthenticationPage-logo" size="500px" type="netlify-cms"/>
<button
className="nc-githubAuthenticationPage-button"
disabled={inProgress}
onClick={this.handleLogin}
>
{inProgress ? "Logging in..." : "Login"}
</button>
</section>
);
}
}

View File

@@ -1,225 +0,0 @@
import { remove, attempt, isError, take } from 'lodash';
import uuid from 'uuid/v4';
import { fromJS } from 'immutable';
import { EDITORIAL_WORKFLOW, status } from 'Constants/publishModes';
import EditorialWorkflowError from 'netlify-cms-lib-util/EditorialWorkflowError';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from 'ValueObjects/Cursor';
import AuthenticationPage from './AuthenticationPage';
window.repoFiles = window.repoFiles || {};
window.repoFilesUnpublished = window.repoFilesUnpublished || [];
function getFile(path) {
const segments = path.split('/');
let obj = window.repoFiles;
while (obj && segments.length) {
obj = obj[segments.shift()];
}
return obj || {};
}
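// Illustrative: getFile("_posts/hello.md") reads
// window.repoFiles["_posts"]["hello.md"], falling back to {} if any segment is missing.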
const pageSize = 10;
const getCursor = (collection, extension, entries, index) => {
const count = entries.length;
const pageCount = Math.floor(count / pageSize);
return Cursor.create({
actions: [
...(index < pageCount ? ["next", "last"] : []),
...(index > 0 ? ["prev", "first"] : []),
],
meta: { index, count, pageSize, pageCount },
data: { collection, extension, index, pageCount },
});
};
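// Illustrative: 25 entries with pageSize 10 give pageCount 2; index 0 exposes
// only ["next", "last"], index 2 only ["prev", "first"].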
const getFolderEntries = (folder, extension) => {
return Object.keys(window.repoFiles[folder] || {})
.filter(path => path.endsWith(`.${ extension }`))
.map(path => ({
file: { path: `${ folder }/${ path }` },
data: window.repoFiles[folder][path].content,
}))
.reverse();
};
export default class TestRepo {
constructor(config) {
this.config = config;
this.assets = [];
}
authComponent() {
return AuthenticationPage;
}
restoreUser(user) {
return this.authenticate(user);
}
authenticate() {
return Promise.resolve();
}
logout() {
return null;
}
getToken() {
return Promise.resolve('');
}
traverseCursor(cursor, action) {
const { collection, extension, index, pageCount } = cursor.data.toObject();
const newIndex = (() => {
if (action === "next") { return index + 1; }
if (action === "prev") { return index - 1; }
if (action === "first") { return 0; }
if (action === "last") { return pageCount; }
})();
// TODO: stop assuming cursors are for collections
const allEntries = getFolderEntries(collection.get('folder'), extension);
const entries = allEntries.slice(newIndex * pageSize, (newIndex * pageSize) + pageSize);
const newCursor = getCursor(collection, extension, allEntries, newIndex);
return Promise.resolve({ entries, cursor: newCursor });
}
entriesByFolder(collection, extension) {
const folder = collection.get('folder');
const entries = folder ? getFolderEntries(folder, extension) : [];
const cursor = getCursor(collection, extension, entries, 0);
const ret = take(entries, pageSize);
ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return Promise.resolve(ret);
}
entriesByFiles(collection) {
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
return Promise.all(files.map(file => ({
file,
data: getFile(file.path).content,
})));
}
getEntry(collection, slug, path) {
return Promise.resolve({
file: { path },
data: getFile(path).content,
});
}
unpublishedEntries() {
return Promise.resolve(window.repoFilesUnpublished);
}
unpublishedEntry(collection, slug) {
const entry = window.repoFilesUnpublished.find(e => (
e.metaData.collection === collection.get('name') && e.slug === slug
));
if (!entry) {
return Promise.reject(new EditorialWorkflowError('content is not under editorial workflow', true));
}
return Promise.resolve(entry);
}
deleteUnpublishedEntry(collection, slug) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(e => (
e.metaData.collection === collection && e.slug === slug
));
unpubStore.splice(existingEntryIndex, 1);
return Promise.resolve();
}
persistEntry({ path, raw, slug }, mediaFiles = [], options = {}) {
if (options.mode === EDITORIAL_WORKFLOW) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = { ...unpubStore[existingEntryIndex], data: raw };
unpubEntry.title = options.parsedData && options.parsedData.title;
unpubEntry.description = options.parsedData && options.parsedData.description;
unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
const unpubEntry = {
data: raw,
file: {
path,
},
metaData: {
collection: options.collectionName,
status: status.first(),
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
},
slug,
};
unpubStore.push(unpubEntry);
}
return Promise.resolve();
}
const newEntry = options.newEntry || false;
const folder = path.substring(0, path.lastIndexOf('/'));
const fileName = path.substring(path.lastIndexOf('/') + 1);
window.repoFiles[folder] = window.repoFiles[folder] || {};
window.repoFiles[folder][fileName] = window.repoFiles[folder][fileName] || {};
if (newEntry) {
window.repoFiles[folder][fileName] = { content: raw };
} else {
window.repoFiles[folder][fileName].content = raw;
}
return Promise.resolve();
}
updateUnpublishedEntryStatus(collection, slug, newStatus) {
const unpubStore = window.repoFilesUnpublished;
const entryIndex = unpubStore.findIndex(e => (
e.metaData.collection === collection && e.slug === slug
));
unpubStore[entryIndex].metaData.status = newStatus;
return Promise.resolve();
}
publishUnpublishedEntry(collection, slug) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(e => (
e.metaData.collection === collection && e.slug === slug
));
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = { raw: unpubEntry.data, slug: unpubEntry.slug, path: unpubEntry.file.path };
unpubStore.splice(unpubEntryIndex, 1);
return this.persistEntry(entry);
}
getMedia() {
return Promise.resolve(this.assets);
}
persistMedia({ fileObj }) {
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = { id: uuid(), name, size, path: url, url };
this.assets.push(normalizedAsset);
return Promise.resolve(normalizedAsset);
}
deleteFile(path, commitMessage) {
const assetIndex = this.assets.findIndex(asset => asset.path === path);
if (assetIndex > -1) {
this.assets.splice(assetIndex, 1);
}
else {
const folder = path.substring(0, path.lastIndexOf('/'));
const fileName = path.substring(path.lastIndexOf('/') + 1);
delete window.repoFiles[folder][fileName];
}
return Promise.resolve();
}
}

View File

@@ -3,13 +3,13 @@ import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { connect } from 'react-redux';
import { partial } from 'lodash';
import Cursor from 'netlify-cms-lib-util/Cursor';
import {
loadEntries as actionLoadEntries,
traverseCollectionCursor as actionTraverseCollectionCursor,
} from 'Actions/entries';
import { selectEntries } from 'Reducers';
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
import Cursor from 'ValueObjects/Cursor';
import Entries from './Entries';
class EntriesCollection extends React.Component {

View File

@@ -2,12 +2,12 @@ import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { connect } from 'react-redux';
import Cursor from 'netlify-cms-lib-util/Cursor';
import { selectSearchedEntries } from 'Reducers';
import {
searchEntries as actionSearchEntries,
clearSearch as actionClearSearch
} from 'Actions/search';
import Cursor from 'ValueObjects/Cursor';
import Entries from './Entries';
class EntriesSearch extends React.Component {

View File

@@ -4,9 +4,9 @@ import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from 'react-emotion';
import Waypoint from 'react-waypoint';
import { Map } from 'immutable';
import Cursor from 'netlify-cms-lib-util/Cursor';
import { selectFields, selectInferedField } from 'Reducers/collections';
import EntryCard from './EntryCard';
import Cursor from 'ValueObjects/Cursor';
const CardsGrid = styled.div`
display: flex;

View File

@@ -7,4 +7,3 @@
* Backend auth pages
*/
@import "./backends/git-gateway/AuthenticationPage.css";
@import "./backends/github/AuthenticationPage.css";

View File

@@ -1,5 +1,5 @@
import { fromJS, Map } from 'immutable';
import Cursor from "ValueObjects/Cursor";
import Cursor from 'netlify-cms-lib-util/Cursor';
import {
ENTRIES_SUCCESS,
} from 'Actions/entries';

View File

@@ -1,115 +0,0 @@
import { fromJS, Map, Set } from "immutable";
const jsToMap = obj => {
if (obj === undefined) {
return Map();
}
const immutableObj = fromJS(obj);
if (!Map.isMap(immutableObj)) {
throw new Error("Object must be equivalent to a Map.");
}
return immutableObj;
};
const knownMetaKeys = Set(["index", "count", "pageSize", "pageCount", "usingOldPaginationAPI"]);
const filterUnknownMetaKeys = meta => meta.filter((v, k) => knownMetaKeys.has(k));
/*
createCursorMap takes one of three signatures:
- () -> cursor with empty actions, data, and meta
- (cursorMap: <object/Map with optional actions, data, and meta keys>) -> cursor
- (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>) -> cursor
*/
const createCursorMap = (...args) => {
const { actions, data, meta } = args.length === 1
? jsToMap(args[0]).toObject()
: { actions: args[0], data: args[1], meta: args[2] };
return Map({
// actions are a Set, rather than a List, to ensure an efficient .has
actions: Set(actions),
// data and meta are Maps
data: jsToMap(data),
meta: jsToMap(meta).update(filterUnknownMetaKeys),
});
};
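// Illustrative (hypothetical values): createCursorMap(["next"], { index: 0 }, { count: 42 })
// and createCursorMap({ actions: ["next"], data: { index: 0 }, meta: { count: 42 } })
// produce the same cursor map.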
const hasAction = (cursorMap, action) => cursorMap.hasIn(["actions", action]);
const getActionHandlers = (cursorMap, handler) =>
cursorMap.get("actions", Set()).toMap().map(action => handler(action));
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface
export default class Cursor {
static create(...args) {
return new Cursor(...args);
}
constructor(...args) {
if (args[0] instanceof Cursor) {
return args[0];
}
this.store = createCursorMap(...args);
this.actions = this.store.get("actions");
this.data = this.store.get("data");
this.meta = this.store.get("meta");
}
updateStore(...args) {
return new Cursor(this.store.update(...args));
}
updateInStore(...args) {
return new Cursor(this.store.updateIn(...args));
}
hasAction(action) {
return hasAction(this.store, action);
}
addAction(action) {
return this.updateStore("actions", actions => actions.add(action));
}
removeAction(action) {
return this.updateStore("actions", actions => actions.delete(action));
}
setActions(actions) {
return this.updateStore(store => store.set("actions", Set(actions)));
}
mergeActions(actions) {
return this.updateStore("actions", oldActions => oldActions.union(actions));
}
getActionHandlers(handler) {
return getActionHandlers(this.store, handler);
}
setData(data) {
return new Cursor(this.store.set("data", jsToMap(data)));
}
mergeData(data) {
return new Cursor(this.store.mergeIn(["data"], jsToMap(data)));
}
wrapData(data) {
return this.updateStore("data", oldData => jsToMap(data).set("wrapped_cursor_data", oldData));
}
unwrapData() {
return [this.store.get("data").delete("wrapped_cursor_data"), this.updateStore("data", data => data.get("wrapped_cursor_data"))];
}
clearData() {
return this.updateStore("data", data => Map());
}
setMeta(meta) {
return this.updateStore(store => store.set("meta", jsToMap(meta)));
}
mergeMeta(meta) {
return this.updateStore(store => store.update("meta", oldMeta => oldMeta.merge(jsToMap(meta))))
}
}
// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol("cursor key for compatibility with old backends");
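// Usage sketch: an old-style backend attaches a cursor to its plain entries
// array, e.g. `entries[CURSOR_COMPATIBILITY_SYMBOL] = cursor`, as the test
// backend's entriesByFolder does above.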