GitLab backend built with cursor API (#1343)
This commit is contained in:
committed by
Shawn Erquhart
parent
1f94e3123d
commit
b65f68efd4
@ -63,7 +63,7 @@
|
|||||||
|
|
||||||
var ONE_DAY = 60 * 60 * 24 * 1000;
|
var ONE_DAY = 60 * 60 * 24 * 1000;
|
||||||
|
|
||||||
for (var i=1; i<=10; i++) {
|
for (var i=1; i<=20; i++) {
|
||||||
var date = new Date();
|
var date = new Date();
|
||||||
|
|
||||||
date.setTime(date.getTime() + ONE_DAY);
|
date.setTime(date.getTime() + ONE_DAY);
|
||||||
|
@ -1,12 +1,15 @@
|
|||||||
import { List } from 'immutable';
|
import { fromJS, List, Set } from 'immutable';
|
||||||
import { actions as notifActions } from 'redux-notifications';
|
import { actions as notifActions } from 'redux-notifications';
|
||||||
import { serializeValues } from 'Lib/serializeEntryValues';
|
import { serializeValues } from 'Lib/serializeEntryValues';
|
||||||
import { currentBackend } from 'Backends/backend';
|
import { currentBackend } from 'Backends/backend';
|
||||||
import { getIntegrationProvider } from 'Integrations';
|
import { getIntegrationProvider } from 'Integrations';
|
||||||
import { getAsset, selectIntegration } from 'Reducers';
|
import { getAsset, selectIntegration } from 'Reducers';
|
||||||
import { selectFields } from 'Reducers/collections';
|
import { selectFields } from 'Reducers/collections';
|
||||||
|
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
|
||||||
|
import Cursor from 'ValueObjects/Cursor';
|
||||||
import { createEntry } from 'ValueObjects/Entry';
|
import { createEntry } from 'ValueObjects/Entry';
|
||||||
import ValidationErrorTypes from 'Constants/validationErrorTypes';
|
import ValidationErrorTypes from 'Constants/validationErrorTypes';
|
||||||
|
import isArray from 'lodash/isArray';
|
||||||
|
|
||||||
const { notifSend } = notifActions;
|
const { notifSend } = notifActions;
|
||||||
|
|
||||||
@ -80,13 +83,15 @@ export function entriesLoading(collection) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function entriesLoaded(collection, entries, pagination) {
|
export function entriesLoaded(collection, entries, pagination, cursor, append = true) {
|
||||||
return {
|
return {
|
||||||
type: ENTRIES_SUCCESS,
|
type: ENTRIES_SUCCESS,
|
||||||
payload: {
|
payload: {
|
||||||
collection: collection.get('name'),
|
collection: collection.get('name'),
|
||||||
entries,
|
entries,
|
||||||
page: pagination,
|
page: pagination,
|
||||||
|
cursor: Cursor.create(cursor),
|
||||||
|
append,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -238,6 +243,16 @@ export function loadEntry(collection, slug) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const appendActions = fromJS({
|
||||||
|
["append_next"]: { action: "next", append: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const addAppendActionsToCursor = cursor => Cursor
|
||||||
|
.create(cursor)
|
||||||
|
.updateStore("actions", actions => actions.union(
|
||||||
|
appendActions.filter(v => actions.has(v.get("action"))).keySeq()
|
||||||
|
));
|
||||||
|
|
||||||
export function loadEntries(collection, page = 0) {
|
export function loadEntries(collection, page = 0) {
|
||||||
return (dispatch, getState) => {
|
return (dispatch, getState) => {
|
||||||
if (collection.get('isFetching')) {
|
if (collection.get('isFetching')) {
|
||||||
@ -247,14 +262,86 @@ export function loadEntries(collection, page = 0) {
|
|||||||
const backend = currentBackend(state.config);
|
const backend = currentBackend(state.config);
|
||||||
const integration = selectIntegration(state, collection.get('name'), 'listEntries');
|
const integration = selectIntegration(state, collection.get('name'), 'listEntries');
|
||||||
const provider = integration ? getIntegrationProvider(state.integrations, backend.getToken, integration) : backend;
|
const provider = integration ? getIntegrationProvider(state.integrations, backend.getToken, integration) : backend;
|
||||||
|
const append = !!(page && !isNaN(page) && page > 0);
|
||||||
dispatch(entriesLoading(collection));
|
dispatch(entriesLoading(collection));
|
||||||
provider.listEntries(collection, page).then(
|
provider.listEntries(collection, page)
|
||||||
response => dispatch(entriesLoaded(collection, response.entries.reverse(), response.pagination)),
|
.then(response => ({
|
||||||
error => dispatch(entriesFailed(collection, error))
|
...response,
|
||||||
);
|
|
||||||
|
// The only existing backend using the pagination system is the
|
||||||
|
// Algolia integration, which is also the only integration used
|
||||||
|
// to list entries. Thus, this checking for an integration can
|
||||||
|
// determine whether or not this is using the old integer-based
|
||||||
|
// pagination API. Other backends will simply store an empty
|
||||||
|
// cursor, which behaves identically to no cursor at all.
|
||||||
|
cursor: integration
|
||||||
|
? Cursor.create({ actions: ["next"], meta: { usingOldPaginationAPI: true }, data: { nextPage: page + 1 } })
|
||||||
|
: Cursor.create(response.cursor),
|
||||||
|
}))
|
||||||
|
.then(response => dispatch(entriesLoaded(
|
||||||
|
collection,
|
||||||
|
response.cursor.meta.get('usingOldPaginationAPI')
|
||||||
|
? response.entries.reverse()
|
||||||
|
: response.entries,
|
||||||
|
response.pagination,
|
||||||
|
addAppendActionsToCursor(response.cursor),
|
||||||
|
append,
|
||||||
|
)))
|
||||||
|
.catch(err => {
|
||||||
|
dispatch(notifSend({
|
||||||
|
message: `Failed to load entries: ${ err }`,
|
||||||
|
kind: 'danger',
|
||||||
|
dismissAfter: 8000,
|
||||||
|
}));
|
||||||
|
return Promise.reject(dispatch(entriesFailed(collection, err)));
|
||||||
|
});
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function traverseCursor(backend, cursor, action) {
|
||||||
|
if (!cursor.actions.has(action)) {
|
||||||
|
throw new Error(`The current cursor does not support the pagination action "${ action }".`);
|
||||||
|
}
|
||||||
|
return backend.traverseCursor(cursor, action);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function traverseCollectionCursor(collection, action) {
|
||||||
|
return async (dispatch, getState) => {
|
||||||
|
const state = getState();
|
||||||
|
if (state.entries.getIn(['pages', `${ collection.get('name') }`, 'isFetching',])) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const backend = currentBackend(state.config);
|
||||||
|
|
||||||
|
const { action: realAction, append } = appendActions.has(action)
|
||||||
|
? appendActions.get(action).toJS()
|
||||||
|
: { action, append: false };
|
||||||
|
const cursor = selectCollectionEntriesCursor(state.cursors, collection.get('name'));
|
||||||
|
|
||||||
|
// Handle cursors representing pages in the old, integer-based
|
||||||
|
// pagination API
|
||||||
|
if (cursor.meta.get("usingOldPaginationAPI", false)) {
|
||||||
|
return dispatch(loadEntries(collection, cursor.data.get("nextPage")));
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
dispatch(entriesLoading(collection));
|
||||||
|
const { entries, cursor: newCursor } = await traverseCursor(backend, cursor, realAction);
|
||||||
|
// Pass null for the old pagination argument - this will
|
||||||
|
// eventually be removed.
|
||||||
|
return dispatch(entriesLoaded(collection, entries, null, addAppendActionsToCursor(newCursor), append));
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err);
|
||||||
|
dispatch(notifSend({
|
||||||
|
message: `Failed to persist entry: ${ err }`,
|
||||||
|
kind: 'danger',
|
||||||
|
dismissAfter: 8000,
|
||||||
|
}));
|
||||||
|
return Promise.reject(dispatch(entriesFailed(collection, err)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export function createEmptyDraft(collection) {
|
export function createEmptyDraft(collection) {
|
||||||
return (dispatch) => {
|
return (dispatch) => {
|
||||||
const dataFields = {};
|
const dataFields = {};
|
||||||
|
@ -105,121 +105,44 @@ export function clearSearch() {
|
|||||||
// SearchEntries will search for complete entries in all collections.
|
// SearchEntries will search for complete entries in all collections.
|
||||||
export function searchEntries(searchTerm, page = 0) {
|
export function searchEntries(searchTerm, page = 0) {
|
||||||
return (dispatch, getState) => {
|
return (dispatch, getState) => {
|
||||||
|
dispatch(searchingEntries(searchTerm));
|
||||||
|
|
||||||
const state = getState();
|
const state = getState();
|
||||||
|
const backend = currentBackend(state.config);
|
||||||
const allCollections = state.collections.keySeq().toArray();
|
const allCollections = state.collections.keySeq().toArray();
|
||||||
const collections = allCollections.filter(collection => selectIntegration(state, collection, 'search'));
|
const collections = allCollections.filter(collection => selectIntegration(state, collection, 'search'));
|
||||||
const integration = selectIntegration(state, collections[0], 'search');
|
const integration = selectIntegration(state, collections[0], 'search');
|
||||||
if (!integration) {
|
|
||||||
localSearch(searchTerm, getState, dispatch);
|
const searchPromise = integration
|
||||||
} else {
|
? getIntegrationProvider(state.integrations, backend.getToken, integration).search(collections, searchTerm, page)
|
||||||
const provider = getIntegrationProvider(state.integrations, currentBackend(state.config).getToken, integration);
|
: backend.search(state.collections.valueSeq().toArray(), searchTerm);
|
||||||
dispatch(searchingEntries(searchTerm));
|
|
||||||
provider.search(collections, searchTerm, page).then(
|
return searchPromise.then(
|
||||||
response => dispatch(searchSuccess(searchTerm, response.entries, response.pagination)),
|
response => dispatch(searchSuccess(searchTerm, response.entries, response.pagination)),
|
||||||
error => dispatch(searchFailure(searchTerm, error))
|
error => dispatch(searchFailure(searchTerm, error))
|
||||||
);
|
);
|
||||||
}
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Instead of searching for complete entries, query will search for specific fields
|
// Instead of searching for complete entries, query will search for specific fields
|
||||||
// in specific collections and return raw data (no entries).
|
// in specific collections and return raw data (no entries).
|
||||||
export function query(namespace, collection, searchFields, searchTerm) {
|
export function query(namespace, collectionName, searchFields, searchTerm) {
|
||||||
return (dispatch, getState) => {
|
return (dispatch, getState) => {
|
||||||
|
dispatch(querying(namespace, collectionName, searchFields, searchTerm));
|
||||||
|
|
||||||
const state = getState();
|
const state = getState();
|
||||||
const integration = selectIntegration(state, collection, 'search');
|
const backend = currentBackend(state.config);
|
||||||
dispatch(querying(namespace, collection, searchFields, searchTerm));
|
const integration = selectIntegration(state, collectionName, 'search');
|
||||||
if (!integration) {
|
const collection = state.collections.find(collection => collection.get('name') === collectionName);
|
||||||
localQuery(namespace, collection, searchFields, searchTerm, state, dispatch);
|
|
||||||
} else {
|
const queryPromise = integration
|
||||||
const provider = getIntegrationProvider(state.integrations, currentBackend(state.config).getToken, integration);
|
? getIntegrationProvider(state.integrations, backend.getToken, integration)
|
||||||
provider.searchBy(searchFields.map(f => `data.${ f }`), collection, searchTerm).then(
|
.searchBy(searchFields.map(f => `data.${ f }`), collectionName, searchTerm)
|
||||||
response => dispatch(querySuccess(namespace, collection, searchFields, searchTerm, response)),
|
: backend.query(collection, searchFields, searchTerm);
|
||||||
error => dispatch(queryFailure(namespace, collection, searchFields, searchTerm, error))
|
|
||||||
|
return queryPromise.then(
|
||||||
|
response => dispatch(querySuccess(namespace, collectionName, searchFields, searchTerm, response)),
|
||||||
|
error => dispatch(queryFailure(namespace, collectionName, searchFields, searchTerm, error))
|
||||||
);
|
);
|
||||||
}
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Local Query & Search functions
|
|
||||||
|
|
||||||
function localSearch(searchTerm, getState, dispatch) {
|
|
||||||
return (function acc(localResults = { entries: [] }) {
|
|
||||||
function processCollection(collection, collectionKey) {
|
|
||||||
const state = getState();
|
|
||||||
if (state.entries.hasIn(['pages', collectionKey, 'ids'])) {
|
|
||||||
const searchFields = [
|
|
||||||
selectInferedField(collection, 'title'),
|
|
||||||
selectInferedField(collection, 'shortTitle'),
|
|
||||||
selectInferedField(collection, 'author'),
|
|
||||||
];
|
|
||||||
const collectionEntries = selectEntries(state, collectionKey).toJS();
|
|
||||||
const filteredEntries = fuzzy.filter(searchTerm, collectionEntries, {
|
|
||||||
extract: entry => searchFields.reduce((acc, field) => {
|
|
||||||
const f = entry.data[field];
|
|
||||||
return f ? `${ acc } ${ f }` : acc;
|
|
||||||
}, ""),
|
|
||||||
}).filter(entry => entry.score > 5);
|
|
||||||
localResults[collectionKey] = true;
|
|
||||||
localResults.entries = localResults.entries.concat(filteredEntries);
|
|
||||||
|
|
||||||
const returnedKeys = Object.keys(localResults);
|
|
||||||
const allCollections = state.collections.keySeq().toArray();
|
|
||||||
if (allCollections.every(v => returnedKeys.indexOf(v) !== -1)) {
|
|
||||||
const sortedResults = localResults.entries.sort((a, b) => {
|
|
||||||
if (a.score > b.score) return -1;
|
|
||||||
if (a.score < b.score) return 1;
|
|
||||||
return 0;
|
|
||||||
}).map(f => f.original);
|
|
||||||
if (allCollections.size > 3 || localResults.entries.length > 30) {
|
|
||||||
console.warn('The Netlify CMS is currently using a Built-in search.' +
|
|
||||||
'\nWhile this works great for small sites, bigger projects might benefit from a separate search integration.' +
|
|
||||||
'\nPlease refer to the documentation for more information');
|
|
||||||
}
|
|
||||||
dispatch(searchSuccess(searchTerm, sortedResults, 0));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Collection entries aren't loaded yet.
|
|
||||||
// Dispatch loadEntries and wait before redispatching this action again.
|
|
||||||
dispatch({
|
|
||||||
type: WAIT_UNTIL_ACTION,
|
|
||||||
predicate: action => (action.type === ENTRIES_SUCCESS && action.payload.collection === collectionKey),
|
|
||||||
run: () => processCollection(collection, collectionKey),
|
|
||||||
});
|
|
||||||
dispatch(loadEntries(collection));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
getState().collections.forEach(processCollection);
|
|
||||||
}());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function localQuery(namespace, collection, searchFields, searchTerm, state, dispatch) {
|
|
||||||
// Check if entries in this collection were already loaded
|
|
||||||
if (state.entries.hasIn(['pages', collection, 'ids'])) {
|
|
||||||
const entries = selectEntries(state, collection).toJS();
|
|
||||||
const filteredEntries = fuzzy.filter(searchTerm, entries, {
|
|
||||||
extract: entry => searchFields.reduce((acc, field) => {
|
|
||||||
const f = entry.data[field];
|
|
||||||
return f ? `${ acc } ${ f }` : acc;
|
|
||||||
}, ""),
|
|
||||||
}).filter(entry => entry.score > 5);
|
|
||||||
|
|
||||||
const resultObj = {
|
|
||||||
query: searchTerm,
|
|
||||||
hits: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
resultObj.hits = filteredEntries.map(f => f.original);
|
|
||||||
dispatch(querySuccess(namespace, collection, searchFields, searchTerm, resultObj));
|
|
||||||
} else {
|
|
||||||
// Collection entries aren't loaded yet.
|
|
||||||
// Dispatch loadEntries and wait before redispatching this action again.
|
|
||||||
dispatch({
|
|
||||||
type: WAIT_UNTIL_ACTION,
|
|
||||||
predicate: action => (action.type === ENTRIES_SUCCESS && action.payload.collection === collection),
|
|
||||||
run: dispatch => dispatch(query(namespace, collection, searchFields, searchTerm)),
|
|
||||||
});
|
|
||||||
dispatch(loadEntries(state.collections.get(collection)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
import { attempt, isError } from 'lodash';
|
import { attempt, flatten, isError } from 'lodash';
|
||||||
import { Map } from 'immutable';
|
import { fromJS, Map } from 'immutable';
|
||||||
|
import fuzzy from 'fuzzy';
|
||||||
import { resolveFormat } from "Formats/formats";
|
import { resolveFormat } from "Formats/formats";
|
||||||
import { selectIntegration } from 'Reducers/integrations';
|
import { selectIntegration } from 'Reducers/integrations';
|
||||||
import {
|
import {
|
||||||
@ -10,19 +11,23 @@ import {
|
|||||||
selectAllowDeletion,
|
selectAllowDeletion,
|
||||||
selectFolderEntryExtension,
|
selectFolderEntryExtension,
|
||||||
selectIdentifier,
|
selectIdentifier,
|
||||||
|
selectInferedField,
|
||||||
} from "Reducers/collections";
|
} from "Reducers/collections";
|
||||||
import { createEntry } from "ValueObjects/Entry";
|
import { createEntry } from "ValueObjects/Entry";
|
||||||
import { sanitizeSlug } from "Lib/urlHelper";
|
import { sanitizeSlug } from "Lib/urlHelper";
|
||||||
import TestRepoBackend from "./test-repo/implementation";
|
import TestRepoBackend from "./test-repo/implementation";
|
||||||
import GitHubBackend from "./github/implementation";
|
import GitHubBackend from "./github/implementation";
|
||||||
|
import GitLabBackend from "./gitlab/implementation";
|
||||||
import GitGatewayBackend from "./git-gateway/implementation";
|
import GitGatewayBackend from "./git-gateway/implementation";
|
||||||
import { registerBackend, getBackend } from 'Lib/registry';
|
import { registerBackend, getBackend } from 'Lib/registry';
|
||||||
|
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from '../valueObjects/Cursor';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Register internal backends
|
* Register internal backends
|
||||||
*/
|
*/
|
||||||
registerBackend('git-gateway', GitGatewayBackend);
|
registerBackend('git-gateway', GitGatewayBackend);
|
||||||
registerBackend('github', GitHubBackend);
|
registerBackend('github', GitHubBackend);
|
||||||
|
registerBackend('gitlab', GitLabBackend);
|
||||||
registerBackend('test-repo', TestRepoBackend);
|
registerBackend('test-repo', TestRepoBackend);
|
||||||
|
|
||||||
|
|
||||||
@ -107,6 +112,17 @@ const commitMessageFormatter = (type, config, { slug, path, collection }) => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const extractSearchFields = searchFields => entry => searchFields.reduce((acc, field) => {
|
||||||
|
const f = entry.data[field];
|
||||||
|
return f ? `${acc} ${f}` : acc;
|
||||||
|
}, "");
|
||||||
|
|
||||||
|
const sortByScore = (a, b) => {
|
||||||
|
if (a.score > b.score) return -1;
|
||||||
|
if (a.score < b.score) return 1;
|
||||||
|
return 0;
|
||||||
|
};
|
||||||
|
|
||||||
class Backend {
|
class Backend {
|
||||||
constructor(implementation, backendName, authStore = null) {
|
constructor(implementation, backendName, authStore = null) {
|
||||||
this.implementation = implementation;
|
this.implementation = implementation;
|
||||||
@ -153,30 +169,112 @@ class Backend {
|
|||||||
|
|
||||||
getToken = () => this.implementation.getToken();
|
getToken = () => this.implementation.getToken();
|
||||||
|
|
||||||
listEntries(collection) {
|
processEntries(loadedEntries, collection) {
|
||||||
const listMethod = this.implementation[selectListMethod(collection)];
|
|
||||||
const extension = selectFolderEntryExtension(collection);
|
|
||||||
const collectionFilter = collection.get('filter');
|
const collectionFilter = collection.get('filter');
|
||||||
return listMethod.call(this.implementation, collection, extension)
|
const entries = loadedEntries.map(loadedEntry => createEntry(
|
||||||
.then(loadedEntries => (
|
|
||||||
loadedEntries.map(loadedEntry => createEntry(
|
|
||||||
collection.get("name"),
|
collection.get("name"),
|
||||||
selectEntrySlug(collection, loadedEntry.file.path),
|
selectEntrySlug(collection, loadedEntry.file.path),
|
||||||
loadedEntry.file.path,
|
loadedEntry.file.path,
|
||||||
{ raw: loadedEntry.data || '', label: loadedEntry.file.label }
|
{ raw: loadedEntry.data || '', label: loadedEntry.file.label }
|
||||||
))
|
|
||||||
))
|
|
||||||
.then(entries => (
|
|
||||||
{
|
|
||||||
entries: entries.map(this.entryWithFormat(collection)),
|
|
||||||
}
|
|
||||||
))
|
|
||||||
// If this collection has a "filter" property, filter entries accordingly
|
|
||||||
.then(loadedCollection => (
|
|
||||||
{
|
|
||||||
entries: collectionFilter ? this.filterEntries(loadedCollection, collectionFilter) : loadedCollection.entries
|
|
||||||
}
|
|
||||||
));
|
));
|
||||||
|
const formattedEntries = entries.map(this.entryWithFormat(collection));
|
||||||
|
// If this collection has a "filter" property, filter entries accordingly
|
||||||
|
const filteredEntries = collectionFilter
|
||||||
|
? this.filterEntries({ entries: formattedEntries }, collectionFilter)
|
||||||
|
: formattedEntries;
|
||||||
|
return filteredEntries;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
listEntries(collection) {
|
||||||
|
const listMethod = this.implementation[selectListMethod(collection)];
|
||||||
|
const extension = selectFolderEntryExtension(collection);
|
||||||
|
return listMethod.call(this.implementation, collection, extension)
|
||||||
|
.then(loadedEntries => ({
|
||||||
|
entries: this.processEntries(loadedEntries, collection),
|
||||||
|
/*
|
||||||
|
Wrap cursors so we can tell which collection the cursor is
|
||||||
|
from. This is done to prevent traverseCursor from requiring a
|
||||||
|
`collection` argument.
|
||||||
|
*/
|
||||||
|
cursor: Cursor.create(loadedEntries[CURSOR_COMPATIBILITY_SYMBOL]).wrapData({
|
||||||
|
cursorType: "collectionEntries",
|
||||||
|
collection,
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// The same as listEntries, except that if a cursor with the "next"
|
||||||
|
// action available is returned, it calls "next" on the cursor and
|
||||||
|
// repeats the process. Once there is no available "next" action, it
|
||||||
|
// returns all the collected entries. Used to retrieve all entries
|
||||||
|
// for local searches and queries.
|
||||||
|
async listAllEntries(collection) {
|
||||||
|
if (collection.get("folder") && this.implementation.allEntriesByFolder) {
|
||||||
|
const extension = selectFolderEntryExtension(collection);
|
||||||
|
return this.implementation.allEntriesByFolder(collection, extension)
|
||||||
|
.then(entries => this.processEntries(entries, collection));
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await this.listEntries(collection);
|
||||||
|
const { entries } = response;
|
||||||
|
let { cursor } = response;
|
||||||
|
while (cursor && cursor.actions.includes("next")) {
|
||||||
|
const { entries: newEntries, cursor: newCursor } = await this.traverseCursor(cursor, "next");
|
||||||
|
entries.push(...newEntries);
|
||||||
|
cursor = newCursor;
|
||||||
|
}
|
||||||
|
return entries;
|
||||||
|
}
|
||||||
|
|
||||||
|
async search(collections, searchTerm) {
|
||||||
|
// Perform a local search by requesting all entries. For each
|
||||||
|
// collection, load it, search, and call onCollectionResults with
|
||||||
|
// its results.
|
||||||
|
const errors = [];
|
||||||
|
const collectionEntriesRequests = collections.map(async collection => {
|
||||||
|
// TODO: pass search fields in as an argument
|
||||||
|
const searchFields = [
|
||||||
|
selectInferedField(collection, 'title'),
|
||||||
|
selectInferedField(collection, 'shortTitle'),
|
||||||
|
selectInferedField(collection, 'author'),
|
||||||
|
];
|
||||||
|
const collectionEntries = await this.listAllEntries(collection);
|
||||||
|
return fuzzy.filter(searchTerm, collectionEntries, {
|
||||||
|
extract: extractSearchFields(searchFields),
|
||||||
|
});
|
||||||
|
}).map(p => p.catch(err => errors.push(err) && []));
|
||||||
|
|
||||||
|
const entries = await Promise.all(collectionEntriesRequests).then(arrs => flatten(arrs));
|
||||||
|
|
||||||
|
if (errors.length > 0) {
|
||||||
|
throw new Error({ message: "Errors ocurred while searching entries locally!", errors });
|
||||||
|
}
|
||||||
|
const hits = entries.filter(({ score }) => score > 5).sort(sortByScore).map(f => f.original);
|
||||||
|
return { entries: hits };
|
||||||
|
}
|
||||||
|
|
||||||
|
async query(collection, searchFields, searchTerm) {
|
||||||
|
const entries = await this.listAllEntries(collection);
|
||||||
|
const hits = fuzzy.filter(searchTerm, entries, { extract: extractSearchFields(searchFields) })
|
||||||
|
.filter(entry => entry.score > 5)
|
||||||
|
.sort(sortByScore)
|
||||||
|
.map(f => f.original);
|
||||||
|
return { query: searchTerm, hits };
|
||||||
|
}
|
||||||
|
|
||||||
|
traverseCursor(cursor, action) {
|
||||||
|
const [data, unwrappedCursor] = cursor.unwrapData();
|
||||||
|
// TODO: stop assuming all cursors are for collections
|
||||||
|
const collection = data.get("collection");
|
||||||
|
return this.implementation.traverseCursor(unwrappedCursor, action)
|
||||||
|
.then(async ({ entries, cursor: newCursor }) => ({
|
||||||
|
entries: this.processEntries(entries, collection),
|
||||||
|
cursor: Cursor.create(newCursor).wrapData({
|
||||||
|
cursorType: "collectionEntries",
|
||||||
|
collection,
|
||||||
|
}),
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
getEntry(collection, slug) {
|
getEntry(collection, slug) {
|
||||||
|
106
src/backends/git-gateway/GitHubAPI.js
Normal file
106
src/backends/git-gateway/GitHubAPI.js
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
import GithubAPI from "Backends/github/API";
|
||||||
|
import { APIError } from "ValueObjects/errors";
|
||||||
|
|
||||||
|
export default class API extends GithubAPI {
|
||||||
|
constructor(config) {
|
||||||
|
super(config);
|
||||||
|
this.api_root = config.api_root;
|
||||||
|
this.tokenPromise = config.tokenPromise;
|
||||||
|
this.commitAuthor = config.commitAuthor;
|
||||||
|
this.repoURL = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
hasWriteAccess() {
|
||||||
|
return this.getBranch()
|
||||||
|
.then(() => true)
|
||||||
|
.catch(error => {
|
||||||
|
if (error.status === 401) {
|
||||||
|
if (error.message === "Bad credentials") {
|
||||||
|
throw new APIError("Git Gateway Error: Please ask your site administrator to reissue the Git Gateway token.", error.status, 'Git Gateway');
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} else if (error.status === 404 && (error.message === undefined || error.message === "Unable to locate site configuration")) {
|
||||||
|
throw new APIError(`Git Gateway Error: Please make sure Git Gateway is enabled on your site.`, error.status, 'Git Gateway');
|
||||||
|
} else {
|
||||||
|
console.error("Problem fetching repo data from Git Gateway");
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
getRequestHeaders(headers = {}) {
|
||||||
|
return this.tokenPromise()
|
||||||
|
.then((jwtToken) => {
|
||||||
|
const baseHeader = {
|
||||||
|
"Authorization": `Bearer ${ jwtToken }`,
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
...headers,
|
||||||
|
};
|
||||||
|
|
||||||
|
return baseHeader;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
urlFor(path, options) {
|
||||||
|
const cacheBuster = new Date().getTime();
|
||||||
|
const params = [`ts=${ cacheBuster }`];
|
||||||
|
if (options.params) {
|
||||||
|
for (const key in options.params) {
|
||||||
|
params.push(`${ key }=${ encodeURIComponent(options.params[key]) }`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (params.length) {
|
||||||
|
path += `?${ params.join("&") }`;
|
||||||
|
}
|
||||||
|
return this.api_root + path;
|
||||||
|
}
|
||||||
|
|
||||||
|
user() {
|
||||||
|
return Promise.resolve(this.commitAuthor);
|
||||||
|
}
|
||||||
|
|
||||||
|
request(path, options = {}) {
|
||||||
|
const url = this.urlFor(path, options);
|
||||||
|
let responseStatus;
|
||||||
|
return this.getRequestHeaders(options.headers || {})
|
||||||
|
.then(headers => fetch(url, { ...options, headers }))
|
||||||
|
.then((response) => {
|
||||||
|
responseStatus = response.status;
|
||||||
|
const contentType = response.headers.get("Content-Type");
|
||||||
|
if (contentType && contentType.match(/json/)) {
|
||||||
|
return this.parseJsonResponse(response);
|
||||||
|
}
|
||||||
|
const text = response.text();
|
||||||
|
if (!response.ok) {
|
||||||
|
return Promise.reject(text);
|
||||||
|
}
|
||||||
|
return text;
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
throw new APIError((error.message || error.msg), responseStatus, 'Git Gateway');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
commit(message, changeTree) {
|
||||||
|
const commitParams = {
|
||||||
|
message,
|
||||||
|
tree: changeTree.sha,
|
||||||
|
parents: changeTree.parentSha ? [changeTree.parentSha] : [],
|
||||||
|
};
|
||||||
|
|
||||||
|
if (this.commitAuthor) {
|
||||||
|
commitParams.author = {
|
||||||
|
...this.commitAuthor,
|
||||||
|
date: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.request("/git/commits", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(commitParams),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
25
src/backends/git-gateway/GitLabAPI.js
Normal file
25
src/backends/git-gateway/GitLabAPI.js
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
import { flow } from "lodash";
|
||||||
|
import unsentRequest from "Lib/unsentRequest";
|
||||||
|
import { then } from "Lib/promiseHelper";
|
||||||
|
import GitlabAPI from "Backends/gitlab/API";
|
||||||
|
|
||||||
|
export default class API extends GitlabAPI {
|
||||||
|
constructor(config) {
|
||||||
|
super(config);
|
||||||
|
this.tokenPromise = config.tokenPromise;
|
||||||
|
this.commitAuthor = config.commitAuthor;
|
||||||
|
this.repoURL = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
authenticateRequest = async req => unsentRequest.withHeaders({
|
||||||
|
Authorization: `Bearer ${ await this.tokenPromise() }`,
|
||||||
|
}, req);
|
||||||
|
|
||||||
|
request = async req => flow([
|
||||||
|
this.buildRequest,
|
||||||
|
this.authenticateRequest,
|
||||||
|
then(unsentRequest.performRequest),
|
||||||
|
])(req);
|
||||||
|
|
||||||
|
hasWriteAccess = () => Promise.resolve(true)
|
||||||
|
}
|
@ -3,18 +3,20 @@ import jwtDecode from 'jwt-decode';
|
|||||||
import {List} from 'immutable';
|
import {List} from 'immutable';
|
||||||
import { get, pick, intersection } from "lodash";
|
import { get, pick, intersection } from "lodash";
|
||||||
import GitHubBackend from "Backends/github/implementation";
|
import GitHubBackend from "Backends/github/implementation";
|
||||||
import API from "./API";
|
import GitLabBackend from "Backends/gitlab/implementation";
|
||||||
|
import GitHubAPI from "./GitHubAPI";
|
||||||
|
import GitLabAPI from "./GitLabAPI";
|
||||||
import AuthenticationPage from "./AuthenticationPage";
|
import AuthenticationPage from "./AuthenticationPage";
|
||||||
|
|
||||||
const localHosts = {
|
const localHosts = {
|
||||||
localhost: true,
|
localhost: true,
|
||||||
'127.0.0.1': true,
|
'127.0.0.1': true,
|
||||||
'0.0.0.0': true
|
'0.0.0.0': true,
|
||||||
}
|
};
|
||||||
const defaults = {
|
const defaults = {
|
||||||
identity: '/.netlify/identity',
|
identity: '/.netlify/identity',
|
||||||
gateway: '/.netlify/git/github'
|
gateway: '/.netlify/git',
|
||||||
}
|
};
|
||||||
|
|
||||||
function getEndpoint(endpoint, netlifySiteURL) {
|
function getEndpoint(endpoint, netlifySiteURL) {
|
||||||
if (localHosts[document.location.host.split(":").shift()] && netlifySiteURL && endpoint.match(/^\/\.netlify\//)) {
|
if (localHosts[document.location.host.split(":").shift()] && netlifySiteURL && endpoint.match(/^\/\.netlify\//)) {
|
||||||
@ -29,65 +31,88 @@ function getEndpoint(endpoint, netlifySiteURL) {
|
|||||||
return endpoint;
|
return endpoint;
|
||||||
}
|
}
|
||||||
|
|
||||||
export default class GitGateway extends GitHubBackend {
|
export default class GitGateway {
|
||||||
constructor(config) {
|
constructor(config) {
|
||||||
super(config, true);
|
this.config = config;
|
||||||
|
this.branch = config.getIn(["backend", "branch"], "master").trim();
|
||||||
this.accept_roles = (config.getIn(["backend", "accept_roles"]) || List()).toArray();
|
this.squash_merges = config.getIn(["backend", "squash_merges"]);
|
||||||
|
|
||||||
const netlifySiteURL = localStorage.getItem("netlifySiteURL");
|
const netlifySiteURL = localStorage.getItem("netlifySiteURL");
|
||||||
const APIUrl = getEndpoint(config.getIn(["backend", "identity_url"], defaults.identity), netlifySiteURL);
|
const APIUrl = getEndpoint(config.getIn(["backend", "identity_url"], defaults.identity), netlifySiteURL);
|
||||||
this.github_proxy_url = getEndpoint(config.getIn(["backend", "gateway_url"], defaults.gateway), netlifySiteURL);
|
this.gatewayUrl = getEndpoint(config.getIn(["backend", "gateway_url"], defaults.gateway), netlifySiteURL);
|
||||||
|
|
||||||
|
const backendTypeRegex = /\/(github|gitlab)\/?$/;
|
||||||
|
const backendTypeMatches = this.gatewayUrl.match(backendTypeRegex);
|
||||||
|
if (backendTypeMatches) {
|
||||||
|
this.backendType = backendTypeMatches[1];
|
||||||
|
this.gatewayUrl = this.gatewayUrl.replace(backendTypeRegex, "/");
|
||||||
|
} else {
|
||||||
|
this.backendType = null;
|
||||||
|
}
|
||||||
|
|
||||||
this.authClient = window.netlifyIdentity ? window.netlifyIdentity.gotrue : new GoTrue({ APIUrl });
|
this.authClient = window.netlifyIdentity ? window.netlifyIdentity.gotrue : new GoTrue({ APIUrl });
|
||||||
|
|
||||||
AuthenticationPage.authClient = this.authClient;
|
AuthenticationPage.authClient = this.authClient;
|
||||||
}
|
|
||||||
|
|
||||||
restoreUser() {
|
this.backend = null;
|
||||||
const user = this.authClient && this.authClient.currentUser();
|
|
||||||
if (!user) return Promise.reject();
|
|
||||||
return this.authenticate(user);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
authenticate(user) {
|
authenticate(user) {
|
||||||
this.tokenPromise = user.jwt.bind(user);
|
this.tokenPromise = user.jwt.bind(user);
|
||||||
return this.tokenPromise()
|
return this.tokenPromise().then(async token => {
|
||||||
.then((token) => {
|
if (!this.backendType) {
|
||||||
let validRole = true;
|
const { github_enabled, gitlab_enabled, roles } = await fetch(`${ this.gatewayUrl }/settings`, {
|
||||||
if (this.accept_roles && this.accept_roles.length > 0) {
|
headers: { Authorization: `Bearer ${ token }` },
|
||||||
const userRoles = get(jwtDecode(token), 'app_metadata.roles', []);
|
}).then(res => res.json());
|
||||||
validRole = intersection(userRoles, this.accept_roles).length > 0;
|
this.acceptRoles = roles;
|
||||||
|
if (github_enabled) {
|
||||||
|
this.backendType = "github";
|
||||||
|
} else if (gitlab_enabled) {
|
||||||
|
this.backendType = "gitlab";
|
||||||
}
|
}
|
||||||
if (validRole) {
|
}
|
||||||
|
|
||||||
|
if (this.acceptRoles && this.acceptRoles.length > 0) {
|
||||||
|
const userRoles = get(jwtDecode(token), 'app_metadata.roles', []);
|
||||||
|
const validRole = intersection(userRoles, this.acceptRoles).length > 0;
|
||||||
|
if (!validRole) {
|
||||||
|
throw new Error("You don't have sufficient permissions to access Netlify CMS");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const userData = {
|
const userData = {
|
||||||
name: user.user_metadata.name || user.email.split('@').shift(),
|
name: user.user_metadata.name || user.email.split('@').shift(),
|
||||||
email: user.email,
|
email: user.email,
|
||||||
avatar_url: user.user_metadata.avatar_url,
|
avatar_url: user.user_metadata.avatar_url,
|
||||||
metadata: user.user_metadata,
|
metadata: user.user_metadata,
|
||||||
};
|
};
|
||||||
this.api = new API({
|
const apiConfig = {
|
||||||
api_root: this.github_proxy_url,
|
api_root: `${ this.gatewayUrl }/${ this.backendType }`,
|
||||||
branch: this.branch,
|
branch: this.branch,
|
||||||
tokenPromise: this.tokenPromise,
|
tokenPromise: this.tokenPromise,
|
||||||
commitAuthor: pick(userData, ["name", "email"]),
|
commitAuthor: pick(userData, ["name", "email"]),
|
||||||
squash_merges: this.squash_merges,
|
squash_merges: this.squash_merges,
|
||||||
});
|
};
|
||||||
return userData;
|
|
||||||
} else {
|
if (this.backendType === "github") {
|
||||||
throw new Error("You don't have sufficient permissions to access Netlify CMS");
|
this.api = new GitHubAPI(apiConfig);
|
||||||
}
|
this.backend = new GitHubBackend(this.config, { proxied: true, API: this.api });
|
||||||
})
|
} else if (this.backendType === "gitlab") {
|
||||||
.then(userData =>
|
this.api = new GitLabAPI(apiConfig);
|
||||||
this.api.hasWriteAccess().then(canWrite => {
|
this.backend = new GitLabBackend(this.config, { proxied: true, API: this.api });
|
||||||
if (canWrite) {
|
|
||||||
return userData;
|
|
||||||
} else {
|
|
||||||
throw new Error("You don't have sufficient permissions to access Netlify CMS");
|
|
||||||
}
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!(await this.api.hasWriteAccess())) {
|
||||||
|
throw new Error("You don't have sufficient permissions to access Netlify CMS");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
restoreUser() {
|
||||||
|
const user = this.authClient && this.authClient.currentUser();
|
||||||
|
if (!user) return Promise.reject();
|
||||||
|
return this.authenticate(user);
|
||||||
|
}
|
||||||
|
authComponent() {
|
||||||
|
return AuthenticationPage;
|
||||||
|
}
|
||||||
logout() {
|
logout() {
|
||||||
if (window.netlifyIdentity) {
|
if (window.netlifyIdentity) {
|
||||||
return window.netlifyIdentity.logout();
|
return window.netlifyIdentity.logout();
|
||||||
@ -95,13 +120,22 @@ export default class GitGateway extends GitHubBackend {
|
|||||||
const user = this.authClient.currentUser();
|
const user = this.authClient.currentUser();
|
||||||
return user && user.logout();
|
return user && user.logout();
|
||||||
}
|
}
|
||||||
|
|
||||||
getToken() {
|
getToken() {
|
||||||
return this.tokenPromise();
|
return this.tokenPromise();
|
||||||
}
|
}
|
||||||
|
|
||||||
authComponent() {
|
entriesByFolder(collection, extension) { return this.backend.entriesByFolder(collection, extension); }
|
||||||
return AuthenticationPage;
|
entriesByFiles(collection) { return this.backend.entriesByFiles(collection); }
|
||||||
}
|
fetchFiles(files) { return this.backend.fetchFiles(files); }
|
||||||
|
getEntry(collection, slug, path) { return this.backend.getEntry(collection, slug, path); }
|
||||||
|
getMedia() { return this.backend.getMedia(); }
|
||||||
|
persistEntry(entry, mediaFiles, options) { return this.backend.persistEntry(entry, mediaFiles, options); }
|
||||||
|
persistMedia(mediaFile, options) { return this.backend.persistMedia(mediaFile, options); }
|
||||||
|
deleteFile(path, commitMessage, options) { return this.backend.deleteFile(path, commitMessage, options); }
|
||||||
|
unpublishedEntries() { return this.backend.unpublishedEntries(); }
|
||||||
|
unpublishedEntry(collection, slug) { return this.backend.unpublishedEntry(collection, slug); }
|
||||||
|
updateUnpublishedEntryStatus(collection, slug, newStatus) { return this.backend.updateUnpublishedEntryStatus(collection, slug, newStatus); }
|
||||||
|
deleteUnpublishedEntry(collection, slug) { return this.backend.deleteUnpublishedEntry(collection, slug); }
|
||||||
|
publishUnpublishedEntry(collection, slug) { return this.backend.publishUnpublishedEntry(collection, slug); }
|
||||||
|
traverseCursor(cursor, action) { return this.backend.traverseCursor(cursor, action); }
|
||||||
}
|
}
|
||||||
|
@ -6,13 +6,20 @@ import API from "./API";
|
|||||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||||
|
|
||||||
export default class GitHub {
|
export default class GitHub {
|
||||||
constructor(config, proxied = false) {
|
constructor(config, options={}) {
|
||||||
this.config = config;
|
this.config = config;
|
||||||
|
this.options = {
|
||||||
|
proxied: false,
|
||||||
|
API: null,
|
||||||
|
...options,
|
||||||
|
};
|
||||||
|
|
||||||
if (!proxied && config.getIn(["backend", "repo"]) == null) {
|
if (!this.options.proxied && config.getIn(["backend", "repo"]) == null) {
|
||||||
throw new Error("The GitHub backend needs a \"repo\" in the backend configuration.");
|
throw new Error("The GitHub backend needs a \"repo\" in the backend configuration.");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.api = this.options.API || null;
|
||||||
|
|
||||||
this.repo = config.getIn(["backend", "repo"], "");
|
this.repo = config.getIn(["backend", "repo"], "");
|
||||||
this.branch = config.getIn(["backend", "branch"], "master").trim();
|
this.branch = config.getIn(["backend", "branch"], "master").trim();
|
||||||
this.api_root = config.getIn(["backend", "api_root"], "https://api.github.com");
|
this.api_root = config.getIn(["backend", "api_root"], "https://api.github.com");
|
||||||
|
224
src/backends/gitlab/API.js
Normal file
224
src/backends/gitlab/API.js
Normal file
@ -0,0 +1,224 @@
|
|||||||
|
import LocalForage from "Lib/LocalForage";
|
||||||
|
import { Base64 } from "js-base64";
|
||||||
|
import { fromJS, List, Map } from "immutable";
|
||||||
|
import { cond, flow, isString, partial, partialRight, pick, omit, set, update } from "lodash";
|
||||||
|
import unsentRequest from "Lib/unsentRequest";
|
||||||
|
import { then } from "Lib/promiseHelper";
|
||||||
|
import AssetProxy from "ValueObjects/AssetProxy";
|
||||||
|
import { APIError } from "ValueObjects/errors";
|
||||||
|
import Cursor from "ValueObjects/Cursor"
|
||||||
|
|
||||||
|
export default class API {
|
||||||
|
constructor(config) {
|
||||||
|
this.api_root = config.api_root || "https://gitlab.com/api/v4";
|
||||||
|
this.token = config.token || false;
|
||||||
|
this.branch = config.branch || "master";
|
||||||
|
this.repo = config.repo || "";
|
||||||
|
this.repoURL = `/projects/${ encodeURIComponent(this.repo) }`;
|
||||||
|
}
|
||||||
|
|
||||||
|
withAuthorizationHeaders = req =>
|
||||||
|
unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${ this.token }` } : {}, req);
|
||||||
|
|
||||||
|
buildRequest = req => flow([
|
||||||
|
unsentRequest.withRoot(this.api_root),
|
||||||
|
this.withAuthorizationHeaders,
|
||||||
|
unsentRequest.withTimestamp,
|
||||||
|
])(req);
|
||||||
|
|
||||||
|
request = async req => flow([
|
||||||
|
this.buildRequest,
|
||||||
|
unsentRequest.performRequest,
|
||||||
|
p => p.catch(err => Promise.reject(new APIError(err.message, null, "GitLab"))),
|
||||||
|
])(req);
|
||||||
|
|
||||||
|
parseResponse = async (res, { expectingOk=true, expectingFormat=false }) => {
|
||||||
|
const contentType = res.headers.get("Content-Type");
|
||||||
|
const isJSON = contentType === "application/json";
|
||||||
|
let body;
|
||||||
|
try {
|
||||||
|
body = await ((expectingFormat === "json" || isJSON) ? res.json() : res.text());
|
||||||
|
} catch (err) {
|
||||||
|
throw new APIError(err.message, res.status, "GitLab");
|
||||||
|
}
|
||||||
|
if (expectingOk && !res.ok) {
|
||||||
|
throw new APIError((isJSON && body.message) ? body.message : body, res.status, "GitLab");
|
||||||
|
}
|
||||||
|
return body;
|
||||||
|
};
|
||||||
|
|
||||||
|
responseToJSON = res => this.parseResponse(res, { expectingFormat: "json" });
|
||||||
|
responseToText = res => this.parseResponse(res, { expectingFormat: "text" });
|
||||||
|
requestJSON = req => this.request(req).then(this.responseToJSON);
|
||||||
|
requestText = req => this.request(req).then(this.responseToText);
|
||||||
|
|
||||||
|
user = () => this.requestJSON("/user");
|
||||||
|
|
||||||
|
WRITE_ACCESS = 30;
|
||||||
|
hasWriteAccess = user => this.requestJSON(this.repoURL).then(({ permissions }) => {
|
||||||
|
const { project_access, group_access } = permissions;
|
||||||
|
if (project_access && (project_access.access_level >= this.WRITE_ACCESS)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (group_access && (group_access.access_level >= this.WRITE_ACCESS)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
|
||||||
|
readFile = async (path, sha, ref=this.branch) => {
|
||||||
|
const cachedFile = sha ? await LocalForage.getItem(`gl.${ sha }`) : null;
|
||||||
|
if (cachedFile) { return cachedFile; }
|
||||||
|
const result = await this.requestText({
|
||||||
|
url: `${ this.repoURL }/repository/files/${ encodeURIComponent(path) }/raw`,
|
||||||
|
params: { ref },
|
||||||
|
cache: "no-store",
|
||||||
|
});
|
||||||
|
if (sha) { LocalForage.setItem(`gl.${ sha }`, result) }
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
fileDownloadURL = (path, ref=this.branch) => unsentRequest.toURL(this.buildRequest({
|
||||||
|
url: `${ this.repoURL }/repository/files/${ encodeURIComponent(path) }/raw`,
|
||||||
|
params: { ref },
|
||||||
|
}));
|
||||||
|
|
||||||
|
getCursorFromHeaders = headers => {
|
||||||
|
// indices and page counts are assumed to be zero-based, but the
|
||||||
|
// indices and page counts returned from GitLab are one-based
|
||||||
|
const index = parseInt(headers.get("X-Page"), 10) - 1;
|
||||||
|
const pageCount = parseInt(headers.get("X-Total-Pages"), 10) - 1;
|
||||||
|
const pageSize = parseInt(headers.get("X-Per-Page"), 10);
|
||||||
|
const count = parseInt(headers.get("X-Total"), 10);
|
||||||
|
const linksRaw = headers.get("Link");
|
||||||
|
const links = List(linksRaw.split(","))
|
||||||
|
.map(str => str.trim().split(";"))
|
||||||
|
.map(([linkStr, keyStr]) => [
|
||||||
|
keyStr.match(/rel="(.*?)"/)[1],
|
||||||
|
unsentRequest.fromURL(linkStr.trim().match(/<(.*?)>/)[1]),
|
||||||
|
])
|
||||||
|
.update(list => Map(list));
|
||||||
|
const actions = links.keySeq().flatMap(key => (
|
||||||
|
(key === "prev" && index > 0) ||
|
||||||
|
(key === "next" && index < pageCount) ||
|
||||||
|
(key === "first" && index > 0) ||
|
||||||
|
(key === "last" && index < pageCount)
|
||||||
|
) ? [key] : []);
|
||||||
|
return Cursor.create({
|
||||||
|
actions,
|
||||||
|
meta: { index, count, pageSize, pageCount },
|
||||||
|
data: { links },
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
getCursor = ({ headers }) => this.getCursorFromHeaders(headers);
|
||||||
|
|
||||||
|
// Gets a cursor without retrieving the entries by using a HEAD
|
||||||
|
// request
|
||||||
|
fetchCursor = req => flow([unsentRequest.withMethod("HEAD"), this.request, then(this.getCursor)])(req);
|
||||||
|
fetchCursorAndEntries = req => flow([
|
||||||
|
unsentRequest.withMethod("GET"),
|
||||||
|
this.request,
|
||||||
|
p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
|
||||||
|
then(([cursor, entries]) => ({ cursor, entries })),
|
||||||
|
])(req);
|
||||||
|
fetchRelativeCursor = async (cursor, action) => this.fetchCursor(cursor.data.links[action]);
|
||||||
|
|
||||||
|
reversableActions = Map({
|
||||||
|
first: "last",
|
||||||
|
last: "first",
|
||||||
|
next: "prev",
|
||||||
|
prev: "next",
|
||||||
|
});
|
||||||
|
|
||||||
|
reverseCursor = cursor => {
|
||||||
|
const pageCount = cursor.meta.get("pageCount", 0);
|
||||||
|
const currentIndex = cursor.meta.get("index", 0);
|
||||||
|
const newIndex = pageCount - currentIndex;
|
||||||
|
|
||||||
|
const links = cursor.data.get("links", Map());
|
||||||
|
const reversedLinks = links.mapEntries(([k, v]) => [this.reversableActions.get(k) || k, v]);
|
||||||
|
|
||||||
|
const reversedActions = cursor.actions.map(action => this.reversableActions.get(action) || action);
|
||||||
|
|
||||||
|
return cursor.updateStore(store => store
|
||||||
|
.setIn(["meta", "index"], newIndex)
|
||||||
|
.setIn(["data", "links"], reversedLinks)
|
||||||
|
.set("actions", reversedActions));
|
||||||
|
};
|
||||||
|
|
||||||
|
// The exported listFiles and traverseCursor reverse the direction
|
||||||
|
// of the cursors, since GitLab's pagination sorts the opposite way
|
||||||
|
// we want to sort by default (it sorts by filename _descending_,
|
||||||
|
// while the CMS defaults to sorting by filename _ascending_, at
|
||||||
|
// least in the current GitHub backend). This should eventually be
|
||||||
|
// refactored.
|
||||||
|
listFiles = async path => {
|
||||||
|
const firstPageCursor = await this.fetchCursor({
|
||||||
|
url: `${ this.repoURL }/repository/tree`,
|
||||||
|
params: { path, ref: this.branch },
|
||||||
|
});
|
||||||
|
const lastPageLink = firstPageCursor.data.getIn(["links", "last"]);
|
||||||
|
const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink);
|
||||||
|
return { files: entries.filter(({ type }) => type === "blob").reverse(), cursor: this.reverseCursor(cursor) };
|
||||||
|
};
|
||||||
|
|
||||||
|
traverseCursor = async (cursor, action) => {
|
||||||
|
const link = cursor.data.getIn(["links", action]);
|
||||||
|
const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
|
||||||
|
return { entries: entries.reverse(), cursor: this.reverseCursor(newCursor) };
|
||||||
|
};
|
||||||
|
|
||||||
|
listAllFiles = async path => {
|
||||||
|
const entries = [];
|
||||||
|
let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
|
||||||
|
url: `${ this.repoURL }/repository/tree`,
|
||||||
|
// Get the maximum number of entries per page
|
||||||
|
params: { path, ref: this.branch, per_page: 100 },
|
||||||
|
});
|
||||||
|
entries.push(...initialEntries);
|
||||||
|
while (cursor && cursor.actions.has("next")) {
|
||||||
|
const link = cursor.data.getIn(["links", "next"]);
|
||||||
|
const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
|
||||||
|
entries.push(...newEntries);
|
||||||
|
cursor = newCursor;
|
||||||
|
}
|
||||||
|
return entries.filter(({ type }) => type === "blob");
|
||||||
|
};
|
||||||
|
|
||||||
|
toBase64 = str => Promise.resolve(Base64.encode(str));
|
||||||
|
fromBase64 = str => Base64.decode(str);
|
||||||
|
uploadAndCommit = async (item, { commitMessage, updateFile = false, branch = this.branch, author = this.commitAuthor }) => {
|
||||||
|
const content = await (item instanceof AssetProxy ? item.toBase64() : this.toBase64(item.raw));
|
||||||
|
const file_path = item.path.replace(/^\//, "");
|
||||||
|
const action = (updateFile ? "update" : "create");
|
||||||
|
const encoding = "base64";
|
||||||
|
const { name: author_name, email: author_email } = pick(author || {}, ["name", "email"]);
|
||||||
|
const body = JSON.stringify({
|
||||||
|
branch,
|
||||||
|
commit_message: commitMessage,
|
||||||
|
actions: [{ action, file_path, content, encoding }],
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.request({
|
||||||
|
url: `${ this.repoURL }/repository/commits`,
|
||||||
|
method: "POST",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
|
||||||
|
return { ...item, uploaded: true };
|
||||||
|
};
|
||||||
|
|
||||||
|
persistFiles = (files, { commitMessage, newEntry }) =>
|
||||||
|
Promise.all(files.map(file => this.uploadAndCommit(file, { commitMessage, updateFile: newEntry === false })));
|
||||||
|
|
||||||
|
deleteFile = (path, commit_message, options = {}) => {
|
||||||
|
const branch = options.branch || this.branch;
|
||||||
|
return flow([
|
||||||
|
unsentRequest.withMethod("DELETE"),
|
||||||
|
unsentRequest.withParams({ commit_message, branch }),
|
||||||
|
this.request,
|
||||||
|
])(`${ this.repoURL }/repository/files/${ encodeURIComponent(path) }`);
|
||||||
|
};
|
||||||
|
}
|
69
src/backends/gitlab/AuthenticationPage.js
Normal file
69
src/backends/gitlab/AuthenticationPage.js
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
import PropTypes from 'prop-types';
|
||||||
|
import React from 'react';
|
||||||
|
import NetlifyAuthenticator from 'Lib/netlify-auth';
|
||||||
|
import ImplicitAuthenticator from 'Lib/implicit-oauth';
|
||||||
|
import { Icon } from 'UI';
|
||||||
|
|
||||||
|
export default class AuthenticationPage extends React.Component {
|
||||||
|
static propTypes = {
|
||||||
|
onLogin: PropTypes.func.isRequired,
|
||||||
|
inProgress: PropTypes.bool,
|
||||||
|
};
|
||||||
|
|
||||||
|
state = {};
|
||||||
|
|
||||||
|
componentDidMount() {
|
||||||
|
const authType = this.props.config.getIn(['backend', 'auth_type']);
|
||||||
|
if (authType === "implicit") {
|
||||||
|
this.auth = new ImplicitAuthenticator({
|
||||||
|
base_url: this.props.config.getIn(['backend', 'base_url'], "https://gitlab.com"),
|
||||||
|
auth_endpoint: this.props.config.getIn(['backend', 'auth_endpoint'], 'oauth/authorize'),
|
||||||
|
appID: this.props.config.getIn(['backend', 'app_id']),
|
||||||
|
});
|
||||||
|
// Complete implicit authentication if we were redirected back to from the provider.
|
||||||
|
this.auth.completeAuth((err, data) => {
|
||||||
|
if (err) {
|
||||||
|
this.setState({ loginError: err.toString() });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.props.onLogin(data);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
this.auth = new NetlifyAuthenticator({
|
||||||
|
base_url: this.props.base_url,
|
||||||
|
site_id: (document.location.host.split(':')[0] === 'localhost') ? 'cms.netlify.com' : this.props.siteId,
|
||||||
|
auth_endpoint: this.props.authEndpoint,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
handleLogin = (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.auth.authenticate({ provider: 'gitlab', scope: 'api' }, (err, data) => {
|
||||||
|
if (err) {
|
||||||
|
this.setState({ loginError: err.toString() });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.props.onLogin(data);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
render() {
|
||||||
|
const { loginError } = this.state;
|
||||||
|
const { inProgress } = this.props;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<section className="nc-githubAuthenticationPage-root">
|
||||||
|
<Icon className="nc-githubAuthenticationPage-logo" size="500px" type="netlify-cms"/>
|
||||||
|
{loginError && <p>{loginError}</p>}
|
||||||
|
<button
|
||||||
|
className="nc-githubAuthenticationPage-button"
|
||||||
|
disabled={inProgress}
|
||||||
|
onClick={this.handleLogin}
|
||||||
|
>
|
||||||
|
<Icon type="gitlab" /> {inProgress ? "Logging in..." : "Login with GitLab"}
|
||||||
|
</button>
|
||||||
|
</section>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
155
src/backends/gitlab/implementation.js
Normal file
155
src/backends/gitlab/implementation.js
Normal file
@ -0,0 +1,155 @@
|
|||||||
|
import trimStart from 'lodash/trimStart';
|
||||||
|
import semaphore from "semaphore";
|
||||||
|
import AuthenticationPage from "./AuthenticationPage";
|
||||||
|
import API from "./API";
|
||||||
|
import { fileExtension } from 'Lib/pathHelper';
|
||||||
|
import { CURSOR_COMPATIBILITY_SYMBOL } from 'ValueObjects/Cursor';
|
||||||
|
import { EDITORIAL_WORKFLOW } from "Constants/publishModes";
|
||||||
|
|
||||||
|
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||||
|
|
||||||
|
export default class GitLab {
|
||||||
|
constructor(config, options={}) {
|
||||||
|
this.config = config;
|
||||||
|
this.options = {
|
||||||
|
proxied: false,
|
||||||
|
API: null,
|
||||||
|
...options,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (config.getIn(["publish_mode"]) === EDITORIAL_WORKFLOW) {
|
||||||
|
throw new Error("The GitLab backend does not support the Editorial Workflow.")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.options.proxied && config.getIn(["backend", "repo"]) == null) {
|
||||||
|
throw new Error("The GitLab backend needs a \"repo\" in the backend configuration.");
|
||||||
|
}
|
||||||
|
|
||||||
|
this.api = this.options.API || null;
|
||||||
|
|
||||||
|
this.repo = config.getIn(["backend", "repo"], "");
|
||||||
|
this.branch = config.getIn(["backend", "branch"], "master");
|
||||||
|
this.api_root = config.getIn(["backend", "api_root"], "https://gitlab.com/api/v4");
|
||||||
|
this.token = '';
|
||||||
|
}
|
||||||
|
|
||||||
|
authComponent() {
|
||||||
|
return AuthenticationPage;
|
||||||
|
}
|
||||||
|
|
||||||
|
restoreUser(user) {
|
||||||
|
return this.authenticate(user);
|
||||||
|
}
|
||||||
|
|
||||||
|
authenticate(state) {
|
||||||
|
this.token = state.token;
|
||||||
|
this.api = new API({ token: this.token, branch: this.branch, repo: this.repo, api_root: this.api_root });
|
||||||
|
return this.api.user().then(user =>
|
||||||
|
this.api.hasWriteAccess(user).then((isCollab) => {
|
||||||
|
// Unauthorized user
|
||||||
|
if (!isCollab) throw new Error("Your GitLab user account does not have access to this repo.");
|
||||||
|
// Authorized user
|
||||||
|
return Object.assign({}, user, { token: state.token });
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
logout() {
|
||||||
|
this.token = null;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
getToken() {
|
||||||
|
return Promise.resolve(this.token);
|
||||||
|
}
|
||||||
|
|
||||||
|
entriesByFolder(collection, extension) {
|
||||||
|
return this.api.listFiles(collection.get("folder"))
|
||||||
|
.then(({ files, cursor }) =>
|
||||||
|
this.fetchFiles(files.filter(file => fileExtension(file.name) === extension))
|
||||||
|
.then(fetchedFiles => {
|
||||||
|
const returnedFiles = fetchedFiles;
|
||||||
|
returnedFiles[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||||
|
return returnedFiles;
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
allEntriesByFolder(collection, extension) {
|
||||||
|
return this.api.listAllFiles(collection.get("folder"))
|
||||||
|
.then(files => this.fetchFiles(files.filter(file => fileExtension(file.name) === extension)));
|
||||||
|
}
|
||||||
|
|
||||||
|
entriesByFiles(collection) {
|
||||||
|
const files = collection.get("files").map(collectionFile => ({
|
||||||
|
path: collectionFile.get("file"),
|
||||||
|
label: collectionFile.get("label"),
|
||||||
|
}));
|
||||||
|
return this.fetchFiles(files).then(fetchedFiles => {
|
||||||
|
const returnedFiles = fetchedFiles;
|
||||||
|
return returnedFiles;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchFiles = (files) => {
|
||||||
|
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
|
||||||
|
const promises = [];
|
||||||
|
files.forEach((file) => {
|
||||||
|
promises.push(new Promise((resolve, reject) => (
|
||||||
|
sem.take(() => this.api.readFile(file.path, file.id).then((data) => {
|
||||||
|
resolve({ file, data });
|
||||||
|
sem.leave();
|
||||||
|
}).catch((error = true) => {
|
||||||
|
sem.leave();
|
||||||
|
console.error(`failed to load file from GitLab: ${ file.path }`);
|
||||||
|
resolve({ error });
|
||||||
|
}))
|
||||||
|
)));
|
||||||
|
});
|
||||||
|
return Promise.all(promises)
|
||||||
|
.then(loadedEntries => loadedEntries.filter(loadedEntry => !loadedEntry.error));
|
||||||
|
};
|
||||||
|
|
||||||
|
// Fetches a single entry.
|
||||||
|
getEntry(collection, slug, path) {
|
||||||
|
return this.api.readFile(path).then(data => ({
|
||||||
|
file: { path },
|
||||||
|
data,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
getMedia() {
|
||||||
|
return this.api.listAllFiles(this.config.get('media_folder'))
|
||||||
|
.then(files => files.map(({ id, name, path }) => {
|
||||||
|
const url = new URL(this.api.fileDownloadURL(path));
|
||||||
|
if (url.pathname.match(/.svg$/)) {
|
||||||
|
url.search += (url.search.slice(1) === '' ? '?' : '&') + 'sanitize=true';
|
||||||
|
}
|
||||||
|
return { id, name, url: url.href, path };
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
async persistEntry(entry, mediaFiles, options = {}) {
|
||||||
|
return this.api.persistFiles([entry], options);
|
||||||
|
}
|
||||||
|
|
||||||
|
async persistMedia(mediaFile, options = {}) {
|
||||||
|
await this.api.persistFiles([mediaFile], options);
|
||||||
|
const { value, path, fileObj } = mediaFile;
|
||||||
|
const url = this.api.fileDownloadURL(path);
|
||||||
|
return { name: value, size: fileObj.size, url, path: trimStart(path, '/') };
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteFile(path, commitMessage, options) {
|
||||||
|
return this.api.deleteFile(path, commitMessage, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
traverseCursor(cursor, action) {
|
||||||
|
return this.api.traverseCursor(cursor, action)
|
||||||
|
.then(async ({ entries, cursor: newCursor }) => ({
|
||||||
|
entries: await Promise.all(entries.map(file => this.api.readFile(file.path, file.id).then(data => ({ file, data })))),
|
||||||
|
cursor: newCursor,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
@ -1,7 +1,9 @@
|
|||||||
import { remove, attempt, isError } from 'lodash';
|
import { remove, attempt, isError, take } from 'lodash';
|
||||||
import uuid from 'uuid/v4';
|
import uuid from 'uuid/v4';
|
||||||
|
import { fromJS } from 'immutable';
|
||||||
import { EDITORIAL_WORKFLOW, status } from 'Constants/publishModes';
|
import { EDITORIAL_WORKFLOW, status } from 'Constants/publishModes';
|
||||||
import { EditorialWorkflowError } from 'ValueObjects/errors';
|
import { EditorialWorkflowError } from 'ValueObjects/errors';
|
||||||
|
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from 'ValueObjects/Cursor'
|
||||||
import AuthenticationPage from './AuthenticationPage';
|
import AuthenticationPage from './AuthenticationPage';
|
||||||
|
|
||||||
window.repoFiles = window.repoFiles || {};
|
window.repoFiles = window.repoFiles || {};
|
||||||
@ -16,6 +18,31 @@ function getFile(path) {
|
|||||||
return obj || {};
|
return obj || {};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const pageSize = 10;
|
||||||
|
|
||||||
|
const getCursor = (collection, extension, entries, index) => {
|
||||||
|
const count = entries.length;
|
||||||
|
const pageCount = Math.floor(count / pageSize);
|
||||||
|
return Cursor.create({
|
||||||
|
actions: [
|
||||||
|
...(index < pageCount ? ["next", "last"] : []),
|
||||||
|
...(index > 0 ? ["prev", "first"] : []),
|
||||||
|
],
|
||||||
|
meta: { index, count, pageSize, pageCount },
|
||||||
|
data: { collection, extension, index, pageCount },
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const getFolderEntries = (folder, extension) => {
|
||||||
|
return Object.keys(window.repoFiles[folder])
|
||||||
|
.filter(path => path.endsWith(`.${ extension }`))
|
||||||
|
.map(path => ({
|
||||||
|
file: { path: `${ folder }/${ path }` },
|
||||||
|
data: window.repoFiles[folder][path].content,
|
||||||
|
}))
|
||||||
|
.reverse();
|
||||||
|
};
|
||||||
|
|
||||||
export default class TestRepo {
|
export default class TestRepo {
|
||||||
constructor(config) {
|
constructor(config) {
|
||||||
this.config = config;
|
this.config = config;
|
||||||
@ -42,25 +69,28 @@ export default class TestRepo {
|
|||||||
return Promise.resolve('');
|
return Promise.resolve('');
|
||||||
}
|
}
|
||||||
|
|
||||||
entriesByFolder(collection, extension) {
|
traverseCursor(cursor, action) {
|
||||||
const entries = [];
|
const { collection, extension, index, pageCount } = cursor.data.toObject();
|
||||||
const folder = collection.get('folder');
|
const newIndex = (() => {
|
||||||
if (folder) {
|
if (action === "next") { return index + 1; }
|
||||||
for (const path in window.repoFiles[folder]) {
|
if (action === "prev") { return index - 1; }
|
||||||
if (!path.endsWith('.' + extension)) {
|
if (action === "first") { return 0; }
|
||||||
continue;
|
if (action === "last") { return pageCount; }
|
||||||
|
})();
|
||||||
|
// TODO: stop assuming cursors are for collections
|
||||||
|
const allEntries = getFolderEntries(collection.get('folder'), extension);
|
||||||
|
const entries = allEntries.slice(newIndex * pageSize, (newIndex * pageSize) + pageSize);
|
||||||
|
const newCursor = getCursor(collection, extension, allEntries, newIndex);
|
||||||
|
return Promise.resolve({ entries, cursor: newCursor });
|
||||||
}
|
}
|
||||||
|
|
||||||
const file = { path: `${ folder }/${ path }` };
|
entriesByFolder(collection, extension) {
|
||||||
entries.push(
|
const folder = collection.get('folder');
|
||||||
{
|
const entries = folder ? getFolderEntries(folder, extension) : [];
|
||||||
file,
|
const cursor = getCursor(collection, extension, entries, 0);
|
||||||
data: window.repoFiles[folder][path].content,
|
const ret = take(entries, pageSize);
|
||||||
}
|
ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||||
);
|
return Promise.resolve(ret);
|
||||||
}
|
|
||||||
}
|
|
||||||
return Promise.resolve(entries);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
entriesByFiles(collection) {
|
entriesByFiles(collection) {
|
||||||
@ -101,7 +131,7 @@ export default class TestRepo {
|
|||||||
e.metaData.collection === collection && e.slug === slug
|
e.metaData.collection === collection && e.slug === slug
|
||||||
));
|
));
|
||||||
unpubStore.splice(existingEntryIndex, 1);
|
unpubStore.splice(existingEntryIndex, 1);
|
||||||
return Promise.resolve()
|
return Promise.resolve();
|
||||||
}
|
}
|
||||||
|
|
||||||
persistEntry({ path, raw, slug }, mediaFiles = [], options = {}) {
|
persistEntry({ path, raw, slug }, mediaFiles = [], options = {}) {
|
||||||
|
@ -11,7 +11,9 @@ const Entries = ({
|
|||||||
page,
|
page,
|
||||||
onPaginate,
|
onPaginate,
|
||||||
isFetching,
|
isFetching,
|
||||||
viewStyle
|
viewStyle,
|
||||||
|
cursor,
|
||||||
|
handleCursorActions,
|
||||||
}) => {
|
}) => {
|
||||||
const loadingMessages = [
|
const loadingMessages = [
|
||||||
'Loading Entries',
|
'Loading Entries',
|
||||||
@ -25,9 +27,9 @@ const Entries = ({
|
|||||||
collections={collections}
|
collections={collections}
|
||||||
entries={entries}
|
entries={entries}
|
||||||
publicFolder={publicFolder}
|
publicFolder={publicFolder}
|
||||||
page={page}
|
|
||||||
onPaginate={onPaginate}
|
|
||||||
viewStyle={viewStyle}
|
viewStyle={viewStyle}
|
||||||
|
cursor={cursor}
|
||||||
|
handleCursorActions={handleCursorActions}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -46,6 +48,8 @@ Entries.propTypes = {
|
|||||||
page: PropTypes.number,
|
page: PropTypes.number,
|
||||||
isFetching: PropTypes.bool,
|
isFetching: PropTypes.bool,
|
||||||
viewStyle: PropTypes.string,
|
viewStyle: PropTypes.string,
|
||||||
|
cursor: PropTypes.any.isRequired,
|
||||||
|
handleCursorActions: PropTypes.func.isRequired,
|
||||||
};
|
};
|
||||||
|
|
||||||
export default Entries;
|
export default Entries;
|
||||||
|
@ -2,18 +2,26 @@ import React from 'react';
|
|||||||
import PropTypes from 'prop-types';
|
import PropTypes from 'prop-types';
|
||||||
import ImmutablePropTypes from 'react-immutable-proptypes';
|
import ImmutablePropTypes from 'react-immutable-proptypes';
|
||||||
import { connect } from 'react-redux';
|
import { connect } from 'react-redux';
|
||||||
import { loadEntries as actionLoadEntries } from 'Actions/entries';
|
import { partial } from 'lodash';
|
||||||
|
import {
|
||||||
|
loadEntries as actionLoadEntries,
|
||||||
|
traverseCollectionCursor as actionTraverseCollectionCursor,
|
||||||
|
} from 'Actions/entries';
|
||||||
import { selectEntries } from 'Reducers';
|
import { selectEntries } from 'Reducers';
|
||||||
|
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
|
||||||
|
import Cursor from 'ValueObjects/Cursor';
|
||||||
import Entries from './Entries';
|
import Entries from './Entries';
|
||||||
|
|
||||||
class EntriesCollection extends React.Component {
|
class EntriesCollection extends React.Component {
|
||||||
static propTypes = {
|
static propTypes = {
|
||||||
collection: ImmutablePropTypes.map.isRequired,
|
collection: ImmutablePropTypes.map.isRequired,
|
||||||
publicFolder: PropTypes.string.isRequired,
|
publicFolder: PropTypes.string.isRequired,
|
||||||
page: PropTypes.number,
|
|
||||||
entries: ImmutablePropTypes.list,
|
entries: ImmutablePropTypes.list,
|
||||||
isFetching: PropTypes.bool.isRequired,
|
isFetching: PropTypes.bool.isRequired,
|
||||||
viewStyle: PropTypes.string,
|
viewStyle: PropTypes.string,
|
||||||
|
cursor: PropTypes.object.isRequired,
|
||||||
|
loadEntries: PropTypes.func.isRequired,
|
||||||
|
traverseCollectionCursor: PropTypes.func.isRequired,
|
||||||
};
|
};
|
||||||
|
|
||||||
componentDidMount() {
|
componentDidMount() {
|
||||||
@ -30,31 +38,31 @@ class EntriesCollection extends React.Component {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
handleLoadMore = page => {
|
handleCursorActions = (cursor, action) => {
|
||||||
const { collection, loadEntries } = this.props;
|
const { collection, traverseCollectionCursor } = this.props;
|
||||||
loadEntries(collection, page);
|
traverseCollectionCursor(collection, action);
|
||||||
}
|
};
|
||||||
|
|
||||||
render () {
|
render () {
|
||||||
const { collection, entries, publicFolder, page, isFetching, viewStyle } = this.props;
|
const { collection, entries, publicFolder, isFetching, viewStyle, cursor } = this.props;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Entries
|
<Entries
|
||||||
collections={collection}
|
collections={collection}
|
||||||
entries={entries}
|
entries={entries}
|
||||||
publicFolder={publicFolder}
|
publicFolder={publicFolder}
|
||||||
page={page}
|
|
||||||
onPaginate={this.handleLoadMore}
|
|
||||||
isFetching={isFetching}
|
isFetching={isFetching}
|
||||||
collectionName={collection.get('label')}
|
collectionName={collection.get('label')}
|
||||||
viewStyle={viewStyle}
|
viewStyle={viewStyle}
|
||||||
|
cursor={cursor}
|
||||||
|
handleCursorActions={partial(this.handleCursorActions, cursor)}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function mapStateToProps(state, ownProps) {
|
function mapStateToProps(state, ownProps) {
|
||||||
const { name, collection, viewStyle } = ownProps;
|
const { collection, viewStyle } = ownProps;
|
||||||
const { config } = state;
|
const { config } = state;
|
||||||
const publicFolder = config.get('public_folder');
|
const publicFolder = config.get('public_folder');
|
||||||
const page = state.entries.getIn(['pages', collection.get('name'), 'page']);
|
const page = state.entries.getIn(['pages', collection.get('name'), 'page']);
|
||||||
@ -62,11 +70,15 @@ function mapStateToProps(state, ownProps) {
|
|||||||
const entries = selectEntries(state, collection.get('name'));
|
const entries = selectEntries(state, collection.get('name'));
|
||||||
const isFetching = state.entries.getIn(['pages', collection.get('name'), 'isFetching'], false);
|
const isFetching = state.entries.getIn(['pages', collection.get('name'), 'isFetching'], false);
|
||||||
|
|
||||||
return { publicFolder, collection, page, entries, isFetching, viewStyle };
|
const rawCursor = selectCollectionEntriesCursor(state.cursors, collection.get("name"));
|
||||||
|
const cursor = Cursor.create(rawCursor).clearData();
|
||||||
|
|
||||||
|
return { publicFolder, collection, page, entries, isFetching, viewStyle, cursor };
|
||||||
}
|
}
|
||||||
|
|
||||||
const mapDispatchToProps = {
|
const mapDispatchToProps = {
|
||||||
loadEntries: actionLoadEntries,
|
loadEntries: actionLoadEntries,
|
||||||
|
traverseCollectionCursor: actionTraverseCollectionCursor,
|
||||||
};
|
};
|
||||||
|
|
||||||
export default connect(mapStateToProps, mapDispatchToProps)(EntriesCollection);
|
export default connect(mapStateToProps, mapDispatchToProps)(EntriesCollection);
|
||||||
|
@ -7,6 +7,7 @@ import {
|
|||||||
searchEntries as actionSearchEntries,
|
searchEntries as actionSearchEntries,
|
||||||
clearSearch as actionClearSearch
|
clearSearch as actionClearSearch
|
||||||
} from 'Actions/search';
|
} from 'Actions/search';
|
||||||
|
import Cursor from 'ValueObjects/Cursor';
|
||||||
import Entries from './Entries';
|
import Entries from './Entries';
|
||||||
|
|
||||||
class EntriesSearch extends React.Component {
|
class EntriesSearch extends React.Component {
|
||||||
@ -36,15 +37,27 @@ class EntriesSearch extends React.Component {
|
|||||||
this.props.clearSearch();
|
this.props.clearSearch();
|
||||||
}
|
}
|
||||||
|
|
||||||
handleLoadMore = (page) => {
|
getCursor = () => {
|
||||||
const { searchTerm, searchEntries } = this.props;
|
const { page } = this.props;
|
||||||
if (!isNaN(page)) searchEntries(searchTerm, page);
|
return Cursor.create({
|
||||||
|
actions: isNaN(page) ? [] : ["append_next"],
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
handleCursorActions = (action) => {
|
||||||
|
const { page, searchTerm, searchEntries } = this.props;
|
||||||
|
if (action === "append_next") {
|
||||||
|
const nextPage = page + 1;
|
||||||
|
searchEntries(searchTerm, nextPage);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
render () {
|
render () {
|
||||||
const { collections, entries, publicFolder, page, isFetching } = this.props;
|
const { collections, entries, publicFolder, page, isFetching } = this.props;
|
||||||
return (
|
return (
|
||||||
<Entries
|
<Entries
|
||||||
|
cursor={this.getCursor()}
|
||||||
|
handleCursorActions={this.handleCursorActions}
|
||||||
collections={collections}
|
collections={collections}
|
||||||
entries={entries}
|
entries={entries}
|
||||||
publicFolder={publicFolder}
|
publicFolder={publicFolder}
|
||||||
@ -59,8 +72,8 @@ class EntriesSearch extends React.Component {
|
|||||||
function mapStateToProps(state, ownProps) {
|
function mapStateToProps(state, ownProps) {
|
||||||
const { searchTerm } = ownProps;
|
const { searchTerm } = ownProps;
|
||||||
const collections = ownProps.collections.toIndexedSeq();
|
const collections = ownProps.collections.toIndexedSeq();
|
||||||
const isFetching = state.entries.getIn(['search', 'isFetching']);
|
const isFetching = state.search.get('isFetching');
|
||||||
const page = state.entries.getIn(['search', 'page']);
|
const page = state.search.get('page');
|
||||||
const entries = selectSearchedEntries(state);
|
const entries = selectSearchedEntries(state);
|
||||||
const publicFolder = state.config.get('public_folder');
|
const publicFolder = state.config.get('public_folder');
|
||||||
|
|
||||||
|
@ -5,6 +5,7 @@ import Waypoint from 'react-waypoint';
|
|||||||
import { Map } from 'immutable';
|
import { Map } from 'immutable';
|
||||||
import { selectFields, selectInferedField } from 'Reducers/collections';
|
import { selectFields, selectInferedField } from 'Reducers/collections';
|
||||||
import EntryCard from './EntryCard';
|
import EntryCard from './EntryCard';
|
||||||
|
import Cursor from 'ValueObjects/Cursor';
|
||||||
|
|
||||||
export default class EntryListing extends React.Component {
|
export default class EntryListing extends React.Component {
|
||||||
static propTypes = {
|
static propTypes = {
|
||||||
@ -14,13 +15,14 @@ export default class EntryListing extends React.Component {
|
|||||||
ImmutablePropTypes.iterable,
|
ImmutablePropTypes.iterable,
|
||||||
]).isRequired,
|
]).isRequired,
|
||||||
entries: ImmutablePropTypes.list,
|
entries: ImmutablePropTypes.list,
|
||||||
onPaginate: PropTypes.func.isRequired,
|
|
||||||
page: PropTypes.number,
|
|
||||||
viewStyle: PropTypes.string,
|
viewStyle: PropTypes.string,
|
||||||
};
|
};
|
||||||
|
|
||||||
handleLoadMore = () => {
|
handleLoadMore = () => {
|
||||||
this.props.onPaginate(this.props.page + 1);
|
const { cursor, handleCursorActions } = this.props;
|
||||||
|
if (Cursor.create(cursor).actions.has("append_next")) {
|
||||||
|
handleCursorActions("append_next");
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
inferFields = collection => {
|
inferFields = collection => {
|
||||||
@ -53,7 +55,7 @@ export default class EntryListing extends React.Component {
|
|||||||
};
|
};
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
const { collections, entries, publicFolder } = this.props;
|
const { collections } = this.props;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div>
|
<div>
|
||||||
|
@ -1,35 +1,36 @@
|
|||||||
import PropTypes from 'prop-types';
|
import PropTypes from 'prop-types';
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
|
|
||||||
const ErrorComponent = () => {
|
const DefaultErrorComponent = () => {
|
||||||
const issueUrl = "https://github.com/netlify/netlify-cms/issues/new";
|
};
|
||||||
|
|
||||||
|
const ISSUE_URL = "https://github.com/netlify/netlify-cms/issues/new";
|
||||||
|
|
||||||
|
export class ErrorBoundary extends React.Component {
|
||||||
|
state = {
|
||||||
|
hasError: false,
|
||||||
|
errorMessage: '',
|
||||||
|
};
|
||||||
|
|
||||||
|
componentDidCatch(error) {
|
||||||
|
console.error(error);
|
||||||
|
this.setState({ hasError: true, errorMessage: error.toString() });
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
const { hasError, errorMessage } = this.state;
|
||||||
|
if (!hasError) {
|
||||||
|
return this.props.children;
|
||||||
|
}
|
||||||
return (
|
return (
|
||||||
<div className="nc-errorBoundary">
|
<div className="nc-errorBoundary">
|
||||||
<h1 className="nc-errorBoundary-heading">Sorry!</h1>
|
<h1 className="nc-errorBoundary-heading">Sorry!</h1>
|
||||||
<p>
|
<p>
|
||||||
<span>There's been an error - please </span>
|
<span>There's been an error - please </span>
|
||||||
<a href={issueUrl} target="_blank" className="nc-errorBoundary-link">report it</a>!
|
<a href={ISSUE_URL} target="_blank" className="nc-errorBoundary-link">report it</a>!
|
||||||
</p>
|
</p>
|
||||||
|
<p>{errorMessage}</p>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
|
||||||
|
|
||||||
export class ErrorBoundary extends React.Component {
|
|
||||||
static propTypes = {
|
|
||||||
render: PropTypes.element,
|
|
||||||
};
|
|
||||||
|
|
||||||
state = {
|
|
||||||
hasError: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
componentDidCatch(error) {
|
|
||||||
console.error(error);
|
|
||||||
this.setState({ hasError: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
render() {
|
|
||||||
const errorComponent = this.props.errorComponent || <ErrorComponent/>;
|
|
||||||
return this.state.hasError ? errorComponent : this.props.children;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -13,6 +13,7 @@ import iconDragHandle from './drag-handle.svg';
|
|||||||
import iconEye from './eye.svg';
|
import iconEye from './eye.svg';
|
||||||
import iconFolder from './folder.svg';
|
import iconFolder from './folder.svg';
|
||||||
import iconGithub from './github.svg';
|
import iconGithub from './github.svg';
|
||||||
|
import iconGitlab from './gitlab.svg';
|
||||||
import iconGrid from './grid.svg';
|
import iconGrid from './grid.svg';
|
||||||
import iconH1 from './h1.svg';
|
import iconH1 from './h1.svg';
|
||||||
import iconH2 from './h2.svg';
|
import iconH2 from './h2.svg';
|
||||||
@ -55,6 +56,7 @@ const images = {
|
|||||||
'eye': iconEye,
|
'eye': iconEye,
|
||||||
'folder': iconFolder,
|
'folder': iconFolder,
|
||||||
'github': iconGithub,
|
'github': iconGithub,
|
||||||
|
'gitlab': iconGitlab,
|
||||||
'grid': iconGrid,
|
'grid': iconGrid,
|
||||||
'h1': iconH1,
|
'h1': iconH1,
|
||||||
'h2': iconH2,
|
'h2': iconH2,
|
||||||
|
1
src/components/UI/Icon/images/gitlab.svg
Normal file
1
src/components/UI/Icon/images/gitlab.svg
Normal file
@ -0,0 +1 @@
|
|||||||
|
<svg width="26" height="26" xmlns="http://www.w3.org/2000/svg"><g fill-rule="nonzero" fill="none"><path d="M22.616 14.971L21.52 11.5l-2.173-6.882a.37.37 0 0 0-.71 0l-2.172 6.882H9.252L7.079 4.617a.37.37 0 0 0-.71 0l-2.172 6.882L3.1 14.971c-.1.317.01.664.27.86l9.487 7.094 9.487-7.094a.781.781 0 0 0 .27-.86" fill="#FC6D26"/><path d="M12.858 22.925L16.465 11.5H9.251z" fill="#E24329"/><path d="M12.858 22.925L9.251 11.5H4.197z" fill="#FC6D26"/><path d="M4.197 11.499L3.1 14.971c-.1.317.01.664.27.86l9.487 7.094L4.197 11.5z" fill="#FCA326"/><path d="M4.197 11.499H9.25L7.08 4.617a.37.37 0 0 0-.71 0l-2.172 6.882z" fill="#E24329"/><path d="M12.858 22.925L16.465 11.5h5.055z" fill="#FC6D26"/><path d="M21.52 11.499l1.096 3.472c.1.317-.01.664-.271.86l-9.487 7.094L21.52 11.5z" fill="#FCA326"/><path d="M21.52 11.499h-5.055l2.172-6.882a.37.37 0 0 1 .71 0l2.173 6.882z" fill="#E24329"/></g></svg>
|
After Width: | Height: | Size: 889 B |
74
src/lib/implicit-oauth.js
Normal file
74
src/lib/implicit-oauth.js
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
import { Map } from 'immutable';
|
||||||
|
import { trim, trimEnd } from 'lodash';
|
||||||
|
import { randomStr } from 'Lib/randomGenerator';
|
||||||
|
import history from 'Routing/history';
|
||||||
|
|
||||||
|
function createNonce() {
|
||||||
|
const nonce = randomStr();
|
||||||
|
window.sessionStorage.setItem("netlify-cms-auth", JSON.stringify({ nonce }));
|
||||||
|
return nonce;
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateNonce(check) {
|
||||||
|
const auth = window.sessionStorage.getItem("netlify-cms-auth");
|
||||||
|
const valid = auth && JSON.parse(auth).nonce;
|
||||||
|
window.localStorage.removeItem("netlify-cms-auth");
|
||||||
|
return (check === valid);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default class ImplicitAuthenticator {
|
||||||
|
constructor(config = {}) {
|
||||||
|
const baseURL = trimEnd(config.base_url, '/');
|
||||||
|
const authEndpoint = trim(config.auth_endpoint, '/');
|
||||||
|
this.auth_url = `${ baseURL }/${ authEndpoint }`;
|
||||||
|
this.appID = config.app_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
authenticate(options, cb) {
|
||||||
|
if (
|
||||||
|
document.location.protocol !== "https:"
|
||||||
|
// TODO: Is insecure localhost a bad idea as well? I don't think it is, since you are not actually
|
||||||
|
// sending the token over the internet in this case, assuming the auth URL is secure.
|
||||||
|
&& (document.location.hostname !== "localhost" && document.location.hostname !== "127.0.0.1")
|
||||||
|
) {
|
||||||
|
return cb(new Error("Cannot authenticate over insecure protocol!"));
|
||||||
|
}
|
||||||
|
|
||||||
|
const authURL = new URL(this.auth_url);
|
||||||
|
authURL.searchParams.set('client_id', this.appID);
|
||||||
|
authURL.searchParams.set('redirect_uri', document.location.origin + document.location.pathname);
|
||||||
|
authURL.searchParams.set('response_type', 'token');
|
||||||
|
authURL.searchParams.set('scope', options.scope);
|
||||||
|
authURL.searchParams.set('state', createNonce());
|
||||||
|
|
||||||
|
document.location.assign(authURL.href);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Complete authentication if we were redirected back to from the provider.
|
||||||
|
*/
|
||||||
|
completeAuth(cb) {
|
||||||
|
const hashParams = new URLSearchParams(document.location.hash.replace(/^#?\/?/, ''));
|
||||||
|
if (!hashParams.has("access_token") && !hashParams.has("error")) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Remove tokens from hash so that token does not remain in browser history.
|
||||||
|
history.replace('/');
|
||||||
|
|
||||||
|
const params = Map(hashParams.entries());
|
||||||
|
|
||||||
|
const validNonce = validateNonce(params.get('state'));
|
||||||
|
if (!validNonce) {
|
||||||
|
return cb(new Error("Invalid nonce"));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.has('error')) {
|
||||||
|
return cb(new Error(`${ params.get('error') }: ${ params.get('error_description') }`));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.has('access_token')) {
|
||||||
|
const { access_token: token, ...data } = params.toJS();
|
||||||
|
cb(null, { token, ...data });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -18,3 +18,5 @@ export const resolvePromiseProperties = (obj) => {
|
|||||||
// resolved values
|
// resolved values
|
||||||
Object.assign({}, obj, zipObject(promiseKeys, resolvedPromises)));
|
Object.assign({}, obj, zipObject(promiseKeys, resolvedPromises)));
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const then = fn => p => Promise.resolve(p).then(fn);
|
||||||
|
@ -2,30 +2,17 @@
|
|||||||
* Random number generator
|
* Random number generator
|
||||||
*/
|
*/
|
||||||
|
|
||||||
let rng;
|
const padNumber = (num, base) => {
|
||||||
|
const padLen = (32 / Math.sqrt(base));
|
||||||
if (window.crypto && crypto.getRandomValues) {
|
const str = num.toString(base);
|
||||||
// WHATWG crypto-based RNG - http://wiki.whatwg.org/wiki/Crypto
|
return (('0' * padLen) + str).slice(-padLen);
|
||||||
// Moderately fast, high quality
|
|
||||||
const _rnds32 = new Uint32Array(1);
|
|
||||||
rng = function whatwgRNG() {
|
|
||||||
crypto.getRandomValues(_rnds32);
|
|
||||||
return _rnds32[0];
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!rng) {
|
export function randomStr(len = 256) {
|
||||||
// Math.random()-based (RNG)
|
const _rnds = new Uint32Array(Math.ceil(len / 32));
|
||||||
// If no Crypto available, use Math.random().
|
window.crypto.getRandomValues(_rnds);
|
||||||
rng = function() {
|
|
||||||
const r = Math.random() * 0x100000000;
|
|
||||||
const _rnds = r >>> 0;
|
|
||||||
return _rnds;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function randomStr() {
|
const str = _rnds.reduce((agg, val) => (agg + padNumber(val, 16)), '');
|
||||||
return rng().toString(36);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default rng;
|
return str.slice(-len);
|
||||||
|
}
|
79
src/lib/unsentRequest.js
Normal file
79
src/lib/unsentRequest.js
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
import { fromJS, List, Map } from 'immutable';
|
||||||
|
import { curry, flow, isString } from "lodash";
|
||||||
|
|
||||||
|
const decodeParams = paramsString => List(paramsString.split("&"))
|
||||||
|
.map(s => List(s.split("=")).map(decodeURIComponent))
|
||||||
|
.update(Map);
|
||||||
|
|
||||||
|
const fromURL = wholeURL => {
|
||||||
|
const [url, allParamsString] = wholeURL.split("?");
|
||||||
|
return Map({ url, ...(allParamsString ? { params: decodeParams(allParamsString) } : {}) });
|
||||||
|
};
|
||||||
|
|
||||||
|
const encodeParams = params => params.entrySeq()
|
||||||
|
.map(([k, v]) => `${ encodeURIComponent(k) }=${ encodeURIComponent(v) }`)
|
||||||
|
.join("&");
|
||||||
|
|
||||||
|
const toURL = req => `${ req.get("url") }${ req.get("params") ? `?${ encodeParams(req.get("params")) }` : "" }`;
|
||||||
|
|
||||||
|
const toFetchArguments = req => [toURL(req), req.delete("url").delete("params").toJS()];
|
||||||
|
|
||||||
|
const maybeRequestArg = req => {
|
||||||
|
if (isString(req)) { return fromURL(req); }
|
||||||
|
if (req) { return fromJS(req); }
|
||||||
|
return Map();
|
||||||
|
};
|
||||||
|
const ensureRequestArg = func => req => func(maybeRequestArg(req));
|
||||||
|
const ensureRequestArg2 = func => (arg, req) => func(arg, maybeRequestArg(req));
|
||||||
|
|
||||||
|
// This actually performs the built request object
|
||||||
|
const performRequest = ensureRequestArg(req => fetch(...toFetchArguments(req)));
|
||||||
|
|
||||||
|
// Each of the following functions takes options and returns another
|
||||||
|
// function that performs the requested action on a request. They each
|
||||||
|
// default to containing an empty object, so you can simply call them
|
||||||
|
// without arguments to generate a request with only those properties.
|
||||||
|
const getCurriedRequestProcessor = flow([ensureRequestArg2, curry]);
|
||||||
|
const getPropSetFunctions = path => [
|
||||||
|
getCurriedRequestProcessor((val, req) => req.setIn(path, val)),
|
||||||
|
getCurriedRequestProcessor((val, req) => (req.getIn(path) ? req : req.setIn(path, val))),
|
||||||
|
];
|
||||||
|
const getPropMergeFunctions = path => [
|
||||||
|
getCurriedRequestProcessor((obj, req) => req.updateIn(path, (p=Map()) => p.merge(obj))),
|
||||||
|
getCurriedRequestProcessor((obj, req) => req.updateIn(path, (p=Map()) => Map(obj).merge(p))),
|
||||||
|
];
|
||||||
|
|
||||||
|
const [withMethod, withDefaultMethod] = getPropSetFunctions(["method"]);
|
||||||
|
const [withBody, withDefaultBody] = getPropSetFunctions(["method"]);
|
||||||
|
const [withParams, withDefaultParams] = getPropMergeFunctions(["params"]);
|
||||||
|
const [withHeaders, withDefaultHeaders] = getPropMergeFunctions(["headers"]);
|
||||||
|
|
||||||
|
// withRoot sets a root URL, unless the URL is already absolute
|
||||||
|
const absolutePath = new RegExp('^(?:[a-z]+:)?//', 'i');
|
||||||
|
const withRoot = getCurriedRequestProcessor((root, req) => req.update("url", p => {
|
||||||
|
if (absolutePath.test(p)) { return p; }
|
||||||
|
return (root && p && p[0] !== "/" && root[root.length - 1] !== "/")
|
||||||
|
? `${ root }/${ p }`
|
||||||
|
: `${ root }${ p }`;
|
||||||
|
}));
|
||||||
|
|
||||||
|
// withTimestamp needs no argument and has to run as late as possible,
|
||||||
|
// so it calls `withParams` only when it's actually called with a
|
||||||
|
// request.
|
||||||
|
const withTimestamp = ensureRequestArg(req => withParams({ ts: new Date().getTime() }, req));
|
||||||
|
|
||||||
|
export default {
|
||||||
|
toURL,
|
||||||
|
fromURL,
|
||||||
|
performRequest,
|
||||||
|
withMethod,
|
||||||
|
withDefaultMethod,
|
||||||
|
withBody,
|
||||||
|
withDefaultBody,
|
||||||
|
withHeaders,
|
||||||
|
withDefaultHeaders,
|
||||||
|
withParams,
|
||||||
|
withDefaultParams,
|
||||||
|
withRoot,
|
||||||
|
withTimestamp,
|
||||||
|
};
|
27
src/reducers/cursors.js
Normal file
27
src/reducers/cursors.js
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
import { fromJS, Map } from 'immutable';
|
||||||
|
import Cursor from "ValueObjects/Cursor";
|
||||||
|
import {
|
||||||
|
ENTRIES_SUCCESS,
|
||||||
|
} from 'Actions/entries';
|
||||||
|
|
||||||
|
// Since pagination can be used for a variety of views (collections
|
||||||
|
// and searches are the most common examples), we namespace cursors by
|
||||||
|
// their type before storing them in the state.
|
||||||
|
export const selectCollectionEntriesCursor = (state, collectionName) =>
|
||||||
|
new Cursor(state.getIn(["cursorsByType", "collectionEntries", collectionName]));
|
||||||
|
|
||||||
|
const cursors = (state = fromJS({ cursorsByType: { collectionEntries: {} } }), action) => {
|
||||||
|
switch (action.type) {
|
||||||
|
case ENTRIES_SUCCESS: {
|
||||||
|
return state.setIn(
|
||||||
|
["cursorsByType", "collectionEntries", action.payload.collection],
|
||||||
|
Cursor.create(action.payload.cursor).store
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
return state;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export default cursors;
|
@ -13,6 +13,7 @@ import { SEARCH_ENTRIES_SUCCESS } from 'Actions/search';
|
|||||||
|
|
||||||
let collection;
|
let collection;
|
||||||
let loadedEntries;
|
let loadedEntries;
|
||||||
|
let append;
|
||||||
let page;
|
let page;
|
||||||
|
|
||||||
const entries = (state = Map({ entities: Map(), pages: Map() }), action) => {
|
const entries = (state = Map({ entities: Map(), pages: Map() }), action) => {
|
||||||
@ -32,6 +33,7 @@ const entries = (state = Map({ entities: Map(), pages: Map() }), action) => {
|
|||||||
case ENTRIES_SUCCESS:
|
case ENTRIES_SUCCESS:
|
||||||
collection = action.payload.collection;
|
collection = action.payload.collection;
|
||||||
loadedEntries = action.payload.entries;
|
loadedEntries = action.payload.entries;
|
||||||
|
append = action.payload.append;
|
||||||
page = action.payload.page;
|
page = action.payload.page;
|
||||||
return state.withMutations((map) => {
|
return state.withMutations((map) => {
|
||||||
loadedEntries.forEach(entry => (
|
loadedEntries.forEach(entry => (
|
||||||
@ -41,7 +43,9 @@ const entries = (state = Map({ entities: Map(), pages: Map() }), action) => {
|
|||||||
const ids = List(loadedEntries.map(entry => entry.slug));
|
const ids = List(loadedEntries.map(entry => entry.slug));
|
||||||
map.setIn(['pages', collection], Map({
|
map.setIn(['pages', collection], Map({
|
||||||
page,
|
page,
|
||||||
ids: (!page || page === 0) ? ids : map.getIn(['pages', collection, 'ids'], List()).concat(ids),
|
ids: append
|
||||||
|
? map.getIn(['pages', collection, 'ids'], List()).concat(ids)
|
||||||
|
: ids,
|
||||||
}));
|
}));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -2,6 +2,7 @@ import auth from './auth';
|
|||||||
import config from './config';
|
import config from './config';
|
||||||
import integrations, * as fromIntegrations from './integrations';
|
import integrations, * as fromIntegrations from './integrations';
|
||||||
import entries, * as fromEntries from './entries';
|
import entries, * as fromEntries from './entries';
|
||||||
|
import cursors from './cursors';
|
||||||
import editorialWorkflow, * as fromEditorialWorkflow from './editorialWorkflow';
|
import editorialWorkflow, * as fromEditorialWorkflow from './editorialWorkflow';
|
||||||
import entryDraft from './entryDraft';
|
import entryDraft from './entryDraft';
|
||||||
import collections from './collections';
|
import collections from './collections';
|
||||||
@ -17,6 +18,7 @@ const reducers = {
|
|||||||
search,
|
search,
|
||||||
integrations,
|
integrations,
|
||||||
entries,
|
entries,
|
||||||
|
cursors,
|
||||||
editorialWorkflow,
|
editorialWorkflow,
|
||||||
entryDraft,
|
entryDraft,
|
||||||
mediaLibrary,
|
mediaLibrary,
|
||||||
|
@ -38,7 +38,7 @@ const entries = (state = defaultState, action) => {
|
|||||||
map.set('isFetching', false);
|
map.set('isFetching', false);
|
||||||
map.set('page', page);
|
map.set('page', page);
|
||||||
map.set('term', searchTerm);
|
map.set('term', searchTerm);
|
||||||
map.set('entryIds', page === 0 ? entryIds : map.get('entryIds', List()).concat(entryIds));
|
map.set('entryIds', (!page || isNaN(page) || page === 0) ? entryIds : map.get('entryIds', List()).concat(entryIds));
|
||||||
});
|
});
|
||||||
|
|
||||||
case QUERY_REQUEST:
|
case QUERY_REQUEST:
|
||||||
|
115
src/valueObjects/Cursor.js
Normal file
115
src/valueObjects/Cursor.js
Normal file
@ -0,0 +1,115 @@
|
|||||||
|
import { fromJS, Map, Set } from "immutable";
|
||||||
|
|
||||||
|
const jsToMap = obj => {
|
||||||
|
if (obj === undefined) {
|
||||||
|
return Map();
|
||||||
|
}
|
||||||
|
const immutableObj = fromJS(obj);
|
||||||
|
if (!Map.isMap(immutableObj)) {
|
||||||
|
throw new Error("Object must be equivalent to a Map.");
|
||||||
|
}
|
||||||
|
return immutableObj;
|
||||||
|
};
|
||||||
|
|
||||||
|
const knownMetaKeys = Set(["index", "count", "pageSize", "pageCount", "usingOldPaginationAPI"]);
|
||||||
|
const filterUnknownMetaKeys = meta => meta.filter((v, k) => knownMetaKeys.has(k));
|
||||||
|
|
||||||
|
/*
|
||||||
|
createCursorMap takes one of three signatures:
|
||||||
|
- () -> cursor with empty actions, data, and meta
|
||||||
|
- (cursorMap: <object/Map with optional actions, data, and meta keys>) -> cursor
|
||||||
|
- (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>) -> cursor
|
||||||
|
*/
|
||||||
|
const createCursorMap = (...args) => {
|
||||||
|
const { actions, data, meta } = args.length === 1
|
||||||
|
? jsToMap(args[0]).toObject()
|
||||||
|
: { actions: args[0], data: args[1], meta: args[2] };
|
||||||
|
return Map({
|
||||||
|
// actions are a Set, rather than a List, to ensure an efficient .has
|
||||||
|
actions: Set(actions),
|
||||||
|
|
||||||
|
// data and meta are Maps
|
||||||
|
data: jsToMap(data),
|
||||||
|
meta: jsToMap(meta).update(filterUnknownMetaKeys),
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const hasAction = (cursorMap, action) => cursorMap.hasIn(["actions", action]);
|
||||||
|
|
||||||
|
const getActionHandlers = (cursorMap, handler) =>
|
||||||
|
cursorMap.get("actions", Set()).toMap().map(action => handler(action));
|
||||||
|
|
||||||
|
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface. Every mutator returns a NEW Cursor;
// the receiver is never modified.
export default class Cursor {
  /** Convenience factory — accepts any createCursorMap signature. */
  static create(...args) {
    return new Cursor(...args);
  }

  constructor(...args) {
    // Passing an existing Cursor through is a no-op: reuse it rather than
    // rebuilding an identical immutable store.
    if (args[0] instanceof Cursor) {
      return args[0];
    }

    this.store = createCursorMap(...args);
    // Expose the three sub-maps directly for read convenience.
    this.actions = this.store.get("actions");
    this.data = this.store.get("data");
    this.meta = this.store.get("meta");
  }

  /** Return a new Cursor whose store is `this.store.update(...args)`. */
  updateStore(...args) {
    return new Cursor(this.store.update(...args));
  }
  /** Return a new Cursor whose store is `this.store.updateIn(...args)`. */
  updateInStore(...args) {
    return new Cursor(this.store.updateIn(...args));
  }

  /** True if this cursor supports `action` (e.g. "next", "prev"). */
  hasAction(action) {
    return hasAction(this.store, action);
  }
  addAction(action) {
    return this.updateStore("actions", actions => actions.add(action));
  }
  removeAction(action) {
    return this.updateStore("actions", actions => actions.delete(action));
  }
  /** Replace the whole action Set. */
  setActions(actions) {
    return this.updateStore(store => store.set("actions", Set(actions)));
  }
  /** Union the given actions into the existing Set. */
  mergeActions(actions) {
    return this.updateStore("actions", oldActions => oldActions.union(actions));
  }
  /** Map of action name -> handler(action) for every registered action. */
  getActionHandlers(handler) {
    return getActionHandlers(this.store, handler);
  }

  /** Replace the data map entirely. */
  setData(data) {
    return new Cursor(this.store.set("data", jsToMap(data)));
  }
  /** Shallow-merge `data` into the existing data map. */
  mergeData(data) {
    return new Cursor(this.store.mergeIn(["data"], jsToMap(data)));
  }
  /** Nest the current data under "wrapped_cursor_data" inside the new data. */
  wrapData(data) {
    return this.updateStore("data", oldData => jsToMap(data).set("wrapped_cursor_data", oldData));
  }
  /**
   * Inverse of wrapData: returns [outerData, cursorRewoundToInnerData] so
   * callers get both the wrapping payload and a cursor for the inner layer.
   */
  unwrapData() {
    return [
      this.store.get("data").delete("wrapped_cursor_data"),
      this.updateStore("data", data => data.get("wrapped_cursor_data")),
    ];
  }
  /** Reset the data map to empty. (Fixed: the updater took an unused param.) */
  clearData() {
    return this.updateStore("data", () => Map());
  }

  /** Replace the meta map entirely. */
  setMeta(meta) {
    return this.updateStore(store => store.set("meta", jsToMap(meta)));
  }
  /** Shallow-merge `meta` into the existing meta map. */
  mergeMeta(meta) {
    return this.updateStore(store => store.update("meta", oldMeta => oldMeta.merge(jsToMap(meta))));
  }
}
|
||||||
|
|
||||||
|
// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
// NOTE(review): a Symbol key is deliberately non-enumerable in JSON and
// plain-object iteration, so attaching a cursor under it cannot collide
// with or leak into an old backend's own entry fields.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol("cursor key for compatibility with old backends");
|
@ -1,11 +1,12 @@
|
|||||||
export const API_ERROR = 'API_ERROR';

/**
 * Error thrown by backend API wrappers.
 *
 * @param {string} message - human-readable description; also becomes Error.message.
 * @param {number} status  - HTTP status code (or equivalent) of the failed call.
 * @param {string} api     - name of the backend API that produced the error.
 * @param {Object} [meta]  - optional extra context attached to the error
 *                           (defaults to a fresh empty object per instance).
 */
export default class APIError extends Error {
  constructor(message, status, api, meta = {}) {
    super(message);
    this.message = message;
    this.status = status;
    this.api = api;
    // Fixed name (not the class name) so callers can match err.name === API_ERROR.
    this.name = API_ERROR;
    this.meta = meta;
  }
}
|
||||||
|
Reference in New Issue
Block a user