import {
  localForage,
  parseLinkHeader,
  unsentRequest,
  then,
  APIError,
  Cursor,
} from 'netlify-cms-lib-util';
import { Base64 } from 'js-base64';
import { fromJS, Map } from 'immutable';
import { flow, partial, result } from 'lodash';
/**
 * Client for the GitLab REST API (v4).
 *
 * Builds authenticated requests with `unsentRequest`, parses responses as
 * json/text/blob, and wraps GitLab's header-based pagination in `Cursor`
 * objects. Failures are surfaced as `APIError` instances tagged 'GitLab'.
 */
export default class API {
  /**
   * @param {Object} config
   * @param {string} [config.api_root] - API root; defaults to gitlab.com v4.
   * @param {string|boolean} [config.token] - auth token, or false when absent.
   * @param {string} [config.branch] - branch to read/write; default 'master'.
   * @param {string} [config.repo] - project path, e.g. "group/project".
   */
  constructor(config) {
    this.api_root = config.api_root || 'https://gitlab.com/api/v4';
    this.token = config.token || false;
    this.branch = config.branch || 'master';
    this.repo = config.repo || '';
    // GitLab addresses projects by their URL-encoded "namespace/project" path.
    this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
  }

  // Attach the bearer token when one is configured; otherwise pass through.
  withAuthorizationHeaders = req =>
    unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${this.token}` } : {}, req);

  // Resolve a request against the API root, authorize it, and timestamp it.
  buildRequest = req =>
    flow([
      unsentRequest.withRoot(this.api_root),
      this.withAuthorizationHeaders,
      unsentRequest.withTimestamp,
    ])(req);

  // Perform a request, normalizing network-level failures to APIError.
  request = async req =>
    flow([
      this.buildRequest,
      unsentRequest.performRequest,
      p => p.catch(err => Promise.reject(new APIError(err.message, null, 'GitLab'))),
    ])(req);

  // Wrap a response formatter so parse failures name the expected format.
  catchFormatErrors = (format, formatter) => res => {
    try {
      return formatter(res);
    } catch (err) {
      throw new Error(
        `Response cannot be parsed into the expected format (${format}): ${err.message}`,
      );
    }
  };

  // Supported response formats; each entry validates/parses a fetch Response.
  responseFormats = fromJS({
    json: async res => {
      const contentType = res.headers.get('Content-Type');
      // Guard against non-JSON bodies (e.g. HTML error pages) reaching res.json().
      if (contentType !== 'application/json' && contentType !== 'text/json') {
        throw new Error(`${contentType} is not a valid JSON Content-Type`);
      }
      return res.json();
    },
    text: async res => res.text(),
    blob: async res => res.blob(),
  }).mapEntries(([format, formatter]) => [format, this.catchFormatErrors(format, formatter)]);

  /**
   * Parse a fetch Response in the requested format.
   *
   * @param {Response} res
   * @param {Object} [options] - defaulted to {} so the bare call is safe
   *   (previously `parseResponse(res)` threw a destructuring TypeError).
   * @param {boolean} [options.expectingOk=true] - throw on non-2xx statuses.
   * @param {string} [options.expectingFormat='text'] - 'json' | 'text' | 'blob'.
   * @returns {Promise<*>} the parsed body.
   * @throws {APIError} on parse failure or (when expectingOk) non-ok status.
   */
  parseResponse = async (res, { expectingOk = true, expectingFormat = 'text' } = {}) => {
    let body;
    try {
      const formatter = this.responseFormats.get(expectingFormat, false);
      if (!formatter) {
        throw new Error(`${expectingFormat} is not a supported response format.`);
      }
      body = await formatter(res);
    } catch (err) {
      throw new APIError(err.message, res.status, 'GitLab');
    }
    if (expectingOk && !res.ok) {
      // GitLab JSON error bodies carry the human-readable reason in `message`.
      const isJSON = expectingFormat === 'json';
      throw new APIError(isJSON && body.message ? body.message : body, res.status, 'GitLab');
    }
    return body;
  };

  responseToJSON = res => this.parseResponse(res, { expectingFormat: 'json' });
  responseToBlob = res => this.parseResponse(res, { expectingFormat: 'blob' });
  responseToText = res => this.parseResponse(res, { expectingFormat: 'text' });
  requestJSON = req => this.request(req).then(this.responseToJSON);
  requestText = req => this.request(req).then(this.responseToText);

  // Current authenticated user (GET /user).
  user = () => this.requestJSON('/user');

  // GitLab "Developer" access level — the minimum that permits commits.
  WRITE_ACCESS = 30;

  // True when project- or group-level access reaches WRITE_ACCESS.
  hasWriteAccess = () =>
    this.requestJSON(this.repoURL).then(({ permissions }) => {
      const { project_access, group_access } = permissions;
      if (project_access && project_access.access_level >= this.WRITE_ACCESS) {
        return true;
      }
      if (group_access && group_access.access_level >= this.WRITE_ACCESS) {
        return true;
      }
      return false;
    });

  /**
   * Read a repository file, using localForage as a sha-keyed cache.
   *
   * @param {string} path - file path within the repository.
   * @param {string} [sha] - blob sha; enables caching when present.
   * @param {Object} [options]
   * @param {string} [options.ref=this.branch] - ref to read from.
   * @param {boolean} [options.parseText=true] - text (true) or Blob (false).
   * @returns {Promise<string|Blob>}
   */
  readFile = async (path, sha, { ref = this.branch, parseText = true } = {}) => {
    // Text and blob variants of the same sha must not collide in the cache.
    const cacheKey = parseText ? `gl.${sha}` : `gl.${sha}.blob`;
    const cachedFile = sha ? await localForage.getItem(cacheKey) : null;
    if (cachedFile) {
      return cachedFile;
    }
    // Named `fileContent` to avoid shadowing the lodash `result` import.
    const fileContent = await this.request({
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
      params: { ref },
      cache: 'no-store',
    }).then(parseText ? this.responseToText : this.responseToBlob);
    if (sha) {
      // Previously fired-and-forgotten, risking an unhandled rejection. A
      // failed cache write must not fail the read, so errors are swallowed.
      await localForage.setItem(cacheKey, fileContent).catch(() => undefined);
    }
    return fileContent;
  };

  /**
   * Build a Cursor from GitLab's pagination headers (X-Page, X-Total-Pages,
   * X-Per-Page, X-Total, Link).
   *
   * @param {Headers} headers - fetch Response headers.
   * @returns {Cursor}
   */
  getCursorFromHeaders = headers => {
    // Indices and page counts are assumed to be zero-based, but the
    // indices and page counts returned from GitLab are one-based.
    const index = parseInt(headers.get('X-Page'), 10) - 1;
    const pageCount = parseInt(headers.get('X-Total-Pages'), 10) - 1;
    const pageSize = parseInt(headers.get('X-Per-Page'), 10);
    const count = parseInt(headers.get('X-Total'), 10);
    const links = parseLinkHeader(headers.get('Link'));
    // Only expose navigation actions that can actually move the cursor.
    const actions = Map(links)
      .keySeq()
      .flatMap(key =>
        (key === 'prev' && index > 0) ||
        (key === 'next' && index < pageCount) ||
        (key === 'first' && index > 0) ||
        (key === 'last' && index < pageCount)
          ? [key]
          : [],
      );
    return Cursor.create({
      actions,
      meta: { index, count, pageSize, pageCount },
      data: { links },
    });
  };

  getCursor = ({ headers }) => this.getCursorFromHeaders(headers);

  // Gets a cursor without retrieving the entries by using a HEAD request.
  fetchCursor = req =>
    flow([unsentRequest.withMethod('HEAD'), this.request, then(this.getCursor)])(req);

  // GET a page, returning both its pagination cursor and its parsed entries.
  fetchCursorAndEntries = req =>
    flow([
      unsentRequest.withMethod('GET'),
      this.request,
      p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
      then(([cursor, entries]) => ({ cursor, entries })),
    ])(req);

  fetchRelativeCursor = async (cursor, action) => this.fetchCursor(cursor.data.links[action]);

  // Pagination actions mapped to their opposites, used by reverseCursor.
  reversableActions = Map({
    first: 'last',
    last: 'first',
    next: 'prev',
    prev: 'next',
  });

  // Flip a cursor so iteration proceeds in the opposite direction.
  reverseCursor = cursor => {
    const pageCount = cursor.meta.get('pageCount', 0);
    const currentIndex = cursor.meta.get('index', 0);
    const newIndex = pageCount - currentIndex;

    const links = cursor.data.get('links', Map());
    const reversedLinks = links.mapEntries(([k, v]) => [this.reversableActions.get(k) || k, v]);

    const reversedActions = cursor.actions.map(
      action => this.reversableActions.get(action) || action,
    );

    return cursor.updateStore(store =>
      store
        .setIn(['meta', 'index'], newIndex)
        .setIn(['data', 'links'], reversedLinks)
        .set('actions', reversedActions),
    );
  };

  // The exported listFiles and traverseCursor reverse the direction
  // of the cursors, since GitLab's pagination sorts the opposite way
  // we want to sort by default (it sorts by filename _descending_,
  // while the CMS defaults to sorting by filename _ascending_, at
  // least in the current GitHub backend). This should eventually be
  // refactored.
  listFiles = async path => {
    const firstPageCursor = await this.fetchCursor({
      url: `${this.repoURL}/repository/tree`,
      params: { path, ref: this.branch, recursive: true },
    });
    const lastPageLink = firstPageCursor.data.getIn(['links', 'last']);
    const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink);
    return {
      // Only blobs are files; tree entries (directories) are skipped.
      files: entries.filter(({ type }) => type === 'blob').reverse(),
      cursor: this.reverseCursor(cursor),
    };
  };

  traverseCursor = async (cursor, action) => {
    const link = cursor.data.getIn(['links', action]);
    const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
    return {
      entries: entries.filter(({ type }) => type === 'blob').reverse(),
      cursor: this.reverseCursor(newCursor),
    };
  };

  // Walk every page of the repository tree and collect all blob entries.
  listAllFiles = async path => {
    const entries = [];
    let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
      url: `${this.repoURL}/repository/tree`,
      // Get the maximum number of entries per page
      params: { path, ref: this.branch, per_page: 100 },
    });
    entries.push(...initialEntries);
    while (cursor && cursor.actions.has('next')) {
      const link = cursor.data.getIn(['links', 'next']);
      const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
      entries.push(...newEntries);
      cursor = newCursor;
    }
    return entries.filter(({ type }) => type === 'blob');
  };

  toBase64 = str => Promise.resolve(Base64.encode(str));
  fromBase64 = str => Base64.decode(str);

  /**
   * Create or update a single file via the commits API (POST
   * /repository/commits with one create/update action).
   *
   * @param {Object} item - file descriptor with `path`, `raw`, and optionally
   *   its own `toBase64` implementation.
   * @param {Object} options
   * @param {string} options.commitMessage
   * @param {boolean} [options.updateFile=false] - update vs. create.
   * @param {string} [options.branch=this.branch]
   * @param {Object} [options.author=this.commitAuthor] - { name, email }.
   * @returns {Promise<Object>} the item, flagged `uploaded: true`.
   */
  uploadAndCommit = async (
    item,
    { commitMessage, updateFile = false, branch = this.branch, author = this.commitAuthor },
  ) => {
    // Let the item supply its own base64 encoding; fall back to encoding raw.
    const content = await result(item, 'toBase64', partial(this.toBase64, item.raw));
    // GitLab file paths must not carry a leading slash.
    const file_path = item.path.replace(/^\//, '');
    const action = updateFile ? 'update' : 'create';
    const encoding = 'base64';

    const commitParams = {
      branch,
      commit_message: commitMessage,
      actions: [{ action, file_path, content, encoding }],
    };
    if (author) {
      const { name, email } = author;
      commitParams.author_name = name;
      commitParams.author_email = email;
    }

    await this.request({
      url: `${this.repoURL}/repository/commits`,
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(commitParams),
    });

    return { ...item, uploaded: true };
  };

  // Commit each file in parallel; newEntry === false means the files exist.
  persistFiles = (files, { commitMessage, newEntry }) =>
    Promise.all(
      files.map(file =>
        this.uploadAndCommit(file, { commitMessage, updateFile: newEntry === false }),
      ),
    );

  // Delete a single file on the given (or default) branch.
  deleteFile = (path, commit_message, options = {}) => {
    const branch = options.branch || this.branch;
    const commitParams = { commit_message, branch };
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      commitParams.author_name = name;
      commitParams.author_email = email;
    }
    return flow([
      unsentRequest.withMethod('DELETE'),
      // TODO: only send author params if they are defined.
      unsentRequest.withParams(commitParams),
      this.request,
    ])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
  };
}