Feat: editorial workflow bitbucket gitlab (#3014)
* refactor: typescript the backends * feat: support multiple files upload for GitLab and BitBucket * fix: load entry media files from media folder or UI state * chore: cleanup log message * chore: code cleanup * refactor: typescript the test backend * refactor: cleanup getEntry unused variables * refactor: moved shared backend code to lib util * chore: rename files to preserve history * fix: bind readFile method to API classes * test(e2e): switch to chrome in cypress tests * refactor: extract common api methods * refactor: remove most of immutable js usage from backends * feat(backend-gitlab): initial editorial workflow support * feat(backend-gitlab): implement missing workflow methods * chore: fix lint error * feat(backend-gitlab): support files deletion * test(e2e): add gitlab cypress tests * feat(backend-bitbucket): implement missing editorial workflow methods * test(e2e): add BitBucket backend e2e tests * build: update node version to 12 on netlify builds * fix(backend-bitbucket): extract BitBucket avatar url * test: fix git-gateway AuthenticationPage test * test(e2e): fix some backend tests * test(e2e): fix tests * test(e2e): add git-gateway editorial workflow test * chore: code cleanup * test(e2e): revert back to electron * test(e2e): add non editorial workflow tests * fix(git-gateway-gitlab): don't call unpublishedEntry in simple workflow gitlab git-gateway doesn't support editorial workflow APIs yet. This change makes sure not to call them in simple workflow * refactor(backend-bitbucket): switch to diffstat API instead of raw diff * chore: fix test * test(e2e): add more git-gateway tests * fix: post rebase typescript fixes * test(e2e): fix tests * fix: fix parsing of content key and add tests * refactor: rename test file * test(unit): add getStatuses unit tests * chore: update cypress * docs: update beta docs
This commit is contained in:
committed by
Shawn Erquhart
parent
4ff5bc2ee0
commit
6f221ab3c1
@ -1,283 +0,0 @@
|
||||
import {
|
||||
localForage,
|
||||
parseLinkHeader,
|
||||
unsentRequest,
|
||||
then,
|
||||
APIError,
|
||||
Cursor,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import { Base64 } from 'js-base64';
|
||||
import { fromJS, Map } from 'immutable';
|
||||
import { flow, partial, result } from 'lodash';
|
||||
|
||||
export default class API {
|
||||
constructor(config) {
|
||||
this.api_root = config.api_root || 'https://gitlab.com/api/v4';
|
||||
this.token = config.token || false;
|
||||
this.branch = config.branch || 'master';
|
||||
this.repo = config.repo || '';
|
||||
this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
|
||||
}
|
||||
|
||||
withAuthorizationHeaders = req =>
|
||||
unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${this.token}` } : {}, req);
|
||||
|
||||
buildRequest = req =>
|
||||
flow([
|
||||
unsentRequest.withRoot(this.api_root),
|
||||
this.withAuthorizationHeaders,
|
||||
unsentRequest.withTimestamp,
|
||||
])(req);
|
||||
|
||||
request = async req =>
|
||||
flow([
|
||||
this.buildRequest,
|
||||
unsentRequest.performRequest,
|
||||
p => p.catch(err => Promise.reject(new APIError(err.message, null, 'GitLab'))),
|
||||
])(req);
|
||||
|
||||
catchFormatErrors = (format, formatter) => res => {
|
||||
try {
|
||||
return formatter(res);
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Response cannot be parsed into the expected format (${format}): ${err.message}`,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
responseFormats = fromJS({
|
||||
json: async res => {
|
||||
const contentType = res.headers.get('Content-Type');
|
||||
if (contentType !== 'application/json' && contentType !== 'text/json') {
|
||||
throw new Error(`${contentType} is not a valid JSON Content-Type`);
|
||||
}
|
||||
return res.json();
|
||||
},
|
||||
text: async res => res.text(),
|
||||
blob: async res => res.blob(),
|
||||
}).mapEntries(([format, formatter]) => [format, this.catchFormatErrors(format, formatter)]);
|
||||
|
||||
parseResponse = async (res, { expectingOk = true, expectingFormat = 'text' }) => {
|
||||
let body;
|
||||
try {
|
||||
const formatter = this.responseFormats.get(expectingFormat, false);
|
||||
if (!formatter) {
|
||||
throw new Error(`${expectingFormat} is not a supported response format.`);
|
||||
}
|
||||
body = await formatter(res);
|
||||
} catch (err) {
|
||||
throw new APIError(err.message, res.status, 'GitLab');
|
||||
}
|
||||
if (expectingOk && !res.ok) {
|
||||
const isJSON = expectingFormat === 'json';
|
||||
throw new APIError(isJSON && body.message ? body.message : body, res.status, 'GitLab');
|
||||
}
|
||||
return body;
|
||||
};
|
||||
|
||||
responseToJSON = res => this.parseResponse(res, { expectingFormat: 'json' });
|
||||
responseToBlob = res => this.parseResponse(res, { expectingFormat: 'blob' });
|
||||
responseToText = res => this.parseResponse(res, { expectingFormat: 'text' });
|
||||
requestJSON = req => this.request(req).then(this.responseToJSON);
|
||||
requestText = req => this.request(req).then(this.responseToText);
|
||||
|
||||
user = () => this.requestJSON('/user');
|
||||
|
||||
WRITE_ACCESS = 30;
|
||||
hasWriteAccess = () =>
|
||||
this.requestJSON(this.repoURL).then(({ permissions }) => {
|
||||
const { project_access, group_access } = permissions;
|
||||
if (project_access && project_access.access_level >= this.WRITE_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
if (group_access && group_access.access_level >= this.WRITE_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
readFile = async (path, sha, { ref = this.branch, parseText = true } = {}) => {
|
||||
const cacheKey = parseText ? `gl.${sha}` : `gl.${sha}.blob`;
|
||||
const cachedFile = sha ? await localForage.getItem(cacheKey) : null;
|
||||
if (cachedFile) {
|
||||
return cachedFile;
|
||||
}
|
||||
const result = await this.request({
|
||||
url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
|
||||
params: { ref },
|
||||
cache: 'no-store',
|
||||
}).then(parseText ? this.responseToText : this.responseToBlob);
|
||||
if (sha) {
|
||||
localForage.setItem(cacheKey, result);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
getCursorFromHeaders = headers => {
|
||||
// indices and page counts are assumed to be zero-based, but the
|
||||
// indices and page counts returned from GitLab are one-based
|
||||
const index = parseInt(headers.get('X-Page'), 10) - 1;
|
||||
const pageCount = parseInt(headers.get('X-Total-Pages'), 10) - 1;
|
||||
const pageSize = parseInt(headers.get('X-Per-Page'), 10);
|
||||
const count = parseInt(headers.get('X-Total'), 10);
|
||||
const links = parseLinkHeader(headers.get('Link'));
|
||||
const actions = Map(links)
|
||||
.keySeq()
|
||||
.flatMap(key =>
|
||||
(key === 'prev' && index > 0) ||
|
||||
(key === 'next' && index < pageCount) ||
|
||||
(key === 'first' && index > 0) ||
|
||||
(key === 'last' && index < pageCount)
|
||||
? [key]
|
||||
: [],
|
||||
);
|
||||
return Cursor.create({
|
||||
actions,
|
||||
meta: { index, count, pageSize, pageCount },
|
||||
data: { links },
|
||||
});
|
||||
};
|
||||
|
||||
getCursor = ({ headers }) => this.getCursorFromHeaders(headers);
|
||||
|
||||
// Gets a cursor without retrieving the entries by using a HEAD
|
||||
// request
|
||||
fetchCursor = req =>
|
||||
flow([unsentRequest.withMethod('HEAD'), this.request, then(this.getCursor)])(req);
|
||||
fetchCursorAndEntries = req =>
|
||||
flow([
|
||||
unsentRequest.withMethod('GET'),
|
||||
this.request,
|
||||
p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
|
||||
then(([cursor, entries]) => ({ cursor, entries })),
|
||||
])(req);
|
||||
fetchRelativeCursor = async (cursor, action) => this.fetchCursor(cursor.data.links[action]);
|
||||
|
||||
reversableActions = Map({
|
||||
first: 'last',
|
||||
last: 'first',
|
||||
next: 'prev',
|
||||
prev: 'next',
|
||||
});
|
||||
|
||||
reverseCursor = cursor => {
|
||||
const pageCount = cursor.meta.get('pageCount', 0);
|
||||
const currentIndex = cursor.meta.get('index', 0);
|
||||
const newIndex = pageCount - currentIndex;
|
||||
|
||||
const links = cursor.data.get('links', Map());
|
||||
const reversedLinks = links.mapEntries(([k, v]) => [this.reversableActions.get(k) || k, v]);
|
||||
|
||||
const reversedActions = cursor.actions.map(
|
||||
action => this.reversableActions.get(action) || action,
|
||||
);
|
||||
|
||||
return cursor.updateStore(store =>
|
||||
store
|
||||
.setIn(['meta', 'index'], newIndex)
|
||||
.setIn(['data', 'links'], reversedLinks)
|
||||
.set('actions', reversedActions),
|
||||
);
|
||||
};
|
||||
|
||||
// The exported listFiles and traverseCursor reverse the direction
|
||||
// of the cursors, since GitLab's pagination sorts the opposite way
|
||||
// we want to sort by default (it sorts by filename _descending_,
|
||||
// while the CMS defaults to sorting by filename _ascending_, at
|
||||
// least in the current GitHub backend). This should eventually be
|
||||
// refactored.
|
||||
listFiles = async (path, recursive = false) => {
|
||||
const firstPageCursor = await this.fetchCursor({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
params: { path, ref: this.branch, recursive },
|
||||
});
|
||||
const lastPageLink = firstPageCursor.data.getIn(['links', 'last']);
|
||||
const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink);
|
||||
return {
|
||||
files: entries.filter(({ type }) => type === 'blob').reverse(),
|
||||
cursor: this.reverseCursor(cursor),
|
||||
};
|
||||
};
|
||||
|
||||
traverseCursor = async (cursor, action) => {
|
||||
const link = cursor.data.getIn(['links', action]);
|
||||
const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
|
||||
return {
|
||||
entries: entries.filter(({ type }) => type === 'blob').reverse(),
|
||||
cursor: this.reverseCursor(newCursor),
|
||||
};
|
||||
};
|
||||
|
||||
listAllFiles = async (path, recursive = false) => {
|
||||
const entries = [];
|
||||
let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
// Get the maximum number of entries per page
|
||||
params: { path, ref: this.branch, per_page: 100, recursive },
|
||||
});
|
||||
entries.push(...initialEntries);
|
||||
while (cursor && cursor.actions.has('next')) {
|
||||
const link = cursor.data.getIn(['links', 'next']);
|
||||
const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
|
||||
entries.push(...newEntries);
|
||||
cursor = newCursor;
|
||||
}
|
||||
return entries.filter(({ type }) => type === 'blob');
|
||||
};
|
||||
|
||||
toBase64 = str => Promise.resolve(Base64.encode(str));
|
||||
fromBase64 = str => Base64.decode(str);
|
||||
uploadAndCommit = async (
|
||||
item,
|
||||
{ commitMessage, updateFile = false, branch = this.branch, author = this.commitAuthor },
|
||||
) => {
|
||||
const content = await result(item, 'toBase64', partial(this.toBase64, item.raw));
|
||||
const file_path = item.path.replace(/^\//, '');
|
||||
const action = updateFile ? 'update' : 'create';
|
||||
const encoding = 'base64';
|
||||
|
||||
const commitParams = {
|
||||
branch,
|
||||
commit_message: commitMessage,
|
||||
actions: [{ action, file_path, content, encoding }],
|
||||
};
|
||||
if (author) {
|
||||
const { name, email } = author;
|
||||
commitParams.author_name = name;
|
||||
commitParams.author_email = email;
|
||||
}
|
||||
|
||||
const response = await this.requestJSON({
|
||||
url: `${this.repoURL}/repository/commits`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(commitParams),
|
||||
});
|
||||
|
||||
return { ...item, sha: response.id };
|
||||
};
|
||||
|
||||
persistFiles = (files, { commitMessage, newEntry }) =>
|
||||
Promise.all(
|
||||
files.map(file =>
|
||||
this.uploadAndCommit(file, { commitMessage, updateFile: newEntry === false }),
|
||||
),
|
||||
);
|
||||
|
||||
deleteFile = (path, commit_message, options = {}) => {
|
||||
const branch = options.branch || this.branch;
|
||||
const commitParams = { commit_message, branch };
|
||||
if (this.commitAuthor) {
|
||||
const { name, email } = this.commitAuthor;
|
||||
commitParams.author_name = name;
|
||||
commitParams.author_email = email;
|
||||
}
|
||||
return flow([
|
||||
unsentRequest.withMethod('DELETE'),
|
||||
// TODO: only send author params if they are defined.
|
||||
unsentRequest.withParams(commitParams),
|
||||
this.request,
|
||||
])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
|
||||
};
|
||||
}
|
708
packages/netlify-cms-backend-gitlab/src/API.ts
Normal file
708
packages/netlify-cms-backend-gitlab/src/API.ts
Normal file
@ -0,0 +1,708 @@
|
||||
import {
|
||||
localForage,
|
||||
parseLinkHeader,
|
||||
unsentRequest,
|
||||
then,
|
||||
APIError,
|
||||
Cursor,
|
||||
ApiRequest,
|
||||
Entry,
|
||||
AssetProxy,
|
||||
PersistOptions,
|
||||
readFile,
|
||||
CMS_BRANCH_PREFIX,
|
||||
generateContentKey,
|
||||
isCMSLabel,
|
||||
EditorialWorkflowError,
|
||||
labelToStatus,
|
||||
statusToLabel,
|
||||
DEFAULT_PR_BODY,
|
||||
MERGE_COMMIT_MESSAGE,
|
||||
responseParser,
|
||||
PreviewState,
|
||||
parseContentKey,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import { Base64 } from 'js-base64';
|
||||
import { Map, Set } from 'immutable';
|
||||
import { flow, partial, result, trimStart } from 'lodash';
|
||||
import { CursorStore } from 'netlify-cms-lib-util/src/Cursor';
|
||||
|
||||
export const API_NAME = 'GitLab';
|
||||
|
||||
export interface Config {
|
||||
apiRoot?: string;
|
||||
token?: string;
|
||||
branch?: string;
|
||||
repo?: string;
|
||||
squashMerges: boolean;
|
||||
initialWorkflowStatus: string;
|
||||
}
|
||||
|
||||
export interface CommitAuthor {
|
||||
name: string;
|
||||
email: string;
|
||||
}
|
||||
|
||||
enum CommitAction {
|
||||
CREATE = 'create',
|
||||
DELETE = 'delete',
|
||||
MOVE = 'move',
|
||||
UPDATE = 'update',
|
||||
}
|
||||
|
||||
type CommitItem = {
|
||||
base64Content?: string;
|
||||
path: string;
|
||||
action: CommitAction;
|
||||
};
|
||||
|
||||
interface CommitsParams {
|
||||
commit_message: string;
|
||||
branch: string;
|
||||
author_name?: string;
|
||||
author_email?: string;
|
||||
actions?: {
|
||||
action: string;
|
||||
file_path: string;
|
||||
content?: string;
|
||||
encoding?: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
type GitLabCommitDiff = {
|
||||
diff: string;
|
||||
new_path: string;
|
||||
old_path: string;
|
||||
};
|
||||
|
||||
enum GitLabCommitStatuses {
|
||||
Pending = 'pending',
|
||||
Running = 'running',
|
||||
Success = 'success',
|
||||
Failed = 'failed',
|
||||
Canceled = 'canceled',
|
||||
}
|
||||
|
||||
type GitLabCommitStatus = {
|
||||
status: GitLabCommitStatuses;
|
||||
name: string;
|
||||
author: {
|
||||
username: string;
|
||||
name: string;
|
||||
};
|
||||
description: null;
|
||||
sha: string;
|
||||
ref: string;
|
||||
target_url: string;
|
||||
};
|
||||
|
||||
type GitLabMergeRebase = {
|
||||
rebase_in_progress: boolean;
|
||||
merge_error: string;
|
||||
};
|
||||
|
||||
type GitLabMergeRequest = {
|
||||
id: number;
|
||||
iid: number;
|
||||
title: string;
|
||||
description: string;
|
||||
state: string;
|
||||
merged_by: {
|
||||
name: string;
|
||||
username: string;
|
||||
};
|
||||
merged_at: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
target_branch: string;
|
||||
source_branch: string;
|
||||
author: {
|
||||
name: string;
|
||||
username: string;
|
||||
};
|
||||
labels: string[];
|
||||
sha: string;
|
||||
};
|
||||
|
||||
export default class API {
|
||||
apiRoot: string;
|
||||
token: string | boolean;
|
||||
branch: string;
|
||||
useOpenAuthoring?: boolean;
|
||||
repo: string;
|
||||
repoURL: string;
|
||||
commitAuthor?: CommitAuthor;
|
||||
squashMerges: boolean;
|
||||
initialWorkflowStatus: string;
|
||||
|
||||
/**
 * @param config backend configuration; `squashMerges` and
 * `initialWorkflowStatus` drive the editorial-workflow behavior.
 */
constructor(config: Config) {
  // Default to gitlab.com's public API when no self-hosted root is given.
  this.apiRoot = config.apiRoot || 'https://gitlab.com/api/v4';
  this.token = config.token || false;
  this.branch = config.branch || 'master';
  this.repo = config.repo || '';
  // GitLab accepts a URL-encoded "namespace/project" path as the project id.
  this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
  this.squashMerges = config.squashMerges;
  this.initialWorkflowStatus = config.initialWorkflowStatus;
}
|
||||
|
||||
withAuthorizationHeaders = (req: ApiRequest) =>
|
||||
unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${this.token}` } : {}, req);
|
||||
|
||||
buildRequest = (req: ApiRequest) =>
|
||||
flow([
|
||||
unsentRequest.withRoot(this.apiRoot),
|
||||
this.withAuthorizationHeaders,
|
||||
unsentRequest.withTimestamp,
|
||||
])(req);
|
||||
|
||||
// Performs an API request (root + auth headers + timestamp applied by
// buildRequest), normalizing any failure into an APIError attributed to
// the GitLab backend.
request = async (req: ApiRequest): Promise<Response> =>
  flow([
    this.buildRequest,
    unsentRequest.performRequest,
    p => p.catch((err: Error) => Promise.reject(new APIError(err.message, null, API_NAME))),
  ])(req);
|
||||
|
||||
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
|
||||
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
|
||||
responseToText = responseParser({ format: 'text', apiName: API_NAME });
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
|
||||
requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;
|
||||
|
||||
user = () => this.requestJSON('/user');
|
||||
|
||||
WRITE_ACCESS = 30;
|
||||
hasWriteAccess = () =>
|
||||
this.requestJSON(this.repoURL).then(({ permissions }) => {
|
||||
const { project_access: projectAccess, group_access: groupAccess } = permissions;
|
||||
if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
if (groupAccess && groupAccess.access_level >= this.WRITE_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
readFile = async (
|
||||
path: string,
|
||||
sha?: string | null,
|
||||
{ parseText = true, branch = this.branch } = {},
|
||||
): Promise<string | Blob> => {
|
||||
const fetchContent = async () => {
|
||||
const content = await this.request({
|
||||
url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
|
||||
params: { ref: branch },
|
||||
cache: 'no-store',
|
||||
}).then<Blob | string>(parseText ? this.responseToText : this.responseToBlob);
|
||||
return content;
|
||||
};
|
||||
|
||||
const content = await readFile(sha, fetchContent, localForage, parseText);
|
||||
return content;
|
||||
};
|
||||
|
||||
// Translates GitLab's pagination response headers into a lib-util Cursor
// the CMS can use to page through results.
getCursorFromHeaders = (headers: Headers) => {
  // indices and page counts are assumed to be zero-based, but the
  // indices and page counts returned from GitLab are one-based
  const index = parseInt(headers.get('X-Page') as string, 10) - 1;
  const pageCount = parseInt(headers.get('X-Total-Pages') as string, 10) - 1;
  const pageSize = parseInt(headers.get('X-Per-Page') as string, 10);
  const count = parseInt(headers.get('X-Total') as string, 10);
  // Link header carrying first/last/next/prev relations.
  // NOTE(review): the `as string` casts assume GitLab always sends these
  // headers; a missing one would produce NaN indices — confirm upstream.
  const links = parseLinkHeader(headers.get('Link') as string);
  // Only advertise actions that are actually possible from the current
  // page (e.g. no 'prev'/'first' while on the first page).
  const actions = Map(links)
    .keySeq()
    .flatMap(key =>
      (key === 'prev' && index > 0) ||
      (key === 'next' && index < pageCount) ||
      (key === 'first' && index > 0) ||
      (key === 'last' && index < pageCount)
        ? [key]
        : [],
    );
  return Cursor.create({
    actions,
    meta: { index, count, pageSize, pageCount },
    data: { links },
  });
};
|
||||
|
||||
getCursor = ({ headers }: { headers: Headers }) => this.getCursorFromHeaders(headers);
|
||||
|
||||
// Gets a cursor without retrieving the entries by using a HEAD
|
||||
// request
|
||||
fetchCursor = (req: ApiRequest) =>
|
||||
flow([unsentRequest.withMethod('HEAD'), this.request, then(this.getCursor)])(req);
|
||||
|
||||
fetchCursorAndEntries = (
|
||||
req: ApiRequest,
|
||||
): Promise<{
|
||||
entries: { id: string; type: string; path: string; name: string }[];
|
||||
cursor: Cursor;
|
||||
}> =>
|
||||
flow([
|
||||
unsentRequest.withMethod('GET'),
|
||||
this.request,
|
||||
p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
|
||||
then(([cursor, entries]: [Cursor, {}[]]) => ({ cursor, entries })),
|
||||
])(req);
|
||||
|
||||
reversableActions = Map({
|
||||
first: 'last',
|
||||
last: 'first',
|
||||
next: 'prev',
|
||||
prev: 'next',
|
||||
});
|
||||
|
||||
// Produces a cursor pointing at the "mirror image" page: the page index
// is reflected and the prev/next and first/last relations are swapped,
// so callers can paginate in the opposite direction to GitLab's native
// (descending) order.
reverseCursor = (cursor: Cursor) => {
  // Reflect the page index: page i from the end becomes page i from the start.
  const pageCount = cursor.meta!.get('pageCount', 0) as number;
  const currentIndex = cursor.meta!.get('index', 0) as number;
  const newIndex = pageCount - currentIndex;

  // Swap the link relations (prev<->next, first<->last); unknown keys pass through.
  const links = cursor.data!.get('links', Map()) as Map<string, string>;
  const reversedLinks = links.mapEntries(tuple => {
    const [k, v] = tuple as string[];
    return [this.reversableActions.get(k) || k, v];
  });

  // Swap the advertised actions the same way.
  const reversedActions = cursor.actions!.map(
    action => this.reversableActions.get(action as string) || (action as string),
  );

  return cursor.updateStore((store: CursorStore) =>
    store!
      .setIn(['meta', 'index'], newIndex)
      .setIn(['data', 'links'], reversedLinks)
      .set('actions', (reversedActions as unknown) as Set<string>),
  );
};
|
||||
|
||||
// The exported listFiles and traverseCursor reverse the direction
|
||||
// of the cursors, since GitLab's pagination sorts the opposite way
|
||||
// we want to sort by default (it sorts by filename _descending_,
|
||||
// while the CMS defaults to sorting by filename _ascending_, at
|
||||
// least in the current GitHub backend). This should eventually be
|
||||
// refactored.
|
||||
listFiles = async (path: string, recursive = false) => {
|
||||
const firstPageCursor = await this.fetchCursor({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
params: { path, ref: this.branch, recursive },
|
||||
});
|
||||
const lastPageLink = firstPageCursor.data.getIn(['links', 'last']);
|
||||
const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink);
|
||||
return {
|
||||
files: entries.filter(({ type }) => type === 'blob').reverse(),
|
||||
cursor: this.reverseCursor(cursor),
|
||||
};
|
||||
};
|
||||
|
||||
traverseCursor = async (cursor: Cursor, action: string) => {
|
||||
const link = cursor.data!.getIn(['links', action]);
|
||||
const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
|
||||
return {
|
||||
entries: entries.filter(({ type }) => type === 'blob').reverse(),
|
||||
cursor: this.reverseCursor(newCursor),
|
||||
};
|
||||
};
|
||||
|
||||
listAllFiles = async (path: string, recursive = false) => {
|
||||
const entries = [];
|
||||
// eslint-disable-next-line prefer-const
|
||||
let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
// Get the maximum number of entries per page
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
params: { path, ref: this.branch, per_page: 100, recursive },
|
||||
});
|
||||
entries.push(...initialEntries);
|
||||
while (cursor && cursor.actions!.has('next')) {
|
||||
const link = cursor.data!.getIn(['links', 'next']);
|
||||
const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
|
||||
entries.push(...newEntries);
|
||||
cursor = newCursor;
|
||||
}
|
||||
return entries.filter(({ type }) => type === 'blob');
|
||||
};
|
||||
|
||||
toBase64 = (str: string) => Promise.resolve(Base64.encode(str));
|
||||
fromBase64 = (str: string) => Base64.decode(str);
|
||||
|
||||
/**
 * Creates a single commit on `branch` containing all the given items via
 * GitLab's commits API. When `newBranch` is true, `branch` is created off
 * the backend branch as part of the same commit (via `start_branch`).
 */
uploadAndCommit(
  items: CommitItem[],
  { commitMessage = '', branch = this.branch, newBranch = false },
) {
  const actions = items.map(item => ({
    action: item.action,
    // eslint-disable-next-line @typescript-eslint/camelcase
    file_path: item.path,
    // Content is only present for create/update items; delete items omit it.
    ...(item.base64Content ? { content: item.base64Content, encoding: 'base64' } : {}),
  }));

  const commitParams: CommitsParams = {
    branch,
    // eslint-disable-next-line @typescript-eslint/camelcase
    commit_message: commitMessage,
    actions,
    // eslint-disable-next-line @typescript-eslint/camelcase
    ...(newBranch ? { start_branch: this.branch } : {}),
  };
  // Attribute the commit to the configured author when one is set.
  if (this.commitAuthor) {
    const { name, email } = this.commitAuthor;
    // eslint-disable-next-line @typescript-eslint/camelcase
    commitParams.author_name = name;
    // eslint-disable-next-line @typescript-eslint/camelcase
    commitParams.author_email = email;
  }

  return this.requestJSON({
    url: `${this.repoURL}/repository/commits`,
    method: 'POST',
    headers: { 'Content-Type': 'application/json; charset=utf-8' },
    body: JSON.stringify(commitParams),
  });
}
|
||||
|
||||
/**
 * Converts entry/asset files into GitLab commit items: base64-encodes
 * each file's content and picks UPDATE vs CREATE depending on whether
 * the file already exists on `branch`.
 */
async getCommitItems(files: (Entry | AssetProxy)[], branch: string) {
  const toCommitItem = async (file: Entry | AssetProxy) => {
    // Encode the content and probe for an existing file concurrently.
    const [base64Content, fileExists] = await Promise.all([
      result(file, 'toBase64', partial(this.toBase64, (file as Entry).raw)),
      this.isFileExists(file.path, branch),
    ]);
    return {
      action: fileExists ? CommitAction.UPDATE : CommitAction.CREATE,
      base64Content,
      path: trimStart(file.path, '/'),
    };
  };

  const items = await Promise.all(files.map(toCommitItem));
  return items as CommitItem[];
}
|
||||
|
||||
/**
 * Persists an entry and/or media files. Routes through the editorial
 * workflow when enabled; otherwise commits everything straight to the
 * backend branch in a single commit.
 */
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
  const files = entry ? [entry, ...mediaFiles] : mediaFiles;
  if (options.useWorkflow) {
    return this.editorialWorkflowGit(files, entry as Entry, options);
  }
  const items = await this.getCommitItems(files, this.branch);
  return this.uploadAndCommit(items, {
    commitMessage: options.commitMessage,
  });
}
|
||||
|
||||
deleteFile = (path: string, commitMessage: string) => {
|
||||
const branch = this.branch;
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
const commitParams: CommitsParams = { commit_message: commitMessage, branch };
|
||||
if (this.commitAuthor) {
|
||||
const { name, email } = this.commitAuthor;
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
commitParams.author_name = name;
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
commitParams.author_email = email;
|
||||
}
|
||||
return flow([
|
||||
unsentRequest.withMethod('DELETE'),
|
||||
// TODO: only send author params if they are defined.
|
||||
unsentRequest.withParams(commitParams),
|
||||
this.request,
|
||||
])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
|
||||
};
|
||||
|
||||
// Delegates to the shared lib-util implementation so content keys stay
// consistent across backends.
generateContentKey(collectionName: string, slug: string) {
  return generateContentKey(collectionName, slug);
}
|
||||
|
||||
// Inverse of branchFromContentKey: strips the CMS branch prefix
// (`<CMS_BRANCH_PREFIX>/`) from a workflow branch name.
contentKeyFromBranch(branch: string) {
  const prefix = `${CMS_BRANCH_PREFIX}/`;
  return branch.substring(prefix.length);
}
|
||||
|
||||
// Builds the editorial-workflow branch name for a content key.
branchFromContentKey(contentKey: string) {
  return [CMS_BRANCH_PREFIX, contentKey].join('/');
}
|
||||
|
||||
/**
 * Lists open merge requests targeting the backend branch and keeps only
 * those created by the CMS (CMS-prefixed source branch plus a CMS label).
 * @param sourceBranch when given, restricts the search to that branch.
 */
async getMergeRequests(sourceBranch?: string) {
  /* eslint-disable @typescript-eslint/camelcase */
  const params = {
    state: 'opened',
    labels: 'Any',
    target_branch: this.branch,
    ...(sourceBranch ? { source_branch: sourceBranch } : {}),
  };
  /* eslint-enable @typescript-eslint/camelcase */
  const mergeRequests: GitLabMergeRequest[] = await this.requestJSON({
    url: `${this.repoURL}/merge_requests`,
    params,
  });

  return mergeRequests.filter(
    mr => mr.source_branch.startsWith(CMS_BRANCH_PREFIX) && mr.labels.some(isCMSLabel),
  );
}
|
||||
|
||||
/**
 * Returns the source branch of every open CMS merge request — each one
 * corresponds to a single unpublished (editorial-workflow) entry.
 */
async listUnpublishedBranches() {
  console.log(
    '%c Checking for Unpublished entries',
    'line-height: 30px;text-align: center;font-weight: bold',
  );

  const mergeRequests = await this.getMergeRequests();
  return mergeRequests.map(mr => mr.source_branch);
}
|
||||
|
||||
/**
 * Checks whether `path` exists on `branch` with a HEAD request.
 * A 404 maps to false; any other failure is re-thrown.
 */
async isFileExists(path: string, branch: string) {
  try {
    await this.requestText({
      method: 'HEAD',
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
      params: { ref: branch },
      cache: 'no-store',
    });
    return true;
  } catch (error) {
    if (error instanceof APIError && error.status === 404) {
      return false;
    }
    throw error;
  }
}
|
||||
|
||||
/**
 * Returns the open CMS merge request for an entry branch, or throws an
 * EditorialWorkflowError when none exists.
 */
async getBranchMergeRequest(branch: string) {
  const [mergeRequest] = await this.getMergeRequests(branch);
  if (!mergeRequest) {
    throw new EditorialWorkflowError('content is not under editorial workflow', true);
  }
  return mergeRequest;
}
|
||||
|
||||
/**
 * Fetches the commit diffs between the backend branch and `to` using
 * GitLab's repository compare API.
 */
async getDifferences(to: string) {
  const response: { diffs: GitLabCommitDiff[] } = await this.requestJSON({
    url: `${this.repoURL}/repository/compare`,
    params: { from: this.branch, to },
  });
  return response.diffs;
}
|
||||
|
||||
/**
 * Recovers editorial-workflow metadata for an unpublished entry from its
 * merge request: the entry file path, its workflow status (from the CMS
 * label) and the media files committed alongside it.
 */
async retrieveMetadata(contentKey: string) {
  const { collection, slug } = parseContentKey(contentKey);
  const branch = this.branchFromContentKey(contentKey);
  const mergeRequest = await this.getBranchMergeRequest(branch);
  const diff = await this.getDifferences(mergeRequest.sha);
  // The entry file is taken to be the diff entry whose path contains the
  // slug. NOTE(review): the `as string` cast hides the case where no diff
  // matches — `path` would then be undefined; confirm callers tolerate it.
  const path = diff.find(d => d.old_path.includes(slug))?.old_path as string;
  // Every other changed file is treated as an entry media file. The
  // original wrapped this in `await Promise.all(...)`, but the mapped
  // values are plain objects, not promises — a plain map is equivalent.
  // TODO: get real file id
  const mediaFiles = diff
    .filter(d => d.old_path !== path)
    .map(d => ({ path: d.new_path, id: null }));
  const label = mergeRequest.labels.find(isCMSLabel) as string;
  const status = labelToStatus(label);
  return { branch, collection, slug, path, status, mediaFiles };
}
|
||||
|
||||
/**
 * Loads an unpublished entry: metadata from its merge request plus the
 * raw file contents from the workflow branch.
 */
async readUnpublishedBranchFile(contentKey: string) {
  const metadata = await this.retrieveMetadata(contentKey);
  const { branch, collection, slug, path, status, mediaFiles } = metadata;

  // Fetch the entry contents from the workflow branch and, in parallel,
  // check whether the file already exists on the backend branch (i.e.
  // whether this entry is a modification rather than a new entry).
  const [fileData, isModification] = await Promise.all([
    this.readFile(path, null, { branch }) as Promise<string>,
    this.isFileExists(path, this.branch),
  ]);

  return {
    slug,
    metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status },
    fileData,
    isModification,
  };
}
|
||||
|
||||
/**
 * Triggers a rebase of the merge request onto its target branch and polls
 * (once per second, bounded number of tries) until GitLab reports the
 * asynchronous rebase has finished. Throws on timeout or rebase failure.
 */
async rebaseMergeRequest(mergeRequest: GitLabMergeRequest) {
  let rebase: GitLabMergeRebase = await this.requestJSON({
    method: 'PUT',
    url: `${this.repoURL}/merge_requests/${mergeRequest.iid}/rebase`,
  });

  for (let attempt = 1; rebase.rebase_in_progress; attempt++) {
    await new Promise(resolve => setTimeout(resolve, 1000));
    rebase = await this.requestJSON({
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
      params: {
        // eslint-disable-next-line @typescript-eslint/camelcase
        include_rebase_in_progress: true,
      },
    });
    if (!rebase.rebase_in_progress || attempt > 10) {
      break;
    }
  }

  if (rebase.rebase_in_progress) {
    throw new APIError('Timed out rebasing merge request', null, API_NAME);
  } else if (rebase.merge_error) {
    throw new APIError(`Rebase error: ${rebase.merge_error}`, null, API_NAME);
  }
}
|
||||
|
||||
async createMergeRequest(branch: string, commitMessage: string, status: string) {
|
||||
await this.requestJSON({
|
||||
method: 'POST',
|
||||
url: `${this.repoURL}/merge_requests`,
|
||||
params: {
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
source_branch: branch,
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
target_branch: this.branch,
|
||||
title: commitMessage,
|
||||
description: DEFAULT_PR_BODY,
|
||||
labels: statusToLabel(status),
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
remove_source_branch: true,
|
||||
squash: this.squashMerges,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
  /**
   * Commits entry and asset changes for the editorial workflow.
   *
   * First save of an entry (`options.unpublished` falsy): builds the commit
   * items against the default branch, pushes them to a new workflow branch,
   * and opens a merge request labeled with the initial workflow status.
   *
   * Subsequent saves: rebases the branch's open merge request, then commits
   * the updated files to the existing branch. Files previously changed on the
   * branch but absent from the new file set are committed as deletions, so
   * the branch diff always mirrors the entry's current state.
   */
  async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
    const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
    const branch = this.branchFromContentKey(contentKey);
    const unpublished = options.unpublished || false;
    if (!unpublished) {
      // New entry: commit items are computed against the default branch and
      // pushed to a freshly created branch.
      const items = await this.getCommitItems(files, this.branch);
      await this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
        branch,
        newBranch: true,
      });
      await this.createMergeRequest(
        branch,
        options.commitMessage,
        options.status || this.initialWorkflowStatus,
      );
    } else {
      // Existing entry: bring the branch up to date before committing.
      const mergeRequest = await this.getBranchMergeRequest(branch);
      await this.rebaseMergeRequest(mergeRequest);
      const [items, diffs] = await Promise.all([
        this.getCommitItems(files, branch),
        this.getDifferences(branch),
      ]);

      // mark files for deletion
      for (const diff of diffs) {
        if (!items.some(item => item.path === diff.new_path)) {
          items.push({ action: CommitAction.DELETE, path: diff.new_path });
        }
      }

      await this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
        branch,
      });
    }
  }
|
||||
|
||||
async updateMergeRequestLabels(mergeRequest: GitLabMergeRequest, labels: string[]) {
|
||||
await this.requestJSON({
|
||||
method: 'PUT',
|
||||
url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
|
||||
params: {
|
||||
labels: labels.join(','),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
||||
const contentKey = this.generateContentKey(collection, slug);
|
||||
const branch = this.branchFromContentKey(contentKey);
|
||||
const mergeRequest = await this.getBranchMergeRequest(branch);
|
||||
|
||||
const labels = [
|
||||
...mergeRequest.labels.filter(label => !isCMSLabel(label)),
|
||||
statusToLabel(newStatus),
|
||||
];
|
||||
await this.updateMergeRequestLabels(mergeRequest, labels);
|
||||
}
|
||||
|
||||
async mergeMergeRequest(mergeRequest: GitLabMergeRequest) {
|
||||
await this.requestJSON({
|
||||
method: 'PUT',
|
||||
url: `${this.repoURL}/merge_requests/${mergeRequest.iid}/merge`,
|
||||
params: {
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
merge_commit_message: MERGE_COMMIT_MESSAGE,
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
squash_commit_message: MERGE_COMMIT_MESSAGE,
|
||||
squash: this.squashMerges,
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
should_remove_source_branch: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async publishUnpublishedEntry(collectionName: string, slug: string) {
|
||||
const contentKey = this.generateContentKey(collectionName, slug);
|
||||
const branch = this.branchFromContentKey(contentKey);
|
||||
const mergeRequest = await this.getBranchMergeRequest(branch);
|
||||
await this.mergeMergeRequest(mergeRequest);
|
||||
}
|
||||
|
||||
async closeMergeRequest(mergeRequest: GitLabMergeRequest) {
|
||||
await this.requestJSON({
|
||||
method: 'PUT',
|
||||
url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
|
||||
params: {
|
||||
// eslint-disable-next-line @typescript-eslint/camelcase
|
||||
state_event: 'close',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async deleteBranch(branch: string) {
|
||||
await this.request({
|
||||
method: 'DELETE',
|
||||
url: `${this.repoURL}/repository/branches/${encodeURIComponent(branch)}`,
|
||||
});
|
||||
}
|
||||
|
||||
async deleteUnpublishedEntry(collectionName: string, slug: string) {
|
||||
const contentKey = this.generateContentKey(collectionName, slug);
|
||||
const branch = this.branchFromContentKey(contentKey);
|
||||
const mergeRequest = await this.getBranchMergeRequest(branch);
|
||||
await this.closeMergeRequest(mergeRequest);
|
||||
await this.deleteBranch(branch);
|
||||
}
|
||||
|
||||
  /**
   * Fetches the commit statuses (CI / deploy-preview checks) attached to the
   * merge request's head commit, scoped to `branch` via the `ref` param.
   *
   * NOTE(review): "Statues" is a typo for "Statuses", but the name is part of
   * this class's public surface (called from getStatuses and mocked directly
   * in the unit tests), so it is deliberately left unchanged here.
   */
  async getMergeRequestStatues(mergeRequest: GitLabMergeRequest, branch: string) {
    const statuses: GitLabCommitStatus[] = await this.requestJSON({
      url: `${this.repoURL}/repository/commits/${mergeRequest.sha}/statuses`,
      params: {
        ref: branch,
      },
    });
    return statuses;
  }
|
||||
|
||||
  /**
   * Returns the commit statuses for an entry's editorial-workflow branch,
   * mapped to the shape the CMS preview-status UI expects
   * (`{ context, state, target_url }`). Any GitLab status other than
   * "success" is reported as PreviewState.Other.
   */
  async getStatuses(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    const statuses: GitLabCommitStatus[] = await this.getMergeRequestStatues(mergeRequest, branch);
    // eslint-disable-next-line @typescript-eslint/camelcase
    return statuses.map(({ name, status, target_url }) => ({
      context: name,
      state: status === GitLabCommitStatuses.Success ? PreviewState.Success : PreviewState.Other,
      // eslint-disable-next-line @typescript-eslint/camelcase
      target_url,
    }));
  }
|
||||
}
|
@ -1,6 +1,5 @@
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import ImmutablePropTypes from 'react-immutable-proptypes';
|
||||
import styled from '@emotion/styled';
|
||||
import { NetlifyAuthenticator, ImplicitAuthenticator } from 'netlify-cms-lib-auth';
|
||||
import { AuthenticationPage, Icon } from 'netlify-cms-ui-default';
|
||||
@ -16,19 +15,25 @@ export default class GitLabAuthenticationPage extends React.Component {
|
||||
base_url: PropTypes.string,
|
||||
siteId: PropTypes.string,
|
||||
authEndpoint: PropTypes.string,
|
||||
config: ImmutablePropTypes.map,
|
||||
config: PropTypes.object.isRequired,
|
||||
clearHash: PropTypes.func,
|
||||
};
|
||||
|
||||
state = {};
|
||||
|
||||
componentDidMount() {
|
||||
const authType = this.props.config.getIn(['backend', 'auth_type']);
|
||||
const {
|
||||
auth_type: authType = '',
|
||||
base_url = 'https://gitlab.com',
|
||||
auth_endpoint = 'oauth/authorize',
|
||||
app_id = '',
|
||||
} = this.props.config.backend;
|
||||
|
||||
if (authType === 'implicit') {
|
||||
this.auth = new ImplicitAuthenticator({
|
||||
base_url: this.props.config.getIn(['backend', 'base_url'], 'https://gitlab.com'),
|
||||
auth_endpoint: this.props.config.getIn(['backend', 'auth_endpoint'], 'oauth/authorize'),
|
||||
app_id: this.props.config.getIn(['backend', 'app_id']),
|
||||
base_url,
|
||||
auth_endpoint,
|
||||
app_id,
|
||||
clearHash: this.props.clearHash,
|
||||
});
|
||||
// Complete implicit authentication if we were redirected back to from the provider.
|
||||
@ -69,8 +74,8 @@ export default class GitLabAuthenticationPage extends React.Component {
|
||||
onLogin={this.handleLogin}
|
||||
loginDisabled={inProgress}
|
||||
loginErrorMessage={this.state.loginError}
|
||||
logoUrl={config.get('logo_url')}
|
||||
siteUrl={config.get('site_url')}
|
||||
logoUrl={config.logo_url}
|
||||
siteUrl={config.site_url}
|
||||
renderButtonContent={() => (
|
||||
<React.Fragment>
|
||||
<LoginButtonIcon type="gitlab" /> {inProgress ? 'Logging in...' : 'Login with GitLab'}
|
||||
|
@ -0,0 +1,35 @@
|
||||
import API from '../API';
|
||||
|
||||
// Fail fast if any code path tries to hit the network: every fetch in these
// tests must be satisfied by an explicit mock instead of a real request.
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));

describe('GitLab API', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });

  test('should get preview statuses', async () => {
    const api = new API({ repo: 'repo' });

    const mr = { sha: 'sha' };
    const statuses = [
      { name: 'deploy', status: 'success', target_url: 'deploy-url' },
      // no target_url: verifies the mapping tolerates statuses without one
      { name: 'build', status: 'pending' },
    ];

    // Stub the two API calls getStatuses makes, so only its mapping logic runs.
    api.getBranchMergeRequest = jest.fn(() => Promise.resolve(mr));
    api.getMergeRequestStatues = jest.fn(() => Promise.resolve(statuses));

    const collectionName = 'posts';
    const slug = 'title';
    // "success" maps to PreviewState.Success; anything else maps to "other".
    await expect(api.getStatuses(collectionName, slug)).resolves.toEqual([
      { context: 'deploy', state: 'success', target_url: 'deploy-url' },
      { context: 'build', state: 'other' },
    ]);

    // Both helpers are called with the derived branch name "cms/<collection>/<slug>".
    expect(api.getBranchMergeRequest).toHaveBeenCalledTimes(1);
    expect(api.getBranchMergeRequest).toHaveBeenCalledWith('cms/posts/title');

    expect(api.getMergeRequestStatues).toHaveBeenCalledTimes(1);
    expect(api.getMergeRequestStatues).toHaveBeenCalledWith(mr, 'cms/posts/title');
  });
});
|
@ -1,6 +1,5 @@
|
||||
jest.mock('netlify-cms-core/src/backend');
|
||||
import { fromJS } from 'immutable';
|
||||
import { partial } from 'lodash';
|
||||
import { oneLine, stripIndent } from 'common-tags';
|
||||
import nock from 'nock';
|
||||
import { Cursor } from 'netlify-cms-lib-util';
|
||||
@ -175,7 +174,7 @@ describe('gitlab backend', () => {
|
||||
}
|
||||
|
||||
function mockApi(backend) {
|
||||
return nock(backend.implementation.api_root);
|
||||
return nock(backend.implementation.apiRoot);
|
||||
}
|
||||
|
||||
function interceptAuth(backend, { userResponse, projectResponse } = {}) {
|
||||
@ -206,7 +205,7 @@ describe('gitlab backend', () => {
|
||||
function createHeaders(backend, { basePath, path, page, perPage, pageCount, totalCount }) {
|
||||
const pageNum = parseInt(page, 10);
|
||||
const pageCountNum = parseInt(pageCount, 10);
|
||||
const url = `${backend.implementation.api_root}${basePath}`;
|
||||
const url = `${backend.implementation.apiRoot}${basePath}`;
|
||||
const link = linkPage =>
|
||||
`<${url}?id=${expectedRepo}&page=${linkPage}&path=${path}&per_page=${perPage}&recursive=false>`;
|
||||
|
||||
@ -286,18 +285,8 @@ describe('gitlab backend', () => {
|
||||
});
|
||||
}
|
||||
|
||||
it('throws if configuration requires editorial workflow', () => {
|
||||
const resolveBackendWithWorkflow = partial(resolveBackend, {
|
||||
...defaultConfig,
|
||||
publish_mode: 'editorial_workflow',
|
||||
});
|
||||
expect(resolveBackendWithWorkflow).toThrowErrorMatchingInlineSnapshot(
|
||||
`"The GitLab backend does not support the Editorial Workflow."`,
|
||||
);
|
||||
});
|
||||
|
||||
it('throws if configuration does not include repo', () => {
|
||||
expect(resolveBackend).toThrowErrorMatchingInlineSnapshot(
|
||||
expect(() => resolveBackend({ backend: {} })).toThrowErrorMatchingInlineSnapshot(
|
||||
`"The GitLab backend needs a \\"repo\\" in the backend configuration."`,
|
||||
);
|
||||
});
|
||||
@ -382,7 +371,12 @@ describe('gitlab backend', () => {
|
||||
interceptCollection(backend, collectionContentConfig);
|
||||
|
||||
const entry = await backend.getEntry(
|
||||
{ config: fromJS({}), integrations: fromJS([]), entryDraft: fromJS({}) },
|
||||
{
|
||||
config: fromJS({}),
|
||||
integrations: fromJS([]),
|
||||
entryDraft: fromJS({}),
|
||||
mediaLibrary: fromJS({}),
|
||||
},
|
||||
fromJS(collectionContentConfig),
|
||||
slug,
|
||||
);
|
||||
|
@ -1,237 +0,0 @@
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import semaphore from 'semaphore';
|
||||
import { trim } from 'lodash';
|
||||
import { stripIndent } from 'common-tags';
|
||||
import { CURSOR_COMPATIBILITY_SYMBOL, basename, getCollectionDepth } from 'netlify-cms-lib-util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import API from './API';
|
||||
|
||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||
|
||||
/**
 * Legacy GitLab backend implementation (simple workflow only).
 *
 * Receives an Immutable.js `config` (accessed via `getIn`/`get`) and an
 * options object. All repository access is delegated to the `API` client
 * created during `authenticate`.
 */
export default class GitLab {
  constructor(config, options = {}) {
    this.config = config;
    this.options = {
      proxied: false,
      API: null,
      ...options,
    };

    // This backend predates editorial-workflow support for GitLab.
    if (this.options.useWorkflow) {
      throw new Error('The GitLab backend does not support the Editorial Workflow.');
    }

    if (!this.options.proxied && config.getIn(['backend', 'repo']) == null) {
      throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
    }

    // May be pre-injected (e.g. in tests); otherwise created in authenticate().
    this.api = this.options.API || null;

    this.repo = config.getIn(['backend', 'repo'], '');
    this.branch = config.getIn(['backend', 'branch'], 'master');
    this.api_root = config.getIn(['backend', 'api_root'], 'https://gitlab.com/api/v4');
    this.token = '';
  }

  // React component rendered for the login screen.
  authComponent() {
    return AuthenticationPage;
  }

  // Re-authenticates a persisted user session.
  restoreUser(user) {
    return this.authenticate(user);
  }

  /**
   * Creates the API client from `state.token`, then verifies the user exists
   * and has write access to the repo. Throws with a friendly message when the
   * repo lookup fails or the user lacks access.
   */
  async authenticate(state) {
    this.token = state.token;
    this.api = new API({
      token: this.token,
      branch: this.branch,
      repo: this.repo,
      api_root: this.api_root,
    });
    const user = await this.api.user();
    const isCollab = await this.api.hasWriteAccess(user).catch(error => {
      error.message = stripIndent`
        Repo "${this.repo}" not found.

        Please ensure the repo information is spelled correctly.

        If the repo is private, make sure you're logged into a GitLab account with access.
      `;
      throw error;
    });

    // Unauthorized user
    if (!isCollab) {
      throw new Error('Your GitLab user account does not have access to this repo.');
    }

    // Authorized user
    return { ...user, login: user.username, token: state.token };
  }

  logout() {
    this.token = null;
    return;
  }

  getToken() {
    return Promise.resolve(this.token);
  }

  // True when `file` has the right extension and sits within `depth` folder
  // levels below `folder`.
  filterFile(folder, file, extension, depth) {
    // gitlab paths include the root folder
    const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
    return file.name.endsWith('.' + extension) && fileFolder.split('/').length <= depth;
  }

  // Lists one page of entries in the collection folder; the pagination cursor
  // is attached to the result array under CURSOR_COMPATIBILITY_SYMBOL.
  entriesByFolder(collection, extension) {
    const depth = getCollectionDepth(collection);
    const folder = collection.get('folder');
    return this.api.listFiles(folder, depth > 1).then(({ files, cursor }) =>
      this.fetchFiles(files.filter(file => this.filterFile(folder, file, extension, depth))).then(
        fetchedFiles => {
          const returnedFiles = fetchedFiles;
          returnedFiles[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
          return returnedFiles;
        },
      ),
    );
  }

  // Like entriesByFolder, but exhausts pagination and returns everything.
  allEntriesByFolder(collection, extension) {
    const depth = getCollectionDepth(collection);
    const folder = collection.get('folder');
    return this.api
      .listAllFiles(folder, depth > 1)
      .then(files =>
        this.fetchFiles(files.filter(file => this.filterFile(folder, file, extension, depth))),
      );
  }

  // Loads the explicitly-listed files of a file-based collection.
  entriesByFiles(collection) {
    const files = collection.get('files').map(collectionFile => ({
      path: collectionFile.get('file'),
      label: collectionFile.get('label'),
    }));
    return this.fetchFiles(files).then(fetchedFiles => {
      const returnedFiles = fetchedFiles;
      return returnedFiles;
    });
  }

  // Downloads file contents with bounded concurrency; files that fail to load
  // are logged and silently dropped from the result.
  fetchFiles = files => {
    const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
    const promises = [];
    files.forEach(file => {
      promises.push(
        new Promise(resolve =>
          sem.take(() =>
            this.api
              .readFile(file.path, file.id)
              .then(data => {
                resolve({ file, data });
                sem.leave();
              })
              .catch((error = true) => {
                sem.leave();
                console.error(`failed to load file from GitLab: ${file.path}`);
                resolve({ error });
              }),
          ),
        ),
      );
    });
    return Promise.all(promises).then(loadedEntries =>
      loadedEntries.filter(loadedEntry => !loadedEntry.error),
    );
  };

  // Fetches a single entry.
  getEntry(collection, slug, path) {
    return this.api.readFile(path).then(data => ({
      file: { path },
      data,
    }));
  }

  getMedia(mediaFolder = this.config.get('media_folder')) {
    return this.api.listAllFiles(mediaFolder).then(files =>
      files.map(({ id, name, path }) => {
        return { id, name, path, displayURL: { id, name, path } };
      }),
    );
  }

  async getMediaAsBlob(path, id, name) {
    let blob = await this.api.readFile(path, id, { parseText: false });
    // svgs are returned with mimetype "text/plain" by gitlab
    if (blob.type === 'text/plain' && name.match(/\.svg$/i)) {
      blob = new window.Blob([blob], { type: 'image/svg+xml' });
    }

    return blob;
  }

  // Resolves a displayURL descriptor to an object URL, throttled so at most
  // MAX_CONCURRENT_DOWNLOADS blobs are fetched at once.
  getMediaDisplayURL(displayURL) {
    this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
    const { id, name, path } = displayURL;
    return new Promise((resolve, reject) =>
      this._mediaDisplayURLSem.take(() =>
        this.getMediaAsBlob(path, id, name)
          .then(blob => URL.createObjectURL(blob))
          .then(resolve, reject)
          .finally(() => this._mediaDisplayURLSem.leave()),
      ),
    );
  }

  async getMediaFile(path) {
    const name = basename(path);
    const blob = await this.getMediaAsBlob(path, null, name);
    const fileObj = new File([blob], name);
    const url = URL.createObjectURL(fileObj);

    return {
      displayURL: url,
      path,
      name,
      size: fileObj.size,
      file: fileObj,
      url,
    };
  }

  async persistEntry(entry, mediaFiles, options = {}) {
    return this.api.persistFiles([entry], options);
  }

  // Uploads a single media file and returns the shape the media library
  // expects; `sha` from the commit becomes the asset id.
  async persistMedia(mediaFile, options = {}) {
    const [{ sha }] = await this.api.persistFiles([mediaFile], options);
    const { path, fileObj } = mediaFile;
    const url = URL.createObjectURL(fileObj);

    return {
      displayURL: url,
      path: trimStart(path, '/'),
      name: fileObj.name,
      size: fileObj.size,
      file: fileObj,
      url,
      id: sha,
    };
  }

  deleteFile(path, commitMessage, options) {
    return this.api.deleteFile(path, commitMessage, options);
  }

  // Follows a pagination cursor and loads the contents of each listed file.
  traverseCursor(cursor, action) {
    return this.api.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => ({
      entries: await Promise.all(
        entries.map(file => this.api.readFile(file.path, file.id).then(data => ({ file, data }))),
      ),
      cursor: newCursor,
    }));
  }
}
|
368
packages/netlify-cms-backend-gitlab/src/implementation.ts
Normal file
368
packages/netlify-cms-backend-gitlab/src/implementation.ts
Normal file
@ -0,0 +1,368 @@
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import semaphore, { Semaphore } from 'semaphore';
|
||||
import { trim } from 'lodash';
|
||||
import { stripIndent } from 'common-tags';
|
||||
import {
|
||||
CURSOR_COMPATIBILITY_SYMBOL,
|
||||
basename,
|
||||
Entry,
|
||||
AssetProxy,
|
||||
PersistOptions,
|
||||
Cursor,
|
||||
Implementation,
|
||||
DisplayURL,
|
||||
entriesByFolder,
|
||||
entriesByFiles,
|
||||
getMediaDisplayURL,
|
||||
getMediaAsBlob,
|
||||
User,
|
||||
Credentials,
|
||||
Config,
|
||||
ImplementationFile,
|
||||
unpublishedEntries,
|
||||
getPreviewStatus,
|
||||
UnpublishedEntryMediaFile,
|
||||
asyncLock,
|
||||
AsyncLock,
|
||||
runWithLock,
|
||||
} from 'netlify-cms-lib-util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import API, { API_NAME } from './API';
|
||||
import { getBlobSHA } from 'netlify-cms-lib-util/src';
|
||||
|
||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||
|
||||
/**
 * GitLab backend implementation with editorial-workflow support.
 *
 * Plain-object `config` replaces the old Immutable.js config. Entry listing,
 * media-URL resolution and unpublished-entry plumbing are delegated to the
 * shared helpers in netlify-cms-lib-util; repository access goes through the
 * `API` client created in `authenticate`. Workflow mutations are serialized
 * through `this.lock` because they rebase/commit/merge on shared branches.
 */
export default class GitLab implements Implementation {
  lock: AsyncLock;
  api: API | null;
  options: {
    proxied: boolean;
    API: API | null;
    initialWorkflowStatus: string;
  };
  repo: string;
  branch: string;
  apiRoot: string;
  token: string | null;
  squashMerges: boolean;
  mediaFolder: string;
  previewContext: string;

  // lazily created in getMediaDisplayURL to throttle blob downloads
  _mediaDisplayURLSem?: Semaphore;

  constructor(config: Config, options = {}) {
    this.options = {
      proxied: false,
      API: null,
      initialWorkflowStatus: '',
      ...options,
    };

    if (
      !this.options.proxied &&
      (config.backend.repo === null || config.backend.repo === undefined)
    ) {
      throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
    }

    // May be pre-injected (e.g. in tests); otherwise created in authenticate().
    this.api = this.options.API || null;

    this.repo = config.backend.repo || '';
    this.branch = config.backend.branch || 'master';
    this.apiRoot = config.backend.api_root || 'https://gitlab.com/api/v4';
    this.token = '';
    this.squashMerges = config.backend.squash_merges || false;
    this.mediaFolder = config.media_folder;
    this.previewContext = config.backend.preview_context || '';
    this.lock = asyncLock();
  }

  // React component rendered for the login screen.
  authComponent() {
    return AuthenticationPage;
  }

  // Re-authenticates a persisted user session.
  restoreUser(user: User) {
    return this.authenticate(user);
  }

  /**
   * Creates the API client from `state.token`, then verifies the user exists
   * and has write access to the repo. Throws with a friendly message when the
   * repo lookup fails or the user lacks access.
   */
  async authenticate(state: Credentials) {
    this.token = state.token as string;
    this.api = new API({
      token: this.token,
      branch: this.branch,
      repo: this.repo,
      apiRoot: this.apiRoot,
      squashMerges: this.squashMerges,
      initialWorkflowStatus: this.options.initialWorkflowStatus,
    });
    const user = await this.api.user();
    const isCollab = await this.api.hasWriteAccess().catch((error: Error) => {
      error.message = stripIndent`
        Repo "${this.repo}" not found.

        Please ensure the repo information is spelled correctly.

        If the repo is private, make sure you're logged into a GitLab account with access.
      `;
      throw error;
    });

    // Unauthorized user
    if (!isCollab) {
      throw new Error('Your GitLab user account does not have access to this repo.');
    }

    // Authorized user
    return { ...user, login: user.username, token: state.token as string };
  }

  async logout() {
    this.token = null;
    return;
  }

  getToken() {
    return Promise.resolve(this.token);
  }

  // True when `file` has the right extension and sits within `depth` folder
  // levels below `folder`.
  filterFile(
    folder: string,
    file: { path: string; name: string },
    extension: string,
    depth: number,
  ) {
    // gitlab paths include the root folder
    const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
    return file.name.endsWith('.' + extension) && fileFolder.split('/').length <= depth;
  }

  // Lists one page of entries; the pagination cursor captured from listFiles
  // is attached to the result under CURSOR_COMPATIBILITY_SYMBOL.
  async entriesByFolder(folder: string, extension: string, depth: number) {
    let cursor: Cursor;

    const listFiles = () =>
      this.api!.listFiles(folder, depth > 1).then(({ files, cursor: c }) => {
        cursor = c;
        return files.filter(file => this.filterFile(folder, file, extension, depth));
      });

    const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), API_NAME);
    // eslint-disable-next-line @typescript-eslint/ban-ts-ignore
    // @ts-ignore
    files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
    return files;
  }

  // Like entriesByFolder, but exhausts pagination and returns everything.
  async allEntriesByFolder(folder: string, extension: string, depth: number) {
    const listFiles = () =>
      this.api!.listAllFiles(folder, depth > 1).then(files =>
        files.filter(file => this.filterFile(folder, file, extension, depth)),
      );

    const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), API_NAME);
    return files;
  }

  entriesByFiles(files: ImplementationFile[]) {
    return entriesByFiles(files, this.api!.readFile.bind(this.api!), API_NAME);
  }

  // Fetches a single entry.
  getEntry(path: string) {
    return this.api!.readFile(path).then(data => ({
      file: { path, id: null },
      data: data as string,
    }));
  }

  getMedia(mediaFolder = this.mediaFolder) {
    return this.api!.listAllFiles(mediaFolder).then(files =>
      files.map(({ id, name, path }) => {
        return { id, name, path, displayURL: { id, name, path } };
      }),
    );
  }

  // Resolves a displayURL descriptor to an object URL, throttled so at most
  // MAX_CONCURRENT_DOWNLOADS blobs are fetched at once.
  getMediaDisplayURL(displayURL: DisplayURL) {
    this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
    return getMediaDisplayURL(
      displayURL,
      this.api!.readFile.bind(this.api!),
      this._mediaDisplayURLSem,
    );
  }

  // Loads a media file from the default branch; the blob's SHA becomes its id.
  async getMediaFile(path: string) {
    const name = basename(path);
    const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
    const fileObj = new File([blob], name);
    const url = URL.createObjectURL(fileObj);
    const id = await getBlobSHA(blob);

    return {
      id,
      displayURL: url,
      path,
      name,
      size: fileObj.size,
      file: fileObj,
      url,
    };
  }

  async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
    // persistEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.persistFiles(entry, mediaFiles, options),
      'Failed to acquire persist entry lock',
    );
  }

  // Uploads a single media file; the SHA is computed locally in parallel with
  // the commit and used as the asset id.
  async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
    const fileObj = mediaFile.fileObj as File;

    const [id] = await Promise.all([
      getBlobSHA(fileObj),
      this.api!.persistFiles(null, [mediaFile], options),
    ]);

    const { path } = mediaFile;
    const url = URL.createObjectURL(fileObj);

    return {
      displayURL: url,
      path: trimStart(path, '/'),
      name: fileObj!.name,
      size: fileObj!.size,
      file: fileObj,
      url,
      id,
    };
  }

  deleteFile(path: string, commitMessage: string) {
    return this.api!.deleteFile(path, commitMessage);
  }

  // Follows a pagination cursor and loads the contents of each listed file.
  traverseCursor(cursor: Cursor, action: string) {
    return this.api!.traverseCursor(cursor, action).then(
      async ({ entries, cursor: newCursor }) => ({
        entries: await Promise.all(
          entries.map(file =>
            this.api!.readFile(file.path, file.id).then(data => ({ file, data: data as string })),
          ),
        ),
        cursor: newCursor,
      }),
    );
  }

  // Loads a single unpublished-entry media file from a workflow branch as a
  // File wrapped in the media-library shape.
  loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
    const readFile = (
      path: string,
      id: string | null | undefined,
      { parseText }: { parseText: boolean },
    ) => this.api!.readFile(path, id, { branch, parseText });

    return getMediaAsBlob(file.path, null, readFile).then(blob => {
      const name = basename(file.path);
      const fileObj = new File([blob], name);
      return {
        id: file.path,
        displayURL: URL.createObjectURL(fileObj),
        path: file.path,
        name,
        size: fileObj.size,
        file: fileObj,
      };
    });
  }

  // Loads all of an unpublished entry's media files from its workflow branch.
  async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
    const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));

    return mediaFiles;
  }

  async unpublishedEntries() {
    const listEntriesKeys = () =>
      this.api!.listUnpublishedBranches().then(branches =>
        branches.map(branch => this.api!.contentKeyFromBranch(branch)),
      );

    const readUnpublishedBranchFile = (contentKey: string) =>
      this.api!.readUnpublishedBranchFile(contentKey);

    return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
  }

  // Loads one unpublished entry plus its media files; the media loader is
  // injectable so the UI can substitute already-loaded state.
  async unpublishedEntry(
    collection: string,
    slug: string,
    {
      loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
        this.loadEntryMediaFiles(branch, files),
    } = {},
  ) {
    const contentKey = this.api!.generateContentKey(collection, slug);
    const data = await this.api!.readUnpublishedBranchFile(contentKey);
    const mediaFiles = await loadEntryMediaFiles(
      data.metaData.branch,
      // TODO: fix this
      // eslint-disable-next-line @typescript-eslint/ban-ts-ignore
      // @ts-ignore
      data.metaData.objects.entry.mediaFiles,
    );
    return {
      slug,
      file: { path: data.metaData.objects.entry.path, id: null },
      data: data.fileData as string,
      metaData: data.metaData,
      mediaFiles,
      isModification: data.isModification,
    };
  }

  async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
    // updateUnpublishedEntryStatus is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
      'Failed to acquire update entry status lock',
    );
  }

  async deleteUnpublishedEntry(collection: string, slug: string) {
    // deleteUnpublishedEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.deleteUnpublishedEntry(collection, slug),
      'Failed to acquire delete entry lock',
    );
  }

  async publishUnpublishedEntry(collection: string, slug: string) {
    // publishUnpublishedEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.publishUnpublishedEntry(collection, slug),
      'Failed to acquire publish entry lock',
    );
  }

  // Best-effort deploy-preview lookup: any failure (e.g. no merge request for
  // the entry) is treated as "no preview available".
  async getDeployPreview(collection: string, slug: string) {
    try {
      const statuses = await this.api!.getStatuses(collection, slug);
      const deployStatus = getPreviewStatus(statuses, this.previewContext);

      if (deployStatus) {
        const { target_url: url, state } = deployStatus;
        return { url, status: state };
      } else {
        return null;
      }
    } catch (e) {
      return null;
    }
  }
}
|
Reference in New Issue
Block a user