refactor: monorepo setup with lerna (#243)

This commit is contained in:
Daniel Lautzenheiser
2022-12-15 13:44:49 -05:00
committed by GitHub
parent dac29fbf3c
commit 504d95c34f
706 changed files with 16571 additions and 142 deletions

View File

@ -0,0 +1,540 @@
import { Base64 } from 'js-base64';
import partial from 'lodash/partial';
import result from 'lodash/result';
import trimStart from 'lodash/trimStart';
import { dirname } from 'path';
import {
APIError,
Cursor,
localForage,
parseLinkHeader,
readFile,
readFileMetadata,
requestWithBackoff,
responseParser,
throwOnConflictingBranches,
unsentRequest,
} from '@staticcms/core/lib/util';
import type { DataFile, PersistOptions } from '@staticcms/core/interface';
import type { ApiRequest, FetchError } from '@staticcms/core/lib/util';
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
// Human-readable backend name used in error reporting (APIError instances).
export const API_NAME = 'GitLab';
// Constructor options for the API client; every field falls back to a default.
export interface Config {
  apiRoot?: string;
  token?: string;
  branch?: string;
  repo?: string;
}
// Author identity attached to commits when configured.
export interface CommitAuthor {
  name: string;
  email: string;
}
// Action names understood by the GitLab commits API (`actions[].action`).
enum CommitAction {
  CREATE = 'create',
  DELETE = 'delete',
  MOVE = 'move',
  UPDATE = 'update',
}
// One file operation inside a commit payload.
type CommitItem = {
  base64Content?: string;
  path: string;
  // Source path for MOVE actions.
  oldPath?: string;
  action: CommitAction;
};
// Entry returned by the repository tree endpoint; `type` is 'blob' for files.
type FileEntry = { id: string; type: string; path: string; name: string };
// Request body for POST /repository/commits.
interface CommitsParams {
  commit_message: string;
  branch: string;
  author_name?: string;
  author_email?: string;
  actions?: {
    action: string;
    file_path: string;
    previous_path?: string;
    content?: string;
    encoding?: string;
  }[];
}
// Single diff entry from GET /repository/compare.
type GitLabCommitDiff = {
  diff: string;
  new_path: string;
  old_path: string;
  new_file: boolean;
  renamed_file: boolean;
  deleted_file: boolean;
};
// Subset of the project response used for access-level checks.
type GitLabRepo = {
  shared_with_groups: { group_access_level: number }[] | null;
  permissions: {
    project_access: { access_level: number } | null;
    group_access: { access_level: number } | null;
  };
};
// Branch metadata, including protected-branch developer permissions.
type GitLabBranch = {
  name: string;
  developers_can_push: boolean;
  developers_can_merge: boolean;
  commit: {
    id: string;
  };
};
// Ref entry from GET /repository/commits/:sha/refs.
type GitLabCommitRef = {
  type: string;
  name: string;
};
// Commit metadata from GET /repository/commits.
type GitLabCommit = {
  id: string;
  short_id: string;
  title: string;
  author_name: string;
  author_email: string;
  authored_date: string;
  committer_name: string;
  committer_email: string;
  committed_date: string;
  created_at: string;
  message: string;
};
/**
 * Returns the group entry with the highest `group_access_level`.
 * On ties the earlier entry wins; an empty array yields `undefined`.
 */
export function getMaxAccess(groups: { group_access_level: number }[]) {
  let best = groups[0];
  for (const candidate of groups) {
    if (candidate.group_access_level > best.group_access_level) {
      best = candidate;
    }
  }
  return best;
}
/**
 * REST client for the GitLab API (v4), scoped to a single repository and
 * branch. All repository endpoints are addressed relative to `repoURL`.
 */
export default class API {
  apiRoot: string;
  /** Bearer token, or `false` when unauthenticated. */
  token: string | boolean;
  branch: string;
  repo: string;
  /** Base path for repository-scoped endpoints (`/projects/<urlencoded repo>`). */
  repoURL: string;
  /** Optional author identity attached to every commit. */
  commitAuthor?: CommitAuthor;

  constructor(config: Config) {
    this.apiRoot = config.apiRoot || 'https://gitlab.com/api/v4';
    this.token = config.token || false;
    this.branch = config.branch || 'main';
    this.repo = config.repo || '';
    this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
  }

  /** Adds the bearer Authorization header when a token is configured. */
  withAuthorizationHeaders = (req: ApiRequest) => {
    const withHeaders = unsentRequest.withHeaders(
      this.token ? { Authorization: `Bearer ${this.token}` } : {},
      req,
    );
    return Promise.resolve(withHeaders);
  };

  /**
   * Prepares a request: prefixes the API root, adds auth headers, and disables
   * caching unless the request already specifies a cache mode.
   */
  buildRequest = async (req: ApiRequest) => {
    const withRoot: ApiRequest = unsentRequest.withRoot(this.apiRoot)(req);
    const withAuthorizationHeaders = await this.withAuthorizationHeaders(withRoot);
    if ('cache' in withAuthorizationHeaders) {
      return withAuthorizationHeaders;
    } else {
      const withNoCache: ApiRequest = unsentRequest.withNoCache(withAuthorizationHeaders);
      return withNoCache;
    }
  };

  /**
   * Performs a request with retry/backoff, wrapping failures in `APIError`.
   */
  request = async (req: ApiRequest): Promise<Response> => {
    try {
      // `await` is required here: returning the bare promise would let the
      // rejection escape this try/catch, so errors would never be wrapped.
      return await requestWithBackoff(this, req);
    } catch (error: unknown) {
      if (error instanceof Error) {
        throw new APIError(error.message, null, API_NAME);
      }
      throw error;
    }
  };

  responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
  responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
  responseToText = responseParser({ format: 'text', apiName: API_NAME });

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
  requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;

  /** Fetches the authenticated user. */
  user = () => this.requestJSON('/user');

  // GitLab access levels: 30 = Developer, 40 = Maintainer.
  WRITE_ACCESS = 30;
  MAINTAINER_ACCESS = 40;

  /**
   * Determines whether the current user can write to the repository, checking
   * project access, direct group access, and shared-group access in turn.
   */
  hasWriteAccess = async () => {
    const { shared_with_groups: sharedWithGroups, permissions }: GitLabRepo =
      await this.requestJSON(this.repoURL);
    const { project_access: projectAccess, group_access: groupAccess } = permissions;
    if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {
      return true;
    }
    if (groupAccess && groupAccess.access_level >= this.WRITE_ACCESS) {
      return true;
    }
    // check for group write permissions
    if (sharedWithGroups && sharedWithGroups.length > 0) {
      const maxAccess = getMaxAccess(sharedWithGroups);
      // maintainer access
      if (maxAccess.group_access_level >= this.MAINTAINER_ACCESS) {
        return true;
      }
      // developer access
      if (maxAccess.group_access_level >= this.WRITE_ACCESS) {
        // developers may be blocked on protected branches, so also check the
        // default branch's merge/push permissions
        try {
          const branch = await this.getDefaultBranch();
          if (branch.developers_can_merge && branch.developers_can_push) {
            return true;
          }
        } catch (e) {
          console.error('Failed getting default branch', e);
        }
      }
    }
    return false;
  };

  /**
   * Reads a file's raw content (text or blob), caching by `sha` via the shared
   * `readFile` helper when a sha is provided.
   */
  readFile = async (
    path: string,
    sha?: string | null,
    { parseText = true, branch = this.branch } = {},
  ): Promise<string | Blob> => {
    const fetchContent = async () => {
      const content = await this.request({
        url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
        params: { ref: branch },
        cache: 'no-store',
      }).then<Blob | string>(parseText ? this.responseToText : this.responseToBlob);
      return content;
    };
    const content = await readFile(sha, fetchContent, localForage, parseText);
    return content;
  };

  /**
   * Returns { author, updatedOn } for a file from its latest commit; falls
   * back to empty strings when the commit lookup fails.
   */
  async readFileMetadata(path: string, sha: string | null | undefined) {
    const fetchFileMetadata = async () => {
      try {
        // Renamed from `result` to avoid shadowing the lodash `result` import.
        const commits: GitLabCommit[] = await this.requestJSON({
          url: `${this.repoURL}/repository/commits`,
          params: { path, ref_name: this.branch },
        });
        const commit = commits[0];
        return {
          author: commit.author_name || commit.author_email,
          updatedOn: commit.authored_date,
        };
      } catch (e) {
        return { author: '', updatedOn: '' };
      }
    };
    const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
    return fileMetadata;
  }

  /**
   * Builds a pagination cursor from GitLab's pagination response headers.
   */
  getCursorFromHeaders = (headers: Headers) => {
    const page = parseInt(headers.get('X-Page') as string, 10);
    const pageCount = parseInt(headers.get('X-Total-Pages') as string, 10);
    const pageSize = parseInt(headers.get('X-Per-Page') as string, 10);
    const count = parseInt(headers.get('X-Total') as string, 10);
    const links = parseLinkHeader(headers.get('Link'));
    // Only advertise link actions that make sense for the current page.
    const actions = Object.keys(links).flatMap(key =>
      (key === 'prev' && page > 1) ||
      (key === 'next' && page < pageCount) ||
      (key === 'first' && page > 1) ||
      (key === 'last' && page < pageCount)
        ? [key]
        : [],
    );
    return Cursor.create({
      actions,
      meta: { page, count, pageSize, pageCount },
      data: { links },
    });
  };

  getCursor = ({ headers }: { headers: Headers }) => this.getCursorFromHeaders(headers);

  // Gets a cursor without retrieving the entries by using a HEAD request
  fetchCursor = (req: ApiRequest) =>
    this.request(unsentRequest.withMethod('HEAD', req)).then(value => this.getCursor(value));

  /**
   * Fetches one page of entries together with its pagination cursor.
   * A 404 yields an empty entry list rather than an error.
   */
  fetchCursorAndEntries = (
    req: ApiRequest,
  ): Promise<{
    entries: FileEntry[];
    cursor: Cursor;
  }> => {
    const request = this.request(unsentRequest.withMethod('GET', req));
    return Promise.all([
      request.then(this.getCursor),
      request.then(this.responseToJSON).catch((e: FetchError) => {
        if (e.status === 404) {
          return [];
        } else {
          throw e;
        }
      }),
    ]).then(([cursor, entries]) => ({ cursor, entries }));
  };

  /** Lists one page of blobs under `path` on the configured branch. */
  listFiles = async (path: string, recursive = false) => {
    const { entries, cursor } = await this.fetchCursorAndEntries({
      url: `${this.repoURL}/repository/tree`,
      params: { path, ref: this.branch, recursive: `${recursive}` },
    });
    return {
      files: entries.filter(({ type }) => type === 'blob'),
      cursor,
    };
  };

  /** Follows a cursor link (`prev`/`next`/`first`/`last`) to another page. */
  traverseCursor = async (cursor: Cursor, action: string) => {
    const link = (cursor.data?.links as Record<string, ApiRequest>)[action];
    const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
    return {
      entries: entries.filter(({ type }) => type === 'blob'),
      cursor: newCursor,
    };
  };

  /** Lists every blob under `path`, following pagination to exhaustion. */
  listAllFiles = async (path: string, recursive = false, branch = this.branch) => {
    const entries = [];
    // eslint-disable-next-line prefer-const
    let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
      url: `${this.repoURL}/repository/tree`,
      // Get the maximum number of entries per page
      params: { path, ref: branch, per_page: '100', recursive: `${recursive}` },
    });
    entries.push(...initialEntries);
    while (cursor && cursor.actions!.has('next')) {
      const link = (cursor.data?.links as Record<string, ApiRequest>).next;
      const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
      entries.push(...newEntries);
      cursor = newCursor;
    }
    return entries.filter(({ type }) => type === 'blob');
  };

  toBase64 = (str: string) => Promise.resolve(Base64.encode(str));
  fromBase64 = (str: string) => Base64.decode(str);

  /** Fetches branch metadata for `branchName`. */
  async getBranch(branchName: string) {
    const branch: GitLabBranch = await this.requestJSON(
      `${this.repoURL}/repository/branches/${encodeURIComponent(branchName)}`,
    );
    return branch;
  }

  /**
   * Commits a batch of file actions in a single API call. When `newBranch` is
   * set, the branch is created from the configured default branch; a conflict
   * with an existing branch raises a descriptive error.
   */
  async uploadAndCommit(
    items: CommitItem[],
    { commitMessage = '', branch = this.branch, newBranch = false },
  ) {
    const actions = items.map(item => ({
      action: item.action,
      file_path: item.path,
      ...(item.oldPath ? { previous_path: item.oldPath } : {}),
      ...(item.base64Content !== undefined
        ? { content: item.base64Content, encoding: 'base64' }
        : {}),
    }));
    const commitParams: CommitsParams = {
      branch,
      commit_message: commitMessage,
      actions,
      ...(newBranch ? { start_branch: this.branch } : {}),
    };
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      commitParams.author_name = name;
      commitParams.author_email = email;
    }
    try {
      const result = await this.requestJSON({
        url: `${this.repoURL}/repository/commits`,
        method: 'POST',
        headers: { 'Content-Type': 'application/json; charset=utf-8' },
        body: JSON.stringify(commitParams),
      });
      return result;
    } catch (error: unknown) {
      if (error instanceof Error) {
        const message = error.message || '';
        if (newBranch && message.includes(`Could not update ${branch}`)) {
          await throwOnConflictingBranches(branch, name => this.getBranch(name), API_NAME);
        }
      }
      throw error;
    }
  }

  /**
   * Translates files into commit items (create/update/move), and for moves
   * also queues MOVE actions for every child of a moved directory.
   */
  async getCommitItems(files: { path: string; newPath?: string }[], branch: string) {
    const items: CommitItem[] = await Promise.all(
      files.map(async file => {
        const [base64Content, fileExists] = await Promise.all([
          result(file, 'toBase64', partial(this.toBase64, (file as DataFile).raw)),
          this.isFileExists(file.path, branch),
        ]);
        let action = CommitAction.CREATE;
        let path = trimStart(file.path, '/');
        let oldPath = undefined;
        if (fileExists) {
          oldPath = file.newPath && path;
          action =
            file.newPath && file.newPath !== oldPath ? CommitAction.MOVE : CommitAction.UPDATE;
          path = file.newPath ? trimStart(file.newPath, '/') : path;
        }
        return {
          action,
          base64Content,
          path,
          oldPath,
        };
      }),
    );
    // move children
    for (const item of items.filter(i => i.oldPath && i.action === CommitAction.MOVE)) {
      const sourceDir = dirname(item.oldPath as string);
      const destDir = dirname(item.path);
      const children = await this.listAllFiles(sourceDir, true, branch);
      children
        .filter(f => f.path !== item.oldPath)
        .forEach(file => {
          items.push({
            action: CommitAction.MOVE,
            path: file.path.replace(sourceDir, destDir),
            oldPath: file.path,
          });
        });
    }
    return items;
  }

  /** Persists data and media files as a single commit on the default branch. */
  async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
    const files = [...dataFiles, ...mediaFiles];
    const items = await this.getCommitItems(files, this.branch);
    return this.uploadAndCommit(items, {
      commitMessage: options.commitMessage,
    });
  }

  /**
   * Deletes a set of paths in one commit. The commit author (when configured)
   * is applied inside `uploadAndCommit`, so nothing else is needed here.
   * (A previously dead, unused `commitParams` local was removed.)
   */
  deleteFiles = (paths: string[], commitMessage: string) => {
    const items = paths.map(path => ({ path, action: CommitAction.DELETE }));
    return this.uploadAndCommit(items, {
      commitMessage,
    });
  };

  /**
   * Resolves a file's blob id (git sha) via a HEAD request, avoiding a
   * content download.
   */
  async getFileId(path: string, branch: string) {
    const request = await this.request({
      method: 'HEAD',
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
      params: { ref: branch },
    });
    // GitLab reports the blob sha in the X-Gitlab-Blob-Id response header.
    // (Fixed: the header name must not contain spaces, or get() returns null.)
    const blobId = request.headers.get('X-Gitlab-Blob-Id') as string;
    return blobId;
  }

  /** Returns true when `path` exists on `branch`; 404 maps to false. */
  async isFileExists(path: string, branch: string) {
    const fileExists = await this.requestText({
      method: 'HEAD',
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
      params: { ref: branch },
    })
      .then(() => true)
      .catch(error => {
        if (error instanceof APIError && error.status === 404) {
          return false;
        }
        throw error;
      });
    return fileExists;
  }

  /**
   * Compares two refs and normalizes each diff entry's status. Throws when the
   * diff set hits GitLab's hard cap, since results would be incomplete.
   */
  async getDifferences(to: string, from = this.branch) {
    if (to === from) {
      return [];
    }
    const result: { diffs: GitLabCommitDiff[] } = await this.requestJSON({
      url: `${this.repoURL}/repository/compare`,
      params: {
        from,
        to,
      },
    });
    if (result.diffs.length >= 1000) {
      throw new APIError('Diff limit reached', null, API_NAME);
    }
    return result.diffs.map(d => {
      let status = 'modified';
      if (d.new_file) {
        status = 'added';
      } else if (d.deleted_file) {
        status = 'deleted';
      } else if (d.renamed_file) {
        status = 'renamed';
      }
      return {
        status,
        oldPath: d.old_path,
        newPath: d.new_path,
        newFile: d.new_file,
        path: d.new_path || d.old_path,
        // Fixed: escape the dot so only real '.svg' suffixes match.
        binary: d.diff.startsWith('Binary') || /\.svg$/.test(d.new_path),
      };
    });
  }

  /** Fetches metadata for the configured branch. */
  async getDefaultBranch() {
    const branch: GitLabBranch = await this.getBranch(this.branch);
    return branch;
  }

  /** Returns true when `sha` is reachable from `branch`. */
  async isShaExistsInBranch(branch: string, sha: string) {
    const refs: GitLabCommitRef[] = await this.requestJSON({
      url: `${this.repoURL}/repository/commits/${sha}/refs`,
      params: {
        type: 'branch',
      },
    });
    return refs.some(r => r.name === branch);
  }
}

View File

@ -0,0 +1,99 @@
import { styled } from '@mui/material/styles';
import React, { useCallback, useMemo, useState } from 'react';
import AuthenticationPage from '@staticcms/core/components/UI/AuthenticationPage';
import Icon from '@staticcms/core/components/UI/Icon';
import { NetlifyAuthenticator, PkceAuthenticator } from '@staticcms/core/lib/auth';
import { isNotEmpty } from '@staticcms/core/lib/util/string.util';
import type { MouseEvent } from 'react';
import type {
AuthenticationPageProps,
AuthenticatorConfig,
TranslatedProps,
} from '@staticcms/core/interface';
// Icon rendered inside the login button, spaced away from the button label.
const LoginButtonIcon = styled(Icon)`
  margin-right: 18px;
`;
// Authenticators that run entirely in the browser (no OAuth gateway),
// keyed by the backend `auth_type` config value.
const clientSideAuthenticators = {
  pkce: (config: AuthenticatorConfig) => new PkceAuthenticator(config),
} as const;
/**
 * Login screen for the GitLab backend. Chooses between a client-side (PKCE)
 * OAuth flow and the Netlify gateway flow based on backend configuration,
 * and surfaces authentication errors inline.
 */
const GitLabAuthenticationPage = ({
  inProgress = false,
  config,
  siteId,
  authEndpoint,
  clearHash,
  onLogin,
  t,
}: TranslatedProps<AuthenticationPageProps>) => {
  const [loginError, setLoginError] = useState<string | null>(null);

  const auth = useMemo(() => {
    const {
      auth_type: authType = '',
      base_url = 'https://gitlab.com',
      auth_endpoint = 'oauth/authorize',
      app_id = '',
    } = config.backend;

    if (isNotEmpty(authType) && authType in clientSideAuthenticators) {
      const createAuthenticator =
        clientSideAuthenticators[authType as keyof typeof clientSideAuthenticators];
      const authenticator = createAuthenticator({
        base_url,
        auth_endpoint,
        app_id,
        auth_token_endpoint: 'oauth/token',
        clearHash,
      });

      // Finish the redirect-based flow if the provider sent us back here.
      authenticator.completeAuth((err, data) => {
        if (err) {
          setLoginError(err.toString());
          return;
        }
        if (data) {
          onLogin(data);
        }
      });

      return authenticator;
    }

    // No client-side flow configured: go through the OAuth gateway. Local
    // development uses the shared cms.netlify.com site id.
    const isLocalhost = document.location.host.split(':')[0] === 'localhost';
    return new NetlifyAuthenticator({
      base_url,
      site_id: isLocalhost ? 'cms.netlify.com' : siteId,
      auth_endpoint: authEndpoint,
    });
  }, [authEndpoint, clearHash, config.backend, onLogin, siteId]);

  const handleLogin = useCallback(
    (event: MouseEvent<HTMLButtonElement>) => {
      event.preventDefault();
      auth.authenticate({ provider: 'gitlab', scope: 'api' }, err => {
        if (err) {
          setLoginError(err.toString());
          return;
        }
      });
    },
    [auth],
  );

  return (
    <AuthenticationPage
      onLogin={handleLogin}
      loginDisabled={inProgress}
      loginErrorMessage={loginError}
      logoUrl={config.logo_url}
      siteUrl={config.site_url}
      icon={<LoginButtonIcon type="gitlab" />}
      buttonContent={inProgress ? t('auth.loggingIn') : t('auth.loginWithGitLab')}
      t={t}
    />
  );
};
export default GitLabAuthenticationPage;

View File

@ -0,0 +1,166 @@
import API, { getMaxAccess } from '../API';
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));
describe('GitLab API', () => {
beforeAll(() => {
// eslint-disable-next-line @typescript-eslint/no-empty-function
jest.spyOn(console, 'error').mockImplementation(() => {});
});
beforeEach(() => {
jest.resetAllMocks();
});
afterAll(() => {
jest.restoreAllMocks();
});
describe('hasWriteAccess', () => {
test('should return true on project access_level >= 30', async () => {
const api = new API({ repo: 'repo' });
api.requestJSON = jest
.fn()
.mockResolvedValueOnce({ permissions: { project_access: { access_level: 30 } } });
await expect(api.hasWriteAccess()).resolves.toBe(true);
});
test('should return false on project access_level < 30', async () => {
const api = new API({ repo: 'repo' });
api.requestJSON = jest
.fn()
.mockResolvedValueOnce({ permissions: { project_access: { access_level: 10 } } });
await expect(api.hasWriteAccess()).resolves.toBe(false);
});
test('should return true on group access_level >= 30', async () => {
const api = new API({ repo: 'repo' });
api.requestJSON = jest
.fn()
.mockResolvedValueOnce({ permissions: { group_access: { access_level: 30 } } });
await expect(api.hasWriteAccess()).resolves.toBe(true);
});
test('should return false on group access_level < 30', async () => {
const api = new API({ repo: 'repo' });
api.requestJSON = jest
.fn()
.mockResolvedValueOnce({ permissions: { group_access: { access_level: 10 } } });
await expect(api.hasWriteAccess()).resolves.toBe(false);
});
test('should return true on shared group access_level >= 40', async () => {
const api = new API({ repo: 'repo' });
api.requestJSON = jest.fn().mockResolvedValueOnce({
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 40 }],
});
await expect(api.hasWriteAccess()).resolves.toBe(true);
expect(api.requestJSON).toHaveBeenCalledTimes(1);
});
test('should return true on shared group access_level >= 30, developers can merge and push', async () => {
const api = new API({ repo: 'repo' });
const requestJSONMock = (api.requestJSON = jest.fn());
requestJSONMock.mockResolvedValueOnce({
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
});
requestJSONMock.mockResolvedValueOnce({
developers_can_merge: true,
developers_can_push: true,
});
await expect(api.hasWriteAccess()).resolves.toBe(true);
});
test('should return false on shared group access_level < 30,', async () => {
const api = new API({ repo: 'repo' });
const requestJSONMock = (api.requestJSON = jest.fn());
requestJSONMock.mockResolvedValueOnce({
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 20 }],
});
requestJSONMock.mockResolvedValueOnce({
developers_can_merge: true,
developers_can_push: true,
});
await expect(api.hasWriteAccess()).resolves.toBe(false);
});
test("should return false on shared group access_level >= 30, developers can't merge", async () => {
const api = new API({ repo: 'repo' });
const requestJSONMock = (api.requestJSON = jest.fn());
requestJSONMock.mockResolvedValueOnce({
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
});
requestJSONMock.mockResolvedValueOnce({
developers_can_merge: false,
developers_can_push: true,
});
await expect(api.hasWriteAccess()).resolves.toBe(false);
});
test("should return false on shared group access_level >= 30, developers can't push", async () => {
const api = new API({ repo: 'repo' });
const requestJSONMock = (api.requestJSON = jest.fn());
requestJSONMock.mockResolvedValueOnce({
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
});
requestJSONMock.mockResolvedValueOnce({
developers_can_merge: true,
developers_can_push: false,
});
await expect(api.hasWriteAccess()).resolves.toBe(false);
});
test('should return false on shared group access_level >= 30, error getting branch', async () => {
const api = new API({ repo: 'repo' });
const requestJSONMock = (api.requestJSON = jest.fn());
requestJSONMock.mockResolvedValueOnce({
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
});
const error = new Error('Not Found');
requestJSONMock.mockRejectedValue(error);
await expect(api.hasWriteAccess()).resolves.toBe(false);
expect(console.error).toHaveBeenCalledTimes(1);
expect(console.error).toHaveBeenCalledWith('Failed getting default branch', error);
});
});
describe('getMaxAccess', () => {
it('should return group with max access level', () => {
const groups = [
{ group_access_level: 10 },
{ group_access_level: 5 },
{ group_access_level: 100 },
{ group_access_level: 1 },
];
expect(getMaxAccess(groups)).toBe(groups[2]);
});
});
});

View File

@ -0,0 +1,316 @@
import { stripIndent } from 'common-tags';
import trim from 'lodash/trim';
import trimStart from 'lodash/trimStart';
import semaphore from 'semaphore';
import {
allEntriesByFolder,
asyncLock,
basename,
blobToFileObj,
CURSOR_COMPATIBILITY_SYMBOL,
entriesByFiles,
entriesByFolder,
filterByExtension,
getBlobSHA,
getMediaAsBlob,
getMediaDisplayURL,
localForage,
runWithLock,
} from '@staticcms/core/lib/util';
import API, { API_NAME } from './API';
import AuthenticationPage from './AuthenticationPage';
import type { Semaphore } from 'semaphore';
import type { AsyncLock, Cursor } from '@staticcms/core/lib/util';
import type {
Config,
Credentials,
DisplayURL,
BackendEntry,
BackendClass,
ImplementationFile,
PersistOptions,
User,
} from '@staticcms/core/interface';
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
// Cap on simultaneous media display-URL downloads (semaphore size).
const MAX_CONCURRENT_DOWNLOADS = 10;
/**
 * GitLab backend implementation. Thin orchestration layer over the REST `API`
 * client: handles authentication and delegates entry/media listing and
 * persistence to shared backend utilities.
 */
export default class GitLab implements BackendClass {
  // Serializes persist operations (see persistEntry).
  lock: AsyncLock;
  // REST client; null until authenticate() runs (or injected via options.API).
  api: API | null;
  options: {
    proxied: boolean;
    API: API | null;
  };
  repo: string;
  branch: string;
  apiRoot: string;
  token: string | null;
  mediaFolder?: string;
  // Lazily-created semaphore limiting concurrent media downloads.
  _mediaDisplayURLSem?: Semaphore;
  constructor(config: Config, options = {}) {
    this.options = {
      proxied: false,
      API: null,
      ...options,
    };
    // A repo is mandatory unless requests are proxied elsewhere.
    if (
      !this.options.proxied &&
      (config.backend.repo === null || config.backend.repo === undefined)
    ) {
      throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
    }
    this.api = this.options.API || null;
    this.repo = config.backend.repo || '';
    this.branch = config.backend.branch || 'main';
    this.apiRoot = config.backend.api_root || 'https://gitlab.com/api/v4';
    this.token = '';
    this.mediaFolder = config.media_folder;
    this.lock = asyncLock();
  }
  isGitBackend() {
    return true;
  }
  // Reports auth status by probing the /user endpoint; API status is assumed ok.
  async status() {
    const auth =
      (await this.api
        ?.user()
        .then(user => !!user)
        .catch(e => {
          console.warn('Failed getting GitLab user', e);
          return false;
        })) || false;
    return { auth: { status: auth }, api: { status: true, statusPage: '' } };
  }
  authComponent() {
    return AuthenticationPage;
  }
  // Re-authenticates with a previously stored user (same credential flow).
  restoreUser(user: User) {
    return this.authenticate(user);
  }
  /**
   * Creates the API client from credentials, then verifies the user exists
   * and has write access to the repository before returning the user object.
   */
  async authenticate(state: Credentials) {
    this.token = state.token as string;
    this.api = new API({
      token: this.token,
      branch: this.branch,
      repo: this.repo,
      apiRoot: this.apiRoot,
    });
    const user = await this.api.user();
    const isCollab = await this.api.hasWriteAccess().catch((error: Error) => {
      error.message = stripIndent`
        Repo "${this.repo}" not found.
        Please ensure the repo information is spelled correctly.
        If the repo is private, make sure you're logged into a GitLab account with access.
      `;
      throw error;
    });
    // Unauthorized user
    if (!isCollab) {
      throw new Error('Your GitLab user account does not have access to this repo.');
    }
    // Authorized user
    return { ...user, login: user.username, token: state.token as string };
  }
  async logout() {
    this.token = null;
    return;
  }
  getToken() {
    return Promise.resolve(this.token);
  }
  /**
   * Returns true when `file` matches `extension` and sits at most `depth`
   * folder levels below `folder`.
   */
  filterFile(
    folder: string,
    file: { path: string; name: string },
    extension: string,
    depth: number,
  ) {
    // gitlab paths include the root folder
    const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
    return filterByExtension(file, extension) && fileFolder.split('/').length <= depth;
  }
  async entriesByFolder(folder: string, extension: string, depth: number) {
    let cursor: Cursor;
    // The closure captures `cursor` so the page cursor fetched during listing
    // can be attached to the result array below.
    const listFiles = () =>
      this.api!.listFiles(folder, depth > 1).then(({ files, cursor: c }) => {
        cursor = c.mergeMeta({ folder, extension, depth });
        return files.filter(file => this.filterFile(folder, file, extension, depth));
      });
    const files = await entriesByFolder(
      listFiles,
      this.api!.readFile.bind(this.api!),
      this.api!.readFileMetadata.bind(this.api),
      API_NAME,
    );
    // Legacy pagination contract: the cursor rides along on the array itself.
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
    return files;
  }
  async listAllFiles(folder: string, extension: string, depth: number) {
    const files = await this.api!.listAllFiles(folder, depth > 1);
    const filtered = files.filter(file => this.filterFile(folder, file, extension, depth));
    return filtered;
  }
  // Unpaginated variant of entriesByFolder used for full-index loads.
  async allEntriesByFolder(folder: string, extension: string, depth: number) {
    const files = await allEntriesByFolder({
      listAllFiles: () => this.listAllFiles(folder, extension, depth),
      readFile: this.api!.readFile.bind(this.api!),
      readFileMetadata: this.api!.readFileMetadata.bind(this.api),
      apiName: API_NAME,
      branch: this.branch,
      localForage,
      folder,
      extension,
      depth,
      getDefaultBranch: () =>
        this.api!.getDefaultBranch().then(b => ({ name: b.name, sha: b.commit.id })),
      isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
      getDifferences: (to, from) => this.api!.getDifferences(to, from),
      getFileId: path => this.api!.getFileId(path, this.branch),
      filterFile: file => this.filterFile(folder, file, extension, depth),
      customFetch: undefined,
    });
    return files;
  }
  entriesByFiles(files: ImplementationFile[]) {
    return entriesByFiles(
      files,
      this.api!.readFile.bind(this.api!),
      this.api!.readFileMetadata.bind(this.api),
      API_NAME,
    );
  }
  // Fetches a single entry.
  getEntry(path: string) {
    return this.api!.readFile(path).then(data => ({
      file: { path, id: null },
      data: data as string,
    }));
  }
  // Lists media files; each item's displayURL carries enough data for
  // getMediaDisplayURL to resolve the blob later.
  async getMedia(mediaFolder = this.mediaFolder) {
    if (!mediaFolder) {
      return [];
    }
    return this.api!.listAllFiles(mediaFolder).then(files =>
      files.map(({ id, name, path }) => {
        return { id, name, path, displayURL: { id, name, path } };
      }),
    );
  }
  getMediaDisplayURL(displayURL: DisplayURL) {
    // Lazily create the semaphore to bound concurrent blob downloads.
    this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
    return getMediaDisplayURL(
      displayURL,
      this.api!.readFile.bind(this.api!),
      this._mediaDisplayURLSem,
    );
  }
  // Downloads a media file and wraps it in an object-URL-backed descriptor.
  async getMediaFile(path: string) {
    const name = basename(path);
    const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
    const fileObj = blobToFileObj(name, blob);
    const url = URL.createObjectURL(fileObj);
    const id = await getBlobSHA(blob);
    return {
      id,
      displayURL: url,
      path,
      name,
      size: fileObj.size,
      file: fileObj,
      url,
    };
  }
  async persistEntry(entry: BackendEntry, options: PersistOptions) {
    // persistEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
      'Failed to acquire persist entry lock',
    );
  }
  // Uploads a single media asset; the id is the blob's SHA, computed in
  // parallel with the commit.
  async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
    const fileObj = mediaFile.fileObj as File;
    const [id] = await Promise.all([
      getBlobSHA(fileObj),
      this.api!.persistFiles([], [mediaFile], options),
    ]);
    const { path } = mediaFile;
    const url = URL.createObjectURL(fileObj);
    return {
      displayURL: url,
      path: trimStart(path, '/'),
      name: fileObj!.name,
      size: fileObj!.size,
      file: fileObj,
      url,
    };
  }
  deleteFiles(paths: string[], commitMessage: string) {
    return this.api!.deleteFiles(paths, commitMessage);
  }
  // Follows a pagination cursor; when the cursor carries folder metadata the
  // new page is re-filtered and the metadata propagated to the next cursor.
  traverseCursor(cursor: Cursor, action: string) {
    return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
      const [folder, depth, extension] = [
        cursor.meta?.folder as string,
        cursor.meta?.depth as number,
        cursor.meta?.extension as string,
      ];
      if (folder && depth && extension) {
        entries = entries.filter(f => this.filterFile(folder, f, extension, depth));
        newCursor = newCursor.mergeMeta({ folder, extension, depth });
      }
      const entriesWithData = await entriesByFiles(
        entries,
        this.api!.readFile.bind(this.api!),
        this.api!.readFileMetadata.bind(this.api)!,
        API_NAME,
      );
      return {
        entries: entriesWithData,
        cursor: newCursor,
      };
    });
  }
}

View File

@ -0,0 +1,3 @@
// Public entry point of the GitLab backend package.
export { default as GitLabBackend } from './implementation';
export { default as API } from './API';
export { default as AuthenticationPage } from './AuthenticationPage';

View File

@ -0,0 +1,73 @@
import { gql } from 'graphql-tag';
import { oneLine } from 'common-tags';
// Lists tree entries (blobs) under `$path` on `$branch`, one page at a time;
// pass the previous page's `endCursor` as `$cursor` to fetch the next page.
export const files = gql`
  query files($repo: ID!, $branch: String!, $path: String!, $recursive: Boolean!, $cursor: String) {
    project(fullPath: $repo) {
      repository {
        tree(ref: $branch, path: $path, recursive: $recursive) {
          blobs(after: $cursor) {
            nodes {
              type
              id: sha
              path
              name
            }
            pageInfo {
              endCursor
              hasNextPage
            }
          }
        }
      }
    }
  }
`;
// Fetches raw contents for a batch of file paths in a single round trip.
export const blobs = gql`
  query blobs($repo: ID!, $branch: String!, $paths: [String!]!) {
    project(fullPath: $repo) {
      repository {
        blobs(ref: $branch, paths: $paths) {
          nodes {
            id
            data: rawBlob
          }
        }
      }
    }
  }
`;
/**
 * Builds a query fetching the last commit for each of `paths`, using one
 * aliased `tree(...)` selection per path (GraphQL has no list variant here).
 *
 * Paths are serialized with JSON.stringify so quotes/backslashes in a path
 * are escaped instead of producing an invalid (or injectable) document —
 * GraphQL string literals use JSON-compatible escape syntax.
 */
export function lastCommits(paths: string[]) {
  const tree = paths
    .map(
      (path, index) => oneLine`
        tree${index}: tree(ref: $branch, path: ${JSON.stringify(path)}) {
          lastCommit {
            authorName
            authoredDate
            author {
              id
              username
              name
              publicEmail
            }
          }
        }
      `,
    )
    .join('\n');
  const query = gql`
    query lastCommits($repo: ID!, $branch: String!) {
      project(fullPath: $repo) {
        repository {
          ${tree}
        }
      }
    }
  `;
  return query;
}