refactor: monorepo setup with lerna (#243)
This commit is contained in:
committed by
GitHub
parent
dac29fbf3c
commit
504d95c34f
462
packages/core/src/backends/bitbucket/API.ts
Normal file
462
packages/core/src/backends/bitbucket/API.ts
Normal file
@ -0,0 +1,462 @@
|
||||
import flow from 'lodash/flow';
|
||||
import get from 'lodash/get';
|
||||
import { dirname } from 'path';
|
||||
import { parse } from 'what-the-diff';
|
||||
|
||||
import {
|
||||
APIError,
|
||||
basename,
|
||||
Cursor,
|
||||
localForage,
|
||||
readFile,
|
||||
readFileMetadata,
|
||||
requestWithBackoff,
|
||||
responseParser,
|
||||
then,
|
||||
throwOnConflictingBranches,
|
||||
unsentRequest,
|
||||
} from '@staticcms/core/lib/util';
|
||||
|
||||
import type { DataFile, PersistOptions } from '@staticcms/core/interface';
|
||||
import type { ApiRequest, FetchError } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
|
||||
interface Config {
|
||||
apiRoot?: string;
|
||||
token?: string;
|
||||
branch?: string;
|
||||
repo?: string;
|
||||
requestFunction?: (req: ApiRequest) => Promise<Response>;
|
||||
hasWriteAccess?: () => Promise<boolean>;
|
||||
}
|
||||
|
||||
interface CommitAuthor {
|
||||
name: string;
|
||||
email: string;
|
||||
}
|
||||
|
||||
type BitBucketFile = {
|
||||
id: string;
|
||||
type: string;
|
||||
path: string;
|
||||
commit?: { hash: string };
|
||||
};
|
||||
|
||||
type BitBucketSrcResult = {
|
||||
size: number;
|
||||
page: number;
|
||||
pagelen: number;
|
||||
next: string;
|
||||
previous: string;
|
||||
values: BitBucketFile[];
|
||||
};
|
||||
|
||||
type BitBucketUser = {
|
||||
username: string;
|
||||
display_name: string;
|
||||
nickname: string;
|
||||
links: {
|
||||
avatar: {
|
||||
href: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
type BitBucketBranch = {
|
||||
name: string;
|
||||
target: { hash: string };
|
||||
};
|
||||
|
||||
type BitBucketCommit = {
|
||||
hash: string;
|
||||
author: {
|
||||
raw: string;
|
||||
user: {
|
||||
display_name: string;
|
||||
nickname: string;
|
||||
};
|
||||
};
|
||||
date: string;
|
||||
};
|
||||
|
||||
export const API_NAME = 'Bitbucket';
|
||||
|
||||
function replace404WithEmptyResponse(err: FetchError) {
|
||||
if (err && err.status === 404) {
|
||||
console.info('This 404 was expected and handled appropriately.');
|
||||
return { size: 0, values: [] as BitBucketFile[] } as BitBucketSrcResult;
|
||||
} else {
|
||||
return Promise.reject(err);
|
||||
}
|
||||
}
|
||||
|
||||
export default class API {
|
||||
apiRoot: string;
|
||||
branch: string;
|
||||
repo: string;
|
||||
requestFunction: (req: ApiRequest) => Promise<Response>;
|
||||
repoURL: string;
|
||||
commitAuthor?: CommitAuthor;
|
||||
|
||||
constructor(config: Config) {
|
||||
this.apiRoot = config.apiRoot || 'https://api.bitbucket.org/2.0';
|
||||
this.branch = config.branch || 'main';
|
||||
this.repo = config.repo || '';
|
||||
this.requestFunction = config.requestFunction || unsentRequest.performRequest;
|
||||
// Allow overriding this.hasWriteAccess
|
||||
this.hasWriteAccess = config.hasWriteAccess || this.hasWriteAccess;
|
||||
this.repoURL = this.repo ? `/repositories/${this.repo}` : '';
|
||||
}
|
||||
|
||||
buildRequest = (req: ApiRequest) => {
|
||||
const withRoot = unsentRequest.withRoot(this.apiRoot)(req);
|
||||
if ('cache' in withRoot) {
|
||||
return withRoot;
|
||||
} else {
|
||||
const withNoCache = unsentRequest.withNoCache(withRoot);
|
||||
return withNoCache;
|
||||
}
|
||||
};
|
||||
|
||||
request = (req: ApiRequest): Promise<Response> => {
|
||||
try {
|
||||
return requestWithBackoff(this, req);
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
throw new APIError(error.message, null, API_NAME);
|
||||
}
|
||||
|
||||
throw new APIError('Unknown api error', null, API_NAME);
|
||||
}
|
||||
};
|
||||
|
||||
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
|
||||
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
|
||||
responseToText = responseParser({ format: 'text', apiName: API_NAME });
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
|
||||
requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;
|
||||
|
||||
user = () => this.requestJSON('/user') as Promise<BitBucketUser>;
|
||||
|
||||
hasWriteAccess = async () => {
|
||||
const response = await this.request(this.repoURL);
|
||||
if (response.status === 404) {
|
||||
throw Error('Repo not found');
|
||||
}
|
||||
return response.ok;
|
||||
};
|
||||
|
||||
getBranch = async (branchName: string) => {
|
||||
const branch: BitBucketBranch = await this.requestJSON(
|
||||
`${this.repoURL}/refs/branches/${branchName}`,
|
||||
);
|
||||
|
||||
return branch;
|
||||
};
|
||||
|
||||
branchCommitSha = async (branch: string) => {
|
||||
const {
|
||||
target: { hash: branchSha },
|
||||
}: BitBucketBranch = await this.getBranch(branch);
|
||||
|
||||
return branchSha;
|
||||
};
|
||||
|
||||
defaultBranchCommitSha = () => {
|
||||
return this.branchCommitSha(this.branch);
|
||||
};
|
||||
|
||||
isFile = ({ type }: BitBucketFile) => type === 'commit_file';
|
||||
|
||||
getFileId = (commitHash: string, path: string) => {
|
||||
return `${commitHash}/${path}`;
|
||||
};
|
||||
|
||||
processFile = (file: BitBucketFile) => ({
|
||||
id: file.id,
|
||||
type: file.type,
|
||||
path: file.path,
|
||||
name: basename(file.path),
|
||||
|
||||
// BitBucket does not return file SHAs, but it does give us the
|
||||
// commit SHA. Since the commit SHA will change if any files do,
|
||||
// we can construct an ID using the commit SHA and the file path
|
||||
// that will help with caching (though not as well as a normal
|
||||
// SHA, since it will change even if the individual file itself
|
||||
// doesn't.)
|
||||
...(file.commit && file.commit.hash ? { id: this.getFileId(file.commit.hash, file.path) } : {}),
|
||||
});
|
||||
processFiles = (files: BitBucketFile[]) => files.filter(this.isFile).map(this.processFile);
|
||||
|
||||
readFile = async (
|
||||
path: string,
|
||||
sha?: string | null,
|
||||
{ parseText = true, branch = this.branch, head = '' } = {},
|
||||
): Promise<string | Blob> => {
|
||||
const fetchContent = async () => {
|
||||
const node = head ? head : await this.branchCommitSha(branch);
|
||||
const content = await this.request({
|
||||
url: `${this.repoURL}/src/${node}/${path}`,
|
||||
cache: 'no-store',
|
||||
}).then<string | Blob>(parseText ? this.responseToText : this.responseToBlob);
|
||||
return content;
|
||||
};
|
||||
const content = await readFile(sha, fetchContent, localForage, parseText);
|
||||
return content;
|
||||
};
|
||||
|
||||
async readFileMetadata(path: string, sha: string | null | undefined) {
|
||||
const fetchFileMetadata = async () => {
|
||||
try {
|
||||
const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
|
||||
url: `${this.repoURL}/commits`,
|
||||
params: { path, include: this.branch },
|
||||
});
|
||||
const commit = values[0];
|
||||
return {
|
||||
author: commit.author.user
|
||||
? commit.author.user.display_name || commit.author.user.nickname
|
||||
: commit.author.raw,
|
||||
updatedOn: commit.date,
|
||||
};
|
||||
} catch (e) {
|
||||
return { author: '', updatedOn: '' };
|
||||
}
|
||||
};
|
||||
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
|
||||
return fileMetadata;
|
||||
}
|
||||
|
||||
async isShaExistsInBranch(branch: string, sha: string) {
|
||||
const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
|
||||
url: `${this.repoURL}/commits`,
|
||||
params: { include: branch, pagelen: '100' },
|
||||
}).catch(e => {
|
||||
console.info(`Failed getting commits for branch '${branch}'`, e);
|
||||
return [];
|
||||
});
|
||||
|
||||
return values.some(v => v.hash === sha);
|
||||
}
|
||||
|
||||
getEntriesAndCursor = (jsonResponse: BitBucketSrcResult) => {
|
||||
const {
|
||||
size: count,
|
||||
page,
|
||||
pagelen: pageSize,
|
||||
next,
|
||||
previous: prev,
|
||||
values: entries,
|
||||
} = jsonResponse;
|
||||
const pageCount = pageSize && count ? Math.ceil(count / pageSize) : undefined;
|
||||
return {
|
||||
entries,
|
||||
cursor: Cursor.create({
|
||||
actions: [...(next ? ['next'] : []), ...(prev ? ['prev'] : [])],
|
||||
meta: { page, count, pageSize, pageCount },
|
||||
data: { links: { next, prev } },
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
listFiles = async (path: string, depth = 1, pagelen: number, branch: string) => {
|
||||
const node = await this.branchCommitSha(branch);
|
||||
const result: BitBucketSrcResult = await this.requestJSON({
|
||||
url: `${this.repoURL}/src/${node}/${path}`,
|
||||
params: {
|
||||
max_depth: `${depth}`,
|
||||
pagelen: `${pagelen}`,
|
||||
},
|
||||
}).catch(replace404WithEmptyResponse);
|
||||
const { entries, cursor } = this.getEntriesAndCursor(result);
|
||||
|
||||
return { entries: this.processFiles(entries), cursor: cursor as Cursor };
|
||||
};
|
||||
|
||||
traverseCursor = async (
|
||||
cursor: Cursor,
|
||||
action: string,
|
||||
): Promise<{
|
||||
cursor: Cursor;
|
||||
entries: { path: string; name: string; type: string; id: string }[];
|
||||
}> =>
|
||||
flow([
|
||||
this.requestJSON,
|
||||
then(this.getEntriesAndCursor),
|
||||
then<
|
||||
{ cursor: Cursor; entries: BitBucketFile[] },
|
||||
{ cursor: Cursor; entries: BitBucketFile[] }
|
||||
>(({ cursor: newCursor, entries }) => ({
|
||||
cursor: newCursor,
|
||||
entries: this.processFiles(entries),
|
||||
})),
|
||||
])((cursor.data?.links as Record<string, unknown>)[action]);
|
||||
|
||||
listAllFiles = async (path: string, depth: number, branch: string) => {
|
||||
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(
|
||||
path,
|
||||
depth,
|
||||
100,
|
||||
branch,
|
||||
);
|
||||
const entries = [...initialEntries];
|
||||
let currentCursor = initialCursor;
|
||||
while (currentCursor && currentCursor.actions!.has('next')) {
|
||||
const { cursor: newCursor, entries: newEntries } = await this.traverseCursor(
|
||||
currentCursor,
|
||||
'next',
|
||||
);
|
||||
entries.push(...newEntries);
|
||||
currentCursor = newCursor;
|
||||
}
|
||||
return this.processFiles(entries);
|
||||
};
|
||||
|
||||
async uploadFiles(
|
||||
files: { path: string; newPath?: string; delete?: boolean }[],
|
||||
{
|
||||
commitMessage,
|
||||
branch,
|
||||
parentSha,
|
||||
}: { commitMessage: string; branch: string; parentSha?: string },
|
||||
) {
|
||||
const formData = new FormData();
|
||||
const toMove: { from: string; to: string; contentBlob: Blob }[] = [];
|
||||
files.forEach(file => {
|
||||
if (file.delete) {
|
||||
// delete the file
|
||||
formData.append('files', file.path);
|
||||
} else if (file.newPath) {
|
||||
const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
|
||||
toMove.push({ from: file.path, to: file.newPath, contentBlob });
|
||||
} else {
|
||||
// add/modify the file
|
||||
const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
|
||||
// Third param is filename header, in case path is `message`, `branch`, etc.
|
||||
formData.append(file.path, contentBlob, basename(file.path));
|
||||
}
|
||||
});
|
||||
for (const { from, to, contentBlob } of toMove) {
|
||||
const sourceDir = dirname(from);
|
||||
const destDir = dirname(to);
|
||||
const filesBranch = parentSha ? this.branch : branch;
|
||||
const files = await this.listAllFiles(sourceDir, 100, filesBranch);
|
||||
for (const file of files) {
|
||||
// to move a file in Bitbucket we need to delete the old path
|
||||
// and upload the file content to the new path
|
||||
// NOTE: this is very wasteful, and also the Bitbucket `diff` API
|
||||
// reports these files as deleted+added instead of renamed
|
||||
// delete current path
|
||||
formData.append('files', file.path);
|
||||
// create in new path
|
||||
const content =
|
||||
file.path === from
|
||||
? contentBlob
|
||||
: await this.readFile(file.path, null, {
|
||||
branch: filesBranch,
|
||||
parseText: false,
|
||||
});
|
||||
formData.append(file.path.replace(sourceDir, destDir), content, basename(file.path));
|
||||
}
|
||||
}
|
||||
|
||||
if (commitMessage) {
|
||||
formData.append('message', commitMessage);
|
||||
}
|
||||
if (this.commitAuthor) {
|
||||
const { name, email } = this.commitAuthor;
|
||||
formData.append('author', `${name} <${email}>`);
|
||||
}
|
||||
|
||||
formData.append('branch', branch);
|
||||
|
||||
if (parentSha) {
|
||||
formData.append('parents', parentSha);
|
||||
}
|
||||
|
||||
try {
|
||||
await this.requestText({
|
||||
url: `${this.repoURL}/src`,
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
const message = error.message || '';
|
||||
// very descriptive message from Bitbucket
|
||||
if (parentSha && message.includes('Something went wrong')) {
|
||||
await throwOnConflictingBranches(branch, name => this.getBranch(name), API_NAME);
|
||||
}
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
async persistFiles(
|
||||
dataFiles: DataFile[],
|
||||
mediaFiles: (
|
||||
| {
|
||||
fileObj: File;
|
||||
size: number;
|
||||
sha: string;
|
||||
raw: string;
|
||||
path: string;
|
||||
}
|
||||
| AssetProxy
|
||||
)[],
|
||||
options: PersistOptions,
|
||||
) {
|
||||
const files = [...dataFiles, ...mediaFiles];
|
||||
return this.uploadFiles(files, { commitMessage: options.commitMessage, branch: this.branch });
|
||||
}
|
||||
|
||||
async getDifferences(source: string, destination: string = this.branch) {
|
||||
if (source === destination) {
|
||||
return [];
|
||||
}
|
||||
const rawDiff = await this.requestText({
|
||||
url: `${this.repoURL}/diff/${source}..${destination}`,
|
||||
params: {
|
||||
binary: 'false',
|
||||
},
|
||||
});
|
||||
|
||||
const diffs = parse(rawDiff).map(d => {
|
||||
const oldPath = d.oldPath?.replace(/b\//, '') || '';
|
||||
const newPath = d.newPath?.replace(/b\//, '') || '';
|
||||
const path = newPath || (oldPath as string);
|
||||
return {
|
||||
oldPath,
|
||||
newPath,
|
||||
status: d.status,
|
||||
newFile: d.status === 'added',
|
||||
path,
|
||||
binary: d.binary || /.svg$/.test(path),
|
||||
};
|
||||
});
|
||||
return diffs;
|
||||
}
|
||||
|
||||
deleteFiles = (paths: string[], message: string) => {
|
||||
const body = new FormData();
|
||||
paths.forEach(path => {
|
||||
body.append('files', path);
|
||||
});
|
||||
body.append('branch', this.branch);
|
||||
if (message) {
|
||||
body.append('message', message);
|
||||
}
|
||||
if (this.commitAuthor) {
|
||||
const { name, email } = this.commitAuthor;
|
||||
body.append('author', `${name} <${email}>`);
|
||||
}
|
||||
|
||||
return this.request(
|
||||
unsentRequest.withBody(body, unsentRequest.withMethod('POST', `${this.repoURL}/src`)),
|
||||
);
|
||||
};
|
||||
}
|
96
packages/core/src/backends/bitbucket/AuthenticationPage.tsx
Normal file
96
packages/core/src/backends/bitbucket/AuthenticationPage.tsx
Normal file
@ -0,0 +1,96 @@
|
||||
import { styled } from '@mui/material/styles';
|
||||
import React, { useCallback, useMemo, useState } from 'react';
|
||||
|
||||
import AuthenticationPage from '@staticcms/core/components/UI/AuthenticationPage';
|
||||
import Icon from '@staticcms/core/components/UI/Icon';
|
||||
import { ImplicitAuthenticator, NetlifyAuthenticator } from '@staticcms/core/lib/auth';
|
||||
|
||||
import type { MouseEvent } from 'react';
|
||||
import type { AuthenticationPageProps, TranslatedProps } from '@staticcms/core/interface';
|
||||
|
||||
const LoginButtonIcon = styled(Icon)`
|
||||
margin-right: 18px;
|
||||
`;
|
||||
|
||||
const BitbucketAuthenticationPage = ({
|
||||
inProgress = false,
|
||||
config,
|
||||
base_url,
|
||||
siteId,
|
||||
authEndpoint,
|
||||
clearHash,
|
||||
onLogin,
|
||||
t,
|
||||
}: TranslatedProps<AuthenticationPageProps>) => {
|
||||
const [loginError, setLoginError] = useState<string | null>(null);
|
||||
|
||||
const [auth, authSettings] = useMemo(() => {
|
||||
const { auth_type: authType = '' } = config.backend;
|
||||
|
||||
if (authType === 'implicit') {
|
||||
const {
|
||||
base_url = 'https://bitbucket.org',
|
||||
auth_endpoint = 'site/oauth2/authorize',
|
||||
app_id = '',
|
||||
} = config.backend;
|
||||
|
||||
const implicityAuth = new ImplicitAuthenticator({
|
||||
base_url,
|
||||
auth_endpoint,
|
||||
app_id,
|
||||
clearHash,
|
||||
});
|
||||
|
||||
// Complete implicit authentication if we were redirected back to from the provider.
|
||||
implicityAuth.completeAuth((err, data) => {
|
||||
if (err) {
|
||||
setLoginError(err.toString());
|
||||
return;
|
||||
} else if (data) {
|
||||
onLogin(data);
|
||||
}
|
||||
});
|
||||
|
||||
return [implicityAuth, { scope: 'repository:write' }];
|
||||
} else {
|
||||
return [
|
||||
new NetlifyAuthenticator({
|
||||
base_url,
|
||||
site_id:
|
||||
document.location.host.split(':')[0] === 'localhost' ? 'cms.netlify.com' : siteId,
|
||||
auth_endpoint: authEndpoint,
|
||||
}),
|
||||
{ provider: 'bitbucket', scope: 'repo' },
|
||||
] as const;
|
||||
}
|
||||
}, [authEndpoint, base_url, clearHash, config.backend, onLogin, siteId]);
|
||||
|
||||
const handleLogin = useCallback(
|
||||
(e: MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault();
|
||||
auth.authenticate(authSettings, (err, data) => {
|
||||
if (err) {
|
||||
setLoginError(err.toString());
|
||||
} else if (data) {
|
||||
onLogin(data);
|
||||
}
|
||||
});
|
||||
},
|
||||
[auth, authSettings, onLogin],
|
||||
);
|
||||
|
||||
return (
|
||||
<AuthenticationPage
|
||||
onLogin={handleLogin}
|
||||
loginDisabled={inProgress}
|
||||
loginErrorMessage={loginError}
|
||||
logoUrl={config.logo_url}
|
||||
siteUrl={config.site_url}
|
||||
icon={<LoginButtonIcon type="bitbucket" />}
|
||||
buttonContent={inProgress ? t('auth.loggingIn') : t('auth.loginWithBitbucket')}
|
||||
t={t}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default BitbucketAuthenticationPage;
|
103
packages/core/src/backends/bitbucket/git-lfs-client.ts
Normal file
103
packages/core/src/backends/bitbucket/git-lfs-client.ts
Normal file
@ -0,0 +1,103 @@
|
||||
import minimatch from 'minimatch';
|
||||
|
||||
import { unsentRequest } from '@staticcms/core/lib/util';
|
||||
|
||||
import type { ApiRequest, PointerFile } from '@staticcms/core/lib/util';
|
||||
|
||||
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
|
||||
|
||||
interface LfsBatchAction {
|
||||
href: string;
|
||||
header?: { [key: string]: string };
|
||||
expires_in?: number;
|
||||
expires_at?: string;
|
||||
}
|
||||
|
||||
interface LfsBatchObject {
|
||||
oid: string;
|
||||
size: number;
|
||||
}
|
||||
|
||||
interface LfsBatchObjectUpload extends LfsBatchObject {
|
||||
actions?: {
|
||||
upload: LfsBatchAction;
|
||||
verify?: LfsBatchAction;
|
||||
};
|
||||
}
|
||||
|
||||
interface LfsBatchObjectError extends LfsBatchObject {
|
||||
error: {
|
||||
code: number;
|
||||
message: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface LfsBatchUploadResponse {
|
||||
transfer?: string;
|
||||
objects: (LfsBatchObjectUpload | LfsBatchObjectError)[];
|
||||
}
|
||||
|
||||
export default class GitLfsClient {
|
||||
private static defaultContentHeaders = {
|
||||
Accept: 'application/vnd.git-lfs+json',
|
||||
['Content-Type']: 'application/vnd.git-lfs+json',
|
||||
};
|
||||
|
||||
constructor(
|
||||
public enabled: boolean,
|
||||
public rootURL: string,
|
||||
public patterns: string[],
|
||||
private makeAuthorizedRequest: MakeAuthorizedRequest,
|
||||
) {}
|
||||
|
||||
matchPath(path: string) {
|
||||
return this.patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
|
||||
}
|
||||
|
||||
async uploadResource(pointer: PointerFile, resource: Blob): Promise<string> {
|
||||
const requests = await this.getResourceUploadRequests([pointer]);
|
||||
for (const request of requests) {
|
||||
await this.doUpload(request.actions!.upload, resource);
|
||||
if (request.actions!.verify) {
|
||||
await this.doVerify(request.actions!.verify, request);
|
||||
}
|
||||
}
|
||||
return pointer.sha;
|
||||
}
|
||||
|
||||
private async doUpload(upload: LfsBatchAction, resource: Blob) {
|
||||
await unsentRequest.fetchWithTimeout(decodeURI(upload.href), {
|
||||
method: 'PUT',
|
||||
body: resource,
|
||||
headers: upload.header,
|
||||
});
|
||||
}
|
||||
private async doVerify(verify: LfsBatchAction, object: LfsBatchObject) {
|
||||
this.makeAuthorizedRequest({
|
||||
url: decodeURI(verify.href),
|
||||
method: 'POST',
|
||||
headers: { ...GitLfsClient.defaultContentHeaders, ...verify.header },
|
||||
body: JSON.stringify({ oid: object.oid, size: object.size }),
|
||||
});
|
||||
}
|
||||
|
||||
private async getResourceUploadRequests(objects: PointerFile[]): Promise<LfsBatchObjectUpload[]> {
|
||||
const response = await this.makeAuthorizedRequest({
|
||||
url: `${this.rootURL}/objects/batch`,
|
||||
method: 'POST',
|
||||
headers: GitLfsClient.defaultContentHeaders,
|
||||
body: JSON.stringify({
|
||||
operation: 'upload',
|
||||
transfers: ['basic'],
|
||||
objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
|
||||
}),
|
||||
});
|
||||
return ((await response.json()) as LfsBatchUploadResponse).objects.filter(object => {
|
||||
if ('error' in object) {
|
||||
console.error(object.error);
|
||||
return false;
|
||||
}
|
||||
return object.actions;
|
||||
});
|
||||
}
|
||||
}
|
541
packages/core/src/backends/bitbucket/implementation.ts
Normal file
541
packages/core/src/backends/bitbucket/implementation.ts
Normal file
@ -0,0 +1,541 @@
|
||||
import { stripIndent } from 'common-tags';
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import semaphore from 'semaphore';
|
||||
|
||||
import { NetlifyAuthenticator } from '@staticcms/core/lib/auth';
|
||||
import {
|
||||
AccessTokenError,
|
||||
allEntriesByFolder,
|
||||
asyncLock,
|
||||
basename,
|
||||
blobToFileObj,
|
||||
CURSOR_COMPATIBILITY_SYMBOL,
|
||||
entriesByFiles,
|
||||
entriesByFolder,
|
||||
filterByExtension,
|
||||
getBlobSHA,
|
||||
getLargeMediaFilteredMediaFiles,
|
||||
getLargeMediaPatternsFromGitAttributesFile,
|
||||
getMediaAsBlob,
|
||||
getMediaDisplayURL,
|
||||
getPointerFileForMediaFileObj,
|
||||
localForage,
|
||||
runWithLock,
|
||||
unsentRequest,
|
||||
} from '@staticcms/core/lib/util';
|
||||
import API, { API_NAME } from './API';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import GitLfsClient from './git-lfs-client';
|
||||
|
||||
import type { Semaphore } from 'semaphore';
|
||||
import type {
|
||||
BackendEntry,
|
||||
BackendClass,
|
||||
Config,
|
||||
Credentials,
|
||||
DisplayURL,
|
||||
ImplementationFile,
|
||||
PersistOptions,
|
||||
User,
|
||||
} from '@staticcms/core/interface';
|
||||
import type { ApiRequest, AsyncLock, Cursor, FetchError } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
|
||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||
|
||||
const STATUS_PAGE = 'https://bitbucket.status.atlassian.com';
|
||||
const BITBUCKET_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
|
||||
const BITBUCKET_OPERATIONAL_UNITS = ['API', 'Authentication and user management', 'Git LFS'];
|
||||
type BitbucketStatusComponent = {
|
||||
id: string;
|
||||
name: string;
|
||||
status: string;
|
||||
};
|
||||
|
||||
// Implementation wrapper class
|
||||
export default class BitbucketBackend implements BackendClass {
|
||||
lock: AsyncLock;
|
||||
api: API | null;
|
||||
updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
|
||||
options: {
|
||||
proxied: boolean;
|
||||
API: API | null;
|
||||
updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
|
||||
};
|
||||
repo: string;
|
||||
branch: string;
|
||||
apiRoot: string;
|
||||
baseUrl: string;
|
||||
siteId: string;
|
||||
token: string | null;
|
||||
mediaFolder?: string;
|
||||
refreshToken?: string;
|
||||
refreshedTokenPromise?: Promise<string>;
|
||||
authenticator?: NetlifyAuthenticator;
|
||||
_mediaDisplayURLSem?: Semaphore;
|
||||
largeMediaURL: string;
|
||||
_largeMediaClientPromise?: Promise<GitLfsClient>;
|
||||
authType: string;
|
||||
|
||||
constructor(config: Config, options = {}) {
|
||||
this.options = {
|
||||
proxied: false,
|
||||
API: null,
|
||||
updateUserCredentials: async () => null,
|
||||
...options,
|
||||
};
|
||||
|
||||
if (
|
||||
!this.options.proxied &&
|
||||
(config.backend.repo === null || config.backend.repo === undefined)
|
||||
) {
|
||||
throw new Error('The BitBucket backend needs a "repo" in the backend configuration.');
|
||||
}
|
||||
|
||||
this.api = this.options.API || null;
|
||||
|
||||
this.updateUserCredentials = this.options.updateUserCredentials;
|
||||
|
||||
this.repo = config.backend.repo || '';
|
||||
this.branch = config.backend.branch || 'main';
|
||||
this.apiRoot = config.backend.api_root || 'https://api.bitbucket.org/2.0';
|
||||
this.baseUrl = config.base_url || '';
|
||||
this.siteId = config.site_id || '';
|
||||
this.largeMediaURL =
|
||||
config.backend.large_media_url || `https://bitbucket.org/${config.backend.repo}/info/lfs`;
|
||||
this.token = '';
|
||||
this.mediaFolder = config.media_folder;
|
||||
this.lock = asyncLock();
|
||||
this.authType = config.backend.auth_type || '';
|
||||
}
|
||||
|
||||
isGitBackend() {
|
||||
return true;
|
||||
}
|
||||
|
||||
async status() {
|
||||
const api = await fetch(BITBUCKET_STATUS_ENDPOINT)
|
||||
.then(res => res.json())
|
||||
.then(res => {
|
||||
return res['components']
|
||||
.filter((statusComponent: BitbucketStatusComponent) =>
|
||||
BITBUCKET_OPERATIONAL_UNITS.includes(statusComponent.name),
|
||||
)
|
||||
.every(
|
||||
(statusComponent: BitbucketStatusComponent) => statusComponent.status === 'operational',
|
||||
);
|
||||
})
|
||||
.catch(e => {
|
||||
console.warn('Failed getting BitBucket status', e);
|
||||
return true;
|
||||
});
|
||||
|
||||
let auth = false;
|
||||
// no need to check auth if api is down
|
||||
if (api) {
|
||||
auth =
|
||||
(await this.api
|
||||
?.user()
|
||||
.then(user => !!user)
|
||||
.catch(e => {
|
||||
console.warn('Failed getting Bitbucket user', e);
|
||||
return false;
|
||||
})) || false;
|
||||
}
|
||||
|
||||
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
return AuthenticationPage;
|
||||
}
|
||||
|
||||
setUser(user: { token: string }) {
|
||||
this.token = user.token;
|
||||
this.api = new API({
|
||||
requestFunction: this.apiRequestFunction,
|
||||
branch: this.branch,
|
||||
repo: this.repo,
|
||||
});
|
||||
}
|
||||
|
||||
requestFunction = async (req: ApiRequest) => {
|
||||
const token = await this.getToken();
|
||||
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
|
||||
return unsentRequest.performRequest(authorizedRequest);
|
||||
};
|
||||
|
||||
restoreUser(user: User) {
|
||||
return this.authenticate(user);
|
||||
}
|
||||
|
||||
async authenticate(state: Credentials) {
|
||||
this.token = state.token as string;
|
||||
this.refreshToken = state.refresh_token;
|
||||
this.api = new API({
|
||||
requestFunction: this.apiRequestFunction,
|
||||
branch: this.branch,
|
||||
repo: this.repo,
|
||||
apiRoot: this.apiRoot,
|
||||
});
|
||||
|
||||
const isCollab = await this.api.hasWriteAccess().catch(error => {
|
||||
error.message = stripIndent`
|
||||
Repo "${this.repo}" not found.
|
||||
|
||||
Please ensure the repo information is spelled correctly.
|
||||
|
||||
If the repo is private, make sure you're logged into a Bitbucket account with access.
|
||||
`;
|
||||
throw error;
|
||||
});
|
||||
|
||||
// Unauthorized user
|
||||
if (!isCollab) {
|
||||
throw new Error('Your BitBucket user account does not have access to this repo.');
|
||||
}
|
||||
|
||||
const user = await this.api.user();
|
||||
|
||||
// Authorized user
|
||||
return {
|
||||
...user,
|
||||
name: user.display_name,
|
||||
login: user.username,
|
||||
token: state.token,
|
||||
avatar_url: user.links.avatar.href,
|
||||
refresh_token: state.refresh_token,
|
||||
};
|
||||
}
|
||||
|
||||
getRefreshedAccessToken() {
|
||||
if (this.authType === 'implicit') {
|
||||
throw new AccessTokenError(`Can't refresh access token when using implicit auth`);
|
||||
}
|
||||
if (this.refreshedTokenPromise) {
|
||||
return this.refreshedTokenPromise;
|
||||
}
|
||||
|
||||
// instantiating a new Authenticator on each refresh isn't ideal,
|
||||
if (!this.authenticator) {
|
||||
const cfg = {
|
||||
base_url: this.baseUrl,
|
||||
site_id: this.siteId,
|
||||
};
|
||||
this.authenticator = new NetlifyAuthenticator(cfg);
|
||||
}
|
||||
|
||||
this.refreshedTokenPromise = this.authenticator!.refresh({
|
||||
provider: 'bitbucket',
|
||||
refresh_token: this.refreshToken as string,
|
||||
})?.then(({ token, refresh_token }: { token: string; refresh_token: string }) => {
|
||||
this.token = token;
|
||||
this.refreshToken = refresh_token;
|
||||
this.refreshedTokenPromise = undefined;
|
||||
|
||||
this.updateUserCredentials({ token, refresh_token });
|
||||
return token;
|
||||
});
|
||||
|
||||
return this.refreshedTokenPromise;
|
||||
}
|
||||
|
||||
logout() {
|
||||
this.token = null;
|
||||
return;
|
||||
}
|
||||
|
||||
getToken() {
|
||||
if (this.refreshedTokenPromise) {
|
||||
return this.refreshedTokenPromise;
|
||||
}
|
||||
|
||||
return Promise.resolve(this.token);
|
||||
}
|
||||
|
||||
apiRequestFunction = async (req: ApiRequest) => {
|
||||
const token = (
|
||||
this.refreshedTokenPromise ? await this.refreshedTokenPromise : this.token
|
||||
) as string;
|
||||
|
||||
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
|
||||
const response: Response = await unsentRequest.performRequest(authorizedRequest);
|
||||
if (response.status === 401) {
|
||||
const json = await response.json().catch(() => null);
|
||||
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
|
||||
const newToken = await this.getRefreshedAccessToken();
|
||||
const reqWithNewToken = unsentRequest.withHeaders(
|
||||
{
|
||||
Authorization: `Bearer ${newToken}`,
|
||||
},
|
||||
req,
|
||||
) as ApiRequest;
|
||||
return unsentRequest.performRequest(reqWithNewToken);
|
||||
}
|
||||
}
|
||||
return response;
|
||||
};
|
||||
|
||||
async entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
let cursor: Cursor;
|
||||
|
||||
const listFiles = () =>
|
||||
this.api!.listFiles(folder, depth, 20, this.branch).then(({ entries, cursor: c }) => {
|
||||
cursor = c.mergeMeta({ extension });
|
||||
return entries.filter(e => filterByExtension(e, extension));
|
||||
});
|
||||
|
||||
const head = await this.api!.defaultBranchCommitSha();
|
||||
const readFile = (path: string, id: string | null | undefined) => {
|
||||
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
||||
};
|
||||
|
||||
const files = await entriesByFolder(
|
||||
listFiles,
|
||||
readFile,
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||
return files;
|
||||
}
|
||||
|
||||
async listAllFiles(folder: string, extension: string, depth: number) {
|
||||
const files = await this.api!.listAllFiles(folder, depth, this.branch);
|
||||
const filtered = files.filter(file => filterByExtension(file, extension));
|
||||
return filtered;
|
||||
}
|
||||
|
||||
async allEntriesByFolder(folder: string, extension: string, depth: number) {
|
||||
const head = await this.api!.defaultBranchCommitSha();
|
||||
|
||||
const readFile = (path: string, id: string | null | undefined) => {
|
||||
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
||||
};
|
||||
|
||||
const files = await allEntriesByFolder({
|
||||
listAllFiles: () => this.listAllFiles(folder, extension, depth),
|
||||
readFile,
|
||||
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
|
||||
apiName: API_NAME,
|
||||
branch: this.branch,
|
||||
localForage,
|
||||
folder,
|
||||
extension,
|
||||
depth,
|
||||
getDefaultBranch: () => Promise.resolve({ name: this.branch, sha: head }),
|
||||
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
|
||||
getDifferences: (source, destination) => this.api!.getDifferences(source, destination),
|
||||
getFileId: path => Promise.resolve(this.api!.getFileId(head, path)),
|
||||
filterFile: file => filterByExtension(file, extension),
|
||||
});
|
||||
return files;
|
||||
}
|
||||
|
||||
async entriesByFiles(files: ImplementationFile[]) {
|
||||
const head = await this.api!.defaultBranchCommitSha();
|
||||
const readFile = (path: string, id: string | null | undefined) => {
|
||||
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
||||
};
|
||||
|
||||
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
|
||||
}
|
||||
|
||||
getEntry(path: string) {
|
||||
return this.api!.readFile(path).then(data => ({
|
||||
file: { path, id: null },
|
||||
data: data as string,
|
||||
}));
|
||||
}
|
||||
|
||||
async getMedia(mediaFolder = this.mediaFolder) {
|
||||
if (!mediaFolder) {
|
||||
return [];
|
||||
}
|
||||
return this.api!.listAllFiles(mediaFolder, 1, this.branch).then(files =>
|
||||
files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
|
||||
);
|
||||
}
|
||||
|
||||
getLargeMediaClient() {
|
||||
if (!this._largeMediaClientPromise) {
|
||||
this._largeMediaClientPromise = (async (): Promise<GitLfsClient> => {
|
||||
const patterns = await this.api!.readFile('.gitattributes')
|
||||
.then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
|
||||
.catch((err: FetchError) => {
|
||||
if (err.status === 404) {
|
||||
console.info('This 404 was expected and handled appropriately.');
|
||||
} else {
|
||||
console.error(err);
|
||||
}
|
||||
return [];
|
||||
});
|
||||
|
||||
return new GitLfsClient(
|
||||
!!(this.largeMediaURL && patterns.length > 0),
|
||||
this.largeMediaURL,
|
||||
patterns,
|
||||
this.requestFunction,
|
||||
);
|
||||
})();
|
||||
}
|
||||
return this._largeMediaClientPromise;
|
||||
}
|
||||
|
||||
getMediaDisplayURL(displayURL: DisplayURL) {
|
||||
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
|
||||
return getMediaDisplayURL(
|
||||
displayURL,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this._mediaDisplayURLSem,
|
||||
);
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const name = basename(path);
|
||||
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
const id = await getBlobSHA(fileObj);
|
||||
|
||||
return {
|
||||
id,
|
||||
displayURL: url,
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
async persistEntry(entry: BackendEntry, options: PersistOptions) {
|
||||
const client = await this.getLargeMediaClient();
|
||||
// persistEntry is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
async () =>
|
||||
this.api!.persistFiles(
|
||||
entry.dataFiles,
|
||||
client.enabled
|
||||
? await getLargeMediaFilteredMediaFiles(client, entry.assets)
|
||||
: entry.assets,
|
||||
options,
|
||||
),
|
||||
'Failed to acquire persist entry lock',
|
||||
);
|
||||
}
|
||||
|
||||
async persistMedia(
|
||||
mediaFile:
|
||||
| {
|
||||
fileObj: File;
|
||||
size: number;
|
||||
sha: string;
|
||||
raw: string;
|
||||
path: string;
|
||||
}
|
||||
| AssetProxy,
|
||||
options: PersistOptions,
|
||||
) {
|
||||
const { fileObj, path } = mediaFile;
|
||||
const displayURL = URL.createObjectURL(fileObj as Blob);
|
||||
const client = await this.getLargeMediaClient();
|
||||
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
|
||||
if (!client.enabled || !client.matchPath(fixedPath)) {
|
||||
return this._persistMedia(mediaFile, options);
|
||||
}
|
||||
|
||||
const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
|
||||
return {
|
||||
...(await this._persistMedia(persistMediaArgument, options)),
|
||||
displayURL,
|
||||
};
|
||||
}
|
||||
|
||||
async _persistMedia(
|
||||
mediaFile:
|
||||
| {
|
||||
fileObj: File;
|
||||
size: number;
|
||||
sha: string;
|
||||
raw: string;
|
||||
path: string;
|
||||
}
|
||||
| AssetProxy,
|
||||
options: PersistOptions,
|
||||
) {
|
||||
const fileObj = mediaFile.fileObj as File;
|
||||
|
||||
const [id] = await Promise.all([
|
||||
getBlobSHA(fileObj),
|
||||
this.api!.persistFiles([], [mediaFile], options),
|
||||
]);
|
||||
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
|
||||
return {
|
||||
displayURL: url,
|
||||
path: trimStart(mediaFile.path, '/k'),
|
||||
name: fileObj!.name,
|
||||
size: fileObj!.size,
|
||||
id,
|
||||
file: fileObj,
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
deleteFiles(paths: string[], commitMessage: string) {
|
||||
return this.api!.deleteFiles(paths, commitMessage);
|
||||
}
|
||||
|
||||
traverseCursor(cursor: Cursor, action: string) {
|
||||
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
|
||||
const extension = cursor.meta?.extension as string | undefined;
|
||||
if (extension) {
|
||||
entries = entries.filter(e => filterByExtension(e, extension));
|
||||
newCursor = newCursor.mergeMeta({ extension });
|
||||
}
|
||||
const head = await this.api!.defaultBranchCommitSha();
|
||||
const readFile = (path: string, id: string | null | undefined) => {
|
||||
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
||||
};
|
||||
const entriesWithData = await entriesByFiles(
|
||||
entries,
|
||||
readFile,
|
||||
this.api!.readFileMetadata.bind(this.api)!,
|
||||
API_NAME,
|
||||
);
|
||||
|
||||
return {
|
||||
entries: entriesWithData,
|
||||
cursor: newCursor,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
async loadMediaFile(path: string, id: string, { branch }: { branch: string }) {
|
||||
const readFile = async (
|
||||
path: string,
|
||||
id: string | null | undefined,
|
||||
{ parseText }: { parseText: boolean },
|
||||
) => {
|
||||
const content = await this.api!.readFile(path, id, { branch, parseText });
|
||||
return content;
|
||||
};
|
||||
const blob = await getMediaAsBlob(path, id, readFile);
|
||||
const name = basename(path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
return {
|
||||
id: path,
|
||||
displayURL: URL.createObjectURL(fileObj),
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
};
|
||||
}
|
||||
}
|
3
packages/core/src/backends/bitbucket/index.ts
Normal file
3
packages/core/src/backends/bitbucket/index.ts
Normal file
@ -0,0 +1,3 @@
|
||||
export { default as BitbucketBackend } from './implementation';
|
||||
export { default as API } from './API';
|
||||
export { default as AuthenticationPage } from './AuthenticationPage';
|
134
packages/core/src/backends/git-gateway/AuthenticationPage.tsx
Normal file
134
packages/core/src/backends/git-gateway/AuthenticationPage.tsx
Normal file
@ -0,0 +1,134 @@
|
||||
import React, { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
|
||||
import AuthenticationPage from '@staticcms/core/components/UI/AuthenticationPage';
|
||||
|
||||
import type { AuthenticationPageProps, TranslatedProps, User } from '@staticcms/core/interface';
|
||||
|
||||
function useNetlifyIdentifyEvent(eventName: 'login', callback: (login: User) => void): void;
|
||||
function useNetlifyIdentifyEvent(eventName: 'logout', callback: () => void): void;
|
||||
function useNetlifyIdentifyEvent(eventName: 'error', callback: (err: Error) => void): void;
|
||||
function useNetlifyIdentifyEvent(
|
||||
eventName: 'login' | 'logout' | 'error',
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
callback: (input?: any) => void,
|
||||
): void {
|
||||
useEffect(() => {
|
||||
window.netlifyIdentity?.on(eventName, callback);
|
||||
}, [callback, eventName]);
|
||||
}
|
||||
|
||||
export interface GitGatewayAuthenticationPageProps
|
||||
extends TranslatedProps<AuthenticationPageProps> {
|
||||
handleAuth: (email: string, password: string) => Promise<User | string>;
|
||||
}
|
||||
|
||||
const GitGatewayAuthenticationPage = ({
|
||||
config,
|
||||
onLogin,
|
||||
t,
|
||||
}: GitGatewayAuthenticationPageProps) => {
|
||||
const [loggingIn, setLoggingIn] = useState(false);
|
||||
const [loggedIn, setLoggedIn] = useState(false);
|
||||
const [errors, setErrors] = useState<{
|
||||
identity?: string;
|
||||
server?: string;
|
||||
email?: string;
|
||||
password?: string;
|
||||
}>({});
|
||||
|
||||
useEffect(() => {
|
||||
if (!loggedIn && window.netlifyIdentity && window.netlifyIdentity.currentUser()) {
|
||||
setLoggingIn(true);
|
||||
setTimeout(() => {
|
||||
if (!window.netlifyIdentity) {
|
||||
setLoggingIn(false);
|
||||
return;
|
||||
}
|
||||
onLogin(window.netlifyIdentity.currentUser());
|
||||
setLoggedIn(true);
|
||||
window.netlifyIdentity.close();
|
||||
});
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const handleIdentityLogin = useCallback(
|
||||
(user: User) => {
|
||||
setLoggingIn(true);
|
||||
setTimeout(() => {
|
||||
onLogin(user);
|
||||
setLoggedIn(true);
|
||||
window.netlifyIdentity?.close();
|
||||
});
|
||||
},
|
||||
[onLogin],
|
||||
);
|
||||
|
||||
useNetlifyIdentifyEvent('login', handleIdentityLogin);
|
||||
|
||||
const handleIdentityLogout = useCallback(() => {
|
||||
window.netlifyIdentity?.open();
|
||||
}, []);
|
||||
|
||||
useNetlifyIdentifyEvent('logout', handleIdentityLogout);
|
||||
|
||||
const handleIdentityError = useCallback(
|
||||
(err: Error) => {
|
||||
if (err?.message?.match(/^Failed to load settings from.+\.netlify\/identity$/)) {
|
||||
window.netlifyIdentity?.close();
|
||||
setErrors({ identity: t('auth.errors.identitySettings') });
|
||||
}
|
||||
},
|
||||
[t],
|
||||
);
|
||||
|
||||
useNetlifyIdentifyEvent('error', handleIdentityError);
|
||||
|
||||
const handleIdentity = useCallback(() => {
|
||||
const user = window.netlifyIdentity?.currentUser();
|
||||
if (user) {
|
||||
setLoggingIn(true);
|
||||
setTimeout(() => {
|
||||
onLogin(user);
|
||||
setLoggedIn(true);
|
||||
});
|
||||
} else {
|
||||
window.netlifyIdentity?.open();
|
||||
}
|
||||
}, [onLogin]);
|
||||
|
||||
const pageContent = useMemo(() => {
|
||||
if (!window.netlifyIdentity) {
|
||||
return t('auth.errors.netlifyIdentityNotFound');
|
||||
}
|
||||
|
||||
if (errors.identity) {
|
||||
return (
|
||||
<a
|
||||
href="https://docs.netlify.com/visitor-access/git-gateway/#setup-and-settings"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
{errors.identity}
|
||||
</a>
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
}, [errors.identity, t]);
|
||||
|
||||
return (
|
||||
<AuthenticationPage
|
||||
key="git-gateway-auth"
|
||||
logoUrl={config.logo_url}
|
||||
siteUrl={config.site_url}
|
||||
onLogin={handleIdentity}
|
||||
buttonContent={t('auth.loginWithNetlifyIdentity')}
|
||||
pageContent={pageContent}
|
||||
loginDisabled={loggingIn}
|
||||
t={t}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default GitGatewayAuthenticationPage;
|
121
packages/core/src/backends/git-gateway/GitHubAPI.ts
Normal file
121
packages/core/src/backends/git-gateway/GitHubAPI.ts
Normal file
@ -0,0 +1,121 @@
|
||||
import { APIError } from '@staticcms/core/lib/util';
|
||||
import { API as GithubAPI } from '../github';
|
||||
|
||||
import type { FetchError } from '@staticcms/core/lib/util';
|
||||
import type { Config as GitHubConfig } from '../github/API';
|
||||
|
||||
type Config = GitHubConfig & {
|
||||
apiRoot: string;
|
||||
tokenPromise: () => Promise<string>;
|
||||
commitAuthor: { name: string };
|
||||
isLargeMedia: (filename: string) => Promise<boolean>;
|
||||
};
|
||||
|
||||
export default class API extends GithubAPI {
|
||||
tokenPromise: () => Promise<string>;
|
||||
commitAuthor: { name: string };
|
||||
isLargeMedia: (filename: string) => Promise<boolean>;
|
||||
|
||||
constructor(config: Config) {
|
||||
super(config);
|
||||
this.apiRoot = config.apiRoot;
|
||||
this.tokenPromise = config.tokenPromise;
|
||||
this.commitAuthor = config.commitAuthor;
|
||||
this.isLargeMedia = config.isLargeMedia;
|
||||
this.repoURL = '';
|
||||
this.originRepoURL = '';
|
||||
}
|
||||
|
||||
hasWriteAccess() {
|
||||
return this.getDefaultBranch()
|
||||
.then(() => true)
|
||||
.catch((error: FetchError) => {
|
||||
if (error.status === 401) {
|
||||
if (error.message === 'Bad credentials') {
|
||||
throw new APIError(
|
||||
'Git Gateway Error: Please ask your site administrator to reissue the Git Gateway token.',
|
||||
error.status,
|
||||
'Git Gateway',
|
||||
);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (
|
||||
error.status === 404 &&
|
||||
(error.message === undefined || error.message === 'Unable to locate site configuration')
|
||||
) {
|
||||
throw new APIError(
|
||||
`Git Gateway Error: Please make sure Git Gateway is enabled on your site.`,
|
||||
error.status,
|
||||
'Git Gateway',
|
||||
);
|
||||
} else {
|
||||
console.error('Problem fetching repo data from Git Gateway');
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
requestHeaders(headers = {}) {
|
||||
return this.tokenPromise().then(jwtToken => {
|
||||
const baseHeader = {
|
||||
Authorization: `Bearer ${jwtToken}`,
|
||||
'Content-Type': 'application/json; charset=utf-8',
|
||||
...headers,
|
||||
};
|
||||
|
||||
return baseHeader;
|
||||
});
|
||||
}
|
||||
|
||||
handleRequestError(error: FetchError & { msg: string }, responseStatus: number) {
|
||||
throw new APIError(error.message || error.msg, responseStatus, 'Git Gateway');
|
||||
}
|
||||
|
||||
user() {
|
||||
return Promise.resolve({ login: '', ...this.commitAuthor });
|
||||
}
|
||||
|
||||
async getHeadReference(head: string) {
|
||||
if (!this.repoOwner) {
|
||||
// get the repo owner from the branch url
|
||||
// this is required for returning the full head reference, e.g. owner:head
|
||||
// when filtering pull requests based on the head
|
||||
const branch = await this.getDefaultBranch();
|
||||
const self = branch._links.self;
|
||||
const regex = new RegExp('https?://.+?/repos/(.+?)/');
|
||||
const owner = self.match(regex);
|
||||
this.repoOwner = owner ? owner[1] : '';
|
||||
}
|
||||
return super.getHeadReference(head);
|
||||
}
|
||||
|
||||
commit(message: string, changeTree: { parentSha?: string; sha: string }) {
|
||||
const commitParams: {
|
||||
message: string;
|
||||
tree: string;
|
||||
parents: string[];
|
||||
author?: { name: string; date: string };
|
||||
} = {
|
||||
message,
|
||||
tree: changeTree.sha,
|
||||
parents: changeTree.parentSha ? [changeTree.parentSha] : [],
|
||||
};
|
||||
|
||||
if (this.commitAuthor) {
|
||||
commitParams.author = {
|
||||
...this.commitAuthor,
|
||||
date: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
return this.request('/git/commits', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(commitParams),
|
||||
});
|
||||
}
|
||||
|
||||
nextUrlProcessor() {
|
||||
return (url: string) => url.replace(/^(?:[a-z]+:\/\/.+?\/.+?\/.+?\/)/, `${this.apiRoot}/`);
|
||||
}
|
||||
}
|
30
packages/core/src/backends/git-gateway/GitLabAPI.ts
Normal file
30
packages/core/src/backends/git-gateway/GitLabAPI.ts
Normal file
@ -0,0 +1,30 @@
|
||||
import { unsentRequest } from '@staticcms/core/lib/util';
|
||||
import { API as GitlabAPI } from '../gitlab';
|
||||
|
||||
import type { Config as GitLabConfig, CommitAuthor } from '../gitlab/API';
|
||||
import type { ApiRequest } from '@staticcms/core/lib/util';
|
||||
|
||||
type Config = GitLabConfig & { tokenPromise: () => Promise<string>; commitAuthor: CommitAuthor };
|
||||
|
||||
export default class API extends GitlabAPI {
|
||||
tokenPromise: () => Promise<string>;
|
||||
|
||||
constructor(config: Config) {
|
||||
super(config);
|
||||
this.tokenPromise = config.tokenPromise;
|
||||
this.commitAuthor = config.commitAuthor;
|
||||
this.repoURL = '';
|
||||
}
|
||||
|
||||
withAuthorizationHeaders = async (req: ApiRequest) => {
|
||||
const token = await this.tokenPromise();
|
||||
return unsentRequest.withHeaders(
|
||||
{
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
req,
|
||||
);
|
||||
};
|
||||
|
||||
hasWriteAccess = () => Promise.resolve(true);
|
||||
}
|
568
packages/core/src/backends/git-gateway/implementation.tsx
Normal file
568
packages/core/src/backends/git-gateway/implementation.tsx
Normal file
@ -0,0 +1,568 @@
|
||||
import ini from 'ini';
|
||||
import jwtDecode from 'jwt-decode';
|
||||
import get from 'lodash/get';
|
||||
import intersection from 'lodash/intersection';
|
||||
import pick from 'lodash/pick';
|
||||
import React, { useCallback } from 'react';
|
||||
|
||||
import {
|
||||
AccessTokenError,
|
||||
APIError,
|
||||
basename,
|
||||
entriesByFiles,
|
||||
getLargeMediaFilteredMediaFiles,
|
||||
getLargeMediaPatternsFromGitAttributesFile,
|
||||
getPointerFileForMediaFileObj,
|
||||
parsePointerFile,
|
||||
unsentRequest,
|
||||
} from '@staticcms/core/lib/util';
|
||||
import { API as BitBucketAPI, BitbucketBackend } from '../bitbucket';
|
||||
import { GitHubBackend } from '../github';
|
||||
import { GitLabBackend } from '../gitlab';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import GitHubAPI from './GitHubAPI';
|
||||
import GitLabAPI from './GitLabAPI';
|
||||
import { getClient } from './netlify-lfs-client';
|
||||
|
||||
import type {
|
||||
AuthenticationPageProps,
|
||||
BackendClass,
|
||||
BackendEntry,
|
||||
Config,
|
||||
Credentials,
|
||||
DisplayURL,
|
||||
DisplayURLObject,
|
||||
ImplementationFile,
|
||||
PersistOptions,
|
||||
TranslatedProps,
|
||||
User,
|
||||
} from '@staticcms/core/interface';
|
||||
import type { ApiRequest, Cursor } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
import type { Client } from './netlify-lfs-client';
|
||||
|
||||
const STATUS_PAGE = 'https://www.netlifystatus.com';
|
||||
const GIT_GATEWAY_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
|
||||
const GIT_GATEWAY_OPERATIONAL_UNITS = ['Git Gateway'];
|
||||
type GitGatewayStatus = {
|
||||
id: string;
|
||||
name: string;
|
||||
status: string;
|
||||
};
|
||||
|
||||
type NetlifyIdentity = {
|
||||
logout: () => void;
|
||||
currentUser: () => User;
|
||||
on: (
|
||||
eventName: 'init' | 'login' | 'logout' | 'error',
|
||||
callback: (input?: unknown) => void,
|
||||
) => void;
|
||||
init: () => void;
|
||||
store: { user: unknown; modal: { page: string }; saving: boolean };
|
||||
open: () => void;
|
||||
close: () => void;
|
||||
};
|
||||
|
||||
type AuthClient = {
|
||||
logout: () => void;
|
||||
currentUser: () => unknown;
|
||||
login?: (email: string, password: string, remember?: boolean) => Promise<User>;
|
||||
clearStore: () => void;
|
||||
};
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
netlifyIdentity?: NetlifyIdentity;
|
||||
}
|
||||
}
|
||||
|
||||
const localHosts: Record<string, boolean> = {
|
||||
localhost: true,
|
||||
'127.0.0.1': true,
|
||||
'0.0.0.0': true,
|
||||
};
|
||||
const defaults = {
|
||||
identity: '/.netlify/identity',
|
||||
gateway: '/.netlify/git',
|
||||
largeMedia: '/.netlify/large-media',
|
||||
};
|
||||
|
||||
function getEndpoint(endpoint: string, netlifySiteURL: string | null) {
|
||||
if (
|
||||
localHosts[document.location.host.split(':').shift() as string] &&
|
||||
netlifySiteURL &&
|
||||
endpoint.match(/^\/\.netlify\//)
|
||||
) {
|
||||
const parts = [];
|
||||
if (netlifySiteURL) {
|
||||
parts.push(netlifySiteURL);
|
||||
if (!netlifySiteURL.match(/\/$/)) {
|
||||
parts.push('/');
|
||||
}
|
||||
}
|
||||
parts.push(endpoint.replace(/^\//, ''));
|
||||
return parts.join('');
|
||||
}
|
||||
return endpoint;
|
||||
}
|
||||
|
||||
// wait for identity widget to initialize
|
||||
// force init on timeout
|
||||
let initPromise = Promise.resolve() as Promise<unknown>;
|
||||
if (window.netlifyIdentity) {
|
||||
let initialized = false;
|
||||
initPromise = Promise.race([
|
||||
new Promise<void>(resolve => {
|
||||
window.netlifyIdentity?.on('init', () => {
|
||||
initialized = true;
|
||||
resolve();
|
||||
});
|
||||
}),
|
||||
new Promise(resolve => setTimeout(resolve, 2500)).then(() => {
|
||||
if (!initialized) {
|
||||
console.info('Manually initializing identity widget');
|
||||
window.netlifyIdentity?.init();
|
||||
}
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
interface NetlifyUser extends Credentials {
|
||||
jwt: () => Promise<string>;
|
||||
email: string;
|
||||
user_metadata: { full_name: string; avatar_url: string };
|
||||
}
|
||||
|
||||
export default class GitGateway implements BackendClass {
|
||||
config: Config;
|
||||
api?: GitHubAPI | GitLabAPI | BitBucketAPI;
|
||||
branch: string;
|
||||
mediaFolder?: string;
|
||||
transformImages: boolean;
|
||||
gatewayUrl: string;
|
||||
netlifyLargeMediaURL: string;
|
||||
backendType: string | null;
|
||||
apiUrl: string;
|
||||
authClient?: AuthClient;
|
||||
backend: GitHubBackend | GitLabBackend | BitbucketBackend | null;
|
||||
acceptRoles?: string[];
|
||||
tokenPromise?: () => Promise<string>;
|
||||
_largeMediaClientPromise?: Promise<Client>;
|
||||
|
||||
options: {
|
||||
proxied: boolean;
|
||||
API: GitHubAPI | GitLabAPI | BitBucketAPI | null;
|
||||
};
|
||||
constructor(config: Config, options = {}) {
|
||||
this.options = {
|
||||
proxied: true,
|
||||
API: null,
|
||||
...options,
|
||||
};
|
||||
this.config = config;
|
||||
this.branch = config.backend.branch?.trim() || 'main';
|
||||
this.mediaFolder = config.media_folder;
|
||||
const { use_large_media_transforms_in_media_library: transformImages = true } = config.backend;
|
||||
this.transformImages = transformImages;
|
||||
|
||||
const netlifySiteURL = localStorage.getItem('netlifySiteURL');
|
||||
this.apiUrl = getEndpoint(config.backend.identity_url || defaults.identity, netlifySiteURL);
|
||||
this.gatewayUrl = getEndpoint(config.backend.gateway_url || defaults.gateway, netlifySiteURL);
|
||||
this.netlifyLargeMediaURL = getEndpoint(
|
||||
config.backend.large_media_url || defaults.largeMedia,
|
||||
netlifySiteURL,
|
||||
);
|
||||
const backendTypeRegex = /\/(github|gitlab|bitbucket)\/?$/;
|
||||
const backendTypeMatches = this.gatewayUrl.match(backendTypeRegex);
|
||||
if (backendTypeMatches) {
|
||||
this.backendType = backendTypeMatches[1];
|
||||
this.gatewayUrl = this.gatewayUrl.replace(backendTypeRegex, '');
|
||||
} else {
|
||||
this.backendType = null;
|
||||
}
|
||||
|
||||
this.backend = null;
|
||||
}
|
||||
|
||||
isGitBackend() {
|
||||
return true;
|
||||
}
|
||||
|
||||
async status() {
|
||||
const api = await fetch(GIT_GATEWAY_STATUS_ENDPOINT)
|
||||
.then(res => res.json())
|
||||
.then(res => {
|
||||
return res['components']
|
||||
.filter((statusComponent: GitGatewayStatus) =>
|
||||
GIT_GATEWAY_OPERATIONAL_UNITS.includes(statusComponent.name),
|
||||
)
|
||||
.every((statusComponent: GitGatewayStatus) => statusComponent.status === 'operational');
|
||||
})
|
||||
.catch(e => {
|
||||
console.warn('Failed getting Git Gateway status', e);
|
||||
return true;
|
||||
});
|
||||
|
||||
let auth = false;
|
||||
// no need to check auth if api is down
|
||||
if (api) {
|
||||
auth =
|
||||
(await this.tokenPromise?.()
|
||||
.then(token => !!token)
|
||||
.catch(e => {
|
||||
console.warn('Failed getting Identity token', e);
|
||||
return false;
|
||||
})) || false;
|
||||
}
|
||||
|
||||
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
|
||||
}
|
||||
|
||||
async getAuthClient() {
|
||||
if (this.authClient) {
|
||||
return this.authClient;
|
||||
}
|
||||
await initPromise;
|
||||
this.authClient = {
|
||||
logout: () => window.netlifyIdentity?.logout(),
|
||||
currentUser: () => window.netlifyIdentity?.currentUser(),
|
||||
clearStore: () => {
|
||||
const store = window.netlifyIdentity?.store;
|
||||
if (store) {
|
||||
store.user = null;
|
||||
store.modal.page = 'login';
|
||||
store.saving = false;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
requestFunction = (req: ApiRequest) =>
|
||||
this.tokenPromise!()
|
||||
.then(
|
||||
token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
|
||||
)
|
||||
.then(unsentRequest.performRequest);
|
||||
|
||||
authenticate(credentials: Credentials) {
|
||||
const user = credentials as NetlifyUser;
|
||||
this.tokenPromise = async () => {
|
||||
try {
|
||||
const func = user.jwt.bind(user);
|
||||
const token = await func();
|
||||
return token;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
throw new AccessTokenError(`Failed getting access token: ${error.message}`);
|
||||
}
|
||||
|
||||
throw new AccessTokenError('Failed getting access token');
|
||||
}
|
||||
};
|
||||
return this.tokenPromise!().then(async token => {
|
||||
if (!this.backendType) {
|
||||
const {
|
||||
github_enabled: githubEnabled,
|
||||
gitlab_enabled: gitlabEnabled,
|
||||
bitbucket_enabled: bitbucketEnabled,
|
||||
roles,
|
||||
} = await unsentRequest
|
||||
.fetchWithTimeout(`${this.gatewayUrl}/settings`, {
|
||||
headers: { Authorization: `Bearer ${token}` },
|
||||
})
|
||||
.then(async res => {
|
||||
const contentType = res.headers.get('Content-Type') || '';
|
||||
if (!contentType.includes('application/json') && !contentType.includes('text/json')) {
|
||||
throw new APIError(
|
||||
`Your Git Gateway backend is not returning valid settings. Please make sure it is enabled.`,
|
||||
res.status,
|
||||
'Git Gateway',
|
||||
);
|
||||
}
|
||||
const body = await res.json();
|
||||
|
||||
if (!res.ok) {
|
||||
throw new APIError(
|
||||
`Git Gateway Error: ${body.message ? body.message : body}`,
|
||||
res.status,
|
||||
'Git Gateway',
|
||||
);
|
||||
}
|
||||
|
||||
return body;
|
||||
});
|
||||
this.acceptRoles = roles;
|
||||
if (githubEnabled) {
|
||||
this.backendType = 'github';
|
||||
} else if (gitlabEnabled) {
|
||||
this.backendType = 'gitlab';
|
||||
} else if (bitbucketEnabled) {
|
||||
this.backendType = 'bitbucket';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.acceptRoles && this.acceptRoles.length > 0) {
|
||||
const userRoles = get(jwtDecode(token), 'app_metadata.roles', []);
|
||||
const validRole = intersection(userRoles, this.acceptRoles).length > 0;
|
||||
if (!validRole) {
|
||||
throw new Error("You don't have sufficient permissions to access Static CMS");
|
||||
}
|
||||
}
|
||||
|
||||
const userData = {
|
||||
name: user.user_metadata.full_name || user.email.split('@').shift()!,
|
||||
email: user.email,
|
||||
avatar_url: user.user_metadata.avatar_url,
|
||||
metadata: user.user_metadata,
|
||||
};
|
||||
const apiConfig = {
|
||||
apiRoot: `${this.gatewayUrl}/${this.backendType}`,
|
||||
branch: this.branch,
|
||||
tokenPromise: this.tokenPromise!,
|
||||
commitAuthor: pick(userData, ['name', 'email']),
|
||||
isLargeMedia: (filename: string) => this.isLargeMediaFile(filename),
|
||||
};
|
||||
|
||||
if (this.backendType === 'github') {
|
||||
this.api = new GitHubAPI(apiConfig);
|
||||
this.backend = new GitHubBackend(this.config, { ...this.options, API: this.api });
|
||||
} else if (this.backendType === 'gitlab') {
|
||||
this.api = new GitLabAPI(apiConfig);
|
||||
this.backend = new GitLabBackend(this.config, { ...this.options, API: this.api });
|
||||
} else if (this.backendType === 'bitbucket') {
|
||||
this.api = new BitBucketAPI({
|
||||
...apiConfig,
|
||||
requestFunction: this.requestFunction,
|
||||
hasWriteAccess: async () => true,
|
||||
});
|
||||
this.backend = new BitbucketBackend(this.config, { ...this.options, API: this.api });
|
||||
}
|
||||
|
||||
if (!(await this.api!.hasWriteAccess())) {
|
||||
throw new Error("You don't have sufficient permissions to access Static CMS");
|
||||
}
|
||||
return { name: userData.name, login: userData.email } as User;
|
||||
});
|
||||
}
|
||||
|
||||
async restoreUser() {
|
||||
const client = await this.getAuthClient();
|
||||
const user = client?.currentUser();
|
||||
if (!user) {
|
||||
return Promise.reject();
|
||||
}
|
||||
return this.authenticate(user as Credentials);
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
const WrappedAuthenticationPage = (props: TranslatedProps<AuthenticationPageProps>) => {
|
||||
const handleAuth = useCallback(
|
||||
async (email: string, password: string): Promise<User | string> => {
|
||||
try {
|
||||
const authClient = await this.getAuthClient();
|
||||
if (!authClient) {
|
||||
return 'Auth client not started';
|
||||
}
|
||||
|
||||
if (!authClient.login) {
|
||||
return 'Auth client login function not found';
|
||||
}
|
||||
|
||||
return authClient.login(email, password, true);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (error: any) {
|
||||
return error.description || error.msg || error;
|
||||
}
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
return <AuthenticationPage {...props} handleAuth={handleAuth} />;
|
||||
};
|
||||
WrappedAuthenticationPage.displayName = 'AuthenticationPage';
|
||||
return WrappedAuthenticationPage;
|
||||
}
|
||||
|
||||
async logout() {
|
||||
const client = await this.getAuthClient();
|
||||
try {
|
||||
client?.logout();
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
getToken() {
|
||||
return this.tokenPromise!();
|
||||
}
|
||||
|
||||
async entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
return this.backend!.entriesByFolder(folder, extension, depth);
|
||||
}
|
||||
allEntriesByFolder(folder: string, extension: string, depth: number) {
|
||||
return this.backend!.allEntriesByFolder(folder, extension, depth);
|
||||
}
|
||||
entriesByFiles(files: ImplementationFile[]) {
|
||||
return this.backend!.entriesByFiles(files);
|
||||
}
|
||||
getEntry(path: string) {
|
||||
return this.backend!.getEntry(path);
|
||||
}
|
||||
|
||||
async isLargeMediaFile(path: string) {
|
||||
const client = await this.getLargeMediaClient();
|
||||
return client.enabled && client.matchPath(path);
|
||||
}
|
||||
|
||||
getMedia(mediaFolder = this.mediaFolder) {
|
||||
return this.backend!.getMedia(mediaFolder);
|
||||
}
|
||||
|
||||
// this method memoizes this._getLargeMediaClient so that there can
|
||||
// only be one client at a time
|
||||
getLargeMediaClient() {
|
||||
if (this._largeMediaClientPromise) {
|
||||
return this._largeMediaClientPromise;
|
||||
}
|
||||
this._largeMediaClientPromise = this._getLargeMediaClient();
|
||||
return this._largeMediaClientPromise;
|
||||
}
|
||||
_getLargeMediaClient() {
|
||||
const netlifyLargeMediaEnabledPromise = this.api!.readFile('.lfsconfig')
|
||||
.then(config => ini.decode<{ lfs: { url: string } }>(config as string))
|
||||
.then(({ lfs: { url } }) => new URL(url))
|
||||
.then(lfsURL => ({
|
||||
enabled: lfsURL.hostname.endsWith('netlify.com') || lfsURL.hostname.endsWith('netlify.app'),
|
||||
}))
|
||||
.catch((err: Error) => ({ enabled: false, err }));
|
||||
|
||||
const lfsPatternsPromise = this.api!.readFile('.gitattributes')
|
||||
.then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
|
||||
.then((patterns: string[]) => ({ err: null, patterns }))
|
||||
.catch((err: Error) => {
|
||||
if (err.message.includes('404')) {
|
||||
console.info('This 404 was expected and handled appropriately.');
|
||||
return { err: null, patterns: [] as string[] };
|
||||
} else {
|
||||
return { err, patterns: [] as string[] };
|
||||
}
|
||||
});
|
||||
|
||||
return Promise.all([netlifyLargeMediaEnabledPromise, lfsPatternsPromise]).then(
|
||||
([{ enabled: maybeEnabled }, { patterns, err: patternsErr }]) => {
|
||||
const enabled = maybeEnabled && !patternsErr;
|
||||
|
||||
// We expect LFS patterns to exist when the .lfsconfig states
|
||||
// that we're using Netlify Large Media
|
||||
if (maybeEnabled && patternsErr) {
|
||||
console.error(patternsErr);
|
||||
}
|
||||
|
||||
return getClient({
|
||||
enabled,
|
||||
rootURL: this.netlifyLargeMediaURL,
|
||||
makeAuthorizedRequest: this.requestFunction,
|
||||
patterns,
|
||||
transformImages: this.transformImages ? { nf_resize: 'fit', w: 560, h: 320 } : false,
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
async getLargeMediaDisplayURL(
|
||||
{ path, id }: { path: string; id: string | null },
|
||||
branch = this.branch,
|
||||
) {
|
||||
const readFile = (
|
||||
path: string,
|
||||
id: string | null | undefined,
|
||||
{ parseText }: { parseText: boolean },
|
||||
) => this.api!.readFile(path, id, { branch, parseText });
|
||||
|
||||
const items = await entriesByFiles(
|
||||
[{ path, id }],
|
||||
readFile,
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
'Git-Gateway',
|
||||
);
|
||||
const entry = items[0];
|
||||
const pointerFile = parsePointerFile(entry.data);
|
||||
if (!pointerFile.sha) {
|
||||
console.warn(`Failed parsing pointer file ${path}`);
|
||||
return { url: path, blob: new Blob() };
|
||||
}
|
||||
|
||||
const client = await this.getLargeMediaClient();
|
||||
const { url, blob } = await client.getDownloadURL(pointerFile);
|
||||
return { url, blob };
|
||||
}
|
||||
|
||||
async getMediaDisplayURL(displayURL: DisplayURL) {
|
||||
const { path, id } = displayURL as DisplayURLObject;
|
||||
const isLargeMedia = await this.isLargeMediaFile(path);
|
||||
if (isLargeMedia) {
|
||||
const { url } = await this.getLargeMediaDisplayURL({ path, id });
|
||||
return url;
|
||||
}
|
||||
if (typeof displayURL === 'string') {
|
||||
return displayURL;
|
||||
}
|
||||
|
||||
const url = await this.backend!.getMediaDisplayURL(displayURL);
|
||||
return url;
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const isLargeMedia = await this.isLargeMediaFile(path);
|
||||
if (isLargeMedia) {
|
||||
const { url, blob } = await this.getLargeMediaDisplayURL({ path, id: null });
|
||||
const name = basename(path);
|
||||
return {
|
||||
id: url,
|
||||
name,
|
||||
path,
|
||||
url,
|
||||
displayURL: url,
|
||||
file: new File([blob], name),
|
||||
size: blob.size,
|
||||
};
|
||||
}
|
||||
return this.backend!.getMediaFile(path);
|
||||
}
|
||||
|
||||
async persistEntry(entry: BackendEntry, options: PersistOptions) {
|
||||
const client = await this.getLargeMediaClient();
|
||||
if (client.enabled) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const assets = (await getLargeMediaFilteredMediaFiles(client, entry.assets)) as any;
|
||||
return this.backend!.persistEntry({ ...entry, assets }, options);
|
||||
} else {
|
||||
return this.backend!.persistEntry(entry, options);
|
||||
}
|
||||
}
|
||||
|
||||
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
|
||||
const { fileObj, path } = mediaFile;
|
||||
const displayURL = URL.createObjectURL(fileObj as Blob);
|
||||
const client = await this.getLargeMediaClient();
|
||||
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
|
||||
const isLargeMedia = await this.isLargeMediaFile(fixedPath);
|
||||
if (isLargeMedia) {
|
||||
const persistMediaArgument = (await getPointerFileForMediaFileObj(
|
||||
client,
|
||||
fileObj as File,
|
||||
path,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
)) as any;
|
||||
return {
|
||||
...(await this.backend!.persistMedia(persistMediaArgument, options)),
|
||||
displayURL,
|
||||
};
|
||||
}
|
||||
return await this.backend!.persistMedia(mediaFile, options);
|
||||
}
|
||||
deleteFiles(paths: string[], commitMessage: string) {
|
||||
return this.backend!.deleteFiles(paths, commitMessage);
|
||||
}
|
||||
traverseCursor(cursor: Cursor, action: string) {
|
||||
return this.backend!.traverseCursor!(cursor, action);
|
||||
}
|
||||
}
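A minimal sketch (not part of the commit) of how the constructor above splits the backend type off the configured gateway URL; the example URL is hypothetical, the regex is the one used in the constructor.

// Sketch only: mirrors the backendTypeRegex handling in the GitGateway constructor.
const backendTypeRegex = /\/(github|gitlab|bitbucket)\/?$/;

function splitGatewayUrl(gatewayUrl: string): { gatewayUrl: string; backendType: string | null } {
  const match = gatewayUrl.match(backendTypeRegex);
  return match
    ? { gatewayUrl: gatewayUrl.replace(backendTypeRegex, ''), backendType: match[1] }
    : { gatewayUrl, backendType: null };
}

// Hypothetical URL:
// splitGatewayUrl('https://example.netlify.app/.netlify/git/github')
//   -> { gatewayUrl: 'https://example.netlify.app/.netlify/git', backendType: 'github' }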
|
2
packages/core/src/backends/git-gateway/index.ts
Normal file
@ -0,0 +1,2 @@
|
||||
export { default as GitGatewayBackend } from './implementation';
|
||||
export { default as AuthenticationPage } from './AuthenticationPage';
|
181
packages/core/src/backends/git-gateway/netlify-lfs-client.ts
Normal file
@ -0,0 +1,181 @@
|
||||
import { flow, fromPairs, map } from 'lodash/fp';
|
||||
import isPlainObject from 'lodash/isPlainObject';
|
||||
import isEmpty from 'lodash/isEmpty';
|
||||
import minimatch from 'minimatch';
|
||||
|
||||
import { unsentRequest } from '@staticcms/core/lib/util';
|
||||
|
||||
import type { ApiRequest, PointerFile } from '@staticcms/core/lib/util';
|
||||
|
||||
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
|
||||
|
||||
type ImageTransformations = { nf_resize: string; w: number; h: number };
|
||||
|
||||
type ClientConfig = {
|
||||
rootURL: string;
|
||||
makeAuthorizedRequest: MakeAuthorizedRequest;
|
||||
patterns: string[];
|
||||
enabled: boolean;
|
||||
transformImages: ImageTransformations | boolean;
|
||||
};
|
||||
|
||||
export function matchPath({ patterns }: ClientConfig, path: string) {
|
||||
return patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
|
||||
}
|
||||
|
||||
//
|
||||
// API interactions
|
||||
|
||||
const defaultContentHeaders = {
|
||||
Accept: 'application/vnd.git-lfs+json',
|
||||
['Content-Type']: 'application/vnd.git-lfs+json',
|
||||
};
|
||||
|
||||
async function resourceExists(
|
||||
{ rootURL, makeAuthorizedRequest }: ClientConfig,
|
||||
{ sha, size }: PointerFile,
|
||||
) {
|
||||
const response = await makeAuthorizedRequest({
|
||||
url: `${rootURL}/verify`,
|
||||
method: 'POST',
|
||||
headers: defaultContentHeaders,
|
||||
body: JSON.stringify({ oid: sha, size }),
|
||||
});
|
||||
if (response.ok) {
|
||||
return true;
|
||||
}
|
||||
if (response.status === 404) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO: what kind of error to throw here? APIError doesn't seem to fit
|
||||
}
|
||||
|
||||
function getTransformationsParams(t: boolean | ImageTransformations) {
|
||||
if (isPlainObject(t) && !isEmpty(t)) {
|
||||
const { nf_resize: resize, w, h } = t as ImageTransformations;
|
||||
return `?nf_resize=${resize}&w=${w}&h=${h}`;
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
async function getDownloadURL(
|
||||
{ rootURL, transformImages: t, makeAuthorizedRequest }: ClientConfig,
|
||||
{ sha }: PointerFile,
|
||||
) {
|
||||
try {
|
||||
const transformation = getTransformationsParams(t);
|
||||
const transformedPromise = makeAuthorizedRequest(`${rootURL}/origin/${sha}${transformation}`);
|
||||
const [transformed, original] = await Promise.all([
|
||||
transformedPromise,
|
||||
// if transformation is defined, we need to load the original so we have the correct metadata
|
||||
transformation ? makeAuthorizedRequest(`${rootURL}/origin/${sha}`) : transformedPromise,
|
||||
]);
|
||||
if (!transformed.ok) {
|
||||
const error = await transformed.json();
|
||||
throw new Error(
|
||||
`Failed getting large media for sha '${sha}': '${error.code} - ${error.msg}'`,
|
||||
);
|
||||
}
|
||||
|
||||
const transformedBlob = await transformed.blob();
|
||||
const url = URL.createObjectURL(transformedBlob);
|
||||
return { url, blob: transformation ? await original.blob() : transformedBlob };
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return { url: '', blob: new Blob() };
|
||||
}
|
||||
}
|
||||
|
||||
function uploadOperation(objects: PointerFile[]) {
|
||||
return {
|
||||
operation: 'upload',
|
||||
transfers: ['basic'],
|
||||
objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
|
||||
};
|
||||
}
|
||||
|
||||
async function getResourceUploadURLs(
|
||||
{
|
||||
rootURL,
|
||||
makeAuthorizedRequest,
|
||||
}: { rootURL: string; makeAuthorizedRequest: MakeAuthorizedRequest },
|
||||
pointerFiles: PointerFile[],
|
||||
) {
|
||||
const response = await makeAuthorizedRequest({
|
||||
url: `${rootURL}/objects/batch`,
|
||||
method: 'POST',
|
||||
headers: defaultContentHeaders,
|
||||
body: JSON.stringify(uploadOperation(pointerFiles)),
|
||||
});
|
||||
|
||||
const { objects } = await response.json();
|
||||
const uploadUrls = objects.map(
|
||||
(object: { error?: { message: string }; actions: { upload: { href: string } } }) => {
|
||||
if (object.error) {
|
||||
throw new Error(object.error.message);
|
||||
}
|
||||
return object.actions.upload.href;
|
||||
},
|
||||
);
|
||||
return uploadUrls;
|
||||
}
|
||||
|
||||
function uploadBlob(uploadURL: string, blob: Blob) {
|
||||
return unsentRequest.fetchWithTimeout(uploadURL, {
|
||||
method: 'PUT',
|
||||
body: blob,
|
||||
});
|
||||
}
|
||||
|
||||
async function uploadResource(
|
||||
clientConfig: ClientConfig,
|
||||
{ sha, size }: PointerFile,
|
||||
resource: Blob,
|
||||
) {
|
||||
const existingFile = await resourceExists(clientConfig, { sha, size });
|
||||
if (existingFile) {
|
||||
return sha;
|
||||
}
|
||||
const [uploadURL] = await getResourceUploadURLs(clientConfig, [{ sha, size }]);
|
||||
await uploadBlob(uploadURL, resource);
|
||||
return sha;
|
||||
}
|
||||
|
||||
//
|
||||
// Create Large Media client
|
||||
|
||||
function configureFn(config: ClientConfig, fn: Function) {
|
||||
return (...args: unknown[]) => fn(config, ...args);
|
||||
}
|
||||
|
||||
const clientFns: Record<string, Function> = {
|
||||
resourceExists,
|
||||
getResourceUploadURLs,
|
||||
getDownloadURL,
|
||||
uploadResource,
|
||||
matchPath,
|
||||
};
|
||||
|
||||
export type Client = {
|
||||
resourceExists: (pointer: PointerFile) => Promise<boolean | undefined>;
|
||||
getResourceUploadURLs: (objects: PointerFile[]) => Promise<string[]>;
|
||||
getDownloadURL: (pointer: PointerFile) => Promise<{ url: string; blob: Blob }>;
|
||||
uploadResource: (pointer: PointerFile, blob: Blob) => Promise<string>;
|
||||
matchPath: (path: string) => boolean;
|
||||
patterns: string[];
|
||||
enabled: boolean;
|
||||
};
|
||||
|
||||
export function getClient(clientConfig: ClientConfig) {
|
||||
return flow([
|
||||
Object.keys,
|
||||
map((key: string) => [key, configureFn(clientConfig, clientFns[key])]),
|
||||
fromPairs,
|
||||
configuredFns => ({
|
||||
...configuredFns,
|
||||
patterns: clientConfig.patterns,
|
||||
enabled: clientConfig.enabled,
|
||||
}),
|
||||
])(clientFns);
|
||||
}
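A brief usage illustration (a sketch, not part of the commit) of the client returned by getClient above: each entry of clientFns is partially applied to the config, so callers pass only the per-call arguments. The rootURL and patterns below are hypothetical.

// Sketch only; config values are made up for the example.
const exampleClient = getClient({
  rootURL: 'https://example.netlify.app/.netlify/large-media',
  makeAuthorizedRequest: req => unsentRequest.performRequest(req),
  patterns: ['*.jpg', '*.png'],
  enabled: true,
  transformImages: false,
}) as Client;

exampleClient.matchPath('static/media/photo.jpg'); // true (minimatch with matchBase)
exampleClient.matchPath('content/posts/post.md'); // false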
|
556
packages/core/src/backends/github/API.ts
Normal file
@ -0,0 +1,556 @@
|
||||
import { Base64 } from 'js-base64';
|
||||
import initial from 'lodash/initial';
|
||||
import last from 'lodash/last';
|
||||
import partial from 'lodash/partial';
|
||||
import result from 'lodash/result';
|
||||
import trim from 'lodash/trim';
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import { dirname } from 'path';
|
||||
|
||||
import {
|
||||
APIError,
|
||||
basename,
|
||||
generateContentKey,
|
||||
getAllResponses,
|
||||
localForage,
|
||||
parseContentKey,
|
||||
readFileMetadata,
|
||||
requestWithBackoff,
|
||||
unsentRequest,
|
||||
} from '@staticcms/core/lib/util';
|
||||
|
||||
import type { DataFile, PersistOptions } from '@staticcms/core/interface';
|
||||
import type { ApiRequest, FetchError } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
import type { Semaphore } from 'semaphore';
|
||||
import type {
|
||||
GitCreateCommitResponse,
|
||||
GitCreateRefResponse,
|
||||
GitCreateTreeParamsTree,
|
||||
GitCreateTreeResponse,
|
||||
GitGetBlobResponse,
|
||||
GitGetTreeResponse,
|
||||
GitHubAuthor,
|
||||
GitHubCommitter,
|
||||
GitHubUser,
|
||||
GitUpdateRefResponse,
|
||||
ReposGetBranchResponse,
|
||||
ReposGetResponse,
|
||||
ReposListCommitsResponse,
|
||||
} from './types';
|
||||
|
||||
export const API_NAME = 'GitHub';
|
||||
|
||||
export interface Config {
|
||||
apiRoot?: string;
|
||||
token?: string;
|
||||
branch?: string;
|
||||
repo?: string;
|
||||
originRepo?: string;
|
||||
}
|
||||
|
||||
type Override<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;
|
||||
|
||||
type TreeEntry = Override<GitCreateTreeParamsTree, { sha: string | null }>;
|
||||
|
||||
interface MetaDataObjects {
|
||||
entry: { path: string; sha: string };
|
||||
files: MediaFile[];
|
||||
}
|
||||
|
||||
export interface Metadata {
|
||||
type: string;
|
||||
objects: MetaDataObjects;
|
||||
branch: string;
|
||||
status: string;
|
||||
collection: string;
|
||||
commitMessage: string;
|
||||
version?: string;
|
||||
user: string;
|
||||
title?: string;
|
||||
description?: string;
|
||||
timeStamp: string;
|
||||
}
|
||||
|
||||
export interface BlobArgs {
|
||||
sha: string;
|
||||
repoURL: string;
|
||||
parseText: boolean;
|
||||
}
|
||||
|
||||
type Param = string | number | undefined;
|
||||
|
||||
export type Options = RequestInit & {
|
||||
params?: Record<string, Param | Record<string, Param> | string[]>;
|
||||
};
|
||||
|
||||
type MediaFile = {
|
||||
sha: string;
|
||||
path: string;
|
||||
};
|
||||
|
||||
export type Diff = {
|
||||
path: string;
|
||||
newFile: boolean;
|
||||
sha: string;
|
||||
binary: boolean;
|
||||
};
|
||||
|
||||
export default class API {
|
||||
apiRoot: string;
|
||||
token: string;
|
||||
branch: string;
|
||||
repo: string;
|
||||
originRepo: string;
|
||||
repoOwner: string;
|
||||
repoName: string;
|
||||
originRepoOwner: string;
|
||||
originRepoName: string;
|
||||
repoURL: string;
|
||||
originRepoURL: string;
|
||||
|
||||
_userPromise?: Promise<GitHubUser>;
|
||||
_metadataSemaphore?: Semaphore;
|
||||
|
||||
commitAuthor?: {};
|
||||
|
||||
constructor(config: Config) {
|
||||
this.apiRoot = config.apiRoot || 'https://api.github.com';
|
||||
this.token = config.token || '';
|
||||
this.branch = config.branch || 'main';
|
||||
this.repo = config.repo || '';
|
||||
this.originRepo = config.originRepo || this.repo;
|
||||
this.repoURL = `/repos/${this.repo}`;
|
||||
this.originRepoURL = `/repos/${this.originRepo}`;
|
||||
|
||||
const [repoParts, originRepoParts] = [this.repo.split('/'), this.originRepo.split('/')];
|
||||
this.repoOwner = repoParts[0];
|
||||
this.repoName = repoParts[1];
|
||||
|
||||
this.originRepoOwner = originRepoParts[0];
|
||||
this.originRepoName = originRepoParts[1];
|
||||
}
|
||||
|
||||
static DEFAULT_COMMIT_MESSAGE = 'Automatically generated by Static CMS';
|
||||
|
||||
user(): Promise<{ name: string; login: string }> {
|
||||
if (!this._userPromise) {
|
||||
this._userPromise = this.getUser();
|
||||
}
|
||||
return this._userPromise;
|
||||
}
|
||||
|
||||
getUser() {
|
||||
return this.request('/user') as Promise<GitHubUser>;
|
||||
}
|
||||
|
||||
async hasWriteAccess() {
|
||||
try {
|
||||
const result: ReposGetResponse = await this.request(this.repoURL);
|
||||
// update config repoOwner to avoid case sensitivity issues with GitHub
|
||||
this.repoOwner = result.owner.login;
|
||||
return result.permissions.push;
|
||||
} catch (error) {
|
||||
console.error('Problem fetching repo data from GitHub');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
reset() {
|
||||
// no op
|
||||
}
|
||||
|
||||
requestHeaders(headers = {}) {
|
||||
const baseHeader: Record<string, string> = {
|
||||
'Content-Type': 'application/json; charset=utf-8',
|
||||
...headers,
|
||||
};
|
||||
|
||||
if (this.token) {
|
||||
baseHeader.Authorization = `token ${this.token}`;
|
||||
return Promise.resolve(baseHeader);
|
||||
}
|
||||
|
||||
return Promise.resolve(baseHeader);
|
||||
}
|
||||
|
||||
parseJsonResponse(response: Response) {
|
||||
return response.json().then(json => {
|
||||
if (!response.ok) {
|
||||
return Promise.reject(json);
|
||||
}
|
||||
|
||||
return json;
|
||||
});
|
||||
}
|
||||
|
||||
urlFor(path: string, options: Options) {
|
||||
const params = [];
|
||||
if (options.params) {
|
||||
for (const key in options.params) {
|
||||
params.push(`${key}=${encodeURIComponent(options.params[key] as string)}`);
|
||||
}
|
||||
}
|
||||
if (params.length) {
|
||||
path += `?${params.join('&')}`;
|
||||
}
|
||||
return this.apiRoot + path;
|
||||
}
|
||||
|
||||
parseResponse(response: Response) {
|
||||
const contentType = response.headers.get('Content-Type');
|
||||
if (contentType && contentType.match(/json/)) {
|
||||
return this.parseJsonResponse(response);
|
||||
}
|
||||
const textPromise = response.text().then(text => {
|
||||
if (!response.ok) {
|
||||
return Promise.reject(text);
|
||||
}
|
||||
return text;
|
||||
});
|
||||
return textPromise;
|
||||
}
|
||||
|
||||
handleRequestError(error: FetchError, responseStatus: number) {
|
||||
throw new APIError(error.message, responseStatus, API_NAME);
|
||||
}
|
||||
|
||||
buildRequest(req: ApiRequest) {
|
||||
return req;
|
||||
}
|
||||
|
||||
async request(
|
||||
path: string,
|
||||
options: Options = {},
|
||||
parser = (response: Response) => this.parseResponse(response),
|
||||
) {
|
||||
options = { cache: 'no-cache', ...options };
|
||||
const headers = await this.requestHeaders(options.headers || {});
|
||||
const url = this.urlFor(path, options);
|
||||
let responseStatus = 500;
|
||||
|
||||
try {
|
||||
const req = unsentRequest.fromFetchArguments(url, {
|
||||
...options,
|
||||
headers,
|
||||
}) as unknown as ApiRequest;
|
||||
const response = await requestWithBackoff(this, req);
|
||||
responseStatus = response.status;
|
||||
const parsedResponse = await parser(response);
|
||||
return parsedResponse;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (error: any) {
|
||||
return this.handleRequestError(error, responseStatus);
|
||||
}
|
||||
}
|
||||
|
||||
nextUrlProcessor() {
|
||||
return (url: string) => url;
|
||||
}
|
||||
|
||||
async requestAllPages<T>(url: string, options: Options = {}) {
|
||||
options = { cache: 'no-cache', ...options };
|
||||
const headers = await this.requestHeaders(options.headers || {});
|
||||
const processedURL = this.urlFor(url, options);
|
||||
const allResponses = await getAllResponses(
|
||||
processedURL,
|
||||
{ ...options, headers },
|
||||
'next',
|
||||
this.nextUrlProcessor(),
|
||||
);
|
||||
const pages: T[][] = await Promise.all(
|
||||
allResponses.map((res: Response) => this.parseResponse(res)),
|
||||
);
|
||||
return ([] as T[]).concat(...pages);
|
||||
}
|
||||
|
||||
generateContentKey(collectionName: string, slug: string) {
|
||||
return generateContentKey(collectionName, slug);
|
||||
}
|
||||
|
||||
parseContentKey(contentKey: string) {
|
||||
return parseContentKey(contentKey);
|
||||
}
|
||||
|
||||
async readFile(
|
||||
path: string,
|
||||
sha?: string | null,
|
||||
{
|
||||
branch = this.branch,
|
||||
repoURL = this.repoURL,
|
||||
parseText = true,
|
||||
}: {
|
||||
branch?: string;
|
||||
repoURL?: string;
|
||||
parseText?: boolean;
|
||||
} = {},
|
||||
) {
|
||||
if (!sha) {
|
||||
sha = await this.getFileSha(path, { repoURL, branch });
|
||||
}
|
||||
const content = await this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
|
||||
return content;
|
||||
}
|
||||
|
||||
async readFileMetadata(path: string, sha: string | null | undefined) {
|
||||
const fetchFileMetadata = async () => {
|
||||
try {
|
||||
const result: ReposListCommitsResponse = await this.request(
|
||||
`${this.originRepoURL}/commits`,
|
||||
{
|
||||
params: { path, sha: this.branch },
|
||||
},
|
||||
);
|
||||
const { commit } = result[0];
|
||||
return {
|
||||
author: commit.author.name || commit.author.email,
|
||||
updatedOn: commit.author.date,
|
||||
};
|
||||
} catch (e) {
|
||||
return { author: '', updatedOn: '' };
|
||||
}
|
||||
};
|
||||
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
|
||||
return fileMetadata;
|
||||
}
|
||||
|
||||
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
|
||||
const result: GitGetBlobResponse = await this.request(`${repoURL}/git/blobs/${sha}`, {
|
||||
cache: 'force-cache',
|
||||
});
|
||||
|
||||
if (parseText) {
|
||||
// treat content as a utf-8 string
|
||||
const content = Base64.decode(result.content);
|
||||
return content;
|
||||
} else {
|
||||
// treat content as binary and convert to blob
|
||||
const content = Base64.atob(result.content);
|
||||
const byteArray = new Uint8Array(content.length);
|
||||
for (let i = 0; i < content.length; i++) {
|
||||
byteArray[i] = content.charCodeAt(i);
|
||||
}
|
||||
const blob = new Blob([byteArray]);
|
||||
return blob;
|
||||
}
|
||||
}
|
||||
|
||||
async listFiles(
|
||||
path: string,
|
||||
{ repoURL = this.repoURL, branch = this.branch, depth = 1 } = {},
|
||||
): Promise<{ type: string; id: string; name: string; path: string; size: number }[]> {
|
||||
const folder = trim(path, '/');
|
||||
try {
|
||||
const result: GitGetTreeResponse = await this.request(
|
||||
`${repoURL}/git/trees/${branch}:${folder}`,
|
||||
{
|
||||
// GitHub API supports recursive=1 for getting the entire recursive tree
|
||||
// or omitting it to get the non-recursive tree
|
||||
params: depth > 1 ? { recursive: 1 } : {},
|
||||
},
|
||||
);
|
||||
return (
|
||||
result.tree
|
||||
// filter only files and up to the required depth
|
||||
.filter(file => file.type === 'blob' && file.path.split('/').length <= depth)
|
||||
.map(file => ({
|
||||
type: file.type,
|
||||
id: file.sha,
|
||||
name: basename(file.path),
|
||||
path: `${folder}/${file.path}`,
|
||||
size: file.size!,
|
||||
}))
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (err: any) {
|
||||
if (err && err.status === 404) {
|
||||
console.info('This 404 was expected and handled appropriately.');
|
||||
return [];
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const files: (DataFile | AssetProxy)[] = mediaFiles.concat(dataFiles as any);
|
||||
const uploadPromises = files.map(file => this.uploadBlob(file));
|
||||
await Promise.all(uploadPromises);
|
||||
|
||||
return (
|
||||
this.getDefaultBranch()
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
.then(branchData => this.updateTree(branchData.commit.sha, files as any))
|
||||
.then(changeTree => this.commit(options.commitMessage, changeTree))
|
||||
.then(response => this.patchBranch(this.branch, response.sha))
|
||||
);
|
||||
}
|
||||
|
||||
async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
|
||||
/**
|
||||
* We need to request the tree first to get the SHA. We use extended SHA-1
|
||||
* syntax (<rev>:<path>) to get a blob from a tree without having to recurse
|
||||
* through the tree.
|
||||
*/
|
||||
|
||||
const pathArray = path.split('/');
|
||||
const filename = last(pathArray);
|
||||
const directory = initial(pathArray).join('/');
|
||||
const fileDataPath = encodeURIComponent(directory);
|
||||
const fileDataURL = `${repoURL}/git/trees/${branch}:${fileDataPath}`;
|
||||
|
||||
const result: GitGetTreeResponse = await this.request(fileDataURL);
|
||||
const file = result.tree.find(file => file.path === filename);
|
||||
if (file) {
|
||||
return file.sha;
|
||||
} else {
|
||||
throw new APIError('Not Found', 404, API_NAME);
|
||||
}
|
||||
}
|
||||
|
||||
async deleteFiles(paths: string[], message: string) {
|
||||
const branchData = await this.getDefaultBranch();
|
||||
const files = paths.map(path => ({ path, sha: null }));
|
||||
const changeTree = await this.updateTree(branchData.commit.sha, files);
|
||||
const commit = await this.commit(message, changeTree);
|
||||
await this.patchBranch(this.branch, commit.sha);
|
||||
}
|
||||
|
||||
async createRef(type: string, name: string, sha: string) {
|
||||
const result: GitCreateRefResponse = await this.request(`${this.repoURL}/git/refs`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ ref: `refs/${type}/${name}`, sha }),
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
async patchRef(type: string, name: string, sha: string) {
|
||||
const result: GitUpdateRefResponse = await this.request(
|
||||
`${this.repoURL}/git/refs/${type}/${encodeURIComponent(name)}`,
|
||||
{
|
||||
method: 'PATCH',
|
||||
body: JSON.stringify({ sha }),
|
||||
},
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
deleteRef(type: string, name: string) {
|
||||
return this.request(`${this.repoURL}/git/refs/${type}/${encodeURIComponent(name)}`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
}
|
||||
|
||||
async getDefaultBranch() {
|
||||
const result: ReposGetBranchResponse = await this.request(
|
||||
`${this.originRepoURL}/branches/${encodeURIComponent(this.branch)}`,
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
patchBranch(branchName: string, sha: string) {
|
||||
return this.patchRef('heads', branchName, sha);
|
||||
}
|
||||
|
||||
async getHeadReference(head: string) {
|
||||
return `${this.repoOwner}:${head}`;
|
||||
}
|
||||
|
||||
toBase64(str: string) {
|
||||
return Promise.resolve(Base64.encode(str));
|
||||
}
|
||||
|
||||
async uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
|
||||
const contentBase64 = await result(
|
||||
item,
|
||||
'toBase64',
|
||||
partial(this.toBase64, item.raw as string),
|
||||
);
|
||||
const response = await this.request(`${this.repoURL}/git/blobs`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
content: contentBase64,
|
||||
encoding: 'base64',
|
||||
}),
|
||||
});
|
||||
item.sha = response.sha;
|
||||
return item;
|
||||
}
|
||||
|
||||
async updateTree(
|
||||
baseSha: string,
|
||||
files: { path: string; sha: string | null; newPath?: string }[],
|
||||
branch = this.branch,
|
||||
) {
|
||||
const toMove: { from: string; to: string; sha: string }[] = [];
|
||||
const tree = files.reduce((acc, file) => {
|
||||
const entry = {
|
||||
path: trimStart(file.path, '/'),
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: file.sha,
|
||||
} as TreeEntry;
|
||||
|
||||
if (file.newPath) {
|
||||
toMove.push({ from: file.path, to: file.newPath, sha: file.sha as string });
|
||||
} else {
|
||||
acc.push(entry);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, [] as TreeEntry[]);
|
||||
|
||||
for (const { from, to, sha } of toMove) {
|
||||
const sourceDir = dirname(from);
|
||||
const destDir = dirname(to);
|
||||
const files = await this.listFiles(sourceDir, { branch, depth: 100 });
|
||||
for (const file of files) {
|
||||
// delete current path
|
||||
tree.push({
|
||||
path: file.path,
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: null,
|
||||
});
|
||||
// create in new path
|
||||
tree.push({
|
||||
path: file.path.replace(sourceDir, destDir),
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: file.path === from ? sha : file.id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const newTree = await this.createTree(baseSha, tree);
|
||||
return { ...newTree, parentSha: baseSha };
|
||||
}
|
||||
|
||||
async createTree(baseSha: string, tree: TreeEntry[]) {
|
||||
const result: GitCreateTreeResponse = await this.request(`${this.repoURL}/git/trees`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ base_tree: baseSha, tree }),
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
commit(message: string, changeTree: { parentSha?: string; sha: string }) {
|
||||
const parents = changeTree.parentSha ? [changeTree.parentSha] : [];
|
||||
return this.createCommit(message, changeTree.sha, parents);
|
||||
}
|
||||
|
||||
async createCommit(
|
||||
message: string,
|
||||
treeSha: string,
|
||||
parents: string[],
|
||||
author?: GitHubAuthor,
|
||||
committer?: GitHubCommitter,
|
||||
) {
|
||||
const result: GitCreateCommitResponse = await this.request(`${this.repoURL}/git/commits`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ message, tree: treeSha, parents, author, committer }),
|
||||
});
|
||||
return result;
|
||||
}
|
||||
}
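A short usage sketch (not part of the commit) of the extended SHA-1 lookup documented in getFileSha above; the repo, branch, token, and file path are hypothetical.

// Sketch only: resolves a blob without recursing through the tree.
async function readPostExample() {
  const api = new API({ repo: 'owner/repo', branch: 'main', token: 'token' });
  // getFileSha requests /repos/owner/repo/git/trees/main:content%2Fposts
  // (extended SHA-1 <rev>:<path>) and returns the sha of the entry named
  // 'first-post.md'; readFile then fetches /repos/owner/repo/git/blobs/<sha>.
  return api.readFile('content/posts/first-post.md');
}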
|
64
packages/core/src/backends/github/AuthenticationPage.tsx
Normal file
@ -0,0 +1,64 @@
|
||||
import { styled } from '@mui/material/styles';
|
||||
import React, { useCallback, useState } from 'react';
|
||||
|
||||
import AuthenticationPage from '@staticcms/core/components/UI/AuthenticationPage';
|
||||
import Icon from '@staticcms/core/components/UI/Icon';
|
||||
import { NetlifyAuthenticator } from '@staticcms/core/lib/auth';
|
||||
|
||||
import type { MouseEvent } from 'react';
|
||||
import type { AuthenticationPageProps, TranslatedProps } from '@staticcms/core/interface';
|
||||
|
||||
const LoginButtonIcon = styled(Icon)`
|
||||
margin-right: 18px;
|
||||
`;
|
||||
|
||||
const GitHubAuthenticationPage = ({
|
||||
inProgress = false,
|
||||
config,
|
||||
base_url,
|
||||
siteId,
|
||||
authEndpoint,
|
||||
onLogin,
|
||||
t,
|
||||
}: TranslatedProps<AuthenticationPageProps>) => {
|
||||
const [loginError, setLoginError] = useState<string | null>(null);
|
||||
|
||||
const handleLogin = useCallback(
|
||||
(e: MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault();
|
||||
const cfg = {
|
||||
base_url,
|
||||
site_id: document.location.host.split(':')[0] === 'localhost' ? 'cms.netlify.com' : siteId,
|
||||
auth_endpoint: authEndpoint,
|
||||
};
|
||||
const auth = new NetlifyAuthenticator(cfg);
|
||||
|
||||
const { auth_scope: authScope = '' } = config.backend;
|
||||
|
||||
const scope = authScope || 'repo';
|
||||
auth.authenticate({ provider: 'github', scope }, (err, data) => {
|
||||
if (err) {
|
||||
setLoginError(err.toString());
|
||||
} else if (data) {
|
||||
onLogin(data);
|
||||
}
|
||||
});
|
||||
},
|
||||
[authEndpoint, base_url, config.backend, onLogin, siteId],
|
||||
);
|
||||
|
||||
return (
|
||||
<AuthenticationPage
|
||||
onLogin={handleLogin}
|
||||
loginDisabled={inProgress}
|
||||
loginErrorMessage={loginError}
|
||||
logoUrl={config.logo_url}
|
||||
siteUrl={config.site_url}
|
||||
icon={<LoginButtonIcon type="github" />}
|
||||
buttonContent={t('auth.loginWithGitHub')}
|
||||
t={t}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default GitHubAuthenticationPage;
|
318
packages/core/src/backends/github/__tests__/API.spec.ts
Normal file
@ -0,0 +1,318 @@
|
||||
import { Base64 } from 'js-base64';
|
||||
|
||||
import API from '../API';
|
||||
|
||||
import type { Options } from '../API';
|
||||
|
||||
describe('github API', () => {
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
|
||||
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));
|
||||
});
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function mockAPI(api: API, responses: Record<string, (options: Options) => any>) {
|
||||
api.request = jest.fn().mockImplementation((path, options = {}) => {
|
||||
const normalizedPath = path.indexOf('?') !== -1 ? path.slice(0, path.indexOf('?')) : path;
|
||||
const response = responses[normalizedPath];
|
||||
return typeof response === 'function'
|
||||
? Promise.resolve(response(options))
|
||||
: Promise.reject(new Error(`No response for path '${normalizedPath}'`));
|
||||
});
|
||||
}
|
||||
|
||||
describe('updateTree', () => {
|
||||
it('should create tree with nested paths', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
api.createTree = jest.fn().mockImplementation(() => Promise.resolve({ sha: 'newTreeSha' }));
|
||||
|
||||
const files = [
|
||||
{ path: '/static/media/new-image.jpeg', sha: null },
|
||||
{ path: 'content/posts/new-post.md', sha: 'new-post.md' },
|
||||
];
|
||||
|
||||
const baseTreeSha = 'baseTreeSha';
|
||||
|
||||
await expect(api.updateTree(baseTreeSha, files)).resolves.toEqual({
|
||||
sha: 'newTreeSha',
|
||||
parentSha: baseTreeSha,
|
||||
});
|
||||
|
||||
expect(api.createTree).toHaveBeenCalledTimes(1);
|
||||
expect(api.createTree).toHaveBeenCalledWith(baseTreeSha, [
|
||||
{
|
||||
path: 'static/media/new-image.jpeg',
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: null,
|
||||
},
|
||||
{
|
||||
path: 'content/posts/new-post.md',
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: 'new-post.md',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('request', () => {
|
||||
const fetch = jest.fn();
|
||||
beforeEach(() => {
|
||||
global.fetch = fetch;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
it('should fetch url with authorization header', async () => {
|
||||
const api = new API({ branch: 'gh-pages', repo: 'my-repo', token: 'token' });
|
||||
|
||||
fetch.mockResolvedValue({
|
||||
text: jest.fn().mockResolvedValue('some response'),
|
||||
ok: true,
|
||||
status: 200,
|
||||
headers: { get: () => '' },
|
||||
});
|
||||
const result = await api.request('/some-path');
|
||||
expect(result).toEqual('some response');
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(fetch).toHaveBeenCalledWith('https://api.github.com/some-path', {
|
||||
cache: 'no-cache',
|
||||
headers: {
|
||||
Authorization: 'token token',
|
||||
'Content-Type': 'application/json; charset=utf-8',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw error on not ok response', async () => {
|
||||
const api = new API({ branch: 'gh-pages', repo: 'my-repo', token: 'token' });
|
||||
|
||||
fetch.mockResolvedValue({
|
||||
text: jest.fn().mockResolvedValue({ message: 'some error' }),
|
||||
ok: false,
|
||||
status: 404,
|
||||
headers: { get: () => '' },
|
||||
});
|
||||
|
||||
await expect(api.request('some-path')).rejects.toThrow(
|
||||
expect.objectContaining({
|
||||
message: 'some error',
|
||||
name: 'API_ERROR',
|
||||
status: 404,
|
||||
api: 'GitHub',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should allow overriding requestHeaders to return a promise', async () => {
|
||||
const api = new API({ branch: 'gh-pages', repo: 'my-repo', token: 'token' });
|
||||
|
||||
api.requestHeaders = jest.fn().mockResolvedValue({
|
||||
Authorization: 'promise-token',
|
||||
'Content-Type': 'application/json; charset=utf-8',
|
||||
});
|
||||
|
||||
fetch.mockResolvedValue({
|
||||
text: jest.fn().mockResolvedValue('some response'),
|
||||
ok: true,
|
||||
status: 200,
|
||||
headers: { get: () => '' },
|
||||
});
|
||||
const result = await api.request('/some-path');
|
||||
expect(result).toEqual('some response');
|
||||
expect(fetch).toHaveBeenCalledTimes(1);
|
||||
expect(fetch).toHaveBeenCalledWith('https://api.github.com/some-path', {
|
||||
cache: 'no-cache',
|
||||
headers: {
|
||||
Authorization: 'promise-token',
|
||||
'Content-Type': 'application/json; charset=utf-8',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('persistFiles', () => {
|
||||
it('should update tree, commit and patch branch when useWorkflow is false', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
const responses = {
|
||||
// upload the file
|
||||
'/repos/owner/repo/git/blobs': () => ({ sha: 'new-file-sha' }),
|
||||
|
||||
// get the branch
|
||||
'/repos/owner/repo/branches/master': () => ({ commit: { sha: 'root' } }),
|
||||
|
||||
// create new tree
|
||||
'/repos/owner/repo/git/trees': (options: Options) => {
|
||||
const data = JSON.parse((options.body as string) ?? '');
|
||||
return { sha: data.base_tree };
|
||||
},
|
||||
|
||||
// update the commit with the tree
|
||||
'/repos/owner/repo/git/commits': () => ({ sha: 'commit-sha' }),
|
||||
|
||||
// patch the branch
|
||||
'/repos/owner/repo/git/refs/heads/master': () => ({}),
|
||||
};
|
||||
mockAPI(api, responses);
|
||||
|
||||
const entry = {
|
||||
dataFiles: [
|
||||
{
|
||||
slug: 'entry',
|
||||
sha: 'abc',
|
||||
path: 'content/posts/new-post.md',
|
||||
raw: 'content',
|
||||
},
|
||||
],
|
||||
assets: [],
|
||||
};
|
||||
await api.persistFiles(entry.dataFiles, entry.assets, { commitMessage: 'commitMessage' });
|
||||
|
||||
expect(api.request).toHaveBeenCalledTimes(5);
|
||||
|
||||
expect((api.request as jest.Mock).mock.calls[0]).toEqual([
|
||||
'/repos/owner/repo/git/blobs',
|
||||
{
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
content: Base64.encode(entry.dataFiles[0].raw),
|
||||
encoding: 'base64',
|
||||
}),
|
||||
},
|
||||
]);
|
||||
|
||||
expect((api.request as jest.Mock).mock.calls[1]).toEqual([
|
||||
'/repos/owner/repo/branches/master',
|
||||
]);
|
||||
|
||||
expect((api.request as jest.Mock).mock.calls[2]).toEqual([
|
||||
'/repos/owner/repo/git/trees',
|
||||
{
|
||||
body: JSON.stringify({
|
||||
base_tree: 'root',
|
||||
tree: [
|
||||
{
|
||||
path: 'content/posts/new-post.md',
|
||||
mode: '100644',
|
||||
type: 'blob',
|
||||
sha: 'new-file-sha',
|
||||
},
|
||||
],
|
||||
}),
|
||||
method: 'POST',
|
||||
},
|
||||
]);
|
||||
|
||||
expect((api.request as jest.Mock).mock.calls[3]).toEqual([
|
||||
'/repos/owner/repo/git/commits',
|
||||
{
|
||||
body: JSON.stringify({
|
||||
message: 'commitMessage',
|
||||
tree: 'root',
|
||||
parents: ['root'],
|
||||
}),
|
||||
method: 'POST',
|
||||
},
|
||||
]);
|
||||
|
||||
expect((api.request as jest.Mock).mock.calls[4]).toEqual([
|
||||
'/repos/owner/repo/git/refs/heads/master',
|
||||
{
|
||||
body: JSON.stringify({
|
||||
sha: 'commit-sha',
|
||||
}),
|
||||
method: 'PATCH',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('listFiles', () => {
|
||||
it('should get files by depth', async () => {
|
||||
const api = new API({ branch: 'master', repo: 'owner/repo' });
|
||||
|
||||
const tree = [
|
||||
{
|
||||
path: 'post.md',
|
||||
type: 'blob',
|
||||
},
|
||||
{
|
||||
path: 'dir1',
|
||||
type: 'tree',
|
||||
},
|
||||
{
|
||||
path: 'dir1/nested-post.md',
|
||||
type: 'blob',
|
||||
},
|
||||
{
|
||||
path: 'dir1/dir2',
|
||||
type: 'tree',
|
||||
},
|
||||
{
|
||||
path: 'dir1/dir2/nested-post.md',
|
||||
type: 'blob',
|
||||
},
|
||||
];
|
||||
api.request = jest.fn().mockResolvedValue({ tree });
|
||||
|
||||
await expect(api.listFiles('posts', { depth: 1 })).resolves.toEqual([
|
||||
{
|
||||
path: 'posts/post.md',
|
||||
type: 'blob',
|
||||
name: 'post.md',
|
||||
},
|
||||
]);
|
||||
expect(api.request).toHaveBeenCalledTimes(1);
|
||||
expect(api.request).toHaveBeenCalledWith('/repos/owner/repo/git/trees/master:posts', {
|
||||
params: {},
|
||||
});
|
||||
|
||||
jest.clearAllMocks();
|
||||
await expect(api.listFiles('posts', { depth: 2 })).resolves.toEqual([
|
||||
{
|
||||
path: 'posts/post.md',
|
||||
type: 'blob',
|
||||
name: 'post.md',
|
||||
},
|
||||
{
|
||||
path: 'posts/dir1/nested-post.md',
|
||||
type: 'blob',
|
||||
name: 'nested-post.md',
|
||||
},
|
||||
]);
|
||||
expect(api.request).toHaveBeenCalledTimes(1);
|
||||
expect(api.request).toHaveBeenCalledWith('/repos/owner/repo/git/trees/master:posts', {
|
||||
params: { recursive: 1 },
|
||||
});
|
||||
|
||||
jest.clearAllMocks();
|
||||
await expect(api.listFiles('posts', { depth: 3 })).resolves.toEqual([
|
||||
{
|
||||
path: 'posts/post.md',
|
||||
type: 'blob',
|
||||
name: 'post.md',
|
||||
},
|
||||
{
|
||||
path: 'posts/dir1/nested-post.md',
|
||||
type: 'blob',
|
||||
name: 'nested-post.md',
|
||||
},
|
||||
{
|
||||
path: 'posts/dir1/dir2/nested-post.md',
|
||||
type: 'blob',
|
||||
name: 'nested-post.md',
|
||||
},
|
||||
]);
|
||||
expect(api.request).toHaveBeenCalledTimes(1);
|
||||
expect(api.request).toHaveBeenCalledWith('/repos/owner/repo/git/trees/master:posts', {
|
||||
params: { recursive: 1 },
|
||||
});
|
||||
});
|
||||
});
|
||||
});
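A note on the mockAPI helper above (a sketch, not part of the commit; the '/user' stub is hypothetical): responses are keyed by the request path with any query string stripped, so calls that differ only in their query string resolve to the same stub.

// Sketch only; would sit inside the describe block so mockAPI is in scope.
it('resolves stubs with the query string stripped', async () => {
  const api = new API({ branch: 'main', repo: 'owner/repo' });
  mockAPI(api, { '/user': () => ({ login: 'owner' }) });

  await expect(api.request('/user')).resolves.toEqual({ login: 'owner' });
  await expect(api.request('/user?per_page=100')).resolves.toEqual({ login: 'owner' });
});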
|
@ -0,0 +1,289 @@
|
||||
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from '@staticcms/core/lib/util/Cursor';
|
||||
import GitHubImplementation from '../implementation';
|
||||
|
||||
import type { Config, UnknownField } from '@staticcms/core';
|
||||
import type API from '../API';
|
||||
import type { AssetProxy } from '@staticcms/core/valueObjects';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
declare const global: any;
|
||||
|
||||
describe('github backend implementation', () => {
|
||||
const config = {
|
||||
backend: {
|
||||
repo: 'owner/repo',
|
||||
api_root: 'https://api.github.com',
|
||||
},
|
||||
} as Config<UnknownField>;
|
||||
|
||||
const createObjectURL = jest.fn();
|
||||
global.URL = {
|
||||
createObjectURL,
|
||||
};
|
||||
|
||||
createObjectURL.mockReturnValue('displayURL');
|
||||
|
||||
beforeAll(() => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('persistMedia', () => {
|
||||
const persistFiles = jest.fn();
|
||||
const mockAPI = {
|
||||
persistFiles,
|
||||
} as unknown as API;
|
||||
|
||||
persistFiles.mockImplementation((_, files: (AssetProxy & { sha: string })[]) => {
|
||||
files.forEach((file, index) => {
|
||||
file.sha = `${index}`;
|
||||
});
|
||||
});
|
||||
|
||||
it('should persist media file', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const mediaFile = {
|
||||
fileObj: { size: 100, name: 'image.png' },
|
||||
path: '/media/image.png',
|
||||
} as AssetProxy;
|
||||
|
||||
expect.assertions(5);
|
||||
await expect(
|
||||
gitHubImplementation.persistMedia(mediaFile, { commitMessage: 'Persisting media' }),
|
||||
).resolves.toEqual({
|
||||
id: '0',
|
||||
name: 'image.png',
|
||||
size: 100,
|
||||
displayURL: 'displayURL',
|
||||
path: 'media/image.png',
|
||||
});
|
||||
|
||||
expect(persistFiles).toHaveBeenCalledTimes(1);
|
||||
expect(persistFiles).toHaveBeenCalledWith([], [mediaFile], {
|
||||
commitMessage: 'Persisting media',
|
||||
});
|
||||
expect(createObjectURL).toHaveBeenCalledTimes(1);
|
||||
expect(createObjectURL).toHaveBeenCalledWith(mediaFile.fileObj);
|
||||
});
|
||||
|
||||
it('should log and throw error on "persistFiles" error', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const error = new Error('failed to persist files');
|
||||
persistFiles.mockRejectedValue(error);
|
||||
|
||||
const mediaFile = {
|
||||
fileObj: { size: 100 },
|
||||
path: '/media/image.png',
|
||||
} as AssetProxy;
|
||||
|
||||
expect.assertions(5);
|
||||
await expect(
|
||||
gitHubImplementation.persistMedia(mediaFile, { commitMessage: 'Persisting media' }),
|
||||
).rejects.toThrowError(error);
|
||||
|
||||
expect(persistFiles).toHaveBeenCalledTimes(1);
|
||||
expect(createObjectURL).toHaveBeenCalledTimes(0);
|
||||
expect(console.error).toHaveBeenCalledTimes(1);
|
||||
expect(console.error).toHaveBeenCalledWith(error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('entriesByFolder', () => {
|
||||
const listFiles = jest.fn();
|
||||
const readFile = jest.fn();
|
||||
const readFileMetadata = jest.fn(() => Promise.resolve({ author: '', updatedOn: '' }));
|
||||
|
||||
const mockAPI = {
|
||||
listFiles,
|
||||
readFile,
|
||||
readFileMetadata,
|
||||
originRepoURL: 'originRepoURL',
|
||||
} as unknown as API;
|
||||
|
||||
it('should return entries and cursor', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const files = [];
|
||||
const count = 1501;
|
||||
for (let i = 0; i < count; i++) {
|
||||
const id = `${i}`.padStart(`${count}`.length, '0');
|
||||
files.push({
|
||||
id,
|
||||
path: `posts/post-${id}.md`,
|
||||
});
|
||||
}
|
||||
|
||||
listFiles.mockResolvedValue(files);
|
||||
readFile.mockImplementation((_path, id) => Promise.resolve(`${id}`));
|
||||
|
||||
const expectedEntries = files
|
||||
.slice(0, 20)
|
||||
.map(({ id, path }) => ({ data: id, file: { path, id, author: '', updatedOn: '' } }));
|
||||
|
||||
const expectedCursor = Cursor.create({
|
||||
actions: ['next', 'last'],
|
||||
meta: { page: 1, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(expectedEntries as any)[CURSOR_COMPATIBILITY_SYMBOL] = expectedCursor;
|
||||
|
||||
const result = await gitHubImplementation.entriesByFolder('posts', 'md', 1);
|
||||
|
||||
expect(result).toEqual(expectedEntries);
|
||||
expect(listFiles).toHaveBeenCalledTimes(1);
|
||||
expect(listFiles).toHaveBeenCalledWith('posts', { depth: 1, repoURL: 'originRepoURL' });
|
||||
expect(readFile).toHaveBeenCalledTimes(20);
|
||||
});
|
||||
});
|
||||
|
||||
describe('traverseCursor', () => {
|
||||
const listFiles = jest.fn();
|
||||
const readFile = jest.fn((_path, id) => Promise.resolve(`${id}`));
|
||||
const readFileMetadata = jest.fn(() => Promise.resolve({}));
|
||||
|
||||
const mockAPI = {
|
||||
listFiles,
|
||||
readFile,
|
||||
originRepoURL: 'originRepoURL',
|
||||
readFileMetadata,
|
||||
} as unknown as API;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const files: any[] = [];
|
||||
const count = 1501;
|
||||
for (let i = 0; i < count; i++) {
|
||||
const id = `${i}`.padStart(`${count}`.length, '0');
|
||||
files.push({
|
||||
id,
|
||||
path: `posts/post-${id}.md`,
|
||||
});
|
||||
}
|
||||
|
||||
it('should handle next action', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const cursor = Cursor.create({
|
||||
actions: ['next', 'last'],
|
||||
meta: { page: 1, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const expectedEntries = files
|
||||
.slice(20, 40)
|
||||
.map(({ id, path }) => ({ data: id, file: { path, id } }));
|
||||
|
||||
const expectedCursor = Cursor.create({
|
||||
actions: ['prev', 'first', 'next', 'last'],
|
||||
meta: { page: 2, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const result = await gitHubImplementation.traverseCursor(cursor, 'next');
|
||||
|
||||
expect(result).toEqual({
|
||||
entries: expectedEntries,
|
||||
cursor: expectedCursor,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle prev action', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const cursor = Cursor.create({
|
||||
actions: ['prev', 'first', 'next', 'last'],
|
||||
meta: { page: 2, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const expectedEntries = files
|
||||
.slice(0, 20)
|
||||
.map(({ id, path }) => ({ data: id, file: { path, id } }));
|
||||
|
||||
const expectedCursor = Cursor.create({
|
||||
actions: ['next', 'last'],
|
||||
meta: { page: 1, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const result = await gitHubImplementation.traverseCursor(cursor, 'prev');
|
||||
|
||||
expect(result).toEqual({
|
||||
entries: expectedEntries,
|
||||
cursor: expectedCursor,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle last action', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const cursor = Cursor.create({
|
||||
actions: ['next', 'last'],
|
||||
meta: { page: 1, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const expectedEntries = files
|
||||
.slice(1500)
|
||||
.map(({ id, path }) => ({ data: id, file: { path, id } }));
|
||||
|
||||
const expectedCursor = Cursor.create({
|
||||
actions: ['prev', 'first'],
|
||||
meta: { page: 76, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const result = await gitHubImplementation.traverseCursor(cursor, 'last');
|
||||
|
||||
expect(result).toEqual({
|
||||
entries: expectedEntries,
|
||||
cursor: expectedCursor,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle first action', async () => {
|
||||
const gitHubImplementation = new GitHubImplementation(config);
|
||||
gitHubImplementation.api = mockAPI;
|
||||
|
||||
const cursor = Cursor.create({
|
||||
actions: ['prev', 'first'],
|
||||
meta: { page: 76, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const expectedEntries = files
|
||||
.slice(0, 20)
|
||||
.map(({ id, path }) => ({ data: id, file: { path, id } }));
|
||||
|
||||
const expectedCursor = Cursor.create({
|
||||
actions: ['next', 'last'],
|
||||
meta: { page: 1, count, pageSize: 20, pageCount: 76 },
|
||||
data: { files },
|
||||
});
|
||||
|
||||
const result = await gitHubImplementation.traverseCursor(cursor, 'first');
|
||||
|
||||
expect(result).toEqual({
|
||||
entries: expectedEntries,
|
||||
cursor: expectedCursor,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
572
packages/core/src/backends/github/fragmentTypes.ts
Normal file
@ -0,0 +1,572 @@
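// GraphQL introspection data: for every interface/union in GitHub's schema, the list of
// concrete types that can appear behind it. The shape matches what Apollo's fragment
// matcher consumes, which is presumably how this backend resolves `... on X` fragments.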
|
||||
export default {
|
||||
__schema: {
|
||||
types: [
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Node',
|
||||
possibleTypes: [
|
||||
{ name: 'AddedToProjectEvent' },
|
||||
{ name: 'App' },
|
||||
{ name: 'AssignedEvent' },
|
||||
{ name: 'BaseRefChangedEvent' },
|
||||
{ name: 'BaseRefForcePushedEvent' },
|
||||
{ name: 'Blob' },
|
||||
{ name: 'Bot' },
|
||||
{ name: 'BranchProtectionRule' },
|
||||
{ name: 'ClosedEvent' },
|
||||
{ name: 'CodeOfConduct' },
|
||||
{ name: 'CommentDeletedEvent' },
|
||||
{ name: 'Commit' },
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'CommitCommentThread' },
|
||||
{ name: 'ConvertedNoteToIssueEvent' },
|
||||
{ name: 'CrossReferencedEvent' },
|
||||
{ name: 'DemilestonedEvent' },
|
||||
{ name: 'DeployKey' },
|
||||
{ name: 'DeployedEvent' },
|
||||
{ name: 'Deployment' },
|
||||
{ name: 'DeploymentEnvironmentChangedEvent' },
|
||||
{ name: 'DeploymentStatus' },
|
||||
{ name: 'ExternalIdentity' },
|
||||
{ name: 'Gist' },
|
||||
{ name: 'GistComment' },
|
||||
{ name: 'HeadRefDeletedEvent' },
|
||||
{ name: 'HeadRefForcePushedEvent' },
|
||||
{ name: 'HeadRefRestoredEvent' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'Label' },
|
||||
{ name: 'LabeledEvent' },
|
||||
{ name: 'Language' },
|
||||
{ name: 'License' },
|
||||
{ name: 'LockedEvent' },
|
||||
{ name: 'Mannequin' },
|
||||
{ name: 'MarketplaceCategory' },
|
||||
{ name: 'MarketplaceListing' },
|
||||
{ name: 'MentionedEvent' },
|
||||
{ name: 'MergedEvent' },
|
||||
{ name: 'Milestone' },
|
||||
{ name: 'MilestonedEvent' },
|
||||
{ name: 'MovedColumnsInProjectEvent' },
|
||||
{ name: 'Organization' },
|
||||
{ name: 'OrganizationIdentityProvider' },
|
||||
{ name: 'OrganizationInvitation' },
|
||||
{ name: 'PinnedEvent' },
|
||||
{ name: 'Project' },
|
||||
{ name: 'ProjectCard' },
|
||||
{ name: 'ProjectColumn' },
|
||||
{ name: 'PublicKey' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestCommit' },
|
||||
{ name: 'PullRequestCommitCommentThread' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
{ name: 'PullRequestReviewThread' },
|
||||
{ name: 'PushAllowance' },
|
||||
{ name: 'Reaction' },
|
||||
{ name: 'ReadyForReviewEvent' },
|
||||
{ name: 'Ref' },
|
||||
{ name: 'ReferencedEvent' },
|
||||
{ name: 'RegistryPackage' },
|
||||
{ name: 'RegistryPackageDependency' },
|
||||
{ name: 'RegistryPackageFile' },
|
||||
{ name: 'RegistryPackageTag' },
|
||||
{ name: 'RegistryPackageVersion' },
|
||||
{ name: 'Release' },
|
||||
{ name: 'ReleaseAsset' },
|
||||
{ name: 'RemovedFromProjectEvent' },
|
||||
{ name: 'RenamedTitleEvent' },
|
||||
{ name: 'ReopenedEvent' },
|
||||
{ name: 'Repository' },
|
||||
{ name: 'RepositoryInvitation' },
|
||||
{ name: 'RepositoryTopic' },
|
||||
{ name: 'ReviewDismissalAllowance' },
|
||||
{ name: 'ReviewDismissedEvent' },
|
||||
{ name: 'ReviewRequest' },
|
||||
{ name: 'ReviewRequestRemovedEvent' },
|
||||
{ name: 'ReviewRequestedEvent' },
|
||||
{ name: 'SavedReply' },
|
||||
{ name: 'SecurityAdvisory' },
|
||||
{ name: 'SponsorsListing' },
|
||||
{ name: 'Sponsorship' },
|
||||
{ name: 'Status' },
|
||||
{ name: 'StatusContext' },
|
||||
{ name: 'SubscribedEvent' },
|
||||
{ name: 'Tag' },
|
||||
{ name: 'Team' },
|
||||
{ name: 'Topic' },
|
||||
{ name: 'TransferredEvent' },
|
||||
{ name: 'Tree' },
|
||||
{ name: 'UnassignedEvent' },
|
||||
{ name: 'UnlabeledEvent' },
|
||||
{ name: 'UnlockedEvent' },
|
||||
{ name: 'UnpinnedEvent' },
|
||||
{ name: 'UnsubscribedEvent' },
|
||||
{ name: 'User' },
|
||||
{ name: 'UserBlockedEvent' },
|
||||
{ name: 'UserContentEdit' },
|
||||
{ name: 'UserStatus' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'UniformResourceLocatable',
|
||||
possibleTypes: [
|
||||
{ name: 'Bot' },
|
||||
{ name: 'ClosedEvent' },
|
||||
{ name: 'Commit' },
|
||||
{ name: 'CrossReferencedEvent' },
|
||||
{ name: 'Gist' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'Mannequin' },
|
||||
{ name: 'MergedEvent' },
|
||||
{ name: 'Milestone' },
|
||||
{ name: 'Organization' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestCommit' },
|
||||
{ name: 'ReadyForReviewEvent' },
|
||||
{ name: 'Release' },
|
||||
{ name: 'Repository' },
|
||||
{ name: 'RepositoryTopic' },
|
||||
{ name: 'ReviewDismissedEvent' },
|
||||
{ name: 'User' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Actor',
|
||||
possibleTypes: [
|
||||
{ name: 'Bot' },
|
||||
{ name: 'Mannequin' },
|
||||
{ name: 'Organization' },
|
||||
{ name: 'User' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'RegistryPackageOwner',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'Repository' }, { name: 'User' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'ProjectOwner',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'Repository' }, { name: 'User' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Closable',
|
||||
possibleTypes: [
|
||||
{ name: 'Issue' },
|
||||
{ name: 'Milestone' },
|
||||
{ name: 'Project' },
|
||||
{ name: 'PullRequest' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Updatable',
|
||||
possibleTypes: [
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'GistComment' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'Project' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'ProjectCardItem',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Assignable',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Comment',
|
||||
possibleTypes: [
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'GistComment' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'UpdatableComment',
|
||||
possibleTypes: [
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'GistComment' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Labelable',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Lockable',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'RegistryPackageSearch',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'User' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'RepositoryOwner',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'User' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'MemberStatusable',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'Team' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'ProfileOwner',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'User' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'PinnableItem',
|
||||
possibleTypes: [{ name: 'Gist' }, { name: 'Repository' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Starrable',
|
||||
possibleTypes: [{ name: 'Gist' }, { name: 'Repository' }, { name: 'Topic' }],
|
||||
},
|
||||
{ kind: 'INTERFACE', name: 'RepositoryInfo', possibleTypes: [{ name: 'Repository' }] },
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'GitObject',
|
||||
possibleTypes: [{ name: 'Blob' }, { name: 'Commit' }, { name: 'Tag' }, { name: 'Tree' }],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'RepositoryNode',
|
||||
possibleTypes: [
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'CommitCommentThread' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestCommitCommentThread' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Subscribable',
|
||||
possibleTypes: [
|
||||
{ name: 'Commit' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'Repository' },
|
||||
{ name: 'Team' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Deletable',
|
||||
possibleTypes: [
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'GistComment' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Reactable',
|
||||
possibleTypes: [
|
||||
{ name: 'CommitComment' },
|
||||
{ name: 'Issue' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'GitSignature',
|
||||
possibleTypes: [
|
||||
{ name: 'GpgSignature' },
|
||||
{ name: 'SmimeSignature' },
|
||||
{ name: 'UnknownSignature' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'RequestedReviewer',
|
||||
possibleTypes: [{ name: 'User' }, { name: 'Team' }, { name: 'Mannequin' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'PullRequestTimelineItem',
|
||||
possibleTypes: [
|
||||
{ name: 'Commit' },
|
||||
{ name: 'CommitCommentThread' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewThread' },
|
||||
{ name: 'PullRequestReviewComment' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'ClosedEvent' },
|
||||
{ name: 'ReopenedEvent' },
|
||||
{ name: 'SubscribedEvent' },
|
||||
{ name: 'UnsubscribedEvent' },
|
||||
{ name: 'MergedEvent' },
|
||||
{ name: 'ReferencedEvent' },
|
||||
{ name: 'CrossReferencedEvent' },
|
||||
{ name: 'AssignedEvent' },
|
||||
{ name: 'UnassignedEvent' },
|
||||
{ name: 'LabeledEvent' },
|
||||
{ name: 'UnlabeledEvent' },
|
||||
{ name: 'MilestonedEvent' },
|
||||
{ name: 'DemilestonedEvent' },
|
||||
{ name: 'RenamedTitleEvent' },
|
||||
{ name: 'LockedEvent' },
|
||||
{ name: 'UnlockedEvent' },
|
||||
{ name: 'DeployedEvent' },
|
||||
{ name: 'DeploymentEnvironmentChangedEvent' },
|
||||
{ name: 'HeadRefDeletedEvent' },
|
||||
{ name: 'HeadRefRestoredEvent' },
|
||||
{ name: 'HeadRefForcePushedEvent' },
|
||||
{ name: 'BaseRefForcePushedEvent' },
|
||||
{ name: 'ReviewRequestedEvent' },
|
||||
{ name: 'ReviewRequestRemovedEvent' },
|
||||
{ name: 'ReviewDismissedEvent' },
|
||||
{ name: 'UserBlockedEvent' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'Closer',
|
||||
possibleTypes: [{ name: 'Commit' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'ReferencedSubject',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'Assignee',
|
||||
possibleTypes: [
|
||||
{ name: 'Bot' },
|
||||
{ name: 'Mannequin' },
|
||||
{ name: 'Organization' },
|
||||
{ name: 'User' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'MilestoneItem',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'RenamedTitleSubject',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'PullRequestTimelineItems',
|
||||
possibleTypes: [
|
||||
{ name: 'PullRequestCommit' },
|
||||
{ name: 'PullRequestCommitCommentThread' },
|
||||
{ name: 'PullRequestReview' },
|
||||
{ name: 'PullRequestReviewThread' },
|
||||
{ name: 'PullRequestRevisionMarker' },
|
||||
{ name: 'BaseRefChangedEvent' },
|
||||
{ name: 'BaseRefForcePushedEvent' },
|
||||
{ name: 'DeployedEvent' },
|
||||
{ name: 'DeploymentEnvironmentChangedEvent' },
|
||||
{ name: 'HeadRefDeletedEvent' },
|
||||
{ name: 'HeadRefForcePushedEvent' },
|
||||
{ name: 'HeadRefRestoredEvent' },
|
||||
{ name: 'MergedEvent' },
|
||||
{ name: 'ReviewDismissedEvent' },
|
||||
{ name: 'ReviewRequestedEvent' },
|
||||
{ name: 'ReviewRequestRemovedEvent' },
|
||||
{ name: 'ReadyForReviewEvent' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'CrossReferencedEvent' },
|
||||
{ name: 'AddedToProjectEvent' },
|
||||
{ name: 'AssignedEvent' },
|
||||
{ name: 'ClosedEvent' },
|
||||
{ name: 'CommentDeletedEvent' },
|
||||
{ name: 'ConvertedNoteToIssueEvent' },
|
||||
{ name: 'DemilestonedEvent' },
|
||||
{ name: 'LabeledEvent' },
|
||||
{ name: 'LockedEvent' },
|
||||
{ name: 'MentionedEvent' },
|
||||
{ name: 'MilestonedEvent' },
|
||||
{ name: 'MovedColumnsInProjectEvent' },
|
||||
{ name: 'PinnedEvent' },
|
||||
{ name: 'ReferencedEvent' },
|
||||
{ name: 'RemovedFromProjectEvent' },
|
||||
{ name: 'RenamedTitleEvent' },
|
||||
{ name: 'ReopenedEvent' },
|
||||
{ name: 'SubscribedEvent' },
|
||||
{ name: 'TransferredEvent' },
|
||||
{ name: 'UnassignedEvent' },
|
||||
{ name: 'UnlabeledEvent' },
|
||||
{ name: 'UnlockedEvent' },
|
||||
{ name: 'UserBlockedEvent' },
|
||||
{ name: 'UnpinnedEvent' },
|
||||
{ name: 'UnsubscribedEvent' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'IssueOrPullRequest',
|
||||
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'IssueTimelineItem',
|
||||
possibleTypes: [
|
||||
{ name: 'Commit' },
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'CrossReferencedEvent' },
|
||||
{ name: 'ClosedEvent' },
|
||||
{ name: 'ReopenedEvent' },
|
||||
{ name: 'SubscribedEvent' },
|
||||
{ name: 'UnsubscribedEvent' },
|
||||
{ name: 'ReferencedEvent' },
|
||||
{ name: 'AssignedEvent' },
|
||||
{ name: 'UnassignedEvent' },
|
||||
{ name: 'LabeledEvent' },
|
||||
{ name: 'UnlabeledEvent' },
|
||||
{ name: 'UserBlockedEvent' },
|
||||
{ name: 'MilestonedEvent' },
|
||||
{ name: 'DemilestonedEvent' },
|
||||
{ name: 'RenamedTitleEvent' },
|
||||
{ name: 'LockedEvent' },
|
||||
{ name: 'UnlockedEvent' },
|
||||
{ name: 'TransferredEvent' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'IssueTimelineItems',
|
||||
possibleTypes: [
|
||||
{ name: 'IssueComment' },
|
||||
{ name: 'CrossReferencedEvent' },
|
||||
{ name: 'AddedToProjectEvent' },
|
||||
{ name: 'AssignedEvent' },
|
||||
{ name: 'ClosedEvent' },
|
||||
{ name: 'CommentDeletedEvent' },
|
||||
{ name: 'ConvertedNoteToIssueEvent' },
|
||||
{ name: 'DemilestonedEvent' },
|
||||
{ name: 'LabeledEvent' },
|
||||
{ name: 'LockedEvent' },
|
||||
{ name: 'MentionedEvent' },
|
||||
{ name: 'MilestonedEvent' },
|
||||
{ name: 'MovedColumnsInProjectEvent' },
|
||||
{ name: 'PinnedEvent' },
|
||||
{ name: 'ReferencedEvent' },
|
||||
{ name: 'RemovedFromProjectEvent' },
|
||||
{ name: 'RenamedTitleEvent' },
|
||||
{ name: 'ReopenedEvent' },
|
||||
{ name: 'SubscribedEvent' },
|
||||
{ name: 'TransferredEvent' },
|
||||
{ name: 'UnassignedEvent' },
|
||||
{ name: 'UnlabeledEvent' },
|
||||
{ name: 'UnlockedEvent' },
|
||||
{ name: 'UserBlockedEvent' },
|
||||
{ name: 'UnpinnedEvent' },
|
||||
{ name: 'UnsubscribedEvent' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'ReviewDismissalAllowanceActor',
|
||||
possibleTypes: [{ name: 'User' }, { name: 'Team' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'PushAllowanceActor',
|
||||
possibleTypes: [{ name: 'User' }, { name: 'Team' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'PermissionGranter',
|
||||
possibleTypes: [{ name: 'Organization' }, { name: 'Repository' }, { name: 'Team' }],
|
||||
},
|
||||
{ kind: 'INTERFACE', name: 'Sponsorable', possibleTypes: [{ name: 'User' }] },
|
||||
{
|
||||
kind: 'INTERFACE',
|
||||
name: 'Contribution',
|
||||
possibleTypes: [
|
||||
{ name: 'CreatedCommitContribution' },
|
||||
{ name: 'CreatedIssueContribution' },
|
||||
{ name: 'CreatedPullRequestContribution' },
|
||||
{ name: 'CreatedPullRequestReviewContribution' },
|
||||
{ name: 'CreatedRepositoryContribution' },
|
||||
{ name: 'JoinedGitHubContribution' },
|
||||
{ name: 'RestrictedContribution' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'CreatedRepositoryOrRestrictedContribution',
|
||||
possibleTypes: [
|
||||
{ name: 'CreatedRepositoryContribution' },
|
||||
{ name: 'RestrictedContribution' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'CreatedIssueOrRestrictedContribution',
|
||||
possibleTypes: [{ name: 'CreatedIssueContribution' }, { name: 'RestrictedContribution' }],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'CreatedPullRequestOrRestrictedContribution',
|
||||
possibleTypes: [
|
||||
{ name: 'CreatedPullRequestContribution' },
|
||||
{ name: 'RestrictedContribution' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'SearchResultItem',
|
||||
possibleTypes: [
|
||||
{ name: 'Issue' },
|
||||
{ name: 'PullRequest' },
|
||||
{ name: 'Repository' },
|
||||
{ name: 'User' },
|
||||
{ name: 'Organization' },
|
||||
{ name: 'MarketplaceListing' },
|
||||
{ name: 'App' },
|
||||
],
|
||||
},
|
||||
{
|
||||
kind: 'UNION',
|
||||
name: 'CollectionItemContent',
|
||||
possibleTypes: [{ name: 'Repository' }, { name: 'Organization' }, { name: 'User' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
92
packages/core/src/backends/github/fragments.ts
Normal file
@ -0,0 +1,92 @@
import { gql } from 'graphql-tag';

export const repository = gql`
  fragment RepositoryParts on Repository {
    id
    isFork
  }
`;

export const blobWithText = gql`
  fragment BlobWithTextParts on Blob {
    id
    text
    is_binary: isBinary
  }
`;

export const object = gql`
  fragment ObjectParts on GitObject {
    id
    sha: oid
  }
`;

export const branch = gql`
  fragment BranchParts on Ref {
    commit: target {
      ...ObjectParts
    }
    id
    name
    prefix
    repository {
      ...RepositoryParts
    }
  }
  ${object}
  ${repository}
`;

export const pullRequest = gql`
  fragment PullRequestParts on PullRequest {
    id
    baseRefName
    baseRefOid
    body
    headRefName
    headRefOid
    number
    state
    title
    merged_at: mergedAt
    updated_at: updatedAt
    user: author {
      login
      ... on User {
        name
      }
    }
    repository {
      ...RepositoryParts
    }
    labels(last: 100) {
      nodes {
        name
      }
    }
  }
  ${repository}
`;

export const treeEntry = gql`
  fragment TreeEntryParts on TreeEntry {
    path: name
    sha: oid
    type
    mode
  }
`;

export const fileEntry = gql`
  fragment FileEntryParts on TreeEntry {
    name
    sha: oid
    type
    blob: object {
      ... on Blob {
        size: byteSize
      }
    }
  }
`;
434
packages/core/src/backends/github/implementation.tsx
Normal file
@ -0,0 +1,434 @@
|
||||
import { stripIndent } from 'common-tags';
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import semaphore from 'semaphore';
|
||||
|
||||
import {
|
||||
asyncLock,
|
||||
basename,
|
||||
blobToFileObj,
|
||||
Cursor,
|
||||
CURSOR_COMPATIBILITY_SYMBOL,
|
||||
entriesByFiles,
|
||||
entriesByFolder,
|
||||
filterByExtension,
|
||||
getBlobSHA,
|
||||
getMediaAsBlob,
|
||||
getMediaDisplayURL,
|
||||
runWithLock,
|
||||
unsentRequest,
|
||||
} from '@staticcms/core/lib/util';
|
||||
import API, { API_NAME } from './API';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
import type {
|
||||
BackendClass,
|
||||
BackendEntry,
|
||||
Config,
|
||||
Credentials,
|
||||
DisplayURL,
|
||||
ImplementationFile,
|
||||
PersistOptions,
|
||||
User,
|
||||
} from '@staticcms/core/interface';
|
||||
import type { AsyncLock } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
import type { Semaphore } from 'semaphore';
|
||||
import type { GitHubUser } from './types';
|
||||
|
||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||
|
||||
type ApiFile = { id: string; type: string; name: string; path: string; size: number };
|
||||
|
||||
const { fetchWithTimeout: fetch } = unsentRequest;
|
||||
|
||||
const STATUS_PAGE = 'https://www.githubstatus.com';
|
||||
const GITHUB_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
|
||||
const GITHUB_OPERATIONAL_UNITS = ['API Requests', 'Issues, Pull Requests, Projects'];
|
||||
type GitHubStatusComponent = {
|
||||
id: string;
|
||||
name: string;
|
||||
status: string;
|
||||
};
|
||||
|
||||
export default class GitHub implements BackendClass {
|
||||
lock: AsyncLock;
|
||||
api: API | null;
|
||||
options: {
|
||||
proxied: boolean;
|
||||
API: API | null;
|
||||
};
|
||||
originRepo: string;
|
||||
repo?: string;
|
||||
branch: string;
|
||||
apiRoot: string;
|
||||
mediaFolder?: string;
|
||||
token: string | null;
|
||||
_currentUserPromise?: Promise<GitHubUser>;
|
||||
_userIsOriginMaintainerPromises?: {
|
||||
[key: string]: Promise<boolean>;
|
||||
};
|
||||
_mediaDisplayURLSem?: Semaphore;
|
||||
|
||||
constructor(config: Config, options = {}) {
|
||||
this.options = {
|
||||
proxied: false,
|
||||
API: null,
|
||||
...options,
|
||||
};
|
||||
|
||||
if (
|
||||
!this.options.proxied &&
|
||||
(config.backend.repo === null || config.backend.repo === undefined)
|
||||
) {
|
||||
throw new Error('The GitHub backend needs a "repo" in the backend configuration.');
|
||||
}
|
||||
|
||||
this.api = this.options.API || null;
|
||||
this.repo = this.originRepo = config.backend.repo || '';
|
||||
this.branch = config.backend.branch?.trim() || 'main';
|
||||
this.apiRoot = config.backend.api_root || 'https://api.github.com';
|
||||
this.token = '';
|
||||
this.mediaFolder = config.media_folder;
|
||||
this.lock = asyncLock();
|
||||
}
|
||||
|
||||
isGitBackend() {
|
||||
return true;
|
||||
}
|
||||
|
||||
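// Backend health check: GitHub's public status API is filtered down to the components in
// GITHUB_OPERATIONAL_UNITS, and the auth probe is skipped when the API itself is down.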
async status() {
|
||||
const api = await fetch(GITHUB_STATUS_ENDPOINT)
|
||||
.then(res => res.json())
|
||||
.then(res => {
|
||||
return res['components']
|
||||
.filter((statusComponent: GitHubStatusComponent) =>
|
||||
GITHUB_OPERATIONAL_UNITS.includes(statusComponent.name),
|
||||
)
|
||||
.every(
|
||||
(statusComponent: GitHubStatusComponent) => statusComponent.status === 'operational',
|
||||
);
|
||||
})
|
||||
.catch(e => {
|
||||
console.warn('Failed getting GitHub status', e);
|
||||
return true;
|
||||
});
|
||||
|
||||
let auth = false;
|
||||
// no need to check auth if api is down
|
||||
if (api) {
|
||||
auth =
|
||||
(await this.api
|
||||
?.getUser()
|
||||
.then(user => !!user)
|
||||
.catch(e => {
|
||||
console.warn('Failed getting GitHub user', e);
|
||||
return false;
|
||||
})) || false;
|
||||
}
|
||||
|
||||
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
return AuthenticationPage;
|
||||
}
|
||||
|
||||
restoreUser(user: User) {
|
||||
return this.authenticate(user);
|
||||
}
|
||||
|
||||
async currentUser({ token }: { token: string }) {
|
||||
if (!this._currentUserPromise) {
|
||||
this._currentUserPromise = fetch(`${this.apiRoot}/user`, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
},
|
||||
}).then(res => res.json());
|
||||
}
|
||||
return this._currentUserPromise;
|
||||
}
|
||||
|
||||
async userIsOriginMaintainer({
|
||||
username: usernameArg,
|
||||
token,
|
||||
}: {
|
||||
username?: string;
|
||||
token: string;
|
||||
}) {
|
||||
const username = usernameArg || (await this.currentUser({ token })).login;
|
||||
this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
|
||||
if (!this._userIsOriginMaintainerPromises[username]) {
|
||||
this._userIsOriginMaintainerPromises[username] = fetch(
|
||||
`${this.apiRoot}/repos/${this.originRepo}/collaborators/${username}/permission`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
},
|
||||
},
|
||||
)
|
||||
.then(res => res.json())
|
||||
.then(({ permission }) => permission === 'admin' || permission === 'write');
|
||||
}
|
||||
return this._userIsOriginMaintainerPromises[username];
|
||||
}
|
||||
|
||||
async authenticate(state: Credentials) {
|
||||
this.token = state.token as string;
|
||||
const apiCtor = API;
|
||||
this.api = new apiCtor({
|
||||
token: this.token,
|
||||
branch: this.branch,
|
||||
repo: this.repo,
|
||||
originRepo: this.originRepo,
|
||||
apiRoot: this.apiRoot,
|
||||
});
|
||||
const user = await this.api!.user();
|
||||
const isCollab = await this.api!.hasWriteAccess().catch(error => {
|
||||
error.message = stripIndent`
|
||||
Repo "${this.repo}" not found.
|
||||
|
||||
Please ensure the repo information is spelled correctly.
|
||||
|
||||
If the repo is private, make sure you're logged into a GitHub account with access.
|
||||
|
||||
If your repo is under an organization, ensure the organization has granted access to Netlify
|
||||
CMS.
|
||||
`;
|
||||
throw error;
|
||||
});
|
||||
|
||||
// Unauthorized user
|
||||
if (!isCollab) {
|
||||
throw new Error('Your GitHub user account does not have access to this repo.');
|
||||
}
|
||||
|
||||
// Authorized user
|
||||
return { ...user, token: state.token as string };
|
||||
}
|
||||
|
||||
logout() {
|
||||
this.token = null;
|
||||
if (this.api && this.api.reset && typeof this.api.reset === 'function') {
|
||||
return this.api.reset();
|
||||
}
|
||||
}
|
||||
|
||||
getToken() {
|
||||
return Promise.resolve(this.token);
|
||||
}
|
||||
|
||||
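// In-memory pagination over an already-fetched listing: a fixed page size of 20, the set
// of cursor actions that are valid from `page`, and the slice of `files` for that page.
// For example, page 2 of the 1501-file fixture in the tests covers files[20..39] and
// allows 'prev', 'first', 'next' and 'last'.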
getCursorAndFiles = (files: ApiFile[], page: number) => {
|
||||
const pageSize = 20;
|
||||
const count = files.length;
|
||||
const pageCount = Math.ceil(files.length / pageSize);
|
||||
|
||||
const actions = [] as string[];
|
||||
if (page > 1) {
|
||||
actions.push('prev');
|
||||
actions.push('first');
|
||||
}
|
||||
if (page < pageCount) {
|
||||
actions.push('next');
|
||||
actions.push('last');
|
||||
}
|
||||
|
||||
const cursor = Cursor.create({
|
||||
actions,
|
||||
meta: { page, count, pageSize, pageCount },
|
||||
data: { files },
|
||||
});
|
||||
const pageFiles = files.slice((page - 1) * pageSize, page * pageSize);
|
||||
return { cursor, files: pageFiles };
|
||||
};
|
||||
|
||||
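// Loads only the first page of a folder listing. The full filtered file list is kept on a
// cursor and attached to the returned entries via CURSOR_COMPATIBILITY_SYMBOL so that
// traverseCursor can serve later pages from it.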
async entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
const repoURL = this.api!.originRepoURL;
|
||||
|
||||
let cursor: Cursor;
|
||||
|
||||
const listFiles = () =>
|
||||
this.api!.listFiles(folder, {
|
||||
repoURL,
|
||||
depth,
|
||||
}).then(files => {
|
||||
const filtered = files.filter(file => filterByExtension(file, extension));
|
||||
const result = this.getCursorAndFiles(filtered, 1);
|
||||
cursor = result.cursor;
|
||||
return result.files;
|
||||
});
|
||||
|
||||
const readFile = (path: string, id: string | null | undefined) =>
|
||||
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
|
||||
|
||||
const files = await entriesByFolder(
|
||||
listFiles,
|
||||
readFile,
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||
return files;
|
||||
}
|
||||
|
||||
async allEntriesByFolder(folder: string, extension: string, depth: number) {
|
||||
const repoURL = this.api!.originRepoURL;
|
||||
|
||||
const listFiles = () =>
|
||||
this.api!.listFiles(folder, {
|
||||
repoURL,
|
||||
depth,
|
||||
}).then(files => files.filter(file => filterByExtension(file, extension)));
|
||||
|
||||
const readFile = (path: string, id: string | null | undefined) => {
|
||||
return this.api!.readFile(path, id, { repoURL }) as Promise<string>;
|
||||
};
|
||||
|
||||
const files = await entriesByFolder(
|
||||
listFiles,
|
||||
readFile,
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
return files;
|
||||
}
|
||||
|
||||
entriesByFiles(files: ImplementationFile[]) {
|
||||
const repoURL = this.api!.repoURL;
|
||||
|
||||
const readFile = (path: string, id: string | null | undefined) =>
|
||||
this.api!.readFile(path, id, { repoURL }).catch(() => '') as Promise<string>;
|
||||
|
||||
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
|
||||
}
|
||||
|
||||
// Fetches a single entry.
|
||||
getEntry(path: string) {
|
||||
const repoURL = this.api!.originRepoURL;
|
||||
return this.api!.readFile(path, null, { repoURL })
|
||||
.then(data => ({
|
||||
file: { path, id: null },
|
||||
data: data as string,
|
||||
}))
|
||||
.catch(() => ({ file: { path, id: null }, data: '' }));
|
||||
}
|
||||
|
||||
async getMedia(mediaFolder = this.mediaFolder) {
|
||||
if (!mediaFolder) {
|
||||
return [];
|
||||
}
|
||||
return this.api!.listFiles(mediaFolder).then(files =>
|
||||
files.map(({ id, name, size, path }) => {
|
||||
// load media using getMediaDisplayURL to avoid token expiration with GitHub raw content urls
|
||||
// for private repositories
|
||||
return { id, name, size, displayURL: { id, path }, path };
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
|
||||
|
||||
const name = basename(path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
const id = await getBlobSHA(blob);
|
||||
|
||||
return {
|
||||
id,
|
||||
displayURL: url,
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
getMediaDisplayURL(displayURL: DisplayURL) {
|
||||
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
|
||||
return getMediaDisplayURL(
|
||||
displayURL,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this._mediaDisplayURLSem,
|
||||
);
|
||||
}
|
||||
|
||||
persistEntry(entry: BackendEntry, options: PersistOptions) {
|
||||
// persistEntry is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
|
||||
'Failed to acquire persist entry lock',
|
||||
);
|
||||
}
|
||||
|
||||
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
|
||||
try {
|
||||
await this.api!.persistFiles([], [mediaFile], options);
|
||||
const { sha, path, fileObj } = mediaFile as AssetProxy & { sha: string };
|
||||
const displayURL = URL.createObjectURL(fileObj as Blob);
|
||||
return {
|
||||
id: sha,
|
||||
name: fileObj!.name,
|
||||
size: fileObj!.size,
|
||||
displayURL,
|
||||
path: trimStart(path, '/'),
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
deleteFiles(paths: string[], commitMessage: string) {
|
||||
return this.api!.deleteFiles(paths, commitMessage);
|
||||
}
|
||||
|
||||
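// Cursor traversal never hits the API for the listing again: it re-slices the file list
// stored on the cursor and only reads file contents/metadata for the resulting page.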
async traverseCursor(cursor: Cursor, action: string) {
|
||||
const meta = cursor.meta;
|
||||
const files = (cursor.data?.files ?? []) as ApiFile[];
|
||||
|
||||
let result: { cursor: Cursor; files: ApiFile[] };
|
||||
switch (action) {
|
||||
case 'first': {
|
||||
result = this.getCursorAndFiles(files, 1);
|
||||
break;
|
||||
}
|
||||
case 'last': {
|
||||
result = this.getCursorAndFiles(files, (meta?.['pageCount'] as number) ?? 1);
|
||||
break;
|
||||
}
|
||||
case 'next': {
|
||||
result = this.getCursorAndFiles(files, ((meta?.['page'] as number) ?? 0) + 1);
|
||||
break;
|
||||
}
|
||||
case 'prev': {
|
||||
result = this.getCursorAndFiles(files, ((meta?.['page'] as number) ?? 2) - 1);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
result = this.getCursorAndFiles(files, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const readFile = (path: string, id: string | null | undefined) =>
|
||||
this.api!.readFile(path, id, { repoURL: this.api!.originRepoURL }).catch(
|
||||
() => '',
|
||||
) as Promise<string>;
|
||||
|
||||
const entries = await entriesByFiles(
|
||||
result.files,
|
||||
readFile,
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
|
||||
return {
|
||||
entries,
|
||||
cursor: result.cursor,
|
||||
};
|
||||
}
|
||||
}
|
3
packages/core/src/backends/github/index.ts
Normal file
@ -0,0 +1,3 @@
export { default as GitHubBackend } from './implementation';
export { default as API } from './API';
export { default as AuthenticationPage } from './AuthenticationPage';
16
packages/core/src/backends/github/mutations.ts
Normal file
@ -0,0 +1,16 @@
/* eslint-disable import/prefer-default-export */
import { gql } from 'graphql-tag';

import * as fragments from './fragments';

// updateRef only works for branches at the moment
export const updateBranch = gql`
  mutation updateRef($input: UpdateRefInput!) {
    updateRef(input: $input) {
      branch: ref {
        ...BranchParts
      }
    }
  }
  ${fragments.branch}
`;
152
packages/core/src/backends/github/queries.ts
Normal file
@ -0,0 +1,152 @@
|
||||
import { gql } from 'graphql-tag';
|
||||
import { oneLine } from 'common-tags';
|
||||
|
||||
import * as fragments from './fragments';
|
||||
|
||||
export const repoPermission = gql`
|
||||
query repoPermission($owner: String!, $name: String!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
viewerPermission
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
`;
|
||||
|
||||
export const user = gql`
|
||||
query {
|
||||
viewer {
|
||||
id
|
||||
avatar_url: avatarUrl
|
||||
name
|
||||
login
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const blob = gql`
|
||||
query blob($owner: String!, $name: String!, $expression: String!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
object(expression: $expression) {
|
||||
... on Blob {
|
||||
...BlobWithTextParts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
${fragments.blobWithText}
|
||||
`;
|
||||
|
||||
export const statues = gql`
|
||||
query statues($owner: String!, $name: String!, $sha: GitObjectID!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
object(oid: $sha) {
|
||||
...ObjectParts
|
||||
... on Commit {
|
||||
status {
|
||||
id
|
||||
contexts {
|
||||
id
|
||||
context
|
||||
state
|
||||
target_url: targetUrl
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
${fragments.object}
|
||||
`;
|
||||
|
||||
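// Builds the nested tree selection for a given depth by repeatedly substituting
// PLACE_HOLDER: each iteration wraps another `object { ... on Tree { entries { ... } } }`
// level, and the leftover placeholder is stripped at the end.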
function buildFilesQuery(depth = 1) {
|
||||
const PLACE_HOLDER = 'PLACE_HOLDER';
|
||||
let query = oneLine`
|
||||
...ObjectParts
|
||||
... on Tree {
|
||||
entries {
|
||||
...FileEntryParts
|
||||
${PLACE_HOLDER}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
for (let i = 0; i < depth - 1; i++) {
|
||||
query = query.replace(
|
||||
PLACE_HOLDER,
|
||||
oneLine`
|
||||
object {
|
||||
... on Tree {
|
||||
entries {
|
||||
...FileEntryParts
|
||||
${PLACE_HOLDER}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
);
|
||||
}
|
||||
|
||||
query = query.replace(PLACE_HOLDER, '');
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
export function files(depth: number) {
|
||||
return gql`
|
||||
query files($owner: String!, $name: String!, $expression: String!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
object(expression: $expression) {
|
||||
${buildFilesQuery(depth)}
|
||||
}
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
${fragments.object}
|
||||
${fragments.fileEntry}
|
||||
`;
|
||||
}
|
||||
|
||||
const branchQueryPart = `
|
||||
branch: ref(qualifiedName: $qualifiedName) {
|
||||
...BranchParts
|
||||
}
|
||||
`;
|
||||
|
||||
export const branch = gql`
|
||||
query branch($owner: String!, $name: String!, $qualifiedName: String!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
${branchQueryPart}
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
${fragments.branch}
|
||||
`;
|
||||
|
||||
export const repository = gql`
|
||||
query repository($owner: String!, $name: String!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
`;
|
||||
|
||||
export const fileSha = gql`
|
||||
query fileSha($owner: String!, $name: String!, $expression: String!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
...RepositoryParts
|
||||
file: object(expression: $expression) {
|
||||
...ObjectParts
|
||||
}
|
||||
}
|
||||
}
|
||||
${fragments.repository}
|
||||
${fragments.object}
|
||||
`;
|
690
packages/core/src/backends/github/types.ts
Normal file
@ -0,0 +1,690 @@
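// Response shapes for the GitHub REST endpoints this backend calls (authenticated user,
// repos, branches, and git blobs/trees/refs/commits); field lists mirror the REST payloads.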
|
||||
type UsersGetAuthenticatedResponsePlan = {
|
||||
collaborators: number;
|
||||
name: string;
|
||||
private_repos: number;
|
||||
space: number;
|
||||
};
|
||||
|
||||
export type GitHubUser = {
|
||||
avatar_url: string;
|
||||
bio: string;
|
||||
blog: string;
|
||||
collaborators?: number;
|
||||
company: string;
|
||||
created_at: string;
|
||||
disk_usage?: number;
|
||||
email: string;
|
||||
events_url: string;
|
||||
followers: number;
|
||||
followers_url: string;
|
||||
following: number;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
hireable: boolean;
|
||||
html_url: string;
|
||||
id: number;
|
||||
location: string;
|
||||
login: string;
|
||||
name: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
owned_private_repos?: number;
|
||||
plan?: UsersGetAuthenticatedResponsePlan;
|
||||
private_gists?: number;
|
||||
public_gists: number;
|
||||
public_repos: number;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
total_private_repos?: number;
|
||||
two_factor_authentication?: boolean;
|
||||
type: string;
|
||||
updated_at: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
export type GitCreateTreeParamsTree = {
|
||||
content?: string;
|
||||
mode?: '100644' | '100755' | '040000' | '160000' | '120000';
|
||||
path?: string;
|
||||
sha?: string;
|
||||
type?: 'blob' | 'tree' | 'commit';
|
||||
};
|
||||
|
||||
export type GitHubAuthor = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
export type GitHubCommitter = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemAuthor = {
|
||||
avatar_url: string;
|
||||
events_url: string;
|
||||
followers_url: string;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
login: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemCommitCommitter = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemCommitAuthor = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemCommitTree = { sha: string; url: string };
|
||||
|
||||
type ReposListCommitsResponseItemCommitVerification = {
|
||||
payload: null;
|
||||
reason: string;
|
||||
signature: null;
|
||||
verified: boolean;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemCommit = {
|
||||
author: ReposListCommitsResponseItemCommitAuthor;
|
||||
comment_count: number;
|
||||
committer: ReposListCommitsResponseItemCommitCommitter;
|
||||
message: string;
|
||||
tree: ReposListCommitsResponseItemCommitTree;
|
||||
url: string;
|
||||
verification: ReposListCommitsResponseItemCommitVerification;
|
||||
};
|
||||
|
||||
type ReposGetResponseSourcePermissions = {
|
||||
admin: boolean;
|
||||
pull: boolean;
|
||||
push: boolean;
|
||||
};
|
||||
|
||||
type ReposGetResponseSourceOwner = {
|
||||
avatar_url: string;
|
||||
events_url: string;
|
||||
followers_url: string;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
login: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetResponseSource = {
|
||||
allow_merge_commit: boolean;
|
||||
allow_rebase_merge: boolean;
|
||||
allow_squash_merge: boolean;
|
||||
archive_url: string;
|
||||
archived: boolean;
|
||||
assignees_url: string;
|
||||
blobs_url: string;
|
||||
branches_url: string;
|
||||
clone_url: string;
|
||||
collaborators_url: string;
|
||||
comments_url: string;
|
||||
commits_url: string;
|
||||
compare_url: string;
|
||||
contents_url: string;
|
||||
contributors_url: string;
|
||||
created_at: string;
|
||||
default_branch: string;
|
||||
deployments_url: string;
|
||||
description: string;
|
||||
disabled: boolean;
|
||||
downloads_url: string;
|
||||
events_url: string;
|
||||
fork: boolean;
|
||||
forks_count: number;
|
||||
forks_url: string;
|
||||
full_name: string;
|
||||
git_commits_url: string;
|
||||
git_refs_url: string;
|
||||
git_tags_url: string;
|
||||
git_url: string;
|
||||
has_downloads: boolean;
|
||||
has_issues: boolean;
|
||||
has_pages: boolean;
|
||||
has_projects: boolean;
|
||||
has_wiki: boolean;
|
||||
homepage: string;
|
||||
hooks_url: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
is_template: boolean;
|
||||
issue_comment_url: string;
|
||||
issue_events_url: string;
|
||||
issues_url: string;
|
||||
keys_url: string;
|
||||
labels_url: string;
|
||||
language: null;
|
||||
languages_url: string;
|
||||
merges_url: string;
|
||||
milestones_url: string;
|
||||
mirror_url: string;
|
||||
name: string;
|
||||
network_count: number;
|
||||
node_id: string;
|
||||
notifications_url: string;
|
||||
open_issues_count: number;
|
||||
owner: ReposGetResponseSourceOwner;
|
||||
permissions: ReposGetResponseSourcePermissions;
|
||||
private: boolean;
|
||||
pulls_url: string;
|
||||
pushed_at: string;
|
||||
releases_url: string;
|
||||
size: number;
|
||||
ssh_url: string;
|
||||
stargazers_count: number;
|
||||
stargazers_url: string;
|
||||
statuses_url: string;
|
||||
subscribers_count: number;
|
||||
subscribers_url: string;
|
||||
subscription_url: string;
|
||||
svn_url: string;
|
||||
tags_url: string;
|
||||
teams_url: string;
|
||||
temp_clone_token: string;
|
||||
template_repository: null;
|
||||
topics: Array<string>;
|
||||
trees_url: string;
|
||||
updated_at: string;
|
||||
url: string;
|
||||
visibility: string;
|
||||
watchers_count: number;
|
||||
};
|
||||
|
||||
type ReposGetResponseLicense = {
|
||||
key: string;
|
||||
name: string;
|
||||
node_id: string;
|
||||
spdx_id: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetResponseParentPermissions = {
|
||||
admin: boolean;
|
||||
pull: boolean;
|
||||
push: boolean;
|
||||
};
|
||||
|
||||
type ReposGetResponseParentOwner = {
|
||||
avatar_url: string;
|
||||
events_url: string;
|
||||
followers_url: string;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
login: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetResponseParent = {
|
||||
allow_merge_commit: boolean;
|
||||
allow_rebase_merge: boolean;
|
||||
allow_squash_merge: boolean;
|
||||
archive_url: string;
|
||||
archived: boolean;
|
||||
assignees_url: string;
|
||||
blobs_url: string;
|
||||
branches_url: string;
|
||||
clone_url: string;
|
||||
collaborators_url: string;
|
||||
comments_url: string;
|
||||
commits_url: string;
|
||||
compare_url: string;
|
||||
contents_url: string;
|
||||
contributors_url: string;
|
||||
created_at: string;
|
||||
default_branch: string;
|
||||
deployments_url: string;
|
||||
description: string;
|
||||
disabled: boolean;
|
||||
downloads_url: string;
|
||||
events_url: string;
|
||||
fork: boolean;
|
||||
forks_count: number;
|
||||
forks_url: string;
|
||||
full_name: string;
|
||||
git_commits_url: string;
|
||||
git_refs_url: string;
|
||||
git_tags_url: string;
|
||||
git_url: string;
|
||||
has_downloads: boolean;
|
||||
has_issues: boolean;
|
||||
has_pages: boolean;
|
||||
has_projects: boolean;
|
||||
has_wiki: boolean;
|
||||
homepage: string;
|
||||
hooks_url: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
is_template: boolean;
|
||||
issue_comment_url: string;
|
||||
issue_events_url: string;
|
||||
issues_url: string;
|
||||
keys_url: string;
|
||||
labels_url: string;
|
||||
language: null;
|
||||
languages_url: string;
|
||||
merges_url: string;
|
||||
milestones_url: string;
|
||||
mirror_url: string;
|
||||
name: string;
|
||||
network_count: number;
|
||||
node_id: string;
|
||||
notifications_url: string;
|
||||
open_issues_count: number;
|
||||
owner: ReposGetResponseParentOwner;
|
||||
permissions: ReposGetResponseParentPermissions;
|
||||
private: boolean;
|
||||
pulls_url: string;
|
||||
pushed_at: string;
|
||||
releases_url: string;
|
||||
size: number;
|
||||
ssh_url: string;
|
||||
stargazers_count: number;
|
||||
stargazers_url: string;
|
||||
statuses_url: string;
|
||||
subscribers_count: number;
|
||||
subscribers_url: string;
|
||||
subscription_url: string;
|
||||
svn_url: string;
|
||||
tags_url: string;
|
||||
teams_url: string;
|
||||
temp_clone_token: string;
|
||||
template_repository: null;
|
||||
topics: Array<string>;
|
||||
trees_url: string;
|
||||
updated_at: string;
|
||||
url: string;
|
||||
visibility: string;
|
||||
watchers_count: number;
|
||||
};
|
||||
|
||||
type ReposGetResponseOwner = {
|
||||
avatar_url: string;
|
||||
events_url: string;
|
||||
followers_url: string;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
login: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetResponseOrganization = {
|
||||
avatar_url: string;
|
||||
events_url: string;
|
||||
followers_url: string;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
login: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetResponsePermissions = {
|
||||
admin: boolean;
|
||||
pull: boolean;
|
||||
push: boolean;
|
||||
};
|
||||
|
||||
export type ReposGetResponse = {
|
||||
allow_merge_commit: boolean;
|
||||
allow_rebase_merge: boolean;
|
||||
allow_squash_merge: boolean;
|
||||
archive_url: string;
|
||||
archived: boolean;
|
||||
assignees_url: string;
|
||||
blobs_url: string;
|
||||
branches_url: string;
|
||||
clone_url: string;
|
||||
collaborators_url: string;
|
||||
comments_url: string;
|
||||
commits_url: string;
|
||||
compare_url: string;
|
||||
contents_url: string;
|
||||
contributors_url: string;
|
||||
created_at: string;
|
||||
default_branch: string;
|
||||
deployments_url: string;
|
||||
description: string;
|
||||
disabled: boolean;
|
||||
downloads_url: string;
|
||||
events_url: string;
|
||||
fork: boolean;
|
||||
forks_count: number;
|
||||
forks_url: string;
|
||||
full_name: string;
|
||||
git_commits_url: string;
|
||||
git_refs_url: string;
|
||||
git_tags_url: string;
|
||||
git_url: string;
|
||||
has_downloads: boolean;
|
||||
has_issues: boolean;
|
||||
has_pages: boolean;
|
||||
has_projects: boolean;
|
||||
has_wiki: boolean;
|
||||
homepage: string;
|
||||
hooks_url: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
is_template: boolean;
|
||||
issue_comment_url: string;
|
||||
issue_events_url: string;
|
||||
issues_url: string;
|
||||
keys_url: string;
|
||||
labels_url: string;
|
||||
language: null;
|
||||
languages_url: string;
|
||||
license: ReposGetResponseLicense;
|
||||
merges_url: string;
|
||||
milestones_url: string;
|
||||
mirror_url: string;
|
||||
name: string;
|
||||
network_count: number;
|
||||
node_id: string;
|
||||
notifications_url: string;
|
||||
open_issues_count: number;
|
||||
organization: ReposGetResponseOrganization;
|
||||
owner: ReposGetResponseOwner;
|
||||
parent: ReposGetResponseParent;
|
||||
permissions: ReposGetResponsePermissions;
|
||||
private: boolean;
|
||||
pulls_url: string;
|
||||
pushed_at: string;
|
||||
releases_url: string;
|
||||
size: number;
|
||||
source: ReposGetResponseSource;
|
||||
ssh_url: string;
|
||||
stargazers_count: number;
|
||||
stargazers_url: string;
|
||||
statuses_url: string;
|
||||
subscribers_count: number;
|
||||
subscribers_url: string;
|
||||
subscription_url: string;
|
||||
svn_url: string;
|
||||
tags_url: string;
|
||||
teams_url: string;
|
||||
temp_clone_token: string;
|
||||
template_repository: null;
|
||||
topics: Array<string>;
|
||||
trees_url: string;
|
||||
updated_at: string;
|
||||
url: string;
|
||||
visibility: string;
|
||||
watchers_count: number;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemCommitter = {
|
||||
avatar_url: string;
|
||||
events_url: string;
|
||||
followers_url: string;
|
||||
following_url: string;
|
||||
gists_url: string;
|
||||
gravatar_id: string;
|
||||
html_url: string;
|
||||
id: number;
|
||||
login: string;
|
||||
node_id: string;
|
||||
organizations_url: string;
|
||||
received_events_url: string;
|
||||
repos_url: string;
|
||||
site_admin: boolean;
|
||||
starred_url: string;
|
||||
subscriptions_url: string;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposListCommitsResponseItemParentsItem = { sha: string; url: string };
|
||||
|
||||
type ReposListCommitsResponseItem = {
|
||||
author: ReposListCommitsResponseItemAuthor;
|
||||
comments_url: string;
|
||||
commit: ReposListCommitsResponseItemCommit;
|
||||
committer: ReposListCommitsResponseItemCommitter;
|
||||
html_url: string;
|
||||
node_id: string;
|
||||
parents: Array<ReposListCommitsResponseItemParentsItem>;
|
||||
sha: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
export type ReposListCommitsResponse = Array<ReposListCommitsResponseItem>;
|
||||
|
||||
export type GitGetBlobResponse = {
|
||||
content: string;
|
||||
encoding: string;
|
||||
sha: string;
|
||||
size: number;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type GitGetTreeResponseTreeItem = {
|
||||
mode: string;
|
||||
path: string;
|
||||
sha: string;
|
||||
size?: number;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
export type GitGetTreeResponse = {
|
||||
sha: string;
|
||||
tree: Array<GitGetTreeResponseTreeItem>;
|
||||
truncated: boolean;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type GitCreateRefResponseObject = { sha: string; type: string; url: string };
|
||||
|
||||
export type GitCreateRefResponse = {
|
||||
node_id: string;
|
||||
object: GitCreateRefResponseObject;
|
||||
ref: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type GitUpdateRefResponseObject = { sha: string; type: string; url: string };
|
||||
|
||||
export type GitUpdateRefResponse = {
|
||||
node_id: string;
|
||||
object: GitUpdateRefResponseObject;
|
||||
ref: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type GitCreateCommitResponseVerification = {
|
||||
payload: null;
|
||||
reason: string;
|
||||
signature: null;
|
||||
verified: boolean;
|
||||
};
|
||||
|
||||
type GitCreateCommitResponseTree = { sha: string; url: string };
|
||||
|
||||
type GitCreateCommitResponseParentsItem = { sha: string; url: string };
|
||||
|
||||
type GitCreateCommitResponseCommitter = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type GitCreateCommitResponseAuthor = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
export type GitCreateCommitResponse = {
|
||||
author: GitCreateCommitResponseAuthor;
|
||||
committer: GitCreateCommitResponseCommitter;
|
||||
message: string;
|
||||
node_id: string;
|
||||
parents: Array<GitCreateCommitResponseParentsItem>;
|
||||
sha: string;
|
||||
tree: GitCreateCommitResponseTree;
|
||||
url: string;
|
||||
verification: GitCreateCommitResponseVerification;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitCommitVerification = {
|
||||
payload: null;
|
||||
reason: string;
|
||||
signature: null;
|
||||
verified: boolean;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitCommitTree = { sha: string; url: string };
|
||||
|
||||
type ReposGetBranchResponseCommitCommitCommitter = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitCommitAuthor = {
|
||||
date: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitCommit = {
|
||||
author: ReposGetBranchResponseCommitCommitAuthor;
|
||||
committer: ReposGetBranchResponseCommitCommitCommitter;
|
||||
message: string;
|
||||
tree: ReposGetBranchResponseCommitCommitTree;
|
||||
url: string;
|
||||
verification: ReposGetBranchResponseCommitCommitVerification;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitAuthor = {
|
||||
avatar_url: string;
|
||||
gravatar_id: string;
|
||||
id: number;
|
||||
login: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitCommitter = {
|
||||
avatar_url: string;
|
||||
gravatar_id: string;
|
||||
id: number;
|
||||
login: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseCommitParentsItem = { sha: string; url: string };
|
||||
|
||||
type ReposGetBranchResponseCommit = {
|
||||
author: ReposGetBranchResponseCommitAuthor;
|
||||
commit: ReposGetBranchResponseCommitCommit;
|
||||
committer: ReposGetBranchResponseCommitCommitter;
|
||||
node_id: string;
|
||||
parents: Array<ReposGetBranchResponseCommitParentsItem>;
|
||||
sha: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseLinks = { html: string; self: string };
|
||||
|
||||
type ReposGetBranchResponseProtectionRequiredStatusChecks = {
|
||||
contexts: Array<string>;
|
||||
enforcement_level: string;
|
||||
};
|
||||
|
||||
type ReposGetBranchResponseProtection = {
|
||||
enabled: boolean;
|
||||
required_status_checks: ReposGetBranchResponseProtectionRequiredStatusChecks;
|
||||
};
|
||||
|
||||
export type ReposGetBranchResponse = {
|
||||
_links: ReposGetBranchResponseLinks;
|
||||
commit: ReposGetBranchResponseCommit;
|
||||
name: string;
|
||||
protected: boolean;
|
||||
protection: ReposGetBranchResponseProtection;
|
||||
protection_url: string;
|
||||
};
|
||||
|
||||
type GitCreateTreeResponseTreeItem = {
|
||||
mode: string;
|
||||
path: string;
|
||||
sha: string;
|
||||
size: number;
|
||||
type: string;
|
||||
url: string;
|
||||
};
|
||||
|
||||
export type GitCreateTreeResponse = {
|
||||
sha: string;
|
||||
tree: Array<GitCreateTreeResponseTreeItem>;
|
||||
url: string;
|
||||
};
|
540
packages/core/src/backends/gitlab/API.ts
Normal file
@ -0,0 +1,540 @@
|
||||
import { Base64 } from 'js-base64';
|
||||
import partial from 'lodash/partial';
|
||||
import result from 'lodash/result';
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import { dirname } from 'path';
|
||||
|
||||
import {
|
||||
APIError,
|
||||
Cursor,
|
||||
localForage,
|
||||
parseLinkHeader,
|
||||
readFile,
|
||||
readFileMetadata,
|
||||
requestWithBackoff,
|
||||
responseParser,
|
||||
throwOnConflictingBranches,
|
||||
unsentRequest,
|
||||
} from '@staticcms/core/lib/util';
|
||||
|
||||
import type { DataFile, PersistOptions } from '@staticcms/core/interface';
|
||||
import type { ApiRequest, FetchError } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
|
||||
export const API_NAME = 'GitLab';
|
||||
|
||||
export interface Config {
|
||||
apiRoot?: string;
|
||||
token?: string;
|
||||
branch?: string;
|
||||
repo?: string;
|
||||
}
|
||||
|
||||
export interface CommitAuthor {
|
||||
name: string;
|
||||
email: string;
|
||||
}
|
||||
|
||||
enum CommitAction {
|
||||
CREATE = 'create',
|
||||
DELETE = 'delete',
|
||||
MOVE = 'move',
|
||||
UPDATE = 'update',
|
||||
}
|
||||
|
||||
type CommitItem = {
|
||||
base64Content?: string;
|
||||
path: string;
|
||||
oldPath?: string;
|
||||
action: CommitAction;
|
||||
};
|
||||
|
||||
type FileEntry = { id: string; type: string; path: string; name: string };
|
||||
|
||||
interface CommitsParams {
|
||||
commit_message: string;
|
||||
branch: string;
|
||||
author_name?: string;
|
||||
author_email?: string;
|
||||
actions?: {
|
||||
action: string;
|
||||
file_path: string;
|
||||
previous_path?: string;
|
||||
content?: string;
|
||||
encoding?: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
type GitLabCommitDiff = {
|
||||
diff: string;
|
||||
new_path: string;
|
||||
old_path: string;
|
||||
new_file: boolean;
|
||||
renamed_file: boolean;
|
||||
deleted_file: boolean;
|
||||
};
|
||||
|
||||
type GitLabRepo = {
|
||||
shared_with_groups: { group_access_level: number }[] | null;
|
||||
permissions: {
|
||||
project_access: { access_level: number } | null;
|
||||
group_access: { access_level: number } | null;
|
||||
};
|
||||
};
|
||||
|
||||
type GitLabBranch = {
|
||||
name: string;
|
||||
developers_can_push: boolean;
|
||||
developers_can_merge: boolean;
|
||||
commit: {
|
||||
id: string;
|
||||
};
|
||||
};
|
||||
|
||||
type GitLabCommitRef = {
|
||||
type: string;
|
||||
name: string;
|
||||
};
|
||||
|
||||
type GitLabCommit = {
|
||||
id: string;
|
||||
short_id: string;
|
||||
title: string;
|
||||
author_name: string;
|
||||
author_email: string;
|
||||
authored_date: string;
|
||||
committer_name: string;
|
||||
committer_email: string;
|
||||
committed_date: string;
|
||||
created_at: string;
|
||||
message: string;
|
||||
};
|
||||
|
||||
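// Returns the shared-group entry with the highest access level.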
export function getMaxAccess(groups: { group_access_level: number }[]) {
|
||||
return groups.reduce((previous, current) => {
|
||||
if (current.group_access_level > previous.group_access_level) {
|
||||
return current;
|
||||
}
|
||||
return previous;
|
||||
}, groups[0]);
|
||||
}
|
||||
|
||||
export default class API {
|
||||
apiRoot: string;
|
||||
token: string | boolean;
|
||||
branch: string;
|
||||
repo: string;
|
||||
repoURL: string;
|
||||
commitAuthor?: CommitAuthor;
|
||||
|
||||
constructor(config: Config) {
|
||||
this.apiRoot = config.apiRoot || 'https://gitlab.com/api/v4';
|
||||
this.token = config.token || false;
|
||||
this.branch = config.branch || 'main';
|
||||
this.repo = config.repo || '';
|
||||
this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
|
||||
}
|
||||
|
||||
withAuthorizationHeaders = (req: ApiRequest) => {
|
||||
const withHeaders = unsentRequest.withHeaders(
|
||||
this.token ? { Authorization: `Bearer ${this.token}` } : {},
|
||||
req,
|
||||
);
|
||||
return Promise.resolve(withHeaders);
|
||||
};
|
||||
|
||||
buildRequest = async (req: ApiRequest) => {
|
||||
const withRoot: ApiRequest = unsentRequest.withRoot(this.apiRoot)(req);
|
||||
const withAuthorizationHeaders = await this.withAuthorizationHeaders(withRoot);
|
||||
|
||||
if ('cache' in withAuthorizationHeaders) {
|
||||
return withAuthorizationHeaders;
|
||||
} else {
|
||||
const withNoCache: ApiRequest = unsentRequest.withNoCache(withAuthorizationHeaders);
|
||||
return withNoCache;
|
||||
}
|
||||
};
|
||||
|
||||
request = async (req: ApiRequest): Promise<Response> => {
|
||||
try {
|
||||
return requestWithBackoff(this, req);
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
throw new APIError(error.message, null, API_NAME);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
|
||||
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
|
||||
responseToText = responseParser({ format: 'text', apiName: API_NAME });
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
|
||||
requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;
|
||||
|
||||
user = () => this.requestJSON('/user');
|
||||
|
||||
WRITE_ACCESS = 30;
|
||||
MAINTAINER_ACCESS = 40;
|
||||
|
||||
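// Checks project and group permissions, falling back to shared-group access (maintainer, or developer when the default branch allows developer push and merge).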
hasWriteAccess = async () => {
|
||||
const { shared_with_groups: sharedWithGroups, permissions }: GitLabRepo =
|
||||
await this.requestJSON(this.repoURL);
|
||||
|
||||
const { project_access: projectAccess, group_access: groupAccess } = permissions;
|
||||
if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
if (groupAccess && groupAccess.access_level >= this.WRITE_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
// check for group write permissions
|
||||
if (sharedWithGroups && sharedWithGroups.length > 0) {
|
||||
const maxAccess = getMaxAccess(sharedWithGroups);
|
||||
// maintainer access
|
||||
if (maxAccess.group_access_level >= this.MAINTAINER_ACCESS) {
|
||||
return true;
|
||||
}
|
||||
// developer access
|
||||
if (maxAccess.group_access_level >= this.WRITE_ACCESS) {
|
||||
// check permissions to merge and push
|
||||
try {
|
||||
const branch = await this.getDefaultBranch();
|
||||
if (branch.developers_can_merge && branch.developers_can_push) {
|
||||
return true;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed getting default branch', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
readFile = async (
|
||||
path: string,
|
||||
sha?: string | null,
|
||||
{ parseText = true, branch = this.branch } = {},
|
||||
): Promise<string | Blob> => {
|
||||
const fetchContent = async () => {
|
||||
const content = await this.request({
|
||||
url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
|
||||
params: { ref: branch },
|
||||
cache: 'no-store',
|
||||
}).then<Blob | string>(parseText ? this.responseToText : this.responseToBlob);
|
||||
return content;
|
||||
};
|
||||
|
||||
const content = await readFile(sha, fetchContent, localForage, parseText);
|
||||
return content;
|
||||
};
|
||||
|
||||
async readFileMetadata(path: string, sha: string | null | undefined) {
|
||||
const fetchFileMetadata = async () => {
|
||||
try {
|
||||
const result: GitLabCommit[] = await this.requestJSON({
|
||||
url: `${this.repoURL}/repository/commits`,
|
||||
params: { path, ref_name: this.branch },
|
||||
});
|
||||
const commit = result[0];
|
||||
return {
|
||||
author: commit.author_name || commit.author_email,
|
||||
updatedOn: commit.authored_date,
|
||||
};
|
||||
} catch (e) {
|
||||
return { author: '', updatedOn: '' };
|
||||
}
|
||||
};
|
||||
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
|
||||
return fileMetadata;
|
||||
}
|
||||
|
||||
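// Builds a pagination cursor from GitLab's X-Page, X-Total-Pages, X-Per-Page, X-Total and Link headers.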
getCursorFromHeaders = (headers: Headers) => {
|
||||
const page = parseInt(headers.get('X-Page') as string, 10);
|
||||
const pageCount = parseInt(headers.get('X-Total-Pages') as string, 10);
|
||||
const pageSize = parseInt(headers.get('X-Per-Page') as string, 10);
|
||||
const count = parseInt(headers.get('X-Total') as string, 10);
|
||||
const links = parseLinkHeader(headers.get('Link'));
|
||||
const actions = Object.keys(links).flatMap(key =>
|
||||
(key === 'prev' && page > 1) ||
|
||||
(key === 'next' && page < pageCount) ||
|
||||
(key === 'first' && page > 1) ||
|
||||
(key === 'last' && page < pageCount)
|
||||
? [key]
|
||||
: [],
|
||||
);
|
||||
return Cursor.create({
|
||||
actions,
|
||||
meta: { page, count, pageSize, pageCount },
|
||||
data: { links },
|
||||
});
|
||||
};
|
||||
|
||||
getCursor = ({ headers }: { headers: Headers }) => this.getCursorFromHeaders(headers);
|
||||
|
||||
// Gets a cursor without retrieving the entries by using a HEAD request
|
||||
fetchCursor = (req: ApiRequest) =>
|
||||
this.request(unsentRequest.withMethod('HEAD', req)).then(value => this.getCursor(value));
|
||||
|
||||
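// Fetches a page of entries together with the pagination cursor derived from the response headers; a 404 yields an empty list.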
fetchCursorAndEntries = (
|
||||
req: ApiRequest,
|
||||
): Promise<{
|
||||
entries: FileEntry[];
|
||||
cursor: Cursor;
|
||||
}> => {
|
||||
const request = this.request(unsentRequest.withMethod('GET', req));
|
||||
|
||||
return Promise.all([
|
||||
request.then(this.getCursor),
|
||||
request.then(this.responseToJSON).catch((e: FetchError) => {
|
||||
if (e.status === 404) {
|
||||
return [];
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}),
|
||||
]).then(([cursor, entries]) => ({ cursor, entries }));
|
||||
};
|
||||
|
||||
listFiles = async (path: string, recursive = false) => {
|
||||
const { entries, cursor } = await this.fetchCursorAndEntries({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
params: { path, ref: this.branch, recursive: `${recursive}` },
|
||||
});
|
||||
return {
|
||||
files: entries.filter(({ type }) => type === 'blob'),
|
||||
cursor,
|
||||
};
|
||||
};
|
||||
|
||||
traverseCursor = async (cursor: Cursor, action: string) => {
|
||||
const link = (cursor.data?.links as Record<string, ApiRequest>)[action];
|
||||
const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
|
||||
return {
|
||||
entries: entries.filter(({ type }) => type === 'blob'),
|
||||
cursor: newCursor,
|
||||
};
|
||||
};
|
||||
|
||||
listAllFiles = async (path: string, recursive = false, branch = this.branch) => {
|
||||
const entries = [];
|
||||
// eslint-disable-next-line prefer-const
|
||||
let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
|
||||
url: `${this.repoURL}/repository/tree`,
|
||||
// Get the maximum number of entries per page
|
||||
params: { path, ref: branch, per_page: '100', recursive: `${recursive}` },
|
||||
});
|
||||
entries.push(...initialEntries);
|
||||
while (cursor && cursor.actions!.has('next')) {
|
||||
const link = (cursor.data?.links as Record<string, ApiRequest>).next;
|
||||
const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
|
||||
entries.push(...newEntries);
|
||||
cursor = newCursor;
|
||||
}
|
||||
return entries.filter(({ type }) => type === 'blob');
|
||||
};
|
||||
|
||||
toBase64 = (str: string) => Promise.resolve(Base64.encode(str));
|
||||
fromBase64 = (str: string) => Base64.decode(str);
|
||||
|
||||
async getBranch(branchName: string) {
|
||||
const branch: GitLabBranch = await this.requestJSON(
|
||||
`${this.repoURL}/repository/branches/${encodeURIComponent(branchName)}`,
|
||||
);
|
||||
return branch;
|
||||
}
|
||||
|
||||
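// Commits all items in a single request to the GitLab commits API, optionally starting a new branch from the current one.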
async uploadAndCommit(
|
||||
items: CommitItem[],
|
||||
{ commitMessage = '', branch = this.branch, newBranch = false },
|
||||
) {
|
||||
const actions = items.map(item => ({
|
||||
action: item.action,
|
||||
file_path: item.path,
|
||||
...(item.oldPath ? { previous_path: item.oldPath } : {}),
|
||||
...(item.base64Content !== undefined
|
||||
? { content: item.base64Content, encoding: 'base64' }
|
||||
: {}),
|
||||
}));
|
||||
|
||||
const commitParams: CommitsParams = {
|
||||
branch,
|
||||
commit_message: commitMessage,
|
||||
actions,
|
||||
...(newBranch ? { start_branch: this.branch } : {}),
|
||||
};
|
||||
if (this.commitAuthor) {
|
||||
const { name, email } = this.commitAuthor;
|
||||
commitParams.author_name = name;
|
||||
commitParams.author_email = email;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await this.requestJSON({
|
||||
url: `${this.repoURL}/repository/commits`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json; charset=utf-8' },
|
||||
body: JSON.stringify(commitParams),
|
||||
});
|
||||
return result;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
const message = error.message || '';
|
||||
if (newBranch && message.includes(`Could not update ${branch}`)) {
|
||||
await throwOnConflictingBranches(branch, name => this.getBranch(name), API_NAME);
|
||||
}
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
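// Determines the commit action for each file (create, update or move) and expands moves to include children of the moved folder.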
async getCommitItems(files: { path: string; newPath?: string }[], branch: string) {
|
||||
const items: CommitItem[] = await Promise.all(
|
||||
files.map(async file => {
|
||||
const [base64Content, fileExists] = await Promise.all([
|
||||
result(file, 'toBase64', partial(this.toBase64, (file as DataFile).raw)),
|
||||
this.isFileExists(file.path, branch),
|
||||
]);
|
||||
|
||||
let action = CommitAction.CREATE;
|
||||
let path = trimStart(file.path, '/');
|
||||
let oldPath = undefined;
|
||||
if (fileExists) {
|
||||
oldPath = file.newPath && path;
|
||||
action =
|
||||
file.newPath && file.newPath !== oldPath ? CommitAction.MOVE : CommitAction.UPDATE;
|
||||
path = file.newPath ? trimStart(file.newPath, '/') : path;
|
||||
}
|
||||
|
||||
return {
|
||||
action,
|
||||
base64Content,
|
||||
path,
|
||||
oldPath,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
// move children
|
||||
for (const item of items.filter(i => i.oldPath && i.action === CommitAction.MOVE)) {
|
||||
const sourceDir = dirname(item.oldPath as string);
|
||||
const destDir = dirname(item.path);
|
||||
const children = await this.listAllFiles(sourceDir, true, branch);
|
||||
children
|
||||
.filter(f => f.path !== item.oldPath)
|
||||
.forEach(file => {
|
||||
items.push({
|
||||
action: CommitAction.MOVE,
|
||||
path: file.path.replace(sourceDir, destDir),
|
||||
oldPath: file.path,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
|
||||
const files = [...dataFiles, ...mediaFiles];
|
||||
const items = await this.getCommitItems(files, this.branch);
|
||||
return this.uploadAndCommit(items, {
|
||||
commitMessage: options.commitMessage,
|
||||
});
|
||||
}
|
||||
|
||||
deleteFiles = (paths: string[], commitMessage: string) => {
|
||||
const branch = this.branch;
|
||||
const commitParams: CommitsParams = { commit_message: commitMessage, branch };
|
||||
if (this.commitAuthor) {
|
||||
const { name, email } = this.commitAuthor;
|
||||
commitParams.author_name = name;
|
||||
commitParams.author_email = email;
|
||||
}
|
||||
|
||||
const items = paths.map(path => ({ path, action: CommitAction.DELETE }));
|
||||
return this.uploadAndCommit(items, {
|
||||
commitMessage,
|
||||
});
|
||||
};
|
||||
|
||||
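// Reads a file's blob id from the X-Gitlab-Blob-Id header of a HEAD request.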
async getFileId(path: string, branch: string) {
|
||||
const request = await this.request({
|
||||
method: 'HEAD',
|
||||
url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
|
||||
params: { ref: branch },
|
||||
});
|
||||
|
||||
const blobId = request.headers.get('X-Gitlab-Blob-Id') as string;
|
||||
return blobId;
|
||||
}
|
||||
|
||||
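// Uses a HEAD request so a 404 can be treated as "file does not exist" without fetching the content.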
async isFileExists(path: string, branch: string) {
|
||||
const fileExists = await this.requestText({
|
||||
method: 'HEAD',
|
||||
url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
|
||||
params: { ref: branch },
|
||||
})
|
||||
.then(() => true)
|
||||
.catch(error => {
|
||||
if (error instanceof APIError && error.status === 404) {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
|
||||
return fileExists;
|
||||
}
|
||||
|
||||
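// Compares two refs and normalizes each diff into a status (added, deleted, renamed or modified) with old and new paths.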
async getDifferences(to: string, from = this.branch) {
|
||||
if (to === from) {
|
||||
return [];
|
||||
}
|
||||
const result: { diffs: GitLabCommitDiff[] } = await this.requestJSON({
|
||||
url: `${this.repoURL}/repository/compare`,
|
||||
params: {
|
||||
from,
|
||||
to,
|
||||
},
|
||||
});
|
||||
|
||||
if (result.diffs.length >= 1000) {
|
||||
throw new APIError('Diff limit reached', null, API_NAME);
|
||||
}
|
||||
|
||||
return result.diffs.map(d => {
|
||||
let status = 'modified';
|
||||
if (d.new_file) {
|
||||
status = 'added';
|
||||
} else if (d.deleted_file) {
|
||||
status = 'deleted';
|
||||
} else if (d.renamed_file) {
|
||||
status = 'renamed';
|
||||
}
|
||||
return {
|
||||
status,
|
||||
oldPath: d.old_path,
|
||||
newPath: d.new_path,
|
||||
newFile: d.new_file,
|
||||
path: d.new_path || d.old_path,
|
||||
binary: d.diff.startsWith('Binary') || /\.svg$/.test(d.new_path),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
async getDefaultBranch() {
|
||||
const branch: GitLabBranch = await this.getBranch(this.branch);
|
||||
return branch;
|
||||
}
|
||||
|
||||
async isShaExistsInBranch(branch: string, sha: string) {
|
||||
const refs: GitLabCommitRef[] = await this.requestJSON({
|
||||
url: `${this.repoURL}/repository/commits/${sha}/refs`,
|
||||
params: {
|
||||
type: 'branch',
|
||||
},
|
||||
});
|
||||
return refs.some(r => r.name === branch);
|
||||
}
|
||||
}
|
99
packages/core/src/backends/gitlab/AuthenticationPage.tsx
Normal file
@ -0,0 +1,99 @@
|
||||
import { styled } from '@mui/material/styles';
|
||||
import React, { useCallback, useMemo, useState } from 'react';
|
||||
|
||||
import AuthenticationPage from '@staticcms/core/components/UI/AuthenticationPage';
|
||||
import Icon from '@staticcms/core/components/UI/Icon';
|
||||
import { NetlifyAuthenticator, PkceAuthenticator } from '@staticcms/core/lib/auth';
|
||||
import { isNotEmpty } from '@staticcms/core/lib/util/string.util';
|
||||
|
||||
import type { MouseEvent } from 'react';
|
||||
import type {
|
||||
AuthenticationPageProps,
|
||||
AuthenticatorConfig,
|
||||
TranslatedProps,
|
||||
} from '@staticcms/core/interface';
|
||||
|
||||
const LoginButtonIcon = styled(Icon)`
|
||||
margin-right: 18px;
|
||||
`;
|
||||
|
||||
const clientSideAuthenticators = {
|
||||
pkce: (config: AuthenticatorConfig) => new PkceAuthenticator(config),
|
||||
} as const;
|
||||
|
||||
const GitLabAuthenticationPage = ({
|
||||
inProgress = false,
|
||||
config,
|
||||
siteId,
|
||||
authEndpoint,
|
||||
clearHash,
|
||||
onLogin,
|
||||
t,
|
||||
}: TranslatedProps<AuthenticationPageProps>) => {
|
||||
const [loginError, setLoginError] = useState<string | null>(null);
|
||||
|
||||
const auth = useMemo(() => {
|
||||
const {
|
||||
auth_type: authType = '',
|
||||
base_url = 'https://gitlab.com',
|
||||
auth_endpoint = 'oauth/authorize',
|
||||
app_id = '',
|
||||
} = config.backend;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
if (isNotEmpty(authType) && authType in clientSideAuthenticators) {
|
||||
const clientSideAuth = clientSideAuthenticators[
|
||||
authType as keyof typeof clientSideAuthenticators
|
||||
]({
|
||||
base_url,
|
||||
auth_endpoint,
|
||||
app_id,
|
||||
auth_token_endpoint: 'oauth/token',
|
||||
clearHash,
|
||||
});
|
||||
// Complete implicit authentication if we were redirected back from the provider.
|
||||
clientSideAuth.completeAuth((err, data) => {
|
||||
if (err) {
|
||||
setLoginError(err.toString());
|
||||
} else if (data) {
|
||||
onLogin(data);
|
||||
}
|
||||
});
|
||||
return clientSideAuth;
|
||||
} else {
|
||||
return new NetlifyAuthenticator({
|
||||
base_url,
|
||||
site_id: document.location.host.split(':')[0] === 'localhost' ? 'cms.netlify.com' : siteId,
|
||||
auth_endpoint: authEndpoint,
|
||||
});
|
||||
}
|
||||
}, [authEndpoint, clearHash, config.backend, onLogin, siteId]);
|
||||
|
||||
const handleLogin = useCallback(
|
||||
(e: MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault();
|
||||
auth.authenticate({ provider: 'gitlab', scope: 'api' }, err => {
|
||||
if (err) {
|
||||
setLoginError(err.toString());
|
||||
return;
|
||||
}
|
||||
});
|
||||
},
|
||||
[auth],
|
||||
);
|
||||
|
||||
return (
|
||||
<AuthenticationPage
|
||||
onLogin={handleLogin}
|
||||
loginDisabled={inProgress}
|
||||
loginErrorMessage={loginError}
|
||||
logoUrl={config.logo_url}
|
||||
siteUrl={config.site_url}
|
||||
icon={<LoginButtonIcon type="gitlab" />}
|
||||
buttonContent={inProgress ? t('auth.loggingIn') : t('auth.loginWithGitLab')}
|
||||
t={t}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default GitLabAuthenticationPage;
|
166
packages/core/src/backends/gitlab/__tests__/API.spec.ts
Normal file
@ -0,0 +1,166 @@
|
||||
import API, { getMaxAccess } from '../API';
|
||||
|
||||
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));
|
||||
|
||||
describe('GitLab API', () => {
|
||||
beforeAll(() => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('hasWriteAccess', () => {
|
||||
test('should return true on project access_level >= 30', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
api.requestJSON = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce({ permissions: { project_access: { access_level: 30 } } });
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(true);
|
||||
});
|
||||
|
||||
test('should return false on project access_level < 30', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
api.requestJSON = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce({ permissions: { project_access: { access_level: 10 } } });
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(false);
|
||||
});
|
||||
|
||||
test('should return true on group access_level >= 30', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
api.requestJSON = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce({ permissions: { group_access: { access_level: 30 } } });
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(true);
|
||||
});
|
||||
|
||||
test('should return false on group access_level < 30', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
api.requestJSON = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce({ permissions: { group_access: { access_level: 10 } } });
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(false);
|
||||
});
|
||||
|
||||
test('should return true on shared group access_level >= 40', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
api.requestJSON = jest.fn().mockResolvedValueOnce({
|
||||
permissions: { project_access: null, group_access: null },
|
||||
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 40 }],
|
||||
});
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(true);
|
||||
|
||||
expect(api.requestJSON).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should return true on shared group access_level >= 30, developers can merge and push', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
const requestJSONMock = (api.requestJSON = jest.fn());
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
permissions: { project_access: null, group_access: null },
|
||||
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
|
||||
});
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
developers_can_merge: true,
|
||||
developers_can_push: true,
|
||||
});
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(true);
|
||||
});
|
||||
|
||||
test('should return false on shared group access_level < 30,', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
const requestJSONMock = (api.requestJSON = jest.fn());
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
permissions: { project_access: null, group_access: null },
|
||||
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 20 }],
|
||||
});
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
developers_can_merge: true,
|
||||
developers_can_push: true,
|
||||
});
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(false);
|
||||
});
|
||||
|
||||
test("should return false on shared group access_level >= 30, developers can't merge", async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
const requestJSONMock = (api.requestJSON = jest.fn());
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
permissions: { project_access: null, group_access: null },
|
||||
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
|
||||
});
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
developers_can_merge: false,
|
||||
developers_can_push: true,
|
||||
});
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(false);
|
||||
});
|
||||
|
||||
test("should return false on shared group access_level >= 30, developers can't push", async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
const requestJSONMock = (api.requestJSON = jest.fn());
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
permissions: { project_access: null, group_access: null },
|
||||
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
|
||||
});
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
developers_can_merge: true,
|
||||
developers_can_push: false,
|
||||
});
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(false);
|
||||
});
|
||||
|
||||
test('should return false on shared group access_level >= 30, error getting branch', async () => {
|
||||
const api = new API({ repo: 'repo' });
|
||||
|
||||
const requestJSONMock = (api.requestJSON = jest.fn());
|
||||
requestJSONMock.mockResolvedValueOnce({
|
||||
permissions: { project_access: null, group_access: null },
|
||||
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
|
||||
});
|
||||
|
||||
const error = new Error('Not Found');
|
||||
requestJSONMock.mockRejectedValue(error);
|
||||
|
||||
await expect(api.hasWriteAccess()).resolves.toBe(false);
|
||||
|
||||
expect(console.error).toHaveBeenCalledTimes(1);
|
||||
expect(console.error).toHaveBeenCalledWith('Failed getting default branch', error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMaxAccess', () => {
|
||||
it('should return group with max access level', () => {
|
||||
const groups = [
|
||||
{ group_access_level: 10 },
|
||||
{ group_access_level: 5 },
|
||||
{ group_access_level: 100 },
|
||||
{ group_access_level: 1 },
|
||||
];
|
||||
expect(getMaxAccess(groups)).toBe(groups[2]);
|
||||
});
|
||||
});
|
||||
});
|
316
packages/core/src/backends/gitlab/implementation.ts
Normal file
@ -0,0 +1,316 @@
|
||||
import { stripIndent } from 'common-tags';
|
||||
import trim from 'lodash/trim';
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import semaphore from 'semaphore';
|
||||
|
||||
import {
|
||||
allEntriesByFolder,
|
||||
asyncLock,
|
||||
basename,
|
||||
blobToFileObj,
|
||||
CURSOR_COMPATIBILITY_SYMBOL,
|
||||
entriesByFiles,
|
||||
entriesByFolder,
|
||||
filterByExtension,
|
||||
getBlobSHA,
|
||||
getMediaAsBlob,
|
||||
getMediaDisplayURL,
|
||||
localForage,
|
||||
runWithLock,
|
||||
} from '@staticcms/core/lib/util';
|
||||
import API, { API_NAME } from './API';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
import type { Semaphore } from 'semaphore';
|
||||
import type { AsyncLock, Cursor } from '@staticcms/core/lib/util';
|
||||
import type {
|
||||
Config,
|
||||
Credentials,
|
||||
DisplayURL,
|
||||
BackendEntry,
|
||||
BackendClass,
|
||||
ImplementationFile,
|
||||
PersistOptions,
|
||||
User,
|
||||
} from '@staticcms/core/interface';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
|
||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||
|
||||
export default class GitLab implements BackendClass {
|
||||
lock: AsyncLock;
|
||||
api: API | null;
|
||||
options: {
|
||||
proxied: boolean;
|
||||
API: API | null;
|
||||
};
|
||||
repo: string;
|
||||
branch: string;
|
||||
apiRoot: string;
|
||||
token: string | null;
|
||||
mediaFolder?: string;
|
||||
|
||||
_mediaDisplayURLSem?: Semaphore;
|
||||
|
||||
constructor(config: Config, options = {}) {
|
||||
this.options = {
|
||||
proxied: false,
|
||||
API: null,
|
||||
...options,
|
||||
};
|
||||
|
||||
if (
|
||||
!this.options.proxied &&
|
||||
(config.backend.repo === null || config.backend.repo === undefined)
|
||||
) {
|
||||
throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
|
||||
}
|
||||
|
||||
this.api = this.options.API || null;
|
||||
|
||||
this.repo = config.backend.repo || '';
|
||||
this.branch = config.backend.branch || 'main';
|
||||
this.apiRoot = config.backend.api_root || 'https://gitlab.com/api/v4';
|
||||
this.token = '';
|
||||
this.mediaFolder = config.media_folder;
|
||||
this.lock = asyncLock();
|
||||
}
|
||||
|
||||
isGitBackend() {
|
||||
return true;
|
||||
}
|
||||
|
||||
async status() {
|
||||
const auth =
|
||||
(await this.api
|
||||
?.user()
|
||||
.then(user => !!user)
|
||||
.catch(e => {
|
||||
console.warn('Failed getting GitLab user', e);
|
||||
return false;
|
||||
})) || false;
|
||||
|
||||
return { auth: { status: auth }, api: { status: true, statusPage: '' } };
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
return AuthenticationPage;
|
||||
}
|
||||
|
||||
restoreUser(user: User) {
|
||||
return this.authenticate(user);
|
||||
}
|
||||
|
||||
async authenticate(state: Credentials) {
|
||||
this.token = state.token as string;
|
||||
this.api = new API({
|
||||
token: this.token,
|
||||
branch: this.branch,
|
||||
repo: this.repo,
|
||||
apiRoot: this.apiRoot,
|
||||
});
|
||||
const user = await this.api.user();
|
||||
const isCollab = await this.api.hasWriteAccess().catch((error: Error) => {
|
||||
error.message = stripIndent`
|
||||
Repo "${this.repo}" not found.
|
||||
|
||||
Please ensure the repo information is spelled correctly.
|
||||
|
||||
If the repo is private, make sure you're logged into a GitLab account with access.
|
||||
`;
|
||||
throw error;
|
||||
});
|
||||
|
||||
// Unauthorized user
|
||||
if (!isCollab) {
|
||||
throw new Error('Your GitLab user account does not have access to this repo.');
|
||||
}
|
||||
|
||||
// Authorized user
|
||||
return { ...user, login: user.username, token: state.token as string };
|
||||
}
|
||||
|
||||
async logout() {
|
||||
this.token = null;
|
||||
return;
|
||||
}
|
||||
|
||||
getToken() {
|
||||
return Promise.resolve(this.token);
|
||||
}
|
||||
|
||||
filterFile(
|
||||
folder: string,
|
||||
file: { path: string; name: string },
|
||||
extension: string,
|
||||
depth: number,
|
||||
) {
|
||||
// gitlab paths include the root folder
|
||||
const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
|
||||
return filterByExtension(file, extension) && fileFolder.split('/').length <= depth;
|
||||
}
|
||||
|
||||
async entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
let cursor: Cursor;
|
||||
|
||||
const listFiles = () =>
|
||||
this.api!.listFiles(folder, depth > 1).then(({ files, cursor: c }) => {
|
||||
cursor = c.mergeMeta({ folder, extension, depth });
|
||||
return files.filter(file => this.filterFile(folder, file, extension, depth));
|
||||
});
|
||||
|
||||
const files = await entriesByFolder(
|
||||
listFiles,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||
return files;
|
||||
}
|
||||
|
||||
async listAllFiles(folder: string, extension: string, depth: number) {
|
||||
const files = await this.api!.listAllFiles(folder, depth > 1);
|
||||
const filtered = files.filter(file => this.filterFile(folder, file, extension, depth));
|
||||
return filtered;
|
||||
}
|
||||
|
||||
async allEntriesByFolder(folder: string, extension: string, depth: number) {
|
||||
const files = await allEntriesByFolder({
|
||||
listAllFiles: () => this.listAllFiles(folder, extension, depth),
|
||||
readFile: this.api!.readFile.bind(this.api!),
|
||||
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
|
||||
apiName: API_NAME,
|
||||
branch: this.branch,
|
||||
localForage,
|
||||
folder,
|
||||
extension,
|
||||
depth,
|
||||
getDefaultBranch: () =>
|
||||
this.api!.getDefaultBranch().then(b => ({ name: b.name, sha: b.commit.id })),
|
||||
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
|
||||
getDifferences: (to, from) => this.api!.getDifferences(to, from),
|
||||
getFileId: path => this.api!.getFileId(path, this.branch),
|
||||
filterFile: file => this.filterFile(folder, file, extension, depth),
|
||||
customFetch: undefined,
|
||||
});
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
entriesByFiles(files: ImplementationFile[]) {
|
||||
return entriesByFiles(
|
||||
files,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
}
|
||||
|
||||
// Fetches a single entry.
|
||||
getEntry(path: string) {
|
||||
return this.api!.readFile(path).then(data => ({
|
||||
file: { path, id: null },
|
||||
data: data as string,
|
||||
}));
|
||||
}
|
||||
|
||||
async getMedia(mediaFolder = this.mediaFolder) {
|
||||
if (!mediaFolder) {
|
||||
return [];
|
||||
}
|
||||
return this.api!.listAllFiles(mediaFolder).then(files =>
|
||||
files.map(({ id, name, path }) => {
|
||||
return { id, name, path, displayURL: { id, name, path } };
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
getMediaDisplayURL(displayURL: DisplayURL) {
|
||||
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
|
||||
return getMediaDisplayURL(
|
||||
displayURL,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this._mediaDisplayURLSem,
|
||||
);
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const name = basename(path);
|
||||
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
const id = await getBlobSHA(blob);
|
||||
|
||||
return {
|
||||
id,
|
||||
displayURL: url,
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
async persistEntry(entry: BackendEntry, options: PersistOptions) {
|
||||
// persistEntry is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
|
||||
'Failed to acquire persist entry lock',
|
||||
);
|
||||
}
|
||||
|
||||
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
|
||||
const fileObj = mediaFile.fileObj as File;
|
||||
|
||||
const [id] = await Promise.all([
|
||||
getBlobSHA(fileObj),
|
||||
this.api!.persistFiles([], [mediaFile], options),
|
||||
]);
|
||||
|
||||
const { path } = mediaFile;
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
|
||||
return {
|
||||
displayURL: url,
|
||||
path: trimStart(path, '/'),
|
||||
name: fileObj!.name,
|
||||
size: fileObj!.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
id,
|
||||
};
|
||||
}
|
||||
|
||||
deleteFiles(paths: string[], commitMessage: string) {
|
||||
return this.api!.deleteFiles(paths, commitMessage);
|
||||
}
|
||||
|
||||
traverseCursor(cursor: Cursor, action: string) {
|
||||
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
|
||||
const [folder, depth, extension] = [
|
||||
cursor.meta?.folder as string,
|
||||
cursor.meta?.depth as number,
|
||||
cursor.meta?.extension as string,
|
||||
];
|
||||
if (folder && depth && extension) {
|
||||
entries = entries.filter(f => this.filterFile(folder, f, extension, depth));
|
||||
newCursor = newCursor.mergeMeta({ folder, extension, depth });
|
||||
}
|
||||
const entriesWithData = await entriesByFiles(
|
||||
entries,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this.api!.readFileMetadata.bind(this.api)!,
|
||||
API_NAME,
|
||||
);
|
||||
return {
|
||||
entries: entriesWithData,
|
||||
cursor: newCursor,
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
3
packages/core/src/backends/gitlab/index.ts
Normal file
@ -0,0 +1,3 @@
export { default as GitLabBackend } from './implementation';
export { default as API } from './API';
export { default as AuthenticationPage } from './AuthenticationPage';
73
packages/core/src/backends/gitlab/queries.ts
Normal file
@ -0,0 +1,73 @@
|
||||
import { gql } from 'graphql-tag';
|
||||
import { oneLine } from 'common-tags';
|
||||
|
||||
export const files = gql`
|
||||
query files($repo: ID!, $branch: String!, $path: String!, $recursive: Boolean!, $cursor: String) {
|
||||
project(fullPath: $repo) {
|
||||
repository {
|
||||
tree(ref: $branch, path: $path, recursive: $recursive) {
|
||||
blobs(after: $cursor) {
|
||||
nodes {
|
||||
type
|
||||
id: sha
|
||||
path
|
||||
name
|
||||
}
|
||||
pageInfo {
|
||||
endCursor
|
||||
hasNextPage
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const blobs = gql`
|
||||
query blobs($repo: ID!, $branch: String!, $paths: [String!]!) {
|
||||
project(fullPath: $repo) {
|
||||
repository {
|
||||
blobs(ref: $branch, paths: $paths) {
|
||||
nodes {
|
||||
id
|
||||
data: rawBlob
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
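// Builds a query with one aliased tree field per path so the last commit for many paths can be fetched in a single request.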
export function lastCommits(paths: string[]) {
|
||||
const tree = paths
|
||||
.map(
|
||||
(path, index) => oneLine`
|
||||
tree${index}: tree(ref: $branch, path: "${path}") {
|
||||
lastCommit {
|
||||
authorName
|
||||
authoredDate
|
||||
author {
|
||||
id
|
||||
username
|
||||
name
|
||||
publicEmail
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
)
|
||||
.join('\n');
|
||||
|
||||
const query = gql`
|
||||
query lastCommits($repo: ID!, $branch: String!) {
|
||||
project(fullPath: $repo) {
|
||||
repository {
|
||||
${tree}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
return query;
|
||||
}
|
6
packages/core/src/backends/index.tsx
Normal file
@ -0,0 +1,6 @@
export { BitbucketBackend } from './bitbucket';
export { GitGatewayBackend } from './git-gateway';
export { GitHubBackend } from './github';
export { GitLabBackend } from './gitlab';
export { ProxyBackend } from './proxy';
export { TestBackend } from './test';
48
packages/core/src/backends/proxy/AuthenticationPage.tsx
Normal file
@ -0,0 +1,48 @@
|
||||
import Button from '@mui/material/Button';
|
||||
import { styled } from '@mui/material/styles';
|
||||
import React, { useCallback } from 'react';
|
||||
|
||||
import GoBackButton from '@staticcms/core/components/UI/GoBackButton';
|
||||
import Icon from '@staticcms/core/components/UI/Icon';
|
||||
|
||||
import type { MouseEvent } from 'react';
|
||||
import type { AuthenticationPageProps, TranslatedProps } from '@staticcms/core/interface';
|
||||
|
||||
const StyledAuthenticationPage = styled('section')`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100vh;
|
||||
`;
|
||||
|
||||
const PageLogoIcon = styled(Icon)`
|
||||
color: #c4c6d2;
|
||||
`;
|
||||
|
||||
const AuthenticationPage = ({
|
||||
inProgress = false,
|
||||
config,
|
||||
onLogin,
|
||||
t,
|
||||
}: TranslatedProps<AuthenticationPageProps>) => {
|
||||
const handleLogin = useCallback(
|
||||
(e: MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault();
|
||||
onLogin({ token: 'fake_token' });
|
||||
},
|
||||
[onLogin],
|
||||
);
|
||||
|
||||
return (
|
||||
<StyledAuthenticationPage>
|
||||
<PageLogoIcon width={300} height={150} type="static-cms" />
|
||||
<Button variant="contained" disabled={inProgress} onClick={handleLogin}>
|
||||
{inProgress ? t('auth.loggingIn') : t('auth.login')}
|
||||
</Button>
|
||||
{config.site_url && <GoBackButton href={config.site_url} t={t}></GoBackButton>}
|
||||
</StyledAuthenticationPage>
|
||||
);
|
||||
};
|
||||
|
||||
export default AuthenticationPage;
|
196
packages/core/src/backends/proxy/implementation.ts
Normal file
@ -0,0 +1,196 @@
|
||||
import { APIError, basename, blobToFileObj, unsentRequest } from '@staticcms/core/lib/util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
import type {
|
||||
BackendEntry,
|
||||
BackendClass,
|
||||
Config,
|
||||
DisplayURL,
|
||||
ImplementationEntry,
|
||||
ImplementationFile,
|
||||
PersistOptions,
|
||||
User,
|
||||
} from '@staticcms/core/interface';
|
||||
import type { Cursor } from '@staticcms/core/lib/util';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
|
||||
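// Serializes an asset as base64 so it can be sent to the proxy server as JSON.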
async function serializeAsset(assetProxy: AssetProxy) {
|
||||
const base64content = await assetProxy.toBase64!();
|
||||
return { path: assetProxy.path, content: base64content, encoding: 'base64' };
|
||||
}
|
||||
|
||||
type MediaFile = {
|
||||
id: string;
|
||||
content: string;
|
||||
encoding: string;
|
||||
name: string;
|
||||
path: string;
|
||||
};
|
||||
|
||||
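// Decodes a base64 media file returned by the proxy into a File object and an object URL for display.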
function deserializeMediaFile({ id, content, encoding, path, name }: MediaFile) {
|
||||
let byteArray = new Uint8Array(0);
|
||||
if (encoding !== 'base64') {
|
||||
console.error(`Unsupported encoding '${encoding}' for file '${path}'`);
|
||||
} else {
|
||||
const decodedContent = atob(content);
|
||||
byteArray = new Uint8Array(decodedContent.length);
|
||||
for (let i = 0; i < decodedContent.length; i++) {
|
||||
byteArray[i] = decodedContent.charCodeAt(i);
|
||||
}
|
||||
}
|
||||
const blob = new Blob([byteArray]);
|
||||
const file = blobToFileObj(name, blob);
|
||||
const url = URL.createObjectURL(file);
|
||||
return { id, name, path, file, size: file.size, url, displayURL: url };
|
||||
}
|
||||
|
||||
export default class ProxyBackend implements BackendClass {
|
||||
proxyUrl: string;
|
||||
mediaFolder?: string;
|
||||
options: {};
|
||||
branch: string;
|
||||
|
||||
constructor(config: Config, options = {}) {
|
||||
if (!config.backend.proxy_url) {
|
||||
throw new Error('The Proxy backend needs a "proxy_url" in the backend configuration.');
|
||||
}
|
||||
|
||||
this.branch = config.backend.branch || 'main';
|
||||
this.proxyUrl = config.backend.proxy_url;
|
||||
this.mediaFolder = config.media_folder;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
isGitBackend() {
|
||||
return false;
|
||||
}
|
||||
|
||||
status() {
|
||||
return Promise.resolve({ auth: { status: true }, api: { status: true, statusPage: '' } });
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
return AuthenticationPage;
|
||||
}
|
||||
|
||||
restoreUser() {
|
||||
return this.authenticate();
|
||||
}
|
||||
|
||||
authenticate() {
|
||||
return Promise.resolve() as unknown as Promise<User>;
|
||||
}
|
||||
|
||||
logout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
getToken() {
|
||||
return Promise.resolve('');
|
||||
}
|
||||
|
||||
async request(payload: { action: string; params: Record<string, unknown> }) {
|
||||
const response = await unsentRequest.fetchWithTimeout(this.proxyUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json; charset=utf-8' },
|
||||
body: JSON.stringify({ branch: this.branch, ...payload }),
|
||||
});
|
||||
|
||||
const json = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
return json;
|
||||
} else {
|
||||
throw new APIError(json.error, response.status, 'Proxy');
|
||||
}
|
||||
}
|
||||
|
||||
entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
return this.request({
|
||||
action: 'entriesByFolder',
|
||||
params: { branch: this.branch, folder, extension, depth },
|
||||
});
|
||||
}
|
||||
|
||||
entriesByFiles(files: ImplementationFile[]) {
|
||||
return this.request({
|
||||
action: 'entriesByFiles',
|
||||
params: { branch: this.branch, files },
|
||||
});
|
||||
}
|
||||
|
||||
getEntry(path: string) {
|
||||
return this.request({
|
||||
action: 'getEntry',
|
||||
params: { branch: this.branch, path },
|
||||
});
|
||||
}
|
||||
|
||||
async persistEntry(entry: BackendEntry, options: PersistOptions) {
|
||||
const assets = await Promise.all(entry.assets.map(serializeAsset));
|
||||
return this.request({
|
||||
action: 'persistEntry',
|
||||
params: {
|
||||
branch: this.branch,
|
||||
dataFiles: entry.dataFiles,
|
||||
assets,
|
||||
options: { ...options },
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async getMedia(mediaFolder = this.mediaFolder) {
|
||||
const files: { path: string; url: string }[] = await this.request({
|
||||
action: 'getMedia',
|
||||
params: { branch: this.branch, mediaFolder },
|
||||
});
|
||||
|
||||
return files.map(({ url, path }) => {
|
||||
const id = url;
|
||||
const name = basename(path);
|
||||
|
||||
return { id, name, displayURL: { id, path }, path };
|
||||
});
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const file = await this.request({
|
||||
action: 'getMediaFile',
|
||||
params: { branch: this.branch, path },
|
||||
});
|
||||
return deserializeMediaFile(file);
|
||||
}
|
||||
|
||||
getMediaDisplayURL(displayURL: DisplayURL) {
|
||||
return Promise.resolve(typeof displayURL === 'string' ? displayURL : displayURL.id);
|
||||
}
|
||||
|
||||
async persistMedia(assetProxy: AssetProxy, options: PersistOptions) {
|
||||
const asset = await serializeAsset(assetProxy);
|
||||
const file: MediaFile = await this.request({
|
||||
action: 'persistMedia',
|
||||
params: { branch: this.branch, asset, options: { commitMessage: options.commitMessage } },
|
||||
});
|
||||
|
||||
return deserializeMediaFile(file);
|
||||
}
|
||||
|
||||
deleteFiles(paths: string[], commitMessage: string) {
|
||||
return this.request({
|
||||
action: 'deleteFiles',
|
||||
params: { branch: this.branch, paths, options: { commitMessage } },
|
||||
});
|
||||
}
|
||||
|
||||
traverseCursor(): Promise<{ entries: ImplementationEntry[]; cursor: Cursor }> {
|
||||
throw new Error('Not supported');
|
||||
}
|
||||
|
||||
allEntriesByFolder(
|
||||
_folder: string,
|
||||
_extension: string,
|
||||
_depth: number,
|
||||
): Promise<ImplementationEntry[]> {
|
||||
throw new Error('Not supported');
|
||||
}
|
||||
}
|
2
packages/core/src/backends/proxy/index.ts
Normal file
@ -0,0 +1,2 @@
export { default as ProxyBackend } from './implementation';
export { default as AuthenticationPage } from './AuthenticationPage';
63
packages/core/src/backends/test/AuthenticationPage.tsx
Normal file
@ -0,0 +1,63 @@
|
||||
import Button from '@mui/material/Button';
|
||||
import { styled } from '@mui/material/styles';
|
||||
import React, { useCallback, useEffect } from 'react';
|
||||
|
||||
import GoBackButton from '@staticcms/core/components/UI/GoBackButton';
|
||||
import Icon from '@staticcms/core/components/UI/Icon';
|
||||
|
||||
import type { MouseEvent } from 'react';
|
||||
import type { AuthenticationPageProps, TranslatedProps } from '@staticcms/core/interface';
|
||||
|
||||
const StyledAuthenticationPage = styled('section')`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100vh;
|
||||
`;
|
||||
|
||||
const PageLogoIcon = styled(Icon)`
|
||||
color: #c4c6d2;
|
||||
`;
|
||||
|
||||
const AuthenticationPage = ({
|
||||
inProgress = false,
|
||||
config,
|
||||
onLogin,
|
||||
t,
|
||||
}: TranslatedProps<AuthenticationPageProps>) => {
|
||||
useEffect(() => {
|
||||
/**
|
||||
* Allow login screen to be skipped for demo purposes.
|
||||
*/
|
||||
const skipLogin = config.backend.login === false;
|
||||
if (skipLogin) {
|
||||
onLogin({ token: 'fake_token' });
|
||||
}
|
||||
}, [config.backend.login, onLogin]);
|
||||
|
||||
const handleLogin = useCallback(
|
||||
(e: MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault();
|
||||
onLogin({ token: 'fake_token' });
|
||||
},
|
||||
[onLogin],
|
||||
);
|
||||
|
||||
return (
|
||||
<StyledAuthenticationPage>
|
||||
<PageLogoIcon width={300} height={150} type="static-cms" />
|
||||
<Button
|
||||
disabled={inProgress}
|
||||
onClick={handleLogin}
|
||||
variant="contained"
|
||||
sx={{ marginBottom: '32px' }}
|
||||
>
|
||||
{inProgress ? t('auth.loggingIn') : t('auth.login')}
|
||||
</Button>
|
||||
{config.site_url && <GoBackButton href={config.site_url} t={t}></GoBackButton>}
|
||||
</StyledAuthenticationPage>
|
||||
);
|
||||
};
|
||||
|
||||
export default AuthenticationPage;
|
299
packages/core/src/backends/test/implementation.ts
Normal file
@ -0,0 +1,299 @@
|
||||
import attempt from 'lodash/attempt';
|
||||
import isError from 'lodash/isError';
|
||||
import take from 'lodash/take';
|
||||
import unset from 'lodash/unset';
|
||||
import { extname } from 'path';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
import { basename, Cursor, CURSOR_COMPATIBILITY_SYMBOL } from '@staticcms/core/lib/util';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
import type {
|
||||
BackendEntry,
|
||||
BackendClass,
|
||||
Config,
|
||||
DisplayURL,
|
||||
ImplementationEntry,
|
||||
ImplementationFile,
|
||||
User,
|
||||
} from '@staticcms/core/interface';
|
||||
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
|
||||
|
||||
type RepoFile = { path: string; content: string | AssetProxy };
|
||||
type RepoTree = { [key: string]: RepoFile | RepoTree };
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
repoFiles: RepoTree;
|
||||
}
|
||||
}
|
||||
|
||||
window.repoFiles = window.repoFiles || {};
|
||||
|
||||
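// Walks the in-memory repo tree segment by segment to find the file at the given path.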
function getFile(path: string, tree: RepoTree) {
|
||||
const segments = path.split('/');
|
||||
let obj: RepoTree = tree;
|
||||
while (obj && segments.length) {
|
||||
obj = obj[segments.shift() as string] as RepoTree;
|
||||
}
|
||||
return (obj as unknown as RepoFile) || {};
|
||||
}
|
||||
|
||||
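// Creates any missing intermediate folders and writes the file at the given path.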
function writeFile(path: string, content: string | AssetProxy, tree: RepoTree) {
|
||||
const segments = path.split('/');
|
||||
let obj = tree;
|
||||
while (segments.length > 1) {
|
||||
const segment = segments.shift() as string;
|
||||
obj[segment] = obj[segment] || {};
|
||||
obj = obj[segment] as RepoTree;
|
||||
}
|
||||
(obj[segments.shift() as string] as RepoFile) = { content, path };
|
||||
}
|
||||
|
||||
function deleteFile(path: string, tree: RepoTree) {
|
||||
unset(tree, path.split('/'));
|
||||
}
|
||||
|
||||
const pageSize = 10;
|
||||
|
||||
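// Builds a cursor whose available actions depend on the current page index and total page count.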
function getCursor(
|
||||
folder: string,
|
||||
extension: string,
|
||||
entries: ImplementationEntry[],
|
||||
index: number,
|
||||
depth: number,
|
||||
) {
|
||||
const count = entries.length;
|
||||
const pageCount = Math.floor(count / pageSize);
|
||||
return Cursor.create({
|
||||
actions: [
|
||||
...(index < pageCount ? ['next', 'last'] : []),
|
||||
...(index > 0 ? ['prev', 'first'] : []),
|
||||
],
|
||||
meta: { index, count, pageSize, pageCount },
|
||||
data: { folder, extension, index, pageCount, depth },
|
||||
});
|
||||
}
|
||||
|
||||
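// Recursively collects files under a folder in the in-memory repo tree, up to the given depth.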
export function getFolderFiles(
|
||||
tree: RepoTree,
|
||||
folder: string,
|
||||
extension: string,
|
||||
depth: number,
|
||||
files = [] as RepoFile[],
|
||||
path = folder,
|
||||
) {
|
||||
if (depth <= 0) {
|
||||
return files;
|
||||
}
|
||||
|
||||
Object.keys(tree[folder] || {}).forEach(key => {
|
||||
if (extname(key)) {
|
||||
const file = (tree[folder] as RepoTree)[key] as RepoFile;
|
||||
if (!extension || key.endsWith(`.${extension}`)) {
|
||||
files.unshift({ content: file.content, path: `${path}/${key}` });
|
||||
}
|
||||
} else {
|
||||
const subTree = tree[folder] as RepoTree;
|
||||
return getFolderFiles(subTree, key, extension, depth - 1, files, `${path}/${key}`);
|
||||
}
|
||||
});
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
export default class TestBackend implements BackendClass {
|
||||
mediaFolder?: string;
|
||||
options: {};
|
||||
|
||||
constructor(config: Config, options = {}) {
|
||||
this.options = options;
|
||||
this.mediaFolder = config.media_folder;
|
||||
}
|
||||
|
||||
isGitBackend() {
|
||||
return false;
|
||||
}
|
||||
|
||||
status() {
|
||||
return Promise.resolve({ auth: { status: true }, api: { status: true, statusPage: '' } });
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
return AuthenticationPage;
|
||||
}
|
||||
|
||||
restoreUser() {
|
||||
return this.authenticate();
|
||||
}
|
||||
|
||||
authenticate() {
|
||||
return Promise.resolve() as unknown as Promise<User>;
|
||||
}
|
||||
|
||||
logout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
getToken() {
|
||||
return Promise.resolve('');
|
||||
}
|
||||
|
||||
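// Resolves the new page index from the requested action and returns that slice of entries with a fresh cursor.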
traverseCursor(cursor: Cursor, action: string) {
|
||||
const { folder, extension, index, pageCount, depth } = cursor.data as {
|
||||
folder: string;
|
||||
extension: string;
|
||||
index: number;
|
||||
pageCount: number;
|
||||
depth: number;
|
||||
};
|
||||
const newIndex = (() => {
|
||||
if (action === 'next') {
|
||||
return (index as number) + 1;
|
||||
}
|
||||
if (action === 'prev') {
|
||||
return (index as number) - 1;
|
||||
}
|
||||
if (action === 'first') {
|
||||
return 0;
|
||||
}
|
||||
if (action === 'last') {
|
||||
return pageCount;
|
||||
}
|
||||
return 0;
|
||||
})();
|
||||
// TODO: stop assuming cursors are for collections
|
||||
const allFiles = getFolderFiles(window.repoFiles, folder, extension, depth);
|
||||
const allEntries = allFiles.map(f => ({
|
||||
data: f.content as string,
|
||||
file: { path: f.path, id: f.path },
|
||||
}));
|
||||
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
|
||||
const newCursor = getCursor(folder, extension, allEntries, newIndex, depth);
|
||||
return Promise.resolve({ entries, cursor: newCursor });
|
||||
}
|
||||
|
||||
entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
const files = folder ? getFolderFiles(window.repoFiles, folder, extension, depth) : [];
|
||||
const entries = files.map(f => ({
|
||||
data: f.content as string,
|
||||
file: { path: f.path, id: f.path },
|
||||
}));
|
||||
const cursor = getCursor(folder, extension, entries, 0, depth);
|
||||
const ret = take(entries, pageSize);
|
||||
// TODO Remove
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||
return Promise.resolve(ret);
|
||||
}
|
||||
|
||||
entriesByFiles(files: ImplementationFile[]) {
|
||||
return Promise.all(
|
||||
files.map(file => ({
|
||||
file,
|
||||
data: getFile(file.path, window.repoFiles).content as string,
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
getEntry(path: string) {
|
||||
return Promise.resolve({
|
||||
file: { path, id: null },
|
||||
data: getFile(path, window.repoFiles).content as string,
|
||||
});
|
||||
}
|
||||
|
||||
async persistEntry(entry: BackendEntry) {
|
||||
entry.dataFiles.forEach(dataFile => {
|
||||
const { path, raw } = dataFile;
|
||||
writeFile(path, raw, window.repoFiles);
|
||||
});
|
||||
entry.assets.forEach(a => {
|
||||
writeFile(a.path, a, window.repoFiles);
|
||||
});
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async getMedia(mediaFolder = this.mediaFolder) {
|
||||
if (!mediaFolder) {
|
||||
return [];
|
||||
}
|
||||
const files = getFolderFiles(window.repoFiles, mediaFolder.split('/')[0], '', 100).filter(f =>
|
||||
f.path.startsWith(mediaFolder),
|
||||
);
|
||||
return files.map(f => this.normalizeAsset(f.content as AssetProxy));
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const asset = getFile(path, window.repoFiles).content as AssetProxy;
|
||||
|
||||
const url = asset?.toString() ?? '';
|
||||
const name = basename(path);
|
||||
const blob = await fetch(url).then(res => res.blob());
|
||||
const fileObj = new File([blob], name);
|
||||
|
||||
return {
|
||||
id: url,
|
||||
displayURL: url,
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
normalizeAsset(assetProxy: AssetProxy) {
|
||||
const fileObj = assetProxy.fileObj as File;
|
||||
const { name, size } = fileObj;
|
||||
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
|
||||
const url = isError(objectUrl) ? '' : objectUrl;
|
||||
const normalizedAsset = {
|
||||
id: uuid(),
|
||||
name,
|
||||
size,
|
||||
path: assetProxy.path,
|
||||
url,
|
||||
displayURL: url,
|
||||
fileObj,
|
||||
};
|
||||
|
||||
return normalizedAsset;
|
||||
}
|
||||
|
||||
persistMedia(assetProxy: AssetProxy) {
|
||||
const normalizedAsset = this.normalizeAsset(assetProxy);
|
||||
|
||||
writeFile(assetProxy.path, assetProxy, window.repoFiles);
|
||||
|
||||
return Promise.resolve(normalizedAsset);
|
||||
}
|
||||
|
||||
deleteFiles(paths: string[]) {
|
||||
paths.forEach(path => {
|
||||
deleteFile(path, window.repoFiles);
|
||||
});
|
||||
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async allEntriesByFolder(
|
||||
folder: string,
|
||||
extension: string,
|
||||
depth: number,
|
||||
): Promise<ImplementationEntry[]> {
|
||||
const files = folder ? getFolderFiles(window.repoFiles, folder, extension, depth) : [];
|
||||
|
||||
const entries = files.map(f => ({
|
||||
data: f.content as string,
|
||||
file: { path: f.path, id: f.path },
|
||||
}));
|
||||
|
||||
return Promise.resolve(entries);
|
||||
}
|
||||
|
||||
getMediaDisplayURL(_displayURL: DisplayURL): Promise<string> {
|
||||
throw new Error('Not supported');
|
||||
}
|
||||
}
|
2
packages/core/src/backends/test/index.ts
Normal file
@ -0,0 +1,2 @@
export { default as TestBackend } from './implementation';
export { default as AuthenticationPage } from './AuthenticationPage';