Feat: editorial workflow bitbucket gitlab (#3014)

* refactor: typescript the backends

* feat: support multiple files upload for GitLab and BitBucket

* fix: load entry media files from media folder or UI state

* chore: cleanup log message

* chore: code cleanup

* refactor: typescript the test backend

* refactor: cleanup getEntry unused variables

* refactor: moved shared backend code to lib util

* chore: rename files to preserve history

* fix: bind readFile method to API classes

* test(e2e): switch to chrome in cypress tests

* refactor: extract common api methods

* refactor: remove most of immutable js usage from backends

* feat(backend-gitlab): initial editorial workflow support

* feat(backend-gitlab): implement missing workflow methods

* chore: fix lint error

* feat(backend-gitlab): support files deletion

* test(e2e): add gitlab cypress tests

* feat(backend-bitbucket): implement missing editorial workflow methods

* test(e2e): add BitBucket backend e2e tests

* build: update node version to 12 on netlify builds

* fix(backend-bitbucket): extract BitBucket avatar url

* test: fix git-gateway AuthenticationPage test

* test(e2e): fix some backend tests

* test(e2e): fix tests

* test(e2e): add git-gateway editorial workflow test

* chore: code cleanup

* test(e2e): revert back to electron

* test(e2e): add non editorial workflow tests

* fix(git-gateway-gitlab): don't call unpublishedEntry in simple workflow

GitLab git-gateway doesn't support the editorial workflow APIs yet, so this change makes sure they are not called in the simple workflow (a rough sketch of the guard follows the change list below).

* refactor(backend-bitbucket): switch to diffstat API instead of raw diff

* chore: fix test

* test(e2e): add more git-gateway tests

* fix: post rebase typescript fixes

* test(e2e): fix tests

* fix: fix parsing of content key and add tests

* refactor: rename test file

* test(unit): add getStatuses unit tests

* chore: update cypress

* docs: update beta docs
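
To make the simple-workflow guard noted above concrete, here is a minimal TypeScript sketch. It is not the literal diff: the class name, the `useWorkflow` flag, and the method shape are assumptions for illustration; only `EditorialWorkflowError` and its `(message, notUnderEditorialWorkflow)` signature come from `netlify-cms-lib-util`, as used elsewhere in this commit.

import { EditorialWorkflowError } from 'netlify-cms-lib-util';

// Sketch only: `useWorkflow` and the class/method names are hypothetical.
class GitGatewayGitLabBackend {
  constructor(private useWorkflow: boolean) {}

  async unpublishedEntry(collectionName: string, slug: string) {
    if (!this.useWorkflow) {
      // GitLab git-gateway exposes no editorial workflow endpoints yet, so
      // report `collectionName`/`slug` as not being under editorial workflow
      // instead of calling an API that does not exist.
      throw new EditorialWorkflowError('content is not under editorial workflow', true);
    }
    // ...otherwise query the real editorial workflow API as usual.
  }
}

In the simple workflow the CMS can treat this error as "no unpublished entry", so the backend degrades gracefully rather than failing the request.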

Authored by Erez Rokah on 2020-01-15 00:15:14 +02:00
Committed by Shawn Erquhart
parent 4ff5bc2ee0
commit 6f221ab3c1
251 changed files with 70910 additions and 15974 deletions

View File

@ -9,14 +9,21 @@ import {
flowAsync,
localForage,
onlySuccessfulPromises,
resolvePromiseProperties,
ResponseParser,
basename,
AssetProxy,
Entry as LibEntry,
PersistOptions,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
PreviewState,
FetchError,
} from 'netlify-cms-lib-util';
import {
UsersGetAuthenticatedResponse as GitHubUser,
ReposGetResponse as GitHubRepo,
ReposGetContentsResponseItem as GitHubFile,
ReposGetBranchResponse as GitHubBranch,
GitGetBlobResponse as GitHubBlob,
GitCreateTreeResponse as GitHubTree,
@ -28,35 +35,33 @@ import {
ReposCompareCommitsResponseBaseCommit as GitHubCompareBaseCommit,
GitCreateCommitResponseAuthor as GitHubAuthor,
GitCreateCommitResponseCommitter as GitHubCommiter,
ReposListStatusesForRefResponseItem,
} from '@octokit/rest';
const CMS_BRANCH_PREFIX = 'cms';
const CURRENT_METADATA_VERSION = '1';
interface FetchError extends Error {
status: number;
}
export const API_NAME = 'GitHub';
interface Config {
api_root?: string;
export interface Config {
apiRoot?: string;
token?: string;
branch?: string;
useOpenAuthoring: boolean;
useOpenAuthoring?: boolean;
repo?: string;
originRepo?: string;
squash_merges?: string;
squashMerges: boolean;
initialWorkflowStatus: string;
}
interface File {
interface TreeFile {
type: 'blob' | 'tree';
sha: string;
path: string;
raw?: string;
}
interface Entry extends File {
slug: string;
export interface Entry extends LibEntry {
sha?: string;
}
type Override<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;
@ -69,18 +74,20 @@ type GitHubCompareFile = ReposCompareCommitsResponseFilesItem & { previous_filen
type GitHubCompareFiles = GitHubCompareFile[];
interface CommitFields {
parents: { sha: string }[];
sha: string;
message: string;
author: string;
committer: string;
tree: { sha: string };
enum GitHubCommitStatusState {
Error = 'error',
Failure = 'failure',
Pending = 'pending',
Success = 'success',
}
interface PR {
type GitHubCommitStatus = ReposListStatusesForRefResponseItem & {
state: GitHubCommitStatusState;
};
export interface PR {
number: number;
head: string;
head: string | { sha: string };
}
interface MetaDataObjects {
@ -88,7 +95,7 @@ interface MetaDataObjects {
files: MediaFile[];
}
interface Metadata {
export interface Metadata {
type: string;
objects: MetaDataObjects;
branch: string;
@ -103,23 +110,16 @@ interface Metadata {
timeStamp: string;
}
interface Branch {
export interface Branch {
ref: string;
}
interface BlobArgs {
export interface BlobArgs {
sha: string;
repoURL: string;
parseText: boolean;
}
interface ContentArgs {
path: string;
branch: string;
repoURL: string;
parseText: boolean;
}
type Param = string | number | undefined;
type Options = RequestInit & { params?: Record<string, Param | Record<string, Param>> };
@ -133,30 +133,21 @@ const replace404WithEmptyArray = (err: FetchError) => {
}
};
type PersistOptions = {
useWorkflow: boolean;
commitMessage: string;
collectionName: string;
unpublished: boolean;
parsedData?: { title: string; description: string };
status: string;
};
type MediaFile = {
sha: string;
path: string;
};
export default class API {
api_root: string;
apiRoot: string;
token: string;
branch: string;
useOpenAuthoring: boolean;
useOpenAuthoring?: boolean;
repo: string;
originRepo: string;
repoURL: string;
originRepoURL: string;
merge_method: string;
mergeMethod: string;
initialWorkflowStatus: string;
_userPromise?: Promise<GitHubUser>;
@ -165,8 +156,7 @@ export default class API {
commitAuthor?: {};
constructor(config: Config) {
// eslint-disable-next-line @typescript-eslint/camelcase
this.api_root = config.api_root || 'https://api.github.com';
this.apiRoot = config.apiRoot || 'https://api.github.com';
this.token = config.token || '';
this.branch = config.branch || 'master';
this.useOpenAuthoring = config.useOpenAuthoring;
@ -175,15 +165,13 @@ export default class API {
this.repoURL = `/repos/${this.repo}`;
// when not in 'useOpenAuthoring' mode originRepoURL === repoURL
this.originRepoURL = `/repos/${this.originRepo}`;
// eslint-disable-next-line @typescript-eslint/camelcase
this.merge_method = config.squash_merges ? 'squash' : 'merge';
this.mergeMethod = config.squashMerges ? 'squash' : 'merge';
this.initialWorkflowStatus = config.initialWorkflowStatus;
}
static DEFAULT_COMMIT_MESSAGE = 'Automatically generated by Netlify CMS';
static DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
user() {
user(): Promise<{ name: string; login: string }> {
if (!this._userPromise) {
this._userPromise = this.request('/user') as Promise<GitHubUser>;
}
@ -199,6 +187,10 @@ export default class API {
});
}
reset() {
// no op
}
requestHeaders(headers = {}) {
const baseHeader: Record<string, string> = {
'Content-Type': 'application/json; charset=utf-8',
@ -207,10 +199,10 @@ export default class API {
if (this.token) {
baseHeader.Authorization = `token ${this.token}`;
return baseHeader;
return Promise.resolve(baseHeader);
}
return baseHeader;
return Promise.resolve(baseHeader);
}
parseJsonResponse(response: Response) {
@ -234,7 +226,7 @@ export default class API {
if (params.length) {
path += `?${params.join('&')}`;
}
return this.api_root + path;
return this.apiRoot + path;
}
parseResponse(response: Response) {
@ -252,16 +244,15 @@ export default class API {
}
handleRequestError(error: FetchError, responseStatus: number) {
throw new APIError(error.message, responseStatus, 'GitHub');
throw new APIError(error.message, responseStatus, API_NAME);
}
async request(
path: string,
options: Options = {},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
parser: ResponseParser<any> = response => this.parseResponse(response),
parser = (response: Response) => this.parseResponse(response),
) {
// overriding classes can return a promise from requestHeaders
const headers = await this.requestHeaders(options.headers || {});
const url = this.urlFor(path, options);
let responseStatus: number;
@ -274,7 +265,6 @@ export default class API {
}
async requestAllPages<T>(url: string, options: Options = {}) {
// overriding classes can return a promise from requestHeaders
const headers = await this.requestHeaders(options.headers || {});
const processedURL = this.urlFor(url, options);
const allResponses = await getAllResponses(processedURL, { ...options, headers });
@ -286,7 +276,7 @@ export default class API {
generateContentKey(collectionName: string, slug: string) {
if (!this.useOpenAuthoring) {
return `${collectionName}/${slug}`;
return generateContentKey(collectionName, slug);
}
return `${this.repo}/${collectionName}/${slug}`;
@ -353,7 +343,7 @@ export default class API {
const file = { path: `${key}.json`, raw: JSON.stringify(data) };
await this.uploadBlob(file);
const changeTree = await this.updateTree(branchData.sha, [file as File]);
const changeTree = await this.updateTree(branchData.sha, [file as TreeFile]);
const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
await this.patchRef('meta', '_netlify_cms', sha);
localForage.setItem(`gh.meta.${key}`, {
@ -433,16 +423,9 @@ export default class API {
});
}
retrieveContent({ path, branch, repoURL, parseText }: ContentArgs) {
return this.request(`${repoURL}/contents/${path}`, {
params: { ref: branch },
cache: 'no-store',
}).then((file: GitHubFile) => this.getBlob({ sha: file.sha, repoURL, parseText }));
}
readFile(
async readFile(
path: string,
sha: string | null,
sha?: string | null,
{
branch = this.branch,
repoURL = this.repoURL,
@ -453,11 +436,12 @@ export default class API {
parseText?: boolean;
} = {},
) {
if (sha) {
return this.getBlob({ sha, repoURL, parseText });
} else {
return this.retrieveContent({ path, branch, repoURL, parseText });
if (!sha) {
sha = await this.getFileSha(path, { repoURL, branch });
}
const fetchContent = () => this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
const content = await readFile(sha, fetchContent, localForage, parseText);
return content;
}
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
@ -479,38 +463,10 @@ export default class API {
}
}
async getMediaAsBlob(sha: string | null, path: string) {
let blob: Blob;
if (path.match(/.svg$/)) {
const text = (await this.readFile(path, sha, { parseText: true })) as string;
blob = new Blob([text], { type: 'image/svg+xml' });
} else {
blob = (await this.readFile(path, sha, { parseText: false })) as Blob;
}
return blob;
}
async getMediaDisplayURL(sha: string, path: string) {
const blob = await this.getMediaAsBlob(sha, path);
return URL.createObjectURL(blob);
}
getBlob({ sha, repoURL = this.repoURL, parseText = true }: BlobArgs) {
const key = parseText ? `gh.${sha}` : `gh.${sha}.blob`;
return localForage.getItem<string | Blob>(key).then(cached => {
if (cached) {
return cached;
}
return this.fetchBlobContent({ sha, repoURL, parseText }).then(result => {
localForage.setItem(key, result);
return result;
});
});
}
async listFiles(path: string, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
async listFiles(
path: string,
{ repoURL = this.repoURL, branch = this.branch, depth = 1 } = {},
): Promise<{ type: string; id: string; name: string; path: string; size: number }[]> {
const folder = trim(path, '/');
return this.request(`${repoURL}/git/trees/${branch}:${folder}`, {
// GitHub API supports recursive=1 for getting the entire recursive tree
@ -522,43 +478,50 @@ export default class API {
// filter only files and up to the required depth
.filter(file => file.type === 'blob' && file.path.split('/').length <= depth)
.map(file => ({
...file,
type: file.type,
id: file.sha,
name: basename(file.path),
path: `${folder}/${file.path}`,
size: file.size,
})),
)
.catch(replace404WithEmptyArray);
}
readUnpublishedBranchFile(contentKey: string) {
const metaDataPromise = this.retrieveMetadata(contentKey).then(data =>
data.objects.entry.path ? data : Promise.reject(null),
);
const repoURL = this.useOpenAuthoring
? `/repos/${contentKey
.split('/')
.slice(0, 2)
.join('/')}`
: this.repoURL;
return resolvePromiseProperties({
metaData: metaDataPromise,
fileData: metaDataPromise.then(data =>
this.readFile(data.objects.entry.path, null, {
branch: data.branch,
async readUnpublishedBranchFile(contentKey: string) {
try {
const metaData = await this.retrieveMetadata(contentKey).then(data =>
data.objects.entry.path ? data : Promise.reject(null),
);
const repoURL = this.useOpenAuthoring
? `/repos/${contentKey
.split('/')
.slice(0, 2)
.join('/')}`
: this.repoURL;
const [fileData, isModification] = await Promise.all([
this.readFile(metaData.objects.entry.path, null, {
branch: metaData.branch,
repoURL,
}),
),
isModification: metaDataPromise.then(data =>
this.isUnpublishedEntryModification(data.objects.entry.path, this.branch),
),
}).catch(() => {
}) as Promise<string>,
this.isUnpublishedEntryModification(metaData.objects.entry.path),
]);
return {
metaData,
fileData,
isModification,
slug: this.slugFromContentKey(contentKey, metaData.collection),
};
} catch (e) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
});
}
}
isUnpublishedEntryModification(path: string, branch: string) {
isUnpublishedEntryModification(path: string) {
return this.readFile(path, null, {
branch,
branch: this.branch,
repoURL: this.originRepoURL,
})
.then(() => true)
@ -635,7 +598,7 @@ export default class API {
const newBranchName = `cms/${newContentKey}`;
// create new branch and pull request in new format
const newBranch = await this.createBranch(newBranchName, (metaData.pr as PR).head);
const newBranch = await this.createBranch(newBranchName, (metaData.pr as PR).head as string);
const pr = await this.createPR(metaData.commitMessage, newBranchName);
// store new metadata
@ -667,7 +630,7 @@ export default class API {
return branch;
}
async listUnpublishedBranches() {
async listUnpublishedBranches(): Promise<Branch[]> {
console.log(
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
@ -720,8 +683,16 @@ export default class API {
*/
async getStatuses(sha: string) {
try {
const resp = await this.request(`${this.originRepoURL}/commits/${sha}/status`);
return resp.statuses;
const resp: { statuses: GitHubCommitStatus[] } = await this.request(
`${this.originRepoURL}/commits/${sha}/status`,
);
return resp.statuses.map(s => ({
context: s.context,
// eslint-disable-next-line @typescript-eslint/camelcase
target_url: s.target_url,
state:
s.state === GitHubCommitStatusState.Success ? PreviewState.Success : PreviewState.Other,
}));
} catch (err) {
if (err && err.message && err.message === 'Ref not found') {
return [];
@ -730,26 +701,35 @@ export default class API {
}
}
async persistFiles(entry: Entry, mediaFiles: File[], options: PersistOptions) {
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
const files = entry ? mediaFiles.concat(entry) : mediaFiles;
const uploadPromises = files.map(file => this.uploadBlob(file));
await Promise.all(uploadPromises);
if (!options.useWorkflow) {
return this.getDefaultBranch()
.then(branchData => this.updateTree(branchData.commit.sha, files))
.then(branchData =>
this.updateTree(branchData.commit.sha, files as { sha: string; path: string }[]),
)
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else {
const mediaFilesList = mediaFiles.map(({ sha, path }) => ({
path: trimStart(path, '/'),
sha,
}));
return this.editorialWorkflowGit(files, entry, mediaFilesList, options);
const mediaFilesList = (mediaFiles as { sha: string; path: string }[]).map(
({ sha, path }) => ({
path: trimStart(path, '/'),
sha,
}),
);
return this.editorialWorkflowGit(
files as TreeFile[],
entry as Entry,
mediaFilesList,
options,
);
}
}
getFileSha(path: string, branch: string) {
getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
/**
* We need to request the tree first to get the SHA. We use extended SHA-1
* syntax (<rev>:<path>) to get a blob from a tree without having to recurse
@ -760,22 +740,25 @@ export default class API {
const filename = last(pathArray);
const directory = initial(pathArray).join('/');
const fileDataPath = encodeURIComponent(directory);
const fileDataURL = `${this.repoURL}/git/trees/${branch}:${fileDataPath}`;
const fileDataURL = `${repoURL}/git/trees/${branch}:${fileDataPath}`;
return this.request(fileDataURL, { cache: 'no-store' }).then(resp => {
const { sha } = resp.tree.find((file: File) => file.path === filename);
return sha;
return this.request(fileDataURL, { cache: 'no-store' }).then((resp: GitHubTree) => {
const file = resp.tree.find(file => file.path === filename);
if (file) {
return file.sha;
}
throw new APIError('Not Found', 404, API_NAME);
});
}
deleteFile(path: string, message: string, options: { branch?: string } = {}) {
deleteFile(path: string, message: string) {
if (this.useOpenAuthoring) {
return Promise.reject('Cannot delete published entries as an Open Authoring user!');
}
const branch = options.branch || this.branch;
const branch = this.branch;
return this.getFileSha(path, branch).then(sha => {
return this.getFileSha(path, { branch }).then(sha => {
const params: { sha: string; message: string; branch: string; author?: { date: string } } = {
sha,
message,
@ -799,12 +782,12 @@ export default class API {
}
async editorialWorkflowGit(
files: File[],
files: TreeFile[],
entry: Entry,
mediaFilesList: MediaFile[],
options: PersistOptions,
) {
const contentKey = this.generateContentKey(options.collectionName, entry.slug);
const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
const branchName = this.generateBranchName(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
@ -837,14 +820,14 @@ export default class API {
user: user.name || user.login,
status: options.status || this.initialWorkflowStatus,
branch: branchName,
collection: options.collectionName,
collection: options.collectionName as string,
commitMessage: options.commitMessage,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
objects: {
entry: {
path: entry.path,
sha: entry.sha,
sha: entry.sha as string,
},
files: mediaFilesList,
},
@ -871,7 +854,7 @@ export default class API {
const pr = metadata.pr ? { ...metadata.pr, head: commit.sha } : undefined;
const objects = {
entry: { path: entry.path, sha: entry.sha },
entry: { path: entry.path, sha: entry.sha as string },
files: mediaFilesList,
};
@ -1114,7 +1097,7 @@ export default class API {
method: 'POST',
body: JSON.stringify({
title,
body: API.DEFAULT_PR_BODY,
body: DEFAULT_PR_BODY,
head: headReference,
base: this.branch,
}),
@ -1150,10 +1133,10 @@ export default class API {
method: 'PUT',
body: JSON.stringify({
// eslint-disable-next-line @typescript-eslint/camelcase
commit_message: 'Automatically generated. Merged on Netlify CMS.',
commit_message: MERGE_COMMIT_MESSAGE,
sha: headSha,
// eslint-disable-next-line @typescript-eslint/camelcase
merge_method: this.merge_method,
merge_method: this.mergeMethod,
}),
}).catch(error => {
if (error instanceof APIError && error.status === 405) {
@ -1184,7 +1167,7 @@ export default class API {
return Promise.resolve(Base64.encode(str));
}
uploadBlob(item: { raw?: string; sha?: string }) {
uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
const content = result(item, 'toBase64', partial(this.toBase64, item.raw as string));
return content.then(contentBase64 =>

View File

@ -1,6 +1,5 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
import { AuthenticationPage, Icon } from 'netlify-cms-ui-default';
@ -28,7 +27,7 @@ export default class GitHubAuthenticationPage extends React.Component {
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: ImmutablePropTypes.map,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
};
@ -75,11 +74,12 @@ export default class GitHubAuthenticationPage extends React.Component {
};
const auth = new NetlifyAuthenticator(cfg);
const openAuthoring = this.props.config.getIn(['backend', 'open_authoring'], false);
const scope = this.props.config.getIn(
['backend', 'auth_scope'],
openAuthoring ? 'public_repo' : 'repo',
);
const {
open_authoring: openAuthoring = false,
auth_scope: authScope = '',
} = this.props.config.backend;
const scope = authScope || (openAuthoring ? 'public_repo' : 'repo');
auth.authenticate({ provider: 'github', scope }, (err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
@ -137,8 +137,8 @@ export default class GitHubAuthenticationPage extends React.Component {
onLogin={this.handleLogin}
loginDisabled={inProgress || findingFork || requestingFork}
loginErrorMessage={loginError}
logoUrl={config.get('logo_url')}
siteUrl={config.get('site_url')}
logoUrl={config.logo_url}
siteUrl={config.site_url}
{...this.getAuthenticationPageRenderArgs()}
/>
);

View File

@ -1,16 +1,24 @@
import { ApolloClient } from 'apollo-client';
import { ApolloClient, QueryOptions, MutationOptions, OperationVariables } from 'apollo-client';
import {
InMemoryCache,
defaultDataIdFromObject,
IntrospectionFragmentMatcher,
NormalizedCacheObject,
} from 'apollo-cache-inmemory';
import { createHttpLink } from 'apollo-link-http';
import { setContext } from 'apollo-link-context';
import { APIError, EditorialWorkflowError } from 'netlify-cms-lib-util';
import {
APIError,
EditorialWorkflowError,
readFile,
localForage,
DEFAULT_PR_BODY,
} from 'netlify-cms-lib-util';
import introspectionQueryResultData from './fragmentTypes';
import API from './API';
import API, { Config, BlobArgs, PR, API_NAME } from './API';
import * as queries from './queries';
import * as mutations from './mutations';
import { GraphQLError } from 'graphql';
const NO_CACHE = 'no-cache';
const CACHE_FIRST = 'cache-first';
@ -19,16 +27,44 @@ const fragmentMatcher = new IntrospectionFragmentMatcher({
introspectionQueryResultData,
});
interface TreeEntry {
object?: {
entries: TreeEntry[];
};
type: 'blob' | 'tree';
name: string;
sha: string;
blob?: {
size: number;
};
}
interface TreeFile {
path: string;
id: string;
size: number;
type: string;
name: string;
}
type Error = GraphQLError & { type: string };
export default class GraphQLAPI extends API {
constructor(config) {
repoOwner: string;
repoName: string;
originRepoOwner: string;
originRepoName: string;
client: ApolloClient<NormalizedCacheObject>;
constructor(config: Config) {
super(config);
const [repoParts, originRepoParts] = [this.repo.split('/'), this.originRepo.split('/')];
this.repo_owner = repoParts[0];
this.repo_name = repoParts[1];
this.repoOwner = repoParts[0];
this.repoName = repoParts[1];
this.origin_repo_owner = originRepoParts[0];
this.origin_repo_name = originRepoParts[1];
this.originRepoOwner = originRepoParts[0];
this.originRepoName = originRepoParts[1];
this.client = this.getApolloClient();
}
@ -43,7 +79,7 @@ export default class GraphQLAPI extends API {
},
};
});
const httpLink = createHttpLink({ uri: `${this.api_root}/graphql` });
const httpLink = createHttpLink({ uri: `${this.apiRoot}/graphql` });
return new ApolloClient({
link: authLink.concat(httpLink),
cache: new InMemoryCache({ fragmentMatcher }),
@ -64,7 +100,7 @@ export default class GraphQLAPI extends API {
return this.client.resetStore();
}
async getRepository(owner, name) {
async getRepository(owner: string, name: string) {
const { data } = await this.query({
query: queries.repository,
variables: { owner, name },
@ -73,20 +109,20 @@ export default class GraphQLAPI extends API {
return data.repository;
}
query(options = {}) {
query(options: QueryOptions<OperationVariables>) {
return this.client.query(options).catch(error => {
throw new APIError(error.message, 500, 'GitHub');
});
}
mutate(options = {}) {
mutate(options: MutationOptions<OperationVariables>) {
return this.client.mutate(options).catch(error => {
throw new APIError(error.message, 500, 'GitHub');
});
}
async hasWriteAccess() {
const { repo_owner: owner, repo_name: name } = this;
const { repoOwner: owner, repoName: name } = this;
try {
const { data } = await this.query({
query: queries.repoPermission,
@ -110,7 +146,7 @@ export default class GraphQLAPI extends API {
return data.viewer;
}
async retrieveBlobObject(owner, name, expression, options = {}) {
async retrieveBlobObject(owner: string, name: string, expression: string, options = {}) {
const { data } = await this.query({
query: queries.blob,
variables: { owner, name, expression },
@ -118,62 +154,67 @@ export default class GraphQLAPI extends API {
});
// https://developer.github.com/v4/object/blob/
if (data.repository.object) {
const { is_binary, text } = data.repository.object;
return { is_null: false, is_binary, text };
const { is_binary: isBinary, text } = data.repository.object;
return { isNull: false, isBinary, text };
} else {
return { is_null: true };
return { isNull: true };
}
}
getOwnerAndNameFromRepoUrl(repoURL) {
let { repo_owner: owner, repo_name: name } = this;
getOwnerAndNameFromRepoUrl(repoURL: string) {
let { repoOwner: owner, repoName: name } = this;
if (repoURL === this.originRepoURL) {
({ origin_repo_owner: owner, origin_repo_name: name } = this);
({ originRepoOwner: owner, originRepoName: name } = this);
}
return { owner, name };
}
async retrieveContent({ path, branch, repoURL, parseText }) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { is_null, is_binary, text } = await this.retrieveBlobObject(
owner,
name,
`${branch}:${path}`,
);
if (is_null) {
throw new APIError('Not Found', 404, 'GitHub');
} else if (!is_binary) {
return text;
} else {
return super.retrieveContent({ path, branch, repoURL, parseText });
async readFile(
path: string,
sha?: string | null,
{
branch = this.branch,
repoURL = this.repoURL,
parseText = true,
}: {
branch?: string;
repoURL?: string;
parseText?: boolean;
} = {},
) {
if (!sha) {
sha = await this.getFileSha(path, { repoURL, branch });
}
const fetchContent = () => this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
const content = await readFile(sha, fetchContent, localForage, parseText);
return content;
}
async fetchBlobContent(sha, repoURL, parseText) {
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
if (!parseText) {
return super.fetchBlobContent(sha, repoURL);
return super.fetchBlobContent({ sha, repoURL, parseText });
}
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { is_null, is_binary, text } = await this.retrieveBlobObject(
const { isNull, isBinary, text } = await this.retrieveBlobObject(
owner,
name,
sha,
{ fetchPolicy: CACHE_FIRST }, // blob sha is derived from file content
);
if (is_null) {
if (isNull) {
throw new APIError('Not Found', 404, 'GitHub');
} else if (!is_binary) {
} else if (!isBinary) {
return text;
} else {
return super.fetchBlobContent(sha, repoURL);
return super.fetchBlobContent({ sha, repoURL, parseText });
}
}
async getStatuses(sha) {
const { origin_repo_owner: owner, origin_repo_name: name } = this;
async getStatuses(sha: string) {
const { originRepoOwner: owner, originRepoName: name } = this;
const { data } = await this.query({ query: queries.statues, variables: { owner, name, sha } });
if (data.repository.object) {
const { status } = data.repository.object;
@ -184,8 +225,8 @@ export default class GraphQLAPI extends API {
}
}
getAllFiles(entries, path) {
const allFiles = entries.reduce((acc, item) => {
getAllFiles(entries: TreeEntry[], path: string) {
const allFiles: TreeFile[] = entries.reduce((acc, item) => {
if (item.type === 'tree') {
const entries = item.object?.entries || [];
return [...acc, ...this.getAllFiles(entries, `${path}/${item.name}`)];
@ -193,19 +234,21 @@ export default class GraphQLAPI extends API {
return [
...acc,
{
...item,
name: item.name,
type: item.type,
id: item.sha,
path: `${path}/${item.name}`,
size: item.blob && item.blob.size,
size: item.blob ? item.blob.size : 0,
},
];
}
return acc;
}, []);
}, [] as TreeFile[]);
return allFiles;
}
async listFiles(path, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
async listFiles(path: string, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { data } = await this.query({
query: queries.files(depth),
@ -228,14 +271,18 @@ export default class GraphQLAPI extends API {
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
);
const { repo_owner: owner, repo_name: name } = this;
const { repoOwner: owner, repoName: name } = this;
const { data } = await this.query({
query: queries.unpublishedPrBranches,
variables: { owner, name },
});
const { nodes } = data.repository.refs;
const { nodes } = data.repository.refs as {
nodes: {
associatedPullRequests: { nodes: { headRef: { prefix: string; name: string } }[] };
}[];
};
if (nodes.length > 0) {
const branches = [];
const branches = [] as { ref: string }[];
nodes.forEach(({ associatedPullRequests }) => {
associatedPullRequests.nodes.forEach(({ headRef }) => {
branches.push({ ref: `${headRef.prefix}${headRef.name}` });
@ -252,13 +299,13 @@ export default class GraphQLAPI extends API {
}
}
async readUnpublishedBranchFile(contentKey) {
async readUnpublishedBranchFile(contentKey: string) {
// retrieveMetadata(contentKey) rejects in case of no metadata
const metaData = await this.retrieveMetadata(contentKey).catch(() => null);
if (metaData && metaData.objects && metaData.objects.entry && metaData.objects.entry.path) {
const { path } = metaData.objects.entry;
const { repo_owner: headOwner, repo_name: headRepoName } = this;
const { origin_repo_owner: baseOwner, origin_repo_name: baseRepoName } = this;
const { repoOwner: headOwner, repoName: headRepoName } = this;
const { originRepoOwner: baseOwner, originRepoName: baseRepoName } = this;
const { data } = await this.query({
query: queries.unpublishedBranchFile,
@ -278,6 +325,7 @@ export default class GraphQLAPI extends API {
metaData,
fileData: data.head.object.text,
isModification: !!data.base.object,
slug: this.slugFromContentKey(contentKey, metaData.collection),
};
return result;
} else {
@ -285,11 +333,11 @@ export default class GraphQLAPI extends API {
}
}
getBranchQualifiedName(branch) {
getBranchQualifiedName(branch: string) {
return `refs/heads/${branch}`;
}
getBranchQuery(branch, owner, name) {
getBranchQuery(branch: string, owner: string, name: string) {
return {
query: queries.branch,
variables: {
@ -302,20 +350,20 @@ export default class GraphQLAPI extends API {
async getDefaultBranch() {
const { data } = await this.query({
...this.getBranchQuery(this.branch, this.origin_repo_owner, this.origin_repo_name),
...this.getBranchQuery(this.branch, this.originRepoOwner, this.originRepoName),
});
return data.repository.branch;
}
async getBranch(branch) {
async getBranch(branch: string) {
const { data } = await this.query({
...this.getBranchQuery(branch, this.repo_owner, this.repo_name),
...this.getBranchQuery(branch, this.repoOwner, this.repoName),
fetchPolicy: CACHE_FIRST,
});
return data.repository.branch;
}
async patchRef(type, name, sha, opts = {}) {
async patchRef(type: string, name: string, sha: string, opts: { force?: boolean } = {}) {
if (type !== 'heads') {
return super.patchRef(type, name, sha, opts);
}
@ -329,24 +377,25 @@ export default class GraphQLAPI extends API {
input: { oid: sha, refId: branch.id, force },
},
});
return data.updateRef.branch;
return data!.updateRef.branch;
}
async deleteBranch(branchName) {
async deleteBranch(branchName: string) {
const branch = await this.getBranch(branchName);
const { data } = await this.mutate({
mutation: mutations.deleteBranch,
variables: {
deleteRefInput: { refId: branch.id },
},
update: store => store.data.delete(defaultDataIdFromObject(branch)),
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (store: any) => store.data.delete(defaultDataIdFromObject(branch)),
});
return data.deleteRef;
return data!.deleteRef;
}
getPullRequestQuery(number) {
const { origin_repo_owner: owner, origin_repo_name: name } = this;
getPullRequestQuery(number: number) {
const { originRepoOwner: owner, originRepoName: name } = this;
return {
query: queries.pullRequest,
@ -354,7 +403,7 @@ export default class GraphQLAPI extends API {
};
}
async getPullRequest(number) {
async getPullRequest(number: number) {
const { data } = await this.query({
...this.getPullRequestQuery(number),
fetchPolicy: CACHE_FIRST,
@ -370,24 +419,24 @@ export default class GraphQLAPI extends API {
};
}
getPullRequestAndBranchQuery(branch, number) {
const { repo_owner: owner, repo_name: name } = this;
const { origin_repo_owner: origin_owner, origin_repo_name: origin_name } = this;
getPullRequestAndBranchQuery(branch: string, number: number) {
const { repoOwner: owner, repoName: name } = this;
const { originRepoOwner, originRepoName } = this;
return {
query: queries.pullRequestAndBranch,
variables: {
owner,
name,
origin_owner,
origin_name,
originRepoOwner,
originRepoName,
number,
qualifiedName: this.getBranchQualifiedName(branch),
},
};
}
async getPullRequestAndBranch(branch, number) {
async getPullRequestAndBranch(branch: string, number: number) {
const { data } = await this.query({
...this.getPullRequestAndBranchQuery(branch, number),
fetchPolicy: CACHE_FIRST,
@ -397,7 +446,7 @@ export default class GraphQLAPI extends API {
return { branch: repository.branch, pullRequest: origin.pullRequest };
}
async openPR({ number }) {
async openPR({ number }: PR) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
@ -406,7 +455,7 @@ export default class GraphQLAPI extends API {
reopenPullRequestInput: { pullRequestId: pullRequest.id },
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult.reopenPullRequest;
const { pullRequest } = mutationResult!.reopenPullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
@ -416,10 +465,10 @@ export default class GraphQLAPI extends API {
},
});
return data.closePullRequest;
return data!.closePullRequest;
}
async closePR({ number }) {
async closePR({ number }: PR) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
@ -428,7 +477,7 @@ export default class GraphQLAPI extends API {
closePullRequestInput: { pullRequestId: pullRequest.id },
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult.closePullRequest;
const { pullRequest } = mutationResult!.closePullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
@ -438,10 +487,10 @@ export default class GraphQLAPI extends API {
},
});
return data.closePullRequest;
return data!.closePullRequest;
}
async deleteUnpublishedEntry(collectionName, slug) {
async deleteUnpublishedEntry(collectionName: string, slug: string) {
try {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = this.generateBranchName(contentKey);
@ -459,20 +508,21 @@ export default class GraphQLAPI extends API {
deleteRefInput: { refId: branch.id },
closePullRequestInput: { pullRequestId: pullRequest.id },
},
update: store => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (store: any) => {
store.data.delete(defaultDataIdFromObject(branch));
store.data.delete(defaultDataIdFromObject(pullRequest));
},
});
return data.closePullRequest;
return data!.closePullRequest;
} else {
return await this.deleteBranch(branchName);
}
} catch (e) {
const { graphQLErrors } = e;
if (graphQLErrors && graphQLErrors.length > 0) {
const branchNotFound = graphQLErrors.some(e => e.type === 'NOT_FOUND');
const branchNotFound = graphQLErrors.some((e: Error) => e.type === 'NOT_FOUND');
if (branchNotFound) {
return;
}
@ -481,9 +531,9 @@ export default class GraphQLAPI extends API {
}
}
async createPR(title, head) {
async createPR(title: string, head: string) {
const [repository, headReference] = await Promise.all([
this.getRepository(this.origin_repo_owner, this.origin_repo_name),
this.getRepository(this.originRepoOwner, this.originRepoName),
this.useOpenAuthoring ? `${(await this.user()).login}:${head}` : head,
]);
const { data } = await this.mutate({
@ -491,14 +541,14 @@ export default class GraphQLAPI extends API {
variables: {
createPullRequestInput: {
baseRefName: this.branch,
body: API.DEFAULT_PR_BODY,
body: DEFAULT_PR_BODY,
title,
headRefName: headReference,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult.createPullRequest;
const { pullRequest } = mutationResult!.createPullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
@ -507,13 +557,13 @@ export default class GraphQLAPI extends API {
});
},
});
const { pullRequest } = data.createPullRequest;
const { pullRequest } = data!.createPullRequest;
return { ...pullRequest, head: { sha: pullRequest.headRefOid } };
}
async createBranch(branchName, sha) {
const owner = this.repo_owner;
const name = this.repo_name;
async createBranch(branchName: string, sha: string) {
const owner = this.repoOwner;
const name = this.repoName;
const repository = await this.getRepository(owner, name);
const { data } = await this.mutate({
mutation: mutations.createBranch,
@ -525,7 +575,7 @@ export default class GraphQLAPI extends API {
},
},
update: (store, { data: mutationResult }) => {
const { branch } = mutationResult.createRef;
const { branch } = mutationResult!.createRef;
const branchData = { repository: { ...branch.repository, branch } };
store.writeQuery({
@ -534,13 +584,13 @@ export default class GraphQLAPI extends API {
});
},
});
const { branch } = data.createRef;
const { branch } = data!.createRef;
return { ...branch, ref: `${branch.prefix}${branch.name}` };
}
async createBranchAndPullRequest(branchName, sha, title) {
const owner = this.origin_repo_owner;
const name = this.origin_repo_name;
async createBranchAndPullRequest(branchName: string, sha: string, title: string) {
const owner = this.originRepoOwner;
const name = this.originRepoName;
const repository = await this.getRepository(owner, name);
const { data } = await this.mutate({
mutation: mutations.createBranchAndPullRequest,
@ -552,15 +602,15 @@ export default class GraphQLAPI extends API {
},
createPullRequestInput: {
baseRefName: this.branch,
body: API.DEFAULT_PR_BODY,
body: DEFAULT_PR_BODY,
title,
headRefName: branchName,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { branch } = mutationResult.createRef;
const { pullRequest } = mutationResult.createPullRequest;
const { branch } = mutationResult!.createRef;
const { pullRequest } = mutationResult!.createPullRequest;
const branchData = { repository: { ...branch.repository, branch } };
const pullRequestData = {
repository: { ...pullRequest.repository, branch },
@ -578,29 +628,20 @@ export default class GraphQLAPI extends API {
});
},
});
const { pullRequest } = data.createPullRequest;
const { pullRequest } = data!.createPullRequest;
return { ...pullRequest, head: { sha: pullRequest.headRefOid } };
}
async getPullRequestCommits(number) {
const { origin_repo_owner: owner, origin_repo_name: name } = this;
const { data } = await this.query({
query: queries.pullRequestCommits,
variables: { owner, name, number },
});
const { nodes } = data.repository.pullRequest.commits;
const commits = nodes.map(n => ({ ...n.commit, parents: n.commit.parents.nodes }));
return commits;
}
async getFileSha(path, branch) {
const { repo_owner: owner, repo_name: name } = this;
async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { data } = await this.query({
query: queries.fileSha,
variables: { owner, name, expression: `${branch}:${path}` },
});
return data.repository.file.sha;
if (data.repository.file) {
return data.repository.file.sha;
}
throw new APIError('Not Found', 404, API_NAME);
}
}

View File

@ -159,60 +159,6 @@ describe('github API', () => {
});
});
describe('getMediaAsBlob', () => {
it('should return response blob on non svg file', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const blob = {};
api.readFile = jest.fn().mockResolvedValue(blob);
await expect(api.getMediaAsBlob('sha', 'static/media/image.png')).resolves.toBe(blob);
expect(api.readFile).toHaveBeenCalledTimes(1);
expect(api.readFile).toHaveBeenCalledWith('static/media/image.png', 'sha', {
parseText: false,
});
});
it('should return text blob on svg file', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const text = 'svg';
api.readFile = jest.fn().mockResolvedValue(text);
await expect(api.getMediaAsBlob('sha', 'static/media/logo.svg')).resolves.toEqual(
new Blob([text], { type: 'image/svg+xml' }),
);
expect(api.readFile).toHaveBeenCalledTimes(1);
expect(api.readFile).toHaveBeenCalledWith('static/media/logo.svg', 'sha', {
parseText: true,
});
});
});
describe('getMediaDisplayURL', () => {
it('should return createObjectURL result', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const blob = {};
api.getMediaAsBlob = jest.fn().mockResolvedValue(blob);
global.URL.createObjectURL = jest
.fn()
.mockResolvedValue('blob:http://localhost:8080/blob-id');
await expect(api.getMediaDisplayURL('sha', 'static/media/image.png')).resolves.toBe(
'blob:http://localhost:8080/blob-id',
);
expect(api.getMediaAsBlob).toHaveBeenCalledTimes(1);
expect(api.getMediaAsBlob).toHaveBeenCalledWith('sha', 'static/media/image.png');
expect(global.URL.createObjectURL).toHaveBeenCalledTimes(1);
expect(global.URL.createObjectURL).toHaveBeenCalledWith(blob);
});
});
describe('persistFiles', () => {
it('should update tree, commit and patch branch when useWorkflow is false', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
@ -572,4 +518,24 @@ describe('github API', () => {
});
});
});
test('should get preview statuses', async () => {
const api = new API({ repo: 'repo' });
const statuses = [
{ context: 'deploy', state: 'success', target_url: 'deploy-url' },
{ context: 'build', state: 'error' },
];
api.request = jest.fn(() => Promise.resolve({ statuses }));
const sha = 'sha';
await expect(api.getStatuses(sha)).resolves.toEqual([
{ context: 'deploy', state: 'success', target_url: 'deploy-url' },
{ context: 'build', state: 'other' },
]);
expect(api.request).toHaveBeenCalledTimes(1);
expect(api.request).toHaveBeenCalledWith(`/repos/repo/commits/${sha}/status`);
});
});

View File

@ -44,27 +44,24 @@ describe('github GraphQL API', () => {
expect(api.getAllFiles(entries, path)).toEqual([
{
name: 'post-1.md',
sha: 'sha-1',
id: 'sha-1',
type: 'blob',
size: 1,
path: 'posts/post-1.md',
blob: { size: 1 },
},
{
name: 'post-2.md',
sha: 'sha-2',
id: 'sha-2',
type: 'blob',
size: 2,
path: 'posts/post-2.md',
blob: { size: 2 },
},
{
name: 'nested-post.md',
sha: 'nested-post-sha',
id: 'nested-post-sha',
type: 'blob',
size: 3,
path: 'posts/2019/nested-post.md',
blob: { size: 3 },
},
]);
});

View File

@ -4,20 +4,11 @@ jest.spyOn(console, 'error').mockImplementation(() => {});
describe('github backend implementation', () => {
const config = {
getIn: jest.fn().mockImplementation(array => {
if (array[0] === 'backend' && array[1] === 'repo') {
return 'owner/repo';
}
if (array[0] === 'backend' && array[1] === 'open_authoring') {
return false;
}
if (array[0] === 'backend' && array[1] === 'branch') {
return 'master';
}
if (array[0] === 'backend' && array[1] === 'api_root') {
return 'https://api.github.com';
}
}),
backend: {
repo: 'owner/repo',
open_authoring: false,
api_root: 'https://api.github.com',
},
};
const createObjectURL = jest.fn();
@ -102,7 +93,7 @@ describe('github backend implementation', () => {
};
expect.assertions(5);
await expect(gitHubImplementation.persistMedia(mediaFile)).resolves.toEqual({
await expect(gitHubImplementation.persistMedia(mediaFile, {})).resolves.toEqual({
id: 0,
name: 'image.png',
size: 100,
@ -140,9 +131,9 @@ describe('github backend implementation', () => {
});
describe('loadEntryMediaFiles', () => {
const getMediaAsBlob = jest.fn();
const readFile = jest.fn();
const mockAPI = {
getMediaAsBlob,
readFile,
};
it('should return media files from meta data', async () => {
@ -150,18 +141,17 @@ describe('github backend implementation', () => {
gitHubImplementation.api = mockAPI;
const blob = new Blob(['']);
getMediaAsBlob.mockResolvedValue(blob);
readFile.mockResolvedValue(blob);
const file = new File([blob], name);
await expect(
gitHubImplementation.loadEntryMediaFiles([
{ path: 'static/media/image.png', sha: 'image.png' },
gitHubImplementation.loadEntryMediaFiles('branch', [
{ path: 'static/media/image.png', id: 'sha' },
]),
).resolves.toEqual([
{
id: 'image.png',
sha: 'image.png',
id: 'sha',
displayURL: 'displayURL',
path: 'static/media/image.png',
name: 'image.png',
@ -186,24 +176,27 @@ describe('github backend implementation', () => {
gitHubImplementation.api = mockAPI;
gitHubImplementation.loadEntryMediaFiles = jest
.fn()
.mockResolvedValue([{ path: 'image.png', sha: 'sha' }]);
.mockResolvedValue([{ path: 'image.png', id: 'sha' }]);
generateContentKey.mockReturnValue('contentKey');
const data = {
fileData: 'fileData',
isModification: true,
metaData: { objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png' }] } },
metaData: {
branch: 'branch',
objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png', sha: 'sha' }] },
},
};
readUnpublishedBranchFile.mockResolvedValue(data);
const collection = { get: jest.fn().mockReturnValue('posts') };
const collection = 'posts';
await expect(gitHubImplementation.unpublishedEntry(collection, 'slug')).resolves.toEqual({
slug: 'slug',
file: { path: 'entry-path' },
file: { path: 'entry-path', id: null },
data: 'fileData',
metaData: { objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png' }] } },
mediaFiles: [{ path: 'image.png', sha: 'sha' }],
metaData: data.metaData,
mediaFiles: [{ path: 'image.png', id: 'sha' }],
isModification: true,
});
@ -214,9 +207,9 @@ describe('github backend implementation', () => {
expect(readUnpublishedBranchFile).toHaveBeenCalledWith('contentKey');
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledTimes(1);
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledWith(
data.metaData.objects.files,
);
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledWith('branch', [
{ path: 'image.png', id: 'sha' },
]);
});
});
});

View File

@ -1,512 +0,0 @@
import React from 'react';
import trimStart from 'lodash/trimStart';
import semaphore from 'semaphore';
import { stripIndent } from 'common-tags';
import { asyncLock, basename, getCollectionDepth } from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { get } from 'lodash';
import API from './API';
import GraphQLAPI from './GraphQLAPI';
const MAX_CONCURRENT_DOWNLOADS = 10;
/**
* Keywords for inferring a status that will provide a deploy preview URL.
*/
const PREVIEW_CONTEXT_KEYWORDS = ['deploy'];
/**
* Check a given status context string to determine if it provides a link to a
* deploy preview. Checks for an exact match against `previewContext` if given,
* otherwise checks for inclusion of a value from `PREVIEW_CONTEXT_KEYWORDS`.
*/
function isPreviewContext(context, previewContext) {
if (previewContext) {
return context === previewContext;
}
return PREVIEW_CONTEXT_KEYWORDS.some(keyword => context.includes(keyword));
}
/**
* Retrieve a deploy preview URL from an array of statuses. By default, a
* matching status is inferred via `isPreviewContext`.
*/
function getPreviewStatus(statuses, config) {
const previewContext = config.getIn(['backend', 'preview_context']);
return statuses.find(({ context }) => {
return isPreviewContext(context, previewContext);
});
}
export default class GitHub {
constructor(config, options = {}) {
this.config = config;
this.options = {
proxied: false,
API: null,
...options,
};
if (!this.options.proxied && config.getIn(['backend', 'repo']) == null) {
throw new Error('The GitHub backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.openAuthoringEnabled = config.getIn(['backend', 'open_authoring'], false);
if (this.openAuthoringEnabled) {
if (!this.options.useWorkflow) {
throw new Error(
'backend.open_authoring is true but publish_mode is not set to editorial_workflow.',
);
}
this.originRepo = config.getIn(['backend', 'repo'], '');
} else {
this.repo = this.originRepo = config.getIn(['backend', 'repo'], '');
}
this.branch = config.getIn(['backend', 'branch'], 'master').trim();
this.api_root = config.getIn(['backend', 'api_root'], 'https://api.github.com');
this.token = '';
this.squash_merges = config.getIn(['backend', 'squash_merges']);
this.use_graphql = config.getIn(['backend', 'use_graphql']);
this.lock = asyncLock();
}
async runWithLock(func, message) {
try {
const acquired = await this.lock.acquire();
if (!acquired) {
console.warn(message);
}
const result = await func();
return result;
} finally {
this.lock.release();
}
}
authComponent() {
const wrappedAuthenticationPage = props => <AuthenticationPage {...props} backend={this} />;
wrappedAuthenticationPage.displayName = 'AuthenticationPage';
return wrappedAuthenticationPage;
}
restoreUser(user) {
return this.openAuthoringEnabled
? this.authenticateWithFork({ userData: user, getPermissionToFork: () => true }).then(() =>
this.authenticate(user),
)
: this.authenticate(user);
}
async pollUntilForkExists({ repo, token }) {
const pollDelay = 250; // milliseconds
var repoExists = false;
while (!repoExists) {
repoExists = await fetch(`${this.api_root}/repos/${repo}`, {
headers: { Authorization: `token ${token}` },
})
.then(() => true)
.catch(err => {
if (err && err.status === 404) {
console.log('This 404 was expected and handled appropriately.');
return false;
} else {
return Promise.reject(err);
}
});
// wait between polls
if (!repoExists) {
await new Promise(resolve => setTimeout(resolve, pollDelay));
}
}
return Promise.resolve();
}
async currentUser({ token }) {
if (!this._currentUserPromise) {
this._currentUserPromise = fetch(`${this.api_root}/user`, {
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
}
return this._currentUserPromise;
}
async userIsOriginMaintainer({ username: usernameArg, token }) {
const username = usernameArg || (await this.currentUser({ token })).login;
this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
if (!this._userIsOriginMaintainerPromises[username]) {
this._userIsOriginMaintainerPromises[username] = fetch(
`${this.api_root}/repos/${this.originRepo}/collaborators/${username}/permission`,
{
headers: {
Authorization: `token ${token}`,
},
},
)
.then(res => res.json())
.then(({ permission }) => permission === 'admin' || permission === 'write');
}
return this._userIsOriginMaintainerPromises[username];
}
async forkExists({ token }) {
try {
const currentUser = await this.currentUser({ token });
const repoName = this.originRepo.split('/')[1];
const repo = await fetch(`${this.api_root}/repos/${currentUser.login}/${repoName}`, {
method: 'GET',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
// https://developer.github.com/v3/repos/#get
// The parent and source objects are present when the repository is a fork.
// parent is the repository this repository was forked from, source is the ultimate source for the network.
const forkExists =
repo.fork === true &&
repo.parent &&
repo.parent.full_name.toLowerCase() === this.originRepo.toLowerCase();
return forkExists;
} catch {
return false;
}
}
async authenticateWithFork({ userData, getPermissionToFork }) {
if (!this.openAuthoringEnabled) {
throw new Error('Cannot authenticate with fork; Open Authoring is turned off.');
}
const { token } = userData;
// Origin maintainers should be able to use the CMS normally
if (await this.userIsOriginMaintainer({ token })) {
this.repo = this.originRepo;
this.useOpenAuthoring = false;
return Promise.resolve();
}
if (!(await this.forkExists({ token }))) {
await getPermissionToFork();
}
const fork = await fetch(`${this.api_root}/repos/${this.originRepo}/forks`, {
method: 'POST',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
this.useOpenAuthoring = true;
this.repo = fork.full_name;
return this.pollUntilForkExists({ repo: fork.full_name, token });
}
async authenticate(state) {
this.token = state.token;
const apiCtor = this.use_graphql ? GraphQLAPI : API;
this.api = new apiCtor({
token: this.token,
branch: this.branch,
repo: this.repo,
originRepo: this.originRepo,
api_root: this.api_root,
squash_merges: this.squash_merges,
useOpenAuthoring: this.useOpenAuthoring,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const user = await this.api.user();
const isCollab = await this.api.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitHub account with access.
If your repo is under an organization, ensure the organization has granted access to Netlify
CMS.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitHub user account does not have access to this repo.');
}
// Authorized user
return { ...user, token: state.token, useOpenAuthoring: this.useOpenAuthoring };
}
logout() {
this.token = null;
if (this.api && typeof this.api.reset === 'function') {
return this.api.reset();
}
return;
}
getToken() {
return Promise.resolve(this.token);
}
async entriesByFolder(collection, extension) {
const repoURL = this.useOpenAuthoring ? this.api.originRepoURL : this.api.repoURL;
const files = await this.api.listFiles(collection.get('folder'), {
repoURL,
depth: getCollectionDepth(collection),
});
const filteredFiles = files.filter(file => file.name.endsWith('.' + extension));
return this.fetchFiles(filteredFiles, { repoURL });
}
entriesByFiles(collection) {
const repoURL = this.useOpenAuthoring ? this.api.originRepoURL : this.api.repoURL;
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
return this.fetchFiles(files, { repoURL });
}
fetchFiles = (files, { repoURL = this.api.repoURL } = {}) => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
files.forEach(file => {
promises.push(
new Promise(resolve =>
sem.take(() =>
this.api
.readFile(file.path, file.sha, { repoURL })
.then(data => {
resolve({ file, data });
sem.leave();
})
.catch((err = true) => {
sem.leave();
console.error(`failed to load file from GitHub: ${file.path}`);
resolve({ error: err });
}),
),
),
);
});
return Promise.all(promises).then(loadedEntries =>
loadedEntries.filter(loadedEntry => !loadedEntry.error),
);
};
// Fetches a single entry.
getEntry(collection, slug, path) {
const repoURL = this.api.originRepoURL;
return this.api.readFile(path, null, { repoURL }).then(data => ({
file: { path },
data,
}));
}
getMedia(mediaFolder = this.config.get('media_folder')) {
return this.api.listFiles(mediaFolder).then(files =>
files.map(({ sha, name, size, path }) => {
// load media using getMediaDisplayURL to avoid token expiration with GitHub raw content urls
// for private repositories
return { id: sha, name, size, displayURL: { id: sha, path }, path };
}),
);
}
async getMediaFile(path) {
const blob = await this.api.getMediaAsBlob(null, path);
const name = basename(path);
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
async getMediaDisplayURL(displayURL) {
const { id, path } = displayURL;
const mediaURL = await this.api.getMediaDisplayURL(id, path);
return mediaURL;
}
persistEntry(entry, mediaFiles = [], options = {}) {
// persistEntry is a transactional operation
return this.runWithLock(
() => this.api.persistFiles(entry, mediaFiles, options),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile, options = {}) {
try {
await this.api.persistFiles(null, [mediaFile], options);
const { sha, path, fileObj } = mediaFile;
const displayURL = URL.createObjectURL(fileObj);
return {
id: sha,
name: fileObj.name,
size: fileObj.size,
displayURL,
path: trimStart(path, '/'),
};
} catch (error) {
console.error(error);
throw error;
}
}
deleteFile(path, commitMessage, options) {
return this.api.deleteFile(path, commitMessage, options);
}
async loadMediaFile(file) {
return this.api.getMediaAsBlob(file.sha, file.path).then(blob => {
const name = basename(file.path);
const fileObj = new File([blob], name);
return {
id: file.sha,
sha: file.sha,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
async loadEntryMediaFiles(files) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(file)));
return mediaFiles;
}
unpublishedEntries() {
return this.api
.listUnpublishedBranches()
.then(branches => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
branches.map(({ ref }) => {
promises.push(
new Promise(resolve => {
const contentKey = this.api.contentKeyFromRef(ref);
return sem.take(() =>
this.api
.readUnpublishedBranchFile(contentKey)
.then(data => {
if (data === null || data === undefined) {
resolve(null);
sem.leave();
} else {
resolve({
slug: this.api.slugFromContentKey(contentKey, data.metaData.collection),
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
});
sem.leave();
}
})
.catch(() => {
sem.leave();
resolve(null);
}),
);
}),
);
});
return Promise.all(promises);
})
.catch(error => {
if (error.message === 'Not Found') {
return Promise.resolve([]);
}
return Promise.reject(error);
});
}
async unpublishedEntry(
collection,
slug,
{ loadEntryMediaFiles = files => this.loadEntryMediaFiles(files) } = {},
) {
const contentKey = this.api.generateContentKey(collection.get('name'), slug);
const data = await this.api.readUnpublishedBranchFile(contentKey);
if (!data) {
return null;
}
const files = get(data, 'metaData.objects.files', []);
const mediaFiles = await loadEntryMediaFiles(files);
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}
/**
* Uses GitHub's Statuses API to retrieve statuses and infers which one is for a
* deploy preview via `getPreviewStatus`. Returns the URL provided by that
* status, along with the status state, which should be one of 'success',
* 'pending', or 'failure'.
*/
async getDeployPreview(collection, slug) {
const contentKey = this.api.generateContentKey(collection.get('name'), slug);
const data = await this.api.retrieveMetadata(contentKey);
if (!data || !data.pr) {
return null;
}
const headSHA = typeof data.pr.head === 'string' ? data.pr.head : data.pr.head.sha;
const statuses = await this.api.getStatuses(headSHA);
const deployStatus = getPreviewStatus(statuses, this.config);
if (deployStatus) {
const { target_url, state } = deployStatus;
return { url: target_url, status: state };
}
}
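// A rough sketch of the data this method relies on (shapes assumed from GitHub's
// Statuses API, not taken from this commit): getStatuses(headSHA) resolves to items like
//   { context: 'deploy/netlify', state: 'success', target_url: 'https://deploy-preview-1--example.netlify.com' },
// and getPreviewStatus picks the one that looks like a deploy preview, so this
// method would return { url: 'https://deploy-preview-1--example.netlify.com', status: 'success' }.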
updateUnpublishedEntryStatus(collection, slug, newStatus) {
// updateUnpublishedEntryStatus is a transactional operation
return this.runWithLock(
() => this.api.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
deleteUnpublishedEntry(collection, slug) {
// deleteUnpublishedEntry is a transactional operation
return this.runWithLock(
() => this.api.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
publishUnpublishedEntry(collection, slug) {
// publishUnpublishedEntry is a transactional operation
return this.runWithLock(
() => this.api.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
}

View File

@ -0,0 +1,498 @@
import * as React from 'react';
import semaphore, { Semaphore } from 'semaphore';
import trimStart from 'lodash/trimStart';
import { stripIndent } from 'common-tags';
import {
asyncLock,
basename,
AsyncLock,
Implementation,
AssetProxy,
PersistOptions,
DisplayURL,
getBlobSHA,
entriesByFolder,
entriesByFiles,
unpublishedEntries,
User,
getMediaDisplayURL,
getMediaAsBlob,
Credentials,
filterByPropExtension,
Config,
ImplementationFile,
getPreviewStatus,
UnpublishedEntryMediaFile,
runWithLock,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { UsersGetAuthenticatedResponse as GitHubUser } from '@octokit/rest';
import API, { Entry } from './API';
import GraphQLAPI from './GraphQLAPI';
const MAX_CONCURRENT_DOWNLOADS = 10;
export default class GitHub implements Implementation {
lock: AsyncLock;
api: API | null;
options: {
proxied: boolean;
API: API | null;
useWorkflow?: boolean;
initialWorkflowStatus: string;
};
originRepo: string;
repo?: string;
openAuthoringEnabled: boolean;
useOpenAuthoring?: boolean;
branch: string;
apiRoot: string;
mediaFolder: string;
previewContext: string;
token: string | null;
squashMerges: boolean;
useGraphql: boolean;
_currentUserPromise?: Promise<GitHubUser>;
_userIsOriginMaintainerPromises?: {
[key: string]: Promise<boolean>;
};
_mediaDisplayURLSem?: Semaphore;
constructor(config: Config, options = {}) {
this.options = {
proxied: false,
API: null,
initialWorkflowStatus: '',
...options,
};
if (
!this.options.proxied &&
(config.backend.repo === null || config.backend.repo === undefined)
) {
throw new Error('The GitHub backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.openAuthoringEnabled = config.backend.open_authoring || false;
if (this.openAuthoringEnabled) {
if (!this.options.useWorkflow) {
throw new Error(
'backend.open_authoring is true but publish_mode is not set to editorial_workflow.',
);
}
this.originRepo = config.backend.repo || '';
} else {
this.repo = this.originRepo = config.backend.repo || '';
}
this.branch = config.backend.branch?.trim() || 'master';
this.apiRoot = config.backend.api_root || 'https://api.github.com';
this.token = '';
this.squashMerges = config.backend.squash_merges || false;
this.useGraphql = config.backend.use_graphql || false;
this.mediaFolder = config.media_folder;
this.previewContext = config.backend.preview_context || '';
this.lock = asyncLock();
}
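// A minimal sketch of the config.yml these fields are read from (values are
// illustrative, not taken from this commit):
//
//   backend:
//     name: github
//     repo: owner/repo                  # this.repo / this.originRepo
//     branch: master                    # this.branch
//     api_root: https://api.github.com  # this.apiRoot
//     squash_merges: true               # this.squashMerges
//     use_graphql: true                 # this.useGraphql
//     open_authoring: false             # this.openAuthoringEnabled
//     preview_context: deploy/netlify   # this.previewContext
//   media_folder: static/images         # this.mediaFolder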
authComponent() {
const wrappedAuthenticationPage = (props: Record<string, unknown>) => (
<AuthenticationPage {...props} backend={this} />
);
wrappedAuthenticationPage.displayName = 'AuthenticationPage';
return wrappedAuthenticationPage;
}
restoreUser(user: User) {
return this.openAuthoringEnabled
? this.authenticateWithFork({ userData: user, getPermissionToFork: () => true }).then(() =>
this.authenticate(user),
)
: this.authenticate(user);
}
async pollUntilForkExists({ repo, token }: { repo: string; token: string }) {
const pollDelay = 250; // milliseconds
let repoExists = false;
while (!repoExists) {
repoExists = await fetch(`${this.apiRoot}/repos/${repo}`, {
headers: { Authorization: `token ${token}` },
})
.then(() => true)
.catch(err => {
if (err && err.status === 404) {
console.log('This 404 was expected and handled appropriately.');
return false;
} else {
return Promise.reject(err);
}
});
// wait between polls
if (!repoExists) {
await new Promise(resolve => setTimeout(resolve, pollDelay));
}
}
return Promise.resolve();
}
async currentUser({ token }: { token: string }) {
if (!this._currentUserPromise) {
this._currentUserPromise = fetch(`${this.apiRoot}/user`, {
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
}
return this._currentUserPromise;
}
async userIsOriginMaintainer({
username: usernameArg,
token,
}: {
username?: string;
token: string;
}) {
const username = usernameArg || (await this.currentUser({ token })).login;
this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
if (!this._userIsOriginMaintainerPromises[username]) {
this._userIsOriginMaintainerPromises[username] = fetch(
`${this.apiRoot}/repos/${this.originRepo}/collaborators/${username}/permission`,
{
headers: {
Authorization: `token ${token}`,
},
},
)
.then(res => res.json())
.then(({ permission }) => permission === 'admin' || permission === 'write');
}
return this._userIsOriginMaintainerPromises[username];
}
async forkExists({ token }: { token: string }) {
try {
const currentUser = await this.currentUser({ token });
const repoName = this.originRepo.split('/')[1];
const repo = await fetch(`${this.apiRoot}/repos/${currentUser.login}/${repoName}`, {
method: 'GET',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
// https://developer.github.com/v3/repos/#get
// The parent and source objects are present when the repository is a fork.
// parent is the repository this repository was forked from, source is the ultimate source for the network.
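// e.g. for a fork of owner/repo the response might look like (shape per the API docs above):
//   { fork: true, parent: { full_name: 'owner/repo' }, source: { full_name: 'owner/repo' } }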
const forkExists =
repo.fork === true &&
repo.parent &&
repo.parent.full_name.toLowerCase() === this.originRepo.toLowerCase();
return forkExists;
} catch {
return false;
}
}
async authenticateWithFork({
userData,
getPermissionToFork,
}: {
userData: User;
getPermissionToFork: () => Promise<boolean> | boolean;
}) {
if (!this.openAuthoringEnabled) {
throw new Error('Cannot authenticate with fork; Open Authoring is turned off.');
}
const token = userData.token as string;
// Origin maintainers should be able to use the CMS normally
if (await this.userIsOriginMaintainer({ token })) {
this.repo = this.originRepo;
this.useOpenAuthoring = false;
return Promise.resolve();
}
if (!(await this.forkExists({ token }))) {
await getPermissionToFork();
}
const fork = await fetch(`${this.apiRoot}/repos/${this.originRepo}/forks`, {
method: 'POST',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
this.useOpenAuthoring = true;
this.repo = fork.full_name;
return this.pollUntilForkExists({ repo: fork.full_name, token });
}
async authenticate(state: Credentials) {
this.token = state.token as string;
const apiCtor = this.useGraphql ? GraphQLAPI : API;
this.api = new apiCtor({
token: this.token,
branch: this.branch,
repo: this.repo,
originRepo: this.originRepo,
apiRoot: this.apiRoot,
squashMerges: this.squashMerges,
useOpenAuthoring: this.useOpenAuthoring,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const user = await this.api!.user();
const isCollab = await this.api!.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitHub account with access.
If your repo is under an organization, ensure the organization has granted access to Netlify CMS.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitHub user account does not have access to this repo.');
}
// Authorized user
return { ...user, token: state.token as string, useOpenAuthoring: this.useOpenAuthoring };
}
logout() {
this.token = null;
if (this.api && this.api.reset && typeof this.api.reset === 'function') {
return this.api.reset();
}
}
getToken() {
return Promise.resolve(this.token);
}
async entriesByFolder(folder: string, extension: string, depth: number) {
const repoURL = this.useOpenAuthoring ? this.api!.originRepoURL : this.api!.repoURL;
const listFiles = () =>
this.api!.listFiles(folder, {
repoURL,
depth,
}).then(filterByPropExtension(extension, 'path'));
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
return entriesByFolder(listFiles, readFile, 'GitHub');
}
entriesByFiles(files: ImplementationFile[]) {
const repoURL = this.useOpenAuthoring ? this.api!.originRepoURL : this.api!.repoURL;
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
return entriesByFiles(files, readFile, 'GitHub');
}
// Fetches a single entry.
getEntry(path: string) {
const repoURL = this.api!.originRepoURL;
return this.api!.readFile(path, null, { repoURL }).then(data => ({
file: { path, id: null },
data: data as string,
}));
}
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listFiles(mediaFolder).then(files =>
files.map(({ id, name, size, path }) => {
// load media using getMediaDisplayURL to avoid token expiration with GitHub raw content urls
// for private repositories
return { id, name, size, displayURL: { id, path }, path };
}),
);
}
async getMediaFile(path: string) {
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const name = basename(path);
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(blob);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
persistEntry(entry: Entry, mediaFiles: AssetProxy[] = [], options: PersistOptions) {
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry, mediaFiles, options),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
try {
await this.api!.persistFiles(null, [mediaFile], options);
const { sha, path, fileObj } = mediaFile as AssetProxy & { sha: string };
const displayURL = URL.createObjectURL(fileObj);
return {
id: sha,
name: fileObj!.name,
size: fileObj!.size,
displayURL,
path: trimStart(path, '/'),
};
} catch (error) {
console.error(error);
throw error;
}
}
deleteFile(path: string, commitMessage: string) {
return this.api!.deleteFile(path, commitMessage);
}
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
return getMediaAsBlob(file.path, file.id, readFile).then(blob => {
const name = basename(file.path);
const fileObj = new File([blob], name);
return {
id: file.id,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
}
unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(({ ref }) => this.api!.contentKeyFromRef(ref)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
this.api!.readUnpublishedBranchFile(contentKey);
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, 'GitHub');
}
async unpublishedEntry(
collection: string,
slug: string,
{
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
this.loadEntryMediaFiles(branch, files),
} = {},
) {
const contentKey = this.api!.generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const files = data.metaData.objects.files || [];
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
files.map(({ sha: id, path }) => ({ id, path })),
);
return {
slug,
file: { path: data.metaData.objects.entry.path, id: null },
data: data.fileData as string,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}
/**
* Uses GitHub's Statuses API to retrieve statuses and infers which one is for a
* deploy preview via `getPreviewStatus`. Returns the URL provided by that
* status, along with the status state, which should be one of 'success',
* 'pending', or 'failure'.
*/
async getDeployPreview(collectionName: string, slug: string) {
const contentKey = this.api!.generateContentKey(collectionName, slug);
const data = await this.api!.retrieveMetadata(contentKey);
if (!data || !data.pr) {
return null;
}
const headSHA = typeof data.pr.head === 'string' ? data.pr.head : data.pr.head.sha;
const statuses = await this.api!.getStatuses(headSHA);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
}
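// Hedged example (values assumed, not from this commit): with
// `backend.preview_context: deploy/netlify` in config.yml, a status of
//   { context: 'deploy/netlify', state: 'pending', target_url: 'https://deploy-preview-2--example.netlify.com' }
// yields { url: 'https://deploy-preview-2--example.netlify.com', status: 'pending' }.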
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
}

View File

@ -126,7 +126,7 @@ const buildFilesQuery = (depth = 1) => {
return query;
};
export const files = depth => gql`
export const files = (depth: number) => gql`
query files($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
@ -263,32 +263,6 @@ export const tree = gql`
${fragments.treeEntry}
`;
export const pullRequestCommits = gql`
query pullRequestCommits($owner: String!, $name: String!, $number: Int!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
pullRequest(number: $number) {
id
commits(last: 100) {
nodes {
id
commit {
...ObjectParts
parents(last: 100) {
nodes {
...ObjectParts
}
}
}
}
}
}
}
}
${fragments.repository}
${fragments.object}
`;
export const fileSha = gql`
query fileSha($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {