2018-08-07 14:46:54 -06:00
|
|
|
import { Base64 } from 'js-base64';
|
2019-12-18 18:16:02 +02:00
|
|
|
import semaphore, { Semaphore } from 'semaphore';
|
2020-01-08 19:02:44 +02:00
|
|
|
import { flow, get, initial, last, partial, result, differenceBy, trimStart, trim } from 'lodash';
|
2019-11-26 09:40:27 +01:00
|
|
|
import { map, filter } from 'lodash/fp';
|
2019-03-15 10:19:57 -04:00
|
|
|
import {
|
2019-09-03 21:56:20 +03:00
|
|
|
getAllResponses,
|
2019-03-15 10:19:57 -04:00
|
|
|
APIError,
|
|
|
|
EditorialWorkflowError,
|
2019-11-26 09:40:27 +01:00
|
|
|
flowAsync,
|
2019-07-24 15:20:41 -07:00
|
|
|
localForage,
|
|
|
|
onlySuccessfulPromises,
|
2019-12-22 15:20:42 +02:00
|
|
|
basename,
|
2020-01-15 00:15:14 +02:00
|
|
|
AssetProxy,
|
|
|
|
Entry as LibEntry,
|
|
|
|
PersistOptions,
|
|
|
|
readFile,
|
|
|
|
CMS_BRANCH_PREFIX,
|
|
|
|
generateContentKey,
|
|
|
|
DEFAULT_PR_BODY,
|
|
|
|
MERGE_COMMIT_MESSAGE,
|
|
|
|
PreviewState,
|
|
|
|
FetchError,
|
2019-03-15 10:19:57 -04:00
|
|
|
} from 'netlify-cms-lib-util';
|
2020-02-17 11:10:56 +01:00
|
|
|
import { Octokit } from '@octokit/rest';
|
|
|
|
|
|
|
|
// Local aliases narrowing Octokit's generated response types to the shapes
// this module actually consumes.
type GitHubUser = Octokit.UsersGetAuthenticatedResponse;
type GitHubRepo = Octokit.ReposGetResponse;
type GitHubBranch = Octokit.ReposGetBranchResponse;
type GitHubBlob = Octokit.GitGetBlobResponse;
type GitHubTree = Octokit.GitCreateTreeResponse;
type GitCreateTreeParamsTree = Octokit.GitCreateTreeParamsTree;
type GitHubCommit = Octokit.GitCreateCommitResponse;
type GitHubCompareCommit = Octokit.ReposCompareCommitsResponseCommitsItem;
type ReposCompareCommitsResponseFilesItem = Octokit.ReposCompareCommitsResponseFilesItem;
type GitHubCompareResponse = Octokit.ReposCompareCommitsResponse;
type GitHubCompareBaseCommit = Octokit.ReposCompareCommitsResponseBaseCommit;
type GitHubAuthor = Octokit.GitCreateCommitResponseAuthor;
type GitHubCommitter = Octokit.GitCreateCommitResponseCommitter;
type ReposListStatusesForRefResponseItem = Octokit.ReposListStatusesForRefResponseItem;
|
2016-08-30 19:06:20 -03:00
|
|
|
|
2019-11-26 09:40:27 +01:00
|
|
|
// Metadata schema version written by storeMetadata callers; migrateBranch
// upgrades entries whose metadata carries no version.
const CURRENT_METADATA_VERSION = '1';

// Backend display name; also used to tag APIError instances raised here.
export const API_NAME = 'GitHub';
|
2019-12-18 18:16:02 +02:00
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// Constructor options for the API class.
export interface Config {
  // Base REST endpoint; defaults to 'https://api.github.com'.
  apiRoot?: string;
  token?: string;
  // Default branch to read from / write to; defaults to 'master'.
  branch?: string;
  // Open authoring mode: entries live on a fork, `originRepo` is the upstream.
  useOpenAuthoring?: boolean;
  // 'owner/name' of the repo written to.
  repo?: string;
  // Upstream repo when open authoring; defaults to `repo`.
  originRepo?: string;
  // When true, PRs are merged with the 'squash' method instead of 'merge'.
  squashMerges: boolean;
  initialWorkflowStatus: string;
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// Tree entry handed to updateTree: the uploaded blob's sha plus its repo path.
// `raw` carries the original text content for blob uploads.
interface TreeFile {
  type: 'blob' | 'tree';
  sha: string;
  path: string;
  raw?: string;
}

// Entry as used by this backend; `sha` is filled in once its blob is uploaded.
export interface Entry extends LibEntry {
  sha?: string;
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
// Replace the types of T's keys with those declared in U (keys of U win).
type Override<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;

// Tree entry whose sha may be null — a null sha asks GitHub to delete the file.
type TreeEntry = Override<GitCreateTreeParamsTree, { sha: string | null }>;

type GitHubCompareCommits = GitHubCompareCommit[];

// Compare-API file entry; `previous_filename` is present for renamed files.
type GitHubCompareFile = ReposCompareCommitsResponseFilesItem & { previous_filename?: string };

type GitHubCompareFiles = GitHubCompareFile[];
|
2019-12-18 18:16:02 +02:00
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// States reported by the GitHub commit status API.
enum GitHubCommitStatusState {
  Error = 'error',
  Failure = 'failure',
  Pending = 'pending',
  Success = 'success',
}

// A commit status with `state` narrowed to the known enum values.
type GitHubCommitStatus = ReposListStatusesForRefResponseItem & {
  state: GitHubCommitStatusState;
};

// Minimal pull-request shape stored in entry metadata.
export interface PR {
  number: number;
  // Stored either as a bare head sha string or as an object carrying the sha.
  head: string | { sha: string };
}

// Pointers to the entry file and its media files, as recorded in metadata.
interface MetaDataObjects {
  entry: { path: string; sha: string };
  files: MediaFile[];
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// Editorial-workflow metadata persisted per entry on the meta/_netlify_cms ref.
export interface Metadata {
  type: string;
  objects: MetaDataObjects;
  // Draft branch holding the entry, e.g. 'cms/<collection>/<slug>'.
  branch: string;
  // Workflow status, e.g. 'draft' or 'pending_review'.
  status: string;
  pr?: PR;
  collection: string;
  commitMessage: string;
  // Schema version; absent on entries written before versioning existed.
  version?: string;
  user: string;
  title?: string;
  description?: string;
  timeStamp: string;
}

// A git ref wrapper, e.g. { ref: 'refs/heads/cms/posts/my-post' }.
export interface Branch {
  ref: string;
}

// Arguments for fetchBlobContent.
export interface BlobArgs {
  sha: string;
  repoURL: string;
  // true → decode blob as UTF-8 text; false → return a binary Blob.
  parseText: boolean;
}

type Param = string | number | undefined;

// fetch() options extended with a query-params map serialized by urlFor.
type Options = RequestInit & { params?: Record<string, Param | Record<string, Param>> };
|
|
|
|
|
|
|
|
const replace404WithEmptyArray = (err: FetchError) => {
|
2019-11-10 00:34:03 -08:00
|
|
|
if (err && err.status === 404) {
|
|
|
|
console.log('This 404 was expected and handled appropriately.');
|
|
|
|
return [];
|
|
|
|
} else {
|
|
|
|
return Promise.reject(err);
|
|
|
|
}
|
|
|
|
};
|
2019-07-24 15:20:41 -07:00
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
// Reference to an uploaded media file: its blob sha plus repo path.
type MediaFile = {
  sha: string;
  path: string;
};
|
|
|
|
|
2016-08-30 19:06:20 -03:00
|
|
|
// Wrapper around the GitHub REST API used by the GitHub backend.
export default class API {
  // Base REST endpoint, e.g. 'https://api.github.com'.
  apiRoot: string;
  token: string;
  branch: string;
  useOpenAuthoring?: boolean;
  // 'owner/name' of the repo written to (the fork in open authoring mode).
  repo: string;
  // Upstream repo; equals `repo` outside open authoring mode.
  originRepo: string;
  repoURL: string;
  originRepoURL: string;
  // 'squash' or 'merge', derived from Config.squashMerges.
  mergeMethod: string;
  initialWorkflowStatus: string;

  // Memoized authenticated-user request (see user()).
  _userPromise?: Promise<GitHubUser>;
  // Serializes metadata-ref updates (see storeMetadata/deleteMetadata).
  _metadataSemaphore?: Semaphore;

  commitAuthor?: {};
|
|
|
|
|
|
|
|
/**
 * Capture connection details for the GitHub (or Enterprise) REST API.
 * `originRepo` falls back to `repo`, so outside open authoring mode the
 * origin URLs are identical to the repo URLs.
 */
constructor(config: Config) {
  const { apiRoot, token, branch, repo, originRepo } = config;
  this.apiRoot = apiRoot || 'https://api.github.com';
  this.token = token || '';
  this.branch = branch || 'master';
  this.useOpenAuthoring = config.useOpenAuthoring;
  this.repo = repo || '';
  this.originRepo = originRepo || this.repo;
  this.repoURL = `/repos/${this.repo}`;
  // when not in 'useOpenAuthoring' mode originRepoURL === repoURL
  this.originRepoURL = `/repos/${this.originRepo}`;
  this.mergeMethod = config.squashMerges ? 'squash' : 'merge';
  this.initialWorkflowStatus = config.initialWorkflowStatus;
}
|
|
|
|
|
2019-09-03 21:56:20 +03:00
|
|
|
// Default commit message (call sites outside this view decide when it applies).
static DEFAULT_COMMIT_MESSAGE = 'Automatically generated by Netlify CMS';
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
user(): Promise<{ name: string; login: string }> {
|
2019-07-24 15:20:41 -07:00
|
|
|
if (!this._userPromise) {
|
2019-12-18 18:16:02 +02:00
|
|
|
this._userPromise = this.request('/user') as Promise<GitHubUser>;
|
2019-07-24 15:20:41 -07:00
|
|
|
}
|
|
|
|
return this._userPromise;
|
2016-08-30 19:06:20 -03:00
|
|
|
}
|
|
|
|
|
2017-08-20 16:02:57 -04:00
|
|
|
/**
 * Resolve to true when the token's user can push to the repo.
 * Repo-fetch failures are logged and rethrown so callers can surface them.
 */
hasWriteAccess() {
  const toPushPermission = (repo: GitHubRepo) => repo.permissions.push;
  const logAndRethrow = (error: Error) => {
    console.error('Problem fetching repo data from GitHub');
    throw error;
  };
  return this.request(this.repoURL).then(toPushPermission).catch(logAndRethrow);
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// Intentionally a no-op: this instance keeps no state that needs resetting.
reset() {
  // no op
}
|
|
|
|
|
2016-08-30 19:06:20 -03:00
|
|
|
/**
 * Build request headers, merging caller-supplied headers over the JSON
 * content type and adding the auth token when one is configured.
 *
 * Returns a promise to keep the signature uniform with async callers
 * (request/requestAllPages await it).
 */
requestHeaders(headers = {}) {
  const baseHeader: Record<string, string> = {
    'Content-Type': 'application/json; charset=utf-8',
    ...headers,
  };

  if (this.token) {
    baseHeader.Authorization = `token ${this.token}`;
  }

  // Single exit point: the original returned the same resolved headers from
  // both branches, so the duplicated early return was redundant.
  return Promise.resolve(baseHeader);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
parseJsonResponse(response: Response) {
|
2018-08-07 14:46:54 -06:00
|
|
|
return response.json().then(json => {
|
2016-08-30 19:06:20 -03:00
|
|
|
if (!response.ok) {
|
|
|
|
return Promise.reject(json);
|
|
|
|
}
|
|
|
|
|
|
|
|
return json;
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Build the absolute request URL: apiRoot + path + query string.
 * A timestamp param is always appended to defeat response caching.
 */
urlFor(path: string, options: Options) {
  const query: string[] = [`ts=${Date.now()}`];
  for (const [key, value] of Object.entries(options.params ?? {})) {
    query.push(`${key}=${encodeURIComponent(value as string)}`);
  }
  return `${this.apiRoot}${path}?${query.join('&')}`;
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Parse a response as JSON when the Content-Type says so, otherwise as text.
 * Non-2xx responses reject with the parsed body.
 */
parseResponse(response: Response) {
  const contentType = response.headers.get('Content-Type');
  if (contentType?.match(/json/)) {
    return this.parseJsonResponse(response);
  }
  return response.text().then(text => (response.ok ? text : Promise.reject(text)));
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
// Normalize any request failure into an APIError tagged with this backend's name.
handleRequestError(error: FetchError, responseStatus: number) {
  throw new APIError(error.message, responseStatus, API_NAME);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Perform an authenticated request against the GitHub API.
 * Failures from fetch or the parser are normalized via handleRequestError.
 */
async request(
  path: string,
  options: Options = {},
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  parser = (response: Response) => this.parseResponse(response),
) {
  const headers = await this.requestHeaders(options.headers || {});
  const url = this.urlFor(path, options);
  let responseStatus: number;
  try {
    const response = await fetch(url, { ...options, headers });
    responseStatus = response.status;
    return await parser(response);
  } catch (error) {
    // responseStatus stays undefined when fetch itself rejected — this matches
    // the original then/catch behavior.
    return this.handleRequestError(error, responseStatus!);
  }
}
|
|
|
|
|
2020-01-24 04:14:33 +02:00
|
|
|
// Processor applied to pagination "next" URLs before they are fetched;
// identity here (overridable elsewhere — confirm at call sites).
nextUrlProcessor() {
  return (url: string) => url;
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Follow the paginated responses (rel 'next') for `url` and concatenate every
 * page's parsed items into a single array.
 */
async requestAllPages<T>(url: string, options: Options = {}) {
  const headers = await this.requestHeaders(options.headers || {});
  const processedURL = this.urlFor(url, options);
  const requestOptions = { ...options, headers };
  const allResponses = await getAllResponses(
    processedURL,
    requestOptions,
    'next',
    this.nextUrlProcessor(),
  );
  const pages: T[][] = await Promise.all(
    allResponses.map((res: Response) => this.parseResponse(res)),
  );
  return pages.flat() as T[];
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Build the content key for an entry. Open authoring keys are additionally
 * prefixed with the fork's 'owner/name' so keys are unique across forks.
 */
generateContentKey(collectionName: string, slug: string) {
  return this.useOpenAuthoring
    ? `${this.repo}/${collectionName}/${slug}`
    : generateContentKey(collectionName, slug);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/** Inverse of generateContentKey: recover the entry slug from a content key. */
slugFromContentKey(contentKey: string, collectionName: string) {
  // Open authoring keys carry an extra '<owner/name>/' prefix to skip.
  const prefixLength = this.useOpenAuthoring
    ? this.repo.length + collectionName.length + 2
    : collectionName.length + 1;
  return contentKey.substring(prefixLength);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
// Draft branch name for an editorial-workflow entry: '<prefix>/<contentKey>'.
generateBranchName(contentKey: string) {
  return `${CMS_BRANCH_PREFIX}/${contentKey}`;
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
// Strip the 'refs/heads/' prefix from a fully qualified ref.
branchNameFromRef(ref: string) {
  return ref.substring('refs/heads/'.length);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
// Strip 'refs/heads/<prefix>/' from a ref to recover the entry's content key.
contentKeyFromRef(ref: string) {
  return ref.substring(`refs/heads/${CMS_BRANCH_PREFIX}/`.length);
}
|
|
|
|
|
2016-08-31 16:41:29 -03:00
|
|
|
/**
 * Resolve the object behind the hidden 'meta/_netlify_cms' ref, creating the
 * ref (seeded with a README blob) on first use when it does not exist yet.
 */
checkMetadataRef() {
  return this.request(`${this.repoURL}/git/refs/meta/_netlify_cms`, {
    // Bypass HTTP caching — a stale ref sha would make later updates fail.
    cache: 'no-store',
  })
    .then(response => response.object)
    .catch(() => {
      // Meta ref doesn't exist
      const readme = {
        raw:
          '# Netlify CMS\n\nThis tree is used by the Netlify CMS to store metadata information for specific files and branches.',
      };

      // Bootstrap: upload the README blob, wrap it in a tree, commit it, then
      // point a new 'meta/_netlify_cms' ref at that commit.
      return this.uploadBlob(readme)
        .then(item =>
          this.request(`${this.repoURL}/git/trees`, {
            method: 'POST',
            body: JSON.stringify({
              tree: [{ path: 'README.md', mode: '100644', type: 'blob', sha: item.sha }],
            }),
          }),
        )
        .then(tree => this.commit('First Commit', tree))
        .then(response => this.createRef('meta', '_netlify_cms', response.sha))
        .then(response => response.object);
    });
}
|
2016-08-30 19:06:20 -03:00
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Persist workflow metadata for `key` as '<key>.json' on the hidden
 * 'meta/_netlify_cms' ref, and cache it locally for five minutes.
 *
 * The semaphore ensures metadata updates are always ordered, even if calls to
 * storeMetadata are not; concurrent metadata updates would otherwise leave the
 * metadata branch unable to update.
 */
async storeMetadata(key: string, data: Metadata) {
  if (!this._metadataSemaphore) {
    this._metadataSemaphore = semaphore(1);
  }
  return new Promise((resolve, reject) =>
    this._metadataSemaphore?.take(async () => {
      try {
        const branchData = await this.checkMetadataRef();
        const file = { path: `${key}.json`, raw: JSON.stringify(data) };

        await this.uploadBlob(file);
        const changeTree = await this.updateTree(branchData.sha, [file as TreeFile]);
        const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
        await this.patchRef('meta', '_netlify_cms', sha);
        localForage.setItem(`gh.meta.${key}`, {
          expires: Date.now() + 300000, // In 5 minutes
          data,
        });
        this._metadataSemaphore?.leave();
        resolve();
      } catch (err) {
        // Release the semaphore on failure too (as deleteMetadata does) —
        // otherwise one failed update would deadlock every subsequent
        // metadata operation on this instance.
        this._metadataSemaphore?.leave();
        reject(err);
      }
    }),
  );
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Remove '<key>.json' from the 'meta/_netlify_cms' ref.
 * Failures are deliberately swallowed — the promise always resolves, since a
 * missing metadata file is not fatal. The shared semaphore keeps this ordered
 * with respect to storeMetadata.
 */
deleteMetadata(key: string) {
  if (!this._metadataSemaphore) {
    this._metadataSemaphore = semaphore(1);
  }
  return new Promise(resolve =>
    this._metadataSemaphore?.take(async () => {
      try {
        const branchData = await this.checkMetadataRef();
        // A null sha in a tree entry tells GitHub to delete that path.
        const file = { path: `${key}.json`, sha: null };

        const changeTree = await this.updateTree(branchData.sha, [file]);
        const { sha } = await this.commit(`Deleting “${key}” metadata`, changeTree);
        await this.patchRef('meta', '_netlify_cms', sha);
        this._metadataSemaphore?.leave();
        resolve();
      } catch (err) {
        // Best-effort delete: release the semaphore and resolve anyway.
        this._metadataSemaphore?.leave();
        resolve();
      }
    }),
  );
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
retrieveMetadata(key: string): Promise<Metadata> {
|
|
|
|
const cache = localForage.getItem<{ data: Metadata; expires: number }>(`gh.meta.${key}`);
|
2018-08-07 14:46:54 -06:00
|
|
|
return cache.then(cached => {
|
|
|
|
if (cached && cached.expires > Date.now()) {
|
2019-12-18 18:16:02 +02:00
|
|
|
return cached.data as Metadata;
|
2018-08-07 14:46:54 -06:00
|
|
|
}
|
|
|
|
console.log(
|
|
|
|
'%c Checking for MetaData files',
|
|
|
|
'line-height: 30px;text-align: center;font-weight: bold',
|
|
|
|
);
|
2019-07-24 15:20:41 -07:00
|
|
|
|
|
|
|
const metadataRequestOptions = {
|
2018-08-07 14:46:54 -06:00
|
|
|
params: { ref: 'refs/meta/_netlify_cms' },
|
2019-12-18 18:16:02 +02:00
|
|
|
headers: { Accept: 'application/vnd.github.v3.raw' },
|
|
|
|
cache: 'no-store' as RequestCache,
|
2019-07-24 15:20:41 -07:00
|
|
|
};
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
const errorHandler = (err: Error) => {
|
2019-09-03 21:56:20 +03:00
|
|
|
if (err.message === 'Not Found') {
|
|
|
|
console.log(
|
|
|
|
'%c %s does not have metadata',
|
|
|
|
'line-height: 30px;text-align: center;font-weight: bold',
|
|
|
|
key,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
throw err;
|
|
|
|
};
|
|
|
|
|
2019-08-24 10:54:59 -07:00
|
|
|
if (!this.useOpenAuthoring) {
|
2019-07-24 15:20:41 -07:00
|
|
|
return this.request(`${this.repoURL}/contents/${key}.json`, metadataRequestOptions)
|
2019-12-18 18:16:02 +02:00
|
|
|
.then((response: string) => JSON.parse(response))
|
2019-09-03 21:56:20 +03:00
|
|
|
.catch(errorHandler);
|
2019-07-24 15:20:41 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
const [user, repo] = key.split('/');
|
|
|
|
return this.request(`/repos/${user}/${repo}/contents/${key}.json`, metadataRequestOptions)
|
2019-12-18 18:16:02 +02:00
|
|
|
.then((response: string) => JSON.parse(response))
|
2019-09-03 21:56:20 +03:00
|
|
|
.catch(errorHandler);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * Read a file from the repo. When no blob sha is supplied it is resolved from
 * the tree first; content is fetched through the shared readFile helper, which
 * consults the localForage cache keyed by sha.
 */
async readFile(
  path: string,
  sha?: string | null,
  {
    branch = this.branch,
    repoURL = this.repoURL,
    parseText = true,
  }: {
    branch?: string;
    repoURL?: string;
    parseText?: boolean;
  } = {},
) {
  const blobSha = sha || (await this.getFileSha(path, { repoURL, branch }));
  const fetchContent = () => this.fetchBlobContent({ sha: blobSha as string, repoURL, parseText });
  return readFile(blobSha, fetchContent, localForage, parseText);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Download a git blob. With `parseText` the base64 payload is decoded as a
 * UTF-8 string; otherwise it is returned as a binary Blob.
 */
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
  const result: GitHubBlob = await this.request(`${repoURL}/git/blobs/${sha}`);

  if (parseText) {
    // treat content as a utf-8 string
    return Base64.decode(result.content);
  }
  // treat content as binary and convert to blob
  const binary = Base64.atob(result.content);
  const byteArray = Uint8Array.from(binary, char => char.charCodeAt(0));
  return new Blob([byteArray]);
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * List the blobs under `path` (up to `depth` levels deep) on `branch`.
 * A missing folder resolves to an empty list via replace404WithEmptyArray.
 */
async listFiles(
  path: string,
  { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {},
): Promise<{ type: string; id: string; name: string; path: string; size: number }[]> {
  const folder = trim(path, '/');
  return this.request(`${repoURL}/git/trees/${branch}:${folder}`, {
    // GitHub API supports recursive=1 for getting the entire recursive tree
    // or omitting it to get the non-recursive tree
    params: depth > 1 ? { recursive: 1 } : {},
  })
    .then((res: GitHubTree) =>
      res.tree
        // filter only files and up to the required depth
        .filter(file => file.type === 'blob' && file.path.split('/').length <= depth)
        .map(file => ({
          type: file.type,
          id: file.sha,
          name: basename(file.path),
          path: `${folder}/${file.path}`,
          size: file.size,
        })),
    )
    .catch(replace404WithEmptyArray);
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * Load an unpublished (editorial workflow) entry: its metadata, its file
 * content from the draft branch, and whether it modifies an already-published
 * file. ANY failure (including missing metadata) is surfaced as an
 * EditorialWorkflowError so callers treat the entry as not under workflow.
 */
async readUnpublishedBranchFile(contentKey: string) {
  try {
    // Reject when the metadata has no entry path recorded.
    const metaData = await this.retrieveMetadata(contentKey).then(data =>
      data.objects.entry.path ? data : Promise.reject(null),
    );
    // Open authoring: the draft lives on the fork named by the first two
    // segments of the content key.
    const repoURL = this.useOpenAuthoring
      ? `/repos/${contentKey
          .split('/')
          .slice(0, 2)
          .join('/')}`
      : this.repoURL;

    const [fileData, isModification] = await Promise.all([
      this.readFile(metaData.objects.entry.path, null, {
        branch: metaData.branch,
        repoURL,
      }) as Promise<string>,
      this.isUnpublishedEntryModification(metaData.objects.entry.path),
    ]);

    return {
      metaData,
      fileData,
      isModification,
      slug: this.slugFromContentKey(contentKey, metaData.collection),
    };
  } catch (e) {
    throw new EditorialWorkflowError('content is not under editorial workflow', true);
  }
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
isUnpublishedEntryModification(path: string) {
|
2019-07-24 15:20:41 -07:00
|
|
|
return this.readFile(path, null, {
|
2020-01-15 00:15:14 +02:00
|
|
|
branch: this.branch,
|
2019-09-03 21:56:20 +03:00
|
|
|
repoURL: this.originRepoURL,
|
2019-07-24 15:20:41 -07:00
|
|
|
})
|
2018-08-07 14:46:54 -06:00
|
|
|
.then(() => true)
|
2019-12-18 18:16:02 +02:00
|
|
|
.catch((err: Error) => {
|
2018-08-07 14:46:54 -06:00
|
|
|
if (err.message && err.message === 'Not Found') {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
throw err;
|
|
|
|
});
|
2017-03-15 18:47:18 -07:00
|
|
|
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/** List open PRs into this.branch whose head matches `branchName`. */
getPRsForBranchName = (branchName: string) => {
  // Get PRs with a `head` of `branchName`. Note that this is a
  // substring match, so we need to check that the `head.ref` of
  // at least one of the returned objects matches `branchName`.
  return this.requestAllPages<{ head: { ref: string } }>(`${this.repoURL}/pulls`, {
    params: {
      head: branchName,
      state: 'open',
      base: this.branch,
    },
  });
};
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
getUpdatedOpenAuthoringMetadata = async (
|
|
|
|
contentKey: string,
|
|
|
|
{ metadata: metadataArg }: { metadata?: Metadata } = {},
|
|
|
|
) => {
|
2019-07-24 15:20:41 -07:00
|
|
|
const metadata = metadataArg || (await this.retrieveMetadata(contentKey)) || {};
|
|
|
|
const { pr: prMetadata, status } = metadata;
|
|
|
|
|
|
|
|
// Set the status to draft if no corresponding PR is recorded
|
|
|
|
if (!prMetadata && status !== 'draft') {
|
|
|
|
const newMetadata = { ...metadata, status: 'draft' };
|
|
|
|
this.storeMetadata(contentKey, newMetadata);
|
|
|
|
return newMetadata;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If no status is recorded, but there is a PR, check if the PR is
|
|
|
|
// closed or not and update the status accordingly.
|
|
|
|
if (prMetadata) {
|
|
|
|
const { number: prNumber } = prMetadata;
|
2019-09-03 21:56:20 +03:00
|
|
|
const originPRInfo = await this.getPullRequest(prNumber);
|
2019-07-24 15:20:41 -07:00
|
|
|
const { state: currentState, merged_at: mergedAt } = originPRInfo;
|
|
|
|
if (currentState === 'closed' && mergedAt) {
|
|
|
|
// The PR has been merged; delete the unpublished entry
|
2019-11-28 05:39:33 +02:00
|
|
|
const { collection } = metadata;
|
|
|
|
const slug = this.slugFromContentKey(contentKey, collection);
|
|
|
|
this.deleteUnpublishedEntry(collection, slug);
|
2019-07-24 15:20:41 -07:00
|
|
|
return;
|
|
|
|
} else if (currentState === 'closed' && !mergedAt) {
|
|
|
|
if (status !== 'draft') {
|
|
|
|
const newMetadata = { ...metadata, status: 'draft' };
|
|
|
|
await this.storeMetadata(contentKey, newMetadata);
|
|
|
|
return newMetadata;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (status !== 'pending_review') {
|
|
|
|
// PR is open and has not been merged
|
|
|
|
const newMetadata = { ...metadata, status: 'pending_review' };
|
|
|
|
await this.storeMetadata(contentKey, newMetadata);
|
|
|
|
return newMetadata;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return metadata;
|
|
|
|
};
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Migrate a pre-v1 entry from the 'cms/<slug>' branch layout to
 * 'cms/<collection>/<slug>': recreate the branch and PR under the new name,
 * write v1 metadata, then tear down the old PR, branch, and metadata.
 * Returns the new branch.
 */
async migrateToVersion1(branch: Branch, metaData: Metadata) {
  // hard code key/branch generation logic to ignore future changes
  const oldContentKey = branch.ref.substring(`refs/heads/cms/`.length);
  const newContentKey = `${metaData.collection}/${oldContentKey}`;
  const newBranchName = `cms/${newContentKey}`;

  // create new branch and pull request in new format
  // (pre-v1 metadata stored pr.head as a bare sha string)
  const newBranch = await this.createBranch(newBranchName, (metaData.pr as PR).head as string);
  const pr = await this.createPR(metaData.commitMessage, newBranchName);

  // store new metadata
  await this.storeMetadata(newContentKey, {
    ...metaData,
    pr: {
      number: pr.number,
      head: pr.head.sha,
    },
    branch: newBranchName,
    version: '1',
  });

  // remove old data
  await this.closePR(metaData.pr as PR);
  await this.deleteBranch(metaData.branch);
  await this.deleteMetadata(oldContentKey);

  return newBranch;
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Upgrade a draft branch to the current metadata layout when needed.
 * Unversioned metadata marks a pre-v1 entry ('cms/slug' naming), which is
 * migrated to 'cms/collection/slug'; otherwise the branch is returned as-is.
 */
async migrateBranch(branch: Branch) {
  const contentKey = this.contentKeyFromRef(branch.ref);
  const metadata = await this.retrieveMetadata(contentKey);
  if (metadata.version) {
    return branch;
  }
  // migrate branch from cms/slug to cms/collection/slug
  return this.migrateToVersion1(branch, metadata);
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * List all draft (cms/*) branches that represent live unpublished entries.
 * Open authoring: refresh each entry's metadata and drop removed entries.
 * Otherwise: keep only branches with an open PR, migrating legacy branches.
 */
async listUnpublishedBranches(): Promise<Branch[]> {
  console.log(
    '%c Checking for Unpublished entries',
    'line-height: 30px;text-align: center;font-weight: bold',
  );

  try {
    // A 404 simply means no cms/* refs exist yet.
    const branches: Branch[] = await this.request(`${this.repoURL}/git/refs/heads/cms`).catch(
      replace404WithEmptyArray,
    );

    let filterFunction;
    if (this.useOpenAuthoring) {
      const getUpdatedOpenAuthoringBranches = flow([
        map(async (branch: Branch) => {
          const contentKey = this.contentKeyFromRef(branch.ref);
          const metadata = await this.getUpdatedOpenAuthoringMetadata(contentKey);
          // filter out removed entries
          if (!metadata) {
            return Promise.reject('Unpublished entry was removed');
          }
          return branch;
        }),
        onlySuccessfulPromises,
      ]);
      filterFunction = getUpdatedOpenAuthoringBranches;
    } else {
      const prs = await this.getPRsForBranchName(CMS_BRANCH_PREFIX);
      const onlyBranchesWithOpenPRs = flowAsync([
        filter(({ ref }: Branch) => prs.some(pr => pr.head.ref === this.branchNameFromRef(ref))),
        map((branch: Branch) => this.migrateBranch(branch)),
        onlySuccessfulPromises,
      ]);

      filterFunction = onlyBranchesWithOpenPRs;
    }

    return await filterFunction(branches);
  } catch (err) {
    console.log(
      '%c No Unpublished entries',
      'line-height: 30px;text-align: center;font-weight: bold',
    );
    throw err;
  }
}
|
|
|
|
|
2019-02-08 12:26:59 -05:00
|
|
|
/**
 * Retrieve statuses for a given SHA. Unrelated to the editorial workflow
 * concept of entry "status". Useful for things like deploy preview links.
 */
async getStatuses(sha: string) {
  try {
    const resp: { statuses: GitHubCommitStatus[] } = await this.request(
      `${this.originRepoURL}/commits/${sha}/status`,
    );
    // Collapse GitHub's four states into the lib's Success/Other preview states.
    return resp.statuses.map(s => ({
      context: s.context,
      // eslint-disable-next-line @typescript-eslint/camelcase
      target_url: s.target_url,
      state:
        s.state === GitHubCommitStatusState.Success ? PreviewState.Success : PreviewState.Other,
    }));
  } catch (err) {
    // An unknown sha is treated as "no statuses yet" rather than an error.
    if (err && err.message && err.message === 'Ref not found') {
      return [];
    }
    throw err;
  }
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * Upload all blobs for an entry and its media, then either commit them
 * directly to this.branch (simple publishing) or hand them to the editorial
 * workflow (draft branch + PR) when options.useWorkflow is set.
 */
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
  // entry may be null when only media files are persisted.
  const files = entry ? mediaFiles.concat(entry) : mediaFiles;
  const uploadPromises = files.map(file => this.uploadBlob(file));
  await Promise.all(uploadPromises);

  if (!options.useWorkflow) {
    // Direct publish: new tree on top of the default branch head, commit, and
    // fast-forward the branch ref.
    return this.getDefaultBranch()
      .then(branchData =>
        this.updateTree(branchData.commit.sha, files as { sha: string; path: string }[]),
      )
      .then(changeTree => this.commit(options.commitMessage, changeTree))
      .then(response => this.patchBranch(this.branch, response.sha));
  } else {
    // Editorial workflow: record sha/path pairs (paths normalized to have no
    // leading slash) so the media files can be tracked in entry metadata.
    const mediaFilesList = (mediaFiles as { sha: string; path: string }[]).map(
      ({ sha, path }) => ({
        path: trimStart(path, '/'),
        sha,
      }),
    );
    return this.editorialWorkflowGit(
      files as TreeFile[],
      entry as Entry,
      mediaFilesList,
      options,
    );
  }
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * Resolve the blob SHA for a file at `path` on the given branch.
 *
 * We request the containing tree using extended SHA-1 syntax
 * (`<rev>:<path>`), which fetches a tree directly without recursing
 * from the repository root.
 *
 * @param path - Repository-relative file path.
 * @param opts - Optional repo URL and branch overrides.
 * @throws {APIError} 404 when the file is not present in the tree.
 */
getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
  const segments = path.split('/');
  const filename = last(segments);
  const directory = initial(segments).join('/');
  const fileDataPath = encodeURIComponent(directory);
  const fileDataURL = `${repoURL}/git/trees/${branch}:${fileDataPath}`;

  return this.request(fileDataURL, { cache: 'no-store' }).then((resp: GitHubTree) => {
    const match = resp.tree.find(entry => entry.path === filename);
    if (!match) {
      throw new APIError('Not Found', 404, API_NAME);
    }
    return match.sha;
  });
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * Delete a published file from the current branch via the contents API.
 *
 * @param path - Repository-relative path of the file to delete.
 * @param message - Commit message for the deletion commit.
 * @returns Rejects immediately for Open Authoring users, who may not
 *   delete published entries.
 */
deleteFile(path: string, message: string) {
  if (this.useOpenAuthoring) {
    return Promise.reject('Cannot delete published entries as an Open Authoring user!');
  }

  const branch = this.branch;

  // The contents DELETE endpoint requires the current blob sha.
  return this.getFileSha(path, { branch }).then(sha => {
    const params: { sha: string; message: string; branch: string; author?: { date: string } } = {
      sha,
      message,
      branch,
    };
    if (this.commitAuthor) {
      // Stamp the configured commit author with the current time.
      params.author = {
        ...this.commitAuthor,
        date: new Date().toISOString(),
      };
    }
    const fileURL = `${this.repoURL}/contents/${path}`;
    return this.request(fileURL, { method: 'DELETE', params });
  });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Create a branch at `sha` and immediately open a pull request from it
 * into the default branch.
 *
 * @param branchName - Name for the new branch.
 * @param sha - Commit SHA the branch should point to.
 * @param commitMessage - Used as the pull request title.
 * @returns The created pull request response.
 */
async createBranchAndPullRequest(branchName: string, sha: string, commitMessage: string) {
  await this.createBranch(branchName, sha);
  const pullRequest = await this.createPR(commitMessage, branchName);
  return pullRequest;
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Commit entry + media changes through the editorial workflow.
 *
 * For a new entry this creates a CMS branch (and, outside Open Authoring,
 * a pull request) and stores fresh workflow metadata. For an existing
 * unpublished entry it rebases the CMS branch onto the default branch,
 * applies the changes (including removal of media files dropped since the
 * last save), and force-updates the branch and metadata.
 *
 * @param files - Tree entries (entry file + media) to commit.
 * @param entry - The entry being persisted.
 * @param mediaFilesList - Normalized { path, sha } media records for metadata.
 * @param options - Persist options (status, commit message, parsed data, …).
 */
async editorialWorkflowGit(
  files: TreeFile[],
  entry: Entry,
  mediaFilesList: MediaFile[],
  options: PersistOptions,
) {
  const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
  const branchName = this.generateBranchName(contentKey);
  const unpublished = options.unpublished || false;
  if (!unpublished) {
    // Open new editorial review workflow for this entry - Create new metadata and commit to new branch
    // Kick off the user lookup early; awaited only after the commit work.
    const userPromise = this.user();
    const branchData = await this.getDefaultBranch();
    const changeTree = await this.updateTree(branchData.commit.sha, files);
    const commitResponse = await this.commit(options.commitMessage, changeTree);

    // Open Authoring forks get only a branch; a PR is opened later when the
    // entry moves to review (see updateUnpublishedEntryStatus).
    let pr;
    if (this.useOpenAuthoring) {
      await this.createBranch(branchName, commitResponse.sha);
    } else {
      pr = await this.createBranchAndPullRequest(
        branchName,
        commitResponse.sha,
        options.commitMessage,
      );
    }

    const user = await userPromise;
    return this.storeMetadata(contentKey, {
      type: 'PR',
      pr: pr
        ? {
            number: pr.number,
            head: pr.head && pr.head.sha,
          }
        : undefined,
      user: user.name || user.login,
      status: options.status || this.initialWorkflowStatus,
      branch: branchName,
      collection: options.collectionName as string,
      commitMessage: options.commitMessage,
      title: options.parsedData && options.parsedData.title,
      description: options.parsedData && options.parsedData.description,
      objects: {
        entry: {
          path: entry.path,
          sha: entry.sha as string,
        },
        files: mediaFilesList,
      },
      timeStamp: new Date().toISOString(),
      version: CURRENT_METADATA_VERSION,
    });
  } else {
    // Entry is already on editorial review workflow - just update metadata and commit to existing branch
    const metadata = await this.retrieveMetadata(contentKey);
    // mark media files to remove
    const metadataMediaFiles: MediaFile[] = get(metadata, 'objects.files', []);
    // Files present in the stored metadata but absent from the new media
    // list are deleted by committing them with a null sha.
    const mediaFilesToRemove: { path: string; sha: string | null }[] = differenceBy(
      metadataMediaFiles,
      mediaFilesList,
      'path',
    ).map(file => ({ ...file, type: 'blob', sha: null }));

    // rebase the branch before applying new changes
    const rebasedHead = await this.rebaseBranch(branchName);
    const treeFiles = mediaFilesToRemove.concat(files);
    const changeTree = await this.updateTree(rebasedHead.sha, treeFiles);
    const commit = await this.commit(options.commitMessage, changeTree);
    const { title, description } = options.parsedData || {};

    // Point the stored PR head (if any) at the new commit.
    const pr = metadata.pr ? { ...metadata.pr, head: commit.sha } : undefined;
    const objects = {
      entry: { path: entry.path, sha: entry.sha as string },
      files: mediaFilesList,
    };

    const updatedMetadata = { ...metadata, pr, title, description, objects };

    // Persist metadata before moving the branch; force is required because
    // the rebase rewrote history.
    await this.storeMetadata(contentKey, updatedMetadata);
    return this.patchBranch(branchName, commit.sha, { force: true });
  }
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
async compareBranchToDefault(
|
2019-12-18 18:16:02 +02:00
|
|
|
branchName: string,
|
2019-12-20 19:33:43 +02:00
|
|
|
): Promise<{ baseCommit: GitHubCompareBaseCommit; commits: GitHubCompareCommits }> {
|
|
|
|
const headReference = await this.getHeadReference(branchName);
|
|
|
|
const { base_commit: baseCommit, commits }: GitHubCompareResponse = await this.request(
|
|
|
|
`${this.originRepoURL}/compare/${this.branch}...${headReference}`,
|
|
|
|
);
|
|
|
|
return { baseCommit, commits };
|
|
|
|
}
|
2017-08-14 09:00:47 -04:00
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
async getCommitsDiff(baseSha: string, headSha: string): Promise<GitHubCompareFiles> {
|
|
|
|
const { files }: GitHubCompareResponse = await this.request(
|
|
|
|
`${this.repoURL}/compare/${baseSha}...${headSha}`,
|
|
|
|
);
|
|
|
|
return files;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Re-create a single commit on top of `baseCommit`, preserving its message
 * and author/committer information.
 *
 * The commit's own diff (against its first parent) is re-applied as tree
 * entries over the new base; a null sha marks a deletion.
 *
 * @param baseCommit - The new parent commit.
 * @param commit - The commit to replay.
 * @returns The newly created commit, typed as a compare-commit so it can
 *   feed the next rebase iteration.
 */
async rebaseSingleCommit(baseCommit: GitHubCompareCommit, commit: GitHubCompareCommit) {
  // first get the diff between the commits
  const files = await this.getCommitsDiff(commit.parents[0].sha, commit.sha);
  const treeFiles = files.reduce((arr, file) => {
    if (file.status === 'removed') {
      // delete the file
      arr.push({ sha: null, path: file.filename });
    } else if (file.status === 'renamed') {
      // delete the previous file
      arr.push({ sha: null, path: file.previous_filename as string });
      // add the renamed file
      arr.push({ sha: file.sha, path: file.filename });
    } else {
      // add the file (covers 'added' and 'modified')
      arr.push({ sha: file.sha, path: file.filename });
    }
    return arr;
  }, [] as { sha: string | null; path: string }[]);

  // create a tree with baseCommit as the base with the diff applied
  const tree = await this.updateTree(baseCommit.sha, treeFiles);
  const { message, author, committer } = commit.commit;

  // create a new commit from the updated tree
  // NOTE(review): the double cast bridges the create-commit and
  // compare-commit response types, which share the fields used downstream.
  return (this.createCommit(
    message,
    tree.sha,
    [baseCommit.sha],
    author,
    committer,
  ) as unknown) as GitHubCompareCommit;
}
|
|
|
|
|
|
|
|
/**
 * Rebase an array of commits one-by-one, starting from a given base SHA
 *
 * @param baseCommit - The commit to rebase onto.
 * @param commits - Commits to replay, oldest first (as returned by compare).
 * @returns The new head commit after the rebase (or the existing head when
 *   no rebase is needed).
 */
async rebaseCommits(baseCommit: GitHubCompareCommit, commits: GitHubCompareCommits) {
  /**
   * If the parent of the first commit already matches the target base,
   * return commits as is.
   */
  if (commits.length === 0 || commits[0].parents[0].sha === baseCommit.sha) {
    // NOTE(review): for an empty list `last` yields undefined; the cast
    // assumes callers handle that case — verify against call sites.
    const head = last(commits) as GitHubCompareCommit;
    return head;
  } else {
    /**
     * Re-create each commit over the new base, applying each to the previous,
     * changing only the parent SHA and tree for each, but retaining all other
     * info, such as the author/committer data.
     */
    // Sequential promise chain: each rebase must wait for the previous
    // commit so it can use it as the new parent.
    const newHeadPromise = commits.reduce((lastCommitPromise, commit) => {
      return lastCommitPromise.then(newParent => {
        const parent = newParent;
        const commitToRebase = commit;
        return this.rebaseSingleCommit(parent, commitToRebase);
      });
    }, Promise.resolve(baseCommit));
    return newHeadPromise;
  }
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
/**
 * Rebase a CMS branch onto the current default branch head.
 *
 * @param branchName - Branch to rebase.
 * @returns The rebased head commit. Errors are logged and rethrown.
 */
async rebaseBranch(branchName: string) {
  try {
    // Diff the branch against the default branch, then replay its commits
    // on top of the default branch head.
    const { baseCommit, commits } = await this.compareBranchToDefault(branchName);
    return await this.rebaseCommits(baseCommit, commits);
  } catch (error) {
    console.error(error);
    throw error;
  }
}
|
|
|
|
|
|
|
|
/**
 * Get a pull request by PR number.
 *
 * @param prNumber - The pull request number on the origin repo.
 * @returns The pull request response from the GitHub API.
 */
getPullRequest(prNumber: number) {
  // Fixed: the URL previously ended with a stray " }" ("/pulls/123 }"),
  // which is not a valid GitHub API path.
  return this.request(`${this.originRepoURL}/pulls/${prNumber}`);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Update the workflow status of an unpublished entry.
 *
 * Outside Open Authoring this simply rewrites the stored metadata. Under
 * Open Authoring the PR state is kept in sync with the status: moving back
 * to 'draft' closes the PR, moving to 'pending_review' (re)opens or creates
 * one, and 'pending_publish' is disallowed entirely.
 *
 * @param collectionName - Collection the entry belongs to.
 * @param slug - Entry slug.
 * @param status - Target workflow status.
 * @throws {Error} When an Open Authoring entry is set to 'pending_publish'.
 */
async updateUnpublishedEntryStatus(collectionName: string, slug: string, status: string) {
  const contentKey = this.generateContentKey(collectionName, slug);
  const metadata = await this.retrieveMetadata(contentKey);

  if (!this.useOpenAuthoring) {
    return this.storeMetadata(contentKey, {
      ...metadata,
      status,
    });
  }

  if (status === 'pending_publish') {
    throw new Error('Open Authoring entries may not be set to the status "pending_publish".');
  }

  const { pr: prMetadata } = metadata;
  if (prMetadata) {
    const { number: prNumber } = prMetadata;
    // Check the PR's live state on the origin repo before toggling it.
    const originPRInfo = await this.getPullRequest(prNumber);
    const { state } = originPRInfo;
    if (state === 'open' && status === 'draft') {
      await this.closePR(prMetadata);
      return this.storeMetadata(contentKey, {
        ...metadata,
        status,
      });
    }

    if (state === 'closed' && status === 'pending_review') {
      await this.openPR(prMetadata);
      return this.storeMetadata(contentKey, {
        ...metadata,
        status,
      });
    }
  }

  // No PR yet: entering review creates one and records it in metadata.
  if (!prMetadata && status === 'pending_review') {
    const branchName = this.generateBranchName(contentKey);
    const commitMessage = metadata.commitMessage || API.DEFAULT_COMMIT_MESSAGE;
    const { number, head } = await this.createPR(commitMessage, branchName);
    return this.storeMetadata(contentKey, {
      ...metadata,
      pr: { number, head },
      status,
    });
  }
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Delete an unpublished entry: close its PR (when one exists), then remove
 * its CMS branch and stored metadata.
 *
 * @param collectionName - Collection the entry belongs to.
 * @param slug - Entry slug.
 */
async deleteUnpublishedEntry(collectionName: string, slug: string) {
  const contentKey = this.generateContentKey(collectionName, slug);
  const branchName = this.generateBranchName(contentKey);
  const metadata = await this.retrieveMetadata(contentKey);
  if (metadata && metadata.pr) {
    await this.closePR(metadata.pr);
  }
  await this.deleteBranch(branchName);
  return this.deleteMetadata(contentKey);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Publish an unpublished entry by merging its PR, then cleaning up the
 * CMS branch and metadata. The merge happens first so a failed merge
 * leaves the branch and metadata intact.
 *
 * @param collectionName - Collection the entry belongs to.
 * @param slug - Entry slug.
 * @returns The entry's (pre-deletion) metadata.
 */
async publishUnpublishedEntry(collectionName: string, slug: string) {
  const contentKey = this.generateContentKey(collectionName, slug);
  const branchName = this.generateBranchName(contentKey);
  const metadata = await this.retrieveMetadata(contentKey);
  await this.mergePR(metadata.pr as PR, metadata.objects);
  await this.deleteBranch(branchName);
  await this.deleteMetadata(contentKey);

  return metadata;
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Create a Git reference pointing at `sha`.
 *
 * @param type - Ref namespace, e.g. 'heads' for branches.
 * @param name - Ref name within that namespace.
 * @param sha - Commit SHA the new ref should point to.
 */
createRef(type: string, name: string, sha: string) {
  const body = JSON.stringify({ ref: `refs/${type}/${name}`, sha });
  return this.request(`${this.repoURL}/git/refs`, { method: 'POST', body });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Move an existing Git reference to a new SHA.
 *
 * @param type - Ref namespace, e.g. 'heads'.
 * @param name - Ref name within that namespace.
 * @param sha - Target commit SHA.
 * @param opts - Set `force` to allow non-fast-forward updates.
 */
patchRef(type: string, name: string, sha: string, opts: { force?: boolean } = {}) {
  const force = opts.force || false;
  const refURL = `${this.repoURL}/git/refs/${type}/${encodeURIComponent(name)}`;
  const body = JSON.stringify({ sha, force });
  return this.request(refURL, { method: 'PATCH', body });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Delete a Git reference.
 *
 * @param type - Ref namespace, e.g. 'heads'.
 * @param name - Ref name within that namespace.
 */
deleteRef(type: string, name: string) {
  const refURL = `${this.repoURL}/git/refs/${type}/${encodeURIComponent(name)}`;
  return this.request(refURL, { method: 'DELETE' });
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
getDefaultBranch(): Promise<GitHubBranch> {
|
|
|
|
return this.request(`${this.originRepoURL}/branches/${encodeURIComponent(this.branch)}`);
|
2016-08-31 16:41:29 -03:00
|
|
|
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Create a branch pointing at `sha`.
 *
 * @param branchName - Name for the new branch.
 * @param sha - Commit SHA the branch should point to.
 */
createBranch(branchName: string, sha: string) {
  // Branches live under the 'heads' ref namespace.
  const refType = 'heads';
  return this.createRef(refType, branchName, sha);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Whether a branch name belongs to the CMS (carries the CMS branch prefix).
 *
 * @param branchName - Branch name to check.
 */
assertCmsBranch(branchName: string) {
  const cmsPrefix = `${CMS_BRANCH_PREFIX}/`;
  return branchName.startsWith(cmsPrefix);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Move a branch to a new SHA. Force updates are only permitted on branches
 * the CMS owns, to avoid rewriting history on user branches.
 *
 * @param branchName - Branch to update.
 * @param sha - Target commit SHA.
 * @param opts - Set `force` for non-fast-forward updates (CMS branches only).
 * @throws {Error} When forcing an update on a non-CMS branch.
 */
patchBranch(branchName: string, sha: string, opts: { force?: boolean } = {}) {
  const force = opts.force || false;
  if (force) {
    if (!this.assertCmsBranch(branchName)) {
      throw Error(`Only CMS branches can be force updated, cannot force update ${branchName}`);
    }
  }
  return this.patchRef('heads', branchName, sha, { force });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Delete a branch. Deletion is idempotent: a missing ref means the branch
 * was already deleted, so that specific error is treated as success.
 *
 * @param branchName - Branch to delete.
 */
deleteBranch(branchName: string) {
  return this.deleteRef('heads', branchName).catch((err: Error) => {
    const alreadyDeleted = err.message === 'Reference does not exist';
    if (!alreadyDeleted) {
      console.error(err);
      return Promise.reject(err);
    }
    return Promise.resolve();
  });
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
/**
 * Build the head reference for PR/compare endpoints. Under Open Authoring
 * the branch lives on the user's fork, so the reference is prefixed with
 * the fork owner's login ("owner:branch").
 *
 * @param head - Branch name.
 */
async getHeadReference(head: string) {
  if (!this.useOpenAuthoring) {
    return head;
  }
  const user = await this.user();
  return `${user.login}:${head}`;
}
|
|
|
|
|
|
|
|
/**
 * Open a pull request from `head` into the default branch on the origin repo.
 *
 * @param title - Pull request title.
 * @param head - Source branch (owner-prefixed automatically for Open Authoring).
 */
async createPR(title: string, head: string) {
  const headReference = await this.getHeadReference(head);
  const payload = {
    title,
    body: DEFAULT_PR_BODY,
    head: headReference,
    base: this.branch,
  };
  return this.request(`${this.originRepoURL}/pulls`, {
    method: 'POST',
    body: JSON.stringify(payload),
  });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Re-open a previously closed pull request.
 *
 * @param pullRequest - PR metadata; only `number` is used.
 */
async openPR(pullRequest: PR) {
  const { number } = pullRequest;
  console.log('%c Re-opening PR', 'line-height: 30px;text-align: center;font-weight: bold');
  const body = JSON.stringify({ state: 'open' });
  return this.request(`${this.originRepoURL}/pulls/${number}`, {
    method: 'PATCH',
    body,
  });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Close a pull request without merging it.
 *
 * @param pullRequest - PR metadata; only `number` is used.
 */
closePR(pullRequest: PR) {
  const { number } = pullRequest;
  console.log('%c Deleting PR', 'line-height: 30px;text-align: center;font-weight: bold');
  const body = JSON.stringify({ state: 'closed' });
  return this.request(`${this.originRepoURL}/pulls/${number}`, {
    method: 'PATCH',
    body,
  });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Merge a pull request via the GitHub merge endpoint. If GitHub refuses the
 * merge (HTTP 405, e.g. merge conflicts), fall back to force-merging the
 * entry's files directly onto the default branch.
 *
 * @param pullrequest - PR metadata with the head SHA and PR number.
 * @param objects - Entry/media file records used for the force-merge fallback.
 */
mergePR(pullrequest: PR, objects: MetaDataObjects) {
  const { head: headSha, number } = pullrequest;
  console.log('%c Merging PR', 'line-height: 30px;text-align: center;font-weight: bold');
  return this.request(`${this.originRepoURL}/pulls/${number}/merge`, {
    method: 'PUT',
    body: JSON.stringify({
      // eslint-disable-next-line @typescript-eslint/camelcase
      commit_message: MERGE_COMMIT_MESSAGE,
      // Passing the head sha makes GitHub reject the merge if the branch
      // moved since we read the metadata.
      sha: headSha,
      // eslint-disable-next-line @typescript-eslint/camelcase
      merge_method: this.mergeMethod,
    }),
  }).catch(error => {
    // 405 Method Not Allowed signals the PR is not mergeable as-is.
    if (error instanceof APIError && error.status === 405) {
      return this.forceMergePR(objects);
    } else {
      throw error;
    }
  });
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Fallback merge path: when a PR cannot be merged automatically, commit the
 * entry and media files directly onto the default branch head.
 *
 * @param objects - The entry and media file records to commit.
 */
forceMergePR(objects: MetaDataObjects) {
  const files = objects.files.concat(objects.entry);
  let commitMessage = 'Automatically generated. Merged on Netlify CMS\n\nForce merge of:';
  for (const file of files) {
    commitMessage += `\n* "${file.path}"`;
  }
  console.log(
    '%c Automatic merge not possible - Forcing merge.',
    'line-height: 30px;text-align: center;font-weight: bold',
  );
  return this.getDefaultBranch()
    .then(branchData => this.updateTree(branchData.commit.sha, files))
    .then(changeTree => this.commit(commitMessage, changeTree))
    .then(response => this.patchBranch(this.branch, response.sha));
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Base64-encode a string, wrapped in a promise to match the async
 * `toBase64` interface used by uploadBlob.
 *
 * @param str - Raw string content to encode.
 */
toBase64(str: string) {
  const encoded = Base64.encode(str);
  return Promise.resolve(encoded);
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
/**
 * Upload an item's content as a Git blob and record the resulting SHA on
 * the item itself (mutates `item.sha`).
 *
 * @param item - File-like object; uses its own `toBase64` method when it
 *   has one, otherwise encodes `item.raw` via this.toBase64.
 * @returns The same item, with `sha` set from the blob response.
 */
uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
  // lodash `result` invokes item.toBase64() when present, else falls back
  // to encoding item.raw. `partial` is safe here: toBase64 does not use `this`.
  const content = result(item, 'toBase64', partial(this.toBase64, item.raw as string));

  return content.then(contentBase64 =>
    this.request(`${this.repoURL}/git/blobs`, {
      method: 'POST',
      body: JSON.stringify({
        content: contentBase64,
        encoding: 'base64',
      }),
    }).then(response => {
      // Side effect: attach the new blob sha so callers (e.g. persistFiles)
      // can build tree entries from the same objects.
      item.sha = response.sha;
      return item;
    }),
  );
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
/**
 * Create a new tree on top of `baseSha` containing the given files.
 * A null sha marks a file for deletion.
 *
 * @param baseSha - SHA of the base tree/commit.
 * @param files - Files to add/update/delete; paths are normalized to drop
 *   any leading slash.
 * @returns The created tree, annotated with `parentSha` for later commits.
 */
async updateTree(baseSha: string, files: { path: string; sha: string | null }[]) {
  const tree: TreeEntry[] = [];
  for (const file of files) {
    tree.push({
      path: trimStart(file.path, '/'),
      mode: '100644',
      type: 'blob',
      sha: file.sha,
    });
  }

  const createdTree = await this.createTree(baseSha, tree);
  return { ...createdTree, parentSha: baseSha };
}
|
|
|
|
|
2019-12-20 19:33:43 +02:00
|
|
|
createTree(baseSha: string, tree: TreeEntry[]): Promise<GitHubTree> {
|
2018-08-07 14:46:54 -06:00
|
|
|
return this.request(`${this.repoURL}/git/trees`, {
|
|
|
|
method: 'POST',
|
2019-12-18 18:16:02 +02:00
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
2017-08-14 09:00:47 -04:00
|
|
|
body: JSON.stringify({ base_tree: baseSha, tree }),
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
/**
 * Create a commit for a change tree, using the tree's recorded parent
 * (if any) as the commit parent.
 *
 * @param message - Commit message.
 * @param changeTree - Tree to commit, optionally carrying its parent SHA.
 */
commit(message: string, changeTree: { parentSha?: string; sha: string }) {
  const { parentSha, sha } = changeTree;
  return this.createCommit(message, sha, parentSha ? [parentSha] : []);
}
|
|
|
|
|
2019-12-18 18:16:02 +02:00
|
|
|
createCommit(
|
|
|
|
message: string,
|
|
|
|
treeSha: string,
|
|
|
|
parents: string[],
|
2019-12-20 19:33:43 +02:00
|
|
|
author?: GitHubAuthor,
|
2020-02-17 11:10:56 +01:00
|
|
|
committer?: GitHubCommitter,
|
2019-12-20 19:33:43 +02:00
|
|
|
): Promise<GitHubCommit> {
|
2018-08-07 14:46:54 -06:00
|
|
|
return this.request(`${this.repoURL}/git/commits`, {
|
|
|
|
method: 'POST',
|
2017-08-14 09:00:47 -04:00
|
|
|
body: JSON.stringify({ message, tree: treeSha, parents, author, committer }),
|
2016-08-31 13:30:14 -03:00
|
|
|
});
|
|
|
|
}
|
2016-08-30 19:06:20 -03:00
|
|
|
}
|