import { Base64 } from 'js-base64';
import semaphore, { Semaphore } from 'semaphore';
import { initial, last, partial, result, trimStart, trim } from 'lodash';
import {
  getAllResponses,
  APIError,
  EditorialWorkflowError,
  localForage,
  basename,
  AssetProxy,
  Entry as LibEntry,
  PersistOptions,
  readFile,
  CMS_BRANCH_PREFIX,
  generateContentKey,
  DEFAULT_PR_BODY,
  MERGE_COMMIT_MESSAGE,
  PreviewState,
  FetchError,
  parseContentKey,
  branchFromContentKey,
  isCMSLabel,
  labelToStatus,
  statusToLabel,
  contentKeyFromBranch,
} from 'netlify-cms-lib-util';
import { Octokit } from '@octokit/rest';

type GitHubUser = Octokit.UsersGetAuthenticatedResponse;
type GitCreateTreeParamsTree = Octokit.GitCreateTreeParamsTree;
type GitHubCompareCommit = Octokit.ReposCompareCommitsResponseCommitsItem;
type GitHubAuthor = Octokit.GitCreateCommitResponseAuthor;
type GitHubCommitter = Octokit.GitCreateCommitResponseCommitter;
type GitHubPull = Octokit.PullsListResponseItem;

export const API_NAME = 'GitHub';

export const MOCK_PULL_REQUEST = -1;

export interface Config {
  apiRoot?: string;
  token?: string;
  branch?: string;
  useOpenAuthoring?: boolean;
  repo?: string;
  originRepo?: string;
  squashMerges: boolean;
  initialWorkflowStatus: string;
}

interface TreeFile {
  type: 'blob' | 'tree';
  sha: string;
  path: string;
  raw?: string;
}

export interface Entry extends LibEntry {
  sha?: string;
}

type Override<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;

type TreeEntry = Override<GitCreateTreeParamsTree, { sha: string | null }>;

type GitHubCompareCommits = GitHubCompareCommit[];

type GitHubCompareFile = Octokit.ReposCompareCommitsResponseFilesItem & {
  previous_filename?: string;
};

type GitHubCompareFiles = GitHubCompareFile[];

enum GitHubCommitStatusState {
  Error = 'error',
  Failure = 'failure',
  Pending = 'pending',
  Success = 'success',
}

export enum PullRequestState {
  Open = 'open',
  Closed = 'closed',
  All = 'all',
}

type GitHubCommitStatus = Octokit.ReposListStatusesForRefResponseItem & {
  state: GitHubCommitStatusState;
};

interface MetaDataObjects {
  entry: { path: string; sha: string };
  files: MediaFile[];
}

export interface Metadata {
  type: string;
  objects: MetaDataObjects;
  branch: string;
  status: string;
  pr?: {
    number: number;
    head: string | { sha: string };
  };
  collection: string;
  commitMessage: string;
  version?: string;
  user: string;
  title?: string;
  description?: string;
  timeStamp: string;
}

export interface BlobArgs {
  sha: string;
  repoURL: string;
  parseText: boolean;
}

type Param = string | number | undefined;

type Options = RequestInit & {
  params?: Record<string, Param | Record<string, Param> | string[]>;
};

type MediaFile = {
  sha: string;
  path: string;
};

const withCmsLabel = (pr: GitHubPull) => pr.labels.some(l => isCMSLabel(l.name));
const withoutCmsLabel = (pr: GitHubPull) => pr.labels.every(l => !isCMSLabel(l.name));

const getTreeFiles = (files: GitHubCompareFiles) => {
  const treeFiles = files.reduce((arr, file) => {
    if (file.status === 'removed') {
      // delete the file
      arr.push({ sha: null, path: file.filename });
    } else if (file.status === 'renamed') {
      // delete the previous file
      arr.push({ sha: null, path: file.previous_filename as string });
      // add the renamed file
      arr.push({ sha: file.sha, path: file.filename });
    } else {
      // add the file
      arr.push({ sha: file.sha, path: file.filename });
    }
    return arr;
  }, [] as { sha: string | null; path: string }[]);

  return treeFiles;
};

export default class API {
  apiRoot: string;
  token: string;
  branch: string;
  useOpenAuthoring?: boolean;
  repo: string;
  originRepo: string;
  repoOwner: string;
  repoName: string;
  originRepoOwner: string;
  originRepoName: string;
  repoURL: string;
  originRepoURL: string;
  mergeMethod: string;
  initialWorkflowStatus: string;

  _userPromise?: Promise<GitHubUser>;
  _metadataSemaphore?: Semaphore;

  commitAuthor?: {};

  constructor(config: Config) {
    this.apiRoot = config.apiRoot || 'https://api.github.com';
    this.token = config.token || '';
    this.branch = config.branch || 'master';
    this.useOpenAuthoring = config.useOpenAuthoring;
    this.repo = config.repo || '';
    this.originRepo = config.originRepo || this.repo;
    this.repoURL = `/repos/${this.repo}`;
    // when not in 'useOpenAuthoring' mode originRepoURL === repoURL
    this.originRepoURL = `/repos/${this.originRepo}`;

    const [repoParts, originRepoParts] = [this.repo.split('/'), this.originRepo.split('/')];
    this.repoOwner = repoParts[0];
    this.repoName = repoParts[1];
    this.originRepoOwner = originRepoParts[0];
    this.originRepoName = originRepoParts[1];

    this.mergeMethod = config.squashMerges ? 'squash' : 'merge';
    this.initialWorkflowStatus = config.initialWorkflowStatus;
  }

  static DEFAULT_COMMIT_MESSAGE = 'Automatically generated by Netlify CMS';

  user(): Promise<{ name: string; login: string }> {
    if (!this._userPromise) {
      this._userPromise = this.request('/user') as Promise<GitHubUser>;
    }
    return this._userPromise;
  }

  async hasWriteAccess() {
    try {
      const result: Octokit.ReposGetResponse = await this.request(this.repoURL);
      return result.permissions.push;
    } catch (error) {
      console.error('Problem fetching repo data from GitHub');
      throw error;
    }
  }

  reset() {
    // no op
  }

  requestHeaders(headers = {}) {
    const baseHeader: Record<string, string> = {
      'Content-Type': 'application/json; charset=utf-8',
      ...headers,
    };

    if (this.token) {
      baseHeader.Authorization = `token ${this.token}`;
      return Promise.resolve(baseHeader);
    }

    return Promise.resolve(baseHeader);
  }

  parseJsonResponse(response: Response) {
    return response.json().then(json => {
      if (!response.ok) {
        return Promise.reject(json);
      }
      return json;
    });
  }

  urlFor(path: string, options: Options) {
    const cacheBuster = new Date().getTime();
    const params = [`ts=${cacheBuster}`];
    if (options.params) {
      for (const key in options.params) {
        params.push(`${key}=${encodeURIComponent(options.params[key] as string)}`);
      }
    }
    if (params.length) {
      path += `?${params.join('&')}`;
    }
    return this.apiRoot + path;
  }

  parseResponse(response: Response) {
    const contentType = response.headers.get('Content-Type');
    if (contentType && contentType.match(/json/)) {
      return this.parseJsonResponse(response);
    }
    const textPromise = response.text().then(text => {
      if (!response.ok) {
        return Promise.reject(text);
      }
      return text;
    });
    return textPromise;
  }

  handleRequestError(error: FetchError, responseStatus: number) {
    throw new APIError(error.message, responseStatus, API_NAME);
  }

  async request(
    path: string,
    options: Options = {},
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    parser = (response: Response) => this.parseResponse(response),
  ) {
    const headers = await this.requestHeaders(options.headers || {});
    const url = this.urlFor(path, options);
    let responseStatus: number;
    return fetch(url, { ...options, headers })
      .then(response => {
        responseStatus = response.status;
        return parser(response);
      })
      .catch(error => this.handleRequestError(error, responseStatus));
  }

  nextUrlProcessor() {
    return (url: string) => url;
  }

  async requestAllPages<T>(url: string, options: Options = {}) {
    const headers = await this.requestHeaders(options.headers || {});
    const processedURL = this.urlFor(url, options);
    const allResponses = await getAllResponses(
      processedURL,
      { ...options, headers },
      'next',
      this.nextUrlProcessor(),
    );
    const pages: T[][] = await Promise.all(
      allResponses.map((res: Response) =>
        this.parseResponse(res)),
    );
    return ([] as T[]).concat(...pages);
  }

  generateContentKey(collectionName: string, slug: string) {
    const contentKey = generateContentKey(collectionName, slug);
    if (!this.useOpenAuthoring) {
      return contentKey;
    }
    return `${this.repo}/${contentKey}`;
  }

  parseContentKey(contentKey: string) {
    if (!this.useOpenAuthoring) {
      return parseContentKey(contentKey);
    }
    return parseContentKey(contentKey.substring(this.repo.length + 1));
  }

  checkMetadataRef() {
    return this.request(`${this.repoURL}/git/refs/meta/_netlify_cms`, {
      cache: 'no-store',
    })
      .then(response => response.object)
      .catch(() => {
        // Meta ref doesn't exist
        const readme = {
          raw:
            '# Netlify CMS\n\nThis tree is used by the Netlify CMS to store metadata information for specific files and branches.',
        };

        return this.uploadBlob(readme)
          .then(item =>
            this.request(`${this.repoURL}/git/trees`, {
              method: 'POST',
              body: JSON.stringify({
                tree: [{ path: 'README.md', mode: '100644', type: 'blob', sha: item.sha }],
              }),
            }),
          )
          .then(tree => this.commit('First Commit', tree))
          .then(response => this.createRef('meta', '_netlify_cms', response.sha))
          .then(response => response.object);
      });
  }

  async storeMetadata(key: string, data: Metadata) {
    // semaphore ensures metadata updates are always ordered, even if
    // calls to storeMetadata are not. concurrent metadata updates
    // will result in the metadata branch being unable to update.
    if (!this._metadataSemaphore) {
      this._metadataSemaphore = semaphore(1);
    }
    return new Promise((resolve, reject) =>
      this._metadataSemaphore?.take(async () => {
        try {
          const branchData = await this.checkMetadataRef();
          const file = { path: `${key}.json`, raw: JSON.stringify(data) };

          await this.uploadBlob(file);
          const changeTree = await this.updateTree(branchData.sha, [file as TreeFile]);
          const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
          await this.patchRef('meta', '_netlify_cms', sha);
          localForage.setItem(`gh.meta.${key}`, {
            expires: Date.now() + 300000, // In 5 minutes
            data,
          });
          this._metadataSemaphore?.leave();
          resolve();
        } catch (err) {
          reject(err);
        }
      }),
    );
  }

  deleteMetadata(key: string) {
    if (!this._metadataSemaphore) {
      this._metadataSemaphore = semaphore(1);
    }
    return new Promise(resolve =>
      this._metadataSemaphore?.take(async () => {
        try {
          const branchData = await this.checkMetadataRef();
          const file = { path: `${key}.json`, sha: null };

          const changeTree = await this.updateTree(branchData.sha, [file]);
          const { sha } = await this.commit(`Deleting “${key}” metadata`, changeTree);
          await this.patchRef('meta', '_netlify_cms', sha);
          this._metadataSemaphore?.leave();
          resolve();
        } catch (err) {
          this._metadataSemaphore?.leave();
          resolve();
        }
      }),
    );
  }

  retrieveMetadataOld(key: string): Promise<Metadata> {
    const cache = localForage.getItem<{ data: Metadata; expires: number }>(`gh.meta.${key}`);
    return cache.then(cached => {
      if (cached && cached.expires > Date.now()) {
        return cached.data as Metadata;
      }
      console.log(
        '%c Checking for MetaData files',
        'line-height: 30px;text-align: center;font-weight: bold',
      );

      const metadataRequestOptions = {
        params: { ref: 'refs/meta/_netlify_cms' },
        headers: { Accept: 'application/vnd.github.v3.raw' },
        cache: 'no-store' as RequestCache,
      };

      const errorHandler = (err: Error) => {
        if (err.message === 'Not Found') {
          console.log(
            '%c %s does not have metadata',
            'line-height: 30px;text-align: center;font-weight: bold',
            key,
          );
        }
        throw err;
      };

      if (!this.useOpenAuthoring) {
        return this.request(`${this.repoURL}/contents/${key}.json`, metadataRequestOptions)
          .then((response: string) =>
            JSON.parse(response))
          .catch(errorHandler);
      }
      const [user, repo] = key.split('/');
      return this.request(`/repos/${user}/${repo}/contents/${key}.json`, metadataRequestOptions)
        .then((response: string) => JSON.parse(response))
        .catch(errorHandler);
    });
  }

  async getPullRequests(
    head: string | undefined,
    state: PullRequestState,
    predicate: (pr: GitHubPull) => boolean,
  ) {
    const pullRequests: Octokit.PullsListResponse = await this.requestAllPages<GitHubPull>(
      `${this.originRepoURL}/pulls`,
      {
        params: {
          ...(head ? { head: await this.getHeadReference(head) } : {}),
          base: this.branch,
          state,
          // eslint-disable-next-line @typescript-eslint/camelcase
          per_page: 100,
        },
      },
    );

    return pullRequests.filter(predicate);
  }

  async getOpenAuthoringPullRequest(branch: string, pullRequests: GitHubPull[]) {
    // we can't use labels when using open authoring
    // since the contributor doesn't have access to set labels
    // a branch without a pr (or a closed pr) means a 'draft' entry
    // a branch with an opened pr means a 'pending_review' entry
    const data = await this.getBranch(branch);
    // since we get all (open and closed) pull requests by branch name, make sure to filter by head sha
    const pullRequest = pullRequests.filter(pr => pr.head.sha === data.commit.sha)[0];
    // if no pull request is found for the branch we return a mocked one
    if (!pullRequest) {
      try {
        return {
          head: { sha: data.commit.sha },
          number: MOCK_PULL_REQUEST,
          labels: [{ name: statusToLabel(this.initialWorkflowStatus) }],
          state: PullRequestState.Open,
        } as GitHubPull;
      } catch (e) {
        throw new EditorialWorkflowError('content is not under editorial workflow', true);
      }
    } else {
      pullRequest.labels = pullRequest.labels.filter(l => !isCMSLabel(l.name));
      const cmsLabel =
        pullRequest.state === PullRequestState.Closed
          ? { name: statusToLabel(this.initialWorkflowStatus) }
          : { name: statusToLabel('pending_review') };

      pullRequest.labels.push(cmsLabel as Octokit.PullsGetResponseLabelsItem);
      return pullRequest;
    }
  }

  async getBranchPullRequest(branch: string) {
    if (this.useOpenAuthoring) {
      const pullRequests = await this.getPullRequests(branch, PullRequestState.All, () => true);
      return this.getOpenAuthoringPullRequest(branch, pullRequests);
    } else {
      const pullRequests = await this.getPullRequests(branch, PullRequestState.Open, withCmsLabel);
      if (pullRequests.length <= 0) {
        throw new EditorialWorkflowError('content is not under editorial workflow', true);
      }
      return pullRequests[0];
    }
  }

  async retrieveMetadata(contentKey: string) {
    const { collection, slug } = this.parseContentKey(contentKey);
    const branch = branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    const { files: diffs } = await this.getDifferences(
      this.branch,
      pullRequest.head.sha,
      this.repoURL,
    );
    // media files don't have a patch attribute, except svg files
    const { path, newFile } = diffs
      .filter(d => d.patch && !d.filename.endsWith('.svg'))
      .map(f => ({ path: f.filename, newFile: f.status === 'added' }))[0];

    const mediaFiles = diffs
      .filter(d => d.filename !== path)
      .map(({ filename: path, sha: id }) => ({
        path,
        id,
      }));
    const label = pullRequest.labels.find(l => isCMSLabel(l.name)) as { name: string };
    const status = labelToStatus(label.name);
    return { branch, collection, slug, path, status, newFile, mediaFiles, pullRequest };
  }

  async readFile(
    path: string,
    sha?: string | null,
    {
      branch = this.branch,
      repoURL = this.repoURL,
      parseText = true,
    }: {
      branch?: string;
      repoURL?: string;
      parseText?: boolean;
    } = {},
  ) {
    if (!sha) {
      sha = await this.getFileSha(path, { repoURL, branch });
    }
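    // readFile from netlify-cms-lib-util caches blob content in localForage keyed by sha,
    // so the fetchContent callback below should only run on a cache miss.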
    const fetchContent = () => this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
    const content = await readFile(sha, fetchContent, localForage, parseText);
    return content;
  }

  async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
    const result: Octokit.GitGetBlobResponse = await this.request(`${repoURL}/git/blobs/${sha}`);

    if (parseText) {
      // treat content as a utf-8 string
      const content = Base64.decode(result.content);
      return content;
    } else {
      // treat content as binary and convert to blob
      const content = Base64.atob(result.content);
      const byteArray = new Uint8Array(content.length);
      for (let i = 0; i < content.length; i++) {
        byteArray[i] = content.charCodeAt(i);
      }
      const blob = new Blob([byteArray]);
      return blob;
    }
  }

  async listFiles(
    path: string,
    { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {},
  ): Promise<{ type: string; id: string; name: string; path: string; size: number }[]> {
    const folder = trim(path, '/');
    try {
      const result: Octokit.GitGetTreeResponse = await this.request(
        `${repoURL}/git/trees/${branch}:${folder}`,
        {
          // GitHub API supports recursive=1 for getting the entire recursive tree
          // or omitting it to get the non-recursive tree
          params: depth > 1 ? { recursive: 1 } : {},
        },
      );
      return (
        result.tree
          // filter only files and up to the required depth
          .filter(file => file.type === 'blob' && file.path.split('/').length <= depth)
          .map(file => ({
            type: file.type,
            id: file.sha,
            name: basename(file.path),
            path: `${folder}/${file.path}`,
            size: file.size!,
          }))
      );
    } catch (err) {
      if (err && err.status === 404) {
        console.log('This 404 was expected and handled appropriately.');
        return [];
      } else {
        throw err;
      }
    }
  }

  async readUnpublishedBranchFile(contentKey: string) {
    try {
      const {
        branch,
        collection,
        slug,
        path,
        status,
        newFile,
        mediaFiles,
      } = await this.retrieveMetadata(contentKey);

      const repoURL = this.useOpenAuthoring
        ? `/repos/${contentKey
            .split('/')
            .slice(0, 2)
            .join('/')}`
        : this.repoURL;

      const fileData = (await this.readFile(path, null, { branch, repoURL })) as string;

      return {
        slug,
        metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status },
        fileData,
        isModification: !newFile,
      };
    } catch (e) {
      throw new EditorialWorkflowError('content is not under editorial workflow', true);
    }
  }

  filterOpenAuthoringBranches = async (branch: string) => {
    const contentKey = contentKeyFromBranch(branch);
    const { pullRequest, collection, slug } = await this.retrieveMetadata(contentKey);
    const { state: currentState, merged_at: mergedAt } = pullRequest;
    if (
      pullRequest.number !== MOCK_PULL_REQUEST &&
      currentState === PullRequestState.Closed &&
      mergedAt
    ) {
      // pr was merged, delete entry
      await this.deleteUnpublishedEntry(collection, slug);
      return { branch, filter: false };
    } else {
      return { branch, filter: true };
    }
  };

  async migrateToVersion1(pullRequest: GitHubPull, metadata: Metadata) {
    // hard code key/branch generation logic to ignore future changes
    const oldContentKey = pullRequest.head.ref.substring(`cms/`.length);
    const newContentKey = `${metadata.collection}/${oldContentKey}`;
    const newBranchName = `cms/${newContentKey}`;

    // create new branch and pull request in new format
    await this.createBranch(newBranchName, pullRequest.head.sha as string);
    const pr = await this.createPR(pullRequest.title, newBranchName);

    // store new metadata
    const newMetadata = {
      ...metadata,
      pr: {
        number: pr.number,
        head: pr.head.sha,
      },
      branch: newBranchName,
      version: '1',
    };
    await this.storeMetadata(newContentKey, newMetadata);

    // remove old data
    await this.closePR(metadata.pr!.number);
    await this.deleteBranch(metadata.branch);
    await this.deleteMetadata(oldContentKey);

    return { metadata: newMetadata, pullRequest: pr };
  }

  async migrateToPullRequestLabels(pullRequest: GitHubPull, metadata: Metadata) {
    await this.setPullRequestStatus(pullRequest, metadata.status);
    const contentKey = pullRequest.head.ref.substring(`cms/`.length);
    await this.deleteMetadata(contentKey);
  }

  async migratePullRequest(pullRequest: GitHubPull) {
    let metadata = await this.retrieveMetadataOld(contentKeyFromBranch(pullRequest.head.ref)).catch(
      () => undefined,
    );
    if (!metadata) {
      return;
    }

    if (!metadata.version) {
      // migrate branch from cms/slug to cms/collection/slug
      ({ metadata, pullRequest } = await this.migrateToVersion1(pullRequest, metadata));
    }

    if (metadata.version === '1') {
      // migrate branch from using orphan ref to store metadata to pull requests label
      await this.migrateToPullRequestLabels(pullRequest, metadata);
    }
  }

  async getCmsBranches() {
    const cmsBranches = await this.requestAllPages<Octokit.GitListMatchingRefsResponseItem>(
      `${this.repoURL}/git/refs/heads/cms`,
    ).catch(() => [] as Octokit.GitListMatchingRefsResponseItem[]);
    return cmsBranches;
  }

  async listUnpublishedBranches() {
    console.log(
      '%c Checking for Unpublished entries',
      'line-height: 30px;text-align: center;font-weight: bold',
    );

    let branches: string[];
    if (this.useOpenAuthoring) {
      // open authoring branches can exist without a pr
      const cmsBranches: Octokit.GitListMatchingRefsResponse = await this.getCmsBranches();
      branches = cmsBranches.map(b => b.ref.substring('refs/heads/'.length));
      // filter irrelevant branches
      const branchesWithFilter = await Promise.all(
        branches.map(b => this.filterOpenAuthoringBranches(b)),
      );
      branches = branchesWithFilter.filter(b => b.filter).map(b => b.branch);
    } else {
      // backwards compatibility code, get relevant pull requests and migrate them
      const pullRequests = await
      this.getPullRequests(
        undefined,
        PullRequestState.Open,
        withoutCmsLabel,
      );
      for (const pr of pullRequests) {
        await this.migratePullRequest(pr);
      }
      const cmsPullRequests = await this.getPullRequests(
        undefined,
        PullRequestState.Open,
        withCmsLabel,
      );
      branches = cmsPullRequests.map(pr => pr.head.ref);
    }

    return branches;
  }

  /**
   * Retrieve statuses for a given SHA. Unrelated to the editorial workflow
   * concept of entry "status". Useful for things like deploy preview links.
   */
  async getStatuses(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    const sha = pullRequest.head.sha;
    const resp: { statuses: GitHubCommitStatus[] } = await this.request(
      `${this.originRepoURL}/commits/${sha}/status`,
    );
    return resp.statuses.map(s => ({
      context: s.context,
      // eslint-disable-next-line @typescript-eslint/camelcase
      target_url: s.target_url,
      state:
        s.state === GitHubCommitStatusState.Success ? PreviewState.Success : PreviewState.Other,
    }));
  }

  async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
    const files = entry ? mediaFiles.concat(entry) : mediaFiles;
    const uploadPromises = files.map(file => this.uploadBlob(file));
    await Promise.all(uploadPromises);

    if (!options.useWorkflow) {
      return this.getDefaultBranch()
        .then(branchData =>
          this.updateTree(branchData.commit.sha, files as { sha: string; path: string }[]),
        )
        .then(changeTree => this.commit(options.commitMessage, changeTree))
        .then(response => this.patchBranch(this.branch, response.sha));
    } else {
      const mediaFilesList = (mediaFiles as { sha: string; path: string }[]).map(
        ({ sha, path }) => ({
          path: trimStart(path, '/'),
          sha,
        }),
      );
      return this.editorialWorkflowGit(
        files as TreeFile[],
        entry as Entry,
        mediaFilesList,
        options,
      );
    }
  }

  async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
    /**
     * We need to request the tree first to get the SHA. We use extended SHA-1
     * syntax (<rev>:<path>) to get a blob from a tree without having to recurse
     * through the tree.
     */
    const pathArray = path.split('/');
    const filename = last(pathArray);
    const directory = initial(pathArray).join('/');
    const fileDataPath = encodeURIComponent(directory);
    const fileDataURL = `${repoURL}/git/trees/${branch}:${fileDataPath}`;

    const result: Octokit.GitGetTreeResponse = await this.request(fileDataURL, {
      cache: 'no-store',
    });
    const file = result.tree.find(file => file.path === filename);
    if (file) {
      return file.sha;
    } else {
      throw new APIError('Not Found', 404, API_NAME);
    }
  }

  deleteFile(path: string, message: string) {
    if (this.useOpenAuthoring) {
      return Promise.reject('Cannot delete published entries as an Open Authoring user!');
    }

    const branch = this.branch;

    return this.getFileSha(path, { branch }).then(sha => {
      const params: { sha: string; message: string; branch: string; author?: { date: string } } = {
        sha,
        message,
        branch,
      };
      const opts = { method: 'DELETE', params };
      if (this.commitAuthor) {
        opts.params.author = {
          ...this.commitAuthor,
          date: new Date().toISOString(),
        };
      }
      const fileURL = `${this.repoURL}/contents/${path}`;
      return this.request(fileURL, opts);
    });
  }

  async createBranchAndPullRequest(branchName: string, sha: string, commitMessage: string) {
    await this.createBranch(branchName, sha);
    return this.createPR(commitMessage, branchName);
  }

  async updatePullRequestLabels(number: number, labels: string[]) {
    await this.request(`${this.repoURL}/issues/${number}/labels`, {
      method: 'PUT',
      body: JSON.stringify({ labels }),
    });
  }

  async editorialWorkflowGit(
    files: TreeFile[],
    entry: Entry,
    mediaFilesList: MediaFile[],
    options: PersistOptions,
  ) {
    const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
    const branch = branchFromContentKey(contentKey);
    const unpublished = options.unpublished || false;
    if (!unpublished) {
      const branchData = await this.getDefaultBranch();
      const changeTree = await this.updateTree(branchData.commit.sha, files);
      const commitResponse = await this.commit(options.commitMessage, changeTree);

      if (this.useOpenAuthoring) {
        await this.createBranch(branch, commitResponse.sha);
      } else {
        const pr = await this.createBranchAndPullRequest(
          branch,
          commitResponse.sha,
          options.commitMessage,
        );
        await this.setPullRequestStatus(pr, options.status || this.initialWorkflowStatus);
      }
    } else {
      // Entry is already on editorial review workflow - commit to existing branch
      const { files: diffs } = await this.getDifferences(this.branch, branch, this.repoURL);

      // mark media files to remove
      const mediaFilesToRemove: { path: string; sha: string | null }[] = [];
      for (const diff of diffs) {
        if (!mediaFilesList.some(file => file.path === diff.filename)) {
          mediaFilesToRemove.push({ path: diff.filename, sha: null });
        }
      }

      // rebase the branch before applying new changes
      const rebasedHead = await this.rebaseBranch(branch);
      const treeFiles = mediaFilesToRemove.concat(files);
      const changeTree = await this.updateTree(rebasedHead.sha, treeFiles);
      const commit = await this.commit(options.commitMessage, changeTree);

      return this.patchBranch(branch, commit.sha, { force: true });
    }
  }

  async getDifferences(from: string, to: string, repoURL: string) {
    const result: Octokit.ReposCompareCommitsResponse = await this.request(
      `${repoURL}/compare/${from}...${to}`,
    );
    return result;
  }

  async rebaseSingleCommit(baseCommit: GitHubCompareCommit, commit: GitHubCompareCommit) {
    // first get the diff between the commits
    const result = await this.getDifferences(commit.parents[0].sha, commit.sha, this.repoURL);
    const files = getTreeFiles(result.files as GitHubCompareFiles);

    // create a tree with baseCommit as the base with the diff applied
    const tree = await this.updateTree(baseCommit.sha, files);
    const { message, author, committer } = commit.commit;

    // create a new commit from the updated tree
    return (this.createCommit(
      message,
      tree.sha,
      [baseCommit.sha],
      author,
      committer,
    ) as unknown) as GitHubCompareCommit;
  }

  /**
   * Rebase an array of commits one-by-one, starting from a given base SHA
   */
  async rebaseCommits(baseCommit: GitHubCompareCommit, commits: GitHubCompareCommits) {
    /**
     * If the parent of the first commit already matches the target base,
     * return commits as is.
     */
    if (commits.length === 0 || commits[0].parents[0].sha === baseCommit.sha) {
      const head = last(commits) as GitHubCompareCommit;
      return head;
    } else {
      /**
       * Re-create each commit over the new base, applying each to the previous,
       * changing only the parent SHA and tree for each, but retaining all other
       * info, such as the author/committer data.
       */
      const newHeadPromise = commits.reduce((lastCommitPromise, commit) => {
        return lastCommitPromise.then(newParent => {
          const parent = newParent;
          const commitToRebase = commit;
          return this.rebaseSingleCommit(parent, commitToRebase);
        });
      }, Promise.resolve(baseCommit));
      return newHeadPromise;
    }
  }

  async rebaseBranch(branch: string) {
    try {
      // Get the diff between the default branch and the published branch
      const { base_commit: baseCommit, commits } = await this.getDifferences(
        this.branch,
        await this.getHeadReference(branch),
        this.originRepoURL,
      );
      // Rebase the branch based on the diff
      const rebasedHead = await this.rebaseCommits(baseCommit, commits);
      return rebasedHead;
    } catch (error) {
      console.error(error);
      throw error;
    }
  }

  async setPullRequestStatus(pullRequest: GitHubPull, newStatus: string) {
    const labels = [
      ...pullRequest.labels.filter(label => !isCMSLabel(label.name)).map(l => l.name),
      statusToLabel(newStatus),
    ];
    await this.updatePullRequestLabels(pullRequest.number, labels);
  }

  async updateUnpublishedEntryStatus(collectionName: string, slug: string, newStatus: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);

    if (!this.useOpenAuthoring) {
      await this.setPullRequestStatus(pullRequest, newStatus);
    } else {
      if (newStatus === 'pending_publish') {
        throw new Error('Open Authoring entries may not be set to the status "pending_publish".');
      }

      if (pullRequest.number !== MOCK_PULL_REQUEST) {
        const { state } = pullRequest;
        if (state === PullRequestState.Open && newStatus === 'draft') {
          await this.closePR(pullRequest.number);
        }
        if (state === PullRequestState.Closed && newStatus === 'pending_review') {
          await this.openPR(pullRequest.number);
        }
      } else if (newStatus === 'pending_review') {
        const branch = branchFromContentKey(contentKey);
        // get the first commit message as the pr title
        const diff = await this.getDifferences(this.branch, branch, this.repoURL);
        const title = diff.commits[0]?.commit?.message || API.DEFAULT_COMMIT_MESSAGE;
        await this.createPR(title, branch);
      }
    }
  }

  async deleteUnpublishedEntry(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    if (pullRequest.number !== MOCK_PULL_REQUEST) {
      await this.closePR(pullRequest.number);
    }
    await this.deleteBranch(branch);
  }

  async publishUnpublishedEntry(collectionName: string, slug: string) {
    const contentKey =
      this.generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    await this.mergePR(pullRequest);
    await this.deleteBranch(branch);
  }

  async createRef(type: string, name: string, sha: string) {
    const result: Octokit.GitCreateRefResponse = await this.request(`${this.repoURL}/git/refs`, {
      method: 'POST',
      body: JSON.stringify({ ref: `refs/${type}/${name}`, sha }),
    });
    return result;
  }

  async patchRef(type: string, name: string, sha: string, opts: { force?: boolean } = {}) {
    const force = opts.force || false;
    const result: Octokit.GitUpdateRefResponse = await this.request(
      `${this.repoURL}/git/refs/${type}/${encodeURIComponent(name)}`,
      {
        method: 'PATCH',
        body: JSON.stringify({ sha, force }),
      },
    );
    return result;
  }

  deleteRef(type: string, name: string) {
    return this.request(`${this.repoURL}/git/refs/${type}/${encodeURIComponent(name)}`, {
      method: 'DELETE',
    });
  }

  async getBranch(branch: string) {
    const result: Octokit.ReposGetBranchResponse = await this.request(
      `${this.repoURL}/branches/${encodeURIComponent(branch)}`,
    );
    return result;
  }

  async getDefaultBranch() {
    const result: Octokit.ReposGetBranchResponse = await this.request(
      `${this.originRepoURL}/branches/${encodeURIComponent(this.branch)}`,
    );
    return result;
  }

  createBranch(branchName: string, sha: string) {
    return this.createRef('heads', branchName, sha);
  }

  assertCmsBranch(branchName: string) {
    return branchName.startsWith(`${CMS_BRANCH_PREFIX}/`);
  }

  patchBranch(branchName: string, sha: string, opts: { force?: boolean } = {}) {
    const force = opts.force || false;
    if (force && !this.assertCmsBranch(branchName)) {
      throw Error(`Only CMS branches can be force updated, cannot force update ${branchName}`);
    }
    return this.patchRef('heads', branchName, sha, { force });
  }

  deleteBranch(branchName: string) {
    return this.deleteRef('heads', branchName).catch((err: Error) => {
      // If the branch doesn't exist, then it has already been deleted -
      // deletion should be idempotent, so we can consider this a
      // success.
      if (err.message === 'Reference does not exist') {
        return Promise.resolve();
      }
      console.error(err);
      return Promise.reject(err);
    });
  }

  async getHeadReference(head: string) {
    return `${this.repoOwner}:${head}`;
  }

  async createPR(title: string, head: string) {
    const result: Octokit.PullsCreateResponse = await this.request(`${this.originRepoURL}/pulls`, {
      method: 'POST',
      body: JSON.stringify({
        title,
        body: DEFAULT_PR_BODY,
        head: await this.getHeadReference(head),
        base: this.branch,
      }),
    });
    return result;
  }

  async openPR(number: number) {
    console.log('%c Re-opening PR', 'line-height: 30px;text-align: center;font-weight: bold');
    const result: Octokit.PullsUpdateBranchResponse = await this.request(
      `${this.originRepoURL}/pulls/${number}`,
      {
        method: 'PATCH',
        body: JSON.stringify({
          state: PullRequestState.Open,
        }),
      },
    );
    return result;
  }

  async closePR(number: number) {
    console.log('%c Deleting PR', 'line-height: 30px;text-align: center;font-weight: bold');
    const result: Octokit.PullsUpdateBranchResponse = await this.request(
      `${this.originRepoURL}/pulls/${number}`,
      {
        method: 'PATCH',
        body: JSON.stringify({
          state: PullRequestState.Closed,
        }),
      },
    );
    return result;
  }

  async mergePR(pullrequest: GitHubPull) {
    console.log('%c Merging PR', 'line-height: 30px;text-align: center;font-weight: bold');
    try {
      const result: Octokit.PullsMergeResponse = await this.request(
        `${this.originRepoURL}/pulls/${pullrequest.number}/merge`,
        {
          method: 'PUT',
          body: JSON.stringify({
            // eslint-disable-next-line @typescript-eslint/camelcase
            commit_message: MERGE_COMMIT_MESSAGE,
            sha: pullrequest.head.sha,
            // eslint-disable-next-line @typescript-eslint/camelcase
            merge_method: this.mergeMethod,
          }),
        },
      );
      return result;
    } catch (error) {
      if (error instanceof APIError && error.status === 405) {
        return this.forceMergePR(pullrequest);
      } else {
        throw error;
      }
    }
  }

  async forceMergePR(pullRequest: GitHubPull) {
    const result = await this.getDifferences(
      pullRequest.base.sha,
      pullRequest.head.sha,
      this.repoURL,
    );
    const files = getTreeFiles(result.files as GitHubCompareFiles);

    let commitMessage = 'Automatically generated. Merged on Netlify CMS\n\nForce merge of:';
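    // append each force-merged file path to the commit message body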
    files.forEach(file => {
      commitMessage += `\n* "${file.path}"`;
    });
    console.log(
      '%c Automatic merge not possible - Forcing merge.',
      'line-height: 30px;text-align: center;font-weight: bold',
    );
    return this.getDefaultBranch()
      .then(branchData => this.updateTree(branchData.commit.sha, files))
      .then(changeTree => this.commit(commitMessage, changeTree))
      .then(response => this.patchBranch(this.branch, response.sha));
  }

  toBase64(str: string) {
    return Promise.resolve(Base64.encode(str));
  }

  uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
    const content = result(item, 'toBase64', partial(this.toBase64, item.raw as string));

    return content.then(contentBase64 =>
      this.request(`${this.repoURL}/git/blobs`, {
        method: 'POST',
        body: JSON.stringify({
          content: contentBase64,
          encoding: 'base64',
        }),
      }).then(response => {
        item.sha = response.sha;
        return item;
      }),
    );
  }

  async updateTree(baseSha: string, files: { path: string; sha: string | null }[]) {
    const tree: TreeEntry[] = files.map(file => ({
      path: trimStart(file.path, '/'),
      mode: '100644',
      type: 'blob',
      sha: file.sha,
    }));

    const newTree = await this.createTree(baseSha, tree);
    return { ...newTree, parentSha: baseSha };
  }

  async createTree(baseSha: string, tree: TreeEntry[]) {
    const result: Octokit.GitCreateTreeResponse = await this.request(`${this.repoURL}/git/trees`, {
      method: 'POST',
      // eslint-disable-next-line @typescript-eslint/camelcase
      body: JSON.stringify({ base_tree: baseSha, tree }),
    });
    return result;
  }

  commit(message: string, changeTree: { parentSha?: string; sha: string }) {
    const parents = changeTree.parentSha ? [changeTree.parentSha] : [];
    return this.createCommit(message, changeTree.sha, parents);
  }

  async createCommit(
    message: string,
    treeSha: string,
    parents: string[],
    author?: GitHubAuthor,
    committer?: GitHubCommitter,
  ) {
    const result: Octokit.GitCreateCommitResponse = await this.request(
      `${this.repoURL}/git/commits`,
      {
        method: 'POST',
        body: JSON.stringify({ message, tree: treeSha, parents, author, committer }),
      },
    );
    return result;
  }
}