fix(backend-github): prepend collection name (#2878)

* fix(backend-github): prepend collection name

* chore: prefer migrating entries

* chore: cleanup

* chore: move migration to listUnpublishedBranches

* chore: prefer flowAsync

* chore: feedback updates

* refactor: extract current metadata version to a const

* refactor: don't send pulls request on open authoring

* test: update recorded data

* fix: hardcode migration key/branch logic

* test(backend-github): add unit tests for migration code

* fix(github-graphql): add ref property to result of createBranch

* test(cypress): update recorded data

* fix: load unpublished entries once

* fix: run migration for published draft entry

* fix: failing test

* chore: use hardcoded version number

* fix: use hardcoded version number

* test(cypress): update recorded data
Bartholomew 2019-11-26 09:40:27 +01:00 committed by Erez Rokah
parent 695b0e0380
commit 465f463959
45 changed files with 16553 additions and 16755 deletions
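The change in a nutshell: for the regular editorial workflow, content keys (and therefore the cms/... branch names and metadata keys) now include the collection name, and branches created under the old cms/<slug> scheme are migrated to cms/<collection>/<slug> the first time unpublished entries are listed. A minimal standalone sketch of the new naming, assuming illustrative values; the helper names here are not part of the backend's API:

const CMS_BRANCH_PREFIX = 'cms';

// mirrors the new generateContentKey logic from the diff below
const generateContentKey = (collectionName, slug, { useOpenAuthoring, repo } = {}) =>
  useOpenAuthoring ? `${repo}/${collectionName}/${slug}` : `${collectionName}/${slug}`;

// hypothetical helper: a content key maps 1:1 onto a CMS branch name
const branchFromContentKey = contentKey => `${CMS_BRANCH_PREFIX}/${contentKey}`;

// old scheme (pre-#2878): keyed by slug only -> cms/2019-11-11-post-title
// new scheme: collection name is prepended   -> cms/posts/2019-11-11-post-title
console.log(branchFromContentKey(generateContentKey('posts', '2019-11-11-post-title')));
// -> 'cms/posts/2019-11-11-post-title'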

File diff suppressed because one or more lines are too long (8 files)

View File

@@ -12,18 +12,19 @@ import {
differenceBy,
trimStart,
} from 'lodash';
import { map } from 'lodash/fp';
import { map, filter } from 'lodash/fp';
import {
getAllResponses,
APIError,
EditorialWorkflowError,
filterPromisesWith,
flowAsync,
localForage,
onlySuccessfulPromises,
resolvePromiseProperties,
} from 'netlify-cms-lib-util';
const CMS_BRANCH_PREFIX = 'cms';
const CURRENT_METADATA_VERSION = '1';
const replace404WithEmptyArray = err => {
if (err && err.status === 404) {
@@ -148,9 +149,7 @@ export default class API {
generateContentKey(collectionName, slug) {
if (!this.useOpenAuthoring) {
// this doesn't use the collection, but we need to leave it that way for backwards
// compatibility
return slug;
return `${collectionName}/${slug}`;
}
return `${this.repo}/${collectionName}/${slug}`;
@@ -225,6 +224,29 @@ export default class API {
);
}
deleteMetadata(key) {
if (!this._metadataSemaphore) {
this._metadataSemaphore = semaphore(1);
}
return new Promise(resolve =>
this._metadataSemaphore.take(async () => {
try {
const branchData = await this.checkMetadataRef();
const file = { path: `${key}.json`, sha: null };
const changeTree = await this.updateTree(branchData.sha, [file]);
const { sha } = await this.commit(`Deleting “${key}” metadata`, changeTree);
await this.patchRef('meta', '_netlify_cms', sha);
this._metadataSemaphore.leave();
resolve();
} catch (err) {
this._metadataSemaphore.leave();
resolve();
}
}),
);
}
retrieveMetadata(key) {
const cache = localForage.getItem(`gh.meta.${key}`);
return cache.then(cached => {
@@ -394,30 +416,19 @@ export default class API {
});
}
getPRsForBranchName = ({
branchName,
state,
base = this.branch,
repoURL = this.repoURL,
usernameOfFork,
} = {}) => {
getPRsForBranchName = branchName => {
// Get PRs with a `head` of `branchName`. Note that this is a
// substring match, so we need to check that the `head.ref` of
// at least one of the returned objects matches `branchName`.
return this.requestAllPages(`${repoURL}/pulls`, {
return this.requestAllPages(`${this.repoURL}/pulls`, {
params: {
head: usernameOfFork ? `${usernameOfFork}:${branchName}` : branchName,
...(state ? { state } : {}),
base,
head: branchName,
state: 'open',
base: this.branch,
},
});
};
branchHasPR = async ({ branchName, ...rest }) => {
const prs = await this.getPRsForBranchName({ branchName, ...rest });
return prs.some(pr => pr.head.ref === branchName);
};
getUpdatedOpenAuthoringMetadata = async (contentKey, { metadata: metadataArg } = {}) => {
const metadata = metadataArg || (await this.retrieveMetadata(contentKey)) || {};
const { pr: prMetadata, status } = metadata;
@@ -459,33 +470,82 @@ export default class API {
return metadata;
};
async migrateToVersion1(branch, metaData) {
// hard code key/branch generation logic to ignore future changes
const oldContentKey = branch.ref.substring(`refs/heads/cms/`.length);
const newContentKey = `${metaData.collection}/${oldContentKey}`;
const newBranchName = `cms/${newContentKey}`;
// create new branch and pull request in new format
const newBranch = await this.createBranch(newBranchName, metaData.pr.head);
const pr = await this.createPR(metaData.commitMessage, newBranchName);
// store new metadata
await this.storeMetadata(newContentKey, {
...metaData,
pr: {
number: pr.number,
head: pr.head.sha,
},
branch: newBranchName,
version: '1',
});
// remove old data
await this.closePR(metaData.pr);
await this.deleteBranch(metaData.branch);
await this.deleteMetadata(oldContentKey);
return newBranch;
}
async migrateBranch(branch) {
const metadata = await this.retrieveMetadata(this.contentKeyFromRef(branch.ref));
if (!metadata.version) {
// migrate branch from cms/slug to cms/collection/slug
branch = await this.migrateToVersion1(branch, metadata);
}
return branch;
}
async listUnpublishedBranches() {
console.log(
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
);
const onlyBranchesWithOpenPRs = filterPromisesWith(({ ref }) =>
this.branchHasPR({ branchName: this.branchNameFromRef(ref), state: 'open' }),
);
const getUpdatedOpenAuthoringBranches = flow([
map(async branch => {
const contentKey = this.contentKeyFromRef(branch.ref);
const metadata = await this.getUpdatedOpenAuthoringMetadata(contentKey);
// filter out removed entries
if (!metadata) {
return Promise.reject('Unpublished entry was removed');
}
return branch;
}),
onlySuccessfulPromises,
]);
try {
const branches = await this.request(`${this.repoURL}/git/refs/heads/cms`).catch(
replace404WithEmptyArray,
);
const filterFunction = this.useOpenAuthoring
? getUpdatedOpenAuthoringBranches
: onlyBranchesWithOpenPRs;
let filterFunction;
if (this.useOpenAuthoring) {
const getUpdatedOpenAuthoringBranches = flow([
map(async branch => {
const contentKey = this.contentKeyFromRef(branch.ref);
const metadata = await this.getUpdatedOpenAuthoringMetadata(contentKey);
// filter out removed entries
if (!metadata) {
return Promise.reject('Unpublished entry was removed');
}
return branch;
}),
onlySuccessfulPromises,
]);
filterFunction = getUpdatedOpenAuthoringBranches;
} else {
const prs = await this.getPRsForBranchName(CMS_BRANCH_PREFIX);
const onlyBranchesWithOpenPRs = flowAsync([
filter(({ ref }) => prs.some(pr => pr.head.ref === this.branchNameFromRef(ref))),
map(branch => this.migrateBranch(branch)),
onlySuccessfulPromises,
]);
filterFunction = onlyBranchesWithOpenPRs;
}
return await filterFunction(branches);
} catch (err) {
console.log(
@@ -621,6 +681,7 @@ export default class API {
files: mediaFilesList,
},
timeStamp: new Date().toISOString(),
version: CURRENT_METADATA_VERSION,
});
} else {
// Entry is already on editorial review workflow - just update metadata and commit to existing branch
@@ -864,6 +925,7 @@ export default class API {
this.retrieveMetadata(contentKey)
.then(metadata => (metadata && metadata.pr ? this.closePR(metadata.pr) : Promise.resolve()))
.then(() => this.deleteBranch(branchName))
.then(() => this.deleteMetadata(contentKey))
// If the PR doesn't exist, then this has already been deleted -
// deletion should be idempotent, so we can consider this a
// success.
@@ -883,6 +945,7 @@ export default class API {
const metadata = await this.retrieveMetadata(contentKey);
await this.mergePR(metadata.pr, metadata.objects);
await this.deleteBranch(branchName);
await this.deleteMetadata(contentKey);
return metadata;
}

View File

@@ -228,7 +228,8 @@ export default class GraphQLAPI extends API {
branches.push({ ref: `${headRef.prefix}${headRef.name}` });
});
});
return branches;
return await Promise.all(branches.map(branch => this.migrateBranch(branch)));
} else {
console.log(
'%c No Unpublished entries',
@@ -516,7 +517,7 @@ export default class GraphQLAPI extends API {
},
});
const { branch } = data.createRef;
return branch;
return { ...branch, ref: `${branch.prefix}${branch.name}` };
}
async createBranchAndPullRequest(branchName, sha, title) {

View File

@@ -338,4 +338,93 @@ describe('github API', () => {
);
});
});
describe('migrateBranch', () => {
it('should migrate to version 1 when no version', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const newBranch = { ref: 'refs/heads/cms/posts/2019-11-11-post-title' };
api.migrateToVersion1 = jest.fn().mockResolvedValue(newBranch);
const metadata = { type: 'PR' };
api.retrieveMetadata = jest.fn().mockResolvedValue(metadata);
const branch = { ref: 'refs/heads/cms/2019-11-11-post-title' };
await expect(api.migrateBranch(branch)).resolves.toBe(newBranch);
expect(api.migrateToVersion1).toHaveBeenCalledTimes(1);
expect(api.migrateToVersion1).toHaveBeenCalledWith(branch, metadata);
expect(api.retrieveMetadata).toHaveBeenCalledTimes(1);
expect(api.retrieveMetadata).toHaveBeenCalledWith('2019-11-11-post-title');
});
it('should not migrate to version 1 when version is 1', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
api.migrateToVersion1 = jest.fn();
const metadata = { type: 'PR', version: '1' };
api.retrieveMetadata = jest.fn().mockResolvedValue(metadata);
const branch = { ref: 'refs/heads/cms/posts/2019-11-11-post-title' };
await expect(api.migrateBranch(branch)).resolves.toBe(branch);
expect(api.migrateToVersion1).toHaveBeenCalledTimes(0);
expect(api.retrieveMetadata).toHaveBeenCalledTimes(1);
expect(api.retrieveMetadata).toHaveBeenCalledWith('posts/2019-11-11-post-title');
});
});
describe('migrateToVersion1', () => {
it('should migrate to version 1', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const newBranch = { ref: 'refs/heads/cms/posts/2019-11-11-post-title' };
api.createBranch = jest.fn().mockResolvedValue(newBranch);
const newPr = { number: 2, head: { sha: 'new_head' } };
api.createPR = jest.fn().mockResolvedValue(newPr);
api.storeMetadata = jest.fn();
api.closePR = jest.fn();
api.deleteBranch = jest.fn();
api.deleteMetadata = jest.fn();
const branch = { ref: 'refs/heads/cms/2019-11-11-post-title' };
const metadata = {
branch: 'cms/2019-11-11-post-title',
type: 'PR',
pr: { head: 'old_head' },
commitMessage: 'commitMessage',
collection: 'posts',
};
await expect(api.migrateToVersion1(branch, metadata)).resolves.toBe(newBranch);
expect(api.createBranch).toHaveBeenCalledTimes(1);
expect(api.createBranch).toHaveBeenCalledWith('cms/posts/2019-11-11-post-title', 'old_head');
expect(api.createPR).toHaveBeenCalledTimes(1);
expect(api.createPR).toHaveBeenCalledWith('commitMessage', 'cms/posts/2019-11-11-post-title');
expect(api.storeMetadata).toHaveBeenCalledTimes(1);
expect(api.storeMetadata).toHaveBeenCalledWith('posts/2019-11-11-post-title', {
type: 'PR',
pr: { head: 'new_head', number: 2 },
commitMessage: 'commitMessage',
collection: 'posts',
branch: 'cms/posts/2019-11-11-post-title',
version: '1',
});
expect(api.closePR).toHaveBeenCalledTimes(1);
expect(api.closePR).toHaveBeenCalledWith(metadata.pr);
expect(api.deleteBranch).toHaveBeenCalledTimes(1);
expect(api.deleteBranch).toHaveBeenCalledWith('cms/2019-11-11-post-title');
expect(api.deleteMetadata).toHaveBeenCalledTimes(1);
expect(api.deleteMetadata).toHaveBeenCalledWith('2019-11-11-post-title');
});
});
});

View File

@@ -28,6 +28,7 @@ export const branch = gql`
}
id
name
prefix
repository {
...RepositoryParts
}

View File

@@ -388,7 +388,7 @@ export default class GitHub {
branches.map(({ ref }) => {
promises.push(
new Promise(resolve => {
const contentKey = ref.split('refs/heads/cms/').pop();
const contentKey = this.api.contentKeyFromRef(ref);
const slug = contentKey.split('/').pop();
return sem.take(() =>
this.api

View File

@@ -57,6 +57,9 @@ describe('editorialWorkflow actions', () => {
mediaLibrary: fromJS({
isLoading: false,
}),
editorialWorkflow: fromJS({
pages: { ids: [] },
}),
});
currentBackend.mockReturnValue(backend);

View File

@@ -1,4 +1,5 @@
import uuid from 'uuid/v4';
import { get } from 'lodash';
import { actions as notifActions } from 'redux-notifications';
import { BEGIN, COMMIT, REVERT } from 'redux-optimist';
import { serializeValues } from 'Lib/serializeEntryValues';
@@ -242,6 +243,12 @@ export function loadUnpublishedEntry(collection, slug) {
return async (dispatch, getState) => {
const state = getState();
const backend = currentBackend(state.config);
const entriesLoaded = get(state.editorialWorkflow.toJS(), 'pages.ids', false);
//run possible unpublishedEntries migration
if (!entriesLoaded) {
const response = await backend.unpublishedEntries(state.collections).catch(() => false);
response && dispatch(unpublishedEntriesLoaded(response.entries, response.pagination));
}
dispatch(unpublishedEntryLoading(collection, slug));
@@ -294,8 +301,10 @@ export function loadUnpublishedEntries(collections) {
export function loadUnpublishedEntries(collections) {
return (dispatch, getState) => {
const state = getState();
if (state.config.get('publish_mode') !== EDITORIAL_WORKFLOW) return;
const backend = currentBackend(state.config);
const entriesLoaded = get(state.editorialWorkflow.toJS(), 'pages.ids', false);
if (state.config.get('publish_mode') !== EDITORIAL_WORKFLOW || entriesLoaded) return;
dispatch(unpublishedEntriesLoading());
backend
.unpublishedEntries(collections)

View File

@@ -14,6 +14,7 @@ import {
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
} from './promise';
import unsentRequest from './unsentRequest';
@@ -44,6 +45,7 @@ export const NetlifyCmsLibUtil = {
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
unsentRequest,
filterByPropExtension,
@@ -69,6 +71,7 @@ export {
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
unsentRequest,
filterByPropExtension,

View File

@@ -33,3 +33,6 @@ export const onlySuccessfulPromises = flow([
then(Promise.all.bind(Promise)),
then(filter(maybeValue => maybeValue !== filterPromiseSymbol)),
]);
const wrapFlowAsync = fn => async arg => fn(await arg);
export const flowAsync = fns => flow(fns.map(fn => wrapFlowAsync(fn)));
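For reference, flowAsync behaves like lodash's flow except that every step receives the awaited result of the previous one, so synchronous and asynchronous functions can be mixed in a single pipeline. A minimal usage sketch, with made-up example functions:

import { flowAsync } from 'netlify-cms-lib-util';

const fetchGreeting = async name => `hello ${name}`; // stands in for any async step
const toUpper = s => s.toUpperCase(); // a plain synchronous step

// each function receives the awaited result of the one before it
const greetLoudly = flowAsync([fetchGreeting, toUpper]);
greetLoudly('world').then(console.log); // logs 'HELLO WORLD'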