feat(core): align GitHub metadata handling with other backends (#3316)

* Revert "Revert "feat(core): Align GitHub metadata handling with other backends (#3292)""

This reverts commit 5bdd3df9ccbb5149c22d79987ebdcd6cab4b261f.

* fix(backend-github): fix migration code

* test(backend-github): fix test

* test(e2e): shorten wait time

* test(e2e): try and fix test on CI
Erez Rokah, 2020-02-24 23:44:10 +01:00 (committed by GitHub)
parent dcb0c9cfbe
commit 7e0a8ad532
95 changed files with 36118 additions and 36295 deletions
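
At a glance, the common thread through the diffs below is that the per-backend wrappers around content keys and CMS branches (generateContentKey, contentKeyFromBranch, branchFromContentKey) give way to the shared helpers exported from netlify-cms-lib-util across the GitHub, GitLab and Bitbucket code, while GitHub's unpublished-entry metadata moves from the _netlify_cms metadata ref onto pull request labels. A minimal TypeScript sketch of those shared helpers, reconstructed from the removed wrappers and the cms/<collection>/<slug> branch names used in the tests (not a verbatim copy of the library source):

// Sketch only; reconstructed, not copied from netlify-cms-lib-util.
export const CMS_BRANCH_PREFIX = 'cms';

// 'posts' + '2019-11-11-post-title' -> 'posts/2019-11-11-post-title'
export const generateContentKey = (collectionName: string, slug: string) =>
  `${collectionName}/${slug}`;

// 'posts/2019-11-11-post-title' -> 'cms/posts/2019-11-11-post-title'
export const branchFromContentKey = (contentKey: string) =>
  `${CMS_BRANCH_PREFIX}/${contentKey}`;

// 'cms/posts/2019-11-11-post-title' -> 'posts/2019-11-11-post-title'
export const contentKeyFromBranch = (branch: string) =>
  branch.substring(`${CMS_BRANCH_PREFIX}/`.length);

// 'posts/2019-11-11-post-title' -> { collection: 'posts', slug: '2019-11-11-post-title' }
export const parseContentKey = (contentKey: string) => {
  const index = contentKey.indexOf('/');
  return { collection: contentKey.slice(0, index), slug: contentKey.slice(index + 1) };
};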

File diff suppressed because one or more lines are too long (21 files)

View File

@ -0,0 +1,52 @@
import '../../utils/dismiss-local-backup';
import {
login,
createPostAndExit,
goToWorkflow,
goToCollections,
updateWorkflowStatus,
publishWorkflowEntry,
assertPublishedEntry,
} from '../../utils/steps';
import { workflowStatus } from '../../utils/constants';
const versions = ['2.9.7', '2.10.24'];
export default function({ entries, getUser }) {
versions.forEach(version => {
it(`migrate from ${version} to latest`, () => {
cy.task('switchToVersion', {
version,
});
cy.reload();
login(getUser());
createPostAndExit(entries[0]);
createPostAndExit(entries[1]);
createPostAndExit(entries[2]);
goToWorkflow();
updateWorkflowStatus(entries[2], workflowStatus.draft, workflowStatus.ready);
// eslint-disable-next-line cypress/no-unnecessary-waiting
cy.wait(1500); // older versions of the CMS didn't wait fully for the update to be resolved
updateWorkflowStatus(entries[1], workflowStatus.draft, workflowStatus.ready);
// eslint-disable-next-line cypress/no-unnecessary-waiting
cy.wait(1500); // older versions of the CMS didn't wait fully for the update to be resolved
updateWorkflowStatus(entries[0], workflowStatus.draft, workflowStatus.ready);
// eslint-disable-next-line cypress/no-unnecessary-waiting
cy.wait(1500); // older versions of the CMS didn't wait fully for the update to be resolved
cy.task('switchToVersion', {
version: 'latest',
});
cy.reload();
// allow migration code to run for 5 minutes
Cypress.config('defaultCommandTimeout', 5 * 60 * 1000);
publishWorkflowEntry(entries[2]);
publishWorkflowEntry(entries[1]);
publishWorkflowEntry(entries[0]);
goToCollections();
assertPublishedEntry([entries[2], entries[1], entries[0]]);
});
});
}

View File

@ -50,3 +50,10 @@ export const afterEach = (taskResult, backend) => {
Cypress.runner.stop();
}
};
export const seedRepo = (taskResult, backend) => {
cy.task('seedRepo', {
backend,
...taskResult.data,
});
};

View File

@ -0,0 +1,38 @@
import fixture from './common/editorial_workflow_migrations';
import * as specUtils from './common/spec_utils';
import { entry1, entry2, entry3 } from './common/entries';
const backend = 'github';
describe('Github Backend Editorial Workflow Migration - REST API', () => {
const taskResult = { data: {} };
before(() => {
specUtils.before(
taskResult,
{
backend: { use_graphql: false, open_authoring: false },
publish_mode: 'editorial_workflow',
},
backend,
);
});
after(() => {
specUtils.after(taskResult, backend);
});
beforeEach(() => {
specUtils.beforeEach(taskResult, backend);
specUtils.seedRepo(taskResult, backend);
});
afterEach(() => {
specUtils.afterEach(taskResult, backend);
});
fixture({
entries: [entry1, entry2, entry3],
getUser: () => taskResult.data.user,
});
});

View File

@ -1,4 +1,4 @@
const Octokit = require('@octokit/rest');
const { Octokit } = require('@octokit/rest');
const fs = require('fs-extra');
const path = require('path');
const {
@ -139,6 +139,14 @@ async function deleteRepositories({ owner, repo, tempDir }) {
.catch(errorHandler);
}
async function batchRequests(items, batchSize, func) {
while (items.length > 0) {
const batch = items.splice(0, batchSize);
await Promise.all(batch.map(func));
await new Promise(resolve => setTimeout(resolve, 2500));
}
}
async function resetOriginRepo({ owner, repo, tempDir }) {
console.log('Resetting origin repo:', `${owner}/${repo}`);
const { token } = getEnvs();
@ -151,30 +159,28 @@ async function resetOriginRepo({ owner, repo, tempDir }) {
});
const numbers = prs.map(pr => pr.number);
console.log('Closing prs:', numbers);
await Promise.all(
numbers.map(pull_number =>
client.pulls.update({
await batchRequests(numbers, 10, async pull_number => {
await client.pulls.update({
owner,
repo,
pull_number,
state: 'closed',
}),
),
);
});
});
const { data: branches } = await client.repos.listBranches({ owner, repo });
const refs = branches.filter(b => b.name !== 'master').map(b => `heads/${b.name}`);
console.log('Deleting refs', refs);
await Promise.all(
refs.map(ref =>
client.git.deleteRef({
await batchRequests(refs, 10, async ref => {
await client.git.deleteRef({
owner,
repo,
ref,
}),
),
);
});
});
console.log('Resetting master');
const git = getGitClient(tempDir);
@ -404,10 +410,69 @@ async function teardownGitHubTest(taskData, { transformRecordedData } = defaultO
return null;
}
async function seedGitHubRepo(taskData) {
if (process.env.RECORD_FIXTURES) {
const { owner, token } = getEnvs();
const client = getGitHubClient(token);
const repo = taskData.repo;
try {
console.log('Getting master branch');
const { data: master } = await client.repos.getBranch({
owner,
repo,
branch: 'master',
});
const prCount = 120;
const prs = new Array(prCount).fill(0).map((v, i) => i);
const batchSize = 5;
await batchRequests(prs, batchSize, async i => {
const branch = `seed_branch_${i}`;
console.log(`Creating branch ${branch}`);
await client.git.createRef({
owner,
repo,
ref: `refs/heads/${branch}`,
sha: master.commit.sha,
});
const path = `seed/file_${i}`;
console.log(`Creating file ${path}`);
await client.repos.createOrUpdateFile({
owner,
repo,
branch,
content: Buffer.from(`Seed File ${i}`).toString('base64'),
message: `Create seed file ${i}`,
path,
});
const title = `Non CMS Pull Request ${i}`;
console.log(`Creating PR ${title}`);
await client.pulls.create({
owner,
repo,
base: 'master',
head: branch,
title,
});
});
} catch (e) {
console.log(e);
throw e;
}
}
return null;
}
module.exports = {
transformRecordedData,
setupGitHub,
teardownGitHub,
setupGitHubTest,
teardownGitHubTest,
seedGitHubRepo,
};
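
In the Cypress GitHub plugin above, batchRequests throttles the PR-closing, ref-deleting and repo-seeding calls by running them in fixed-size batches with a 2.5 s pause between batches, presumably to stay clear of GitHub's abuse/secondary rate limits. Note that it drains the caller's array via splice. A generically typed restatement of the same pattern (TypeScript, illustrative only):

async function batchRequests<T>(
  items: T[],
  batchSize: number,
  func: (item: T) => Promise<unknown>,
) {
  // splice mutates the input array; pass a copy if the caller still needs it afterwards
  while (items.length > 0) {
    const batch = items.splice(0, batchSize);
    await Promise.all(batch.map(func));
    // brief pause before issuing the next batch of requests
    await new Promise(resolve => setTimeout(resolve, 2500));
  }
}

// e.g. close pull requests ten at a time, as resetOriginRepo does above:
// await batchRequests(numbers, 10, pull_number =>
//   client.pulls.update({ owner, repo, pull_number, state: 'closed' }));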

View File

@ -13,7 +13,13 @@
require('dotenv').config();
const { addMatchImageSnapshotPlugin } = require('cypress-image-snapshot/plugin');
const { setupGitHub, teardownGitHub, setupGitHubTest, teardownGitHubTest } = require('./github');
const {
setupGitHub,
teardownGitHub,
setupGitHubTest,
teardownGitHubTest,
seedGitHubRepo,
} = require('./github');
const {
setupGitGateway,
teardownGitGateway,
@ -29,7 +35,7 @@ const {
} = require('./bitbucket');
const { setupProxy, teardownProxy, setupProxyTest, teardownProxyTest } = require('./proxy');
const { copyBackendFiles } = require('../utils/config');
const { copyBackendFiles, switchVersion } = require('../utils/config');
module.exports = async (on, config) => {
// `on` is used to hook into various events Cypress emits
@ -133,6 +139,28 @@ module.exports = async (on, config) => {
break;
}
return null;
},
async seedRepo(taskData) {
const { backend } = taskData;
console.log(`Seeding repository for backend`, backend);
switch (backend) {
case 'github':
await seedGitHubRepo(taskData);
break;
}
return null;
},
async switchToVersion(taskData) {
const { version } = taskData;
console.log(`Switching CMS to version '${version}'`);
await switchVersion(version);
return null;
},
});

View File

@ -24,4 +24,19 @@ async function updateConfig(configModifier) {
await fs.writeFileSync(configFile, yaml.safeDump(config));
}
module.exports = { copyBackendFiles, updateConfig };
async function switchVersion(version) {
const htmlFile = path.join(devTestDirectory, 'index.html');
const content = await fs.readFile(htmlFile);
const replaceString =
version === 'latest'
? '<script src="dist/netlify-cms.js"></script>'
: `<script src="https://unpkg.com/netlify-cms@${version}/dist/netlify-cms.js"></script>`;
await fs.writeFile(
htmlFile,
content.toString().replace(/<script src=".+?netlify-cms.+?"><\/script>/, replaceString),
);
}
module.exports = { copyBackendFiles, updateConfig, switchVersion };
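
switchVersion rewrites the single CMS <script> tag in dev-test/index.html, toggling the e2e app between the locally built bundle and a published unpkg build. An illustrative call sequence (TypeScript/JavaScript), as invoked by the switchToVersion Cypress task:

await switchVersion('2.9.7');
// <script src="dist/netlify-cms.js"></script>
//   is replaced with
// <script src="https://unpkg.com/netlify-cms@2.9.7/dist/netlify-cms.js"></script>

await switchVersion('latest');
// restores <script src="dist/netlify-cms.js"></script>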

View File

@ -200,7 +200,7 @@ function flushClockAndSave() {
function populateEntry(entry, onDone = flushClockAndSave) {
const keys = Object.keys(entry);
for (let key of keys) {
for (const key of keys) {
const value = entry[key];
if (key === 'body') {
cy.getMarkdownEditor()

View File

@ -23,6 +23,7 @@ import {
PreviewState,
FetchError,
parseContentKey,
branchFromContentKey,
} from 'netlify-cms-lib-util';
import { oneLine } from 'common-tags';
import { parse } from 'what-the-diff';
@ -449,8 +450,8 @@ export default class API {
}
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(options.collectionName as string, entry.slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
const defaultBranchSha = await this.branchCommitSha(this.branch);
@ -497,18 +498,6 @@ export default class API {
);
};
generateContentKey(collectionName: string, slug: string) {
return generateContentKey(collectionName, slug);
}
contentKeyFromBranch(branch: string) {
return branch.substring(`${CMS_BRANCH_PREFIX}/`.length);
}
branchFromContentKey(contentKey: string) {
return `${CMS_BRANCH_PREFIX}/${contentKey}`;
}
async isFileExists(path: string, branch: string) {
const fileExists = await this.readFile(path, null, { branch })
.then(() => true)
@ -559,7 +548,7 @@ export default class API {
async retrieveMetadata(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = this.branchFromContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const diff = await this.getDifferences(branch);
const { newPath: path, newFile } = diff.find(d => !d.binary) as {
@ -609,8 +598,8 @@ export default class API {
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const contentKey = this.generateContentKey(collection, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.addPullRequestComment(pullRequest, statusToLabel(newStatus));
@ -632,8 +621,8 @@ export default class API {
}
async publishUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.mergePullRequest(pullRequest);
@ -654,8 +643,8 @@ export default class API {
}
async deleteUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.declinePullRequest(pullRequest);
@ -674,8 +663,8 @@ export default class API {
}
async getStatuses(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const statuses = await this.getPullRequestStatuses(pullRequest);

View File

@ -34,6 +34,8 @@ import {
getLargeMediaFilteredMediaFiles,
FetchError,
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
} from 'netlify-cms-lib-util';
import NetlifyAuthenticator from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
@ -445,7 +447,7 @@ export default class BitbucketBackend implements Implementation {
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => this.api!.contentKeyFromBranch(branch)),
branches.map(branch => contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
@ -462,7 +464,7 @@ export default class BitbucketBackend implements Implementation {
this.loadEntryMediaFiles(branch, files),
} = {},
) {
const contentKey = this.api!.generateContentKey(collection, slug);
const contentKey = generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,

View File

@ -71,6 +71,20 @@ export default class API extends GithubAPI {
return Promise.resolve({ login: '', ...this.commitAuthor });
}
async getHeadReference(head: string) {
if (!this.repoOwner) {
// get the repo owner from the branch url
// this is required for returning the full head reference, e.g. owner:head
// when filtering pull requests based on the head
const branch = await this.getDefaultBranch();
const self = branch._links.self;
const regex = new RegExp('https?://.+?/repos/(.+?)/');
const owner = self.match(regex);
this.repoOwner = owner ? owner[1] : '';
}
return super.getHeadReference(head);
}
commit(message: string, changeTree: { parentSha?: string; sha: string }) {
const commitParams: {
message: string;
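
In the open-authoring API above, the fork owner is not known up front, so getHeadReference derives it from the default branch's _links.self URL before delegating to the base implementation, which needs it to build the owner:branch head filter. A worked example of that extraction (the URL and branch name are illustrative):

// Illustrative branch URL of the shape returned by the GitHub branches API.
const self = 'https://api.github.com/repos/forked-user/site-repo/branches/master';
const regex = new RegExp('https?://.+?/repos/(.+?)/');
const owner = self.match(regex);
const repoOwner = owner ? owner[1] : ''; // 'forked-user'
// The full head reference then looks like 'forked-user:cms/posts/my-post'.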

File diff suppressed because it is too large

View File

@ -9,17 +9,19 @@ import { createHttpLink } from 'apollo-link-http';
import { setContext } from 'apollo-link-context';
import {
APIError,
EditorialWorkflowError,
readFile,
localForage,
DEFAULT_PR_BODY,
branchFromContentKey,
CMS_BRANCH_PREFIX,
} from 'netlify-cms-lib-util';
import { trim } from 'lodash';
import introspectionQueryResultData from './fragmentTypes';
import API, { Config, BlobArgs, PR, API_NAME } from './API';
import API, { Config, BlobArgs, API_NAME, PullRequestState, MOCK_PULL_REQUEST } from './API';
import * as queries from './queries';
import * as mutations from './mutations';
import { GraphQLError } from 'graphql';
import { Octokit } from '@octokit/rest';
const NO_CACHE = 'no-cache';
const CACHE_FIRST = 'cache-first';
@ -48,25 +50,37 @@ interface TreeFile {
name: string;
}
type GraphQLPullRequest = {
id: string;
baseRefName: string;
baseRefOid: string;
body: string;
headRefName: string;
headRefOid: string;
number: number;
state: string;
title: string;
mergedAt: string | null;
labels: { nodes: { name: string }[] };
};
const transformPullRequest = (pr: GraphQLPullRequest) => {
return {
...pr,
labels: pr.labels.nodes,
head: { ref: pr.headRefName, sha: pr.headRefOid },
base: { ref: pr.baseRefName, sha: pr.baseRefOid },
};
};
type Error = GraphQLError & { type: string };
export default class GraphQLAPI extends API {
repoOwner: string;
repoName: string;
originRepoOwner: string;
originRepoName: string;
client: ApolloClient<NormalizedCacheObject>;
constructor(config: Config) {
super(config);
const [repoParts, originRepoParts] = [this.repo.split('/'), this.originRepo.split('/')];
this.repoOwner = repoParts[0];
this.repoName = repoParts[1];
this.originRepoOwner = originRepoParts[0];
this.originRepoName = originRepoParts[1];
this.client = this.getApolloClient();
}
@ -214,7 +228,64 @@ export default class GraphQLAPI extends API {
}
}
async getStatuses(sha: string) {
async getPullRequests(
head: string | undefined,
state: PullRequestState,
predicate: (pr: Octokit.PullsListResponseItem) => boolean,
) {
const { originRepoOwner: owner, originRepoName: name } = this;
let states;
if (state === PullRequestState.Open) {
states = ['OPEN'];
} else if (state === PullRequestState.Closed) {
states = ['CLOSED', 'MERGED'];
} else {
states = ['OPEN', 'CLOSED', 'MERGED'];
}
const { data } = await this.query({
query: queries.pullRequests,
variables: {
owner,
name,
...(head ? { head } : {}),
states,
},
});
const {
pullRequests,
}: {
pullRequests: {
nodes: GraphQLPullRequest[];
};
} = data.repository;
const mapped = pullRequests.nodes.map(transformPullRequest);
return ((mapped as unknown) as Octokit.PullsListResponseItem[]).filter(
pr => pr.head.ref.startsWith(`${CMS_BRANCH_PREFIX}/`) && predicate(pr),
);
}
async getCmsBranches() {
const { repoOwner: owner, repoName: name } = this;
const { data } = await this.query({
query: queries.cmsBranches,
variables: {
owner,
name,
},
});
return data.repository.refs.nodes.map(({ name, prefix }: { name: string; prefix: string }) => ({
ref: `${prefix}${name}`,
}));
}
async getStatuses(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const sha = pullRequest.head.sha;
const { originRepoOwner: owner, originRepoName: name } = this;
const { data } = await this.query({ query: queries.statues, variables: { owner, name, sha } });
if (data.repository.object) {
@ -265,76 +336,6 @@ export default class GraphQLAPI extends API {
}
}
async listUnpublishedBranches() {
if (this.useOpenAuthoring) {
return super.listUnpublishedBranches();
}
console.log(
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
);
const { repoOwner: owner, repoName: name } = this;
const { data } = await this.query({
query: queries.unpublishedPrBranches,
variables: { owner, name },
});
const { nodes } = data.repository.refs as {
nodes: {
associatedPullRequests: { nodes: { headRef: { prefix: string; name: string } }[] };
}[];
};
if (nodes.length > 0) {
const branches = [] as { ref: string }[];
nodes.forEach(({ associatedPullRequests }) => {
associatedPullRequests.nodes.forEach(({ headRef }) => {
branches.push({ ref: `${headRef.prefix}${headRef.name}` });
});
});
return await Promise.all(branches.map(branch => this.migrateBranch(branch)));
} else {
console.log(
'%c No Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
);
throw new APIError('Not Found', 404, 'GitHub');
}
}
async readUnpublishedBranchFile(contentKey: string) {
// retrieveMetadata(contentKey) rejects in case of no metadata
const metaData = await this.retrieveMetadata(contentKey).catch(() => null);
if (metaData && metaData.objects && metaData.objects.entry && metaData.objects.entry.path) {
const { path } = metaData.objects.entry;
const { repoOwner: headOwner, repoName: headRepoName } = this;
const { originRepoOwner: baseOwner, originRepoName: baseRepoName } = this;
const { data } = await this.query({
query: queries.unpublishedBranchFile,
variables: {
headOwner,
headRepoName,
headExpression: `${metaData.branch}:${path}`,
baseOwner,
baseRepoName,
baseExpression: `${this.branch}:${path}`,
},
});
if (!data.head.object) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
const result = {
metaData,
fileData: data.head.object.text,
isModification: !!data.base.object,
slug: this.slugFromContentKey(contentKey, metaData.collection),
};
return result;
} else {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
}
getBranchQualifiedName(branch: string) {
return `refs/heads/${branch}`;
}
@ -414,7 +415,10 @@ export default class GraphQLAPI extends API {
// https://developer.github.com/v4/enum/pullrequeststate/
// GraphQL state: [CLOSED, MERGED, OPEN]
// REST API state: [closed, open]
const state = data.repository.pullRequest.state === 'OPEN' ? 'open' : 'closed';
const state =
data.repository.pullRequest.state === 'OPEN'
? PullRequestState.Open
: PullRequestState.Closed;
return {
...data.repository.pullRequest,
state,
@ -424,7 +428,6 @@ export default class GraphQLAPI extends API {
getPullRequestAndBranchQuery(branch: string, number: number) {
const { repoOwner: owner, repoName: name } = this;
const { originRepoOwner, originRepoName } = this;
return {
query: queries.pullRequestAndBranch,
variables: {
@ -448,7 +451,7 @@ export default class GraphQLAPI extends API {
return { branch: repository.branch, pullRequest: origin.pullRequest };
}
async openPR({ number }: PR) {
async openPR(number: number) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
@ -467,10 +470,10 @@ export default class GraphQLAPI extends API {
},
});
return data!.closePullRequest;
return data!.reopenPullRequest;
}
async closePR({ number }: PR) {
async closePR(number: number) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
@ -495,13 +498,12 @@ export default class GraphQLAPI extends API {
async deleteUnpublishedEntry(collectionName: string, slug: string) {
try {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = this.generateBranchName(contentKey);
const branchName = branchFromContentKey(contentKey);
const metadata = await this.retrieveMetadata(contentKey);
if (metadata && metadata.pr) {
if (metadata.pullRequest.number !== MOCK_PULL_REQUEST) {
const { branch, pullRequest } = await this.getPullRequestAndBranch(
branchName,
metadata.pr.number,
metadata.pullRequest.number,
);
const { data } = await this.mutate({
@ -631,7 +633,7 @@ export default class GraphQLAPI extends API {
},
});
const { pullRequest } = data!.createPullRequest;
return { ...pullRequest, head: { sha: pullRequest.headRefOid } };
return (transformPullRequest(pullRequest) as unknown) as Octokit.PullsCreateResponse;
}
async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
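
transformPullRequest is what lets the GraphQL client feed its results into the REST-shaped code paths: labels.nodes is flattened to labels, and head/base objects are synthesized from the ref name and OID fields. A worked example with illustrative values:

const graphQLPullRequest = {
  id: 'PR_1',
  baseRefName: 'master',
  baseRefOid: 'base_sha',
  body: 'pr body',
  headRefName: 'cms/posts/2019-11-11-post-title',
  headRefOid: 'head_sha',
  number: 1,
  state: 'OPEN',
  title: 'pr title',
  mergedAt: null,
  labels: { nodes: [{ name: 'netlify-cms/draft' }] },
};

transformPullRequest(graphQLPullRequest);
// => {
//   ...graphQLPullRequest,
//   labels: [{ name: 'netlify-cms/draft' }],
//   head: { ref: 'cms/posts/2019-11-11-post-title', sha: 'head_sha' },
//   base: { ref: 'master', sha: 'base_sha' },
// }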

View File

@ -21,28 +21,35 @@ describe('github API', () => {
describe('editorialWorkflowGit', () => {
it('should create PR with correct base branch name when publishing with editorial workflow', () => {
let prBaseBranch = null;
const api = new API({ branch: 'gh-pages', repo: 'my-repo' });
let labels = null;
const api = new API({
branch: 'gh-pages',
repo: 'owner/my-repo',
initialWorkflowStatus: 'draft',
});
const responses = {
'/repos/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
'/repos/my-repo/git/trees/def': () => ({ tree: [] }),
'/repos/my-repo/git/trees': () => ({}),
'/repos/my-repo/git/commits': () => ({}),
'/repos/my-repo/git/refs': () => ({}),
'/repos/my-repo/pulls': pullRequest => {
prBaseBranch = JSON.parse(pullRequest.body).base;
return { head: { sha: 'cbd' } };
'/repos/owner/my-repo/branches/gh-pages': () => ({ commit: { sha: 'def' } }),
'/repos/owner/my-repo/git/trees/def': () => ({ tree: [] }),
'/repos/owner/my-repo/git/trees': () => ({}),
'/repos/owner/my-repo/git/commits': () => ({}),
'/repos/owner/my-repo/git/refs': () => ({}),
'/repos/owner/my-repo/pulls': req => {
prBaseBranch = JSON.parse(req.body).base;
return { head: { sha: 'cbd' }, labels: [], number: 1 };
},
'/repos/owner/my-repo/issues/1/labels': req => {
labels = JSON.parse(req.body).labels;
return {};
},
'/user': () => ({}),
'/repos/my-repo/git/blobs': () => ({}),
'/repos/my-repo/git/refs/meta/_netlify_cms': () => ({ object: {} }),
};
mockAPI(api, responses);
return expect(
api
.editorialWorkflowGit([], { slug: 'entry', sha: 'abc' }, null, {})
.then(() => prBaseBranch),
).resolves.toEqual('gh-pages');
api.editorialWorkflowGit([], { slug: 'entry', sha: 'abc' }, null, {}).then(() => ({
prBaseBranch,
labels,
})),
).resolves.toEqual({ prBaseBranch: 'gh-pages', labels: ['netlify-cms/draft'] });
});
});
@ -291,39 +298,64 @@ describe('github API', () => {
});
});
describe('migrateBranch', () => {
it('should migrate to version 1 when no version', async () => {
describe('migratePullRequest', () => {
it('should migrate to pull request labels when no version', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const newBranch = { ref: 'refs/heads/cms/posts/2019-11-11-post-title' };
api.migrateToVersion1 = jest.fn().mockResolvedValue(newBranch);
const pr = {
head: { ref: 'cms/2019-11-11-post-title' },
title: 'pr title',
number: 1,
labels: [],
};
const metadata = { type: 'PR' };
api.retrieveMetadata = jest.fn().mockResolvedValue(metadata);
api.retrieveMetadataOld = jest.fn().mockResolvedValue(metadata);
const newBranch = 'cms/posts/2019-11-11-post-title';
const migrateToVersion1Result = {
metadata: { ...metadata, branch: newBranch, version: '1' },
pullRequest: { ...pr, number: 2 },
};
api.migrateToVersion1 = jest.fn().mockResolvedValue(migrateToVersion1Result);
api.migrateToPullRequestLabels = jest.fn();
const branch = { ref: 'refs/heads/cms/2019-11-11-post-title' };
await expect(api.migrateBranch(branch)).resolves.toBe(newBranch);
await api.migratePullRequest(pr);
expect(api.migrateToVersion1).toHaveBeenCalledTimes(1);
expect(api.migrateToVersion1).toHaveBeenCalledWith(branch, metadata);
expect(api.migrateToVersion1).toHaveBeenCalledWith(pr, metadata);
expect(api.retrieveMetadata).toHaveBeenCalledTimes(1);
expect(api.retrieveMetadata).toHaveBeenCalledWith('2019-11-11-post-title');
expect(api.migrateToPullRequestLabels).toHaveBeenCalledTimes(1);
expect(api.migrateToPullRequestLabels).toHaveBeenCalledWith(
migrateToVersion1Result.pullRequest,
migrateToVersion1Result.metadata,
);
expect(api.retrieveMetadataOld).toHaveBeenCalledTimes(1);
expect(api.retrieveMetadataOld).toHaveBeenCalledWith('2019-11-11-post-title');
});
it('should not migrate to version 1 when version is 1', async () => {
it('should migrate to pull request labels when version is 1', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
api.migrateToVersion1 = jest.fn();
const pr = {
head: { ref: 'cms/posts/2019-11-11-post-title' },
title: 'pr title',
number: 1,
labels: [],
};
const metadata = { type: 'PR', version: '1' };
api.retrieveMetadata = jest.fn().mockResolvedValue(metadata);
api.retrieveMetadataOld = jest.fn().mockResolvedValue(metadata);
api.migrateToPullRequestLabels = jest.fn().mockResolvedValue(pr, metadata);
const branch = { ref: 'refs/heads/cms/posts/2019-11-11-post-title' };
await expect(api.migrateBranch(branch)).resolves.toBe(branch);
await api.migratePullRequest(pr);
expect(api.migrateToVersion1).toHaveBeenCalledTimes(0);
expect(api.retrieveMetadata).toHaveBeenCalledTimes(1);
expect(api.retrieveMetadata).toHaveBeenCalledWith('posts/2019-11-11-post-title');
expect(api.migrateToPullRequestLabels).toHaveBeenCalledTimes(1);
expect(api.migrateToPullRequestLabels).toHaveBeenCalledWith(pr, metadata);
expect(api.retrieveMetadataOld).toHaveBeenCalledTimes(1);
expect(api.retrieveMetadataOld).toHaveBeenCalledWith('posts/2019-11-11-post-title');
});
});
@ -331,10 +363,17 @@ describe('github API', () => {
it('should migrate to version 1', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const pr = {
head: { ref: 'cms/2019-11-11-post-title', sha: 'pr_head' },
title: 'pr title',
number: 1,
labels: [],
};
const newBranch = { ref: 'refs/heads/cms/posts/2019-11-11-post-title' };
api.createBranch = jest.fn().mockResolvedValue(newBranch);
const newPr = { number: 2, head: { sha: 'new_head' } };
const newPr = { ...pr, number: 2 };
api.createPR = jest.fn().mockResolvedValue(newPr);
api.storeMetadata = jest.fn();
@ -342,35 +381,42 @@ describe('github API', () => {
api.deleteBranch = jest.fn();
api.deleteMetadata = jest.fn();
const branch = { ref: 'refs/heads/cms/2019-11-11-post-title' };
const branch = 'cms/2019-11-11-post-title';
const metadata = {
branch: 'cms/2019-11-11-post-title',
branch,
type: 'PR',
pr: { head: 'old_head' },
pr: { head: pr.head.sha },
commitMessage: 'commitMessage',
collection: 'posts',
};
await expect(api.migrateToVersion1(branch, metadata)).resolves.toBe(newBranch);
expect(api.createBranch).toHaveBeenCalledTimes(1);
expect(api.createBranch).toHaveBeenCalledWith('cms/posts/2019-11-11-post-title', 'old_head');
expect(api.createPR).toHaveBeenCalledTimes(1);
expect(api.createPR).toHaveBeenCalledWith('commitMessage', 'cms/posts/2019-11-11-post-title');
expect(api.storeMetadata).toHaveBeenCalledTimes(1);
expect(api.storeMetadata).toHaveBeenCalledWith('posts/2019-11-11-post-title', {
const expectedMetadata = {
type: 'PR',
pr: { head: 'new_head', number: 2 },
pr: { head: newPr.head.sha, number: 2 },
commitMessage: 'commitMessage',
collection: 'posts',
branch: 'cms/posts/2019-11-11-post-title',
version: '1',
};
await expect(api.migrateToVersion1(pr, metadata)).resolves.toEqual({
metadata: expectedMetadata,
pullRequest: newPr,
});
expect(api.createBranch).toHaveBeenCalledTimes(1);
expect(api.createBranch).toHaveBeenCalledWith('cms/posts/2019-11-11-post-title', 'pr_head');
expect(api.createPR).toHaveBeenCalledTimes(1);
expect(api.createPR).toHaveBeenCalledWith('pr title', 'cms/posts/2019-11-11-post-title');
expect(api.storeMetadata).toHaveBeenCalledTimes(1);
expect(api.storeMetadata).toHaveBeenCalledWith(
'posts/2019-11-11-post-title',
expectedMetadata,
);
expect(api.closePR).toHaveBeenCalledTimes(1);
expect(api.closePR).toHaveBeenCalledWith(metadata.pr);
expect(api.closePR).toHaveBeenCalledWith(pr.number);
expect(api.deleteBranch).toHaveBeenCalledTimes(1);
expect(api.deleteBranch).toHaveBeenCalledWith('cms/2019-11-11-post-title');
@ -380,11 +426,45 @@ describe('github API', () => {
});
});
describe('migrateToPullRequestLabels', () => {
it('should migrate to pull request labels', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const pr = {
head: { ref: 'cms/posts/2019-11-11-post-title', sha: 'pr_head' },
title: 'pr title',
number: 1,
labels: [],
};
api.setPullRequestStatus = jest.fn();
api.deleteMetadata = jest.fn();
const metadata = {
branch: pr.head.ref,
type: 'PR',
pr: { head: pr.head.sha },
commitMessage: 'commitMessage',
collection: 'posts',
status: 'pending_review',
};
await api.migrateToPullRequestLabels(pr, metadata);
expect(api.setPullRequestStatus).toHaveBeenCalledTimes(1);
expect(api.setPullRequestStatus).toHaveBeenCalledWith(pr, 'pending_review');
expect(api.deleteMetadata).toHaveBeenCalledTimes(1);
expect(api.deleteMetadata).toHaveBeenCalledWith('posts/2019-11-11-post-title');
});
});
describe('rebaseSingleCommit', () => {
it('should create updated tree and commit', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
api.getCommitsDiff = jest.fn().mockResolvedValueOnce([
api.getDifferences = jest.fn().mockResolvedValueOnce({
files: [
{ filename: 'removed.md', status: 'removed', sha: 'removed_sha' },
{
filename: 'renamed.md',
@ -393,7 +473,8 @@ describe('github API', () => {
sha: 'renamed_sha',
},
{ filename: 'added.md', status: 'added', sha: 'added_sha' },
]);
],
});
const newTree = { sha: 'new_tree_sha' };
api.updateTree = jest.fn().mockResolvedValueOnce(newTree);
@ -414,8 +495,8 @@ describe('github API', () => {
await expect(api.rebaseSingleCommit(baseCommit, commit)).resolves.toBe(newCommit);
expect(api.getCommitsDiff).toHaveBeenCalledTimes(1);
expect(api.getCommitsDiff).toHaveBeenCalledWith('parent_sha', 'sha');
expect(api.getDifferences).toHaveBeenCalledTimes(1);
expect(api.getDifferences).toHaveBeenCalledWith('parent_sha', 'sha', '/repos/owner/repo');
expect(api.updateTree).toHaveBeenCalledTimes(1);
expect(api.updateTree).toHaveBeenCalledWith('base_commit_sha', [
@ -528,13 +609,18 @@ describe('github API', () => {
];
api.request = jest.fn(() => Promise.resolve({ statuses }));
const sha = 'sha';
await expect(api.getStatuses(sha)).resolves.toEqual([
api.getBranchPullRequest = jest.fn(() => Promise.resolve({ head: { sha } }));
const collection = 'collection';
const slug = 'slug';
await expect(api.getStatuses(collection, slug)).resolves.toEqual([
{ context: 'deploy', state: 'success', target_url: 'deploy-url' },
{ context: 'build', state: 'other' },
]);
expect(api.getBranchPullRequest).toHaveBeenCalledTimes(1);
expect(api.getBranchPullRequest).toHaveBeenCalledWith('cms/collection/slug');
expect(api.request).toHaveBeenCalledTimes(1);
expect(api.request).toHaveBeenCalledWith(`/repos/repo/commits/${sha}/status`);
});
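
The spec above also pins down the new label convention: a workflow status is stored on the entry's pull request as a label of the form netlify-cms/<status> (the editorialWorkflowGit test expects netlify-cms/draft), and migrateToPullRequestLabels sets that label before deleting the old metadata entry. A minimal sketch consistent with those expectations — statusToLabel is the name used in API.ts, the other helpers are hypothetical:

// Assumption: prefix inferred from the 'netlify-cms/draft' expectation in the spec above.
const CMS_LABEL_PREFIX = 'netlify-cms/';

const statusToLabel = (status: string) => `${CMS_LABEL_PREFIX}${status}`; // 'draft' -> 'netlify-cms/draft'
const isCMSLabel = (label: string) => label.startsWith(CMS_LABEL_PREFIX); // hypothetical helper
const labelToStatus = (label: string) => label.substring(CMS_LABEL_PREFIX.length); // hypothetical helper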

View File

@ -9,7 +9,7 @@ describe('github GraphQL API', () => {
describe('editorialWorkflowGit', () => {
it('should flatten nested tree into a list of files', () => {
const api = new GraphQLAPI({ branch: 'gh-pages', repo: 'my-repo' });
const api = new GraphQLAPI({ branch: 'gh-pages', repo: 'owner/my-repo' });
const entries = [
{
name: 'post-1.md',

View File

@ -185,7 +185,9 @@ describe('github backend implementation', () => {
isModification: true,
metaData: {
branch: 'branch',
objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png', sha: 'sha' }] },
objects: {
entry: { path: 'entry-path', mediaFiles: [{ path: 'image.png', id: 'sha' }] },
},
},
};
readUnpublishedBranchFile.mockResolvedValue(data);

View File

@ -41,6 +41,7 @@ export const pullRequest = gql`
fragment PullRequestParts on PullRequest {
id
baseRefName
baseRefOid
body
headRefName
headRefOid
@ -51,6 +52,11 @@ export const pullRequest = gql`
repository {
...RepositoryParts
}
labels(last: 100) {
nodes {
name
}
}
}
${repository}
`;

View File

@ -25,6 +25,7 @@ import {
UnpublishedEntryMediaFile,
runWithLock,
blobToFileObj,
contentKeyFromBranch,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { Octokit } from '@octokit/rest';
@ -299,7 +300,7 @@ export default class GitHub implements Implementation {
const repoURL = this.useOpenAuthoring ? this.api!.originRepoURL : this.api!.repoURL;
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
this.api!.readFile(path, id, { repoURL }).catch(() => '') as Promise<string>;
return entriesByFiles(files, readFile, 'GitHub');
}
@ -307,10 +308,12 @@ export default class GitHub implements Implementation {
// Fetches a single entry.
getEntry(path: string) {
const repoURL = this.api!.originRepoURL;
return this.api!.readFile(path, null, { repoURL }).then(data => ({
return this.api!.readFile(path, null, { repoURL })
.then(data => ({
file: { path, id: null },
data: data as string,
}));
}))
.catch(() => ({ file: { path, id: null }, data: '' }));
}
getMedia(mediaFolder = this.mediaFolder) {
@ -412,7 +415,7 @@ export default class GitHub implements Implementation {
unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(({ ref }) => this.api!.contentKeyFromRef(ref)),
branches.map(branch => contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
@ -431,10 +434,10 @@ export default class GitHub implements Implementation {
) {
const contentKey = this.api!.generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const files = data.metaData.objects.files || [];
const files = data.metaData.objects.entry.mediaFiles || [];
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
files.map(({ sha: id, path }) => ({ id, path })),
files.map(({ id, path }) => ({ id, path })),
);
return {
slug,
@ -446,22 +449,9 @@ export default class GitHub implements Implementation {
};
}
/**
* Uses GitHub's Statuses API to retrieve statuses, infers which is for a
* deploy preview via `getPreviewStatus`. Returns the url provided by the
* status, as well as the status state, which should be one of 'success',
* 'pending', and 'failure'.
*/
async getDeployPreview(collectionName: string, slug: string) {
const contentKey = this.api!.generateContentKey(collectionName, slug);
const data = await this.api!.retrieveMetadata(contentKey);
if (!data || !data.pr) {
return null;
}
const headSHA = typeof data.pr.head === 'string' ? data.pr.head : data.pr.head.sha;
const statuses = await this.api!.getStatuses(headSHA);
async getDeployPreview(collection: string, slug: string) {
try {
const statuses = await this.api!.getStatuses(collection, slug);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
@ -470,6 +460,9 @@ export default class GitHub implements Implementation {
} else {
return null;
}
} catch (e) {
return null;
}
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {

View File

@ -38,37 +38,6 @@ export const blob = gql`
${fragments.blobWithText}
`;
export const unpublishedBranchFile = gql`
query unpublishedBranchFile(
$headOwner: String!
$headRepoName: String!
$headExpression: String!
$baseOwner: String!
$baseRepoName: String!
$baseExpression: String!
) {
head: repository(owner: $headOwner, name: $headRepoName) {
...RepositoryParts
object(expression: $headExpression) {
... on Blob {
...BlobWithTextParts
}
}
}
base: repository(owner: $baseOwner, name: $baseRepoName) {
...RepositoryParts
object(expression: $baseExpression) {
... on Blob {
id
oid
}
}
}
}
${fragments.repository}
${fragments.blobWithText}
`;
export const statues = gql`
query statues($owner: String!, $name: String!, $sha: GitObjectID!) {
repository(owner: $owner, name: $name) {
@ -140,30 +109,6 @@ export const files = (depth: number) => gql`
${fragments.fileEntry}
`;
export const unpublishedPrBranches = gql`
query unpublishedPrBranches($owner: String!, $name: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
refs(refPrefix: "refs/heads/cms/", last: 50) {
nodes {
id
associatedPullRequests(last: 50, states: OPEN) {
nodes {
id
headRef {
id
name
prefix
}
}
}
}
}
}
}
${fragments.repository}
`;
const branchQueryPart = `
branch: ref(qualifiedName: $qualifiedName) {
...BranchParts
@ -181,6 +126,21 @@ export const branch = gql`
${fragments.branch}
`;
export const cmsBranches = gql`
query cmsBranches($owner: String!, $name: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
refs(refPrefix: "refs/heads/cms/", last: 100) {
nodes {
...BranchParts
}
}
}
}
${fragments.repository}
${fragments.branch}
`;
export const repository = gql`
query repository($owner: String!, $name: String!) {
repository(owner: $owner, name: $name) {
@ -206,13 +166,27 @@ export const pullRequest = gql`
${fragments.pullRequest}
`;
export const pullRequests = gql`
query pullRequests($owner: String!, $name: String!, $head: String, $states: [PullRequestState!]) {
repository(owner: $owner, name: $name) {
id
pullRequests(last: 100, headRefName: $head, states: $states) {
nodes {
...PullRequestParts
}
}
}
}
${fragments.pullRequest}
`;
export const pullRequestAndBranch = gql`
query pullRequestAndBranch($owner: String!, $name: String!, $origin_owner: String!, $origin_name: String!, $qualifiedName: String!, $number: Int!) {
query pullRequestAndBranch($owner: String!, $name: String!, $originRepoOwner: String!, $originRepoName: String!, $qualifiedName: String!, $number: Int!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
${branchQueryPart}
}
origin: repository(owner: $origin_owner, name: $origin_name) {
origin: repository(owner: $originRepoOwner, name: $originRepoName) {
...RepositoryParts
${pullRequestQueryPart}
}
@ -222,47 +196,6 @@ export const pullRequestAndBranch = gql`
${fragments.pullRequest}
`;
export const commitTree = gql`
query commitTree($owner: String!, $name: String!, $sha: GitObjectID!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
commit: object(oid: $sha) {
...ObjectParts
... on Commit {
tree {
...ObjectParts
entries {
...TreeEntryParts
}
}
}
}
}
}
${fragments.repository}
${fragments.object}
${fragments.treeEntry}
`;
export const tree = gql`
query tree($owner: String!, $name: String!, $sha: GitObjectID!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
tree: object(oid: $sha) {
...ObjectParts
... on Tree {
entries {
...TreeEntryParts
}
}
}
}
}
${fragments.repository}
${fragments.object}
${fragments.treeEntry}
`;
export const fileSha = gql`
query fileSha($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {

View File

@ -21,6 +21,7 @@ import {
responseParser,
PreviewState,
parseContentKey,
branchFromContentKey,
} from 'netlify-cms-lib-util';
import { Base64 } from 'js-base64';
import { Map, Set } from 'immutable';
@ -457,18 +458,6 @@ export default class API {
])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
};
generateContentKey(collectionName: string, slug: string) {
return generateContentKey(collectionName, slug);
}
contentKeyFromBranch(branch: string) {
return branch.substring(`${CMS_BRANCH_PREFIX}/`.length);
}
branchFromContentKey(contentKey: string) {
return `${CMS_BRANCH_PREFIX}/${contentKey}`;
}
async getMergeRequests(sourceBranch?: string) {
const mergeRequests: GitLabMergeRequest[] = await this.requestJSON({
url: `${this.repoURL}/merge_requests`,
@ -555,7 +544,7 @@ export default class API {
async retrieveMetadata(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = this.branchFromContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
const diff = await this.getDifferences(mergeRequest.sha);
const { old_path: path, new_file: newFile } = diff.find(d => !d.binary) as {
@ -646,8 +635,8 @@ export default class API {
}
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(options.collectionName as string, entry.slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
const items = await this.getCommitItems(files, this.branch);
@ -694,8 +683,8 @@ export default class API {
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const contentKey = this.generateContentKey(collection, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
const labels = [
@ -722,8 +711,8 @@ export default class API {
}
async publishUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
await this.mergeMergeRequest(mergeRequest);
}
@ -747,8 +736,8 @@ export default class API {
}
async deleteUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
await this.closeMergeRequest(mergeRequest);
await this.deleteBranch(branch);
@ -765,8 +754,8 @@ export default class API {
}
async getStatuses(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = this.branchFromContentKey(contentKey);
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
const statuses: GitLabCommitStatus[] = await this.getMergeRequestStatues(mergeRequest, branch);
// eslint-disable-next-line @typescript-eslint/camelcase

View File

@ -27,6 +27,8 @@ import {
runWithLock,
getBlobSHA,
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
@ -297,7 +299,7 @@ export default class GitLab implements Implementation {
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => this.api!.contentKeyFromBranch(branch)),
branches.map(branch => contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
@ -314,7 +316,7 @@ export default class GitLab implements Implementation {
this.loadEntryMediaFiles(branch, files),
} = {},
) {
const contentKey = this.api!.generateContentKey(collection, slug);
const contentKey = generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,

View File

@ -41,7 +41,7 @@ export const readFile = async (
const content = await fetchContent();
if (key) {
localForage.setItem(key, content);
await localForage.setItem(key, content);
}
return content;
};
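
The final hunk awaits the localForage write so the cache entry is guaranteed to exist before readFile resolves. For context, a simplified sketch of an equivalent cached-read helper under that assumption (hypothetical name and signature, not the library's actual readFile):

import localForage from 'localforage';

// Simplified sketch: check the cache, fall back to fetching, then persist the result.
const cachedRead = async (key: string | null, fetchContent: () => Promise<string>) => {
  if (key) {
    const cached = await localForage.getItem<string>(key);
    if (cached) {
      return cached;
    }
  }
  const content = await fetchContent();
  if (key) {
    // awaiting ensures the write has completed before the caller proceeds
    await localForage.setItem(key, content);
  }
  return content;
};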