Feat: editorial workflow bitbucket gitlab (#3014)

* refactor: typescript the backends

* feat: support multiple files upload for GitLab and BitBucket

* fix: load entry media files from media folder or UI state

* chore: cleanup log message

* chore: code cleanup

* refactor: typescript the test backend

* refactor: cleanup getEntry unused variables

* refactor: moved shared backend code to lib util

* chore: rename files to preserve history

* fix: bind readFile method to API classes

* test(e2e): switch to chrome in cypress tests

* refactor: extract common api methods

* refactor: remove most of immutable js usage from backends

* feat(backend-gitlab): initial editorial workflow support

* feat(backend-gitlab): implement missing workflow methods

* chore: fix lint error

* feat(backend-gitlab): support files deletion

* test(e2e): add gitlab cypress tests

* feat(backend-bitbucket): implement missing editorial workflow methods

* test(e2e): add BitBucket backend e2e tests

* build: update node version to 12 on netlify builds

* fix(backend-bitbucket): extract BitBucket avatar url

* test: fix git-gateway AuthenticationPage test

* test(e2e): fix some backend tests

* test(e2e): fix tests

* test(e2e): add git-gateway editorial workflow test

* chore: code cleanup

* test(e2e): revert back to electron

* test(e2e): add non editorial workflow tests

* fix(git-gateway-gitlab): don't call unpublishedEntry in simple workflow

gitlab git-gateway doesn't support editorial workflow APIs yet. This change makes sure not to call them in simple workflow

* refactor(backend-bitbucket): switch to diffstat API instead of raw diff

* chore: fix test

* test(e2e): add more git-gateway tests

* fix: post rebase typescript fixes

* test(e2e): fix tests

* fix: fix parsing of content key and add tests

* refactor: rename test file

* test(unit): add getStatuses unit tests

* chore: update cypress

* docs: update beta docs
This commit is contained in:
Erez Rokah
2020-01-15 00:15:14 +02:00
committed by Shawn Erquhart
parent 4ff5bc2ee0
commit 6f221ab3c1
251 changed files with 70910 additions and 15974 deletions

View File

@ -17,9 +17,10 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"dependencies": {
"common-tags": "^1.8.0",
"js-base64": "^2.5.1",
"semaphore": "^1.1.0"
},

View File

@ -1,193 +0,0 @@
import { flow, get } from 'lodash';
import {
localForage,
unsentRequest,
responseParser,
then,
basename,
Cursor,
APIError,
} from 'netlify-cms-lib-util';
// BitBucket Cloud REST API v2 client (pre-TypeScript version, read-only
// plus simple-workflow writes). All methods build declarative request
// descriptions and pipe them through `this.requestFunction`.
export default class API {
  /**
   * @param {Object} config - may carry api_root, branch, repo,
   *   requestFunction, and an optional hasWriteAccess override.
   */
  constructor(config) {
    this.api_root = config.api_root || 'https://api.bitbucket.org/2.0';
    this.branch = config.branch || 'master';
    this.repo = config.repo || '';
    this.requestFunction = config.requestFunction || unsentRequest.performRequest;
    // Allow overriding this.hasWriteAccess
    this.hasWriteAccess = config.hasWriteAccess || this.hasWriteAccess;
    // Empty repoURL when no repo is configured; callers then hit the API root.
    this.repoURL = this.repo ? `/repositories/${this.repo}` : '';
  }

  // Prefix the request with the API root and stamp it (cache busting).
  buildRequest = req =>
    flow([unsentRequest.withRoot(this.api_root), unsentRequest.withTimestamp])(req);

  // Perform a request; any transport error is re-raised as an APIError
  // tagged with the backend name.
  request = req =>
    flow([
      this.buildRequest,
      this.requestFunction,
      p => p.catch(err => Promise.reject(new APIError(err.message, null, 'BitBucket'))),
    ])(req);

  // Request + parse the response body as JSON.
  requestJSON = req =>
    flow([
      unsentRequest.withDefaultHeaders({ 'Content-Type': 'application/json' }),
      this.request,
      then(responseParser({ format: 'json' })),
      p => p.catch(err => Promise.reject(new APIError(err.message, null, 'BitBucket'))),
    ])(req);

  // Request + parse the response body as plain text.
  requestText = req =>
    flow([
      unsentRequest.withDefaultHeaders({ 'Content-Type': 'text/plain' }),
      this.request,
      then(responseParser({ format: 'text' })),
      p => p.catch(err => Promise.reject(new APIError(err.message, null, 'BitBucket'))),
    ])(req);

  // Currently authenticated user.
  user = () => this.requestJSON('/user');

  // True when the token can see the repo; 404 means the repo is missing.
  hasWriteAccess = async () => {
    const response = await this.request(this.repoURL);
    if (response.status === 404) {
      throw Error('Repo not found');
    }
    return response.ok;
  };

  // SHA of the tip commit of the configured branch.
  branchCommitSha = async () => {
    const {
      target: { hash: branchSha },
    } = await this.requestJSON(`${this.repoURL}/refs/branches/${this.branch}`);
    return branchSha;
  };

  // The src listing mixes files and directories; keep only files.
  isFile = ({ type }) => type === 'commit_file';

  processFile = file => ({
    ...file,
    name: basename(file.path),
    // BitBucket does not return file SHAs, but it does give us the
    // commit SHA. Since the commit SHA will change if any files do,
    // we can construct an ID using the commit SHA and the file path
    // that will help with caching (though not as well as a normal
    // SHA, since it will change even if the individual file itself
    // doesn't.)
    ...(file.commit && file.commit.hash ? { id: `${file.commit.hash}/${file.path}` } : {}),
  });

  processFiles = files => files.filter(this.isFile).map(this.processFile);

  /**
   * Read a file's content at the current branch tip, caching by `sha`
   * in localForage when a sha is provided.
   */
  readFile = async (path, sha, { parseText = true } = {}) => {
    // Separate cache namespaces for text vs. blob parses of the same sha.
    const cacheKey = parseText ? `bb.${sha}` : `bb.${sha}.blob`;
    const cachedFile = sha ? await localForage.getItem(cacheKey) : null;
    if (cachedFile) {
      return cachedFile;
    }
    const node = await this.branchCommitSha();
    const result = await this.request({
      url: `${this.repoURL}/src/${node}/${path}`,
      cache: 'no-store',
    }).then(parseText ? responseParser({ format: 'text' }) : responseParser({ format: 'blob' }));
    if (sha) {
      localForage.setItem(cacheKey, result);
    }
    return result;
  };

  // Convert a paginated src listing into { entries, cursor } where the
  // cursor carries next/prev links for traverseCursor.
  getEntriesAndCursor = jsonResponse => {
    const {
      size: count,
      page: index,
      pagelen: pageSize,
      next,
      previous: prev,
      values: entries,
    } = jsonResponse;
    const pageCount = pageSize && count ? Math.ceil(count / pageSize) : undefined;
    return {
      entries,
      cursor: Cursor.create({
        actions: [...(next ? ['next'] : []), ...(prev ? ['prev'] : [])],
        meta: { index, count, pageSize, pageCount },
        data: { links: { next, prev } },
      }),
    };
  };

  // First page of files under `path`, up to `depth` directory levels.
  listFiles = async (path, depth = 1) => {
    const node = await this.branchCommitSha();
    const { entries, cursor } = await flow([
      // sort by path ('-path'; NOTE(review): the '-' prefix suggests
      // descending, not ascending — confirm against BitBucket sort docs)
      unsentRequest.withParams({ sort: '-path', max_depth: depth }),
      this.requestJSON,
      then(this.getEntriesAndCursor),
    ])(`${this.repoURL}/src/${node}/${path}`);
    return { entries: this.processFiles(entries), cursor };
  };

  // Follow a pagination link ('next'/'prev') stored in the cursor data.
  traverseCursor = async (cursor, action) =>
    flow([
      this.requestJSON,
      then(this.getEntriesAndCursor),
      then(({ cursor: newCursor, entries }) => ({
        cursor: newCursor,
        entries: this.processFiles(entries),
      })),
    ])(cursor.data.getIn(['links', action]));

  // Exhaustively walk pagination and return every file under `path`.
  listAllFiles = async (path, depth = 1) => {
    const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(path, depth);
    const entries = [...initialEntries];
    let currentCursor = initialCursor;
    while (currentCursor && currentCursor.actions.has('next')) {
      const { cursor: newCursor, entries: newEntries } = await this.traverseCursor(
        currentCursor,
        'next',
      );
      entries.push(...newEntries);
      currentCursor = newCursor;
    }
    return this.processFiles(entries);
  };

  // Commit a single file via the multipart /src endpoint.
  uploadBlob = (item, { commitMessage, branch = this.branch } = {}) => {
    const contentBlob = get(item, 'fileObj', new Blob([item.raw]));
    const formData = new FormData();
    // Third param is filename header, in case path is `message`, `branch`, etc.
    formData.append(item.path, contentBlob, basename(item.path));
    formData.append('branch', branch);
    if (commitMessage) {
      formData.append('message', commitMessage);
    }
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      formData.append('author', `${name} <${email}>`);
    }
    return flow([
      unsentRequest.withMethod('POST'),
      unsentRequest.withBody(formData),
      this.request,
      then(() => ({ ...item })),
    ])(`${this.repoURL}/src`);
  };

  // NOTE(review): each file becomes its own commit here (one POST per
  // file), unlike the TS rewrite which batches them — confirm intended.
  persistFiles = (files, { commitMessage }) =>
    Promise.all(files.map(file => this.uploadBlob(file, { commitMessage })));

  // Delete a file by committing a 'files' form field to /src.
  deleteFile = (path, message, { branch = this.branch } = {}) => {
    const body = new FormData();
    body.append('files', path);
    body.append('branch', branch);
    if (message) {
      body.append('message', message);
    }
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      body.append('author', `${name} <${email}>`);
    }
    return flow([unsentRequest.withMethod('POST'), unsentRequest.withBody(body), this.request])(
      `${this.repoURL}/src`,
    );
  };
}

View File

@ -0,0 +1,695 @@
import { flow, get } from 'lodash';
import {
localForage,
unsentRequest,
responseParser,
then,
basename,
Cursor,
APIError,
ApiRequest,
AssetProxy,
Entry,
PersistOptions,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
labelToStatus,
isCMSLabel,
EditorialWorkflowError,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
PreviewState,
FetchError,
parseContentKey,
} from 'netlify-cms-lib-util';
import { oneLine } from 'common-tags';
// Constructor options for the BitBucket API client.
interface Config {
  apiRoot?: string;
  token?: string;
  branch?: string;
  repo?: string;
  requestFunction?: (req: ApiRequest) => Promise<Response>;
  hasWriteAccess?: () => Promise<boolean>;
  squashMerges: boolean;
  initialWorkflowStatus: string;
}

interface CommitAuthor {
  name: string;
  email: string;
}

// Pull request lifecycle states as returned by the BitBucket API.
enum BitBucketPullRequestState {
  MERGED = 'MERGED',
  SUPERSEDED = 'SUPERSEDED',
  OPEN = 'OPEN',
  DECLINED = 'DECLINED',
}

// Shape of a single pull request object (only the fields this client reads).
type BitBucketPullRequest = {
  description: string;
  id: number;
  title: string;
  state: BitBucketPullRequestState;
  summary: {
    raw: string;
  };
  source: {
    commit: {
      hash: string;
    };
    branch: {
      name: string;
    };
  };
  destination: {
    commit: {
      hash: string;
    };
    branch: {
      name: string;
    };
  };
};

// Paginated pull request listing.
type BitBucketPullRequests = {
  size: number;
  page: number;
  pagelen: number;
  next: string;
  preview: string;
  values: BitBucketPullRequest[];
};

type BitBucketPullComment = {
  content: {
    raw: string;
  };
};

// Paginated pull request comment listing.
type BitBucketPullComments = {
  size: number;
  page: number;
  pagelen: number;
  next: string;
  preview: string;
  values: BitBucketPullComment[];
};

// Commit/build status states reported on a pull request.
enum BitBucketPullRequestStatusState {
  Successful = 'SUCCESSFUL',
  Failed = 'FAILED',
  InProgress = 'INPROGRESS',
  Stopped = 'STOPPED',
}

type BitBucketPullRequestStatus = {
  uuid: string;
  name: string;
  key: string;
  refname: string;
  url: string;
  description: string;
  state: BitBucketPullRequestStatusState;
};

// Paginated status listing. NOTE(review): "Statues" looks like a typo for
// "Statuses"; renaming would touch every reference, so left as-is here.
type BitBucketPullRequestStatues = {
  size: number;
  page: number;
  pagelen: number;
  next: string;
  preview: string;
  values: BitBucketPullRequestStatus[];
};

// Result of the diffstat endpoint; `new.path` is the post-change path.
type BitBucketDiffStat = {
  pagelen: number;
  page: number;
  size: number;
  values: {
    status: string;
    lines_removed: number;
    lines_added: number;
    new: {
      path: string;
      type: 'commit_file';
    };
  }[];
};

// Marker object used in uploadFiles to request deletion of a path.
type DeleteEntry = {
  path: string;
  delete: true;
};

type BitBucketFile = {
  id: string;
  type: string;
  path: string;
  commit?: { hash: string };
};

// Paginated /src listing result.
type BitBucketSrcResult = {
  size: number;
  page: number;
  pagelen: number;
  next: string;
  previous: string;
  values: BitBucketFile[];
};

type BitBucketUser = {
  username: string;
  display_name: string;
  nickname: string;
  links: {
    avatar: {
      href: string;
    };
  };
};

export const API_NAME = 'BitBucket';

const APPLICATION_JSON = 'application/json; charset=utf-8';

// Listing a path that does not exist yet (e.g. an empty media folder)
// returns 404; treat that as an empty listing instead of an error.
const replace404WithEmptyResponse = (err: FetchError) => {
  if (err && err.status === 404) {
    console.log('This 404 was expected and handled appropriately.');
    return { size: 0, values: [] as BitBucketFile[] } as BitBucketSrcResult;
  } else {
    return Promise.reject(err);
  }
};
// BitBucket Cloud REST API v2 client with editorial-workflow support.
// Workflow entries live on `cms/<collection>/<slug>` branches with an open
// PR back to the default branch; the entry's status is tracked as the most
// recent CMS label comment on that PR (BitBucket has no PR labels).
export default class API {
  apiRoot: string;
  branch: string;
  repo: string;
  requestFunction: (req: ApiRequest) => Promise<Response>;
  repoURL: string;
  commitAuthor?: CommitAuthor;
  mergeStrategy: string;
  initialWorkflowStatus: string;

  constructor(config: Config) {
    this.apiRoot = config.apiRoot || 'https://api.bitbucket.org/2.0';
    this.branch = config.branch || 'master';
    this.repo = config.repo || '';
    this.requestFunction = config.requestFunction || unsentRequest.performRequest;
    // Allow overriding this.hasWriteAccess
    this.hasWriteAccess = config.hasWriteAccess || this.hasWriteAccess;
    // Empty repoURL when no repo is configured; callers then hit the API root.
    this.repoURL = this.repo ? `/repositories/${this.repo}` : '';
    this.mergeStrategy = config.squashMerges ? 'squash' : 'merge_commit';
    this.initialWorkflowStatus = config.initialWorkflowStatus;
  }

  // Prefix the request with the API root and stamp it (cache busting).
  buildRequest = (req: ApiRequest) =>
    flow([unsentRequest.withRoot(this.apiRoot), unsentRequest.withTimestamp])(req);

  // Perform a request; transport errors are re-raised as APIError.
  request = (req: ApiRequest): Promise<Response> =>
    flow([
      this.buildRequest,
      this.requestFunction,
      p => p.catch((err: Error) => Promise.reject(new APIError(err.message, null, API_NAME))),
    ])(req);

  responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
  responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
  responseToText = responseParser({ format: 'text', apiName: API_NAME });

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
  requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;

  // Currently authenticated user.
  user = () => this.requestJSON('/user') as Promise<BitBucketUser>;

  // True when the token can see the repo; 404 means the repo is missing.
  hasWriteAccess = async () => {
    const response = await this.request(this.repoURL);
    if (response.status === 404) {
      throw Error('Repo not found');
    }
    return response.ok;
  };

  // SHA of the tip commit of the given branch.
  branchCommitSha = async (branch: string) => {
    const {
      target: { hash: branchSha },
    } = await this.requestJSON(`${this.repoURL}/refs/branches/${branch}`);
    return branchSha as string;
  };

  // The src listing mixes files and directories; keep only files.
  isFile = ({ type }: BitBucketFile) => type === 'commit_file';

  processFile = (file: BitBucketFile) => ({
    id: file.id,
    type: file.type,
    path: file.path,
    name: basename(file.path),
    // BitBucket does not return file SHAs, but it does give us the
    // commit SHA. Since the commit SHA will change if any files do,
    // we can construct an ID using the commit SHA and the file path
    // that will help with caching (though not as well as a normal
    // SHA, since it will change even if the individual file itself
    // doesn't.)
    ...(file.commit && file.commit.hash ? { id: `${file.commit.hash}/${file.path}` } : {}),
  });

  processFiles = (files: BitBucketFile[]) => files.filter(this.isFile).map(this.processFile);

  /**
   * Read a file's content at the given branch tip. Caching by `sha` is
   * delegated to the shared lib-util `readFile` helper.
   */
  readFile = async (
    path: string,
    sha?: string | null,
    { parseText = true, branch = this.branch } = {},
  ): Promise<string | Blob> => {
    const fetchContent = async () => {
      const node = await this.branchCommitSha(branch);
      const content = await this.request({
        url: `${this.repoURL}/src/${node}/${path}`,
        cache: 'no-store',
      }).then<string | Blob>(parseText ? this.responseToText : this.responseToBlob);
      return content;
    };
    const content = await readFile(sha, fetchContent, localForage, parseText);
    return content;
  };

  // Convert a paginated src listing into { entries, cursor } where the
  // cursor carries next/prev links for traverseCursor.
  getEntriesAndCursor = (jsonResponse: BitBucketSrcResult) => {
    const {
      size: count,
      page: index,
      pagelen: pageSize,
      next,
      previous: prev,
      values: entries,
    } = jsonResponse;
    const pageCount = pageSize && count ? Math.ceil(count / pageSize) : undefined;
    return {
      entries,
      cursor: Cursor.create({
        actions: [...(next ? ['next'] : []), ...(prev ? ['prev'] : [])],
        meta: { index, count, pageSize, pageCount },
        data: { links: { next, prev } },
      }),
    };
  };

  // First page of files under `path` on the default branch, up to `depth`
  // directory levels; a missing path yields an empty listing (404 handler).
  listFiles = async (path: string, depth = 1) => {
    const node = await this.branchCommitSha(this.branch);
    const result: BitBucketSrcResult = await this.requestJSON({
      url: `${this.repoURL}/src/${node}/${path}`,
      params: {
        // sort by path ('-path'; NOTE(review): the '-' prefix suggests
        // descending, not ascending — confirm against BitBucket sort docs)
        sort: '-path',
        // eslint-disable-next-line @typescript-eslint/camelcase
        max_depth: depth,
      },
    }).catch(replace404WithEmptyResponse);
    const { entries, cursor } = this.getEntriesAndCursor(result);
    return { entries: this.processFiles(entries), cursor: cursor as Cursor };
  };

  // Follow a pagination link ('next'/'prev') stored in the cursor data.
  traverseCursor = async (
    cursor: Cursor,
    action: string,
  ): Promise<{
    cursor: Cursor;
    entries: { path: string; name: string; type: string; id: string }[];
  }> =>
    flow([
      this.requestJSON,
      then(this.getEntriesAndCursor),
      then<
        { cursor: Cursor; entries: BitBucketFile[] },
        { cursor: Cursor; entries: BitBucketFile[] }
      >(({ cursor: newCursor, entries }) => ({
        cursor: newCursor,
        entries: this.processFiles(entries),
      })),
    ])(cursor.data!.getIn(['links', action]));

  // Exhaustively walk pagination and return every file under `path`.
  listAllFiles = async (path: string, depth = 1) => {
    const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(path, depth);
    const entries = [...initialEntries];
    let currentCursor = initialCursor;
    while (currentCursor && currentCursor.actions!.has('next')) {
      const { cursor: newCursor, entries: newEntries } = await this.traverseCursor(
        currentCursor,
        'next',
      );
      entries.push(...newEntries);
      currentCursor = newCursor;
    }
    return this.processFiles(entries);
  };

  /**
   * Commit adds/modifications/deletions of multiple files in a single
   * multipart POST to /src (one commit). Passing `parentSha` creates the
   * branch from that commit when it does not exist yet.
   */
  async uploadFiles(
    files: (Entry | AssetProxy | DeleteEntry)[],
    {
      commitMessage,
      branch,
      parentSha,
    }: { commitMessage: string; branch: string; parentSha?: string },
  ) {
    const formData = new FormData();
    files.forEach(file => {
      if ((file as DeleteEntry).delete) {
        // delete the file
        formData.append('files', file.path);
      } else {
        // add/modify the file
        const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
        // Third param is filename header, in case path is `message`, `branch`, etc.
        formData.append(file.path, contentBlob, basename(file.path));
      }
    });
    if (commitMessage) {
      formData.append('message', commitMessage);
    }
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      formData.append('author', `${name} <${email}>`);
    }
    formData.append('branch', branch);
    if (parentSha) {
      formData.append('parents', parentSha);
    }
    await this.request({
      url: `${this.repoURL}/src`,
      method: 'POST',
      body: formData,
    });
    return files;
  }

  // Entry point for saving an entry: simple workflow commits straight to
  // the default branch; editorial workflow goes through a CMS branch + PR.
  async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
    const files = entry ? [entry, ...mediaFiles] : mediaFiles;
    if (options.useWorkflow) {
      return this.editorialWorkflowGit(files, entry as Entry, options);
    } else {
      return this.uploadFiles(files, { commitMessage: options.commitMessage, branch: this.branch });
    }
  }

  async addPullRequestComment(pullRequest: BitBucketPullRequest, comment: string) {
    await this.requestJSON({
      method: 'POST',
      url: `${this.repoURL}/pullrequests/${pullRequest.id}/comments`,
      headers: { 'Content-Type': APPLICATION_JSON },
      body: JSON.stringify({
        content: {
          raw: comment,
        },
      }),
    });
  }

  // The entry's workflow status is the latest comment on the PR.
  // NOTE(review): assumes status comments never exceed one page (pagelen
  // 100) and that the last value is the newest — confirm API ordering.
  async getPullRequestLabel(id: number) {
    const comments: BitBucketPullComments = await this.requestJSON({
      url: `${this.repoURL}/pullrequests/${id}/comments`,
      params: {
        pagelen: 100,
      },
    });
    return comments.values.map(c => c.content.raw)[comments.values.length - 1];
  }

  // Open a PR from the CMS branch into the default branch and record the
  // initial status as a label comment.
  async createPullRequest(branch: string, commitMessage: string, status: string) {
    const pullRequest: BitBucketPullRequest = await this.requestJSON({
      method: 'POST',
      url: `${this.repoURL}/pullrequests`,
      headers: { 'Content-Type': APPLICATION_JSON },
      body: JSON.stringify({
        title: commitMessage,
        source: {
          branch: {
            name: branch,
          },
        },
        destination: {
          branch: {
            name: this.branch,
          },
        },
        description: DEFAULT_PR_BODY,
        // eslint-disable-next-line @typescript-eslint/camelcase
        close_source_branch: true,
      }),
    });
    // use comments for status labels
    await this.addPullRequestComment(pullRequest, statusToLabel(status));
  }

  // Diffstat of `branch` against the default branch (files changed on the
  // CMS branch). Single page of up to 100 entries.
  async getDifferences(branch: string) {
    const diff: BitBucketDiffStat = await this.requestJSON({
      url: `${this.repoURL}/diffstat/${branch}..${this.branch}`,
      params: {
        pagelen: 100,
      },
    });
    return diff.values;
  }

  /**
   * Persist an entry under editorial workflow. First save creates the CMS
   * branch (forked from the default branch tip) and opens the PR; later
   * saves commit on top and delete any previously-committed files that are
   * no longer part of the entry.
   */
  async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
    const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
    const branch = this.branchFromContentKey(contentKey);
    const unpublished = options.unpublished || false;
    if (!unpublished) {
      const defaultBranchSha = await this.branchCommitSha(this.branch);
      await this.uploadFiles(files, {
        commitMessage: options.commitMessage,
        branch,
        parentSha: defaultBranchSha,
      });
      await this.createPullRequest(
        branch,
        options.commitMessage,
        options.status || this.initialWorkflowStatus,
      );
    } else {
      // mark files for deletion
      const diffs = await this.getDifferences(branch);
      const toDelete: DeleteEntry[] = [];
      for (const diff of diffs) {
        if (!files.some(file => file.path === diff.new.path)) {
          toDelete.push({ path: diff.new.path, delete: true });
        }
      }
      await this.uploadFiles([...files, ...toDelete], {
        commitMessage: options.commitMessage,
        branch,
      });
    }
  }

  // Delete a file from the default branch via the multipart /src endpoint.
  deleteFile = (path: string, message: string) => {
    const body = new FormData();
    body.append('files', path);
    body.append('branch', this.branch);
    if (message) {
      body.append('message', message);
    }
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      body.append('author', `${name} <${email}>`);
    }
    return flow([unsentRequest.withMethod('POST'), unsentRequest.withBody(body), this.request])(
      `${this.repoURL}/src`,
    );
  };

  // Content key <-> branch name helpers (branch is `cms/<contentKey>`).
  generateContentKey(collectionName: string, slug: string) {
    return generateContentKey(collectionName, slug);
  }

  contentKeyFromBranch(branch: string) {
    return branch.substring(`${CMS_BRANCH_PREFIX}/`.length);
  }

  branchFromContentKey(contentKey: string) {
    return `${CMS_BRANCH_PREFIX}/${contentKey}`;
  }

  // True when `path` exists on `branch`; a 404 from readFile means "no".
  async isFileExists(path: string, branch: string) {
    const fileExists = await this.readFile(path, null, { branch })
      .then(() => true)
      .catch(error => {
        if (error instanceof APIError && error.status === 404) {
          return false;
        }
        throw error;
      });
    return fileExists;
  }

  /**
   * Open CMS pull requests into the default branch, optionally restricted
   * to a single source branch, and filtered to PRs whose latest comment is
   * a CMS status label.
   */
  async getPullRequests(sourceBranch?: string) {
    const sourceQuery = sourceBranch
      ? `source.branch.name = "${sourceBranch}"`
      : `source.branch.name ~ "${CMS_BRANCH_PREFIX}/"`;
    const pullRequests: BitBucketPullRequests = await this.requestJSON({
      url: `${this.repoURL}/pullrequests`,
      params: {
        pagelen: 50,
        q: oneLine`
        source.repository.full_name = "${this.repo}"
        AND state = "${BitBucketPullRequestState.OPEN}"
        AND destination.branch.name = "${this.branch}"
        AND comment_count > 0
        AND ${sourceQuery}
        `,
      },
    });
    const labels = await Promise.all(
      pullRequests.values.map(pr => this.getPullRequestLabel(pr.id)),
    );
    return pullRequests.values.filter((_, index) => isCMSLabel(labels[index]));
  }

  // The single open CMS PR for a branch; absence means the entry is not
  // under editorial workflow.
  async getBranchPullRequest(branch: string) {
    const pullRequests = await this.getPullRequests(branch);
    if (pullRequests.length <= 0) {
      throw new EditorialWorkflowError('content is not under editorial workflow', true);
    }
    return pullRequests[0];
  }

  /**
   * Reconstruct entry metadata from the branch's PR and diffstat: the
   * entry file is the changed path containing the slug; every other
   * changed file is treated as a media file.
   */
  async retrieveMetadata(contentKey: string) {
    const { collection, slug } = parseContentKey(contentKey);
    const branch = this.branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    const diff = await this.getDifferences(branch);
    const path = diff.find(d => d.new.path.includes(slug))?.new.path as string;
    // TODO: get real file id
    const mediaFiles = await Promise.all(
      diff.filter(d => d.new.path !== path).map(d => ({ path: d.new.path, id: null })),
    );
    const label = await this.getPullRequestLabel(pullRequest.id);
    const status = labelToStatus(label);
    return { branch, collection, slug, path, status, mediaFiles };
  }

  // Load an unpublished entry's content plus whether it modifies an
  // already-published file (exists on the default branch) or creates one.
  async readUnpublishedBranchFile(contentKey: string) {
    const { branch, collection, slug, path, status, mediaFiles } = await this.retrieveMetadata(
      contentKey,
    );
    const [fileData, isModification] = await Promise.all([
      this.readFile(path, null, { branch }) as Promise<string>,
      this.isFileExists(path, this.branch),
    ]);
    return {
      slug,
      metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status },
      fileData,
      isModification,
    };
  }

  // Source branches of all open CMS pull requests.
  async listUnpublishedBranches() {
    console.log(
      '%c Checking for Unpublished entries',
      'line-height: 30px;text-align: center;font-weight: bold',
    );
    const pullRequests = await this.getPullRequests();
    const branches = pullRequests.map(mr => mr.source.branch.name);
    return branches;
  }

  // Record a status change as a new label comment on the entry's PR.
  async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
    const contentKey = this.generateContentKey(collection, slug);
    const branch = this.branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    await this.addPullRequestComment(pullRequest, statusToLabel(newStatus));
  }

  // Merge using the configured strategy; the source branch is closed.
  async mergePullRequest(pullRequest: BitBucketPullRequest) {
    await this.requestJSON({
      method: 'POST',
      url: `${this.repoURL}/pullrequests/${pullRequest.id}/merge`,
      headers: { 'Content-Type': APPLICATION_JSON },
      body: JSON.stringify({
        message: MERGE_COMMIT_MESSAGE,
        // eslint-disable-next-line @typescript-eslint/camelcase
        close_source_branch: true,
        // eslint-disable-next-line @typescript-eslint/camelcase
        merge_strategy: this.mergeStrategy,
      }),
    });
  }

  // Publish = merge the entry's PR into the default branch.
  async publishUnpublishedEntry(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    await this.mergePullRequest(pullRequest);
  }

  async declinePullRequest(pullRequest: BitBucketPullRequest) {
    await this.requestJSON({
      method: 'POST',
      url: `${this.repoURL}/pullrequests/${pullRequest.id}/decline`,
    });
  }

  async deleteBranch(branch: string) {
    await this.request({
      method: 'DELETE',
      url: `${this.repoURL}/refs/branches/${branch}`,
    });
  }

  // Discard an unpublished entry: decline its PR, then delete its branch.
  async deleteUnpublishedEntry(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    await this.declinePullRequest(pullRequest);
    await this.deleteBranch(branch);
  }

  // Raw commit statuses (CI/deploy previews) attached to the PR.
  async getPullRequestStatuses(pullRequest: BitBucketPullRequest) {
    const statuses: BitBucketPullRequestStatues = await this.requestJSON({
      url: `${this.repoURL}/pullrequests/${pullRequest.id}/statuses`,
      params: {
        pagelen: 100,
      },
    });
    return statuses.values;
  }

  // Map PR statuses into the CMS preview-status shape; anything that is
  // not SUCCESSFUL is reported as "other".
  async getStatuses(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const pullRequest = await this.getBranchPullRequest(branch);
    const statuses = await this.getPullRequestStatuses(pullRequest);
    return statuses.map(({ key, state, url }) => ({
      context: key,
      state:
        state === BitBucketPullRequestStatusState.Successful
          ? PreviewState.Success
          : PreviewState.Other,
      // eslint-disable-next-line @typescript-eslint/camelcase
      target_url: url,
    }));
  }
}

View File

@ -1,6 +1,5 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { NetlifyAuthenticator, ImplicitAuthenticator } from 'netlify-cms-lib-auth';
import { AuthenticationPage, Icon } from 'netlify-cms-ui-default';
@ -16,22 +15,25 @@ export default class BitbucketAuthenticationPage extends React.Component {
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: ImmutablePropTypes.map,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
};
state = {};
componentDidMount() {
const authType = this.props.config.getIn(['backend', 'auth_type']);
const {
auth_type: authType = '',
base_url = 'https://bitbucket.org',
auth_endpoint = 'site/oauth2/authorize',
app_id = '',
} = this.props.config.backend;
if (authType === 'implicit') {
this.auth = new ImplicitAuthenticator({
base_url: this.props.config.getIn(['backend', 'base_url'], 'https://bitbucket.org'),
auth_endpoint: this.props.config.getIn(
['backend', 'auth_endpoint'],
'site/oauth2/authorize',
),
app_id: this.props.config.getIn(['backend', 'app_id']),
base_url,
auth_endpoint,
app_id,
clearHash: this.props.clearHash,
});
// Complete implicit authentication if we were redirected back to from the provider.
@ -75,8 +77,8 @@ export default class BitbucketAuthenticationPage extends React.Component {
onLogin={this.handleLogin}
loginDisabled={inProgress}
loginErrorMessage={this.state.loginError}
logoUrl={config.get('logo_url')}
siteUrl={config.get('site_url')}
logoUrl={config.logo_url}
siteUrl={config.site_url}
renderButtonContent={() => (
<React.Fragment>
<LoginButtonIcon type="bitbucket" />

View File

@ -0,0 +1,35 @@
import API from '../API';
// Any real network call inside a unit test is a bug — make fetch reject loudly.
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));

describe('bitbucket API', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });

  test('should get preview statuses', async () => {
    const api = new API({});

    // Stub out the two network-backed collaborators getStatuses relies on.
    const fakePullRequest = { id: 1 };
    const rawStatuses = [
      { key: 'deploy', state: 'SUCCESSFUL', url: 'deploy-url' },
      { key: 'build', state: 'FAILED' },
    ];
    api.getBranchPullRequest = jest.fn().mockResolvedValue(fakePullRequest);
    api.getPullRequestStatuses = jest.fn().mockResolvedValue(rawStatuses);

    const result = await api.getStatuses('posts', 'title');

    // SUCCESSFUL maps to 'success'; every other state maps to 'other'.
    expect(result).toEqual([
      { context: 'deploy', state: 'success', target_url: 'deploy-url' },
      { context: 'build', state: 'other' },
    ]);
    expect(api.getBranchPullRequest).toHaveBeenCalledTimes(1);
    expect(api.getBranchPullRequest).toHaveBeenCalledWith(`cms/posts/title`);
    expect(api.getPullRequestStatuses).toHaveBeenCalledTimes(1);
    expect(api.getPullRequestStatuses).toHaveBeenCalledWith(fakePullRequest);
  });
});

View File

@ -1,312 +0,0 @@
import semaphore from 'semaphore';
import { flow, trimStart } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
filterByPropExtension,
resolvePromiseProperties,
then,
unsentRequest,
basename,
getBlobSHA,
getCollectionDepth,
} from 'netlify-cms-lib-util';
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
import API from './API';
const MAX_CONCURRENT_DOWNLOADS = 10;
// Implementation wrapper class
export default class BitbucketBackend {
constructor(config, options = {}) {
this.config = config;
this.options = {
proxied: false,
API: null,
updateUserCredentials: async () => null,
...options,
};
if (this.options.useWorkflow) {
throw new Error('The BitBucket backend does not support the Editorial Workflow.');
}
if (!this.options.proxied && !config.getIn(['backend', 'repo'], false)) {
throw new Error('The BitBucket backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.updateUserCredentials = this.options.updateUserCredentials;
this.repo = config.getIn(['backend', 'repo'], '');
this.branch = config.getIn(['backend', 'branch'], 'master');
this.api_root = config.getIn(['backend', 'api_root'], 'https://api.bitbucket.org/2.0');
this.base_url = config.get('base_url');
this.site_id = config.get('site_id');
this.token = '';
}
authComponent() {
return AuthenticationPage;
}
setUser(user) {
this.token = user.token;
this.api = new API({
requestFunction: this.apiRequestFunction,
branch: this.branch,
repo: this.repo,
});
}
restoreUser(user) {
return this.authenticate(user);
}
async authenticate(state) {
this.token = state.token;
this.refreshToken = state.refresh_token;
this.api = new API({
requestFunction: this.apiRequestFunction,
branch: this.branch,
repo: this.repo,
api_root: this.api_root,
});
const isCollab = await this.api.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a Bitbucket account with access.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your BitBucket user account does not have access to this repo.');
}
const user = await this.api.user();
// Authorized user
return {
...user,
name: user.display_name,
login: user.username,
token: state.token,
refresh_token: state.refresh_token,
};
}
getRefreshedAccessToken() {
if (this.refreshedTokenPromise) {
return this.refreshedTokenPromise;
}
// instantiating a new Authenticator on each refresh isn't ideal,
if (!this.auth) {
const cfg = {
base_url: this.base_url,
site_id: this.site_id,
};
this.authenticator = new NetlifyAuthenticator(cfg);
}
this.refreshedTokenPromise = this.authenticator
.refresh({ provider: 'bitbucket', refresh_token: this.refreshToken })
.then(({ token, refresh_token }) => {
this.token = token;
this.refreshToken = refresh_token;
this.refreshedTokenPromise = undefined;
this.updateUserCredentials({ token, refresh_token });
return token;
});
return this.refreshedTokenPromise;
}
logout() {
this.token = null;
return;
}
getToken() {
if (this.refreshedTokenPromise) {
return this.refreshedTokenPromise;
}
return Promise.resolve(this.token);
}
// Request wrapper: attaches the bearer token, performs the request, and
// transparently retries once with a fresh token when BitBucket reports the
// current one as expired.
apiRequestFunction = async req => {
  const token = this.refreshedTokenPromise ? await this.refreshedTokenPromise : this.token;

  const retryOnExpiredToken = async response => {
    if (response.status !== 401) {
      return response;
    }
    const body = await response.json().catch(() => null);
    const expired =
      body && body.type === 'error' && /^access token expired/i.test(body.error.message);
    if (!expired) {
      return response;
    }
    const newToken = await this.getRefreshedAccessToken();
    const retriedReq = unsentRequest.withHeaders({ Authorization: `Bearer ${newToken}` }, req);
    return unsentRequest.performRequest(retriedReq);
  };

  return flow([
    unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }),
    unsentRequest.performRequest,
    then(retryOnExpiredToken),
  ])(req);
};
entriesByFolder(collection, extension) {
const listPromise = this.api.listFiles(
collection.get('folder'),
getCollectionDepth(collection),
);
return resolvePromiseProperties({
files: listPromise
.then(({ entries }) => entries)
.then(filterByPropExtension(extension, 'path'))
.then(this.fetchFiles),
cursor: listPromise.then(({ cursor }) => cursor),
}).then(({ files, cursor }) => {
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return files;
});
}
allEntriesByFolder(collection, extension) {
return this.api
.listAllFiles(collection.get('folder'), getCollectionDepth(collection))
.then(filterByPropExtension(extension, 'path'))
.then(this.fetchFiles);
}
entriesByFiles(collection) {
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
return this.fetchFiles(files);
}
// Downloads the given files, at most MAX_CONCURRENT_DOWNLOADS at a time.
// Failed downloads are logged and filtered out of the resolved list.
fetchFiles = files => {
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const downloads = files.map(
    file =>
      new Promise(resolve =>
        sem.take(() =>
          this.api
            .readFile(file.path, file.id)
            .then(data => {
              resolve({ file, data });
              sem.leave();
            })
            // Default to `true` so an undefined rejection still marks failure.
            .catch((error = true) => {
              sem.leave();
              console.error(`failed to load file from BitBucket: ${file.path}`);
              resolve({ error });
            }),
        ),
      ),
  );
  return Promise.all(downloads).then(results => results.filter(result => !result.error));
};
getEntry(collection, slug, path) {
return this.api.readFile(path).then(data => ({
file: { path },
data,
}));
}
getMedia(mediaFolder = this.config.get('media_folder')) {
return this.api
.listAllFiles(mediaFolder)
.then(files =>
files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
);
}
// Reads a media file as a binary Blob (parseText: false skips text decoding).
getMediaAsBlob(path, id) {
  return this.api.readFile(path, id, { parseText: false });
}
getMediaDisplayURL(displayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
const { id, path } = displayURL;
return new Promise((resolve, reject) =>
this._mediaDisplayURLSem.take(() =>
this.getMediaAsBlob(path, id)
.then(blob => URL.createObjectURL(blob))
.then(resolve, reject)
.finally(() => this._mediaDisplayURLSem.leave()),
),
);
}
async getMediaFile(path) {
const name = basename(path);
const blob = await this.getMediaAsBlob(path, null);
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
// Commits a single entry. `mediaFiles` is accepted for interface parity but
// unused here; media is persisted separately via persistMedia.
persistEntry(entry, mediaFiles, options = {}) {
  return this.api.persistFiles([entry], options);
}
async persistMedia(mediaFile, options = {}) {
const { fileObj } = mediaFile;
const [sha] = await Promise.all([
getBlobSHA(fileObj),
this.api.persistFiles([mediaFile], options),
]);
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path: trimStart(mediaFile.path, '/k'),
name: fileObj.name,
size: fileObj.size,
id: sha,
file: fileObj,
url,
};
}
// Deletes a single file from the repo with the given commit message.
deleteFile(path, commitMessage, options) {
  return this.api.deleteFile(path, commitMessage, options);
}
traverseCursor(cursor, action) {
return this.api.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => ({
entries: await Promise.all(
entries.map(file => this.api.readFile(file.path, file.id).then(data => ({ file, data }))),
),
cursor: newCursor,
}));
}
}

View File

@ -0,0 +1,457 @@
import semaphore, { Semaphore } from 'semaphore';
import { flow, trimStart } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
filterByPropExtension,
then,
unsentRequest,
basename,
getBlobSHA,
Entry,
ApiRequest,
Cursor,
AssetProxy,
PersistOptions,
DisplayURL,
Implementation,
entriesByFolder,
entriesByFiles,
User,
Credentials,
getMediaDisplayURL,
getMediaAsBlob,
Config,
ImplementationFile,
unpublishedEntries,
UnpublishedEntryMediaFile,
runWithLock,
AsyncLock,
asyncLock,
getPreviewStatus,
} from 'netlify-cms-lib-util';
import NetlifyAuthenticator from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
// Cap on simultaneous file downloads from the BitBucket API.
const MAX_CONCURRENT_DOWNLOADS = 10;
// Implementation wrapper class
// BitBucket backend for Netlify CMS. Wires authentication (including Netlify
// OAuth token refresh), media handling and the editorial workflow to the
// BitBucket REST API via the API class.
export default class BitbucketBackend implements Implementation {
  lock: AsyncLock;
  api: API | null;
  updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
  options: {
    proxied: boolean;
    API: API | null;
    updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
    initialWorkflowStatus: string;
  };
  repo: string;
  branch: string;
  apiRoot: string;
  baseUrl: string;
  siteId: string;
  token: string | null;
  mediaFolder: string;
  refreshToken?: string;
  refreshedTokenPromise?: Promise<string>;
  authenticator?: NetlifyAuthenticator;
  // NOTE(review): never assigned anywhere in this class; kept for
  // compatibility — candidate for removal.
  auth?: unknown;
  _mediaDisplayURLSem?: Semaphore;
  squashMerges: boolean;
  previewContext: string;

  constructor(config: Config, options = {}) {
    this.options = {
      proxied: false,
      API: null,
      updateUserCredentials: async () => null,
      initialWorkflowStatus: '',
      ...options,
    };

    // A repo is mandatory unless requests are proxied (e.g. via git-gateway).
    if (
      !this.options.proxied &&
      (config.backend.repo === null || config.backend.repo === undefined)
    ) {
      throw new Error('The BitBucket backend needs a "repo" in the backend configuration.');
    }

    this.api = this.options.API || null;

    this.updateUserCredentials = this.options.updateUserCredentials;

    this.repo = config.backend.repo || '';
    this.branch = config.backend.branch || 'master';
    this.apiRoot = config.backend.api_root || 'https://api.bitbucket.org/2.0';
    this.baseUrl = config.base_url || '';
    this.siteId = config.site_id || '';
    this.token = '';
    this.mediaFolder = config.media_folder;
    this.squashMerges = config.backend.squash_merges || false;
    this.previewContext = config.backend.preview_context || '';
    this.lock = asyncLock();
  }

  // React component rendered on the login screen.
  authComponent() {
    return AuthenticationPage;
  }

  // Stores the token and builds an authenticated API client.
  setUser(user: { token: string }) {
    this.token = user.token;
    this.api = new API({
      requestFunction: this.apiRequestFunction,
      branch: this.branch,
      repo: this.repo,
      squashMerges: this.squashMerges,
      initialWorkflowStatus: this.options.initialWorkflowStatus,
    });
  }

  restoreUser(user: User) {
    return this.authenticate(user);
  }

  // Exchanges credentials for a session, verifying write access to the repo.
  async authenticate(state: Credentials) {
    this.token = state.token as string;
    this.refreshToken = state.refresh_token;
    this.api = new API({
      requestFunction: this.apiRequestFunction,
      branch: this.branch,
      repo: this.repo,
      apiRoot: this.apiRoot,
      squashMerges: this.squashMerges,
      initialWorkflowStatus: this.options.initialWorkflowStatus,
    });

    const isCollab = await this.api.hasWriteAccess().catch(error => {
      error.message = stripIndent`
        Repo "${this.repo}" not found.
        Please ensure the repo information is spelled correctly.
        If the repo is private, make sure you're logged into a Bitbucket account with access.
      `;
      throw error;
    });

    // Unauthorized user
    if (!isCollab) {
      throw new Error('Your BitBucket user account does not have access to this repo.');
    }

    const user = await this.api.user();

    // Authorized user
    return {
      ...user,
      name: user.display_name,
      login: user.username,
      token: state.token,
      // eslint-disable-next-line @typescript-eslint/camelcase
      avatar_url: user.links.avatar.href,
      // eslint-disable-next-line @typescript-eslint/camelcase
      refresh_token: state.refresh_token,
    };
  }

  // Refreshes the OAuth access token via the Netlify authenticator. The
  // in-flight promise is cached so concurrent callers share one refresh.
  getRefreshedAccessToken() {
    if (this.refreshedTokenPromise) {
      return this.refreshedTokenPromise;
    }

    // Lazily create and cache the authenticator. The previous guard checked
    // the never-assigned `auth` field, so a fresh Authenticator was
    // instantiated on every refresh.
    if (!this.authenticator) {
      const cfg = {
        // eslint-disable-next-line @typescript-eslint/camelcase
        base_url: this.baseUrl,
        // eslint-disable-next-line @typescript-eslint/camelcase
        site_id: this.siteId,
      };
      this.authenticator = new NetlifyAuthenticator(cfg);
    }

    this.refreshedTokenPromise = this.authenticator! // eslint-disable-next-line @typescript-eslint/camelcase
      .refresh({ provider: 'bitbucket', refresh_token: this.refreshToken as string })
      // eslint-disable-next-line @typescript-eslint/camelcase
      .then(({ token, refresh_token }) => {
        this.token = token;
        // eslint-disable-next-line @typescript-eslint/camelcase
        this.refreshToken = refresh_token;
        this.refreshedTokenPromise = undefined;
        // Persist rotated credentials so future sessions can be restored.
        // eslint-disable-next-line @typescript-eslint/camelcase
        this.updateUserCredentials({ token, refresh_token });
        return token;
      });
    return this.refreshedTokenPromise;
  }

  logout() {
    this.token = null;
    return;
  }

  // Resolves with a usable token, waiting for any in-flight refresh first.
  getToken() {
    if (this.refreshedTokenPromise) {
      return this.refreshedTokenPromise;
    }
    return Promise.resolve(this.token);
  }

  // Request wrapper: attaches the bearer token and transparently retries a
  // request once with a fresh token when BitBucket reports it as expired.
  apiRequestFunction = async (req: ApiRequest) => {
    const token = (this.refreshedTokenPromise
      ? await this.refreshedTokenPromise
      : this.token) as string;

    return flow([
      unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }) as (
        req: ApiRequest,
      ) => ApiRequest,
      unsentRequest.performRequest,
      then(async (res: Response) => {
        if (res.status === 401) {
          const json = await res.json().catch(() => null);
          if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
            const newToken = await this.getRefreshedAccessToken();
            const reqWithNewToken = unsentRequest.withHeaders(
              {
                Authorization: `Bearer ${newToken}`,
              },
              req,
            ) as ApiRequest;
            return unsentRequest.performRequest(reqWithNewToken);
          }
        }
        return res;
      }),
    ])(req);
  };

  // Lists one page of entries in `folder`, exposing the pagination cursor.
  async entriesByFolder(folder: string, extension: string, depth: number) {
    let cursor: Cursor;

    const listFiles = () =>
      this.api!.listFiles(folder, depth).then(({ entries, cursor: c }) => {
        cursor = c;
        return filterByPropExtension(extension, 'path')(entries);
      });

    const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), 'BitBucket');

    // Stash the cursor on the array for legacy pagination consumers.
    // eslint-disable-next-line @typescript-eslint/ban-ts-ignore
    // @ts-ignore
    files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
    return files;
  }

  // Exhaustively lists every matching entry in `folder` (no pagination).
  async allEntriesByFolder(folder: string, extension: string, depth: number) {
    const listFiles = () =>
      this.api!.listAllFiles(folder, depth).then(filterByPropExtension(extension, 'path'));

    const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), 'BitBucket');
    return files;
  }

  async entriesByFiles(files: ImplementationFile[]) {
    return entriesByFiles(files, this.api!.readFile.bind(this.api!), 'BitBucket');
  }

  // Reads a single entry's raw content by path.
  getEntry(path: string) {
    return this.api!.readFile(path).then(data => ({
      file: { path, id: null },
      data: data as string,
    }));
  }

  // Lists all files in the media folder, shaped for the media library.
  getMedia(mediaFolder = this.mediaFolder) {
    return this.api!.listAllFiles(mediaFolder).then(files =>
      files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
    );
  }

  // Resolves a display URL descriptor to an object URL, throttled through a
  // lazily created semaphore shared across calls.
  getMediaDisplayURL(displayURL: DisplayURL) {
    this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
    return getMediaDisplayURL(
      displayURL,
      this.api!.readFile.bind(this.api!),
      this._mediaDisplayURLSem,
    );
  }

  // Loads a media file and wraps it in the shape the media library expects.
  async getMediaFile(path: string) {
    const name = basename(path);
    const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
    const fileObj = new File([blob], name);
    const url = URL.createObjectURL(fileObj);
    const id = await getBlobSHA(fileObj);

    return {
      id,
      displayURL: url,
      path,
      name,
      size: fileObj.size,
      file: fileObj,
      url,
    };
  }

  async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
    // persistEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.persistFiles(entry, mediaFiles, options),
      'Failed to acquire persist entry lock',
    );
  }

  // Uploads a media file; the returned id is the blob's git SHA, computed in
  // parallel with the upload itself.
  async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
    const fileObj = mediaFile.fileObj as File;

    const [id] = await Promise.all([
      getBlobSHA(fileObj),
      this.api!.persistFiles(null, [mediaFile], options),
    ]);

    const url = URL.createObjectURL(fileObj);

    return {
      displayURL: url,
      // Normalize to a repo-relative path. The previous char set ('/k') also
      // stripped leading 'k' characters, mangling paths like 'kitten.jpg'.
      path: trimStart(mediaFile.path, '/'),
      name: fileObj!.name,
      size: fileObj!.size,
      id,
      file: fileObj,
      url,
    };
  }

  deleteFile(path: string, commitMessage: string) {
    return this.api!.deleteFile(path, commitMessage);
  }

  // Follows a pagination cursor action and hydrates the resulting entries.
  traverseCursor(cursor: Cursor, action: string) {
    return this.api!.traverseCursor(cursor, action).then(
      async ({ entries, cursor: newCursor }) => ({
        entries: await Promise.all(
          entries.map(file =>
            this.api!.readFile(file.path, file.id).then(data => ({ file, data: data as string })),
          ),
        ),
        cursor: newCursor,
      }),
    );
  }

  // Loads a single media file from an unpublished entry's branch.
  loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
    const readFile = (
      path: string,
      id: string | null | undefined,
      { parseText }: { parseText: boolean },
    ) => this.api!.readFile(path, id, { branch, parseText });

    return getMediaAsBlob(file.path, null, readFile).then(blob => {
      const name = basename(file.path);
      const fileObj = new File([blob], name);
      return {
        id: file.path,
        displayURL: URL.createObjectURL(fileObj),
        path: file.path,
        name,
        size: fileObj.size,
        file: fileObj,
      };
    });
  }

  async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
    const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));

    return mediaFiles;
  }

  // Lists all entries currently in the editorial workflow.
  async unpublishedEntries() {
    const listEntriesKeys = () =>
      this.api!.listUnpublishedBranches().then(branches =>
        branches.map(branch => this.api!.contentKeyFromBranch(branch)),
      );

    const readUnpublishedBranchFile = (contentKey: string) =>
      this.api!.readUnpublishedBranchFile(contentKey);

    return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
  }

  // Loads one editorial-workflow entry plus the media files on its branch.
  async unpublishedEntry(
    collection: string,
    slug: string,
    {
      loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
        this.loadEntryMediaFiles(branch, files),
    } = {},
  ) {
    const contentKey = this.api!.generateContentKey(collection, slug);
    const data = await this.api!.readUnpublishedBranchFile(contentKey);
    const mediaFiles = await loadEntryMediaFiles(
      data.metaData.branch,
      // TODO: fix this
      // eslint-disable-next-line @typescript-eslint/ban-ts-ignore
      // @ts-ignore
      data.metaData.objects.entry.mediaFiles,
    );
    return {
      slug,
      file: { path: data.metaData.objects.entry.path, id: null },
      data: data.fileData as string,
      metaData: data.metaData,
      mediaFiles,
      isModification: data.isModification,
    };
  }

  async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
    // updateUnpublishedEntryStatus is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
      'Failed to acquire update entry status lock',
    );
  }

  async deleteUnpublishedEntry(collection: string, slug: string) {
    // deleteUnpublishedEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.deleteUnpublishedEntry(collection, slug),
      'Failed to acquire delete entry lock',
    );
  }

  async publishUnpublishedEntry(collection: string, slug: string) {
    // publishUnpublishedEntry is a transactional operation
    return runWithLock(
      this.lock,
      () => this.api!.publishUnpublishedEntry(collection, slug),
      'Failed to acquire publish entry lock',
    );
  }

  // Maps the branch's commit statuses to a deploy preview URL, if any.
  async getDeployPreview(collection: string, slug: string) {
    try {
      const statuses = await this.api!.getStatuses(collection, slug);
      const deployStatus = getPreviewStatus(statuses, this.previewContext);

      if (deployStatus) {
        const { target_url: url, state } = deployStatus;
        return { url, status: state };
      } else {
        return null;
      }
    } catch (e) {
      // Missing statuses are expected for entries without a deploy preview.
      return null;
    }
  }
}

View File

@ -0,0 +1,5 @@
// Minimal ambient typings for the `semaphore` npm package: only the `take`
// and `leave` members used by this codebase are declared.
declare module 'semaphore' {
  export type Semaphore = { take: (f: Function) => void; leave: () => void };
  const semaphore: (count: number) => Semaphore;
  export default semaphore;
}

View File

@ -18,7 +18,7 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"dependencies": {
"gotrue-js": "^0.9.24",
@ -29,7 +29,6 @@
"peerDependencies": {
"@emotion/core": "^10.0.9",
"@emotion/styled": "^10.0.9",
"immutable": "^3.7.6",
"lodash": "^4.17.11",
"netlify-cms-backend-bitbucket": "^2.3.0",
"netlify-cms-backend-github": "^2.4.0",

View File

@ -1,5 +1,4 @@
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import React from 'react';
import styled from '@emotion/styled';
import { partial } from 'lodash';
@ -66,6 +65,8 @@ if (window.netlifyIdentity) {
}
export default class GitGatewayAuthenticationPage extends React.Component {
static authClient;
constructor(props) {
super(props);
component = this;
@ -113,7 +114,7 @@ export default class GitGatewayAuthenticationPage extends React.Component {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool.isRequired,
error: PropTypes.node,
config: ImmutablePropTypes.map,
config: PropTypes.object.isRequired,
t: PropTypes.func.isRequired,
};
@ -162,7 +163,7 @@ export default class GitGatewayAuthenticationPage extends React.Component {
if (errors.identity) {
return (
<AuthenticationPage
logoUrl={config.get('logo_url')}
logoUrl={config.logo_url}
onLogin={this.handleIdentity}
renderPageContent={() => (
<a
@ -178,7 +179,7 @@ export default class GitGatewayAuthenticationPage extends React.Component {
} else {
return (
<AuthenticationPage
logoUrl={config.get('logo_url')}
logoUrl={config.logo_url}
onLogin={this.handleIdentity}
renderButtonContent={() => t('auth.loginWithNetlifyIdentity')}
/>
@ -188,8 +189,8 @@ export default class GitGatewayAuthenticationPage extends React.Component {
return (
<AuthenticationPage
logoUrl={config.get('logo_url')}
siteUrl={config.get('site_url')}
logoUrl={config.logo_url}
siteUrl={config.site_url}
renderPageContent={() => (
<AuthForm onSubmit={this.handleLogin}>
{!error ? null : <ErrorMessage>{error}</ErrorMessage>}

View File

@ -1,10 +1,20 @@
import { API as GithubAPI } from 'netlify-cms-backend-github';
import { APIError } from 'netlify-cms-lib-util';
import { Config as GitHubConfig } from 'netlify-cms-backend-github/src/API';
import { APIError, FetchError } from 'netlify-cms-lib-util';
type Config = GitHubConfig & {
apiRoot: string;
tokenPromise: () => Promise<string>;
commitAuthor: { name: string };
};
export default class API extends GithubAPI {
constructor(config) {
tokenPromise: () => Promise<string>;
commitAuthor: { name: string };
constructor(config: Config) {
super(config);
this.api_root = config.api_root;
this.apiRoot = config.apiRoot;
this.tokenPromise = config.tokenPromise;
this.commitAuthor = config.commitAuthor;
this.repoURL = '';
@ -14,7 +24,7 @@ export default class API extends GithubAPI {
hasWriteAccess() {
return this.getDefaultBranch()
.then(() => true)
.catch(error => {
.catch((error: FetchError) => {
if (error.status === 401) {
if (error.message === 'Bad credentials') {
throw new APIError(
@ -53,16 +63,21 @@ export default class API extends GithubAPI {
});
}
handleRequestError(error, responseStatus) {
handleRequestError(error: FetchError & { msg: string }, responseStatus: number) {
throw new APIError(error.message || error.msg, responseStatus, 'Git Gateway');
}
user() {
return Promise.resolve(this.commitAuthor);
return Promise.resolve({ login: '', ...this.commitAuthor });
}
commit(message, changeTree) {
const commitParams = {
commit(message: string, changeTree: { parentSha?: string; sha: string }) {
const commitParams: {
message: string;
tree: string;
parents: string[];
author?: { name: string; date: string };
} = {
message,
tree: changeTree.sha,
parents: changeTree.parentSha ? [changeTree.parentSha] : [],

View File

@ -1,16 +1,21 @@
import { flow } from 'lodash';
import { API as GitlabAPI } from 'netlify-cms-backend-gitlab';
import { unsentRequest, then } from 'netlify-cms-lib-util';
import { Config as GitHubConfig, CommitAuthor } from 'netlify-cms-backend-gitlab/src/API';
import { unsentRequest, then, ApiRequest } from 'netlify-cms-lib-util';
type Config = GitHubConfig & { tokenPromise: () => Promise<string>; commitAuthor: CommitAuthor };
export default class API extends GitlabAPI {
constructor(config) {
tokenPromise: () => Promise<string>;
constructor(config: Config) {
super(config);
this.tokenPromise = config.tokenPromise;
this.commitAuthor = config.commitAuthor;
this.repoURL = '';
}
authenticateRequest = async req =>
authenticateRequest = async (req: ApiRequest) =>
unsentRequest.withHeaders(
{
Authorization: `Bearer ${await this.tokenPromise()}`,
@ -18,7 +23,7 @@ export default class API extends GitlabAPI {
req,
);
request = async req =>
request = async (req: ApiRequest) =>
flow([this.buildRequest, this.authenticateRequest, then(unsentRequest.performRequest)])(req);
hasWriteAccess = () => Promise.resolve(true);

View File

@ -1,5 +1,4 @@
import React from 'react';
import { Map } from 'immutable';
import { render } from '@testing-library/react';
window.netlifyIdentity = {
@ -10,7 +9,7 @@ window.netlifyIdentity = {
describe('GitGatewayAuthenticationPage', () => {
const props = {
config: Map({ logo_url: 'logo_url' }),
config: { logo_url: 'logo_url' },
t: jest.fn(key => key),
onLogin: jest.fn(),
inProgress: false,

View File

@ -14,7 +14,7 @@ describe('github API', () => {
it('should fetch url with authorization header', async () => {
const api = new API({
api_root: 'https://site.netlify.com/.netlify/git/github',
apiRoot: 'https://site.netlify.com/.netlify/git/github',
tokenPromise: () => Promise.resolve('token'),
});
@ -40,7 +40,7 @@ describe('github API', () => {
it('should throw error on not ok response with message property', async () => {
const api = new API({
api_root: 'https://site.netlify.com/.netlify/git/github',
apiRoot: 'https://site.netlify.com/.netlify/git/github',
tokenPromise: () => Promise.resolve('token'),
});
@ -63,7 +63,7 @@ describe('github API', () => {
it('should throw error on not ok response with msg property', async () => {
const api = new API({
api_root: 'https://site.netlify.com/.netlify/git/github',
apiRoot: 'https://site.netlify.com/.netlify/git/github',
tokenPromise: () => Promise.resolve('token'),
});

View File

@ -2,7 +2,25 @@ import GoTrue from 'gotrue-js';
import jwtDecode from 'jwt-decode';
import { fromPairs, get, pick, intersection, unzip } from 'lodash';
import ini from 'ini';
import { APIError, getBlobSHA, unsentRequest, basename } from 'netlify-cms-lib-util';
import {
APIError,
getBlobSHA,
unsentRequest,
basename,
ApiRequest,
AssetProxy,
PersistOptions,
Entry,
Cursor,
Implementation,
DisplayURL,
User,
Credentials,
entriesByFiles,
Config,
ImplementationFile,
UnpublishedEntryMediaFile,
} from 'netlify-cms-lib-util';
import { GitHubBackend } from 'netlify-cms-backend-github';
import { GitLabBackend } from 'netlify-cms-backend-gitlab';
import { BitbucketBackend, API as BitBucketAPI } from 'netlify-cms-backend-bitbucket';
@ -14,9 +32,17 @@ import {
createPointerFile,
getLargeMediaPatternsFromGitAttributesFile,
getClient,
Client,
PointerFile,
} from './netlify-lfs-client';
const localHosts = {
declare global {
interface Window {
netlifyIdentity?: { gotrue: GoTrue; logout: () => void };
}
}
const localHosts: Record<string, boolean> = {
localhost: true,
'127.0.0.1': true,
'0.0.0.0': true,
@ -27,9 +53,9 @@ const defaults = {
largeMedia: '/.netlify/large-media',
};
function getEndpoint(endpoint, netlifySiteURL) {
function getEndpoint(endpoint: string, netlifySiteURL: string | null) {
if (
localHosts[document.location.host.split(':').shift()] &&
localHosts[document.location.host.split(':').shift() as string] &&
netlifySiteURL &&
endpoint.match(/^\/\.netlify\//)
) {
@ -46,28 +72,57 @@ function getEndpoint(endpoint, netlifySiteURL) {
return endpoint;
}
export default class GitGateway {
constructor(config, options = {}) {
interface NetlifyUser extends Credentials {
jwt: () => Promise<string>;
email: string;
user_metadata: { full_name: string; avatar_url: string };
}
interface GetMediaDisplayURLArgs {
path: string;
original: { id: string; path: string } | string;
largeMedia: PointerFile;
}
export default class GitGateway implements Implementation {
config: Config;
api?: GitHubAPI | GitLabAPI | BitBucketAPI;
branch: string;
squashMerges: boolean;
mediaFolder: string;
transformImages: boolean;
gatewayUrl: string;
netlifyLargeMediaURL: string;
backendType: string | null;
authClient: GoTrue;
backend: GitHubBackend | GitLabBackend | BitbucketBackend | null;
acceptRoles?: string[];
tokenPromise?: () => Promise<string>;
_largeMediaClientPromise?: Promise<Client>;
options: {
proxied: boolean;
API: GitHubAPI | GitLabAPI | BitBucketAPI | null;
initialWorkflowStatus: string;
};
constructor(config: Config, options = {}) {
this.options = {
proxied: true,
API: null,
initialWorkflowStatus: '',
...options,
};
this.config = config;
this.branch = config.getIn(['backend', 'branch'], 'master').trim();
this.squash_merges = config.getIn(['backend', 'squash_merges']);
this.branch = config.backend.branch?.trim() || 'master';
this.squashMerges = config.backend.squash_merges || false;
this.mediaFolder = config.media_folder;
this.transformImages = config.backend.use_large_media_transforms_in_media_library || true;
const netlifySiteURL = localStorage.getItem('netlifySiteURL');
const APIUrl = getEndpoint(
config.getIn(['backend', 'identity_url'], defaults.identity),
netlifySiteURL,
);
this.gatewayUrl = getEndpoint(
config.getIn(['backend', 'gateway_url'], defaults.gateway),
netlifySiteURL,
);
const APIUrl = getEndpoint(config.backend.identity_url || defaults.identity, netlifySiteURL);
this.gatewayUrl = getEndpoint(config.backend.gateway_url || defaults.gateway, netlifySiteURL);
this.netlifyLargeMediaURL = getEndpoint(
config.getIn(['backend', 'large_media_url'], defaults.largeMedia),
config.backend.large_media_url || defaults.largeMedia,
netlifySiteURL,
);
const backendTypeRegex = /\/(github|gitlab|bitbucket)\/?$/;
@ -87,22 +142,27 @@ export default class GitGateway {
this.backend = null;
}
requestFunction = req =>
this.tokenPromise()
.then(token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req))
requestFunction = (req: ApiRequest) =>
this.tokenPromise!()
.then(
token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
)
.then(unsentRequest.performRequest);
authenticate(user) {
authenticate(credentials: Credentials) {
const user = credentials as NetlifyUser;
this.tokenPromise = user.jwt.bind(user);
return this.tokenPromise().then(async token => {
return this.tokenPromise!().then(async token => {
if (!this.backendType) {
const { github_enabled, gitlab_enabled, bitbucket_enabled, roles } = await fetch(
`${this.gatewayUrl}/settings`,
{
headers: { Authorization: `Bearer ${token}` },
},
).then(async res => {
const contentType = res.headers.get('Content-Type');
const {
github_enabled: githubEnabled,
gitlab_enabled: gitlabEnabled,
bitbucket_enabled: bitbucketEnabled,
roles,
} = await fetch(`${this.gatewayUrl}/settings`, {
headers: { Authorization: `Bearer ${token}` },
}).then(async res => {
const contentType = res.headers.get('Content-Type') || '';
if (!contentType.includes('application/json') && !contentType.includes('text/json')) {
throw new APIError(
`Your Git Gateway backend is not returning valid settings. Please make sure it is enabled.`,
@ -124,11 +184,11 @@ export default class GitGateway {
return body;
});
this.acceptRoles = roles;
if (github_enabled) {
if (githubEnabled) {
this.backendType = 'github';
} else if (gitlab_enabled) {
} else if (gitlabEnabled) {
this.backendType = 'gitlab';
} else if (bitbucket_enabled) {
} else if (bitbucketEnabled) {
this.backendType = 'bitbucket';
}
}
@ -142,17 +202,18 @@ export default class GitGateway {
}
const userData = {
name: user.user_metadata.full_name || user.email.split('@').shift(),
name: user.user_metadata.full_name || user.email.split('@').shift()!,
email: user.email,
// eslint-disable-next-line @typescript-eslint/camelcase
avatar_url: user.user_metadata.avatar_url,
metadata: user.user_metadata,
};
const apiConfig = {
api_root: `${this.gatewayUrl}/${this.backendType}`,
apiRoot: `${this.gatewayUrl}/${this.backendType}`,
branch: this.branch,
tokenPromise: this.tokenPromise,
tokenPromise: this.tokenPromise!,
commitAuthor: pick(userData, ['name', 'email']),
squash_merges: this.squash_merges,
squashMerges: this.squashMerges,
initialWorkflowStatus: this.options.initialWorkflowStatus,
};
@ -171,20 +232,21 @@ export default class GitGateway {
this.backend = new BitbucketBackend(this.config, { ...this.options, API: this.api });
}
if (!(await this.api.hasWriteAccess())) {
if (!(await this.api!.hasWriteAccess())) {
throw new Error("You don't have sufficient permissions to access Netlify CMS");
}
return { name: userData.name, login: userData.email };
return { name: userData.name, login: userData.email } as User;
});
}
restoreUser() {
const user = this.authClient && this.authClient.currentUser();
if (!user) return Promise.reject();
return this.authenticate(user);
return this.authenticate(user as Credentials);
}
authComponent() {
return AuthenticationPage;
}
logout() {
if (window.netlifyIdentity) {
return window.netlifyIdentity.logout();
@ -193,44 +255,43 @@ export default class GitGateway {
return user && user.logout();
}
getToken() {
return this.tokenPromise();
return this.tokenPromise!();
}
entriesByFolder(collection, extension) {
return this.backend.entriesByFolder(collection, extension);
entriesByFolder(folder: string, extension: string, depth: number) {
return this.backend!.entriesByFolder(folder, extension, depth);
}
entriesByFiles(collection) {
return this.backend.entriesByFiles(collection);
entriesByFiles(files: ImplementationFile[]) {
return this.backend!.entriesByFiles(files);
}
fetchFiles(files) {
return this.backend.fetchFiles(files);
}
getEntry(collection, slug, path) {
return this.backend.getEntry(collection, slug, path);
getEntry(path: string) {
return this.backend!.getEntry(path);
}
async loadEntryMediaFiles(files) {
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const client = await this.getLargeMediaClient();
const backend = this.backend as GitLabBackend | GitHubBackend;
if (!client.enabled) {
return this.backend.loadEntryMediaFiles(files);
return backend!.loadEntryMediaFiles(branch, files);
}
const mediaFiles = await Promise.all(
files.map(async file => {
if (client.matchPath(file.path)) {
const { sha: id, path } = file;
const { id, path } = file;
const largeMediaDisplayURLs = await this.getLargeMediaDisplayURLs([{ ...file, id }]);
const url = await client.getDownloadURL(largeMediaDisplayURLs[id]);
return {
...file,
id,
name: basename(path),
path,
url,
displayURL: url,
file: new File([], name),
size: 0,
};
} else {
return this.backend.loadMediaFile(file);
return backend!.loadMediaFile(branch, file);
}
}),
);
@ -238,8 +299,8 @@ export default class GitGateway {
return mediaFiles;
}
getMedia(mediaFolder = this.config.get('media_folder')) {
return Promise.all([this.backend.getMedia(mediaFolder), this.getLargeMediaClient()]).then(
getMedia(mediaFolder = this.mediaFolder) {
return Promise.all([this.backend!.getMedia(mediaFolder), this.getLargeMediaClient()]).then(
async ([mediaFiles, largeMediaClient]) => {
if (!largeMediaClient.enabled) {
return mediaFiles.map(({ displayURL, ...rest }) => ({
@ -277,23 +338,21 @@ export default class GitGateway {
return this._largeMediaClientPromise;
}
_getLargeMediaClient() {
const netlifyLargeMediaEnabledPromise = this.api
.readFile('.lfsconfig')
.then(ini.decode)
const netlifyLargeMediaEnabledPromise = this.api!.readFile('.lfsconfig')
.then(config => ini.decode<{ lfs: { url: string } }>(config as string))
.then(({ lfs: { url } }) => new URL(url))
.then(lfsURL => ({ enabled: lfsURL.hostname.endsWith('netlify.com') }))
.catch(err => ({ enabled: false, err }));
.catch((err: Error) => ({ enabled: false, err }));
const lfsPatternsPromise = this.api
.readFile('.gitattributes')
.then(getLargeMediaPatternsFromGitAttributesFile)
.then(patterns => ({ patterns }))
.catch(err => {
const lfsPatternsPromise = this.api!.readFile('.gitattributes')
.then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
.then((patterns: string[]) => ({ err: null, patterns }))
.catch((err: Error) => {
if (err.message.includes('404')) {
console.log('This 404 was expected and handled appropriately.');
return [];
return { err: null, patterns: [] as string[] };
} else {
return { err };
return { err, patterns: [] as string[] };
}
});
@ -312,29 +371,29 @@ export default class GitGateway {
rootURL: this.netlifyLargeMediaURL,
makeAuthorizedRequest: this.requestFunction,
patterns,
transformImages: this.config.getIn(
['backend', 'use_large_media_transforms_in_media_library'],
true,
)
? { nf_resize: 'fit', w: 560, h: 320 }
transformImages: this.transformImages
? // eslint-disable-next-line @typescript-eslint/camelcase
{ nf_resize: 'fit', w: 560, h: 320 }
: false,
});
},
);
}
async getLargeMediaDisplayURLs(mediaFiles) {
async getLargeMediaDisplayURLs(mediaFiles: { path: string; id: string | null }[]) {
const client = await this.getLargeMediaClient();
const largeMediaItems = mediaFiles
.filter(({ path }) => client.matchPath(path))
.map(({ id, path }) => ({ path, sha: id }));
return this.backend
.fetchFiles(largeMediaItems)
const filesPromise = entriesByFiles(
mediaFiles,
this.api!.readFile.bind(this.api!),
'Git-Gateway',
);
return filesPromise
.then(items =>
items.map(({ file: { sha }, data }) => {
items.map(({ file: { id }, data }) => {
const parsedPointerFile = parsePointerFile(data);
return [
{
pointerId: sha,
pointerId: id,
resourceId: parsedPointerFile.sha,
},
parsedPointerFile,
@ -343,7 +402,10 @@ export default class GitGateway {
)
.then(unzip)
.then(([idMaps, files]) =>
Promise.all([idMaps, client.getResourceDownloadURLArgs(files).then(fromPairs)]),
Promise.all([
idMaps as { pointerId: string; resourceId: string }[],
client.getResourceDownloadURLArgs(files as PointerFile[]).then(r => fromPairs(r)),
]),
)
.then(([idMaps, resourceMap]) =>
idMaps.map(({ pointerId, resourceId }) => [pointerId, resourceMap[resourceId]]),
@ -351,8 +413,12 @@ export default class GitGateway {
.then(fromPairs);
}
getMediaDisplayURL(displayURL) {
const { path, original, largeMedia: largeMediaDisplayURL } = displayURL;
getMediaDisplayURL(displayURL: DisplayURL) {
const {
path,
original,
largeMedia: largeMediaDisplayURL,
} = (displayURL as unknown) as GetMediaDisplayURLArgs;
return this.getLargeMediaClient().then(client => {
if (client.enabled && client.matchPath(path)) {
return client.getDownloadURL(largeMediaDisplayURL);
@ -360,33 +426,36 @@ export default class GitGateway {
if (typeof original === 'string') {
return original;
}
if (this.backend.getMediaDisplayURL) {
return this.backend.getMediaDisplayURL(original);
if (this.backend!.getMediaDisplayURL) {
return this.backend!.getMediaDisplayURL(original);
}
const err = new Error(
`getMediaDisplayURL is not implemented by the ${this.backendType} backend, but the backend returned a displayURL which was not a string!`,
);
) as Error & {
displayURL: DisplayURL;
};
err.displayURL = displayURL;
return Promise.reject(err);
});
}
async getMediaFile(path) {
async getMediaFile(path: string) {
const client = await this.getLargeMediaClient();
if (client.enabled && client.matchPath(path)) {
const largeMediaDisplayURLs = await this.getLargeMediaDisplayURLs([{ path }]);
const largeMediaDisplayURLs = await this.getLargeMediaDisplayURLs([{ path, id: null }]);
const url = await client.getDownloadURL(Object.values(largeMediaDisplayURLs)[0]);
return {
id: url,
name: basename(path),
path,
url,
displayURL: url,
};
}
return this.backend.getMediaFile(path);
return this.backend!.getMediaFile(path);
}
async getPointerFileForMediaFileObj(fileObj) {
async getPointerFileForMediaFileObj(fileObj: File) {
const client = await this.getLargeMediaClient();
const { name, size } = fileObj;
const sha = await getBlobSHA(fileObj);
@ -403,10 +472,10 @@ export default class GitGateway {
};
}
async persistEntry(entry, mediaFiles, options) {
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
const client = await this.getLargeMediaClient();
if (!client.enabled) {
return this.backend.persistEntry(entry, mediaFiles, options);
return this.backend!.persistEntry(entry, mediaFiles, options);
}
const largeMediaFilteredMediaFiles = await Promise.all(
@ -417,7 +486,7 @@ export default class GitGateway {
return mediaFile;
}
const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj);
const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj as File);
return {
...mediaFile,
fileObj: pointerFileDetails.file,
@ -428,62 +497,55 @@ export default class GitGateway {
}),
);
return this.backend.persistEntry(entry, largeMediaFilteredMediaFiles, options);
return this.backend!.persistEntry(entry, largeMediaFilteredMediaFiles, options);
}
async persistMedia(mediaFile, options) {
const { fileObj, path, value } = mediaFile;
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const { fileObj, path } = mediaFile;
const displayURL = URL.createObjectURL(fileObj);
const client = await this.getLargeMediaClient();
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
if (!client.enabled || !client.matchPath(fixedPath)) {
return this.backend.persistMedia(mediaFile, options);
return this.backend!.persistMedia(mediaFile, options);
}
const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj);
const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj as File);
const persistMediaArgument = {
fileObj: pointerFileDetails.file,
size: pointerFileDetails.blob.size,
path,
sha: pointerFileDetails.sha,
raw: pointerFileDetails.raw,
value,
};
return {
...(await this.backend.persistMedia(persistMediaArgument, options)),
...(await this.backend!.persistMedia(persistMediaArgument, options)),
displayURL,
};
}
deleteFile(path, commitMessage, options) {
return this.backend.deleteFile(path, commitMessage, options);
deleteFile(path: string, commitMessage: string) {
return this.backend!.deleteFile(path, commitMessage);
}
getDeployPreview(collection, slug) {
if (this.backend.getDeployPreview) {
return this.backend.getDeployPreview(collection, slug);
}
async getDeployPreview(collection: string, slug: string) {
return this.backend!.getDeployPreview(collection, slug);
}
unpublishedEntries() {
return this.backend.unpublishedEntries();
return this.backend!.unpublishedEntries();
}
unpublishedEntry(collection, slug) {
if (!this.backend.unpublishedEntry) {
return Promise.resolve(false);
}
return this.backend.unpublishedEntry(collection, slug, {
loadEntryMediaFiles: files => this.loadEntryMediaFiles(files),
unpublishedEntry(collection: string, slug: string) {
return this.backend!.unpublishedEntry(collection, slug, {
loadEntryMediaFiles: (branch, files) => this.loadEntryMediaFiles(branch, files),
});
}
updateUnpublishedEntryStatus(collection, slug, newStatus) {
return this.backend.updateUnpublishedEntryStatus(collection, slug, newStatus);
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
return this.backend!.updateUnpublishedEntryStatus(collection, slug, newStatus);
}
deleteUnpublishedEntry(collection, slug) {
return this.backend.deleteUnpublishedEntry(collection, slug);
deleteUnpublishedEntry(collection: string, slug: string) {
return this.backend!.deleteUnpublishedEntry(collection, slug);
}
publishUnpublishedEntry(collection, slug) {
return this.backend.publishUnpublishedEntry(collection, slug);
publishUnpublishedEntry(collection: string, slug: string) {
return this.backend!.publishUnpublishedEntry(collection, slug);
}
traverseCursor(cursor, action) {
return this.backend.traverseCursor(cursor, action);
traverseCursor(cursor: Cursor, action: string) {
return (this.backend as GitLabBackend | BitbucketBackend).traverseCursor!(cursor, action);
}
}

View File

@ -1,183 +0,0 @@
import { filter, flow, fromPairs, map } from 'lodash/fp';
import minimatch from 'minimatch';
//
// Pointer file parsing
// Split a blob of text into its individual lines.
const splitIntoLines = str => str.split('\n');
// Split a line into whitespace-separated tokens.
const splitIntoWords = str => str.split(/\s+/g);
const isNonEmptyString = str => str !== '';
// Trim every line and drop the ones that end up empty.
const withoutEmptyLines = flow([map(str => str.trim()), filter(isNonEmptyString)]);

// Parses a Git LFS pointer file into `{ size, sha, ... }`.
// Each non-empty line is a `key value` pair; `size` is numeric and the `oid`
// line has the form `sha256:<sha>`, so the sha is taken after the colon.
export const parsePointerFile = flow([
  splitIntoLines,
  withoutEmptyLines,
  map(splitIntoWords),
  fromPairs,
  ({ size, oid, ...rest }) => ({
    size: parseInt(size),
    sha: oid.split(':')[1],
    ...rest,
  }),
]);

// Serializes a Git LFS pointer file (spec v1) for the given object.
export const createPointerFile = ({ size, sha }) => `\
version https://git-lfs.github.com/spec/v1
oid sha256:${sha}
size ${size}
`;
//
// .gitattributes file parsing
// Git ignores everything after '#' on a .gitattributes line.
const removeGitAttributesCommentsFromLine = line => line.split('#')[0];

// Parses a single attribute token from a .gitattributes pattern line.
const parseGitPatternAttribute = attributeString => {
  // There are three kinds of attribute settings:
  // - a key=val pair sets an attribute to a specific value
  // - a key without a value and a leading hyphen sets an attribute to false
  // - a key without a value and no leading hyphen sets an attribute
  // to true
  if (attributeString.includes('=')) {
    return attributeString.split('=');
  }
  if (attributeString.startsWith('-')) {
    return [attributeString.slice(1), false];
  }
  return [attributeString, true];
};

// Parses a list of attribute tokens into an attribute-name -> value map.
const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);

// Splits a .gitattributes line into [pattern, { attribute: value, ... }].
const parseGitAttributesPatternLine = flow([
  splitIntoWords,
  ([pattern, ...attributes]) => [pattern, parseGitPatternAttributes(attributes)],
]);

// Parses a whole .gitattributes file into [pattern, attributes] pairs,
// skipping comments and blank lines.
const parseGitAttributesFileToPatternAttributePairs = flow([
  splitIntoLines,
  map(removeGitAttributesCommentsFromLine),
  withoutEmptyLines,
  map(parseGitAttributesPatternLine),
]);

// A pattern is LFS-tracked when its filter, diff and merge attributes are all
// set to 'lfs' (the standard `git lfs track` configuration).
export const getLargeMediaPatternsFromGitAttributesFile = flow([
  parseGitAttributesFileToPatternAttributePairs,
  filter(
    // eslint-disable-next-line no-unused-vars
    ([pattern, attributes]) =>
      attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
  ),
  map(([pattern]) => pattern),
]);

// True when `path` matches any LFS pattern; matchBase matches bare basenames
// the same way .gitattributes does.
export const matchPath = ({ patterns }, path) =>
  patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
//
// API interactions
// Headers required by the Git LFS API (git-lfs JSON media type).
const defaultContentHeaders = {
  Accept: 'application/vnd.git-lfs+json',
  ['Content-Type']: 'application/vnd.git-lfs+json',
};

// Asks the LFS server's `verify` endpoint whether it already stores an object
// with this sha/size. Resolves true when present, false on 404; any other
// status falls through and resolves `undefined` (falsy, so callers treat it
// as "not present").
const resourceExists = async ({ rootURL, makeAuthorizedRequest }, { sha, size }) => {
  const response = await makeAuthorizedRequest({
    url: `${rootURL}/verify`,
    method: 'POST',
    headers: defaultContentHeaders,
    body: JSON.stringify({ oid: sha, size }),
  });
  if (response.ok) {
    return true;
  }
  if (response.status === 404) {
    return false;
  }
  // TODO: what kind of error to throw here? APIError doesn't seem
  // to fit
};
// Fetches the LFS object for `sha` (optionally appending Netlify image
// transformation params when `transformImages` is a non-empty object) and
// resolves to an object URL for the downloaded blob. Any failure is logged
// and resolved as '' so display code degrades gracefully.
const getDownloadURL = ({ rootURL, transformImages: t, makeAuthorizedRequest }, { sha }) =>
  makeAuthorizedRequest(
    `${rootURL}/origin/${sha}${
      t && Object.keys(t).length > 0 ? `?nf_resize=${t.nf_resize}&w=${t.w}&h=${t.h}` : ''
    }`,
  )
    .then(res => (res.ok ? res : Promise.reject(res)))
    .then(res => res.blob())
    .then(blob => URL.createObjectURL(blob))
    .catch(err => console.error(err) || Promise.resolve(''));
// For Netlify LFS the "download args" for an object are just its sha, keyed
// by that same sha: [sha, { sha }] pairs. Wrapped in a promise to match the
// other client functions' async interface.
const getResourceDownloadURLArgs = (clientConfig, objects) =>
  Promise.resolve(objects.map(object => [object.sha, { sha: object.sha }]));
// Resolves a download URL for each of the given LFS objects.
// Fix: `getResourceDownloadURLArgs` yields [sha, downloadURLArg] pairs, and
// `getDownloadURL` takes the client config as its first argument — the old
// code passed each *pair* as the config and omitted the config entirely,
// which made `getDownloadURL` destructure `{ sha }` from `undefined`.
const getResourceDownloadURLs = (clientConfig, objects) =>
  getResourceDownloadURLArgs(clientConfig, objects)
    .then(pairs => pairs.map(([, downloadURLArg]) => getDownloadURL(clientConfig, downloadURLArg)))
    .then(Promise.all.bind(Promise));
// Builds a Git LFS batch API "upload" request body; the LFS protocol names
// the hash field `oid`.
const uploadOperation = objects => ({
  operation: 'upload',
  transfers: ['basic'],
  objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
});

// Requests one upload URL per object from the LFS batch endpoint.
// Throws if the server reports an error for any object.
const getResourceUploadURLs = async ({ rootURL, makeAuthorizedRequest }, objects) => {
  const response = await makeAuthorizedRequest({
    url: `${rootURL}/objects/batch`,
    method: 'POST',
    headers: defaultContentHeaders,
    body: JSON.stringify(uploadOperation(objects)),
  });
  return (await response.json()).objects.map(object => {
    if (object.error) {
      throw new Error(object.error.message);
    }
    return object.actions.upload.href;
  });
};

// PUTs the blob to an issued upload URL. `clientConfig` is unused here but
// kept for the shared (config, ...args) calling convention used by clientFns.
const uploadBlob = (clientConfig, uploadURL, blob) =>
  fetch(uploadURL, {
    method: 'PUT',
    body: blob,
  });

// Uploads `resource` unless the server already has an object with this
// sha/size; resolves with the sha either way.
const uploadResource = async (clientConfig, { sha, size }, resource) => {
  const existingFile = await resourceExists(clientConfig, { sha, size });
  if (existingFile) {
    return sha;
  }
  const [uploadURL] = await getResourceUploadURLs(clientConfig, [{ sha, size }]);
  await uploadBlob(clientConfig, uploadURL, resource);
  return sha;
};
//
// Create Large Media client
// Returns a copy of `fn` with the client configuration bound as its first argument.
const configureFn = (config, fn) => (...args) => fn(config, ...args);

// The functions exposed on the Large Media client; each takes the client
// config as its first argument and is pre-bound to it by `getClient`.
const clientFns = {
  resourceExists,
  getResourceUploadURLs,
  getResourceDownloadURLs,
  getResourceDownloadURLArgs,
  getDownloadURL,
  uploadResource,
  matchPath,
};

// Builds the Large Media client: every clientFns entry with `clientConfig`
// pre-applied, plus the config's patterns and enabled flag.
export const getClient = clientConfig => {
  return flow([
    Object.keys,
    map(key => [key, configureFn(clientConfig, clientFns[key])]),
    fromPairs,
    configuredFns => ({
      ...configuredFns,
      patterns: clientConfig.patterns,
      enabled: clientConfig.enabled,
    }),
  ])(clientFns);
};

View File

@ -0,0 +1,234 @@
import { filter, flow, fromPairs, map } from 'lodash/fp';
import minimatch from 'minimatch';
import { ApiRequest } from 'netlify-cms-lib-util';
//
// Pointer file parsing
// Split a blob of text into its individual lines.
const splitIntoLines = (str: string) => str.split('\n');
// Split a line into whitespace-separated tokens.
const splitIntoWords = (str: string) => str.split(/\s+/g);
const isNonEmptyString = (str: string) => str !== '';
// Trim every line and drop the ones that end up empty.
const withoutEmptyLines = flow([map((str: string) => str.trim()), filter(isNonEmptyString)]);

/**
 * Parses a Git LFS pointer file into a `PointerFile`.
 *
 * Each non-empty line is a `key value` pair; `size` is numeric and the `oid`
 * line has the form `sha256:<sha>`, so the sha is taken after the colon.
 */
export const parsePointerFile: (data: string) => PointerFile = flow([
  splitIntoLines,
  withoutEmptyLines,
  map(splitIntoWords),
  fromPairs,
  ({ size, oid, ...rest }) => ({
    size: parseInt(size),
    sha: oid.split(':')[1],
    ...rest,
  }),
]);
// A parsed Git LFS pointer: the object's size in bytes and its sha256 hash.
export type PointerFile = {
  size: number;
  sha: string;
};

// Request function supplied by the backend; attaches authorization to requests.
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;

// Netlify image transformation parameters (sent as the nf_resize query string).
type ImageTransformations = { nf_resize: string; w: number; h: number };

// Configuration shared by all Large Media client functions.
type ClientConfig = {
  rootURL: string;
  makeAuthorizedRequest: MakeAuthorizedRequest;
  // LFS-tracked glob patterns from .gitattributes.
  patterns: string[];
  enabled: boolean;
  // Either the transformations to apply to images, or `false` to disable them.
  transformImages: ImageTransformations | boolean;
};
/**
 * Serializes a Git LFS pointer file (spec v1) for an object with the given
 * size and sha256 hash. The output ends with a trailing newline, as the
 * pointer-file format requires.
 */
export const createPointerFile = ({ size, sha }: { size: number; sha: string }) =>
  [
    'version https://git-lfs.github.com/spec/v1',
    `oid sha256:${sha}`,
    `size ${size}`,
    '',
  ].join('\n');
//
// .gitattributes file parsing
// Git ignores everything after '#' on a .gitattributes line.
const removeGitAttributesCommentsFromLine = (line: string) => line.split('#')[0];

/**
 * Parses a single attribute token from a .gitattributes pattern line.
 *
 * There are three kinds of attribute settings:
 * - a key=val pair sets an attribute to a specific value
 * - a key without a value and a leading hyphen sets an attribute to false
 * - a key without a value and no leading hyphen sets an attribute to true
 */
const parseGitPatternAttribute = (attributeString: string) => {
  const separatorIndex = attributeString.indexOf('=');
  if (separatorIndex !== -1) {
    // Only the first '=' separates key from value: the value itself may
    // contain '=' characters (the previous `split('=')` broke those up).
    return [attributeString.slice(0, separatorIndex), attributeString.slice(separatorIndex + 1)];
  }
  if (attributeString.startsWith('-')) {
    return [attributeString.slice(1), false];
  }
  return [attributeString, true];
};
// Parses a list of attribute tokens into an attribute-name -> value map.
const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);

// Splits a .gitattributes line into [pattern, { attribute: value, ... }].
const parseGitAttributesPatternLine = flow([
  splitIntoWords,
  ([pattern, ...attributes]) => [pattern, parseGitPatternAttributes(attributes)],
]);

// Parses a whole .gitattributes file into [pattern, attributes] pairs,
// skipping comments and blank lines.
const parseGitAttributesFileToPatternAttributePairs = flow([
  splitIntoLines,
  map(removeGitAttributesCommentsFromLine),
  withoutEmptyLines,
  map(parseGitAttributesPatternLine),
]);
/**
 * Extracts the glob patterns tracked by Git LFS from a .gitattributes file.
 * A pattern qualifies when its `filter`, `diff` and `merge` attributes are all
 * set to 'lfs' — the configuration written by `git lfs track`.
 */
export const getLargeMediaPatternsFromGitAttributesFile = (fileContents: string) => {
  const patternAttributePairs = parseGitAttributesFileToPatternAttributePairs(fileContents);
  return patternAttributePairs
    .filter(
      ([, attributes]: [string, Record<string, string | boolean>]) =>
        attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
    )
    .map(([pattern]: [string]) => pattern);
};
/**
 * True when `path` matches any of the LFS-tracked patterns. `matchBase`
 * makes bare basenames match anywhere in the tree, mirroring .gitattributes
 * semantics.
 */
export const matchPath = ({ patterns }: ClientConfig, path: string) => {
  for (const pattern of patterns) {
    if (minimatch(path, pattern, { matchBase: true })) {
      return true;
    }
  }
  return false;
};
//
// API interactions
// Headers required by the Git LFS API (git-lfs JSON media type).
const defaultContentHeaders = {
  Accept: 'application/vnd.git-lfs+json',
  ['Content-Type']: 'application/vnd.git-lfs+json',
};

/**
 * Asks the LFS server's `verify` endpoint whether it already stores an object
 * with this sha/size.
 *
 * Resolves `true` when the server confirms the object, `false` on 404 (not
 * present) and also `false` for any other non-OK status. The previous
 * implementation fell through and resolved `undefined` for unexpected
 * statuses (see the old TODO); callers such as `uploadResource` only test
 * truthiness, so reporting "not present" keeps them on their upload path.
 */
const resourceExists = async (
  { rootURL, makeAuthorizedRequest }: ClientConfig,
  { sha, size }: PointerFile,
) => {
  const response = await makeAuthorizedRequest({
    url: `${rootURL}/verify`,
    method: 'POST',
    headers: defaultContentHeaders,
    body: JSON.stringify({ oid: sha, size }),
  });
  if (response.ok) {
    return true;
  }
  // 404 and any other failure status both mean "treat as missing".
  return false;
};
// Builds the Netlify image transformation query string,
// e.g. `?nf_resize=fit&w=560&h=320`. (Renamed from the misspelled
// `getTransofrmationsParams`; the name is module-local to this pair.)
const getTransformationsParams = (t: ImageTransformations) =>
  `?nf_resize=${t.nf_resize}&w=${t.w}&h=${t.h}`;

/**
 * Fetches the LFS object for `sha` — appending image transformation params
 * when `transformImages` is a non-empty object — and resolves to an object
 * URL for the downloaded blob. Any failure is logged and resolved as '' so
 * display code degrades gracefully.
 */
const getDownloadURL = (
  { rootURL, transformImages: t, makeAuthorizedRequest }: ClientConfig,
  { sha }: PointerFile,
) => {
  // `typeof` narrowing replaces the previous `t as ImageTransformations`
  // assertion; `transformImages` may be a boolean to disable transformations.
  const params = typeof t === 'object' && Object.keys(t).length > 0 ? getTransformationsParams(t) : '';
  return makeAuthorizedRequest(`${rootURL}/origin/${sha}${params}`)
    .then(res => (res.ok ? res : Promise.reject(res)))
    .then(res => res.blob())
    .then(blob => URL.createObjectURL(blob))
    .catch((err: Error) => {
      console.error(err);
      return Promise.resolve('');
    });
};
// For Netlify LFS the "download args" for an object are just its sha, keyed
// by that same sha. The tuple type is declared on the map callback instead of
// asserted on the result, so the compiler actually checks the pair shape.
const getResourceDownloadURLArgs = (_clientConfig: ClientConfig, objects: PointerFile[]) => {
  const pairs = objects.map(({ sha }): [string, { sha: string }] => [sha, { sha }]);
  return Promise.resolve(pairs);
};
// Builds a Git LFS batch API "upload" request body; the LFS protocol names
// the hash field `oid`.
const uploadOperation = (objects: PointerFile[]) => {
  const lfsObjects = objects.map(object => {
    const { sha, ...rest } = object;
    return { ...rest, oid: sha };
  });
  return {
    operation: 'upload',
    transfers: ['basic'],
    objects: lfsObjects,
  };
};

/**
 * Requests one upload URL per object from the LFS batch endpoint.
 * Throws if the server reports an error for any requested object.
 */
const getResourceUploadURLs = async (
  {
    rootURL,
    makeAuthorizedRequest,
  }: { rootURL: string; makeAuthorizedRequest: MakeAuthorizedRequest },
  objects: PointerFile[],
) => {
  const response = await makeAuthorizedRequest({
    url: `${rootURL}/objects/batch`,
    method: 'POST',
    headers: defaultContentHeaders,
    body: JSON.stringify(uploadOperation(objects)),
  });
  const batchResult = await response.json();
  return batchResult.objects.map(
    (object: { error?: { message: string }; actions: { upload: { href: string } } }) => {
      if (object.error) {
        throw new Error(object.error.message);
      }
      return object.actions.upload.href;
    },
  );
};
// PUTs the blob to an upload URL issued by the LFS batch endpoint.
const uploadBlob = (uploadURL: string, blob: Blob) =>
  fetch(uploadURL, {
    method: 'PUT',
    body: blob,
  });
/**
 * Uploads `resource` to the LFS server unless an object with this sha/size is
 * already stored there; resolves with the sha either way.
 */
const uploadResource = async (
  clientConfig: ClientConfig,
  { sha, size }: PointerFile,
  resource: Blob,
) => {
  // Skip the upload round-trip when the server already has this object.
  const alreadyStored = await resourceExists(clientConfig, { sha, size });
  if (alreadyStored) {
    return sha;
  }
  const uploadURLs = await getResourceUploadURLs(clientConfig, [{ sha, size }]);
  await uploadBlob(uploadURLs[0], resource);
  return sha;
};
//
// Create Large Media client
// Returns a copy of `fn` with the client configuration bound as its first argument.
const configureFn = (config: ClientConfig, fn: Function) => (...args: unknown[]) =>
  fn(config, ...args);

// The functions exposed on the Large Media client; each takes ClientConfig as
// its first argument and is pre-bound to it by `getClient`.
const clientFns: Record<string, Function> = {
  resourceExists,
  getResourceUploadURLs,
  getResourceDownloadURLArgs,
  getDownloadURL,
  uploadResource,
  matchPath,
};
/**
 * The configured Large Media client returned by `getClient`: each function in
 * `clientFns` with its ClientConfig pre-applied, plus the config's patterns
 * and enabled flag.
 */
export type Client = {
  resourceExists: (pointer: PointerFile) => Promise<boolean | undefined>;
  // Fixed: the batch API implementation returns one upload URL per requested
  // object (an array), not a single string.
  getResourceUploadURLs: (objects: PointerFile[]) => Promise<string[]>;
  getResourceDownloadURLArgs: (objects: PointerFile[]) => Promise<[string, { sha: string }][]>;
  getDownloadURL: (pointer: PointerFile) => Promise<string>;
  uploadResource: (pointer: PointerFile, blob: Blob) => Promise<string>;
  matchPath: (path: string) => boolean;
  patterns: string[];
  enabled: boolean;
};
export const getClient = (clientConfig: ClientConfig) => {
return flow([
Object.keys,
map((key: string) => [key, configureFn(clientConfig, clientFns[key])]),
fromPairs,
configuredFns => ({
...configuredFns,
patterns: clientConfig.patterns,
enabled: clientConfig.enabled,
}),
])(clientFns);
};

View File

@ -0,0 +1,4 @@
// Minimal ambient typing for the `ini` package: only `decode` is used here,
// typed generically so callers can state the shape of the parsed file
// (e.g. `ini.decode<{ lfs: { url: string } }>(text)`).
declare module 'ini' {
  const ini: { decode: <T>(ini: string) => T };
  export default ini;
}

View File

@ -9,14 +9,21 @@ import {
flowAsync,
localForage,
onlySuccessfulPromises,
resolvePromiseProperties,
ResponseParser,
basename,
AssetProxy,
Entry as LibEntry,
PersistOptions,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
PreviewState,
FetchError,
} from 'netlify-cms-lib-util';
import {
UsersGetAuthenticatedResponse as GitHubUser,
ReposGetResponse as GitHubRepo,
ReposGetContentsResponseItem as GitHubFile,
ReposGetBranchResponse as GitHubBranch,
GitGetBlobResponse as GitHubBlob,
GitCreateTreeResponse as GitHubTree,
@ -28,35 +35,33 @@ import {
ReposCompareCommitsResponseBaseCommit as GitHubCompareBaseCommit,
GitCreateCommitResponseAuthor as GitHubAuthor,
GitCreateCommitResponseCommitter as GitHubCommiter,
ReposListStatusesForRefResponseItem,
} from '@octokit/rest';
const CMS_BRANCH_PREFIX = 'cms';
const CURRENT_METADATA_VERSION = '1';
interface FetchError extends Error {
status: number;
}
export const API_NAME = 'GitHub';
interface Config {
api_root?: string;
export interface Config {
apiRoot?: string;
token?: string;
branch?: string;
useOpenAuthoring: boolean;
useOpenAuthoring?: boolean;
repo?: string;
originRepo?: string;
squash_merges?: string;
squashMerges: boolean;
initialWorkflowStatus: string;
}
interface File {
interface TreeFile {
type: 'blob' | 'tree';
sha: string;
path: string;
raw?: string;
}
interface Entry extends File {
slug: string;
export interface Entry extends LibEntry {
sha?: string;
}
type Override<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;
@ -69,18 +74,20 @@ type GitHubCompareFile = ReposCompareCommitsResponseFilesItem & { previous_filen
type GitHubCompareFiles = GitHubCompareFile[];
interface CommitFields {
parents: { sha: string }[];
sha: string;
message: string;
author: string;
committer: string;
tree: { sha: string };
enum GitHubCommitStatusState {
Error = 'error',
Failure = 'failure',
Pending = 'pending',
Success = 'success',
}
interface PR {
type GitHubCommitStatus = ReposListStatusesForRefResponseItem & {
state: GitHubCommitStatusState;
};
export interface PR {
number: number;
head: string;
head: string | { sha: string };
}
interface MetaDataObjects {
@ -88,7 +95,7 @@ interface MetaDataObjects {
files: MediaFile[];
}
interface Metadata {
export interface Metadata {
type: string;
objects: MetaDataObjects;
branch: string;
@ -103,23 +110,16 @@ interface Metadata {
timeStamp: string;
}
interface Branch {
export interface Branch {
ref: string;
}
interface BlobArgs {
export interface BlobArgs {
sha: string;
repoURL: string;
parseText: boolean;
}
interface ContentArgs {
path: string;
branch: string;
repoURL: string;
parseText: boolean;
}
type Param = string | number | undefined;
type Options = RequestInit & { params?: Record<string, Param | Record<string, Param>> };
@ -133,30 +133,21 @@ const replace404WithEmptyArray = (err: FetchError) => {
}
};
type PersistOptions = {
useWorkflow: boolean;
commitMessage: string;
collectionName: string;
unpublished: boolean;
parsedData?: { title: string; description: string };
status: string;
};
type MediaFile = {
sha: string;
path: string;
};
export default class API {
api_root: string;
apiRoot: string;
token: string;
branch: string;
useOpenAuthoring: boolean;
useOpenAuthoring?: boolean;
repo: string;
originRepo: string;
repoURL: string;
originRepoURL: string;
merge_method: string;
mergeMethod: string;
initialWorkflowStatus: string;
_userPromise?: Promise<GitHubUser>;
@ -165,8 +156,7 @@ export default class API {
commitAuthor?: {};
constructor(config: Config) {
// eslint-disable-next-line @typescript-eslint/camelcase
this.api_root = config.api_root || 'https://api.github.com';
this.apiRoot = config.apiRoot || 'https://api.github.com';
this.token = config.token || '';
this.branch = config.branch || 'master';
this.useOpenAuthoring = config.useOpenAuthoring;
@ -175,15 +165,13 @@ export default class API {
this.repoURL = `/repos/${this.repo}`;
// when not in 'useOpenAuthoring' mode originRepoURL === repoURL
this.originRepoURL = `/repos/${this.originRepo}`;
// eslint-disable-next-line @typescript-eslint/camelcase
this.merge_method = config.squash_merges ? 'squash' : 'merge';
this.mergeMethod = config.squashMerges ? 'squash' : 'merge';
this.initialWorkflowStatus = config.initialWorkflowStatus;
}
static DEFAULT_COMMIT_MESSAGE = 'Automatically generated by Netlify CMS';
static DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
user() {
user(): Promise<{ name: string; login: string }> {
if (!this._userPromise) {
this._userPromise = this.request('/user') as Promise<GitHubUser>;
}
@ -199,6 +187,10 @@ export default class API {
});
}
reset() {
// no op
}
requestHeaders(headers = {}) {
const baseHeader: Record<string, string> = {
'Content-Type': 'application/json; charset=utf-8',
@ -207,10 +199,10 @@ export default class API {
if (this.token) {
baseHeader.Authorization = `token ${this.token}`;
return baseHeader;
return Promise.resolve(baseHeader);
}
return baseHeader;
return Promise.resolve(baseHeader);
}
parseJsonResponse(response: Response) {
@ -234,7 +226,7 @@ export default class API {
if (params.length) {
path += `?${params.join('&')}`;
}
return this.api_root + path;
return this.apiRoot + path;
}
parseResponse(response: Response) {
@ -252,16 +244,15 @@ export default class API {
}
handleRequestError(error: FetchError, responseStatus: number) {
throw new APIError(error.message, responseStatus, 'GitHub');
throw new APIError(error.message, responseStatus, API_NAME);
}
async request(
path: string,
options: Options = {},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
parser: ResponseParser<any> = response => this.parseResponse(response),
parser = (response: Response) => this.parseResponse(response),
) {
// overriding classes can return a promise from requestHeaders
const headers = await this.requestHeaders(options.headers || {});
const url = this.urlFor(path, options);
let responseStatus: number;
@ -274,7 +265,6 @@ export default class API {
}
async requestAllPages<T>(url: string, options: Options = {}) {
// overriding classes can return a promise from requestHeaders
const headers = await this.requestHeaders(options.headers || {});
const processedURL = this.urlFor(url, options);
const allResponses = await getAllResponses(processedURL, { ...options, headers });
@ -286,7 +276,7 @@ export default class API {
generateContentKey(collectionName: string, slug: string) {
if (!this.useOpenAuthoring) {
return `${collectionName}/${slug}`;
return generateContentKey(collectionName, slug);
}
return `${this.repo}/${collectionName}/${slug}`;
@ -353,7 +343,7 @@ export default class API {
const file = { path: `${key}.json`, raw: JSON.stringify(data) };
await this.uploadBlob(file);
const changeTree = await this.updateTree(branchData.sha, [file as File]);
const changeTree = await this.updateTree(branchData.sha, [file as TreeFile]);
const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
await this.patchRef('meta', '_netlify_cms', sha);
localForage.setItem(`gh.meta.${key}`, {
@ -433,16 +423,9 @@ export default class API {
});
}
retrieveContent({ path, branch, repoURL, parseText }: ContentArgs) {
return this.request(`${repoURL}/contents/${path}`, {
params: { ref: branch },
cache: 'no-store',
}).then((file: GitHubFile) => this.getBlob({ sha: file.sha, repoURL, parseText }));
}
readFile(
async readFile(
path: string,
sha: string | null,
sha?: string | null,
{
branch = this.branch,
repoURL = this.repoURL,
@ -453,11 +436,12 @@ export default class API {
parseText?: boolean;
} = {},
) {
if (sha) {
return this.getBlob({ sha, repoURL, parseText });
} else {
return this.retrieveContent({ path, branch, repoURL, parseText });
if (!sha) {
sha = await this.getFileSha(path, { repoURL, branch });
}
const fetchContent = () => this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
const content = await readFile(sha, fetchContent, localForage, parseText);
return content;
}
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
@ -479,38 +463,10 @@ export default class API {
}
}
async getMediaAsBlob(sha: string | null, path: string) {
let blob: Blob;
if (path.match(/.svg$/)) {
const text = (await this.readFile(path, sha, { parseText: true })) as string;
blob = new Blob([text], { type: 'image/svg+xml' });
} else {
blob = (await this.readFile(path, sha, { parseText: false })) as Blob;
}
return blob;
}
async getMediaDisplayURL(sha: string, path: string) {
const blob = await this.getMediaAsBlob(sha, path);
return URL.createObjectURL(blob);
}
getBlob({ sha, repoURL = this.repoURL, parseText = true }: BlobArgs) {
const key = parseText ? `gh.${sha}` : `gh.${sha}.blob`;
return localForage.getItem<string | Blob>(key).then(cached => {
if (cached) {
return cached;
}
return this.fetchBlobContent({ sha, repoURL, parseText }).then(result => {
localForage.setItem(key, result);
return result;
});
});
}
async listFiles(path: string, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
async listFiles(
path: string,
{ repoURL = this.repoURL, branch = this.branch, depth = 1 } = {},
): Promise<{ type: string; id: string; name: string; path: string; size: number }[]> {
const folder = trim(path, '/');
return this.request(`${repoURL}/git/trees/${branch}:${folder}`, {
// GitHub API supports recursive=1 for getting the entire recursive tree
@ -522,43 +478,50 @@ export default class API {
// filter only files and up to the required depth
.filter(file => file.type === 'blob' && file.path.split('/').length <= depth)
.map(file => ({
...file,
type: file.type,
id: file.sha,
name: basename(file.path),
path: `${folder}/${file.path}`,
size: file.size,
})),
)
.catch(replace404WithEmptyArray);
}
readUnpublishedBranchFile(contentKey: string) {
const metaDataPromise = this.retrieveMetadata(contentKey).then(data =>
data.objects.entry.path ? data : Promise.reject(null),
);
const repoURL = this.useOpenAuthoring
? `/repos/${contentKey
.split('/')
.slice(0, 2)
.join('/')}`
: this.repoURL;
return resolvePromiseProperties({
metaData: metaDataPromise,
fileData: metaDataPromise.then(data =>
this.readFile(data.objects.entry.path, null, {
branch: data.branch,
async readUnpublishedBranchFile(contentKey: string) {
try {
const metaData = await this.retrieveMetadata(contentKey).then(data =>
data.objects.entry.path ? data : Promise.reject(null),
);
const repoURL = this.useOpenAuthoring
? `/repos/${contentKey
.split('/')
.slice(0, 2)
.join('/')}`
: this.repoURL;
const [fileData, isModification] = await Promise.all([
this.readFile(metaData.objects.entry.path, null, {
branch: metaData.branch,
repoURL,
}),
),
isModification: metaDataPromise.then(data =>
this.isUnpublishedEntryModification(data.objects.entry.path, this.branch),
),
}).catch(() => {
}) as Promise<string>,
this.isUnpublishedEntryModification(metaData.objects.entry.path),
]);
return {
metaData,
fileData,
isModification,
slug: this.slugFromContentKey(contentKey, metaData.collection),
};
} catch (e) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
});
}
}
isUnpublishedEntryModification(path: string, branch: string) {
isUnpublishedEntryModification(path: string) {
return this.readFile(path, null, {
branch,
branch: this.branch,
repoURL: this.originRepoURL,
})
.then(() => true)
@ -635,7 +598,7 @@ export default class API {
const newBranchName = `cms/${newContentKey}`;
// create new branch and pull request in new format
const newBranch = await this.createBranch(newBranchName, (metaData.pr as PR).head);
const newBranch = await this.createBranch(newBranchName, (metaData.pr as PR).head as string);
const pr = await this.createPR(metaData.commitMessage, newBranchName);
// store new metadata
@ -667,7 +630,7 @@ export default class API {
return branch;
}
async listUnpublishedBranches() {
async listUnpublishedBranches(): Promise<Branch[]> {
console.log(
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
@ -720,8 +683,16 @@ export default class API {
*/
async getStatuses(sha: string) {
try {
const resp = await this.request(`${this.originRepoURL}/commits/${sha}/status`);
return resp.statuses;
const resp: { statuses: GitHubCommitStatus[] } = await this.request(
`${this.originRepoURL}/commits/${sha}/status`,
);
return resp.statuses.map(s => ({
context: s.context,
// eslint-disable-next-line @typescript-eslint/camelcase
target_url: s.target_url,
state:
s.state === GitHubCommitStatusState.Success ? PreviewState.Success : PreviewState.Other,
}));
} catch (err) {
if (err && err.message && err.message === 'Ref not found') {
return [];
@ -730,26 +701,35 @@ export default class API {
}
}
async persistFiles(entry: Entry, mediaFiles: File[], options: PersistOptions) {
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
const files = entry ? mediaFiles.concat(entry) : mediaFiles;
const uploadPromises = files.map(file => this.uploadBlob(file));
await Promise.all(uploadPromises);
if (!options.useWorkflow) {
return this.getDefaultBranch()
.then(branchData => this.updateTree(branchData.commit.sha, files))
.then(branchData =>
this.updateTree(branchData.commit.sha, files as { sha: string; path: string }[]),
)
.then(changeTree => this.commit(options.commitMessage, changeTree))
.then(response => this.patchBranch(this.branch, response.sha));
} else {
const mediaFilesList = mediaFiles.map(({ sha, path }) => ({
path: trimStart(path, '/'),
sha,
}));
return this.editorialWorkflowGit(files, entry, mediaFilesList, options);
const mediaFilesList = (mediaFiles as { sha: string; path: string }[]).map(
({ sha, path }) => ({
path: trimStart(path, '/'),
sha,
}),
);
return this.editorialWorkflowGit(
files as TreeFile[],
entry as Entry,
mediaFilesList,
options,
);
}
}
getFileSha(path: string, branch: string) {
getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
/**
* We need to request the tree first to get the SHA. We use extended SHA-1
* syntax (<rev>:<path>) to get a blob from a tree without having to recurse
@ -760,22 +740,25 @@ export default class API {
const filename = last(pathArray);
const directory = initial(pathArray).join('/');
const fileDataPath = encodeURIComponent(directory);
const fileDataURL = `${this.repoURL}/git/trees/${branch}:${fileDataPath}`;
const fileDataURL = `${repoURL}/git/trees/${branch}:${fileDataPath}`;
return this.request(fileDataURL, { cache: 'no-store' }).then(resp => {
const { sha } = resp.tree.find((file: File) => file.path === filename);
return sha;
return this.request(fileDataURL, { cache: 'no-store' }).then((resp: GitHubTree) => {
const file = resp.tree.find(file => file.path === filename);
if (file) {
return file.sha;
}
throw new APIError('Not Found', 404, API_NAME);
});
}
deleteFile(path: string, message: string, options: { branch?: string } = {}) {
deleteFile(path: string, message: string) {
if (this.useOpenAuthoring) {
return Promise.reject('Cannot delete published entries as an Open Authoring user!');
}
const branch = options.branch || this.branch;
const branch = this.branch;
return this.getFileSha(path, branch).then(sha => {
return this.getFileSha(path, { branch }).then(sha => {
const params: { sha: string; message: string; branch: string; author?: { date: string } } = {
sha,
message,
@ -799,12 +782,12 @@ export default class API {
}
async editorialWorkflowGit(
files: File[],
files: TreeFile[],
entry: Entry,
mediaFilesList: MediaFile[],
options: PersistOptions,
) {
const contentKey = this.generateContentKey(options.collectionName, entry.slug);
const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
const branchName = this.generateBranchName(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
@ -837,14 +820,14 @@ export default class API {
user: user.name || user.login,
status: options.status || this.initialWorkflowStatus,
branch: branchName,
collection: options.collectionName,
collection: options.collectionName as string,
commitMessage: options.commitMessage,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
objects: {
entry: {
path: entry.path,
sha: entry.sha,
sha: entry.sha as string,
},
files: mediaFilesList,
},
@ -871,7 +854,7 @@ export default class API {
const pr = metadata.pr ? { ...metadata.pr, head: commit.sha } : undefined;
const objects = {
entry: { path: entry.path, sha: entry.sha },
entry: { path: entry.path, sha: entry.sha as string },
files: mediaFilesList,
};
@ -1114,7 +1097,7 @@ export default class API {
method: 'POST',
body: JSON.stringify({
title,
body: API.DEFAULT_PR_BODY,
body: DEFAULT_PR_BODY,
head: headReference,
base: this.branch,
}),
@ -1150,10 +1133,10 @@ export default class API {
method: 'PUT',
body: JSON.stringify({
// eslint-disable-next-line @typescript-eslint/camelcase
commit_message: 'Automatically generated. Merged on Netlify CMS.',
commit_message: MERGE_COMMIT_MESSAGE,
sha: headSha,
// eslint-disable-next-line @typescript-eslint/camelcase
merge_method: this.merge_method,
merge_method: this.mergeMethod,
}),
}).catch(error => {
if (error instanceof APIError && error.status === 405) {
@ -1184,7 +1167,7 @@ export default class API {
return Promise.resolve(Base64.encode(str));
}
uploadBlob(item: { raw?: string; sha?: string }) {
uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
const content = result(item, 'toBase64', partial(this.toBase64, item.raw as string));
return content.then(contentBase64 =>

View File

@ -1,6 +1,5 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
import { AuthenticationPage, Icon } from 'netlify-cms-ui-default';
@ -28,7 +27,7 @@ export default class GitHubAuthenticationPage extends React.Component {
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: ImmutablePropTypes.map,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
};
@ -75,11 +74,12 @@ export default class GitHubAuthenticationPage extends React.Component {
};
const auth = new NetlifyAuthenticator(cfg);
const openAuthoring = this.props.config.getIn(['backend', 'open_authoring'], false);
const scope = this.props.config.getIn(
['backend', 'auth_scope'],
openAuthoring ? 'public_repo' : 'repo',
);
const {
open_authoring: openAuthoring = false,
auth_scope: authScope = '',
} = this.props.config.backend;
const scope = authScope || (openAuthoring ? 'public_repo' : 'repo');
auth.authenticate({ provider: 'github', scope }, (err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
@ -137,8 +137,8 @@ export default class GitHubAuthenticationPage extends React.Component {
onLogin={this.handleLogin}
loginDisabled={inProgress || findingFork || requestingFork}
loginErrorMessage={loginError}
logoUrl={config.get('logo_url')}
siteUrl={config.get('site_url')}
logoUrl={config.logo_url}
siteUrl={config.site_url}
{...this.getAuthenticationPageRenderArgs()}
/>
);

View File

@ -1,16 +1,24 @@
import { ApolloClient } from 'apollo-client';
import { ApolloClient, QueryOptions, MutationOptions, OperationVariables } from 'apollo-client';
import {
InMemoryCache,
defaultDataIdFromObject,
IntrospectionFragmentMatcher,
NormalizedCacheObject,
} from 'apollo-cache-inmemory';
import { createHttpLink } from 'apollo-link-http';
import { setContext } from 'apollo-link-context';
import { APIError, EditorialWorkflowError } from 'netlify-cms-lib-util';
import {
APIError,
EditorialWorkflowError,
readFile,
localForage,
DEFAULT_PR_BODY,
} from 'netlify-cms-lib-util';
import introspectionQueryResultData from './fragmentTypes';
import API from './API';
import API, { Config, BlobArgs, PR, API_NAME } from './API';
import * as queries from './queries';
import * as mutations from './mutations';
import { GraphQLError } from 'graphql';
// Apollo fetch policies: NO_CACHE always goes to the network; CACHE_FIRST
// serves from the local cache when possible (safe for sha-addressed data,
// since a blob's sha is derived from its content and never changes).
const NO_CACHE = 'no-cache';
const CACHE_FIRST = 'cache-first';
@ -19,16 +27,44 @@ const fragmentMatcher = new IntrospectionFragmentMatcher({
introspectionQueryResultData,
});
// One node of a GitHub GraphQL tree response. Subtrees carry their children
// in `object.entries`; file leaves carry blob metadata in `blob`.
interface TreeEntry {
  // Present when `type === 'tree'`: the subtree's child entries.
  object?: {
    entries: TreeEntry[];
  };
  type: 'blob' | 'tree';
  name: string;
  sha: string;
  // Present when `type === 'blob'`: size of the blob in bytes.
  blob?: {
    size: number;
  };
}
// Flattened file record produced by `getAllFiles` from a recursive tree query.
interface TreeFile {
  // Full path from the listing root (e.g. 'posts/2019/nested-post.md').
  path: string;
  // Blob sha of the file contents.
  id: string;
  // Blob size in bytes; 0 when the blob size was not returned.
  size: number;
  // Entry type, e.g. 'blob'.
  type: string;
  name: string;
}
type Error = GraphQLError & { type: string };
export default class GraphQLAPI extends API {
constructor(config) {
repoOwner: string;
repoName: string;
originRepoOwner: string;
originRepoName: string;
client: ApolloClient<NormalizedCacheObject>;
constructor(config: Config) {
super(config);
const [repoParts, originRepoParts] = [this.repo.split('/'), this.originRepo.split('/')];
this.repo_owner = repoParts[0];
this.repo_name = repoParts[1];
this.repoOwner = repoParts[0];
this.repoName = repoParts[1];
this.origin_repo_owner = originRepoParts[0];
this.origin_repo_name = originRepoParts[1];
this.originRepoOwner = originRepoParts[0];
this.originRepoName = originRepoParts[1];
this.client = this.getApolloClient();
}
@ -43,7 +79,7 @@ export default class GraphQLAPI extends API {
},
};
});
const httpLink = createHttpLink({ uri: `${this.api_root}/graphql` });
const httpLink = createHttpLink({ uri: `${this.apiRoot}/graphql` });
return new ApolloClient({
link: authLink.concat(httpLink),
cache: new InMemoryCache({ fragmentMatcher }),
@ -64,7 +100,7 @@ export default class GraphQLAPI extends API {
return this.client.resetStore();
}
async getRepository(owner, name) {
async getRepository(owner: string, name: string) {
const { data } = await this.query({
query: queries.repository,
variables: { owner, name },
@ -73,20 +109,20 @@ export default class GraphQLAPI extends API {
return data.repository;
}
query(options = {}) {
query(options: QueryOptions<OperationVariables>) {
return this.client.query(options).catch(error => {
throw new APIError(error.message, 500, 'GitHub');
});
}
mutate(options = {}) {
mutate(options: MutationOptions<OperationVariables>) {
return this.client.mutate(options).catch(error => {
throw new APIError(error.message, 500, 'GitHub');
});
}
async hasWriteAccess() {
const { repo_owner: owner, repo_name: name } = this;
const { repoOwner: owner, repoName: name } = this;
try {
const { data } = await this.query({
query: queries.repoPermission,
@ -110,7 +146,7 @@ export default class GraphQLAPI extends API {
return data.viewer;
}
async retrieveBlobObject(owner, name, expression, options = {}) {
async retrieveBlobObject(owner: string, name: string, expression: string, options = {}) {
const { data } = await this.query({
query: queries.blob,
variables: { owner, name, expression },
@ -118,62 +154,67 @@ export default class GraphQLAPI extends API {
});
// https://developer.github.com/v4/object/blob/
if (data.repository.object) {
const { is_binary, text } = data.repository.object;
return { is_null: false, is_binary, text };
const { is_binary: isBinary, text } = data.repository.object;
return { isNull: false, isBinary, text };
} else {
return { is_null: true };
return { isNull: true };
}
}
getOwnerAndNameFromRepoUrl(repoURL) {
let { repo_owner: owner, repo_name: name } = this;
getOwnerAndNameFromRepoUrl(repoURL: string) {
let { repoOwner: owner, repoName: name } = this;
if (repoURL === this.originRepoURL) {
({ origin_repo_owner: owner, origin_repo_name: name } = this);
({ originRepoOwner: owner, originRepoName: name } = this);
}
return { owner, name };
}
async retrieveContent({ path, branch, repoURL, parseText }) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { is_null, is_binary, text } = await this.retrieveBlobObject(
owner,
name,
`${branch}:${path}`,
);
if (is_null) {
throw new APIError('Not Found', 404, 'GitHub');
} else if (!is_binary) {
return text;
} else {
return super.retrieveContent({ path, branch, repoURL, parseText });
async readFile(
path: string,
sha?: string | null,
{
branch = this.branch,
repoURL = this.repoURL,
parseText = true,
}: {
branch?: string;
repoURL?: string;
parseText?: boolean;
} = {},
) {
if (!sha) {
sha = await this.getFileSha(path, { repoURL, branch });
}
const fetchContent = () => this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
const content = await readFile(sha, fetchContent, localForage, parseText);
return content;
}
async fetchBlobContent(sha, repoURL, parseText) {
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
if (!parseText) {
return super.fetchBlobContent(sha, repoURL);
return super.fetchBlobContent({ sha, repoURL, parseText });
}
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { is_null, is_binary, text } = await this.retrieveBlobObject(
const { isNull, isBinary, text } = await this.retrieveBlobObject(
owner,
name,
sha,
{ fetchPolicy: CACHE_FIRST }, // blob sha is derived from file content
);
if (is_null) {
if (isNull) {
throw new APIError('Not Found', 404, 'GitHub');
} else if (!is_binary) {
} else if (!isBinary) {
return text;
} else {
return super.fetchBlobContent(sha, repoURL);
return super.fetchBlobContent({ sha, repoURL, parseText });
}
}
async getStatuses(sha) {
const { origin_repo_owner: owner, origin_repo_name: name } = this;
async getStatuses(sha: string) {
const { originRepoOwner: owner, originRepoName: name } = this;
const { data } = await this.query({ query: queries.statues, variables: { owner, name, sha } });
if (data.repository.object) {
const { status } = data.repository.object;
@ -184,8 +225,8 @@ export default class GraphQLAPI extends API {
}
}
getAllFiles(entries, path) {
const allFiles = entries.reduce((acc, item) => {
getAllFiles(entries: TreeEntry[], path: string) {
const allFiles: TreeFile[] = entries.reduce((acc, item) => {
if (item.type === 'tree') {
const entries = item.object?.entries || [];
return [...acc, ...this.getAllFiles(entries, `${path}/${item.name}`)];
@ -193,19 +234,21 @@ export default class GraphQLAPI extends API {
return [
...acc,
{
...item,
name: item.name,
type: item.type,
id: item.sha,
path: `${path}/${item.name}`,
size: item.blob && item.blob.size,
size: item.blob ? item.blob.size : 0,
},
];
}
return acc;
}, []);
}, [] as TreeFile[]);
return allFiles;
}
async listFiles(path, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
async listFiles(path: string, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { data } = await this.query({
query: queries.files(depth),
@ -228,14 +271,18 @@ export default class GraphQLAPI extends API {
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
);
const { repo_owner: owner, repo_name: name } = this;
const { repoOwner: owner, repoName: name } = this;
const { data } = await this.query({
query: queries.unpublishedPrBranches,
variables: { owner, name },
});
const { nodes } = data.repository.refs;
const { nodes } = data.repository.refs as {
nodes: {
associatedPullRequests: { nodes: { headRef: { prefix: string; name: string } }[] };
}[];
};
if (nodes.length > 0) {
const branches = [];
const branches = [] as { ref: string }[];
nodes.forEach(({ associatedPullRequests }) => {
associatedPullRequests.nodes.forEach(({ headRef }) => {
branches.push({ ref: `${headRef.prefix}${headRef.name}` });
@ -252,13 +299,13 @@ export default class GraphQLAPI extends API {
}
}
async readUnpublishedBranchFile(contentKey) {
async readUnpublishedBranchFile(contentKey: string) {
// retrieveMetadata(contentKey) rejects in case of no metadata
const metaData = await this.retrieveMetadata(contentKey).catch(() => null);
if (metaData && metaData.objects && metaData.objects.entry && metaData.objects.entry.path) {
const { path } = metaData.objects.entry;
const { repo_owner: headOwner, repo_name: headRepoName } = this;
const { origin_repo_owner: baseOwner, origin_repo_name: baseRepoName } = this;
const { repoOwner: headOwner, repoName: headRepoName } = this;
const { originRepoOwner: baseOwner, originRepoName: baseRepoName } = this;
const { data } = await this.query({
query: queries.unpublishedBranchFile,
@ -278,6 +325,7 @@ export default class GraphQLAPI extends API {
metaData,
fileData: data.head.object.text,
isModification: !!data.base.object,
slug: this.slugFromContentKey(contentKey, metaData.collection),
};
return result;
} else {
@ -285,11 +333,11 @@ export default class GraphQLAPI extends API {
}
}
getBranchQualifiedName(branch) {
getBranchQualifiedName(branch: string) {
return `refs/heads/${branch}`;
}
getBranchQuery(branch, owner, name) {
getBranchQuery(branch: string, owner: string, name: string) {
return {
query: queries.branch,
variables: {
@ -302,20 +350,20 @@ export default class GraphQLAPI extends API {
async getDefaultBranch() {
const { data } = await this.query({
...this.getBranchQuery(this.branch, this.origin_repo_owner, this.origin_repo_name),
...this.getBranchQuery(this.branch, this.originRepoOwner, this.originRepoName),
});
return data.repository.branch;
}
async getBranch(branch) {
async getBranch(branch: string) {
const { data } = await this.query({
...this.getBranchQuery(branch, this.repo_owner, this.repo_name),
...this.getBranchQuery(branch, this.repoOwner, this.repoName),
fetchPolicy: CACHE_FIRST,
});
return data.repository.branch;
}
async patchRef(type, name, sha, opts = {}) {
async patchRef(type: string, name: string, sha: string, opts: { force?: boolean } = {}) {
if (type !== 'heads') {
return super.patchRef(type, name, sha, opts);
}
@ -329,24 +377,25 @@ export default class GraphQLAPI extends API {
input: { oid: sha, refId: branch.id, force },
},
});
return data.updateRef.branch;
return data!.updateRef.branch;
}
async deleteBranch(branchName) {
async deleteBranch(branchName: string) {
const branch = await this.getBranch(branchName);
const { data } = await this.mutate({
mutation: mutations.deleteBranch,
variables: {
deleteRefInput: { refId: branch.id },
},
update: store => store.data.delete(defaultDataIdFromObject(branch)),
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (store: any) => store.data.delete(defaultDataIdFromObject(branch)),
});
return data.deleteRef;
return data!.deleteRef;
}
getPullRequestQuery(number) {
const { origin_repo_owner: owner, origin_repo_name: name } = this;
getPullRequestQuery(number: number) {
const { originRepoOwner: owner, originRepoName: name } = this;
return {
query: queries.pullRequest,
@ -354,7 +403,7 @@ export default class GraphQLAPI extends API {
};
}
async getPullRequest(number) {
async getPullRequest(number: number) {
const { data } = await this.query({
...this.getPullRequestQuery(number),
fetchPolicy: CACHE_FIRST,
@ -370,24 +419,24 @@ export default class GraphQLAPI extends API {
};
}
getPullRequestAndBranchQuery(branch, number) {
const { repo_owner: owner, repo_name: name } = this;
const { origin_repo_owner: origin_owner, origin_repo_name: origin_name } = this;
getPullRequestAndBranchQuery(branch: string, number: number) {
const { repoOwner: owner, repoName: name } = this;
const { originRepoOwner, originRepoName } = this;
return {
query: queries.pullRequestAndBranch,
variables: {
owner,
name,
origin_owner,
origin_name,
originRepoOwner,
originRepoName,
number,
qualifiedName: this.getBranchQualifiedName(branch),
},
};
}
async getPullRequestAndBranch(branch, number) {
async getPullRequestAndBranch(branch: string, number: number) {
const { data } = await this.query({
...this.getPullRequestAndBranchQuery(branch, number),
fetchPolicy: CACHE_FIRST,
@ -397,7 +446,7 @@ export default class GraphQLAPI extends API {
return { branch: repository.branch, pullRequest: origin.pullRequest };
}
async openPR({ number }) {
async openPR({ number }: PR) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
@ -406,7 +455,7 @@ export default class GraphQLAPI extends API {
reopenPullRequestInput: { pullRequestId: pullRequest.id },
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult.reopenPullRequest;
const { pullRequest } = mutationResult!.reopenPullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
@ -416,10 +465,10 @@ export default class GraphQLAPI extends API {
},
});
return data.closePullRequest;
return data!.closePullRequest;
}
async closePR({ number }) {
async closePR({ number }: PR) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
@ -428,7 +477,7 @@ export default class GraphQLAPI extends API {
closePullRequestInput: { pullRequestId: pullRequest.id },
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult.closePullRequest;
const { pullRequest } = mutationResult!.closePullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
@ -438,10 +487,10 @@ export default class GraphQLAPI extends API {
},
});
return data.closePullRequest;
return data!.closePullRequest;
}
async deleteUnpublishedEntry(collectionName, slug) {
async deleteUnpublishedEntry(collectionName: string, slug: string) {
try {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = this.generateBranchName(contentKey);
@ -459,20 +508,21 @@ export default class GraphQLAPI extends API {
deleteRefInput: { refId: branch.id },
closePullRequestInput: { pullRequestId: pullRequest.id },
},
update: store => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (store: any) => {
store.data.delete(defaultDataIdFromObject(branch));
store.data.delete(defaultDataIdFromObject(pullRequest));
},
});
return data.closePullRequest;
return data!.closePullRequest;
} else {
return await this.deleteBranch(branchName);
}
} catch (e) {
const { graphQLErrors } = e;
if (graphQLErrors && graphQLErrors.length > 0) {
const branchNotFound = graphQLErrors.some(e => e.type === 'NOT_FOUND');
const branchNotFound = graphQLErrors.some((e: Error) => e.type === 'NOT_FOUND');
if (branchNotFound) {
return;
}
@ -481,9 +531,9 @@ export default class GraphQLAPI extends API {
}
}
async createPR(title, head) {
async createPR(title: string, head: string) {
const [repository, headReference] = await Promise.all([
this.getRepository(this.origin_repo_owner, this.origin_repo_name),
this.getRepository(this.originRepoOwner, this.originRepoName),
this.useOpenAuthoring ? `${(await this.user()).login}:${head}` : head,
]);
const { data } = await this.mutate({
@ -491,14 +541,14 @@ export default class GraphQLAPI extends API {
variables: {
createPullRequestInput: {
baseRefName: this.branch,
body: API.DEFAULT_PR_BODY,
body: DEFAULT_PR_BODY,
title,
headRefName: headReference,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult.createPullRequest;
const { pullRequest } = mutationResult!.createPullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
@ -507,13 +557,13 @@ export default class GraphQLAPI extends API {
});
},
});
const { pullRequest } = data.createPullRequest;
const { pullRequest } = data!.createPullRequest;
return { ...pullRequest, head: { sha: pullRequest.headRefOid } };
}
async createBranch(branchName, sha) {
const owner = this.repo_owner;
const name = this.repo_name;
async createBranch(branchName: string, sha: string) {
const owner = this.repoOwner;
const name = this.repoName;
const repository = await this.getRepository(owner, name);
const { data } = await this.mutate({
mutation: mutations.createBranch,
@ -525,7 +575,7 @@ export default class GraphQLAPI extends API {
},
},
update: (store, { data: mutationResult }) => {
const { branch } = mutationResult.createRef;
const { branch } = mutationResult!.createRef;
const branchData = { repository: { ...branch.repository, branch } };
store.writeQuery({
@ -534,13 +584,13 @@ export default class GraphQLAPI extends API {
});
},
});
const { branch } = data.createRef;
const { branch } = data!.createRef;
return { ...branch, ref: `${branch.prefix}${branch.name}` };
}
async createBranchAndPullRequest(branchName, sha, title) {
const owner = this.origin_repo_owner;
const name = this.origin_repo_name;
async createBranchAndPullRequest(branchName: string, sha: string, title: string) {
const owner = this.originRepoOwner;
const name = this.originRepoName;
const repository = await this.getRepository(owner, name);
const { data } = await this.mutate({
mutation: mutations.createBranchAndPullRequest,
@ -552,15 +602,15 @@ export default class GraphQLAPI extends API {
},
createPullRequestInput: {
baseRefName: this.branch,
body: API.DEFAULT_PR_BODY,
body: DEFAULT_PR_BODY,
title,
headRefName: branchName,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { branch } = mutationResult.createRef;
const { pullRequest } = mutationResult.createPullRequest;
const { branch } = mutationResult!.createRef;
const { pullRequest } = mutationResult!.createPullRequest;
const branchData = { repository: { ...branch.repository, branch } };
const pullRequestData = {
repository: { ...pullRequest.repository, branch },
@ -578,29 +628,20 @@ export default class GraphQLAPI extends API {
});
},
});
const { pullRequest } = data.createPullRequest;
const { pullRequest } = data!.createPullRequest;
return { ...pullRequest, head: { sha: pullRequest.headRefOid } };
}
/**
 * Fetch the commits of a pull request in the origin repository via GraphQL.
 * Each commit is flattened so `parents` is a plain array instead of the
 * GraphQL `{ nodes: [...] }` connection wrapper.
 *
 * @param number - pull request number in the origin repository
 * @returns array of commit objects with normalized `parents`
 */
async getPullRequestCommits(number) {
  const { origin_repo_owner: owner, origin_repo_name: name } = this;
  const { data } = await this.query({
    query: queries.pullRequestCommits,
    variables: { owner, name, number },
  });
  const { nodes } = data.repository.pullRequest.commits;
  const commits = nodes.map(n => ({ ...n.commit, parents: n.commit.parents.nodes }));
  return commits;
}
async getFileSha(path, branch) {
const { repo_owner: owner, repo_name: name } = this;
async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { data } = await this.query({
query: queries.fileSha,
variables: { owner, name, expression: `${branch}:${path}` },
});
return data.repository.file.sha;
if (data.repository.file) {
return data.repository.file.sha;
}
throw new APIError('Not Found', 404, API_NAME);
}
}

View File

@ -159,60 +159,6 @@ describe('github API', () => {
});
});
describe('getMediaAsBlob', () => {
it('should return response blob on non svg file', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const blob = {};
api.readFile = jest.fn().mockResolvedValue(blob);
await expect(api.getMediaAsBlob('sha', 'static/media/image.png')).resolves.toBe(blob);
expect(api.readFile).toHaveBeenCalledTimes(1);
expect(api.readFile).toHaveBeenCalledWith('static/media/image.png', 'sha', {
parseText: false,
});
});
it('should return text blob on svg file', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const text = 'svg';
api.readFile = jest.fn().mockResolvedValue(text);
await expect(api.getMediaAsBlob('sha', 'static/media/logo.svg')).resolves.toEqual(
new Blob([text], { type: 'image/svg+xml' }),
);
expect(api.readFile).toHaveBeenCalledTimes(1);
expect(api.readFile).toHaveBeenCalledWith('static/media/logo.svg', 'sha', {
parseText: true,
});
});
});
describe('getMediaDisplayURL', () => {
it('should return createObjectURL result', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
const blob = {};
api.getMediaAsBlob = jest.fn().mockResolvedValue(blob);
global.URL.createObjectURL = jest
.fn()
.mockResolvedValue('blob:http://localhost:8080/blob-id');
await expect(api.getMediaDisplayURL('sha', 'static/media/image.png')).resolves.toBe(
'blob:http://localhost:8080/blob-id',
);
expect(api.getMediaAsBlob).toHaveBeenCalledTimes(1);
expect(api.getMediaAsBlob).toHaveBeenCalledWith('sha', 'static/media/image.png');
expect(global.URL.createObjectURL).toHaveBeenCalledTimes(1);
expect(global.URL.createObjectURL).toHaveBeenCalledWith(blob);
});
});
describe('persistFiles', () => {
it('should update tree, commit and patch branch when useWorkflow is false', async () => {
const api = new API({ branch: 'master', repo: 'owner/repo' });
@ -572,4 +518,24 @@ describe('github API', () => {
});
});
});
// Verifies that getStatuses maps raw GitHub commit statuses onto the CMS
// preview-status shape: a 'success' state passes through, any other state
// is normalized to 'other'.
test('should get preview statuses', async () => {
  const api = new API({ repo: 'repo' });
  const statuses = [
    { context: 'deploy', state: 'success', target_url: 'deploy-url' },
    { context: 'build', state: 'error' },
  ];
  // Stub the HTTP layer so no real network request is made.
  api.request = jest.fn(() => Promise.resolve({ statuses }));
  const sha = 'sha';
  await expect(api.getStatuses(sha)).resolves.toEqual([
    { context: 'deploy', state: 'success', target_url: 'deploy-url' },
    { context: 'build', state: 'other' },
  ]);
  expect(api.request).toHaveBeenCalledTimes(1);
  expect(api.request).toHaveBeenCalledWith(`/repos/repo/commits/${sha}/status`);
});
});

View File

@ -44,27 +44,24 @@ describe('github GraphQL API', () => {
expect(api.getAllFiles(entries, path)).toEqual([
{
name: 'post-1.md',
sha: 'sha-1',
id: 'sha-1',
type: 'blob',
size: 1,
path: 'posts/post-1.md',
blob: { size: 1 },
},
{
name: 'post-2.md',
sha: 'sha-2',
id: 'sha-2',
type: 'blob',
size: 2,
path: 'posts/post-2.md',
blob: { size: 2 },
},
{
name: 'nested-post.md',
sha: 'nested-post-sha',
id: 'nested-post-sha',
type: 'blob',
size: 3,
path: 'posts/2019/nested-post.md',
blob: { size: 3 },
},
]);
});

View File

@ -4,20 +4,11 @@ jest.spyOn(console, 'error').mockImplementation(() => {});
describe('github backend implementation', () => {
const config = {
getIn: jest.fn().mockImplementation(array => {
if (array[0] === 'backend' && array[1] === 'repo') {
return 'owner/repo';
}
if (array[0] === 'backend' && array[1] === 'open_authoring') {
return false;
}
if (array[0] === 'backend' && array[1] === 'branch') {
return 'master';
}
if (array[0] === 'backend' && array[1] === 'api_root') {
return 'https://api.github.com';
}
}),
backend: {
repo: 'owner/repo',
open_authoring: false,
api_root: 'https://api.github.com',
},
};
const createObjectURL = jest.fn();
@ -102,7 +93,7 @@ describe('github backend implementation', () => {
};
expect.assertions(5);
await expect(gitHubImplementation.persistMedia(mediaFile)).resolves.toEqual({
await expect(gitHubImplementation.persistMedia(mediaFile, {})).resolves.toEqual({
id: 0,
name: 'image.png',
size: 100,
@ -140,9 +131,9 @@ describe('github backend implementation', () => {
});
describe('loadEntryMediaFiles', () => {
const getMediaAsBlob = jest.fn();
const readFile = jest.fn();
const mockAPI = {
getMediaAsBlob,
readFile,
};
it('should return media files from meta data', async () => {
@ -150,18 +141,17 @@ describe('github backend implementation', () => {
gitHubImplementation.api = mockAPI;
const blob = new Blob(['']);
getMediaAsBlob.mockResolvedValue(blob);
readFile.mockResolvedValue(blob);
const file = new File([blob], name);
await expect(
gitHubImplementation.loadEntryMediaFiles([
{ path: 'static/media/image.png', sha: 'image.png' },
gitHubImplementation.loadEntryMediaFiles('branch', [
{ path: 'static/media/image.png', id: 'sha' },
]),
).resolves.toEqual([
{
id: 'image.png',
sha: 'image.png',
id: 'sha',
displayURL: 'displayURL',
path: 'static/media/image.png',
name: 'image.png',
@ -186,24 +176,27 @@ describe('github backend implementation', () => {
gitHubImplementation.api = mockAPI;
gitHubImplementation.loadEntryMediaFiles = jest
.fn()
.mockResolvedValue([{ path: 'image.png', sha: 'sha' }]);
.mockResolvedValue([{ path: 'image.png', id: 'sha' }]);
generateContentKey.mockReturnValue('contentKey');
const data = {
fileData: 'fileData',
isModification: true,
metaData: { objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png' }] } },
metaData: {
branch: 'branch',
objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png', sha: 'sha' }] },
},
};
readUnpublishedBranchFile.mockResolvedValue(data);
const collection = { get: jest.fn().mockReturnValue('posts') };
const collection = 'posts';
await expect(gitHubImplementation.unpublishedEntry(collection, 'slug')).resolves.toEqual({
slug: 'slug',
file: { path: 'entry-path' },
file: { path: 'entry-path', id: null },
data: 'fileData',
metaData: { objects: { entry: { path: 'entry-path' }, files: [{ path: 'image.png' }] } },
mediaFiles: [{ path: 'image.png', sha: 'sha' }],
metaData: data.metaData,
mediaFiles: [{ path: 'image.png', id: 'sha' }],
isModification: true,
});
@ -214,9 +207,9 @@ describe('github backend implementation', () => {
expect(readUnpublishedBranchFile).toHaveBeenCalledWith('contentKey');
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledTimes(1);
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledWith(
data.metaData.objects.files,
);
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledWith('branch', [
{ path: 'image.png', id: 'sha' },
]);
});
});
});

View File

@ -1,512 +0,0 @@
import React from 'react';
import trimStart from 'lodash/trimStart';
import semaphore from 'semaphore';
import { stripIndent } from 'common-tags';
import { asyncLock, basename, getCollectionDepth } from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { get } from 'lodash';
import API from './API';
import GraphQLAPI from './GraphQLAPI';
const MAX_CONCURRENT_DOWNLOADS = 10;
/**
* Keywords for inferring a status that will provide a deploy preview URL.
*/
const PREVIEW_CONTEXT_KEYWORDS = ['deploy'];
/**
* Check a given status context string to determine if it provides a link to a
* deploy preview. Checks for an exact match against `previewContext` if given,
* otherwise checks for inclusion of a value from `PREVIEW_CONTEXT_KEYWORDS`.
*/
function isPreviewContext(context, previewContext) {
if (previewContext) {
return context === previewContext;
}
return PREVIEW_CONTEXT_KEYWORDS.some(keyword => context.includes(keyword));
}
/**
* Retrieve a deploy preview URL from an array of statuses. By default, a
* matching status is inferred via `isPreviewContext`.
*/
function getPreviewStatus(statuses, config) {
const previewContext = config.getIn(['backend', 'preview_context']);
return statuses.find(({ context }) => {
return isPreviewContext(context, previewContext);
});
}
// Netlify CMS backend for GitHub (pre-TypeScript implementation).
// Wraps the REST client (API) or GraphQL client (GraphQLAPI) and implements the
// backend contract the CMS core expects: authentication (including Open
// Authoring via forks), entry/media reads and writes, and the editorial
// workflow (unpublished entries, statuses, publish/delete).
export default class GitHub {
// config: Immutable.js CMS config; options may inject a pre-built API client
// (used by tests) and carry workflow flags from the core.
constructor(config, options = {}) {
this.config = config;
this.options = {
proxied: false,
API: null,
...options,
};
// A "repo" is mandatory unless requests are proxied (e.g. git-gateway).
if (!this.options.proxied && config.getIn(['backend', 'repo']) == null) {
throw new Error('The GitHub backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.openAuthoringEnabled = config.getIn(['backend', 'open_authoring'], false);
if (this.openAuthoringEnabled) {
// Open Authoring writes to user forks, which only makes sense with the
// editorial workflow (changes land as PRs against the origin repo).
if (!this.options.useWorkflow) {
throw new Error(
'backend.open_authoring is true but publish_mode is not set to editorial_workflow.',
);
}
this.originRepo = config.getIn(['backend', 'repo'], '');
} else {
// Without Open Authoring the working repo and the origin repo coincide.
this.repo = this.originRepo = config.getIn(['backend', 'repo'], '');
}
this.branch = config.getIn(['backend', 'branch'], 'master').trim();
this.api_root = config.getIn(['backend', 'api_root'], 'https://api.github.com');
this.token = '';
this.squash_merges = config.getIn(['backend', 'squash_merges']);
this.use_graphql = config.getIn(['backend', 'use_graphql']);
this.lock = asyncLock();
}
// Runs `func` under the backend's async lock so transactional operations
// (persist/publish/delete) don't interleave. Logs `message` if the lock was
// already held; always releases the lock afterwards.
async runWithLock(func, message) {
try {
const acquired = await this.lock.acquire();
if (!acquired) {
console.warn(message);
}
const result = await func();
return result;
} finally {
this.lock.release();
}
}
// Returns the React login component, bound to this backend instance.
authComponent() {
const wrappedAuthenticationPage = props => <AuthenticationPage {...props} backend={this} />;
wrappedAuthenticationPage.displayName = 'AuthenticationPage';
return wrappedAuthenticationPage;
}
// Restores a persisted session. With Open Authoring the fork flow must be
// re-established before the regular authentication runs.
restoreUser(user) {
return this.openAuthoringEnabled
? this.authenticateWithFork({ userData: user, getPermissionToFork: () => true }).then(() =>
this.authenticate(user),
)
: this.authenticate(user);
}
// Polls the GitHub API until the freshly created fork is visible (fork
// creation is asynchronous on GitHub's side). A 404 simply means "not yet".
async pollUntilForkExists({ repo, token }) {
const pollDelay = 250; // milliseconds
var repoExists = false;
while (!repoExists) {
repoExists = await fetch(`${this.api_root}/repos/${repo}`, {
headers: { Authorization: `token ${token}` },
})
.then(() => true)
.catch(err => {
if (err && err.status === 404) {
console.log('This 404 was expected and handled appropriately.');
return false;
} else {
return Promise.reject(err);
}
});
// wait between polls
if (!repoExists) {
await new Promise(resolve => setTimeout(resolve, pollDelay));
}
}
return Promise.resolve();
}
// Fetches the authenticated GitHub user; memoized on the instance so repeated
// calls share one request.
async currentUser({ token }) {
if (!this._currentUserPromise) {
this._currentUserPromise = fetch(`${this.api_root}/user`, {
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
}
return this._currentUserPromise;
}
// True when the user has admin/write permission on the origin repo
// (collaborator permission endpoint); memoized per username.
async userIsOriginMaintainer({ username: usernameArg, token }) {
const username = usernameArg || (await this.currentUser({ token })).login;
this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
if (!this._userIsOriginMaintainerPromises[username]) {
this._userIsOriginMaintainerPromises[username] = fetch(
`${this.api_root}/repos/${this.originRepo}/collaborators/${username}/permission`,
{
headers: {
Authorization: `token ${token}`,
},
},
)
.then(res => res.json())
.then(({ permission }) => permission === 'admin' || permission === 'write');
}
return this._userIsOriginMaintainerPromises[username];
}
// Checks whether the current user already has a fork of the origin repo.
// Any request failure is treated as "no fork".
async forkExists({ token }) {
try {
const currentUser = await this.currentUser({ token });
const repoName = this.originRepo.split('/')[1];
const repo = await fetch(`${this.api_root}/repos/${currentUser.login}/${repoName}`, {
method: 'GET',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
// https://developer.github.com/v3/repos/#get
// The parent and source objects are present when the repository is a fork.
// parent is the repository this repository was forked from, source is the ultimate source for the network.
const forkExists =
repo.fork === true &&
repo.parent &&
repo.parent.full_name.toLowerCase() === this.originRepo.toLowerCase();
return forkExists;
} catch {
return false;
}
}
// Open Authoring entry point: maintainers keep working on the origin repo;
// everyone else gets a fork created (after user consent) and waits for it.
async authenticateWithFork({ userData, getPermissionToFork }) {
if (!this.openAuthoringEnabled) {
throw new Error('Cannot authenticate with fork; Open Authoring is turned off.');
}
const { token } = userData;
// Origin maintainers should be able to use the CMS normally
if (await this.userIsOriginMaintainer({ token })) {
this.repo = this.originRepo;
this.useOpenAuthoring = false;
return Promise.resolve();
}
if (!(await this.forkExists({ token }))) {
await getPermissionToFork();
}
// POST /forks is idempotent-ish: it returns the existing fork if one exists.
const fork = await fetch(`${this.api_root}/repos/${this.originRepo}/forks`, {
method: 'POST',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
this.useOpenAuthoring = true;
this.repo = fork.full_name;
return this.pollUntilForkExists({ repo: fork.full_name, token });
}
// Instantiates the API client (REST or GraphQL depending on config), verifies
// repo access, and returns the user object augmented with the token.
async authenticate(state) {
this.token = state.token;
const apiCtor = this.use_graphql ? GraphQLAPI : API;
this.api = new apiCtor({
token: this.token,
branch: this.branch,
repo: this.repo,
originRepo: this.originRepo,
api_root: this.api_root,
squash_merges: this.squash_merges,
useOpenAuthoring: this.useOpenAuthoring,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const user = await this.api.user();
const isCollab = await this.api.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitHub account with access.
If your repo is under an organization, ensure the organization has granted access to Netlify
CMS.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitHub user account does not have access to this repo.');
}
// Authorized user
return { ...user, token: state.token, useOpenAuthoring: this.useOpenAuthoring };
}
// Clears the token and resets the API client if it supports resetting
// (the GraphQL client clears its cache here).
logout() {
this.token = null;
if (this.api && typeof this.api.reset === 'function') {
return this.api.reset();
}
return;
}
getToken() {
return Promise.resolve(this.token);
}
// Lists entries of a folder collection, filtered by file extension. Open
// Authoring reads from the origin repo so forks don't need to be up to date.
async entriesByFolder(collection, extension) {
const repoURL = this.useOpenAuthoring ? this.api.originRepoURL : this.api.repoURL;
const files = await this.api.listFiles(collection.get('folder'), {
repoURL,
depth: getCollectionDepth(collection),
});
const filteredFiles = files.filter(file => file.name.endsWith('.' + extension));
return this.fetchFiles(filteredFiles, { repoURL });
}
// Lists entries of a file collection (explicit list of files in config).
entriesByFiles(collection) {
const repoURL = this.useOpenAuthoring ? this.api.originRepoURL : this.api.repoURL;
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
return this.fetchFiles(files, { repoURL });
}
// Downloads file contents with bounded concurrency; failed reads are logged
// and dropped from the result rather than failing the whole batch.
fetchFiles = (files, { repoURL = this.api.repoURL } = {}) => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
files.forEach(file => {
promises.push(
new Promise(resolve =>
sem.take(() =>
this.api
.readFile(file.path, file.sha, { repoURL })
.then(data => {
resolve({ file, data });
sem.leave();
})
.catch((err = true) => {
sem.leave();
console.error(`failed to load file from GitHub: ${file.path}`);
resolve({ error: err });
}),
),
),
);
});
return Promise.all(promises).then(loadedEntries =>
loadedEntries.filter(loadedEntry => !loadedEntry.error),
);
};
// Fetches a single entry.
getEntry(collection, slug, path) {
const repoURL = this.api.originRepoURL;
return this.api.readFile(path, null, { repoURL }).then(data => ({
file: { path },
data,
}));
}
// Lists the media folder; returns metadata only (no blobs).
getMedia(mediaFolder = this.config.get('media_folder')) {
return this.api.listFiles(mediaFolder).then(files =>
files.map(({ sha, name, size, path }) => {
// load media using getMediaDisplayURL to avoid token expiration with GitHub raw content urls
// for private repositories
return { id: sha, name, size, displayURL: { id: sha, path }, path };
}),
);
}
// Downloads a single media file as a blob and wraps it in a File with an
// object URL for display.
async getMediaFile(path) {
const blob = await this.api.getMediaAsBlob(null, path);
const name = basename(path);
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
// Resolves a { id, path } display URL descriptor to an actual URL via the API.
async getMediaDisplayURL(displayURL) {
const { id, path } = displayURL;
const mediaURL = await this.api.getMediaDisplayURL(id, path);
return mediaURL;
}
persistEntry(entry, mediaFiles = [], options = {}) {
// persistEntry is a transactional operation
return this.runWithLock(
() => this.api.persistFiles(entry, mediaFiles, options),
'Failed to acquire persist entry lock',
);
}
// Uploads a single media file and returns its display metadata.
async persistMedia(mediaFile, options = {}) {
try {
await this.api.persistFiles(null, [mediaFile], options);
const { sha, path, fileObj } = mediaFile;
const displayURL = URL.createObjectURL(fileObj);
return {
id: sha,
name: fileObj.name,
size: fileObj.size,
displayURL,
path: trimStart(path, '/'),
};
} catch (error) {
console.error(error);
throw error;
}
}
deleteFile(path, commitMessage, options) {
return this.api.deleteFile(path, commitMessage, options);
}
// Fetches one media file referenced by an unpublished entry as a File object.
async loadMediaFile(file) {
return this.api.getMediaAsBlob(file.sha, file.path).then(blob => {
const name = basename(file.path);
const fileObj = new File([blob], name);
return {
id: file.sha,
sha: file.sha,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
// Loads all media files attached to an unpublished entry in parallel.
async loadEntryMediaFiles(files) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(file)));
return mediaFiles;
}
// Lists all unpublished (editorial workflow) entries across CMS branches.
// Unreadable branches resolve to null and a missing workflow setup ("Not
// Found") yields an empty list instead of an error.
unpublishedEntries() {
return this.api
.listUnpublishedBranches()
.then(branches => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
branches.map(({ ref }) => {
promises.push(
new Promise(resolve => {
const contentKey = this.api.contentKeyFromRef(ref);
return sem.take(() =>
this.api
.readUnpublishedBranchFile(contentKey)
.then(data => {
if (data === null || data === undefined) {
resolve(null);
sem.leave();
} else {
resolve({
slug: this.api.slugFromContentKey(contentKey, data.metaData.collection),
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
isModification: data.isModification,
});
sem.leave();
}
})
.catch(() => {
sem.leave();
resolve(null);
}),
);
}),
);
});
return Promise.all(promises);
})
.catch(error => {
if (error.message === 'Not Found') {
return Promise.resolve([]);
}
return Promise.reject(error);
});
}
// Loads a single unpublished entry (entry data, metadata and media files).
// Returns null when there is no unpublished version for the slug.
// loadEntryMediaFiles is injectable for tests.
async unpublishedEntry(
collection,
slug,
{ loadEntryMediaFiles = files => this.loadEntryMediaFiles(files) } = {},
) {
const contentKey = this.api.generateContentKey(collection.get('name'), slug);
const data = await this.api.readUnpublishedBranchFile(contentKey);
if (!data) {
return null;
}
const files = get(data, 'metaData.objects.files', []);
const mediaFiles = await loadEntryMediaFiles(files);
return {
slug,
file: { path: data.metaData.objects.entry.path },
data: data.fileData,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}
/**
* Uses GitHub's Statuses API to retrieve statuses, infers which is for a
* deploy preview via `getPreviewStatus`. Returns the url provided by the
* status, as well as the status state, which should be one of 'success',
* 'pending', and 'failure'.
*/
async getDeployPreview(collection, slug) {
const contentKey = this.api.generateContentKey(collection.get('name'), slug);
const data = await this.api.retrieveMetadata(contentKey);
if (!data || !data.pr) {
return null;
}
// pr.head may be a bare SHA string or a full head object depending on API.
const headSHA = typeof data.pr.head === 'string' ? data.pr.head : data.pr.head.sha;
const statuses = await this.api.getStatuses(headSHA);
const deployStatus = getPreviewStatus(statuses, this.config);
if (deployStatus) {
const { target_url, state } = deployStatus;
return { url: target_url, status: state };
}
}
updateUnpublishedEntryStatus(collection, slug, newStatus) {
// updateUnpublishedEntryStatus is a transactional operation
return this.runWithLock(
() => this.api.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
deleteUnpublishedEntry(collection, slug) {
// deleteUnpublishedEntry is a transactional operation
return this.runWithLock(
() => this.api.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
publishUnpublishedEntry(collection, slug) {
// publishUnpublishedEntry is a transactional operation
return this.runWithLock(
() => this.api.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
}

View File

@ -0,0 +1,498 @@
import * as React from 'react';
import semaphore, { Semaphore } from 'semaphore';
import trimStart from 'lodash/trimStart';
import { stripIndent } from 'common-tags';
import {
asyncLock,
basename,
AsyncLock,
Implementation,
AssetProxy,
PersistOptions,
DisplayURL,
getBlobSHA,
entriesByFolder,
entriesByFiles,
unpublishedEntries,
User,
getMediaDisplayURL,
getMediaAsBlob,
Credentials,
filterByPropExtension,
Config,
ImplementationFile,
getPreviewStatus,
UnpublishedEntryMediaFile,
runWithLock,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { UsersGetAuthenticatedResponse as GitHubUser } from '@octokit/rest';
import API, { Entry } from './API';
import GraphQLAPI from './GraphQLAPI';
const MAX_CONCURRENT_DOWNLOADS = 10;
// TypeScript Netlify CMS backend for GitHub. Implements the shared
// `Implementation` interface from netlify-cms-lib-util, delegating most
// list/read plumbing to shared helpers (entriesByFolder, entriesByFiles,
// unpublishedEntries, getMediaAsBlob, runWithLock, ...) and the REST/GraphQL
// clients for the actual GitHub calls.
export default class GitHub implements Implementation {
lock: AsyncLock;
api: API | null;
options: {
proxied: boolean;
API: API | null;
useWorkflow?: boolean;
initialWorkflowStatus: string;
};
originRepo: string;
repo?: string;
openAuthoringEnabled: boolean;
useOpenAuthoring?: boolean;
branch: string;
apiRoot: string;
mediaFolder: string;
previewContext: string;
token: string | null;
squashMerges: boolean;
useGraphql: boolean;
// Memoized current-user request (one fetch per session).
_currentUserPromise?: Promise<GitHubUser>;
// Memoized per-username maintainer-permission checks.
_userIsOriginMaintainerPromises?: {
[key: string]: Promise<boolean>;
};
// Lazily created semaphore bounding concurrent media downloads.
_mediaDisplayURLSem?: Semaphore;
// config: plain (non-Immutable) CMS config; options may inject a pre-built
// API client (tests) and carry workflow flags from the core.
constructor(config: Config, options = {}) {
this.options = {
proxied: false,
API: null,
initialWorkflowStatus: '',
...options,
};
// A "repo" is mandatory unless requests are proxied (e.g. git-gateway).
if (
!this.options.proxied &&
(config.backend.repo === null || config.backend.repo === undefined)
) {
throw new Error('The GitHub backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.openAuthoringEnabled = config.backend.open_authoring || false;
if (this.openAuthoringEnabled) {
// Open Authoring writes to user forks and only works with the
// editorial workflow (changes land as PRs against the origin repo).
if (!this.options.useWorkflow) {
throw new Error(
'backend.open_authoring is true but publish_mode is not set to editorial_workflow.',
);
}
this.originRepo = config.backend.repo || '';
} else {
// Without Open Authoring the working repo and the origin repo coincide.
this.repo = this.originRepo = config.backend.repo || '';
}
this.branch = config.backend.branch?.trim() || 'master';
this.apiRoot = config.backend.api_root || 'https://api.github.com';
this.token = '';
this.squashMerges = config.backend.squash_merges || false;
this.useGraphql = config.backend.use_graphql || false;
this.mediaFolder = config.media_folder;
this.previewContext = config.backend.preview_context || '';
this.lock = asyncLock();
}
// Returns the React login component, bound to this backend instance.
authComponent() {
const wrappedAuthenticationPage = (props: Record<string, unknown>) => (
<AuthenticationPage {...props} backend={this} />
);
wrappedAuthenticationPage.displayName = 'AuthenticationPage';
return wrappedAuthenticationPage;
}
// Restores a persisted session; with Open Authoring the fork flow is
// re-established first, then the regular authentication runs.
restoreUser(user: User) {
return this.openAuthoringEnabled
? this.authenticateWithFork({ userData: user, getPermissionToFork: () => true }).then(() =>
this.authenticate(user),
)
: this.authenticate(user);
}
// Polls the GitHub API until a freshly created fork becomes visible (fork
// creation is asynchronous on GitHub's side); a 404 means "not yet".
async pollUntilForkExists({ repo, token }: { repo: string; token: string }) {
const pollDelay = 250; // milliseconds
let repoExists = false;
while (!repoExists) {
repoExists = await fetch(`${this.apiRoot}/repos/${repo}`, {
headers: { Authorization: `token ${token}` },
})
.then(() => true)
.catch(err => {
if (err && err.status === 404) {
console.log('This 404 was expected and handled appropriately.');
return false;
} else {
return Promise.reject(err);
}
});
// wait between polls
if (!repoExists) {
await new Promise(resolve => setTimeout(resolve, pollDelay));
}
}
return Promise.resolve();
}
// Fetches the authenticated GitHub user; memoized on the instance.
async currentUser({ token }: { token: string }) {
if (!this._currentUserPromise) {
this._currentUserPromise = fetch(`${this.apiRoot}/user`, {
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
}
return this._currentUserPromise;
}
// True when the user has admin/write permission on the origin repo
// (collaborator permission endpoint); memoized per username.
async userIsOriginMaintainer({
username: usernameArg,
token,
}: {
username?: string;
token: string;
}) {
const username = usernameArg || (await this.currentUser({ token })).login;
this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
if (!this._userIsOriginMaintainerPromises[username]) {
this._userIsOriginMaintainerPromises[username] = fetch(
`${this.apiRoot}/repos/${this.originRepo}/collaborators/${username}/permission`,
{
headers: {
Authorization: `token ${token}`,
},
},
)
.then(res => res.json())
.then(({ permission }) => permission === 'admin' || permission === 'write');
}
return this._userIsOriginMaintainerPromises[username];
}
// Checks whether the current user already has a fork of the origin repo;
// any request failure is treated as "no fork".
async forkExists({ token }: { token: string }) {
try {
const currentUser = await this.currentUser({ token });
const repoName = this.originRepo.split('/')[1];
const repo = await fetch(`${this.apiRoot}/repos/${currentUser.login}/${repoName}`, {
method: 'GET',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
// https://developer.github.com/v3/repos/#get
// The parent and source objects are present when the repository is a fork.
// parent is the repository this repository was forked from, source is the ultimate source for the network.
const forkExists =
repo.fork === true &&
repo.parent &&
repo.parent.full_name.toLowerCase() === this.originRepo.toLowerCase();
return forkExists;
} catch {
return false;
}
}
// Open Authoring entry point: maintainers keep working on the origin repo;
// everyone else gets a fork created (after user consent) and waits for it.
async authenticateWithFork({
userData,
getPermissionToFork,
}: {
userData: User;
getPermissionToFork: () => Promise<boolean> | boolean;
}) {
if (!this.openAuthoringEnabled) {
throw new Error('Cannot authenticate with fork; Open Authoring is turned off.');
}
const token = userData.token as string;
// Origin maintainers should be able to use the CMS normally
if (await this.userIsOriginMaintainer({ token })) {
this.repo = this.originRepo;
this.useOpenAuthoring = false;
return Promise.resolve();
}
if (!(await this.forkExists({ token }))) {
await getPermissionToFork();
}
// POST /forks returns the existing fork when one already exists.
const fork = await fetch(`${this.apiRoot}/repos/${this.originRepo}/forks`, {
method: 'POST',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
this.useOpenAuthoring = true;
this.repo = fork.full_name;
return this.pollUntilForkExists({ repo: fork.full_name, token });
}
// Instantiates the API client (REST or GraphQL depending on config), verifies
// repo access, and returns the user object augmented with the token.
async authenticate(state: Credentials) {
this.token = state.token as string;
const apiCtor = this.useGraphql ? GraphQLAPI : API;
this.api = new apiCtor({
token: this.token,
branch: this.branch,
repo: this.repo,
originRepo: this.originRepo,
apiRoot: this.apiRoot,
squashMerges: this.squashMerges,
useOpenAuthoring: this.useOpenAuthoring,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const user = await this.api!.user();
const isCollab = await this.api!.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitHub account with access.
If your repo is under an organization, ensure the organization has granted access to Netlify
CMS.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitHub user account does not have access to this repo.');
}
// Authorized user
return { ...user, token: state.token as string, useOpenAuthoring: this.useOpenAuthoring };
}
// Clears the token and resets the API client if it supports resetting
// (the GraphQL client clears its cache here).
logout() {
this.token = null;
if (this.api && this.api.reset && typeof this.api.reset === 'function') {
return this.api.reset();
}
}
getToken() {
return Promise.resolve(this.token);
}
// Lists entries of a folder collection via the shared helper, filtered by
// extension. Open Authoring reads from the origin repo so forks don't need
// to be up to date.
async entriesByFolder(folder: string, extension: string, depth: number) {
const repoURL = this.useOpenAuthoring ? this.api!.originRepoURL : this.api!.repoURL;
const listFiles = () =>
this.api!.listFiles(folder, {
repoURL,
depth,
}).then(filterByPropExtension(extension, 'path'));
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
return entriesByFolder(listFiles, readFile, 'GitHub');
}
// Lists entries of a file collection (explicit file list) via shared helper.
entriesByFiles(files: ImplementationFile[]) {
const repoURL = this.useOpenAuthoring ? this.api!.originRepoURL : this.api!.repoURL;
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
return entriesByFiles(files, readFile, 'GitHub');
}
// Fetches a single entry.
getEntry(path: string) {
const repoURL = this.api!.originRepoURL;
return this.api!.readFile(path, null, { repoURL }).then(data => ({
file: { path, id: null },
data: data as string,
}));
}
// Lists the media folder; returns metadata only (no blobs).
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listFiles(mediaFolder).then(files =>
files.map(({ id, name, size, path }) => {
// load media using getMediaDisplayURL to avoid token expiration with GitHub raw content urls
// for private repositories
return { id, name, size, displayURL: { id, path }, path };
}),
);
}
// Downloads a single media file as a blob, wraps it in a File with an object
// URL, and computes its blob SHA to use as a stable id.
async getMediaFile(path: string) {
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const name = basename(path);
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(blob);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
// Resolves a display URL descriptor via the shared helper, bounded by a
// lazily created semaphore to limit concurrent downloads.
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
persistEntry(entry: Entry, mediaFiles: AssetProxy[] = [], options: PersistOptions) {
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry, mediaFiles, options),
'Failed to acquire persist entry lock',
);
}
// Uploads a single media file and returns its display metadata.
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
try {
await this.api!.persistFiles(null, [mediaFile], options);
const { sha, path, fileObj } = mediaFile as AssetProxy & { sha: string };
const displayURL = URL.createObjectURL(fileObj);
return {
id: sha,
name: fileObj!.name,
size: fileObj!.size,
displayURL,
path: trimStart(path, '/'),
};
} catch (error) {
console.error(error);
throw error;
}
}
deleteFile(path: string, commitMessage: string) {
return this.api!.deleteFile(path, commitMessage);
}
// Fetches one media file of an unpublished entry from the given CMS branch.
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
return getMediaAsBlob(file.path, file.id, readFile).then(blob => {
const name = basename(file.path);
const fileObj = new File([blob], name);
return {
id: file.id,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
// Loads all media files attached to an unpublished entry in parallel.
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
}
// Lists all unpublished (editorial workflow) entries via the shared helper,
// mapping CMS branch refs to content keys first.
unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(({ ref }) => this.api!.contentKeyFromRef(ref)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
this.api!.readUnpublishedBranchFile(contentKey);
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, 'GitHub');
}
// Loads a single unpublished entry (entry data, metadata and media files).
// Media file entries from metadata (sha/path) are normalized to { id, path }.
// loadEntryMediaFiles is injectable for tests.
async unpublishedEntry(
collection: string,
slug: string,
{
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
this.loadEntryMediaFiles(branch, files),
} = {},
) {
const contentKey = this.api!.generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const files = data.metaData.objects.files || [];
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
files.map(({ sha: id, path }) => ({ id, path })),
);
return {
slug,
file: { path: data.metaData.objects.entry.path, id: null },
data: data.fileData as string,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}
/**
* Uses GitHub's Statuses API to retrieve statuses, infers which is for a
* deploy preview via `getPreviewStatus`. Returns the url provided by the
* status, as well as the status state, which should be one of 'success',
* 'pending', and 'failure'.
*/
async getDeployPreview(collectionName: string, slug: string) {
const contentKey = this.api!.generateContentKey(collectionName, slug);
const data = await this.api!.retrieveMetadata(contentKey);
if (!data || !data.pr) {
return null;
}
// pr.head may be a bare SHA string or a full head object depending on API.
const headSHA = typeof data.pr.head === 'string' ? data.pr.head : data.pr.head.sha;
const statuses = await this.api!.getStatuses(headSHA);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
}

View File

@ -126,7 +126,7 @@ const buildFilesQuery = (depth = 1) => {
return query;
};
export const files = depth => gql`
export const files = (depth: number) => gql`
query files($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
@ -263,32 +263,6 @@ export const tree = gql`
${fragments.treeEntry}
`;
export const pullRequestCommits = gql`
query pullRequestCommits($owner: String!, $name: String!, $number: Int!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
pullRequest(number: $number) {
id
commits(last: 100) {
nodes {
id
commit {
...ObjectParts
parents(last: 100) {
nodes {
...ObjectParts
}
}
}
}
}
}
}
}
${fragments.repository}
${fragments.object}
`;
export const fileSha = gql`
query fileSha($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {

View File

@ -17,7 +17,7 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"dependencies": {
"js-base64": "^2.5.1",

View File

@ -1,283 +0,0 @@
import {
localForage,
parseLinkHeader,
unsentRequest,
then,
APIError,
Cursor,
} from 'netlify-cms-lib-util';
import { Base64 } from 'js-base64';
import { fromJS, Map } from 'immutable';
import { flow, partial, result } from 'lodash';
export default class API {
constructor(config) {
this.api_root = config.api_root || 'https://gitlab.com/api/v4';
this.token = config.token || false;
this.branch = config.branch || 'master';
this.repo = config.repo || '';
this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
}
withAuthorizationHeaders = req =>
unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${this.token}` } : {}, req);
buildRequest = req =>
flow([
unsentRequest.withRoot(this.api_root),
this.withAuthorizationHeaders,
unsentRequest.withTimestamp,
])(req);
request = async req =>
flow([
this.buildRequest,
unsentRequest.performRequest,
p => p.catch(err => Promise.reject(new APIError(err.message, null, 'GitLab'))),
])(req);
catchFormatErrors = (format, formatter) => res => {
try {
return formatter(res);
} catch (err) {
throw new Error(
`Response cannot be parsed into the expected format (${format}): ${err.message}`,
);
}
};
responseFormats = fromJS({
json: async res => {
const contentType = res.headers.get('Content-Type');
if (contentType !== 'application/json' && contentType !== 'text/json') {
throw new Error(`${contentType} is not a valid JSON Content-Type`);
}
return res.json();
},
text: async res => res.text(),
blob: async res => res.blob(),
}).mapEntries(([format, formatter]) => [format, this.catchFormatErrors(format, formatter)]);
parseResponse = async (res, { expectingOk = true, expectingFormat = 'text' }) => {
let body;
try {
const formatter = this.responseFormats.get(expectingFormat, false);
if (!formatter) {
throw new Error(`${expectingFormat} is not a supported response format.`);
}
body = await formatter(res);
} catch (err) {
throw new APIError(err.message, res.status, 'GitLab');
}
if (expectingOk && !res.ok) {
const isJSON = expectingFormat === 'json';
throw new APIError(isJSON && body.message ? body.message : body, res.status, 'GitLab');
}
return body;
};
responseToJSON = res => this.parseResponse(res, { expectingFormat: 'json' });
responseToBlob = res => this.parseResponse(res, { expectingFormat: 'blob' });
responseToText = res => this.parseResponse(res, { expectingFormat: 'text' });
requestJSON = req => this.request(req).then(this.responseToJSON);
requestText = req => this.request(req).then(this.responseToText);
user = () => this.requestJSON('/user');
WRITE_ACCESS = 30;
hasWriteAccess = () =>
this.requestJSON(this.repoURL).then(({ permissions }) => {
const { project_access, group_access } = permissions;
if (project_access && project_access.access_level >= this.WRITE_ACCESS) {
return true;
}
if (group_access && group_access.access_level >= this.WRITE_ACCESS) {
return true;
}
return false;
});
readFile = async (path, sha, { ref = this.branch, parseText = true } = {}) => {
const cacheKey = parseText ? `gl.${sha}` : `gl.${sha}.blob`;
const cachedFile = sha ? await localForage.getItem(cacheKey) : null;
if (cachedFile) {
return cachedFile;
}
const result = await this.request({
url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
params: { ref },
cache: 'no-store',
}).then(parseText ? this.responseToText : this.responseToBlob);
if (sha) {
localForage.setItem(cacheKey, result);
}
return result;
};
// Builds a pagination Cursor from GitLab's response headers.
// Indices and page counts are assumed to be zero-based in the cursor, but
// the values returned from GitLab are one-based, hence the `- 1`s.
getCursorFromHeaders = headers => {
  const index = parseInt(headers.get('X-Page'), 10) - 1;
  const pageCount = parseInt(headers.get('X-Total-Pages'), 10) - 1;
  const pageSize = parseInt(headers.get('X-Per-Page'), 10);
  const count = parseInt(headers.get('X-Total'), 10);
  const links = parseLinkHeader(headers.get('Link'));
  // Expose only the Link relations that can actually move somewhere from
  // the current position (no 'prev'/'first' on the first page, etc.).
  const actions = Map(links)
    .keySeq()
    .flatMap(key =>
      (key === 'prev' && index > 0) ||
      (key === 'next' && index < pageCount) ||
      (key === 'first' && index > 0) ||
      (key === 'last' && index < pageCount)
        ? [key]
        : [],
    );
  return Cursor.create({
    actions,
    meta: { index, count, pageSize, pageCount },
    data: { links },
  });
};
getCursor = ({ headers }) => this.getCursorFromHeaders(headers);

// Gets a cursor without retrieving the entries by using a HEAD request
// (pagination info lives entirely in the response headers).
fetchCursor = req =>
  flow([unsentRequest.withMethod('HEAD'), this.request, then(this.getCursor)])(req);

// GETs a page and resolves both its JSON entries and the cursor derived
// from its pagination headers, in parallel off the same response promise.
fetchCursorAndEntries = req =>
  flow([
    unsentRequest.withMethod('GET'),
    this.request,
    p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
    then(([cursor, entries]) => ({ cursor, entries })),
  ])(req);
// Follows one of the cursor's link relations ('next', 'prev', …), headers only.
fetchRelativeCursor = async (cursor, action) => this.fetchCursor(cursor.data.links[action]);

// Opposite of each pagination action, used when reversing a cursor.
// (Identifier spelling "reversable" is kept as-is; it is referenced elsewhere.)
reversableActions = Map({
  first: 'last',
  last: 'first',
  next: 'prev',
  prev: 'next',
});
// Returns a cursor that walks the same pages in the opposite direction:
// the page index is mirrored around the page count, and every link and
// action is swapped with its opposite via `reversableActions`.
reverseCursor = cursor => {
  const pageCount = cursor.meta.get('pageCount', 0);
  const currentIndex = cursor.meta.get('index', 0);
  const newIndex = pageCount - currentIndex;
  const links = cursor.data.get('links', Map());
  const reversedLinks = links.mapEntries(([k, v]) => [this.reversableActions.get(k) || k, v]);
  const reversedActions = cursor.actions.map(
    action => this.reversableActions.get(action) || action,
  );
  return cursor.updateStore(store =>
    store
      .setIn(['meta', 'index'], newIndex)
      .setIn(['data', 'links'], reversedLinks)
      .set('actions', reversedActions),
  );
};
// The exported listFiles and traverseCursor reverse the direction
// of the cursors, since GitLab's pagination sorts the opposite way
// we want to sort by default (it sorts by filename _descending_,
// while the CMS defaults to sorting by filename _ascending_, at
// least in the current GitHub backend). This should eventually be
// refactored.
// Lists one page of files at `path`. Fetches the LAST page and reverses it
// so results come back ascending despite GitLab's descending sort (see the
// comment block above).
listFiles = async (path, recursive = false) => {
  const headCursor = await this.fetchCursor({
    url: `${this.repoURL}/repository/tree`,
    params: { path, ref: this.branch, recursive },
  });
  const { entries, cursor } = await this.fetchCursorAndEntries(
    headCursor.data.getIn(['links', 'last']),
  );
  const files = entries.filter(({ type }) => type === 'blob').reverse();
  return { files, cursor: this.reverseCursor(cursor) };
};
// Follows `action` on a (reversed) cursor and returns the page's blob
// entries, again reversed to preserve ascending order.
traverseCursor = async (cursor, action) => {
  const link = cursor.data.getIn(['links', action]);
  const { entries: pageEntries, cursor: pageCursor } = await this.fetchCursorAndEntries(link);
  const entries = pageEntries.filter(({ type }) => type === 'blob').reverse();
  return { entries, cursor: this.reverseCursor(pageCursor) };
};
// Walks every page of the repository tree at `path`, following 'next' links
// until exhausted, and returns only blob (file) entries.
listAllFiles = async (path, recursive = false) => {
  const entries = [];
  // Get the maximum number of entries per page to minimize round trips.
  let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
    url: `${this.repoURL}/repository/tree`,
    params: { path, ref: this.branch, per_page: 100, recursive },
  });
  entries.push(...initialEntries);
  while (cursor && cursor.actions.has('next')) {
    const link = cursor.data.getIn(['links', 'next']);
    const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
    entries.push(...newEntries);
    cursor = newCursor;
  }
  return entries.filter(({ type }) => type === 'blob');
};
// Base64 helpers; toBase64 is async to match pluggable `toBase64` members
// on commit items (see uploadAndCommit).
toBase64 = str => Promise.resolve(Base64.encode(str));
fromBase64 = str => Base64.decode(str);
// Commits a single file via POST /repository/commits. If the item exposes a
// `toBase64` method it is used; otherwise `item.raw` is base64-encoded.
// Returns the item augmented with the new commit id as `sha`.
uploadAndCommit = async (
  item,
  { commitMessage, updateFile = false, branch = this.branch, author = this.commitAuthor },
) => {
  const content = await result(item, 'toBase64', partial(this.toBase64, item.raw));
  // GitLab expects repo-relative paths without a leading slash.
  const file_path = item.path.replace(/^\//, '');
  const action = updateFile ? 'update' : 'create';
  const encoding = 'base64';
  const commitParams = {
    branch,
    commit_message: commitMessage,
    actions: [{ action, file_path, content, encoding }],
  };
  if (author) {
    const { name, email } = author;
    commitParams.author_name = name;
    commitParams.author_email = email;
  }
  const response = await this.requestJSON({
    url: `${this.repoURL}/repository/commits`,
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(commitParams),
  });
  return { ...item, sha: response.id };
};
// Commits each file in its own commit, in parallel. `newEntry === false`
// means the entry already exists, so files are sent as updates.
persistFiles = (files, { commitMessage, newEntry }) => {
  const updateFile = newEntry === false;
  return Promise.all(
    files.map(file => this.uploadAndCommit(file, { commitMessage, updateFile })),
  );
};
// Deletes a single file on `options.branch` (defaults to the configured
// branch), attributing the commit to the configured author when set.
deleteFile = (path, commit_message, options = {}) => {
  const branch = options.branch || this.branch;
  const commitParams = { commit_message, branch };
  if (this.commitAuthor) {
    const { name, email } = this.commitAuthor;
    commitParams.author_name = name;
    commitParams.author_email = email;
  }
  return flow([
    unsentRequest.withMethod('DELETE'),
    // TODO: only send author params if they are defined.
    unsentRequest.withParams(commitParams),
    this.request,
  ])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
};
}

View File

@ -0,0 +1,708 @@
import {
localForage,
parseLinkHeader,
unsentRequest,
then,
APIError,
Cursor,
ApiRequest,
Entry,
AssetProxy,
PersistOptions,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
EditorialWorkflowError,
labelToStatus,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
responseParser,
PreviewState,
parseContentKey,
} from 'netlify-cms-lib-util';
import { Base64 } from 'js-base64';
import { Map, Set } from 'immutable';
import { flow, partial, result, trimStart } from 'lodash';
import { CursorStore } from 'netlify-cms-lib-util/src/Cursor';
export const API_NAME = 'GitLab';

// Backend configuration handed down from CMS core.
export interface Config {
  apiRoot?: string;
  token?: string;
  branch?: string;
  repo?: string;
  squashMerges: boolean;
  initialWorkflowStatus: string;
}

export interface CommitAuthor {
  name: string;
  email: string;
}

// File operations accepted by GitLab's commits API `actions` array.
enum CommitAction {
  CREATE = 'create',
  DELETE = 'delete',
  MOVE = 'move',
  UPDATE = 'update',
}

// One file operation inside a commit payload; content is omitted for deletes.
type CommitItem = {
  base64Content?: string;
  path: string;
  action: CommitAction;
};

// Request body for POST /projects/:id/repository/commits.
interface CommitsParams {
  commit_message: string;
  branch: string;
  author_name?: string;
  author_email?: string;
  actions?: {
    action: string;
    file_path: string;
    content?: string;
    encoding?: string;
  }[];
}
// Single-file diff entry from GET /repository/compare.
type GitLabCommitDiff = {
  diff: string;
  new_path: string;
  old_path: string;
};

// Pipeline/commit status values reported by GitLab.
enum GitLabCommitStatuses {
  Pending = 'pending',
  Running = 'running',
  Success = 'success',
  Failed = 'failed',
  Canceled = 'canceled',
}

// One entry from GET /repository/commits/:sha/statuses.
type GitLabCommitStatus = {
  status: GitLabCommitStatuses;
  name: string;
  author: {
    username: string;
    name: string;
  };
  description: null;
  sha: string;
  ref: string;
  target_url: string;
};

// Rebase progress fields from PUT /merge_requests/:iid/rebase polling.
type GitLabMergeRebase = {
  rebase_in_progress: boolean;
  merge_error: string;
};

// Subset of the merge request resource this backend reads.
type GitLabMergeRequest = {
  id: number;
  iid: number;
  title: string;
  description: string;
  state: string;
  merged_by: {
    name: string;
    username: string;
  };
  merged_at: string;
  created_at: string;
  updated_at: string;
  target_branch: string;
  source_branch: string;
  author: {
    name: string;
    username: string;
  };
  labels: string[];
  sha: string;
};
// GitLab REST API client used by the netlify-cms GitLab backend. Covers
// plain file/tree access plus editorial-workflow support implemented on top
// of CMS-labeled merge requests from `cms/*` branches.
export default class API {
  apiRoot: string;
  token: string | boolean;
  branch: string;
  useOpenAuthoring?: boolean;
  repo: string;
  repoURL: string;
  commitAuthor?: CommitAuthor;
  squashMerges: boolean;
  initialWorkflowStatus: string;

  constructor(config: Config) {
    this.apiRoot = config.apiRoot || 'https://gitlab.com/api/v4';
    this.token = config.token || false;
    this.branch = config.branch || 'master';
    this.repo = config.repo || '';
    this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
    this.squashMerges = config.squashMerges;
    this.initialWorkflowStatus = config.initialWorkflowStatus;
  }

  withAuthorizationHeaders = (req: ApiRequest) =>
    unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${this.token}` } : {}, req);

  buildRequest = (req: ApiRequest) =>
    flow([
      unsentRequest.withRoot(this.apiRoot),
      this.withAuthorizationHeaders,
      unsentRequest.withTimestamp,
    ])(req);

  // Performs a request, normalizing any network-level failure to APIError.
  request = async (req: ApiRequest): Promise<Response> =>
    flow([
      this.buildRequest,
      unsentRequest.performRequest,
      p => p.catch((err: Error) => Promise.reject(new APIError(err.message, null, API_NAME))),
    ])(req);

  responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
  responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
  responseToText = responseParser({ format: 'text', apiName: API_NAME });

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
  requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;

  // Current authenticated user, per GET /user.
  user = () => this.requestJSON('/user');

  // GitLab "Developer" access level — the minimum needed to push commits.
  WRITE_ACCESS = 30;
  hasWriteAccess = () =>
    this.requestJSON(this.repoURL).then(({ permissions }) => {
      const { project_access: projectAccess, group_access: groupAccess } = permissions;
      if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {
        return true;
      }
      if (groupAccess && groupAccess.access_level >= this.WRITE_ACCESS) {
        return true;
      }
      return false;
    });

  // Reads a file at `path` on `branch`; the shared lib-util readFile helper
  // handles sha-keyed caching around the raw fetch.
  readFile = async (
    path: string,
    sha?: string | null,
    { parseText = true, branch = this.branch } = {},
  ): Promise<string | Blob> => {
    const fetchContent = async () => {
      const content = await this.request({
        url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
        params: { ref: branch },
        cache: 'no-store',
      }).then<Blob | string>(parseText ? this.responseToText : this.responseToBlob);
      return content;
    };
    const content = await readFile(sha, fetchContent, localForage, parseText);
    return content;
  };

  getCursorFromHeaders = (headers: Headers) => {
    // indices and page counts are assumed to be zero-based, but the
    // indices and page counts returned from GitLab are one-based
    const index = parseInt(headers.get('X-Page') as string, 10) - 1;
    const pageCount = parseInt(headers.get('X-Total-Pages') as string, 10) - 1;
    const pageSize = parseInt(headers.get('X-Per-Page') as string, 10);
    const count = parseInt(headers.get('X-Total') as string, 10);
    const links = parseLinkHeader(headers.get('Link') as string);
    // Expose only Link relations that can actually move somewhere from here.
    const actions = Map(links)
      .keySeq()
      .flatMap(key =>
        (key === 'prev' && index > 0) ||
        (key === 'next' && index < pageCount) ||
        (key === 'first' && index > 0) ||
        (key === 'last' && index < pageCount)
          ? [key]
          : [],
      );
    return Cursor.create({
      actions,
      meta: { index, count, pageSize, pageCount },
      data: { links },
    });
  };

  getCursor = ({ headers }: { headers: Headers }) => this.getCursorFromHeaders(headers);

  // Gets a cursor without retrieving the entries by using a HEAD
  // request
  fetchCursor = (req: ApiRequest) =>
    flow([unsentRequest.withMethod('HEAD'), this.request, then(this.getCursor)])(req);

  // GETs a page, resolving both its JSON entries and the header-derived cursor.
  fetchCursorAndEntries = (
    req: ApiRequest,
  ): Promise<{
    entries: { id: string; type: string; path: string; name: string }[];
    cursor: Cursor;
  }> =>
    flow([
      unsentRequest.withMethod('GET'),
      this.request,
      p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
      then(([cursor, entries]: [Cursor, {}[]]) => ({ cursor, entries })),
    ])(req);

  // Opposite of each pagination action (identifier spelling kept as-is).
  reversableActions = Map({
    first: 'last',
    last: 'first',
    next: 'prev',
    prev: 'next',
  });

  // Returns a cursor walking the same pages in the opposite direction:
  // mirrored index, and every link/action swapped with its opposite.
  reverseCursor = (cursor: Cursor) => {
    const pageCount = cursor.meta!.get('pageCount', 0) as number;
    const currentIndex = cursor.meta!.get('index', 0) as number;
    const newIndex = pageCount - currentIndex;
    const links = cursor.data!.get('links', Map()) as Map<string, string>;
    const reversedLinks = links.mapEntries(tuple => {
      const [k, v] = tuple as string[];
      return [this.reversableActions.get(k) || k, v];
    });
    const reversedActions = cursor.actions!.map(
      action => this.reversableActions.get(action as string) || (action as string),
    );
    return cursor.updateStore((store: CursorStore) =>
      store!
        .setIn(['meta', 'index'], newIndex)
        .setIn(['data', 'links'], reversedLinks)
        .set('actions', (reversedActions as unknown) as Set<string>),
    );
  };

  // The exported listFiles and traverseCursor reverse the direction
  // of the cursors, since GitLab's pagination sorts the opposite way
  // we want to sort by default (it sorts by filename _descending_,
  // while the CMS defaults to sorting by filename _ascending_, at
  // least in the current GitHub backend). This should eventually be
  // refactored.
  listFiles = async (path: string, recursive = false) => {
    const firstPageCursor = await this.fetchCursor({
      url: `${this.repoURL}/repository/tree`,
      params: { path, ref: this.branch, recursive },
    });
    const lastPageLink = firstPageCursor.data.getIn(['links', 'last']);
    const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink);
    return {
      files: entries.filter(({ type }) => type === 'blob').reverse(),
      cursor: this.reverseCursor(cursor),
    };
  };

  traverseCursor = async (cursor: Cursor, action: string) => {
    const link = cursor.data!.getIn(['links', action]);
    const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
    return {
      entries: entries.filter(({ type }) => type === 'blob').reverse(),
      cursor: this.reverseCursor(newCursor),
    };
  };

  // Walks every page of the tree at `path`, following 'next' links until
  // exhausted, returning only blob (file) entries.
  listAllFiles = async (path: string, recursive = false) => {
    const entries = [];
    // eslint-disable-next-line prefer-const
    let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
      url: `${this.repoURL}/repository/tree`,
      // Get the maximum number of entries per page
      // eslint-disable-next-line @typescript-eslint/camelcase
      params: { path, ref: this.branch, per_page: 100, recursive },
    });
    entries.push(...initialEntries);
    while (cursor && cursor.actions!.has('next')) {
      const link = cursor.data!.getIn(['links', 'next']);
      const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link);
      entries.push(...newEntries);
      cursor = newCursor;
    }
    return entries.filter(({ type }) => type === 'blob');
  };

  toBase64 = (str: string) => Promise.resolve(Base64.encode(str));
  fromBase64 = (str: string) => Base64.decode(str);

  // Creates ONE commit containing all `items` on `branch`. When `newBranch`
  // is set, GitLab creates the branch off `this.branch` via start_branch.
  uploadAndCommit(
    items: CommitItem[],
    { commitMessage = '', branch = this.branch, newBranch = false },
  ) {
    const actions = items.map(item => ({
      action: item.action,
      // eslint-disable-next-line @typescript-eslint/camelcase
      file_path: item.path,
      // DELETE/MOVE items carry no content.
      ...(item.base64Content ? { content: item.base64Content, encoding: 'base64' } : {}),
    }));
    const commitParams: CommitsParams = {
      branch,
      // eslint-disable-next-line @typescript-eslint/camelcase
      commit_message: commitMessage,
      actions,
      // eslint-disable-next-line @typescript-eslint/camelcase
      ...(newBranch ? { start_branch: this.branch } : {}),
    };
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      // eslint-disable-next-line @typescript-eslint/camelcase
      commitParams.author_name = name;
      // eslint-disable-next-line @typescript-eslint/camelcase
      commitParams.author_email = email;
    }
    return this.requestJSON({
      url: `${this.repoURL}/repository/commits`,
      method: 'POST',
      headers: { 'Content-Type': 'application/json; charset=utf-8' },
      body: JSON.stringify(commitParams),
    });
  }

  // Maps entry/asset files to commit items, probing `branch` to decide
  // between CREATE and UPDATE for each file.
  async getCommitItems(files: (Entry | AssetProxy)[], branch: string) {
    const items = await Promise.all(
      files.map(async file => {
        const [base64Content, fileExists] = await Promise.all([
          result(file, 'toBase64', partial(this.toBase64, (file as Entry).raw)),
          this.isFileExists(file.path, branch),
        ]);
        return {
          action: fileExists ? CommitAction.UPDATE : CommitAction.CREATE,
          base64Content,
          path: trimStart(file.path, '/'),
        };
      }),
    );
    return items as CommitItem[];
  }

  // Persists an entry and its media: straight to `this.branch` in simple
  // mode, or through a cms/* branch + merge request in editorial workflow.
  async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
    const files = entry ? [entry, ...mediaFiles] : mediaFiles;
    if (options.useWorkflow) {
      return this.editorialWorkflowGit(files, entry as Entry, options);
    } else {
      const items = await this.getCommitItems(files, this.branch);
      return this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
      });
    }
  }

  // Deletes a single file on the main branch.
  deleteFile = (path: string, commitMessage: string) => {
    const branch = this.branch;
    // eslint-disable-next-line @typescript-eslint/camelcase
    const commitParams: CommitsParams = { commit_message: commitMessage, branch };
    if (this.commitAuthor) {
      const { name, email } = this.commitAuthor;
      // eslint-disable-next-line @typescript-eslint/camelcase
      commitParams.author_name = name;
      // eslint-disable-next-line @typescript-eslint/camelcase
      commitParams.author_email = email;
    }
    return flow([
      unsentRequest.withMethod('DELETE'),
      // TODO: only send author params if they are defined.
      unsentRequest.withParams(commitParams),
      this.request,
    ])(`${this.repoURL}/repository/files/${encodeURIComponent(path)}`);
  };

  // contentKey <-> cms/* branch-name mapping for editorial workflow.
  generateContentKey(collectionName: string, slug: string) {
    return generateContentKey(collectionName, slug);
  }

  contentKeyFromBranch(branch: string) {
    return branch.substring(`${CMS_BRANCH_PREFIX}/`.length);
  }

  branchFromContentKey(contentKey: string) {
    return `${CMS_BRANCH_PREFIX}/${contentKey}`;
  }

  // Lists open, CMS-labeled merge requests into `this.branch` that come from
  // cms/* branches, optionally restricted to one source branch.
  async getMergeRequests(sourceBranch?: string) {
    const mergeRequests: GitLabMergeRequest[] = await this.requestJSON({
      url: `${this.repoURL}/merge_requests`,
      params: {
        state: 'opened',
        labels: 'Any',
        // eslint-disable-next-line @typescript-eslint/camelcase
        target_branch: this.branch,
        // eslint-disable-next-line @typescript-eslint/camelcase
        ...(sourceBranch ? { source_branch: sourceBranch } : {}),
      },
    });
    return mergeRequests.filter(
      mr => mr.source_branch.startsWith(CMS_BRANCH_PREFIX) && mr.labels.some(isCMSLabel),
    );
  }

  async listUnpublishedBranches() {
    console.log(
      '%c Checking for Unpublished entries',
      'line-height: 30px;text-align: center;font-weight: bold',
    );
    const mergeRequests = await this.getMergeRequests();
    const branches = mergeRequests.map(mr => mr.source_branch);
    return branches;
  }

  // HEADs the file on `branch`; only a 404 means "doesn't exist", every
  // other error is re-thrown.
  async isFileExists(path: string, branch: string) {
    const fileExists = await this.requestText({
      method: 'HEAD',
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
      params: { ref: branch },
      cache: 'no-store',
    })
      .then(() => true)
      .catch(error => {
        if (error instanceof APIError && error.status === 404) {
          return false;
        }
        throw error;
      });
    return fileExists;
  }

  // Resolves the single workflow merge request for a cms/* branch, or
  // signals "not under editorial workflow" to the caller.
  async getBranchMergeRequest(branch: string) {
    const mergeRequests = await this.getMergeRequests(branch);
    if (mergeRequests.length <= 0) {
      throw new EditorialWorkflowError('content is not under editorial workflow', true);
    }
    return mergeRequests[0];
  }

  // Diff of `to` (branch or sha) against the main branch.
  async getDifferences(to: string) {
    const result: { diffs: GitLabCommitDiff[] } = await this.requestJSON({
      url: `${this.repoURL}/repository/compare`,
      params: {
        from: this.branch,
        to,
      },
    });
    return result.diffs;
  }

  // Reconstructs workflow metadata (entry path, media files, status) for a
  // content key from its merge request and diff.
  async retrieveMetadata(contentKey: string) {
    const { collection, slug } = parseContentKey(contentKey);
    const branch = this.branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    const diff = await this.getDifferences(mergeRequest.sha);
    // NOTE(review): if no diff path contains the slug, `find` yields
    // undefined and the `as string` cast hides it — verify upstream
    // guarantees a matching entry file.
    const path = diff.find(d => d.old_path.includes(slug))?.old_path as string;
    // TODO: get real file id
    // NOTE(review): Promise.all over plain (non-promise) objects is a no-op
    // wrapper here; map alone would suffice.
    const mediaFiles = await Promise.all(
      diff.filter(d => d.old_path !== path).map(d => ({ path: d.new_path, id: null })),
    );
    const label = mergeRequest.labels.find(isCMSLabel) as string;
    const status = labelToStatus(label);
    return { branch, collection, slug, path, status, mediaFiles };
  }

  // Loads an unpublished entry's content plus whether it modifies a file
  // that already exists on the main branch.
  async readUnpublishedBranchFile(contentKey: string) {
    const { branch, collection, slug, path, status, mediaFiles } = await this.retrieveMetadata(
      contentKey,
    );
    const [fileData, isModification] = await Promise.all([
      this.readFile(path, null, { branch }) as Promise<string>,
      this.isFileExists(path, this.branch),
    ]);
    return {
      slug,
      metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status },
      fileData,
      isModification,
    };
  }

  // Triggers a rebase of the MR and polls (1s interval, ~10 tries) until it
  // completes; throws on timeout or a rebase conflict.
  async rebaseMergeRequest(mergeRequest: GitLabMergeRequest) {
    let rebase: GitLabMergeRebase = await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}/rebase`,
    });
    let i = 1;
    while (rebase.rebase_in_progress) {
      await new Promise(resolve => setTimeout(resolve, 1000));
      rebase = await this.requestJSON({
        url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
        params: {
          // eslint-disable-next-line @typescript-eslint/camelcase
          include_rebase_in_progress: true,
        },
      });
      if (!rebase.rebase_in_progress || i > 10) {
        break;
      }
      i++;
    }
    if (rebase.rebase_in_progress) {
      throw new APIError('Timed out rebasing merge request', null, API_NAME);
    } else if (rebase.merge_error) {
      throw new APIError(`Rebase error: ${rebase.merge_error}`, null, API_NAME);
    }
  }

  // Opens the workflow MR for `branch`, labeled with the entry's status.
  async createMergeRequest(branch: string, commitMessage: string, status: string) {
    await this.requestJSON({
      method: 'POST',
      url: `${this.repoURL}/merge_requests`,
      params: {
        // eslint-disable-next-line @typescript-eslint/camelcase
        source_branch: branch,
        // eslint-disable-next-line @typescript-eslint/camelcase
        target_branch: this.branch,
        title: commitMessage,
        description: DEFAULT_PR_BODY,
        labels: statusToLabel(status),
        // eslint-disable-next-line @typescript-eslint/camelcase
        remove_source_branch: true,
        squash: this.squashMerges,
      },
    });
  }

  // Editorial-workflow persistence: first save creates the cms/* branch and
  // its MR; subsequent saves rebase the MR, commit changes, and delete files
  // removed from the entry.
  async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
    const contentKey = this.generateContentKey(options.collectionName as string, entry.slug);
    const branch = this.branchFromContentKey(contentKey);
    const unpublished = options.unpublished || false;
    if (!unpublished) {
      const items = await this.getCommitItems(files, this.branch);
      await this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
        branch,
        newBranch: true,
      });
      await this.createMergeRequest(
        branch,
        options.commitMessage,
        options.status || this.initialWorkflowStatus,
      );
    } else {
      const mergeRequest = await this.getBranchMergeRequest(branch);
      await this.rebaseMergeRequest(mergeRequest);
      const [items, diffs] = await Promise.all([
        this.getCommitItems(files, branch),
        this.getDifferences(branch),
      ]);
      // mark files for deletion
      for (const diff of diffs) {
        if (!items.some(item => item.path === diff.new_path)) {
          items.push({ action: CommitAction.DELETE, path: diff.new_path });
        }
      }
      await this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
        branch,
      });
    }
  }

  async updateMergeRequestLabels(mergeRequest: GitLabMergeRequest, labels: string[]) {
    await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
      params: {
        labels: labels.join(','),
      },
    });
  }

  // Replaces the MR's CMS status label, keeping all non-CMS labels intact.
  async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
    const contentKey = this.generateContentKey(collection, slug);
    const branch = this.branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    const labels = [
      ...mergeRequest.labels.filter(label => !isCMSLabel(label)),
      statusToLabel(newStatus),
    ];
    await this.updateMergeRequestLabels(mergeRequest, labels);
  }

  async mergeMergeRequest(mergeRequest: GitLabMergeRequest) {
    await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}/merge`,
      params: {
        // eslint-disable-next-line @typescript-eslint/camelcase
        merge_commit_message: MERGE_COMMIT_MESSAGE,
        // eslint-disable-next-line @typescript-eslint/camelcase
        squash_commit_message: MERGE_COMMIT_MESSAGE,
        squash: this.squashMerges,
        // eslint-disable-next-line @typescript-eslint/camelcase
        should_remove_source_branch: true,
      },
    });
  }

  // Publishing = merging the entry's workflow MR into the main branch.
  async publishUnpublishedEntry(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    await this.mergeMergeRequest(mergeRequest);
  }

  async closeMergeRequest(mergeRequest: GitLabMergeRequest) {
    await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
      params: {
        // eslint-disable-next-line @typescript-eslint/camelcase
        state_event: 'close',
      },
    });
  }

  async deleteBranch(branch: string) {
    await this.request({
      method: 'DELETE',
      url: `${this.repoURL}/repository/branches/${encodeURIComponent(branch)}`,
    });
  }

  // Discarding an unpublished entry closes its MR and removes its branch.
  async deleteUnpublishedEntry(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    await this.closeMergeRequest(mergeRequest);
    await this.deleteBranch(branch);
  }

  // NOTE(review): "Statues" is a typo for "Statuses", but the name is part of
  // the public surface (unit tests stub it), so it is kept as-is.
  async getMergeRequestStatues(mergeRequest: GitLabMergeRequest, branch: string) {
    const statuses: GitLabCommitStatus[] = await this.requestJSON({
      url: `${this.repoURL}/repository/commits/${mergeRequest.sha}/statuses`,
      params: {
        ref: branch,
      },
    });
    return statuses;
  }

  // Maps commit statuses on the entry's MR head to CMS preview states;
  // only 'success' counts as a successful preview, everything else is Other.
  async getStatuses(collectionName: string, slug: string) {
    const contentKey = this.generateContentKey(collectionName, slug);
    const branch = this.branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    const statuses: GitLabCommitStatus[] = await this.getMergeRequestStatues(mergeRequest, branch);
    // eslint-disable-next-line @typescript-eslint/camelcase
    return statuses.map(({ name, status, target_url }) => ({
      context: name,
      state: status === GitLabCommitStatuses.Success ? PreviewState.Success : PreviewState.Other,
      // eslint-disable-next-line @typescript-eslint/camelcase
      target_url,
    }));
  }
}

View File

@ -1,6 +1,5 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { NetlifyAuthenticator, ImplicitAuthenticator } from 'netlify-cms-lib-auth';
import { AuthenticationPage, Icon } from 'netlify-cms-ui-default';
@ -16,19 +15,25 @@ export default class GitLabAuthenticationPage extends React.Component {
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: ImmutablePropTypes.map,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
};
state = {};
componentDidMount() {
const authType = this.props.config.getIn(['backend', 'auth_type']);
const {
auth_type: authType = '',
base_url = 'https://gitlab.com',
auth_endpoint = 'oauth/authorize',
app_id = '',
} = this.props.config.backend;
if (authType === 'implicit') {
this.auth = new ImplicitAuthenticator({
base_url: this.props.config.getIn(['backend', 'base_url'], 'https://gitlab.com'),
auth_endpoint: this.props.config.getIn(['backend', 'auth_endpoint'], 'oauth/authorize'),
app_id: this.props.config.getIn(['backend', 'app_id']),
base_url,
auth_endpoint,
app_id,
clearHash: this.props.clearHash,
});
// Complete implicit authentication if we were redirected back to from the provider.
@ -69,8 +74,8 @@ export default class GitLabAuthenticationPage extends React.Component {
onLogin={this.handleLogin}
loginDisabled={inProgress}
loginErrorMessage={this.state.loginError}
logoUrl={config.get('logo_url')}
siteUrl={config.get('site_url')}
logoUrl={config.logo_url}
siteUrl={config.site_url}
renderButtonContent={() => (
<React.Fragment>
<LoginButtonIcon type="gitlab" /> {inProgress ? 'Logging in...' : 'Login with GitLab'}

View File

@ -0,0 +1,35 @@
import API from '../API';
// Fail fast if anything under test hits the network.
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));

describe('GitLab API', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });

  test('should get preview statuses', async () => {
    const api = new API({ repo: 'repo' });
    const mr = { sha: 'sha' };
    const statuses = [
      { name: 'deploy', status: 'success', target_url: 'deploy-url' },
      { name: 'build', status: 'pending' },
    ];
    // Stub the MR lookup and status fetch so getStatuses runs offline.
    api.getBranchMergeRequest = jest.fn(() => Promise.resolve(mr));
    api.getMergeRequestStatues = jest.fn(() => Promise.resolve(statuses));

    const collectionName = 'posts';
    const slug = 'title';
    // Only a 'success' status maps to the 'success' preview state; every
    // other status (pending, failed, …) maps to 'other'.
    await expect(api.getStatuses(collectionName, slug)).resolves.toEqual([
      { context: 'deploy', state: 'success', target_url: 'deploy-url' },
      { context: 'build', state: 'other' },
    ]);
    expect(api.getBranchMergeRequest).toHaveBeenCalledTimes(1);
    expect(api.getBranchMergeRequest).toHaveBeenCalledWith('cms/posts/title');
    expect(api.getMergeRequestStatues).toHaveBeenCalledTimes(1);
    expect(api.getMergeRequestStatues).toHaveBeenCalledWith(mr, 'cms/posts/title');
  });
});

View File

@ -1,6 +1,5 @@
jest.mock('netlify-cms-core/src/backend');
import { fromJS } from 'immutable';
import { partial } from 'lodash';
import { oneLine, stripIndent } from 'common-tags';
import nock from 'nock';
import { Cursor } from 'netlify-cms-lib-util';
@ -175,7 +174,7 @@ describe('gitlab backend', () => {
}
function mockApi(backend) {
return nock(backend.implementation.api_root);
return nock(backend.implementation.apiRoot);
}
function interceptAuth(backend, { userResponse, projectResponse } = {}) {
@ -206,7 +205,7 @@ describe('gitlab backend', () => {
function createHeaders(backend, { basePath, path, page, perPage, pageCount, totalCount }) {
const pageNum = parseInt(page, 10);
const pageCountNum = parseInt(pageCount, 10);
const url = `${backend.implementation.api_root}${basePath}`;
const url = `${backend.implementation.apiRoot}${basePath}`;
const link = linkPage =>
`<${url}?id=${expectedRepo}&page=${linkPage}&path=${path}&per_page=${perPage}&recursive=false>`;
@ -286,18 +285,8 @@ describe('gitlab backend', () => {
});
}
it('throws if configuration requires editorial workflow', () => {
const resolveBackendWithWorkflow = partial(resolveBackend, {
...defaultConfig,
publish_mode: 'editorial_workflow',
});
expect(resolveBackendWithWorkflow).toThrowErrorMatchingInlineSnapshot(
`"The GitLab backend does not support the Editorial Workflow."`,
);
});
it('throws if configuration does not include repo', () => {
expect(resolveBackend).toThrowErrorMatchingInlineSnapshot(
expect(() => resolveBackend({ backend: {} })).toThrowErrorMatchingInlineSnapshot(
`"The GitLab backend needs a \\"repo\\" in the backend configuration."`,
);
});
@ -382,7 +371,12 @@ describe('gitlab backend', () => {
interceptCollection(backend, collectionContentConfig);
const entry = await backend.getEntry(
{ config: fromJS({}), integrations: fromJS([]), entryDraft: fromJS({}) },
{
config: fromJS({}),
integrations: fromJS([]),
entryDraft: fromJS({}),
mediaLibrary: fromJS({}),
},
fromJS(collectionContentConfig),
slug,
);

View File

@ -1,237 +0,0 @@
import trimStart from 'lodash/trimStart';
import semaphore from 'semaphore';
import { trim } from 'lodash';
import { stripIndent } from 'common-tags';
import { CURSOR_COMPATIBILITY_SYMBOL, basename, getCollectionDepth } from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import API from './API';
const MAX_CONCURRENT_DOWNLOADS = 10;
export default class GitLab {
// `config` is an Immutable.Map of the CMS configuration; `options` can inject
// a pre-built API instance (used by tests) and flags such as `proxied`.
constructor(config, options = {}) {
  this.config = config;
  this.options = {
    proxied: false,
    API: null,
    ...options,
  };

  // This implementation predates editorial workflow support for GitLab.
  if (this.options.useWorkflow) {
    throw new Error('The GitLab backend does not support the Editorial Workflow.');
  }

  if (!this.options.proxied && config.getIn(['backend', 'repo']) == null) {
    throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
  }

  this.api = this.options.API || null;

  this.repo = config.getIn(['backend', 'repo'], '');
  this.branch = config.getIn(['backend', 'branch'], 'master');
  this.api_root = config.getIn(['backend', 'api_root'], 'https://gitlab.com/api/v4');
  this.token = '';
}
// React component the CMS renders for the login screen.
authComponent() {
  return AuthenticationPage;
}

// Re-authenticates with credentials restored from persisted state.
restoreUser(user) {
  return this.authenticate(user);
}
// Builds the API client from the auth token, then verifies that the repo is
// reachable and the user has write access before accepting the login.
async authenticate(state) {
  this.token = state.token;
  this.api = new API({
    token: this.token,
    branch: this.branch,
    repo: this.repo,
    api_root: this.api_root,
  });
  const user = await this.api.user();
  const isCollab = await this.api.hasWriteAccess(user).catch(error => {
    error.message = stripIndent`
      Repo "${this.repo}" not found.

      Please ensure the repo information is spelled correctly.

      If the repo is private, make sure you're logged into a GitLab account with access.
    `;
    throw error;
  });

  // Unauthorized user
  if (!isCollab) {
    throw new Error('Your GitLab user account does not have access to this repo.');
  }

  // Authorized user
  return { ...user, login: user.username, token: state.token };
}
// Clears the in-memory token; no server-side session to revoke.
logout() {
  this.token = null;
  return;
}

getToken() {
  return Promise.resolve(this.token);
}
// True when `file` belongs to the collection: it has the right extension and
// is nested no deeper than `depth` levels below `folder`.
filterFile(folder, file, extension, depth) {
  // gitlab paths include the root folder, so strip it before counting depth
  const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
  return file.name.endsWith('.' + extension) && fileFolder.split('/').length <= depth;
}
entriesByFolder(collection, extension) {
const depth = getCollectionDepth(collection);
const folder = collection.get('folder');
return this.api.listFiles(folder, depth > 1).then(({ files, cursor }) =>
this.fetchFiles(files.filter(file => this.filterFile(folder, file, extension, depth))).then(
fetchedFiles => {
const returnedFiles = fetchedFiles;
returnedFiles[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return returnedFiles;
},
),
);
}
allEntriesByFolder(collection, extension) {
const depth = getCollectionDepth(collection);
const folder = collection.get('folder');
return this.api
.listAllFiles(folder, depth > 1)
.then(files =>
this.fetchFiles(files.filter(file => this.filterFile(folder, file, extension, depth))),
);
}
entriesByFiles(collection) {
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
return this.fetchFiles(files).then(fetchedFiles => {
const returnedFiles = fetchedFiles;
return returnedFiles;
});
}
fetchFiles = files => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [];
files.forEach(file => {
promises.push(
new Promise(resolve =>
sem.take(() =>
this.api
.readFile(file.path, file.id)
.then(data => {
resolve({ file, data });
sem.leave();
})
.catch((error = true) => {
sem.leave();
console.error(`failed to load file from GitLab: ${file.path}`);
resolve({ error });
}),
),
),
);
});
return Promise.all(promises).then(loadedEntries =>
loadedEntries.filter(loadedEntry => !loadedEntry.error),
);
};
// Fetches a single entry.
getEntry(collection, slug, path) {
return this.api.readFile(path).then(data => ({
file: { path },
data,
}));
}
getMedia(mediaFolder = this.config.get('media_folder')) {
return this.api.listAllFiles(mediaFolder).then(files =>
files.map(({ id, name, path }) => {
return { id, name, path, displayURL: { id, name, path } };
}),
);
}
async getMediaAsBlob(path, id, name) {
let blob = await this.api.readFile(path, id, { parseText: false });
// svgs are returned with mimetype "text/plain" by gitlab
if (blob.type === 'text/plain' && name.match(/\.svg$/i)) {
blob = new window.Blob([blob], { type: 'image/svg+xml' });
}
return blob;
}
getMediaDisplayURL(displayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
const { id, name, path } = displayURL;
return new Promise((resolve, reject) =>
this._mediaDisplayURLSem.take(() =>
this.getMediaAsBlob(path, id, name)
.then(blob => URL.createObjectURL(blob))
.then(resolve, reject)
.finally(() => this._mediaDisplayURLSem.leave()),
),
);
}
async getMediaFile(path) {
const name = basename(path);
const blob = await this.getMediaAsBlob(path, null, name);
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
async persistEntry(entry, mediaFiles, options = {}) {
return this.api.persistFiles([entry], options);
}
async persistMedia(mediaFile, options = {}) {
const [{ sha }] = await this.api.persistFiles([mediaFile], options);
const { path, fileObj } = mediaFile;
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path: trimStart(path, '/'),
name: fileObj.name,
size: fileObj.size,
file: fileObj,
url,
id: sha,
};
}
deleteFile(path, commitMessage, options) {
return this.api.deleteFile(path, commitMessage, options);
}
traverseCursor(cursor, action) {
return this.api.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => ({
entries: await Promise.all(
entries.map(file => this.api.readFile(file.path, file.id).then(data => ({ file, data }))),
),
cursor: newCursor,
}));
}
}

View File

@ -0,0 +1,368 @@
import trimStart from 'lodash/trimStart';
import semaphore, { Semaphore } from 'semaphore';
import { trim } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
basename,
Entry,
AssetProxy,
PersistOptions,
Cursor,
Implementation,
DisplayURL,
entriesByFolder,
entriesByFiles,
getMediaDisplayURL,
getMediaAsBlob,
User,
Credentials,
Config,
ImplementationFile,
unpublishedEntries,
getPreviewStatus,
UnpublishedEntryMediaFile,
asyncLock,
AsyncLock,
runWithLock,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import { getBlobSHA } from 'netlify-cms-lib-util/src';
const MAX_CONCURRENT_DOWNLOADS = 10;
// GitLab backend for Netlify CMS with editorial workflow support. Delegates
// Git/REST traffic to the API class and shares generic listing/reading logic
// with the other backends through the netlify-cms-lib-util helpers
// (entriesByFolder, entriesByFiles, unpublishedEntries, ...).
export default class GitLab implements Implementation {
// Serializes transactional operations (persist / publish / status changes).
lock: AsyncLock;
api: API | null;
options: {
proxied: boolean;
API: API | null;
initialWorkflowStatus: string;
};
repo: string;
branch: string;
apiRoot: string;
token: string | null;
squashMerges: boolean;
mediaFolder: string;
previewContext: string;
// Lazily created throttle for media display URL resolution.
_mediaDisplayURLSem?: Semaphore;
// options.API lets callers inject a pre-built client; options.proxied skips
// the repo requirement (e.g. when requests go through a proxy).
constructor(config: Config, options = {}) {
this.options = {
proxied: false,
API: null,
initialWorkflowStatus: '',
...options,
};
if (
!this.options.proxied &&
(config.backend.repo === null || config.backend.repo === undefined)
) {
throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.repo = config.backend.repo || '';
this.branch = config.backend.branch || 'master';
this.apiRoot = config.backend.api_root || 'https://gitlab.com/api/v4';
this.token = '';
this.squashMerges = config.backend.squash_merges || false;
this.mediaFolder = config.media_folder;
this.previewContext = config.backend.preview_context || '';
this.lock = asyncLock();
}
// React component rendered for the login screen.
authComponent() {
return AuthenticationPage;
}
// Re-authenticates with a previously persisted user object.
restoreUser(user: User) {
return this.authenticate(user);
}
// Builds the API client from the token, then verifies the user exists and
// has write access to the repo before returning the enriched user object.
async authenticate(state: Credentials) {
this.token = state.token as string;
this.api = new API({
token: this.token,
branch: this.branch,
repo: this.repo,
apiRoot: this.apiRoot,
squashMerges: this.squashMerges,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const user = await this.api.user();
const isCollab = await this.api.hasWriteAccess().catch((error: Error) => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitLab account with access.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitLab user account does not have access to this repo.');
}
// Authorized user
return { ...user, login: user.username, token: state.token as string };
}
async logout() {
this.token = null;
return;
}
getToken() {
return Promise.resolve(this.token);
}
// True when the file has the requested extension and sits no deeper than
// `depth` levels below `folder`.
filterFile(
folder: string,
file: { path: string; name: string },
extension: string,
depth: number,
) {
// gitlab paths include the root folder
const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
return file.name.endsWith('.' + extension) && fileFolder.split('/').length <= depth;
}
// Lists entries for a folder collection via the shared helper; the cursor
// captured from the first page is attached to the result array through
// CURSOR_COMPATIBILITY_SYMBOL so legacy pagination callers keep working.
async entriesByFolder(folder: string, extension: string, depth: number) {
let cursor: Cursor;
const listFiles = () =>
this.api!.listFiles(folder, depth > 1).then(({ files, cursor: c }) => {
cursor = c;
return files.filter(file => this.filterFile(folder, file, extension, depth));
});
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), API_NAME);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return files;
}
// Like entriesByFolder but walks every page of results (no cursor).
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const listFiles = () =>
this.api!.listAllFiles(folder, depth > 1).then(files =>
files.filter(file => this.filterFile(folder, file, extension, depth)),
);
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), API_NAME);
return files;
}
// Entries for a files-based collection (explicit file list in the config).
entriesByFiles(files: ImplementationFile[]) {
return entriesByFiles(files, this.api!.readFile.bind(this.api!), API_NAME);
}
// Fetches a single entry.
getEntry(path: string) {
return this.api!.readFile(path).then(data => ({
file: { path, id: null },
data: data as string,
}));
}
// Lists all media files; displayURL carries the descriptor needed to lazily
// resolve an object URL later in getMediaDisplayURL.
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listAllFiles(mediaFolder).then(files =>
files.map(({ id, name, path }) => {
return { id, name, path, displayURL: { id, name, path } };
}),
);
}
// Resolves a displayURL descriptor to an object URL, throttled by a lazily
// created semaphore shared across calls on this instance.
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
// Downloads a single media file; the id is the blob's SHA so it is stable
// across sessions (unlike an object URL).
async getMediaFile(path: string) {
const name = basename(path);
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(blob);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry, mediaFiles, options),
'Failed to acquire persist entry lock',
);
}
// Commits a single media file; SHA computation and the commit run in
// parallel since they are independent.
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const fileObj = mediaFile.fileObj as File;
const [id] = await Promise.all([
getBlobSHA(fileObj),
this.api!.persistFiles(null, [mediaFile], options),
]);
const { path } = mediaFile;
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path: trimStart(path, '/'),
name: fileObj!.name,
size: fileObj!.size,
file: fileObj,
url,
id,
};
}
deleteFile(path: string, commitMessage: string) {
return this.api!.deleteFile(path, commitMessage);
}
// Advances pagination via the API's cursor and eagerly reads the contents of
// every entry on the new page.
traverseCursor(cursor: Cursor, action: string) {
return this.api!.traverseCursor(cursor, action).then(
async ({ entries, cursor: newCursor }) => ({
entries: await Promise.all(
entries.map(file =>
this.api!.readFile(file.path, file.id).then(data => ({ file, data: data as string })),
),
),
cursor: newCursor,
}),
);
}
// Reads one media file from an unpublished-entry branch and wraps it in the
// media-library shape (the file path doubles as the id here).
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
return getMediaAsBlob(file.path, null, readFile).then(blob => {
const name = basename(file.path);
const fileObj = new File([blob], name);
return {
id: file.path,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
// Loads all media files referenced by an unpublished entry, in parallel.
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
}
// Lists all unpublished (workflow) entries by mapping CMS branches back to
// their content keys and reading each branch's entry file.
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => this.api!.contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
this.api!.readUnpublishedBranchFile(contentKey);
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
}
// Loads a single unpublished entry plus its media files. The injectable
// loadEntryMediaFiles default exists so callers (tests, presumably) can
// override media loading.
async unpublishedEntry(
collection: string,
slug: string,
{
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
this.loadEntryMediaFiles(branch, files),
} = {},
) {
const contentKey = this.api!.generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
// TODO: fix this
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
data.metaData.objects.entry.mediaFiles,
);
return {
slug,
file: { path: data.metaData.objects.entry.path, id: null },
data: data.fileData as string,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
async deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
async publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
// Returns the deploy preview { url, status } for an entry, or null when no
// matching status exists or the status request fails (best-effort lookup).
async getDeployPreview(collection: string, slug: string) {
try {
const statuses = await this.api!.getStatuses(collection, slug);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
} catch (e) {
return null;
}
}
}

View File

@ -16,18 +16,16 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"peerDependencies": {
"@emotion/core": "^10.0.9",
"@emotion/styled": "^10.0.9",
"immutable": "^3.8.2",
"lodash": "^4.17.11",
"netlify-cms-lib-util": "^2.3.0",
"netlify-cms-ui-default": "^2.6.0",
"prop-types": "^15.7.2",
"react": "^16.8.4",
"react-immutable-proptypes": "^2.1.0",
"uuid": "^3.3.2"
}
}

View File

@ -1,6 +1,5 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { Icon, buttons, shadows, GoBackButton } from 'netlify-cms-ui-default';
@ -38,14 +37,14 @@ export default class AuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
config: ImmutablePropTypes.map.isRequired,
config: PropTypes.object.isRequired,
};
componentDidMount() {
/**
* Allow login screen to be skipped for demo purposes.
*/
const skipLogin = this.props.config.getIn(['backend', 'login']) === false;
const skipLogin = this.props.config.backend.login === false;
if (skipLogin) {
this.props.onLogin(this.state);
}
@ -65,7 +64,7 @@ export default class AuthenticationPage extends React.Component {
<LoginButton disabled={inProgress} onClick={this.handleLogin}>
{inProgress ? 'Logging in...' : 'Login'}
</LoginButton>
{config.get('site_url') && <GoBackButton href={config.get('site_url')}></GoBackButton>}
{config.site_url && <GoBackButton href={config.site_url}></GoBackButton>}
</StyledAuthenticationPage>
);
}

View File

@ -17,8 +17,8 @@ describe('test backend implementation', () => {
const backend = new TestBackend();
await expect(backend.getEntry(null, null, 'posts/some-post.md')).resolves.toEqual({
file: { path: 'posts/some-post.md' },
await expect(backend.getEntry('posts/some-post.md')).resolves.toEqual({
file: { path: 'posts/some-post.md', id: null },
data: 'post content',
});
});
@ -38,8 +38,8 @@ describe('test backend implementation', () => {
const backend = new TestBackend();
await expect(backend.getEntry(null, null, 'posts/dir1/dir2/some-post.md')).resolves.toEqual({
file: { path: 'posts/dir1/dir2/some-post.md' },
await expect(backend.getEntry('posts/dir1/dir2/some-post.md')).resolves.toEqual({
file: { path: 'posts/dir1/dir2/some-post.md', id: null },
data: 'post content',
});
});
@ -224,31 +224,31 @@ describe('test backend implementation', () => {
expect(getFolderEntries(tree, 'pages', 'md', 1)).toEqual([
{
file: { path: 'pages/root-page.md' },
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
},
]);
expect(getFolderEntries(tree, 'pages', 'md', 2)).toEqual([
{
file: { path: 'pages/dir1/nested-page-1.md' },
file: { path: 'pages/dir1/nested-page-1.md', id: null },
data: 'nested page 1 content',
},
{
file: { path: 'pages/root-page.md' },
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
},
]);
expect(getFolderEntries(tree, 'pages', 'md', 3)).toEqual([
{
file: { path: 'pages/dir1/dir2/nested-page-2.md' },
file: { path: 'pages/dir1/dir2/nested-page-2.md', id: null },
data: 'nested page 2 content',
},
{
file: { path: 'pages/dir1/nested-page-1.md' },
file: { path: 'pages/dir1/nested-page-1.md', id: null },
data: 'nested page 1 content',
},
{
file: { path: 'pages/root-page.md' },
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
},
]);

View File

@ -5,25 +5,49 @@ import {
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
basename,
getCollectionDepth,
Implementation,
Entry,
ImplementationEntry,
AssetProxy,
PersistOptions,
ImplementationMediaFile,
User,
Config,
ImplementationFile,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
type RepoFile = { file?: { path: string }; content: string };
type RepoTree = { [key: string]: RepoFile | RepoTree };
declare global {
interface Window {
repoFiles: RepoTree;
repoFilesUnpublished: ImplementationEntry[];
}
}
window.repoFiles = window.repoFiles || {};
window.repoFilesUnpublished = window.repoFilesUnpublished || [];
function getFile(path) {
function getFile(path: string) {
const segments = path.split('/');
let obj = window.repoFiles;
let obj: RepoTree = window.repoFiles;
while (obj && segments.length) {
obj = obj[segments.shift()];
obj = obj[segments.shift() as string] as RepoTree;
}
return obj || {};
return ((obj as unknown) as RepoFile) || {};
}
const pageSize = 10;
const getCursor = (collection, extension, entries, index) => {
const getCursor = (
folder: string,
extension: string,
entries: ImplementationEntry[],
index: number,
depth: number,
) => {
const count = entries.length;
const pageCount = Math.floor(count / pageSize);
return Cursor.create({
@ -32,24 +56,31 @@ const getCursor = (collection, extension, entries, index) => {
...(index > 0 ? ['prev', 'first'] : []),
],
meta: { index, count, pageSize, pageCount },
data: { collection, extension, index, pageCount },
data: { folder, extension, index, pageCount, depth },
});
};
export const getFolderEntries = (tree, folder, extension, depth, files = [], path = folder) => {
export const getFolderEntries = (
tree: RepoTree,
folder: string,
extension: string,
depth: number,
files = [] as ImplementationEntry[],
path = folder,
) => {
if (depth <= 0) {
return files;
}
Object.keys(tree[folder] || {}).forEach(key => {
if (key.endsWith(`.${extension}`)) {
const file = tree[folder][key];
const file = (tree[folder] as RepoTree)[key] as RepoFile;
files.unshift({
file: { path: `${path}/${key}` },
file: { path: `${path}/${key}`, id: null },
data: file.content,
});
} else {
const subTree = tree[folder];
const subTree = tree[folder] as RepoTree;
return getFolderEntries(subTree, key, extension, depth - 1, files, `${path}/${key}`);
}
});
@ -57,9 +88,11 @@ export const getFolderEntries = (tree, folder, extension, depth, files = [], pat
return files;
};
export default class TestBackend {
constructor(config, options = {}) {
this.config = config;
export default class TestBackend implements Implementation {
assets: ImplementationMediaFile[];
options: { initialWorkflowStatus?: string };
constructor(_config: Config, options = {}) {
this.assets = [];
this.options = options;
}
@ -68,12 +101,12 @@ export default class TestBackend {
return AuthenticationPage;
}
restoreUser(user) {
return this.authenticate(user);
restoreUser() {
return this.authenticate();
}
authenticate() {
return Promise.resolve();
return (Promise.resolve() as unknown) as Promise<User>;
}
logout() {
@ -84,14 +117,20 @@ export default class TestBackend {
return Promise.resolve('');
}
traverseCursor(cursor, action) {
const { collection, extension, index, pageCount } = cursor.data.toObject();
traverseCursor(cursor: Cursor, action: string) {
const { folder, extension, index, pageCount, depth } = cursor.data!.toObject() as {
folder: string;
extension: string;
index: number;
pageCount: number;
depth: number;
};
const newIndex = (() => {
if (action === 'next') {
return index + 1;
return (index as number) + 1;
}
if (action === 'prev') {
return index - 1;
return (index as number) - 1;
}
if (action === 'first') {
return 0;
@ -99,35 +138,26 @@ export default class TestBackend {
if (action === 'last') {
return pageCount;
}
return 0;
})();
// TODO: stop assuming cursors are for collections
const depth = getCollectionDepth(collection);
const allEntries = getFolderEntries(
window.repoFiles,
collection.get('folder'),
extension,
depth,
);
const allEntries = getFolderEntries(window.repoFiles, folder, extension, depth);
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
const newCursor = getCursor(collection, extension, allEntries, newIndex);
const newCursor = getCursor(folder, extension, allEntries, newIndex, depth);
return Promise.resolve({ entries, cursor: newCursor });
}
entriesByFolder(collection, extension) {
const folder = collection.get('folder');
const depth = getCollectionDepth(collection);
entriesByFolder(folder: string, extension: string, depth: number) {
const entries = folder ? getFolderEntries(window.repoFiles, folder, extension, depth) : [];
const cursor = getCursor(collection, extension, entries, 0);
const cursor = getCursor(folder, extension, entries, 0, depth);
const ret = take(entries, pageSize);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return Promise.resolve(ret);
}
entriesByFiles(collection) {
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
entriesByFiles(files: ImplementationFile[]) {
return Promise.all(
files.map(file => ({
file,
@ -136,9 +166,9 @@ export default class TestBackend {
);
}
getEntry(collection, slug, path) {
getEntry(path: string) {
return Promise.resolve({
file: { path },
file: { path, id: null },
data: getFile(path).content,
});
}
@ -147,18 +177,18 @@ export default class TestBackend {
return Promise.resolve(window.repoFilesUnpublished);
}
getMediaFiles(entry) {
const mediaFiles = entry.mediaFiles.map(file => ({
getMediaFiles(entry: ImplementationEntry) {
const mediaFiles = entry.mediaFiles!.map(file => ({
...file,
...this.mediaFileToAsset(file),
file: file.fileObj,
...this.normalizeAsset(file),
file: file.file as File,
}));
return mediaFiles;
}
unpublishedEntry(collection, slug) {
unpublishedEntry(collection: string, slug: string) {
const entry = window.repoFilesUnpublished.find(
e => e.metaData.collection === collection.get('name') && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
if (!entry) {
return Promise.reject(
@ -170,25 +200,32 @@ export default class TestBackend {
return Promise.resolve(entry);
}
deleteUnpublishedEntry(collection, slug) {
deleteUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore.splice(existingEntryIndex, 1);
return Promise.resolve();
}
async persistEntry({ path, raw, slug }, mediaFiles, options = {}) {
async persistEntry(
{ path, raw, slug }: Entry,
assetProxies: AssetProxy[],
options: PersistOptions,
) {
if (options.useWorkflow) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = { ...unpubStore[existingEntryIndex], data: raw };
unpubEntry.title = options.parsedData && options.parsedData.title;
unpubEntry.description = options.parsedData && options.parsedData.description;
unpubEntry.mediaFiles = mediaFiles;
const unpubEntry = {
...unpubStore[existingEntryIndex],
data: raw,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
mediaFiles: assetProxies.map(this.normalizeAsset),
};
unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
@ -196,15 +233,16 @@ export default class TestBackend {
data: raw,
file: {
path,
id: null,
},
metaData: {
collection: options.collectionName,
status: options.status || this.options.initialWorkflowStatus,
collection: options.collectionName as string,
status: (options.status || this.options.initialWorkflowStatus) as string,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
},
slug,
mediaFiles,
mediaFiles: assetProxies.map(this.normalizeAsset),
};
unpubStore.push(unpubEntry);
}
@ -218,78 +256,91 @@ export default class TestBackend {
let obj = window.repoFiles;
while (segments.length > 1) {
const segment = segments.shift();
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment];
obj = obj[segment] as RepoTree;
}
obj[segments.shift()] = entry;
(obj[segments.shift() as string] as RepoFile) = entry;
await Promise.all(mediaFiles.map(file => this.persistMedia(file)));
await Promise.all(assetProxies.map(file => this.persistMedia(file)));
return Promise.resolve();
}
updateUnpublishedEntryStatus(collection, slug, newStatus) {
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const unpubStore = window.repoFilesUnpublished;
const entryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore[entryIndex].metaData.status = newStatus;
unpubStore[entryIndex]!.metaData!.status = newStatus;
return Promise.resolve();
}
async publishUnpublishedEntry(collection, slug) {
async publishUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = { raw: unpubEntry.data, slug: unpubEntry.slug, path: unpubEntry.file.path };
const entry = {
raw: unpubEntry.data,
slug: unpubEntry.slug as string,
path: unpubEntry.file.path,
};
unpubStore.splice(unpubEntryIndex, 1);
await this.persistEntry(entry, unpubEntry.mediaFiles);
return { mediaFiles: this.getMediaFiles(unpubEntry) };
await this.persistEntry(entry, unpubEntry.mediaFiles!, { commitMessage: '' });
}
getMedia() {
return Promise.resolve(this.assets);
}
async getMediaFile(path) {
const asset = this.assets.find(asset => asset.path === path);
async getMediaFile(path: string) {
const asset = this.assets.find(asset => asset.path === path) as ImplementationMediaFile;
const url = asset.url as string;
const name = basename(path);
const blob = await fetch(asset.url).then(res => res.blob());
const blob = await fetch(url).then(res => res.blob());
const fileObj = new File([blob], name);
return {
displayURL: asset.url,
id: url,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url: asset.url,
url,
};
}
mediaFileToAsset(mediaFile) {
const { fileObj } = mediaFile;
normalizeAsset(assetProxy: AssetProxy) {
const fileObj = assetProxy.fileObj as File;
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = { id: uuid(), name, size, path: mediaFile.path, url, displayURL: url };
const normalizedAsset = {
id: uuid(),
name,
size,
path: assetProxy.path,
url,
displayURL: url,
fileObj,
};
return normalizedAsset;
}
persistMedia(mediaFile) {
const normalizedAsset = this.mediaFileToAsset(mediaFile);
persistMedia(assetProxy: AssetProxy) {
const normalizedAsset = this.normalizeAsset(assetProxy);
this.assets.push(normalizedAsset);
return Promise.resolve(normalizedAsset);
}
deleteFile(path) {
deleteFile(path: string) {
const assetIndex = this.assets.findIndex(asset => asset.path === path);
if (assetIndex > -1) {
this.assets.splice(assetIndex, 1);
@ -299,4 +350,8 @@ export default class TestBackend {
return Promise.resolve();
}
async getDeployPreview() {
return null;
}
}

View File

@ -78,7 +78,7 @@ describe('mediaLibrary', () => {
jest.clearAllMocks();
});
it('should not persist media in editorial workflow', () => {
it('should not persist media when editing draft', () => {
const { getBlobSHA } = require('netlify-cms-lib-util');
getBlobSHA.mockReturnValue('000000000000000');
@ -88,7 +88,6 @@ describe('mediaLibrary', () => {
const store = mockStore({
config: Map({
publish_mode: 'editorial_workflow',
media_folder: 'static/media',
}),
collections: Map({
@ -132,52 +131,7 @@ describe('mediaLibrary', () => {
});
});
it('should persist media when not in editorial workflow', () => {
const { sanitizeSlug } = require('../../lib/urlHelper');
sanitizeSlug.mockReturnValue('name.png');
const store = mockStore({
config: Map({
media_folder: 'static/media',
}),
collections: Map({
posts: Map({ name: 'posts' }),
}),
integrations: Map(),
mediaLibrary: Map({
files: List(),
}),
entryDraft: Map({
entry: Map({ isPersisting: false, collection: 'posts' }),
}),
});
const file = new File([''], 'name.png');
const assetProxy = { path: 'static/media/name.png' };
createAssetProxy.mockReturnValue(assetProxy);
return store.dispatch(persistMedia(file)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(3);
expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
expect(actions[1]).toEqual({
type: 'ADD_ASSET',
payload: { path: 'static/media/name.png' },
});
expect(actions[2]).toEqual({
type: 'MEDIA_PERSIST_SUCCESS',
payload: {
file: { id: 'id' },
},
});
expect(backend.persistMedia).toHaveBeenCalledTimes(1);
expect(backend.persistMedia).toHaveBeenCalledWith(store.getState().config, assetProxy);
});
});
it('should persist media when draft is empty', () => {
it('should persist media when not editing draft', () => {
const store = mockStore({
config: Map({
media_folder: 'static/media',

View File

@ -7,7 +7,7 @@ import { getIntegrationProvider } from '../integrations';
import { selectIntegration, selectPublishedSlugs } from '../reducers';
import { selectFields } from '../reducers/collections';
import { selectCollectionEntriesCursor } from '../reducers/cursors';
import { Cursor } from 'netlify-cms-lib-util';
import { Cursor, ImplementationMediaFile } from 'netlify-cms-lib-util';
import { createEntry, EntryValue } from '../valueObjects/Entry';
import AssetProxy, { createAssetProxy } from '../valueObjects/AssetProxy';
import ValidationErrorTypes from '../constants/validationErrorTypes';
@ -23,7 +23,7 @@ import {
} from '../types/redux';
import { ThunkDispatch } from 'redux-thunk';
import { AnyAction, Dispatch } from 'redux';
import { waitForMediaLibraryToLoad } from './mediaLibrary';
import { waitForMediaLibraryToLoad, loadMedia } from './mediaLibrary';
import { waitUntil } from './waitUntil';
const { notifSend } = notifActions;
@ -108,7 +108,7 @@ export function entriesLoaded(
collection: Collection,
entries: EntryValue[],
pagination: number | null,
cursor: typeof Cursor,
cursor: Cursor,
append = true,
) {
return {
@ -261,7 +261,7 @@ export function loadLocalBackup() {
};
}
export function addDraftEntryMediaFile(file: MediaFile) {
export function addDraftEntryMediaFile(file: ImplementationMediaFile) {
return { type: ADD_DRAFT_ENTRY_MEDIA_FILE, payload: file };
}
@ -270,7 +270,7 @@ export function removeDraftEntryMediaFile({ id }: { id: string }) {
}
export function persistLocalBackup(entry: EntryMap, collection: Collection) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
return (_dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const backend = currentBackend(state.config);
@ -309,7 +309,7 @@ export function retrieveLocalBackup(collection: Collection, slug: string) {
}
export function deleteLocalBackup(collection: Collection, slug: string) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
return (_dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const backend = currentBackend(state.config);
return backend.deleteLocalDraftBackup(collection, slug);
@ -351,7 +351,7 @@ const appendActions = fromJS({
['append_next']: { action: 'next', append: true },
});
const addAppendActionsToCursor = (cursor: typeof Cursor) => {
const addAppendActionsToCursor = (cursor: Cursor) => {
return Cursor.create(cursor).updateStore('actions', (actions: Set<string>) => {
return actions.union(
appendActions
@ -393,11 +393,11 @@ export function loadEntries(collection: Collection, page = 0) {
})
: Cursor.create(response.cursor),
}))
.then((response: { cursor: typeof Cursor; pagination: number; entries: EntryValue[] }) =>
.then((response: { cursor: Cursor; pagination: number; entries: EntryValue[] }) =>
dispatch(
entriesLoaded(
collection,
response.cursor.meta.get('usingOldPaginationAPI')
response.cursor.meta!.get('usingOldPaginationAPI')
? response.entries.reverse()
: response.entries,
response.pagination,
@ -422,8 +422,8 @@ export function loadEntries(collection: Collection, page = 0) {
};
}
function traverseCursor(backend: Backend, cursor: typeof Cursor, action: string) {
if (!cursor.actions.has(action)) {
function traverseCursor(backend: Backend, cursor: Cursor, action: string) {
if (!cursor.actions!.has(action)) {
throw new Error(`The current cursor does not support the pagination action "${action}".`);
}
return backend.traverseCursor(cursor, action);
@ -445,8 +445,8 @@ export function traverseCollectionCursor(collection: Collection, action: string)
// Handle cursors representing pages in the old, integer-based
// pagination API
if (cursor.meta.get('usingOldPaginationAPI', false)) {
return dispatch(loadEntries(collection, cursor.data.get('nextPage')));
if (cursor.meta!.get('usingOldPaginationAPI', false)) {
return dispatch(loadEntries(collection, cursor.data!.get('nextPage') as number));
}
try {
@ -625,6 +625,10 @@ export function persistEntry(collection: Collection) {
dismissAfter: 4000,
}),
);
// re-load media library if entry had media files
if (assetProxies.length > 0) {
dispatch(loadMedia());
}
dispatch(entryPersisted(collection, serializedEntry, slug));
})
.catch((error: Error) => {

View File

@ -1,5 +1,5 @@
import AssetProxy, { createAssetProxy } from '../valueObjects/AssetProxy';
import { Collection, State, MediaFile } from '../types/redux';
import { Collection, State } from '../types/redux';
import { ThunkDispatch } from 'redux-thunk';
import { AnyAction } from 'redux';
import { isAbsolutePath } from 'netlify-cms-lib-util';
@ -49,7 +49,7 @@ export function getAsset({ collection, entryPath, path }: GetAssetArgs) {
} else {
// load asset url from backend
await waitForMediaLibraryToLoad(dispatch, getState());
const file: MediaFile | null = selectMediaFileByPath(state, resolvedPath);
const file = selectMediaFileByPath(state, resolvedPath);
if (file) {
const url = await getMediaDisplayURL(dispatch, getState(), file);

View File

@ -1,20 +1,22 @@
import { Map } from 'immutable';
import { actions as notifActions } from 'redux-notifications';
import { getBlobSHA } from 'netlify-cms-lib-util';
import { getBlobSHA, ImplementationMediaFile } from 'netlify-cms-lib-util';
import { currentBackend } from '../backend';
import AssetProxy, { createAssetProxy } from '../valueObjects/AssetProxy';
import { selectIntegration } from '../reducers';
import { selectMediaFilePath, selectMediaFilePublicPath } from '../reducers/entries';
import {
selectMediaFilePath,
selectMediaFilePublicPath,
selectEditingDraft,
} from '../reducers/entries';
import { selectMediaDisplayURL, selectMediaFiles } from '../reducers/mediaLibrary';
import { getIntegrationProvider } from '../integrations';
import { addAsset, removeAsset } from './media';
import { addDraftEntryMediaFile, removeDraftEntryMediaFile } from './entries';
import { sanitizeSlug } from '../lib/urlHelper';
import { State, MediaFile, DisplayURLState } from '../types/redux';
import { State, MediaFile, DisplayURLState, MediaLibraryInstance } from '../types/redux';
import { AnyAction } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
import { MediaLibraryInstance } from '../mediaLibrary';
import { selectEditingWorkflowDraft } from '../reducers/editorialWorkflow';
import { waitUntilWithTimeout } from './waitUntil';
const { notifSend } = notifActions;
@ -49,7 +51,7 @@ export function createMediaLibrary(instance: MediaLibraryInstance) {
}
export function clearMediaControl(id: string) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
return (_dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const mediaLibrary = state.mediaLibrary.get('externalLibrary');
if (mediaLibrary) {
@ -59,7 +61,7 @@ export function clearMediaControl(id: string) {
}
export function removeMediaControl(id: string) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
return (_dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const mediaLibrary = state.mediaLibrary.get('externalLibrary');
if (mediaLibrary) {
@ -150,7 +152,7 @@ export function loadMedia(
resolve(
backend
.getMedia()
.then((files: MediaFile[]) => dispatch(mediaLoaded(files)))
.then(files => dispatch(mediaLoaded(files)))
.catch((error: { status?: number }) => {
console.error(error);
if (error.status === 404) {
@ -177,7 +179,7 @@ function createMediaFileFromAsset({
file: File;
assetProxy: AssetProxy;
draft: boolean;
}): MediaFile {
}): ImplementationMediaFile {
const mediaFile = {
id,
name: file.name,
@ -200,7 +202,7 @@ export function persistMedia(file: File, opts: MediaOptions = {}) {
const fileName = sanitizeSlug(file.name.toLowerCase(), state.config.get('slug'));
const existingFile = files.find(existingFile => existingFile.name.toLowerCase() === fileName);
const editingDraft = selectEditingWorkflowDraft(state);
const editingDraft = selectEditingDraft(state.entryDraft);
/**
* Check for existing files of the same name before persisting. If no asset
@ -255,7 +257,7 @@ export function persistMedia(file: File, opts: MediaOptions = {}) {
dispatch(addAsset(assetProxy));
let mediaFile: MediaFile;
let mediaFile: ImplementationMediaFile;
if (integration) {
const id = await getBlobSHA(file);
// integration assets are persisted immediately, thus draft is false
@ -314,7 +316,7 @@ export function deleteMedia(file: MediaFile, opts: MediaOptions = {}) {
dispatch(removeAsset(file.path));
dispatch(removeDraftEntryMediaFile({ id: file.id }));
} else {
const editingDraft = selectEditingWorkflowDraft(state);
const editingDraft = selectEditingDraft(state.entryDraft);
dispatch(mediaDeleting());
dispatch(removeAsset(file.path));
@ -395,7 +397,7 @@ interface MediaOptions {
privateUpload?: boolean;
}
export function mediaLoaded(files: MediaFile[], opts: MediaOptions = {}) {
export function mediaLoaded(files: ImplementationMediaFile[], opts: MediaOptions = {}) {
return {
type: MEDIA_LOAD_SUCCESS,
payload: { files, ...opts },
@ -411,7 +413,7 @@ export function mediaPersisting() {
return { type: MEDIA_PERSIST_REQUEST };
}
export function mediaPersisted(file: MediaFile, opts: MediaOptions = {}) {
export function mediaPersisted(file: ImplementationMediaFile, opts: MediaOptions = {}) {
const { privateUpload } = opts;
return {
type: MEDIA_PERSIST_SUCCESS,

View File

@ -3,10 +3,10 @@ import { List } from 'immutable';
import { stripIndent } from 'common-tags';
import * as fuzzy from 'fuzzy';
import { resolveFormat } from './formats/formats';
import { selectUseWorkflow } from './reducers/config';
import { selectMediaFilePath, selectMediaFolder } from './reducers/entries';
import { selectIntegration } from './reducers/integrations';
import {
selectListMethod,
selectEntrySlug,
selectEntryPath,
selectFileEntryLabel,
@ -24,8 +24,16 @@ import {
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
EditorialWorkflowError,
Implementation as BackendImplementation,
DisplayURL,
ImplementationEntry,
ImplementationMediaFile,
Credentials,
User,
getPathDepth,
Config as ImplementationConfig,
} from 'netlify-cms-lib-util';
import { EDITORIAL_WORKFLOW, status } from './constants/publishModes';
import { status } from './constants/publishModes';
import {
SLUG_MISSING_REQUIRED_DATE,
compileStringTemplate,
@ -38,16 +46,14 @@ import {
EntryMap,
Config,
SlugConfig,
DisplayURL,
FilterRule,
Collections,
MediaFile,
EntryDraft,
CollectionFile,
State,
} from './types/redux';
import AssetProxy from './valueObjects/AssetProxy';
import { selectEditingWorkflowDraft } from './reducers/editorialWorkflow';
import { FOLDER, FILES } from './constants/collectionTypes';
export class LocalStorageAuthStore {
storageKey = 'netlify-cms-user';
@ -153,87 +159,6 @@ function createPreviewUrl(
return `${basePath}/${previewPath}`;
}
interface ImplementationInitOptions {
useWorkflow: boolean;
updateUserCredentials: (credentials: Credentials) => void;
initialWorkflowStatus: string;
}
interface ImplementationEntry {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
data: any;
file: { path: string; label: string };
metaData: { collection: string };
isModification?: boolean;
slug: string;
mediaFiles: MediaFile[];
}
interface Implementation {
authComponent: () => void;
restoreUser: (user: User) => Promise<User>;
init: (config: Config, options: ImplementationInitOptions) => Implementation;
authenticate: (credentials: Credentials) => Promise<User>;
logout: () => Promise<void>;
getToken: () => Promise<string>;
unpublishedEntry?: (collection: Collection, slug: string) => Promise<ImplementationEntry>;
getEntry: (collection: Collection, slug: string, path: string) => Promise<ImplementationEntry>;
allEntriesByFolder?: (
collection: Collection,
extension: string,
) => Promise<ImplementationEntry[]>;
traverseCursor: (
cursor: typeof Cursor,
action: unknown,
) => Promise<{ entries: ImplementationEntry[]; cursor: typeof Cursor }>;
entriesByFolder: (collection: Collection, extension: string) => Promise<ImplementationEntry[]>;
entriesByFiles: (collection: Collection, extension: string) => Promise<ImplementationEntry[]>;
unpublishedEntries: () => Promise<ImplementationEntry[]>;
getMediaDisplayURL?: (displayURL: DisplayURL) => Promise<string>;
getMedia: (folder?: string) => Promise<MediaFile[]>;
getMediaFile: (path: string) => Promise<MediaFile>;
getDeployPreview: (
collection: Collection,
slug: string,
) => Promise<{ url: string; status: string }>;
persistEntry: (
obj: { path: string; slug: string; raw: string },
assetProxies: AssetProxy[],
opts: {
newEntry: boolean;
parsedData: { title: string; description: string };
commitMessage: string;
collectionName: string;
useWorkflow: boolean;
unpublished: boolean;
status?: string;
},
) => Promise<void>;
persistMedia: (file: AssetProxy, opts: { commitMessage: string }) => Promise<MediaFile>;
deleteFile: (
path: string,
commitMessage: string,
opts?: { collection: Collection; slug: string },
) => Promise<void>;
updateUnpublishedEntryStatus: (
collection: string,
slug: string,
newStatus: string,
) => Promise<void>;
publishUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
deleteUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
}
type Credentials = {};
interface User {
backendName: string;
login: string;
name: string;
useOpenAuthoring: boolean;
}
interface AuthStore {
retrieve: () => User;
store: (user: User) => void;
@ -246,18 +171,10 @@ interface BackendOptions {
config?: Config;
}
interface BackupMediaFile extends MediaFile {
file?: File;
}
export interface ImplementationMediaFile extends MediaFile {
file?: File;
}
interface BackupEntry {
raw: string;
path: string;
mediaFiles: BackupMediaFile[];
mediaFiles: ImplementationMediaFile[];
}
interface PersistArgs {
@ -270,6 +187,16 @@ interface PersistArgs {
status?: string;
}
interface ImplementationInitOptions {
useWorkflow: boolean;
updateUserCredentials: (credentials: Credentials) => void;
initialWorkflowStatus: string;
}
type Implementation = BackendImplementation & {
init: (config: ImplementationConfig, options: ImplementationInitOptions) => Implementation;
};
export class Backend {
implementation: Implementation;
backendName: string;
@ -284,8 +211,8 @@ export class Backend {
// We can't reliably run this on exit, so we do cleanup on load.
this.deleteAnonymousBackup();
this.config = config as Config;
this.implementation = implementation.init(this.config, {
useWorkflow: this.config.get('publish_mode') === EDITORIAL_WORKFLOW,
this.implementation = implementation.init(this.config.toJS(), {
useWorkflow: selectUseWorkflow(this.config),
updateUserCredentials: this.updateUserCredentials,
initialWorkflowStatus: status.first(),
});
@ -300,12 +227,12 @@ export class Backend {
if (this.user) {
return this.user;
}
const stored = this.authStore?.retrieve();
const stored = this.authStore!.retrieve();
if (stored && stored.backendName === this.backendName) {
return Promise.resolve(this.implementation.restoreUser(stored)).then(user => {
this.user = { ...user, backendName: this.backendName };
// return confirmed/rehydrated user object instead of stored
this.authStore?.store(this.user);
this.authStore!.store(this.user as User);
return this.user;
});
}
@ -313,10 +240,10 @@ export class Backend {
}
updateUserCredentials = (updatedCredentials: Credentials) => {
const storedUser = this.authStore?.retrieve();
const storedUser = this.authStore!.retrieve();
if (storedUser && storedUser.backendName === this.backendName) {
this.user = { ...storedUser, ...updatedCredentials };
this.authStore?.store(this.user as User);
this.authStore!.store(this.user as User);
return this.user;
}
};
@ -346,10 +273,10 @@ export class Backend {
getToken = () => this.implementation.getToken();
async entryExist(collection: Collection, path: string, slug: string) {
async entryExist(collection: Collection, path: string, slug: string, useWorkflow: boolean) {
const unpublishedEntry =
this.implementation.unpublishedEntry &&
(await this.implementation.unpublishedEntry(collection, slug).catch(error => {
useWorkflow &&
(await this.implementation.unpublishedEntry(collection.get('name'), slug).catch(error => {
if (error instanceof EditorialWorkflowError && error.notUnderEditorialWorkflow) {
return Promise.resolve(false);
}
@ -359,7 +286,7 @@ export class Backend {
if (unpublishedEntry) return unpublishedEntry;
const publishedEntry = await this.implementation
.getEntry(collection, slug, path)
.getEntry(path)
.then(({ data }) => data)
.catch(() => {
return Promise.resolve(false);
@ -371,9 +298,10 @@ export class Backend {
async generateUniqueSlug(
collection: Collection,
entryData: EntryMap,
slugConfig: SlugConfig,
config: Config,
usedSlugs: List<string>,
) {
const slugConfig = config.get('slug');
const slug: string = slugFormatter(collection, entryData, slugConfig);
let i = 1;
let uniqueSlug = slug;
@ -385,6 +313,7 @@ export class Backend {
collection,
selectEntryPath(collection, uniqueSlug) as string,
uniqueSlug,
selectUseWorkflow(config),
))
) {
uniqueSlug = `${slug}${sanitizeChar(' ', slugConfig)}${i++}`;
@ -411,24 +340,42 @@ export class Backend {
}
listEntries(collection: Collection) {
const listMethod = this.implementation[selectListMethod(collection)];
const extension = selectFolderEntryExtension(collection);
return listMethod
.call(this.implementation, collection, extension)
.then((loadedEntries: ImplementationEntry[]) => ({
entries: this.processEntries(loadedEntries, collection),
/*
let listMethod: () => Promise<ImplementationEntry[]>;
const collectionType = collection.get('type');
if (collectionType === FOLDER) {
listMethod = () =>
this.implementation.entriesByFolder(
collection.get('folder') as string,
extension,
getPathDepth(collection.get('path', '') as string),
);
} else if (collectionType === FILES) {
const files = collection
.get('files')!
.map(collectionFile => ({
path: collectionFile!.get('file'),
label: collectionFile!.get('label'),
}))
.toArray();
listMethod = () => this.implementation.entriesByFiles(files);
} else {
throw new Error(`Unknown collection type: ${collectionType}`);
}
return listMethod().then((loadedEntries: ImplementationEntry[]) => ({
entries: this.processEntries(loadedEntries, collection),
/*
Wrap cursors so we can tell which collection the cursor is
from. This is done to prevent traverseCursor from requiring a
`collection` argument.
*/
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
cursor: Cursor.create(loadedEntries[CURSOR_COMPATIBILITY_SYMBOL]).wrapData({
cursorType: 'collectionEntries',
collection,
}),
}));
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
cursor: Cursor.create(loadedEntries[CURSOR_COMPATIBILITY_SYMBOL]).wrapData({
cursorType: 'collectionEntries',
collection,
}),
}));
}
// The same as listEntries, except that if a cursor with the "next"
@ -440,14 +387,18 @@ export class Backend {
if (collection.get('folder') && this.implementation.allEntriesByFolder) {
const extension = selectFolderEntryExtension(collection);
return this.implementation
.allEntriesByFolder(collection, extension)
.allEntriesByFolder(
collection.get('folder') as string,
extension,
getPathDepth(collection.get('path', '') as string),
)
.then(entries => this.processEntries(entries, collection));
}
const response = await this.listEntries(collection);
const { entries } = response;
let { cursor } = response;
while (cursor && cursor.actions.includes('next')) {
while (cursor && cursor.actions!.includes('next')) {
const { entries: newEntries, cursor: newCursor } = await this.traverseCursor(cursor, 'next');
entries.push(...newEntries);
cursor = newCursor;
@ -513,19 +464,19 @@ export class Backend {
return { query: searchTerm, hits };
}
traverseCursor(cursor: typeof Cursor, action: string) {
traverseCursor(cursor: Cursor, action: string) {
const [data, unwrappedCursor] = cursor.unwrapData();
// TODO: stop assuming all cursors are for collections
const collection: Collection = data.get('collection');
return this.implementation
.traverseCursor(unwrappedCursor, action)
.then(async ({ entries, cursor: newCursor }) => ({
const collection = data.get('collection') as Collection;
return this.implementation!.traverseCursor!(unwrappedCursor, action).then(
async ({ entries, cursor: newCursor }) => ({
entries: this.processEntries(entries, collection),
cursor: Cursor.create(newCursor).wrapData({
cursorType: 'collectionEntries',
collection,
}),
}));
}),
);
}
async getLocalDraftBackup(collection: Collection, slug: string) {
@ -560,14 +511,14 @@ export class Backend {
return;
}
const mediaFiles = await Promise.all<BackupMediaFile>(
const mediaFiles = await Promise.all<ImplementationMediaFile>(
entry
.get('mediaFiles')
.toJS()
.map(async (file: MediaFile) => {
.map(async (file: ImplementationMediaFile) => {
// make sure to serialize the file
if (file.url?.startsWith('blob:')) {
const blob = await fetch(file.url).then(res => res.blob());
const blob = await fetch(file.url as string).then(res => res.blob());
return { ...file, file: new File([blob], file.name) };
}
return file;
@ -598,14 +549,13 @@ export class Backend {
const path = selectEntryPath(collection, slug) as string;
const label = selectFileEntryLabel(collection, slug);
const workflowDraft = selectEditingWorkflowDraft(state);
const integration = selectIntegration(state.integrations, null, 'assetStore');
const [loadedEntry, mediaFiles] = await Promise.all([
this.implementation.getEntry(collection, slug, path),
workflowDraft && !integration
this.implementation.getEntry(path),
collection.has('media_folder') && !integration
? this.implementation.getMedia(selectMediaFolder(state.config, collection, path))
: Promise.resolve([]),
: Promise.resolve(state.mediaLibrary.get('files') || []),
]);
const entry = createEntry(collection.get('name'), slug, loadedEntry.file.path, {
@ -649,17 +599,15 @@ export class Backend {
}
unpublishedEntries(collections: Collections) {
return this.implementation
.unpublishedEntries()
.then(loadedEntries => loadedEntries.filter(entry => entry !== null))
return this.implementation.unpublishedEntries!()
.then(entries =>
entries.map(loadedEntry => {
const collectionName = loadedEntry.metaData.collection;
const collectionName = loadedEntry.metaData!.collection;
const collection = collections.find(c => c.get('name') === collectionName);
const entry = createEntry(collectionName, loadedEntry.slug, loadedEntry.file.path, {
raw: loadedEntry.data,
isModification: loadedEntry.isModification,
label: selectFileEntryLabel(collection, loadedEntry.slug),
label: selectFileEntryLabel(collection, loadedEntry.slug!),
});
entry.metaData = loadedEntry.metaData;
return entry;
@ -678,8 +626,7 @@ export class Backend {
}
unpublishedEntry(collection: Collection, slug: string) {
return this.implementation
.unpublishedEntry?.(collection, slug)
return this.implementation!.unpublishedEntry!(collection.get('name') as string, slug)
.then(loadedEntry => {
const entry = createEntry(collection.get('name'), loadedEntry.slug, loadedEntry.file.path, {
raw: loadedEntry.data,
@ -741,7 +688,7 @@ export class Backend {
count = 0;
while (!deployPreview && count < maxAttempts) {
count++;
deployPreview = await this.implementation.getDeployPreview(collection, slug);
deployPreview = await this.implementation.getDeployPreview(collection.get('name'), slug);
if (!deployPreview) {
await new Promise(resolve => setTimeout(() => resolve(), interval));
}
@ -795,7 +742,7 @@ export class Backend {
const slug = await this.generateUniqueSlug(
collection,
entryDraft.getIn(['entry', 'data']),
config.get('slug'),
config,
usedSlugs,
);
const path = selectEntryPath(collection, slug) as string;
@ -836,7 +783,7 @@ export class Backend {
user.useOpenAuthoring,
);
const useWorkflow = config.get('publish_mode') === EDITORIAL_WORKFLOW;
const useWorkflow = selectUseWorkflow(config);
const collectionName = collection.get('name');
@ -892,7 +839,7 @@ export class Backend {
},
user.useOpenAuthoring,
);
return this.implementation.deleteFile(path, commitMessage, { collection, slug });
return this.implementation.deleteFile(path, commitMessage);
}
async deleteMedia(config: Config, path: string) {
@ -917,15 +864,15 @@ export class Backend {
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
return this.implementation.updateUnpublishedEntryStatus(collection, slug, newStatus);
return this.implementation.updateUnpublishedEntryStatus!(collection, slug, newStatus);
}
publishUnpublishedEntry(collection: string, slug: string) {
return this.implementation.publishUnpublishedEntry(collection, slug);
return this.implementation.publishUnpublishedEntry!(collection, slug);
}
deleteUnpublishedEntry(collection: string, slug: string) {
return this.implementation.deleteUnpublishedEntry(collection, slug);
return this.implementation.deleteUnpublishedEntry!(collection, slug);
}
entryToRaw(collection: Collection, entry: EntryMap): string {
@ -939,13 +886,13 @@ export class Backend {
if (fields) {
return collection
.get('fields')
.map(f => f?.get('name'))
.map(f => f!.get('name'))
.toArray();
}
const files = collection.get('files');
const file = (files || List<CollectionFile>())
.filter(f => f?.get('name') === entry.get('slug'))
.filter(f => f!.get('name') === entry.get('slug'))
.get(0);
if (file == null) {
@ -953,7 +900,7 @@ export class Backend {
}
return file
.get('fields')
.map(f => f?.get('name'))
.map(f => f!.get('name'))
.toArray();
}
@ -976,10 +923,11 @@ export function resolveBackend(config: Config) {
const authStore = new LocalStorageAuthStore();
if (!getBackend(name)) {
const backend = getBackend(name);
if (!backend) {
throw new Error(`Backend not found: ${name}`);
} else {
return new Backend(getBackend(name), { backendName: name, authStore, config });
return new Backend(backend, { backendName: name, authStore, config });
}
}

View File

@ -130,7 +130,7 @@ class App extends React.Component {
siteId: this.props.config.getIn(['backend', 'site_domain']),
base_url: this.props.config.getIn(['backend', 'base_url'], null),
authEndpoint: this.props.config.getIn(['backend', 'auth_endpoint']),
config: this.props.config,
config: this.props.config.toJS(),
clearHash: () => history.replace('/'),
t,
})}

View File

@ -6,6 +6,7 @@ import { once } from 'lodash';
import { getMediaLibrary } from './lib/registry';
import store from './redux';
import { createMediaLibrary, insertMedia } from './actions/mediaLibrary';
import { MediaLibraryInstance } from './types/redux';
type MediaLibraryOptions = {};
@ -16,14 +17,6 @@ interface MediaLibrary {
}) => MediaLibraryInstance;
}
export interface MediaLibraryInstance {
show?: () => void;
hide?: () => void;
onClearControl?: (args: { id: string }) => void;
onRemoveControl?: (args: { id: string }) => void;
enableStandalone?: () => boolean;
}
const initializeMediaLibrary = once(async function initializeMediaLibrary(name, options) {
const lib = (getMediaLibrary(name) as unknown) as MediaLibrary;
const handleInsert = (url: string) => store.dispatch(insertMedia(url));

View File

@ -5,7 +5,6 @@ import reducer, {
selectMediaFilePath,
selectMediaFilePublicPath,
} from '../entries';
import { EDITORIAL_WORKFLOW } from '../../constants/publishModes';
const initialState = OrderedMap({
posts: Map({ name: 'posts' }),
@ -73,33 +72,26 @@ describe('entries', () => {
});
describe('selectMediaFolder', () => {
it('should return global media folder when not in editorial workflow', () => {
expect(selectMediaFolder(Map({ media_folder: 'static/media' }))).toEqual('static/media');
});
it("should return global media folder when in editorial workflow and collection doesn't specify media_folder", () => {
it("should return global media folder when collection doesn't specify media_folder", () => {
expect(
selectMediaFolder(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ name: 'posts' }),
),
selectMediaFolder(Map({ media_folder: 'static/media' }), Map({ name: 'posts' })),
).toEqual('static/media');
});
it('should return draft media folder when in editorial workflow, collection specifies media_folder and entry path is null', () => {
it('should return draft media folder when collection specifies media_folder and entry path is null', () => {
expect(
selectMediaFolder(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ media_folder: 'static/media' }),
Map({ name: 'posts', folder: 'posts', media_folder: '' }),
null,
),
).toEqual('posts/DRAFT_MEDIA_FILES');
});
it('should return relative media folder when in editorial workflow, collection specifies media_folder and entry path is not null', () => {
it('should return relative media folder when collection specifies media_folder and entry path is not null', () => {
expect(
selectMediaFolder(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ media_folder: 'static/media' }),
Map({ name: 'posts', folder: 'posts', media_folder: '' }),
'posts/title/index.md',
),
@ -109,7 +101,7 @@ describe('entries', () => {
it('should resolve relative media folder', () => {
expect(
selectMediaFolder(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ media_folder: 'static/media' }),
Map({ name: 'posts', folder: 'posts', media_folder: '../' }),
'posts/title/index.md',
),
@ -126,19 +118,14 @@ describe('entries', () => {
it('should resolve path from global media folder when absolute path', () => {
expect(
selectMediaFilePath(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
null,
null,
'/media/image.png',
),
selectMediaFilePath(Map({ media_folder: 'static/media' }), null, null, '/media/image.png'),
).toBe('static/media/image.png');
});
it('should resolve path from global media folder when relative path for collection with no media folder', () => {
expect(
selectMediaFilePath(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ media_folder: 'static/media' }),
Map({ name: 'posts', folder: 'posts' }),
null,
'image.png',
@ -149,7 +136,7 @@ describe('entries', () => {
it('should resolve path from collection media folder when relative path for collection with media folder', () => {
expect(
selectMediaFilePath(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ media_folder: 'static/media' }),
Map({ name: 'posts', folder: 'posts', media_folder: '' }),
null,
'image.png',
@ -160,7 +147,7 @@ describe('entries', () => {
it('should handle relative media_folder', () => {
expect(
selectMediaFilePath(
Map({ media_folder: 'static/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ media_folder: 'static/media' }),
Map({ name: 'posts', folder: 'posts', media_folder: '../../static/media/' }),
'posts/title/index.md',
'image.png',
@ -176,26 +163,16 @@ describe('entries', () => {
);
});
it('should resolve path from public folder when not in editorial workflow', () => {
it('should resolve path from public folder for collection with no media folder', () => {
expect(
selectMediaFilePublicPath(Map({ public_folder: '/media' }), null, '/media/image.png'),
).toBe('/media/image.png');
});
it('should resolve path from public folder when in editorial workflow for collection with no public folder', () => {
it('should resolve path from collection media folder for collection with public folder', () => {
expect(
selectMediaFilePublicPath(
Map({ public_folder: '/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ name: 'posts', folder: 'posts' }),
'image.png',
),
).toBe('/media/image.png');
});
it('should resolve path from collection media folder when in editorial workflow for collection with public folder', () => {
expect(
selectMediaFilePublicPath(
Map({ public_folder: '/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ public_folder: '/media' }),
Map({ name: 'posts', folder: 'posts', public_folder: '' }),
'image.png',
),
@ -205,7 +182,7 @@ describe('entries', () => {
it('should handle relative public_folder', () => {
expect(
selectMediaFilePublicPath(
Map({ public_folder: '/media', publish_mode: EDITORIAL_WORKFLOW }),
Map({ public_folder: '/media' }),
Map({ name: 'posts', folder: 'posts', public_folder: '../../static/media/' }),
'image.png',
),

View File

@ -7,7 +7,7 @@ import mediaLibrary, {
} from '../mediaLibrary';
jest.mock('uuid/v4');
jest.mock('Reducers/editorialWorkflow');
jest.mock('Reducers/entries');
jest.mock('Reducers');
describe('mediaLibrary', () => {
@ -43,10 +43,10 @@ describe('mediaLibrary', () => {
);
});
it('should select draft media files when editing a workflow draft', () => {
const { selectEditingWorkflowDraft } = require('Reducers/editorialWorkflow');
it('should select draft media files when editing a draft', () => {
const { selectEditingDraft } = require('Reducers/entries');
selectEditingWorkflowDraft.mockReturnValue(true);
selectEditingDraft.mockReturnValue(true);
const state = {
entryDraft: fromJS({ entry: { mediaFiles: [{ id: 1 }] } }),
@ -55,10 +55,10 @@ describe('mediaLibrary', () => {
expect(selectMediaFiles(state)).toEqual([{ key: 1, id: 1 }]);
});
it('should select global media files when not editing a workflow draft', () => {
const { selectEditingWorkflowDraft } = require('Reducers/editorialWorkflow');
it('should select global media files when not editing a draft', () => {
const { selectEditingDraft } = require('Reducers/entries');
selectEditingWorkflowDraft.mockReturnValue(false);
selectEditingDraft.mockReturnValue(false);
const state = {
mediaLibrary: Map({ files: [{ id: 1 }] }),
@ -80,9 +80,9 @@ describe('mediaLibrary', () => {
});
it('should return media file by path', () => {
const { selectEditingWorkflowDraft } = require('Reducers/editorialWorkflow');
const { selectEditingDraft } = require('Reducers/entries');
selectEditingWorkflowDraft.mockReturnValue(false);
selectEditingDraft.mockReturnValue(false);
const state = {
mediaLibrary: Map({ files: [{ id: 1, path: 'path' }] }),

View File

@ -36,11 +36,6 @@ const collections = (state = null, action: CollectionsAction) => {
}
};
enum ListMethod {
ENTRIES_BY_FOLDER = 'entriesByFolder',
ENTRIES_BY_FILES = 'entriesByFiles',
}
const selectors = {
[FOLDER]: {
entryExtension(collection: Collection) {
@ -65,9 +60,6 @@ const selectors = {
return slug;
},
listMethod() {
return ListMethod.ENTRIES_BY_FOLDER;
},
allowNewEntries(collection: Collection) {
return collection.get('create');
},
@ -102,16 +94,13 @@ const selectors = {
const files = collection.get('files');
return files && files.find(f => f?.get('file') === path).get('label');
},
listMethod() {
return ListMethod.ENTRIES_BY_FILES;
},
allowNewEntries() {
return false;
},
allowDeletion(collection: Collection) {
return collection.get('delete', false);
},
templateName(collection: Collection, slug: string) {
templateName(_collection: Collection, slug: string) {
return slug;
},
},
@ -127,8 +116,6 @@ export const selectEntryPath = (collection: Collection, slug: string) =>
selectors[collection.get('type')].entryPath(collection, slug);
export const selectEntrySlug = (collection: Collection, path: string) =>
selectors[collection.get('type')].entrySlug(collection, path);
export const selectListMethod = (collection: Collection) =>
selectors[collection.get('type')].listMethod();
export const selectAllowNewEntries = (collection: Collection) =>
selectors[collection.get('type')].allowNewEntries(collection);
export const selectAllowDeletion = (collection: Collection) =>

View File

@ -1,6 +1,7 @@
import { Map } from 'immutable';
import { CONFIG_REQUEST, CONFIG_SUCCESS, CONFIG_FAILURE, CONFIG_MERGE } from '../actions/config';
import { Config, ConfigAction } from '../types/redux';
import { EDITORIAL_WORKFLOW } from '../constants/publishModes';
const config = (state = Map({ isFetching: true }), action: ConfigAction) => {
switch (action.type) {
@ -24,4 +25,7 @@ const config = (state = Map({ isFetching: true }), action: ConfigAction) => {
export const selectLocale = (state: Config) => state.get('locale', 'en') as string;
export const selectUseWorkflow = (state: Config) =>
state.get('publish_mode') === EDITORIAL_WORKFLOW;
export default config;

View File

@ -1,6 +1,6 @@
import { Map, List, fromJS } from 'immutable';
import { startsWith } from 'lodash';
import { EDITORIAL_WORKFLOW } from 'Constants/publishModes';
import { EDITORIAL_WORKFLOW } from '../constants/publishModes';
import {
UNPUBLISHED_ENTRY_REQUEST,
UNPUBLISHED_ENTRY_REDIRECT,
@ -16,32 +16,33 @@ import {
UNPUBLISHED_ENTRY_PUBLISH_SUCCESS,
UNPUBLISHED_ENTRY_PUBLISH_FAILURE,
UNPUBLISHED_ENTRY_DELETE_SUCCESS,
} from 'Actions/editorialWorkflow';
import { CONFIG_SUCCESS } from 'Actions/config';
} from '../actions/editorialWorkflow';
import { CONFIG_SUCCESS } from '../actions/config';
import { EditorialWorkflowAction, EditorialWorkflow, Entities } from '../types/redux';
const unpublishedEntries = (state = Map(), action) => {
const unpublishedEntries = (state = Map(), action: EditorialWorkflowAction) => {
switch (action.type) {
case CONFIG_SUCCESS: {
const publishMode = action.payload && action.payload.get('publish_mode');
if (publishMode === EDITORIAL_WORKFLOW) {
// Editorial workflow state is explicetelly initiated after the config.
// Editorial workflow state is explicitly initiated after the config.
return Map({ entities: Map(), pages: Map() });
}
return state;
}
case UNPUBLISHED_ENTRY_REQUEST:
return state.setIn(
['entities', `${action.payload.collection}.${action.payload.slug}`, 'isFetching'],
['entities', `${action.payload!.collection}.${action.payload!.slug}`, 'isFetching'],
true,
);
case UNPUBLISHED_ENTRY_REDIRECT:
return state.deleteIn(['entities', `${action.payload.collection}.${action.payload.slug}`]);
return state.deleteIn(['entities', `${action.payload!.collection}.${action.payload!.slug}`]);
case UNPUBLISHED_ENTRY_SUCCESS:
return state.setIn(
['entities', `${action.payload.collection}.${action.payload.entry.slug}`],
fromJS(action.payload.entry),
['entities', `${action.payload!.collection}.${action.payload!.entry.slug}`],
fromJS(action.payload!.entry),
);
case UNPUBLISHED_ENTRIES_REQUEST:
@ -49,7 +50,7 @@ const unpublishedEntries = (state = Map(), action) => {
case UNPUBLISHED_ENTRIES_SUCCESS:
return state.withMutations(map => {
action.payload.entries.forEach(entry =>
action.payload!.entries.forEach(entry =>
map.setIn(
['entities', `${entry.collection}.${entry.slug}`],
fromJS(entry).set('isFetching', false),
@ -58,35 +59,38 @@ const unpublishedEntries = (state = Map(), action) => {
map.set(
'pages',
Map({
...action.payload.pages,
ids: List(action.payload.entries.map(entry => entry.slug)),
...action.payload!.pages,
ids: List(action.payload!.entries.map(entry => entry.slug)),
}),
);
});
case UNPUBLISHED_ENTRY_PERSIST_REQUEST:
case UNPUBLISHED_ENTRY_PERSIST_REQUEST: {
// Update Optimistically
return state.withMutations(map => {
map.setIn(
['entities', `${action.payload.collection}.${action.payload.entry.get('slug')}`],
fromJS(action.payload.entry),
['entities', `${action.payload!.collection}.${action.payload!.entry.get('slug')}`],
fromJS(action.payload!.entry),
);
map.setIn(
[
'entities',
`${action.payload.collection}.${action.payload.entry.get('slug')}`,
`${action.payload!.collection}.${action.payload!.entry.get('slug')}`,
'isPersisting',
],
true,
);
map.updateIn(['pages', 'ids'], List(), list => list.push(action.payload.entry.get('slug')));
map.updateIn(['pages', 'ids'], List(), list =>
list.push(action.payload!.entry.get('slug')),
);
});
}
case UNPUBLISHED_ENTRY_PERSIST_SUCCESS:
// Update Optimistically
return state.deleteIn([
'entities',
`${action.payload.collection}.${action.payload.slug}`,
`${action.payload!.collection}.${action.payload!.slug}`,
'isPersisting',
]);
@ -94,11 +98,16 @@ const unpublishedEntries = (state = Map(), action) => {
// Update Optimistically
return state.withMutations(map => {
map.setIn(
['entities', `${action.payload.collection}.${action.payload.slug}`, 'metaData', 'status'],
action.payload.newStatus,
[
'entities',
`${action.payload!.collection}.${action.payload!.slug}`,
'metaData',
'status',
],
action.payload!.newStatus,
);
map.setIn(
['entities', `${action.payload.collection}.${action.payload.slug}`, 'isUpdatingStatus'],
['entities', `${action.payload!.collection}.${action.payload!.slug}`, 'isUpdatingStatus'],
true,
);
});
@ -106,55 +115,49 @@ const unpublishedEntries = (state = Map(), action) => {
case UNPUBLISHED_ENTRY_STATUS_CHANGE_SUCCESS:
case UNPUBLISHED_ENTRY_STATUS_CHANGE_FAILURE:
return state.setIn(
['entities', `${action.payload.collection}.${action.payload.slug}`, 'isUpdatingStatus'],
['entities', `${action.payload!.collection}.${action.payload!.slug}`, 'isUpdatingStatus'],
false,
);
case UNPUBLISHED_ENTRY_PUBLISH_REQUEST:
return state.setIn(
['entities', `${action.payload.collection}.${action.payload.slug}`, 'isPublishing'],
['entities', `${action.payload!.collection}.${action.payload!.slug}`, 'isPublishing'],
true,
);
case UNPUBLISHED_ENTRY_PUBLISH_SUCCESS:
case UNPUBLISHED_ENTRY_PUBLISH_FAILURE:
return state.withMutations(map => {
map.deleteIn(['entities', `${action.payload.collection}.${action.payload.slug}`]);
map.deleteIn(['entities', `${action.payload!.collection}.${action.payload!.slug}`]);
});
case UNPUBLISHED_ENTRY_DELETE_SUCCESS:
return state.deleteIn(['entities', `${action.payload.collection}.${action.payload.slug}`]);
return state.deleteIn(['entities', `${action.payload!.collection}.${action.payload!.slug}`]);
default:
return state;
}
};
export const selectUnpublishedEntry = (state, collection, slug) =>
state && state.getIn(['entities', `${collection}.${slug}`]);
export const selectUnpublishedEntry = (
state: EditorialWorkflow,
collection: string,
slug: string,
) => state && state.getIn(['entities', `${collection}.${slug}`]);
export const selectUnpublishedEntriesByStatus = (state, status) => {
export const selectUnpublishedEntriesByStatus = (state: EditorialWorkflow, status: string) => {
if (!state) return null;
return state
.get('entities')
.filter(entry => entry.getIn(['metaData', 'status']) === status)
.valueSeq();
const entities = state.get('entities') as Entities;
return entities.filter(entry => entry.getIn(['metaData', 'status']) === status).valueSeq();
};
export const selectUnpublishedSlugs = (state, collection) => {
export const selectUnpublishedSlugs = (state: EditorialWorkflow, collection: string) => {
if (!state.get('entities')) return null;
return state
.get('entities')
.filter((v, k) => startsWith(k, `${collection}.`))
const entities = state.get('entities') as Entities;
return entities
.filter((_v, k) => startsWith(k as string, `${collection}.`))
.map(entry => entry.get('slug'))
.valueSeq();
};
export const selectEditingWorkflowDraft = state => {
const entry = state.entryDraft.get('entry');
const useWorkflow = state.config.get('publish_mode') === EDITORIAL_WORKFLOW;
const workflowDraft = entry && !entry.isEmpty() && useWorkflow;
return workflowDraft;
};
export default unpublishedEntries;

View File

@ -23,9 +23,9 @@ import {
EntryFailurePayload,
EntryDeletePayload,
EntriesRequestPayload,
EntryDraft,
} from '../types/redux';
import { isAbsolutePath, basename } from 'netlify-cms-lib-util/src';
import { EDITORIAL_WORKFLOW } from '../constants/publishModes';
let collection: string;
let loadedEntries: EntryObject[];
@ -144,8 +144,7 @@ export const selectMediaFolder = (
) => {
let mediaFolder = config.get('media_folder');
const useWorkflow = config.get('publish_mode') === EDITORIAL_WORKFLOW;
if (useWorkflow && collection && collection.has('media_folder')) {
if (collection && collection.has('media_folder')) {
if (entryPath) {
const entryDir = dirname(entryPath);
mediaFolder = join(entryDir, collection.get('media_folder') as string);
@ -189,12 +188,17 @@ export const selectMediaFilePublicPath = (
let publicFolder = config.get('public_folder');
const useWorkflow = config.get('publish_mode') === EDITORIAL_WORKFLOW;
if (useWorkflow && collection && collection.has('public_folder')) {
if (collection && collection.has('public_folder')) {
publicFolder = collection.get('public_folder') as string;
}
return join(publicFolder, basename(mediaPath));
};
export const selectEditingDraft = (state: EntryDraft) => {
const entry = state.get('entry');
const workflowDraft = entry && !entry.isEmpty();
return workflowDraft;
};
export default entries;

View File

@ -18,11 +18,29 @@ import {
MEDIA_DISPLAY_URL_REQUEST,
MEDIA_DISPLAY_URL_SUCCESS,
MEDIA_DISPLAY_URL_FAILURE,
} from 'Actions/mediaLibrary';
import { selectEditingWorkflowDraft } from 'Reducers/editorialWorkflow';
import { selectIntegration } from 'Reducers';
} from '../actions/mediaLibrary';
import { selectEditingDraft } from './entries';
import { selectIntegration } from './';
import {
State,
MediaLibraryAction,
MediaLibraryInstance,
MediaFile,
MediaFileMap,
DisplayURLState,
} from '../types/redux';
const defaultState = {
const defaultState: {
isVisible: boolean;
showMediaButton: boolean;
controlMedia: Map<string, string>;
displayURLs: Map<string, string>;
externalLibrary?: MediaLibraryInstance;
controlID?: string;
page?: number;
files?: MediaFile[];
config: Map<string, string>;
} = {
isVisible: false,
showMediaButton: true,
controlMedia: Map(),
@ -30,7 +48,7 @@ const defaultState = {
config: Map(),
};
const mediaLibrary = (state = Map(defaultState), action) => {
const mediaLibrary = (state = Map(defaultState), action: MediaLibraryAction) => {
switch (action.type) {
case MEDIA_LIBRARY_CREATE:
return state.withMutations(map => {
@ -104,7 +122,7 @@ const mediaLibrary = (state = Map(defaultState), action) => {
map.set('dynamicSearchQuery', dynamicSearchQuery);
map.set('dynamicSearchActive', !!dynamicSearchQuery);
if (page && page > 1) {
const updatedFiles = map.get('files').concat(filesWithKeys);
const updatedFiles = (map.get('files') as MediaFile[]).concat(filesWithKeys);
map.set('files', updatedFiles);
} else {
map.set('files', filesWithKeys);
@ -128,7 +146,8 @@ const mediaLibrary = (state = Map(defaultState), action) => {
}
return state.withMutations(map => {
const fileWithKey = { ...file, key: uuid() };
const updatedFiles = [fileWithKey, ...map.get('files')];
const files = map.get('files') as MediaFile[];
const updatedFiles = [fileWithKey, ...files];
map.set('files', updatedFiles);
map.set('isPersisting', false);
});
@ -149,9 +168,8 @@ const mediaLibrary = (state = Map(defaultState), action) => {
return state;
}
return state.withMutations(map => {
const updatedFiles = map
.get('files')
.filter(file => (key ? file.key !== key : file.id !== id));
const files = map.get('files') as MediaFile[];
const updatedFiles = files.filter(file => (key ? file.key !== key : file.id !== id));
map.set('files', updatedFiles);
map.deleteIn(['displayURLs', id]);
map.set('isDeleting', false);
@ -191,17 +209,17 @@ const mediaLibrary = (state = Map(defaultState), action) => {
}
};
export function selectMediaFiles(state) {
export function selectMediaFiles(state: State) {
const { mediaLibrary, entryDraft } = state;
const workflowDraft = selectEditingWorkflowDraft(state);
const editingDraft = selectEditingDraft(state.entryDraft);
const integration = selectIntegration(state, null, 'assetStore');
let files;
if (workflowDraft && !integration) {
files = entryDraft
.getIn(['entry', 'mediaFiles'], List())
.toJS()
.map(file => ({ key: file.id, ...file }));
if (editingDraft && !integration) {
const entryFiles = entryDraft
.getIn(['entry', 'mediaFiles'], List<MediaFileMap>())
.toJS() as MediaFile[];
files = entryFiles.map(file => ({ key: file.id, ...file }));
} else {
files = mediaLibrary.get('files') || [];
}
@ -209,14 +227,17 @@ export function selectMediaFiles(state) {
return files;
}
export function selectMediaFileByPath(state, path) {
export function selectMediaFileByPath(state: State, path: string) {
const files = selectMediaFiles(state);
const file = files.find(file => file.path === path);
return file;
}
export function selectMediaDisplayURL(state, id) {
const displayUrlState = state.mediaLibrary.getIn(['displayURLs', id], Map());
export function selectMediaDisplayURL(state: State, id: string) {
const displayUrlState = state.mediaLibrary.getIn(
['displayURLs', id],
(Map() as unknown) as DisplayURLState,
);
return displayUrlState;
}

View File

@ -21,4 +21,11 @@ export interface StaticallyTypedRecord<T> {
some<K extends keyof T>(predicate: (value: T[K], key: K, iter: this) => boolean): boolean;
mapKeys<K extends keyof T, V>(mapFunc: (key: K, value: StaticallyTypedRecord<T>) => V): V[];
find<K extends keyof T>(findFunc: (value: T[K]) => boolean): T[K];
filter<K extends keyof T>(
predicate: (value: T[K], key: K, iter: this) => boolean,
): StaticallyTypedRecord<T>;
valueSeq<K extends keyof T>(): T[K][];
map<K extends keyof T, V>(
mapFunc: (value: T[K]) => V,
): StaticallyTypedRecord<{ [key: string]: V }>;
}

View File

@ -2,6 +2,7 @@ import { Action } from 'redux';
import { StaticallyTypedRecord } from './immutable';
import { Map, List } from 'immutable';
import AssetProxy from '../valueObjects/AssetProxy';
import { ImplementationMediaFile } from 'netlify-cms-lib-util';
export type SlugConfig = StaticallyTypedRecord<{
encoding: string;
@ -11,6 +12,17 @@ export type SlugConfig = StaticallyTypedRecord<{
type BackendObject = {
name: string;
repo?: string | null;
open_authoring?: boolean;
branch?: string;
api_root?: string;
squash_merges?: boolean;
use_graphql?: boolean;
preview_context?: string;
identity_url?: string;
gateway_url?: string;
large_media_url?: string;
use_large_media_transforms_in_media_library?: boolean;
};
type Backend = StaticallyTypedRecord<Backend> & BackendObject;
@ -24,6 +36,8 @@ export type Config = StaticallyTypedRecord<{
locale?: string;
slug: SlugConfig;
media_folder_relative?: boolean;
base_url?: string;
site_id?: string;
site_url?: string;
show_preview_links?: boolean;
}>;
@ -36,7 +50,7 @@ type Pages = StaticallyTypedRecord<PagesObject>;
type EntitiesObject = { [key: string]: EntryMap };
type Entities = StaticallyTypedRecord<EntitiesObject>;
export type Entities = StaticallyTypedRecord<EntitiesObject>;
export type Entries = StaticallyTypedRecord<{
pages: Pages & PagesObject;
@ -45,7 +59,10 @@ export type Entries = StaticallyTypedRecord<{
export type Deploys = StaticallyTypedRecord<{}>;
export type EditorialWorkflow = StaticallyTypedRecord<{}>;
export type EditorialWorkflow = StaticallyTypedRecord<{
pages: Pages & PagesObject;
entities: Entities & EntitiesObject;
}>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type EntryObject = {
@ -56,6 +73,7 @@ export type EntryObject = {
collection: string;
mediaFiles: List<MediaFileMap>;
newRecord: boolean;
metaData: { status: string };
};
export type EntryMap = StaticallyTypedRecord<EntryObject>;
@ -120,7 +138,7 @@ export type Collections = StaticallyTypedRecord<{ [path: string]: Collection & C
export type Medias = StaticallyTypedRecord<{ [path: string]: AssetProxy | undefined }>;
interface MediaLibraryInstance {
export interface MediaLibraryInstance {
show: (args: {
id?: string;
value?: string;
@ -136,23 +154,17 @@ interface MediaLibraryInstance {
export type DisplayURL = { id: string; path: string } | string;
export interface MediaFile {
name: string;
id: string;
size?: number;
displayURL?: DisplayURL;
path: string;
draft?: boolean;
url?: string;
}
export type MediaFile = ImplementationMediaFile & { key?: string };
export type MediaFileMap = StaticallyTypedRecord<MediaFile>;
export type DisplayURLState = StaticallyTypedRecord<{
type DisplayURLStateObject = {
isFetching: boolean;
url?: string;
err?: Error;
}>;
};
export type DisplayURLState = StaticallyTypedRecord<DisplayURLStateObject>;
interface DisplayURLsObject {
[id: string]: DisplayURLState;
@ -262,3 +274,43 @@ export interface EntriesAction extends Action<string> {
export interface CollectionsAction extends Action<string> {
payload?: StaticallyTypedRecord<{ collections: List<Collection> }>;
}
// Action shape shared by every editorial-workflow reducer case. The payload is
// an intersection of all per-action payload variants rather than a
// discriminated union, so each case handler reads the members it needs via
// non-null assertions. NOTE(review): a tagged union keyed on `type` would be
// safer — consider refactoring.
export interface EditorialWorkflowAction extends Action<string> {
  payload?: StaticallyTypedRecord<{ publish_mode: string }> & {
    // e.g. UNPUBLISHED_ENTRY_PERSIST_*: collection plus a plain entry object.
    collection: string;
    entry: { slug: string };
  } & {
    // e.g. UNPUBLISHED_ENTRY_REQUEST/REDIRECT: collection plus slug only.
    collection: string;
    slug: string;
  } & {
    // UNPUBLISHED_ENTRIES_SUCCESS: pagination info plus the fetched entries.
    pages: [];
    entries: { collection: string; slug: string }[];
  } & {
    // Variant where the entry arrives as an immutable record.
    collection: string;
    entry: StaticallyTypedRecord<{ slug: string }>;
  } & {
    // Status-change actions additionally carry the target workflow status.
    collection: string;
    slug: string;
    newStatus: string;
  };
}
// Action shape shared by every media-library reducer case. Like
// EditorialWorkflowAction, the payload intersects all per-action variants
// instead of discriminating on `type`; handlers pick out the fields relevant
// to their action. NOTE(review): a discriminated union would be safer.
export interface MediaLibraryAction extends Action<string> {
  payload: MediaLibraryInstance & {
    // MEDIA_LIBRARY_OPEN and friends: which control triggered the library.
    controlID: string;
    forImage: boolean;
    privateUpload: boolean;
    config: Map<string, string>;
  } & { mediaPath: string | string[] } & { page: number } & {
    // MEDIA_LOAD_SUCCESS: the files fetched for the (possibly paginated) view.
    files: MediaFile[];
    page: number;
    canPaginate: boolean;
    dynamicSearch: boolean;
    dynamicSearchQuery: boolean;
  } & {
    // MEDIA_PERSIST_SUCCESS: the newly-uploaded file.
    file: MediaFile;
    privateUpload: boolean;
  } & {
    // MEDIA_DELETE_SUCCESS: identity of the removed file.
    file: { id: string; key: string; privateUpload: boolean };
  } & { key: string } & { url: string } & { err: Error };
}

View File

@ -1,5 +1,5 @@
import { isBoolean } from 'lodash';
import { ImplementationMediaFile } from '../backend';
import { ImplementationMediaFile } from 'netlify-cms-lib-util';
interface Options {
partial?: boolean;

View File

@ -0,0 +1,11 @@
declare module 'netlify-cms-lib-auth' {
  /**
   * OAuth token helper from the netlify-cms-lib-auth JS package.
   * Ambient declaration only — the implementation lives in that package.
   */
  class NetlifyAuthenticator {
    // Parameter initializers are not allowed in ambient contexts (TS2371),
    // so the optional-config constructor is declared with `?` instead of `= {}`.
    constructor(config?: {});

    /** Exchange a refresh token for a fresh access token. */
    refresh: (args: {
      provider: string;
      refresh_token: string;
    }) => Promise<{ token: string; refresh_token: string }>;
  }
  export default NetlifyAuthenticator;
}

View File

@ -1,55 +0,0 @@
declare module 'netlify-cms-lib-util' {
export const isAbsolutePath: (path: string) => boolean;
export const basename: (path: string, extension?: string) => string;
export const EDITORIAL_WORKFLOW_ERROR: 'EDITORIAL_WORKFLOW_ERROR';
export const getBlobSHA: (blob: Blob) => string;
export interface CursorType {
create: (args: unknown) => Cursor;
updateStore: (args: unknown) => void;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
unwrapData: () => [Map<string, any>, CursorType];
actions: Set;
data: Map;
meta: Map;
store: Map;
}
export const Cursor: CursorType;
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol(
'cursor key for compatibility with old backends',
);
export class APIError extends Error {
status: number;
constructor(message?: string, responseStatus: number, backend: string);
}
export class EditorialWorkflowError extends Error {
constructor(message?: string, notUnderEditorialWorkflow: boolean);
notUnderEditorialWorkflow: boolean;
}
export const getAllResponses: (url: string, options: RequestInit) => Promise<Response[]>;
export const flowAsync: (funcs: Function[]) => () => Promise<unknown>;
export const localForage: {
setItem: <T>(key: string, item: T) => Promise<T>;
getItem: <T>(key: string) => Promise<T | null>;
removeItem: (key: string) => Promise<void>;
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const onlySuccessfulPromises: (...args: any[]) => any;
export const resolvePromiseProperties: (
object: Record<string, Promise<unknown>>,
) => Promise<unknown>;
export type ResponseParser<T> = (res: Response) => Promise<T>;
export const responseParser: ({ format }: { format: 'blob' | 'json' | 'text' }) => ResponseParser;
}

View File

@ -14,11 +14,12 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"dependencies": {
"js-sha256": "^0.9.0",
"localforage": "^1.7.3"
"localforage": "^1.7.3",
"semaphore": "^1.1.0"
},
"peerDependencies": {
"immutable": "^3.7.6",

View File

@ -0,0 +1,78 @@
export const CMS_BRANCH_PREFIX = 'cms';
export const DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
export const MERGE_COMMIT_MESSAGE = 'Automatically generated. Merged on Netlify CMS.';

// Labels that mark a PR as managed by the CMS all carry this prefix.
const NETLIFY_CMS_LABEL_PREFIX = 'netlify-cms/';

/** Whether `label` is a CMS workflow-status label. */
export function isCMSLabel(label: string) {
  return label.startsWith(NETLIFY_CMS_LABEL_PREFIX);
}

/** Extract the workflow status from a CMS label, e.g. 'netlify-cms/draft' -> 'draft'. */
export function labelToStatus(label: string) {
  return label.slice(NETLIFY_CMS_LABEL_PREFIX.length);
}

/** Build the CMS label for a workflow status, e.g. 'draft' -> 'netlify-cms/draft'. */
export function statusToLabel(status: string) {
  return NETLIFY_CMS_LABEL_PREFIX + status;
}
/** Compose the editorial-workflow content key: '<collection>/<slug>'. */
export function generateContentKey(collectionName: string, slug: string) {
  return collectionName + '/' + slug;
}

/** Split a content key back into collection and slug; the slug may itself contain '/'. */
export function parseContentKey(contentKey: string) {
  // Only the first '/' separates collection from slug.
  const separator = contentKey.indexOf('/');
  const collection = contentKey.substring(0, separator);
  const slug = contentKey.substring(separator + 1);
  return { collection, slug };
}
// Error raised by fetch helpers, extended with the HTTP response status code
// so callers can branch on e.g. 404 vs 500.
export interface FetchError extends Error {
  status: number;
}
/**
 * Read content by id, preferring a localForage-cached copy when available.
 *
 * @param id - cache identity (e.g. a blob sha); when falsy, caching is skipped entirely.
 * @param fetchContent - fallback that retrieves the content when it is not cached.
 * @param localForage - storage instance backing the cache.
 * @param isText - selects the cache key flavor: `gh.<id>` for text, `gh.<id>.blob` otherwise.
 */
export const readFile = async (
  id: string | null | undefined,
  fetchContent: () => Promise<string | Blob>,
  localForage: LocalForage,
  isText: boolean,
) => {
  let key: string | null = null;
  if (id) {
    key = isText ? `gh.${id}` : `gh.${id}.blob`;
  }

  if (key) {
    const cached = await localForage.getItem<string | Blob>(key);
    if (cached) {
      return cached;
    }
  }

  const content = await fetchContent();
  if (key) {
    // Best-effort cache write; intentionally not awaited.
    localForage.setItem(key, content);
  }
  return content;
};
/**
 * Keywords for inferring a status that will provide a deploy preview URL.
 */
const PREVIEW_CONTEXT_KEYWORDS = ['deploy'];

/**
 * Check a given status context string to determine if it provides a link to a
 * deploy preview. Checks for an exact match against `previewContext` if given,
 * otherwise checks for inclusion of a value from `PREVIEW_CONTEXT_KEYWORDS`.
 *
 * `previewContext` defaults to '' so callers with no custom context configured
 * may omit it and fall back to keyword matching.
 */
export const isPreviewContext = (context: string, previewContext = '') => {
  if (previewContext) {
    return context === previewContext;
  }
  return PREVIEW_CONTEXT_KEYWORDS.some(keyword => context.includes(keyword));
};

export enum PreviewState {
  Other = 'other',
  Success = 'success',
}

/**
 * Retrieve a deploy preview URL from an array of statuses. By default, a
 * matching status is inferred via `isPreviewContext`. Returns `undefined`
 * when no status matches.
 */
export const getPreviewStatus = (
  statuses: {
    context: string;
    target_url: string;
    state: PreviewState;
  }[],
  previewContext = '',
) => {
  return statuses.find(({ context }) => {
    return isPreviewContext(context, previewContext);
  });
};

View File

@ -1,7 +1,12 @@
export const API_ERROR = 'API_ERROR';
export default class APIError extends Error {
constructor(message, status, api, meta = {}) {
message: string;
status: null | number;
api: string;
meta: {};
constructor(message: string, status: null | number, api: string, meta = {}) {
super(message);
this.message = message;
this.status = status;

View File

@ -1,122 +0,0 @@
import { fromJS, Map, Set } from 'immutable';
const jsToMap = obj => {
if (obj === undefined) {
return Map();
}
const immutableObj = fromJS(obj);
if (!Map.isMap(immutableObj)) {
throw new Error('Object must be equivalent to a Map.');
}
return immutableObj;
};
const knownMetaKeys = Set(['index', 'count', 'pageSize', 'pageCount', 'usingOldPaginationAPI']);
const filterUnknownMetaKeys = meta => meta.filter((v, k) => knownMetaKeys.has(k));
/*
createCursorMap takes one of three signatures:
- () -> cursor with empty actions, data, and meta
- (cursorMap: <object/Map with optional actions, data, and meta keys>) -> cursor
- (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>) -> cursor
*/
const createCursorMap = (...args) => {
const { actions, data, meta } =
args.length === 1
? jsToMap(args[0]).toObject()
: { actions: args[0], data: args[1], meta: args[2] };
return Map({
// actions are a Set, rather than a List, to ensure an efficient .has
actions: Set(actions),
// data and meta are Maps
data: jsToMap(data),
meta: jsToMap(meta).update(filterUnknownMetaKeys),
});
};
const hasAction = (cursorMap, action) => cursorMap.hasIn(['actions', action]);
const getActionHandlers = (cursorMap, handler) =>
cursorMap
.get('actions', Set())
.toMap()
.map(action => handler(action));
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface
export default class Cursor {
static create(...args) {
return new Cursor(...args);
}
constructor(...args) {
if (args[0] instanceof Cursor) {
return args[0];
}
this.store = createCursorMap(...args);
this.actions = this.store.get('actions');
this.data = this.store.get('data');
this.meta = this.store.get('meta');
}
updateStore(...args) {
return new Cursor(this.store.update(...args));
}
updateInStore(...args) {
return new Cursor(this.store.updateIn(...args));
}
hasAction(action) {
return hasAction(this.store, action);
}
addAction(action) {
return this.updateStore('actions', actions => actions.add(action));
}
removeAction(action) {
return this.updateStore('actions', actions => actions.delete(action));
}
setActions(actions) {
return this.updateStore(store => store.set('actions', Set(actions)));
}
mergeActions(actions) {
return this.updateStore('actions', oldActions => oldActions.union(actions));
}
getActionHandlers(handler) {
return getActionHandlers(this.store, handler);
}
setData(data) {
return new Cursor(this.store.set('data', jsToMap(data)));
}
mergeData(data) {
return new Cursor(this.store.mergeIn(['data'], jsToMap(data)));
}
wrapData(data) {
return this.updateStore('data', oldData => jsToMap(data).set('wrapped_cursor_data', oldData));
}
unwrapData() {
return [
this.store.get('data').delete('wrapped_cursor_data'),
this.updateStore('data', data => data.get('wrapped_cursor_data')),
];
}
clearData() {
return this.updateStore('data', () => Map());
}
setMeta(meta) {
return this.updateStore(store => store.set('meta', jsToMap(meta)));
}
mergeMeta(meta) {
return this.updateStore(store => store.update('meta', oldMeta => oldMeta.merge(jsToMap(meta))));
}
}
// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol('cursor key for compatibility with old backends');

View File

@ -0,0 +1,161 @@
import { fromJS, Map, Set } from 'immutable';
// Plain-object view of a cursor's backing store: the set of supported action
// names plus arbitrary data and meta maps.
type CursorStoreObject = {
  actions: Set<string>;
  data: Map<string, unknown>;
  meta: Map<string, unknown>;
};

// Typed facade over the immutable Map that backs a Cursor. Only the Map
// operations the Cursor class actually uses are declared here.
export type CursorStore = {
  get<K extends keyof CursorStoreObject>(
    key: K,
    defaultValue?: CursorStoreObject[K],
  ): CursorStoreObject[K];
  getIn<V>(path: string[]): V;
  set<K extends keyof CursorStoreObject, V extends CursorStoreObject[K]>(
    key: K,
    value: V,
  ): CursorStoreObject[K];
  setIn(path: string[], value: unknown): CursorStore;
  hasIn(path: string[]): boolean;
  mergeIn(path: string[], value: unknown): CursorStore;
  // `update`/`updateIn` mirror immutable.js' heavily-overloaded signatures,
  // which cannot be expressed precisely here.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  update: (...args: any[]) => CursorStore;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  updateIn: (...args: any[]) => CursorStore;
};

// Callback invoked once per supported cursor action name.
type ActionHandler = (action: string) => unknown;
// Normalize a plain JS value into an immutable Map; an absent value becomes an
// empty Map. Throws when the value converts to something other than a Map
// (e.g. an array or primitive).
const jsToMap = (obj: {}) => {
  if (obj === undefined) {
    return Map();
  }
  const converted = fromJS(obj);
  if (!Map.isMap(converted)) {
    throw new Error('Object must be equivalent to a Map.');
  }
  return converted;
};
// Only these meta keys survive cursor construction; anything else is dropped.
const knownMetaKeys = Set(['index', 'count', 'pageSize', 'pageCount', 'usingOldPaginationAPI']);

// Strip meta entries whose key is not one of `knownMetaKeys`.
function filterUnknownMetaKeys(meta: Map<string, string>) {
  return meta.filter((_value, key) => knownMetaKeys.has(key as string));
}
/*
  createCursorStore accepts one of three call shapes:
  - () -> store with empty actions, data, and meta
  - (cursorMap: <object/Map with optional actions, data, and meta keys>)
  - (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>)
*/
const createCursorStore = (...args: {}[]) => {
  let actions;
  let data;
  let meta;
  if (args.length === 1) {
    // Single argument: an object/Map holding optional actions/data/meta keys.
    ({ actions, data, meta } = jsToMap(args[0]).toObject());
  } else {
    // Positional form: (actions, data, meta?).
    [actions, data, meta] = args;
  }
  return Map({
    // actions are a Set, rather than a List, to ensure an efficient .has
    actions: Set(actions),
    // data and meta are Maps; unknown meta keys are discarded
    data: jsToMap(data),
    meta: jsToMap(meta).update(filterUnknownMetaKeys),
  }) as CursorStore;
};
// True when `action` is present in the store's action set.
function hasAction(store: CursorStore, action: string) {
  return store.hasIn(['actions', action]);
}

// Build a map keyed by action name whose values are `handler(action)`.
function getActionHandlers(store: CursorStore, handler: ActionHandler) {
  const actions = store.get('actions', Set<string>());
  return actions.toMap().map(action => handler(action as string));
}
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface
export default class Cursor {
store?: CursorStore;
actions?: Set<string>;
data?: Map<string, unknown>;
meta?: Map<string, unknown>;
static create(...args: {}[]) {
return new Cursor(...args);
}
constructor(...args: {}[]) {
if (args[0] instanceof Cursor) {
return args[0] as Cursor;
}
this.store = createCursorStore(...args);
this.actions = this.store.get('actions');
this.data = this.store.get('data');
this.meta = this.store.get('meta');
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
updateStore(...args: any[]) {
return new Cursor(this.store!.update(...args));
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
updateInStore(...args: any[]) {
return new Cursor(this.store!.updateIn(...args));
}
hasAction(action: string) {
return hasAction(this.store!, action);
}
addAction(action: string) {
return this.updateStore('actions', (actions: Set<string>) => actions.add(action));
}
removeAction(action: string) {
return this.updateStore('actions', (actions: Set<string>) => actions.delete(action));
}
setActions(actions: Iterable<string>) {
return this.updateStore((store: CursorStore) => store.set('actions', Set<string>(actions)));
}
mergeActions(actions: Set<string>) {
return this.updateStore('actions', (oldActions: Set<string>) => oldActions.union(actions));
}
getActionHandlers(handler: ActionHandler) {
return getActionHandlers(this.store!, handler);
}
setData(data: {}) {
return new Cursor(this.store!.set('data', jsToMap(data)));
}
mergeData(data: {}) {
return new Cursor(this.store!.mergeIn(['data'], jsToMap(data)));
}
wrapData(data: {}) {
return this.updateStore('data', (oldData: Map<string, unknown>) =>
jsToMap(data).set('wrapped_cursor_data', oldData),
);
}
unwrapData() {
return [
this.store!.get('data').delete('wrapped_cursor_data'),
this.updateStore('data', (data: Map<string, unknown>) => data.get('wrapped_cursor_data')),
] as [Map<string, unknown>, Cursor];
}
clearData() {
return this.updateStore('data', () => Map());
}
setMeta(meta: {}) {
return this.updateStore((store: CursorStore) => store.set('meta', jsToMap(meta)));
}
mergeMeta(meta: {}) {
return this.updateStore((store: CursorStore) =>
store.update('meta', (oldMeta: Map<string, unknown>) => oldMeta.merge(jsToMap(meta))),
);
}
}
// Temporary escape hatch: lets cursors ride along on the interface between
// backend.js and the individual backends without touching legacy backend code.
// Slated for removal once old backends are wrapped in a compatibility layer
// as part of the backend API refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol('cursor key for compatibility with old backends');

View File

@ -0,0 +1,65 @@
import * as api from '../API';
// Unit tests for the shared API helpers: content keys, CMS labels and preview contexts.
describe('Api', () => {
  describe('generateContentKey', () => {
    it('should generate content key', () => {
      expect(api.generateContentKey('posts', 'dir1/dir2/post-title')).toBe(
        'posts/dir1/dir2/post-title',
      );
    });
  });
  describe('parseContentKey', () => {
    it('should parse content key', () => {
      expect(api.parseContentKey('posts/dir1/dir2/post-title')).toEqual({
        collection: 'posts',
        slug: 'dir1/dir2/post-title',
      });
    });
  });
  describe('isCMSLabel', () => {
    it('should return true for CMS label', () => {
      expect(api.isCMSLabel('netlify-cms/draft')).toBe(true);
    });
    it('should return false for non CMS label', () => {
      expect(api.isCMSLabel('other/label')).toBe(false);
    });
  });
  describe('labelToStatus', () => {
    it('should get status from label', () => {
      expect(api.labelToStatus('netlify-cms/draft')).toBe('draft');
    });
  });
  describe('statusToLabel', () => {
    it('should generate label from status', () => {
      expect(api.statusToLabel('draft')).toBe('netlify-cms/draft');
    });
  });
  describe('isPreviewContext', () => {
    it('should return true for default preview context', () => {
      expect(api.isPreviewContext('deploy', '')).toBe(true);
    });
    it('should return false for non default preview context', () => {
      expect(api.isPreviewContext('other', '')).toBe(false);
    });
    it('should return true for custom preview context', () => {
      expect(api.isPreviewContext('ci/custom_preview', 'ci/custom_preview')).toBe(true);
    });
  });
  describe('getPreviewStatus', () => {
    it('should return preview status on matching context', () => {
      expect(api.getPreviewStatus([{ context: 'deploy' }])).toEqual({ context: 'deploy' });
    });
    // Fixed test name: this case exercises a NON matching context.
    it('should return undefined on non matching context', () => {
      expect(api.getPreviewStatus([{ context: 'other' }])).toBeUndefined();
    });
  });
});

View File

@ -1,6 +1,5 @@
import { parseLinkHeader, getAllResponses, getCollectionDepth } from '../backendUtil';
import { parseLinkHeader, getAllResponses, getPathDepth } from '../backendUtil';
import { oneLine } from 'common-tags';
import { Map } from 'immutable';
import nock from 'nock';
describe('parseLinkHeader', () => {
@ -71,12 +70,12 @@ describe('getAllResponses', () => {
});
});
describe('getCollectionDepth', () => {
it('should return 1 for collection with no path', () => {
expect(getCollectionDepth(Map({}))).toBe(1);
describe('getPathDepth', () => {
it('should return 1 for empty string', () => {
expect(getPathDepth('')).toBe(1);
});
it('should return 2 for collection with path of one nested folder', () => {
expect(getCollectionDepth(Map({ path: '{{year}}/{{slug}}' }))).toBe(2);
it('should return 2 for path of one nested folder', () => {
expect(getPathDepth('{{year}}/{{slug}}')).toBe(2);
});
});

View File

@ -0,0 +1,58 @@
import { getMediaAsBlob, getMediaDisplayURL } from '../implementation';
// Unit tests for the shared media helpers, using mock readFile/semaphore collaborators.
describe('implementation', () => {
  describe('getMediaAsBlob', () => {
    it('should return response blob on non svg file', async () => {
      const blob = {};
      const readFile = jest.fn().mockResolvedValue(blob);
      await expect(getMediaAsBlob('static/media/image.png', 'sha', readFile)).resolves.toBe(blob);
      expect(readFile).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledWith('static/media/image.png', 'sha', {
        parseText: false,
      });
    });
    it('should return text blob on svg file', async () => {
      const text = 'svg';
      const readFile = jest.fn().mockResolvedValue(text);
      await expect(getMediaAsBlob('static/media/logo.svg', 'sha', readFile)).resolves.toEqual(
        new Blob([text], { type: 'image/svg+xml' }),
      );
      expect(readFile).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledWith('static/media/logo.svg', 'sha', {
        parseText: true,
      });
    });
  });
  describe('getMediaDisplayURL', () => {
    it('should return createObjectURL result', async () => {
      const blob = {};
      const readFile = jest.fn().mockResolvedValue(blob);
      const semaphore = { take: jest.fn(callback => callback()), leave: jest.fn() };
      // URL.createObjectURL is synchronous and returns a string, so the stub uses
      // mockReturnValue rather than mockResolvedValue (a resolved-value mock would
      // make the stub return a Promise, misrepresenting the real API).
      global.URL.createObjectURL = jest.fn().mockReturnValue('blob:http://localhost:8080/blob-id');
      await expect(
        getMediaDisplayURL({ path: 'static/media/image.png', id: 'sha' }, readFile, semaphore),
      ).resolves.toBe('blob:http://localhost:8080/blob-id');
      expect(semaphore.take).toHaveBeenCalledTimes(1);
      expect(semaphore.leave).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledWith('static/media/image.png', 'sha', {
        parseText: false,
      });
      expect(global.URL.createObjectURL).toHaveBeenCalledTimes(1);
      expect(global.URL.createObjectURL).toHaveBeenCalledWith(blob);
    });
  });
});

View File

@ -0,0 +1,20 @@
import unsentRequest from '../unsentRequest';
// Tests for unsentRequest.withHeaders in both its curried and direct-call forms.
describe('unsentRequest', () => {
  describe('withHeaders', () => {
    it('should create new request with headers', () => {
      expect(
        unsentRequest
          .withHeaders({ Authorization: 'token' })('path')
          .toJS(),
      ).toEqual({ url: 'path', headers: { Authorization: 'token' } });
    });
    it('should add headers to existing request', () => {
      expect(unsentRequest.withHeaders({ Authorization: 'token' }, 'path').toJS()).toEqual({
        url: 'path',
        headers: { Authorization: 'token' },
      });
    });
  });
});

View File

@ -1,10 +1,12 @@
import semaphore from 'semaphore';
export const asyncLock = () => {
export type AsyncLock = { release: () => void; acquire: () => Promise<boolean> };
export const asyncLock = (): AsyncLock => {
let lock = semaphore(1);
const acquire = (timeout = 15000) => {
const promise = new Promise(resolve => {
const promise = new Promise<boolean>(resolve => {
// this makes sure a caller doesn't gets stuck forever awaiting on the lock
const timeoutId = setTimeout(() => {
// we reset the lock in that case to allow future consumers to use it without being blocked

View File

@ -3,11 +3,14 @@ import { map } from 'lodash/fp';
import { fromJS } from 'immutable';
import { fileExtension } from './path';
import unsentRequest from './unsentRequest';
import APIError from './APIError';
export const filterByPropExtension = (extension, propName) => arr =>
type Formatter = (res: Response) => Promise<string | Blob | unknown>;
export const filterByPropExtension = (extension: string, propName: string) => <T>(arr: T[]) =>
arr.filter(el => fileExtension(get(el, propName)) === extension);
const catchFormatErrors = (format, formatter) => res => {
const catchFormatErrors = (format: string, formatter: Formatter) => (res: Response) => {
try {
return formatter(res);
} catch (err) {
@ -18,34 +21,51 @@ const catchFormatErrors = (format, formatter) => res => {
};
const responseFormatters = fromJS({
json: async res => {
const contentType = res.headers.get('Content-Type');
json: async (res: Response) => {
const contentType = res.headers.get('Content-Type') || '';
if (!contentType.startsWith('application/json') && !contentType.startsWith('text/json')) {
throw new Error(`${contentType} is not a valid JSON Content-Type`);
}
return res.json();
},
text: async res => res.text(),
blob: async res => res.blob(),
}).mapEntries(([format, formatter]) => [format, catchFormatErrors(format, formatter)]);
text: async (res: Response) => res.text(),
blob: async (res: Response) => res.blob(),
}).mapEntries(([format, formatter]: [string, Formatter]) => [
format,
catchFormatErrors(format, formatter),
]);
export const parseResponse = async (res, { expectingOk = true, format = 'text' } = {}) => {
export const parseResponse = async (
res: Response,
{ expectingOk = true, format = 'text', apiName = '' },
) => {
let body;
try {
const formatter = responseFormatters.get(format, false);
if (!formatter) {
throw new Error(`${format} is not a supported response format.`);
}
body = await formatter(res);
} catch (err) {
throw new APIError(err.message, res.status, apiName);
}
if (expectingOk && !res.ok) {
throw new Error(`Expected an ok response, but received an error status: ${res.status}.`);
const isJSON = format === 'json';
const message = isJSON ? body.message || body.msg || body.error?.message : body;
throw new APIError(isJSON && message ? message : body, res.status, apiName);
}
const formatter = responseFormatters.get(format, false);
if (!formatter) {
throw new Error(`${format} is not a supported response format.`);
}
const body = await formatter(res);
return body;
};
export const responseParser = options => res => parseResponse(res, options);
export const responseParser = (options: {
expectingOk?: boolean;
format: string;
apiName: string;
}) => (res: Response) => parseResponse(res, options);
export const parseLinkHeader = flow([
linksString => linksString.split(','),
map(str => str.trim().split(';')),
map((str: string) => str.trim().split(';')),
map(([linkStr, keyStr]) => [
keyStr.match(/rel="(.*?)"/)[1],
linkStr
@ -56,7 +76,11 @@ export const parseLinkHeader = flow([
fromPairs,
]);
export const getAllResponses = async (url, options = {}, linkHeaderRelName = 'next') => {
export const getAllResponses = async (
url: string,
options: { headers?: {} } = {},
linkHeaderRelName = 'next',
) => {
const maxResponses = 30;
let responseCount = 1;
@ -78,7 +102,7 @@ export const getAllResponses = async (url, options = {}, linkHeaderRelName = 'ne
return pageResponses;
};
export const getCollectionDepth = collection => {
const depth = collection.get('path', '').split('/').length;
export const getPathDepth = (path: string) => {
const depth = path.split('/').length;
return depth;
};

View File

@ -1,9 +1,9 @@
import sha256 from 'js-sha256';
export default blob =>
export default (blob: Blob): Promise<string> =>
new Promise((resolve, reject) => {
const fr = new FileReader();
fr.onload = ({ target: { result } }) => resolve(sha256(result));
fr.onload = ({ target }) => resolve(sha256(target?.result));
fr.onerror = err => {
fr.abort();
reject(err);

View File

@ -0,0 +1,305 @@
import semaphore, { Semaphore } from 'semaphore';
import Cursor from './Cursor';
import { AsyncLock } from './asyncLock';
// Media addressed by id + path, as produced by backends.
export type DisplayURLObject = { id: string; path: string };
// A display URL can be a plain URL string, an id/path pair, or a large-media
// wrapper pointing at the original.
export type DisplayURL =
  | DisplayURLObject
  | string
  | { original: DisplayURL; path?: string; largeMedia?: string };
// Media file as surfaced by a backend implementation.
export interface ImplementationMediaFile {
  name: string;
  id: string;
  size?: number;
  displayURL?: DisplayURL;
  path: string;
  draft?: boolean;
  url?: string;
  file?: File;
}
// Media file attached to an unpublished (editorial workflow) entry.
export interface UnpublishedEntryMediaFile {
  id: string;
  path: string;
}
// Entry as loaded from a backend: raw file contents plus location metadata.
export interface ImplementationEntry {
  // NOTE(review): this eslint suppression looks stale — `data` is typed string.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  data: string;
  file: { path: string; label?: string; id?: string | null };
  slug?: string;
  mediaFiles?: ImplementationMediaFile[];
  metaData?: { collection: string; status: string };
  isModification?: boolean;
}
// Structural subset of an Immutable.js Map used at the backend boundary.
export interface Map {
  get: <T>(key: string, defaultValue?: T) => T;
  getIn: <T>(key: string[], defaultValue?: T) => T;
  setIn: <T>(key: string[], value: T) => Map;
  set: <T>(key: string, value: T) => Map;
}
// Asset pending upload: either a browser File or a base64 provider.
export type AssetProxy = {
  path: string;
  fileObj?: File;
  toBase64?: () => Promise<string>;
};
export type Entry = { path: string; slug: string; raw: string };
// Options describing the commit performed by persistEntry/persistMedia.
export type PersistOptions = {
  newEntry?: boolean;
  parsedData?: { title: string; description: string };
  commitMessage: string;
  collectionName?: string;
  useWorkflow?: boolean;
  unpublished?: boolean;
  status?: string;
};
export type DeleteOptions = {};
export type Credentials = { token: string | {}; refresh_token?: string };
export type User = Credentials & {
  backendName?: string;
  login?: string;
  name: string;
  useOpenAuthoring?: boolean;
};
// Backend-relevant subset of the CMS configuration.
export type Config = {
  backend: {
    repo?: string | null;
    open_authoring?: boolean;
    branch?: string;
    api_root?: string;
    squash_merges?: boolean;
    use_graphql?: boolean;
    preview_context?: string;
    identity_url?: string;
    gateway_url?: string;
    large_media_url?: string;
    use_large_media_transforms_in_media_library?: boolean;
  };
  media_folder: string;
  base_url?: string;
  site_id?: string;
};
// Contract every backend must fulfill; methods marked `?` are optional
// (cursor traversal and bulk folder listing).
export interface Implementation {
  authComponent: () => void;
  restoreUser: (user: User) => Promise<User>;
  authenticate: (credentials: Credentials) => Promise<User>;
  logout: () => Promise<void> | void | null;
  getToken: () => Promise<string | null>;
  getEntry: (path: string) => Promise<ImplementationEntry>;
  entriesByFolder: (
    folder: string,
    extension: string,
    depth: number,
  ) => Promise<ImplementationEntry[]>;
  entriesByFiles: (files: ImplementationFile[]) => Promise<ImplementationEntry[]>;
  getMediaDisplayURL?: (displayURL: DisplayURL) => Promise<string>;
  getMedia: (folder?: string) => Promise<ImplementationMediaFile[]>;
  getMediaFile: (path: string) => Promise<ImplementationMediaFile>;
  persistEntry: (obj: Entry, assetProxies: AssetProxy[], opts: PersistOptions) => Promise<void>;
  persistMedia: (file: AssetProxy, opts: PersistOptions) => Promise<ImplementationMediaFile>;
  deleteFile: (path: string, commitMessage: string) => Promise<void>;
  unpublishedEntries: () => Promise<ImplementationEntry[]>;
  unpublishedEntry: (collection: string, slug: string) => Promise<ImplementationEntry>;
  updateUnpublishedEntryStatus: (
    collection: string,
    slug: string,
    newStatus: string,
  ) => Promise<void>;
  publishUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
  deleteUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
  getDeployPreview: (
    collectionName: string,
    slug: string,
  ) => Promise<{ url: string; status: string } | null>;
  allEntriesByFolder?: (
    folder: string,
    extension: string,
    depth: number,
  ) => Promise<ImplementationEntry[]>;
  traverseCursor?: (
    cursor: Cursor,
    action: string,
  ) => Promise<{ entries: ImplementationEntry[]; cursor: Cursor }>;
}
// Cap on simultaneous file reads when hydrating entries/media.
const MAX_CONCURRENT_DOWNLOADS = 10;
export type ImplementationFile = {
  id?: string | null | undefined;
  label?: string;
  path: string;
};
// Editorial workflow metadata stored alongside an unpublished entry.
type Metadata = {
  objects: { entry: { path: string } };
  collection: string;
  status: string;
};
// Backend-provided reader; `parseText` selects text vs binary decoding.
type ReadFile = (
  path: string,
  id: string | null | undefined,
  options: { parseText: boolean },
) => Promise<string | Blob>;
type ReadUnpublishedFile = (
  key: string,
) => Promise<{ metaData: Metadata; fileData: string; isModification: boolean; slug: string }>;
// Read `files` through the backend, throttled to MAX_CONCURRENT_DOWNLOADS
// concurrent requests. Failed reads are logged and dropped instead of
// rejecting the whole batch.
const fetchFiles = async (files: ImplementationFile[], readFile: ReadFile, apiName: string) => {
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const promises = files.map(
    file =>
      new Promise<ImplementationEntry | { error: boolean }>(resolve =>
        sem.take(() =>
          readFile(file.path, file.id, { parseText: true })
            .then(data => {
              resolve({ file, data: data as string });
              sem.leave();
            })
            // the default (`error = true`) covers rejections without a reason
            .catch((error = true) => {
              sem.leave();
              console.error(`failed to load file from ${apiName}: ${file.path}`);
              resolve({ error });
            }),
        ),
      ),
  );
  const loadedEntries = await Promise.all(promises);
  return loadedEntries.filter(
    loadedEntry => !(loadedEntry as { error: boolean }).error,
  ) as ImplementationEntry[];
};
// Load unpublished (editorial workflow) entries by key, throttled to
// MAX_CONCURRENT_DOWNLOADS concurrent reads. Entries that fail to load or come
// back empty are logged/marked and filtered out rather than failing the batch.
const fetchUnpublishedFiles = async (
  keys: string[],
  readUnpublishedFile: ReadUnpublishedFile,
  apiName: string,
) => {
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
  keys.forEach(key => {
    promises.push(
      new Promise(resolve =>
        sem.take(() =>
          readUnpublishedFile(key)
            .then(data => {
              if (data === null || data === undefined) {
                // empty payload: mark as error so it is filtered out below
                resolve({ error: true });
                sem.leave();
              } else {
                resolve({
                  slug: data.slug,
                  file: { path: data.metaData.objects.entry.path, id: null },
                  data: data.fileData,
                  metaData: data.metaData,
                  isModification: data.isModification,
                });
                sem.leave();
              }
            })
            // the default (`error = true`) covers rejections without a reason
            .catch((error = true) => {
              sem.leave();
              console.error(`failed to load file from ${apiName}: ${key}`);
              resolve({ error });
            }),
        ),
      ),
    );
  });
  return Promise.all(promises).then(loadedEntries =>
    loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
  ) as Promise<ImplementationEntry[]>;
};
// List a collection folder's files, then hydrate each through the backend reader.
export const entriesByFolder = async (
  listFiles: () => Promise<ImplementationFile[]>,
  readFile: ReadFile,
  apiName: string,
) => {
  const files = await listFiles();
  const entries = await fetchFiles(files, readFile, apiName);
  return entries;
};
// Hydrate an explicit list of files through the backend reader.
export const entriesByFiles = async (
  files: ImplementationFile[],
  readFile: ReadFile,
  apiName: string,
) => fetchFiles(files, readFile, apiName);
// List and hydrate all unpublished (editorial workflow) entries.
export const unpublishedEntries = async (
  listEntriesKeys: () => Promise<string[]>,
  readUnpublishedFile: ReadUnpublishedFile,
  apiName: string,
) => {
  try {
    const keys = await listEntriesKeys();
    return await fetchUnpublishedFiles(keys, readUnpublishedFile, apiName);
  } catch (error) {
    // A missing CMS branch/folder simply means there are no pending entries yet.
    if (error.message === 'Not Found') {
      return [];
    }
    throw error;
  }
};
// Read a media file as a Blob. SVGs are fetched as text and wrapped in a Blob
// with the proper MIME type; everything else is read as binary.
export const getMediaAsBlob = async (path: string, id: string | null, readFile: ReadFile) => {
  let blob: Blob;
  // The dot is escaped so only a real ".svg" extension matches; the previous
  // /.svg$/ also matched any path merely ending in "svg" (e.g. "mysvg").
  if (path.match(/\.svg$/)) {
    const text = (await readFile(path, id, { parseText: true })) as string;
    blob = new Blob([text], { type: 'image/svg+xml' });
  } else {
    blob = (await readFile(path, id, { parseText: false })) as Blob;
  }
  return blob;
};
// Build an object URL for a media file, throttling concurrent blob creation
// through the shared semaphore.
export const getMediaDisplayURL = async (
  displayURL: DisplayURL,
  readFile: ReadFile,
  semaphore: Semaphore,
) => {
  const { path, id } = displayURL as DisplayURLObject;
  const url = await new Promise<string>((resolve, reject) =>
    semaphore.take(() =>
      getMediaAsBlob(path, id, readFile)
        .then(blob => URL.createObjectURL(blob))
        .then(resolve, reject)
        .finally(() => semaphore.leave()),
    ),
  );
  return url;
};
// Run `func` while holding `lock`; always releases in a finally block.
// If acquisition times out we log `message` but still proceed, so callers
// are never starved by a stuck lock.
export const runWithLock = async (lock: AsyncLock, func: Function, message: string) => {
  try {
    const acquired = await lock.acquire();
    if (!acquired) {
      console.warn(message);
    }
    return await func();
  } finally {
    lock.release();
  }
};

View File

@ -1,79 +0,0 @@
import APIError from './APIError';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from './Cursor';
import EditorialWorkflowError, { EDITORIAL_WORKFLOW_ERROR } from './EditorialWorkflowError';
import localForage from './localForage';
import { isAbsolutePath, basename, fileExtensionWithSeparator, fileExtension } from './path';
import {
filterPromises,
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
} from './promise';
import unsentRequest from './unsentRequest';
import {
filterByPropExtension,
getAllResponses,
parseLinkHeader,
parseResponse,
responseParser,
getCollectionDepth,
} from './backendUtil';
import loadScript from './loadScript';
import getBlobSHA from './getBlobSHA';
import { asyncLock } from './asyncLock';
// Aggregate object export mirroring the individual named exports below; this
// JavaScript module is deleted in this commit in favor of the typed index.ts.
export const NetlifyCmsLibUtil = {
  APIError,
  Cursor,
  CURSOR_COMPATIBILITY_SYMBOL,
  EditorialWorkflowError,
  EDITORIAL_WORKFLOW_ERROR,
  localForage,
  basename,
  fileExtensionWithSeparator,
  fileExtension,
  filterPromises,
  filterPromisesWith,
  onlySuccessfulPromises,
  resolvePromiseProperties,
  flowAsync,
  then,
  unsentRequest,
  filterByPropExtension,
  parseLinkHeader,
  parseResponse,
  responseParser,
  loadScript,
  getBlobSHA,
  getCollectionDepth,
};
// Individual named exports of the lib-util helpers.
export {
  APIError,
  Cursor,
  CURSOR_COMPATIBILITY_SYMBOL,
  EditorialWorkflowError,
  EDITORIAL_WORKFLOW_ERROR,
  localForage,
  basename,
  fileExtensionWithSeparator,
  fileExtension,
  filterPromises,
  filterPromisesWith,
  onlySuccessfulPromises,
  resolvePromiseProperties,
  flowAsync,
  then,
  unsentRequest,
  filterByPropExtension,
  parseLinkHeader,
  getAllResponses,
  parseResponse,
  responseParser,
  loadScript,
  getBlobSHA,
  asyncLock,
  isAbsolutePath,
  getCollectionDepth,
};

View File

@ -0,0 +1,164 @@
import APIError from './APIError';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from './Cursor';
import EditorialWorkflowError, { EDITORIAL_WORKFLOW_ERROR } from './EditorialWorkflowError';
import localForage from './localForage';
import { isAbsolutePath, basename, fileExtensionWithSeparator, fileExtension } from './path';
import { onlySuccessfulPromises, flowAsync, then } from './promise';
import unsentRequest from './unsentRequest';
import {
filterByPropExtension,
getAllResponses,
parseLinkHeader,
parseResponse,
responseParser,
getPathDepth,
} from './backendUtil';
import loadScript from './loadScript';
import getBlobSHA from './getBlobSHA';
import { asyncLock, AsyncLock as AL } from './asyncLock';
import {
Implementation as I,
ImplementationEntry as IE,
ImplementationMediaFile as IMF,
ImplementationFile as IF,
DisplayURLObject as DUO,
DisplayURL as DU,
Credentials as Cred,
User as U,
Entry as E,
PersistOptions as PO,
AssetProxy as AP,
entriesByFiles,
entriesByFolder,
unpublishedEntries,
getMediaDisplayURL,
getMediaAsBlob,
runWithLock,
Config as C,
UnpublishedEntryMediaFile as UEMF,
} from './implementation';
import {
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
labelToStatus,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
isPreviewContext,
getPreviewStatus,
PreviewState,
FetchError as FE,
parseContentKey,
} from './API';
// Public type re-exports. The types are imported above under short aliases so
// the value exports below can reuse the original identifiers without clashing.
export type AsyncLock = AL;
export type Implementation = I;
export type ImplementationEntry = IE;
export type ImplementationMediaFile = IMF;
export type ImplementationFile = IF;
export type DisplayURL = DU;
export type DisplayURLObject = DUO;
export type Credentials = Cred;
export type User = U;
export type Entry = E;
export type UnpublishedEntryMediaFile = UEMF;
export type PersistOptions = PO;
export type AssetProxy = AP;
// Request descriptor accepted by the shared fetch helpers: either a bare URL
// string or a structured request object.
export type ApiRequest =
  | {
      url: string;
      params?: Record<string, string | boolean | number>;
      method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
      headers?: Record<string, string>;
      body?: string | FormData;
      cache?: 'no-store';
    }
  | string;
export type Config = C;
export type FetchError = FE;
// Aggregate export consumed as `NetlifyCmsLibUtil.*`; mirrors most of the
// individual named exports below.
// NOTE(review): getAllResponses and asyncLock are exported individually below
// but are absent from this aggregate — confirm whether that is intentional.
export const NetlifyCmsLibUtil = {
  APIError,
  Cursor,
  CURSOR_COMPATIBILITY_SYMBOL,
  EditorialWorkflowError,
  EDITORIAL_WORKFLOW_ERROR,
  localForage,
  basename,
  fileExtensionWithSeparator,
  fileExtension,
  onlySuccessfulPromises,
  flowAsync,
  then,
  unsentRequest,
  filterByPropExtension,
  parseLinkHeader,
  parseResponse,
  responseParser,
  loadScript,
  getBlobSHA,
  getPathDepth,
  entriesByFiles,
  entriesByFolder,
  unpublishedEntries,
  getMediaDisplayURL,
  getMediaAsBlob,
  readFile,
  CMS_BRANCH_PREFIX,
  generateContentKey,
  isCMSLabel,
  labelToStatus,
  statusToLabel,
  DEFAULT_PR_BODY,
  MERGE_COMMIT_MESSAGE,
  isPreviewContext,
  getPreviewStatus,
  runWithLock,
  PreviewState,
  parseContentKey,
};
// Individual named exports of the lib-util helpers and shared backend code.
export {
  APIError,
  Cursor,
  CURSOR_COMPATIBILITY_SYMBOL,
  EditorialWorkflowError,
  EDITORIAL_WORKFLOW_ERROR,
  localForage,
  basename,
  fileExtensionWithSeparator,
  fileExtension,
  onlySuccessfulPromises,
  flowAsync,
  then,
  unsentRequest,
  filterByPropExtension,
  parseLinkHeader,
  getAllResponses,
  parseResponse,
  responseParser,
  loadScript,
  getBlobSHA,
  asyncLock,
  isAbsolutePath,
  getPathDepth,
  entriesByFiles,
  entriesByFolder,
  unpublishedEntries,
  getMediaDisplayURL,
  getMediaAsBlob,
  readFile,
  CMS_BRANCH_PREFIX,
  generateContentKey,
  isCMSLabel,
  labelToStatus,
  statusToLabel,
  DEFAULT_PR_BODY,
  MERGE_COMMIT_MESSAGE,
  isPreviewContext,
  getPreviewStatus,
  runWithLock,
  PreviewState,
  parseContentKey,
};

View File

@ -1,7 +1,7 @@
const absolutePath = new RegExp('^(?:[a-z]+:)?//', 'i');
const normalizePath = path => path.replace(/[\\/]+/g, '/');
const normalizePath = (path: string) => path.replace(/[\\/]+/g, '/');
export function isAbsolutePath(path) {
export function isAbsolutePath(path: string) {
return absolutePath.test(path);
}
@ -16,7 +16,7 @@ export function isAbsolutePath(path) {
* // returns
* 'quux'
*/
export function basename(p, ext = '') {
export function basename(p: string, ext = '') {
// Special case: Normalize will modify this to '.'
if (p === '') {
return p;
@ -50,13 +50,13 @@ export function basename(p, ext = '') {
* // returns
* '.html'
*/
export function fileExtensionWithSeparator(p) {
export function fileExtensionWithSeparator(p: string) {
p = normalizePath(p);
const sections = p.split('/');
p = sections.pop();
p = sections.pop() as string;
// Special case: foo/file.ext/ should return '.ext'
if (p === '' && sections.length > 0) {
p = sections.pop();
p = sections.pop() as string;
}
if (p === '..') {
return '';
@ -77,7 +77,7 @@ export function fileExtensionWithSeparator(p) {
* // returns
* 'html'
*/
export function fileExtension(p) {
export function fileExtension(p: string) {
const ext = fileExtensionWithSeparator(p);
return ext === '' ? ext : ext.substr(1);
}

View File

@ -1,38 +0,0 @@
import constant from 'lodash/constant';
import filter from 'lodash/fp/filter';
import map from 'lodash/fp/map';
import flow from 'lodash/flow';
import zipObject from 'lodash/zipObject';
// Resolve `filter` over each entry, then keep entries whose predicate resolved truthy.
export const filterPromises = (arr, filter) =>
  Promise.all(arr.map(entry => Promise.resolve(entry).then(filter))).then(bits =>
    arr.filter(() => bits.shift()),
  );
// Curried form of filterPromises.
export const filterPromisesWith = filter => arr => filterPromises(arr, filter);
// Resolve every promise-valued property of `obj`, returning a copy with resolved values.
export const resolvePromiseProperties = obj => {
  // Get the keys which represent promises
  const promiseKeys = Object.keys(obj).filter(key => typeof obj[key].then === 'function');
  const promises = promiseKeys.map(key => obj[key]);
  // Resolve all promises
  return Promise.all(promises).then(resolvedPromises =>
    // Return a copy of obj with promises overwritten by their
    // resolved values
    Object.assign({}, obj, zipObject(promiseKeys, resolvedPromises)),
  );
};
// Lift `fn` into a promise-aware mapper.
export const then = fn => p => Promise.resolve(p).then(fn);
const filterPromiseSymbol = Symbol('filterPromiseSymbol');
// Await all promises, silently dropping the rejected ones.
export const onlySuccessfulPromises = flow([
  then(map(p => p.catch(constant(filterPromiseSymbol)))),
  then(Promise.all.bind(Promise)),
  then(filter(maybeValue => maybeValue !== filterPromiseSymbol)),
]);
const wrapFlowAsync = fn => async arg => fn(await arg);
// Compose possibly-async functions left-to-right.
export const flowAsync = fns => flow(fns.map(fn => wrapFlowAsync(fn)));

View File

@ -0,0 +1,14 @@
import flow from 'lodash/flow';
// Lift `fn` into a promise-aware function: accepts a value or promise and maps it.
export const then = <T, V>(fn: (r: T) => V) => (p: Promise<T>) => {
  const settled = Promise.resolve(p);
  return settled.then(fn);
};
const filterPromiseSymbol = Symbol('filterPromiseSymbol');
// Await all promises and keep only the fulfilled values, preserving order.
// Rejections are swapped for a sentinel symbol and filtered out afterwards.
export const onlySuccessfulPromises = (promises: Promise<unknown>[]) => {
  const guarded = promises.map(p => p.catch(() => filterPromiseSymbol));
  return Promise.all(guarded).then(settled =>
    settled.filter(value => value !== filterPromiseSymbol),
  );
};
// Wrap a function so it awaits its (possibly promised) argument before running.
const wrapFlowAsync = (fn: Function) => async (arg: unknown) => fn(await arg);
// Compose possibly-async functions left-to-right into a single async pipeline.
export const flowAsync = (fns: Function[]) => {
  const asyncFns = fns.map(fn => wrapFlowAsync(fn));
  return flow(asyncFns);
};

View File

@ -0,0 +1,4 @@
// Minimal ambient typing for the `js-sha256` package: only the default digest
// function is used, accepting FileReader results (string | ArrayBuffer).
declare module 'js-sha256' {
  const sha256: (reader: string | ArrayBuffer | null | undefined) => string;
  export default sha256;
}

View File

@ -0,0 +1,5 @@
// Minimal ambient typing for the `semaphore` package: take() queues a callback
// until one of `count` slots frees up; leave() releases a slot.
declare module 'semaphore' {
  export type Semaphore = { take: (f: Function) => void; leave: () => void };
  const semaphore: (count: number) => Semaphore;
  export default semaphore;
}