Feat: entry sorting (#3494)

* refactor: typescript search actions, add tests, avoid duplicate search

* refactor: switch from promise chain to async/await in loadEntries

* feat: add sorting, initial commit

* fix: set isFetching to true on entries request

* fix: ui improvements and bug fixes

* test: fix tests

* feat(backend-gitlab): cache local tree

* fix: fix prop type warning

* refactor: code cleanup

* feat(backend-bitbucket): add local tree caching support

* feat: switch to orderBy and support multiple sort keys

* fix: backoff function

* fix: improve backoff

* feat: infer sortable fields

* feat: fetch file commit metadata - initial commit

* feat: extract file author and date, finalize GitLab & Bitbucket

* refactor: code cleanup

* feat: handle github rate limit errors

* refactor: code cleanup

* fix(github): add missing author and date when traversing cursor

* fix: add missing author and date when traversing cursor

* refactor: code cleanup

* refactor: code cleanup

* refactor: code cleanup

* test: fix tests

* fix: rebuild local tree when head doesn't exist in remote branch

* fix: allow sortable fields to be an empty array

* fix: allow translation of built in sort fields

* build: fix proxy server build

* fix: hide commit author and date fields by default on non git backends

* fix(algolia): add listAllEntries method for algolia integration

* fix: handle sort fields overflow

* test(bitbucket): re-record some bitbucket e2e tests

* test(bitbucket): fix media library test

* refactor(gitgateway-gitlab): share request code and handle 404 errors

* fix: always show commit date by default

* docs: add sortableFields

* refactor: code cleanup

* improvement: drop multi-sort, rework sort UI (see the sorting sketch below)

* chore: force main package bumps

Co-authored-by: Shawn Erquhart <shawn@erquh.art>
Erez Rokah authored on 2020-04-01 06:13:27 +03:00, committed by GitHub
commit 174d86f0a0 (parent cbb3927101)
82 changed files with 15128 additions and 12621 deletions
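For context on the sorting bullets above ('infer sortable fields', 'switch to orderBy', 'drop multi-sort'): entries are sorted client-side once loaded, and git backends now surface each file's commit author and date so they can serve as built-in sort fields. A minimal sketch of the idea using lodash's orderBy; the Entry shape and sortEntries helper are hypothetical stand-ins, not this commit's actual types:

import { orderBy } from 'lodash';

type SortDirection = 'asc' | 'desc';

// Hypothetical entry shape: the commit author and date surfaced by this PR
// become sortable alongside regular entry data fields.
interface Entry {
  slug: string;
  author?: string;
  updatedOn?: string;
  data: Record<string, unknown>;
}

// Single-key sort, matching the post-rework "drop multi-sort" behavior.
function sortEntries(entries: Entry[], key: string, direction: SortDirection): Entry[] {
  const getValue = (entry: Entry) =>
    key === 'author' || key === 'updatedOn' || key === 'slug' ? entry[key] : entry.data[key];
  return orderBy(entries, [getValue], [direction]);
}

// e.g. sortEntries(entries, 'updatedOn', 'desc') to list newest entries first

Collections can also declare sortableFields in the config (the 'docs: add sortableFields' bullet); when the list is omitted, sortable fields are inferred, and the commit author/date fields stay hidden by default on non-git backends.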

packages/netlify-cms-backend-bitbucket/src/API.ts

@ -24,6 +24,8 @@ import {
FetchError,
parseContentKey,
branchFromContentKey,
requestWithBackoff,
readFileMetadata,
} from 'netlify-cms-lib-util';
import { oneLine } from 'common-tags';
import { parse } from 'what-the-diff';
@ -160,7 +162,24 @@ type BitBucketUser = {
};
};
export const API_NAME = 'BitBucket';
type BitBucketBranch = {
name: string;
target: { hash: string };
};
type BitBucketCommit = {
hash: string;
author: {
raw: string;
user: {
display_name: string;
nickname: string;
};
};
date: string;
};
export const API_NAME = 'Bitbucket';
const APPLICATION_JSON = 'application/json; charset=utf-8';
@ -195,15 +214,17 @@ export default class API {
this.initialWorkflowStatus = config.initialWorkflowStatus;
}
buildRequest = (req: ApiRequest) =>
flow([unsentRequest.withRoot(this.apiRoot), unsentRequest.withTimestamp])(req);
buildRequest = (req: ApiRequest) => {
return flow([unsentRequest.withRoot(this.apiRoot), unsentRequest.withTimestamp])(req);
};
request = (req: ApiRequest): Promise<Response> =>
flow([
this.buildRequest,
this.requestFunction,
p => p.catch((err: Error) => Promise.reject(new APIError(err.message, null, API_NAME))),
])(req);
request = (req: ApiRequest): Promise<Response> => {
try {
return requestWithBackoff(this, req);
} catch (err) {
throw new APIError(err.message, null, API_NAME);
}
};
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
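requestWithBackoff is imported from netlify-cms-lib-util above but not defined in this diff. A rough sketch of the behavior its name implies (see the 'fix: backoff function' and 'handle github rate limit errors' bullets): retry rate-limited requests with exponentially growing delays. This is an illustration under those assumptions, not the library's implementation:

type ApiRequest = unknown; // stand-in for the netlify-cms-lib-util type

interface BackoffApi {
  buildRequest: (req: ApiRequest) => ApiRequest;
  requestFunction: (req: ApiRequest) => Promise<Response>;
}

// Illustrative sketch: retry on HTTP 429, doubling the wait on each attempt.
async function requestWithBackoff(api: BackoffApi, req: ApiRequest, attempt = 1): Promise<Response> {
  const response = await api.requestFunction(api.buildRequest(req));
  if (response.status === 429 && attempt <= 5) {
    await new Promise(resolve => setTimeout(resolve, Math.pow(2, attempt) * 1000));
    return requestWithBackoff(api, req, attempt + 1);
  }
  return response;
}

The api parameter mirrors the call site's requestWithBackoff(this, req), which hands the helper the instance's buildRequest and requestFunction.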
@ -226,11 +247,21 @@ export default class API {
branchCommitSha = async (branch: string) => {
const {
target: { hash: branchSha },
} = await this.requestJSON(`${this.repoURL}/refs/branches/${branch}`);
return branchSha as string;
}: BitBucketBranch = await this.requestJSON(`${this.repoURL}/refs/branches/${branch}`);
return branchSha;
};
defaultBranchCommitSha = () => {
return this.branchCommitSha(this.branch);
};
isFile = ({ type }: BitBucketFile) => type === 'commit_file';
getFileId = (commitHash: string, path: string) => {
return `${commitHash}/${path}`;
};
processFile = (file: BitBucketFile) => ({
id: file.id,
type: file.type,
@ -243,17 +274,17 @@ export default class API {
// that will help with caching (though not as well as a normal
// SHA, since it will change even if the individual file itself
// doesn't.)
...(file.commit && file.commit.hash ? { id: `${file.commit.hash}/${file.path}` } : {}),
...(file.commit && file.commit.hash ? { id: this.getFileId(file.commit.hash, file.path) } : {}),
});
processFiles = (files: BitBucketFile[]) => files.filter(this.isFile).map(this.processFile);
readFile = async (
path: string,
sha?: string | null,
{ parseText = true, branch = this.branch } = {},
{ parseText = true, branch = this.branch, head = '' } = {},
): Promise<string | Blob> => {
const fetchContent = async () => {
const node = await this.branchCommitSha(branch);
const node = head ? head : await this.branchCommitSha(branch);
const content = await this.request({
url: `${this.repoURL}/src/${node}/${path}`,
cache: 'no-store',
@ -264,10 +295,44 @@ export default class API {
return content;
};
async readFileMetadata(path: string, sha: string) {
const fetchFileMetadata = async () => {
try {
const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
url: `${this.repoURL}/commits`,
params: { path, include: this.branch },
});
const commit = values[0];
return {
author: commit.author.user
? commit.author.user.display_name || commit.author.user.nickname
: commit.author.raw,
updatedOn: commit.date,
};
} catch (e) {
return { author: '', updatedOn: '' };
}
};
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
return fileMetadata;
}
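The readFileMetadata helper wrapped above comes from netlify-cms-lib-util and is not part of this diff. The pattern it enables: commit metadata for a given sha is immutable, so it can be cached in localForage and fetched at most once per file version. A hedged sketch of that shape, with a hypothetical cache key format:

import localForage from 'localforage';

interface FileMetadata {
  author: string;
  updatedOn: string;
}

// Sketch only; the real helper lives in netlify-cms-lib-util and its key format may differ.
async function readFileMetadata(
  sha: string,
  fetchMetadata: () => Promise<FileMetadata>,
  storage: typeof localForage,
): Promise<FileMetadata> {
  const key = `meta.${sha}`; // metadata for a given sha never changes, so it caches cleanly
  const cached = await storage.getItem<FileMetadata>(key);
  if (cached) {
    return cached;
  }
  const metadata = await fetchMetadata();
  await storage.setItem(key, metadata);
  return metadata;
}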
async isShaExistsInBranch(branch: string, sha: string) {
const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
url: `${this.repoURL}/commits`,
params: { include: branch, pagelen: 100 },
}).catch(e => {
console.log(`Failed getting commits for branch '${branch}'`, e);
return [];
});
return values.some(v => v.hash === sha);
}
getEntriesAndCursor = (jsonResponse: BitBucketSrcResult) => {
const {
size: count,
page: index,
page,
pagelen: pageSize,
next,
previous: prev,
@ -278,21 +343,20 @@ export default class API {
entries,
cursor: Cursor.create({
actions: [...(next ? ['next'] : []), ...(prev ? ['prev'] : [])],
meta: { index, count, pageSize, pageCount },
meta: { page, count, pageSize, pageCount },
data: { links: { next, prev } },
}),
};
};
listFiles = async (path: string, depth = 1) => {
listFiles = async (path: string, depth = 1, pagelen = 20) => {
const node = await this.branchCommitSha(this.branch);
const result: BitBucketSrcResult = await this.requestJSON({
url: `${this.repoURL}/src/${node}/${path}`,
params: {
// sort files by path in descending order
sort: '-path',
// eslint-disable-next-line @typescript-eslint/camelcase
max_depth: depth,
pagelen,
},
}).catch(replace404WithEmptyResponse);
const { entries, cursor } = this.getEntriesAndCursor(result);
@ -320,7 +384,11 @@ export default class API {
])(cursor.data!.getIn(['links', action]));
listAllFiles = async (path: string, depth = 1) => {
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(path, depth);
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(
path,
depth,
100,
);
const entries = [...initialEntries];
let currentCursor = initialCursor;
while (currentCursor && currentCursor.actions!.has('next')) {
@ -435,19 +503,30 @@ export default class API {
await this.addPullRequestComment(pullRequest, statusToLabel(status));
}
async getDifferences(branch: string) {
async getDifferences(source: string, destination: string = this.branch) {
if (source === destination) {
return [];
}
const rawDiff = await this.requestText({
url: `${this.repoURL}/diff/${branch}..${this.branch}`,
url: `${this.repoURL}/diff/${source}..${destination}`,
params: {
binary: false,
},
});
return parse(rawDiff).map(d => ({
newPath: d.newPath.replace(/b\//, ''),
binary: d.binary || /.svg$/.test(d.newPath),
newFile: d.status === 'added',
}));
return parse(rawDiff).map(d => {
const oldPath = d.oldPath?.replace(/b\//, '') || '';
const newPath = d.newPath?.replace(/b\//, '') || '';
const path = newPath || (oldPath as string);
return {
oldPath,
newPath,
binary: d.binary || /.svg$/.test(path),
status: d.status,
newFile: d.status === 'added',
path,
};
});
}
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
@ -472,7 +551,7 @@ export default class API {
const toDelete: DeleteEntry[] = [];
for (const diff of diffs) {
if (!files.some(file => file.path === diff.newPath)) {
toDelete.push({ path: diff.newPath, delete: true });
toDelete.push({ path: diff.path, delete: true });
}
}
@ -499,19 +578,6 @@ export default class API {
);
};
async isFileExists(path: string, branch: string) {
const fileExists = await this.readFile(path, null, { branch })
.then(() => true)
.catch(error => {
if (error instanceof APIError && error.status === 404) {
return false;
}
throw error;
});
return fileExists;
}
async getPullRequests(sourceBranch?: string) {
const sourceQuery = sourceBranch
? `source.branch.name = "${sourceBranch}"`

packages/netlify-cms-backend-bitbucket/src/implementation.ts

@ -1,10 +1,9 @@
import semaphore, { Semaphore } from 'semaphore';
import { flow, trimStart } from 'lodash';
import { trimStart } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
filterByPropExtension,
then,
filterByExtension,
unsentRequest,
basename,
getBlobSHA,
@ -36,8 +35,10 @@ import {
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
localForage,
allEntriesByFolder,
} from 'netlify-cms-lib-util';
import NetlifyAuthenticator from 'netlify-cms-lib-auth';
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import { GitLfsClient } from './git-lfs-client';
@ -106,6 +107,10 @@ export default class BitbucketBackend implements Implementation {
this.lock = asyncLock();
}
isGitBackend() {
return true;
}
authComponent() {
return AuthenticationPage;
}
@ -121,12 +126,11 @@ export default class BitbucketBackend implements Implementation {
});
}
requestFunction = (req: ApiRequest) =>
this.getToken()
.then(
token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
)
.then(unsentRequest.performRequest);
requestFunction = async (req: ApiRequest) => {
const token = await this.getToken();
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
return unsentRequest.performRequest(authorizedRequest);
};
restoreUser(user: User) {
return this.authenticate(user);
@ -199,6 +203,7 @@ export default class BitbucketBackend implements Implementation {
// eslint-disable-next-line @typescript-eslint/camelcase
this.refreshToken = refresh_token;
this.refreshedTokenPromise = undefined;
// eslint-disable-next-line @typescript-eslint/camelcase
this.updateUserCredentials({ token, refresh_token });
return token;
@ -225,28 +230,22 @@ export default class BitbucketBackend implements Implementation {
? await this.refreshedTokenPromise
: this.token) as string;
return flow([
unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }) as (
req: ApiRequest,
) => ApiRequest,
unsentRequest.performRequest,
then(async (res: Response) => {
if (res.status === 401) {
const json = await res.json().catch(() => null);
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
const newToken = await this.getRefreshedAccessToken();
const reqWithNewToken = unsentRequest.withHeaders(
{
Authorization: `Bearer ${newToken}`,
},
req,
) as ApiRequest;
return unsentRequest.performRequest(reqWithNewToken);
}
}
return res;
}),
])(req);
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
const response: Response = await unsentRequest.performRequest(authorizedRequest);
if (response.status === 401) {
const json = await response.json().catch(() => null);
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
const newToken = await this.getRefreshedAccessToken();
const reqWithNewToken = unsentRequest.withHeaders(
{
Authorization: `Bearer ${newToken}`,
},
req,
) as ApiRequest;
return unsentRequest.performRequest(reqWithNewToken);
}
}
return response;
};
async entriesByFolder(folder: string, extension: string, depth: number) {
@ -255,10 +254,20 @@ export default class BitbucketBackend implements Implementation {
const listFiles = () =>
this.api!.listFiles(folder, depth).then(({ entries, cursor: c }) => {
cursor = c.mergeMeta({ extension });
return filterByPropExtension(extension, 'path')(entries);
return entries.filter(e => filterByExtension(e, extension));
});
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), 'BitBucket');
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
@ -266,16 +275,45 @@ export default class BitbucketBackend implements Implementation {
return files;
}
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const listFiles = () =>
this.api!.listAllFiles(folder, depth).then(filterByPropExtension(extension, 'path'));
async listAllFiles(folder: string, extension: string, depth: number) {
const files = await this.api!.listAllFiles(folder, depth);
const filtered = files.filter(file => filterByExtension(file, extension));
return filtered;
}
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), 'BitBucket');
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const files = await allEntriesByFolder({
listAllFiles: () => this.listAllFiles(folder, extension, depth),
readFile,
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
apiName: API_NAME,
branch: this.branch,
localForage,
folder,
extension,
depth,
getDefaultBranch: () => Promise.resolve({ name: this.branch, sha: head }),
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
getDifferences: (source, destination) => this.api!.getDifferences(source, destination),
getFileId: path => Promise.resolve(this.api!.getFileId(head, path)),
filterFile: file => filterByExtension(file, extension),
});
return files;
}
async entriesByFiles(files: ImplementationFile[]) {
return entriesByFiles(files, this.api!.readFile.bind(this.api!), 'BitBucket');
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
}
getEntry(path: string) {
@ -403,15 +441,22 @@ export default class BitbucketBackend implements Implementation {
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
const extension = cursor.meta?.get('extension');
if (extension) {
entries = filterByPropExtension(extension as string, 'path')(entries);
entries = entries.filter(e => filterByExtension(e, extension));
newCursor = newCursor.mergeMeta({ extension });
}
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const entriesWithData = await entriesByFiles(
entries,
readFile,
this.api!.readFileMetadata.bind(this.api)!,
API_NAME,
);
return {
entries: await Promise.all(
entries.map(file =>
this.api!.readFile(file.path, file.id).then(data => ({ file, data: data as string })),
),
),
entries: entriesWithData,
cursor: newCursor,
};
});
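The option bag passed to allEntriesByFolder in this file (listAllFiles, getDefaultBranch, isShaExistsInBranch, getDifferences, getFileId, filterFile) drives the local tree caching from the 'cache local tree' bullets. Roughly: the file list is persisted alongside the branch head sha; on the next load, if the cached sha still exists in the remote branch only the differences between the two commits are reconciled, otherwise the tree is rebuilt from scratch (the "rebuild local tree when head doesn't exist in remote branch" fix). A simplified sketch of that decision flow with hypothetical names; the real logic lives in netlify-cms-lib-util:

interface TreeFile {
  path: string;
  id: string;
}

interface CachedTree {
  head: string; // branch head sha at the time the tree was cached
  files: TreeFile[];
}

// Hypothetical sketch of the cache decision; argument order and statuses are illustrative.
async function listFilesUsingCache(
  api: {
    getDefaultBranch: () => Promise<{ name: string; sha: string }>;
    isShaExistsInBranch: (branch: string, sha: string) => Promise<boolean>;
    getDifferences: (source: string, destination: string) => Promise<{ path: string; status: string }[]>;
    listAllFiles: () => Promise<TreeFile[]>;
  },
  cached: CachedTree | null,
): Promise<TreeFile[]> {
  const { name: branch, sha: head } = await api.getDefaultBranch();
  if (cached && cached.head === head) {
    return cached.files; // nothing changed since the last visit
  }
  if (cached && (await api.isShaExistsInBranch(branch, cached.head))) {
    // The cached head is still in the branch history, so only files touched
    // between the two commits need to be reconciled.
    const diffs = await api.getDifferences(head, cached.head);
    const touched = new Set(diffs.map(d => d.path));
    // Simplified: drop touched paths; the real code also re-reads added/modified files.
    return cached.files.filter(f => !touched.has(f.path));
  }
  // Cached head no longer exists remotely (e.g. after a force push): full rebuild.
  return api.listAllFiles();
}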

what-the-diff.d.ts

@ -1,3 +1,5 @@
declare module 'what-the-diff' {
export const parse: (rawDiff: string) => { newPath: string; binary: boolean; status: string }[];
export const parse: (
rawDiff: string,
) => { oldPath?: string; newPath?: string; binary: boolean; status: string }[];
}
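The widened parse signature above reflects that a deleted file has an old path but may have no new path (and the reverse for additions), which the updated getDifferences in API.ts handles with optional chaining. A small usage sketch consistent with these typings; the sample diff text is illustrative:

import { parse } from 'what-the-diff';

const rawDiff = `diff --git a/posts/old-post.md b/posts/old-post.md
deleted file mode 100644
index e69de29..0000000
--- a/posts/old-post.md
+++ /dev/null`;

for (const d of parse(rawDiff)) {
  // For a deletion, oldPath identifies the file and newPath may be undefined,
  // which is why the declaration marks both as optional.
  console.log(d.status, d.oldPath ?? '(none)', d.newPath ?? '(none)', d.binary);
}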