Feat: entry sorting (#3494)

* refactor: typescript search actions, add tests avoid duplicate search

* refactor: switch from promise chain to async/await in loadEntries

* feat: add sorting, initial commit

* fix: set isFetching to true on entries request

* fix: ui improvements and bug fixes

* test: fix tests

* feat(backend-gitlab): cache local tree

* fix: fix prop type warning

* refactor: code cleanup

* feat(backend-bitbucket): add local tree caching support

* feat: switch to orderBy and support multiple sort keys

* fix: backoff function

* fix: improve backoff

* feat: infer sortable fields

* feat: fetch file commit metadata - initial commit

* feat: extract file author and date, finalize GitLab & Bitbucket

* refactor: code cleanup

* feat: handle github rate limit errors

* refactor: code cleanup

* fix(github): add missing author and date when traversing cursor

* fix: add missing author and date when traversing cursor

* refactor: code cleanup

* refactor: code cleanup

* refactor: code cleanup

* test: fix tests

* fix: rebuild local tree when head doesn't exist in remote branch

* fix: allow sortable fields to be an empty array

* fix: allow translation of built in sort fields

* build: fix proxy server build

* fix: hide commit author and date fields by default on non git backends

* fix(algolia): add listAllEntries method for algolia integration

* fix: handle sort fields overflow

* test(bitbucket): re-record some bitbucket e2e tests

* test(bitbucket): fix media library test

* refactor(gitgateway-gitlab): share request code and handle 404 errors

* fix: always show commit date by default

* docs: add sortableFields

* refactor: code cleanup

* improvement: drop multi-sort, rework sort UI

* chore: force main package bumps

Co-authored-by: Shawn Erquhart <shawn@erquh.art>
This commit is contained in:
Erez Rokah
2020-04-01 06:13:27 +03:00
committed by GitHub
parent cbb3927101
commit 174d86f0a0
82 changed files with 15128 additions and 12621 deletions

View File

@ -65,5 +65,5 @@
"react": "^16.8.4",
"react-dom": "^16.8.4"
},
"incrementToForceBump": 1
"incrementToForceBump": 2
}

View File

@ -24,6 +24,8 @@ import {
FetchError,
parseContentKey,
branchFromContentKey,
requestWithBackoff,
readFileMetadata,
} from 'netlify-cms-lib-util';
import { oneLine } from 'common-tags';
import { parse } from 'what-the-diff';
@ -160,7 +162,24 @@ type BitBucketUser = {
};
};
export const API_NAME = 'BitBucket';
// Shape of a branch object as returned by the Bitbucket 2.0 refs API;
// `target.hash` is the commit SHA at the branch head.
type BitBucketBranch = {
name: string;
target: { hash: string };
};
// Shape of a commit object as returned by the Bitbucket 2.0 commits API.
// `author.raw` is the raw "Name <email>" string; `author.user` may be
// absent when the author has no linked Bitbucket account (readFileMetadata
// falls back to `raw` in that case).
type BitBucketCommit = {
hash: string;
author: {
raw: string;
user: {
display_name: string;
nickname: string;
};
};
date: string;
};
export const API_NAME = 'Bitbucket';
const APPLICATION_JSON = 'application/json; charset=utf-8';
@ -195,15 +214,17 @@ export default class API {
this.initialWorkflowStatus = config.initialWorkflowStatus;
}
buildRequest = (req: ApiRequest) =>
flow([unsentRequest.withRoot(this.apiRoot), unsentRequest.withTimestamp])(req);
buildRequest = (req: ApiRequest) => {
return flow([unsentRequest.withRoot(this.apiRoot), unsentRequest.withTimestamp])(req);
};
request = (req: ApiRequest): Promise<Response> =>
flow([
this.buildRequest,
this.requestFunction,
p => p.catch((err: Error) => Promise.reject(new APIError(err.message, null, API_NAME))),
])(req);
// Perform an API request with automatic backoff/retry for rate limiting.
// Any failure is surfaced as an APIError tagged with this backend's name.
request = async (req: ApiRequest): Promise<Response> => {
  try {
    // `await` is required here: without it a rejected promise escapes the
    // try/catch and is never wrapped in an APIError.
    return await requestWithBackoff(this, req);
  } catch (err) {
    throw new APIError(err.message, null, API_NAME);
  }
};
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
@ -226,11 +247,21 @@ export default class API {
branchCommitSha = async (branch: string) => {
const {
target: { hash: branchSha },
} = await this.requestJSON(`${this.repoURL}/refs/branches/${branch}`);
return branchSha as string;
}: BitBucketBranch = await this.requestJSON(`${this.repoURL}/refs/branches/${branch}`);
return branchSha;
};
// Commit SHA at the head of the configured default branch.
defaultBranchCommitSha = () => {
return this.branchCommitSha(this.branch);
};
// Bitbucket `src` listings mix directories and files; only `commit_file`
// entries are actual files.
isFile = ({ type }: BitBucketFile) => type === 'commit_file';
// Build an id for a file that changes whenever its commit does — useful
// for caching (see the note in processFile below).
getFileId = (commitHash: string, path: string) => {
return `${commitHash}/${path}`;
};
processFile = (file: BitBucketFile) => ({
id: file.id,
type: file.type,
@ -243,17 +274,17 @@ export default class API {
// that will help with caching (though not as well as a normal
// SHA, since it will change even if the individual file itself
// doesn't.)
...(file.commit && file.commit.hash ? { id: `${file.commit.hash}/${file.path}` } : {}),
...(file.commit && file.commit.hash ? { id: this.getFileId(file.commit.hash, file.path) } : {}),
});
processFiles = (files: BitBucketFile[]) => files.filter(this.isFile).map(this.processFile);
readFile = async (
path: string,
sha?: string | null,
{ parseText = true, branch = this.branch } = {},
{ parseText = true, branch = this.branch, head = '' } = {},
): Promise<string | Blob> => {
const fetchContent = async () => {
const node = await this.branchCommitSha(branch);
const node = head ? head : await this.branchCommitSha(branch);
const content = await this.request({
url: `${this.repoURL}/src/${node}/${path}`,
cache: 'no-store',
@ -264,10 +295,44 @@ export default class API {
return content;
};
// Resolve the last-commit author and date for `path`, delegating caching
// (keyed by `sha`) to the shared readFileMetadata helper.
async readFileMetadata(path: string, sha: string) {
  const loadCommitMetadata = async () => {
    try {
      const response: { values: BitBucketCommit[] } = await this.requestJSON({
        url: `${this.repoURL}/commits`,
        params: { path, include: this.branch },
      });
      const latest = response.values[0];
      const { user, raw } = latest.author;
      // Prefer the linked Bitbucket account's name; fall back to the raw
      // "Name <email>" author string.
      const author = user ? user.display_name || user.nickname : raw;
      return { author, updatedOn: latest.date };
    } catch (e) {
      // Metadata is best effort — degrade to blanks instead of failing.
      return { author: '', updatedOn: '' };
    }
  };
  return readFileMetadata(sha, loadCommitMetadata, localForage);
}
// Check whether commit `sha` appears in the most recent commits (up to 100)
// of `branch`. Errors are logged and treated as "not found".
async isShaExistsInBranch(branch: string, sha: string) {
  const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
    url: `${this.repoURL}/commits`,
    params: { include: branch, pagelen: 100 },
  }).catch(e => {
    console.log(`Failed getting commits for branch '${branch}'`, e);
    // Bug fix: the catch must yield the same `{ values }` shape as the
    // success path — returning a bare array left `values` undefined and
    // crashed on `values.some` below.
    return { values: [] as BitBucketCommit[] };
  });
  return values.some(v => v.hash === sha);
}
getEntriesAndCursor = (jsonResponse: BitBucketSrcResult) => {
const {
size: count,
page: index,
page,
pagelen: pageSize,
next,
previous: prev,
@ -278,21 +343,20 @@ export default class API {
entries,
cursor: Cursor.create({
actions: [...(next ? ['next'] : []), ...(prev ? ['prev'] : [])],
meta: { index, count, pageSize, pageCount },
meta: { page, count, pageSize, pageCount },
data: { links: { next, prev } },
}),
};
};
listFiles = async (path: string, depth = 1) => {
listFiles = async (path: string, depth = 1, pagelen = 20) => {
const node = await this.branchCommitSha(this.branch);
const result: BitBucketSrcResult = await this.requestJSON({
url: `${this.repoURL}/src/${node}/${path}`,
params: {
// sort files by filename ascending
sort: '-path',
// eslint-disable-next-line @typescript-eslint/camelcase
max_depth: depth,
pagelen,
},
}).catch(replace404WithEmptyResponse);
const { entries, cursor } = this.getEntriesAndCursor(result);
@ -320,7 +384,11 @@ export default class API {
])(cursor.data!.getIn(['links', action]));
listAllFiles = async (path: string, depth = 1) => {
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(path, depth);
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(
path,
depth,
100,
);
const entries = [...initialEntries];
let currentCursor = initialCursor;
while (currentCursor && currentCursor.actions!.has('next')) {
@ -435,19 +503,30 @@ export default class API {
await this.addPullRequestComment(pullRequest, statusToLabel(status));
}
async getDifferences(branch: string) {
async getDifferences(source: string, destination: string = this.branch) {
if (source === destination) {
return [];
}
const rawDiff = await this.requestText({
url: `${this.repoURL}/diff/${branch}..${this.branch}`,
url: `${this.repoURL}/diff/${source}..${destination}`,
params: {
binary: false,
},
});
return parse(rawDiff).map(d => ({
newPath: d.newPath.replace(/b\//, ''),
binary: d.binary || /.svg$/.test(d.newPath),
newFile: d.status === 'added',
}));
return parse(rawDiff).map(d => {
const oldPath = d.oldPath?.replace(/b\//, '') || '';
const newPath = d.newPath?.replace(/b\//, '') || '';
const path = newPath || (oldPath as string);
return {
oldPath,
newPath,
binary: d.binary || /.svg$/.test(path),
status: d.status,
newFile: d.status === 'added',
path,
};
});
}
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
@ -472,7 +551,7 @@ export default class API {
const toDelete: DeleteEntry[] = [];
for (const diff of diffs) {
if (!files.some(file => file.path === diff.newPath)) {
toDelete.push({ path: diff.newPath, delete: true });
toDelete.push({ path: diff.path, delete: true });
}
}
@ -499,19 +578,6 @@ export default class API {
);
};
// Probe for a file's existence on `branch` by attempting to read it.
// A 404 from the API means "does not exist"; any other error is re-thrown.
async isFileExists(path: string, branch: string) {
  try {
    // Idiom fix: the original mixed `await` with a `.then/.catch` chain;
    // a plain try/catch expresses the same control flow directly.
    await this.readFile(path, null, { branch });
    return true;
  } catch (error) {
    if (error instanceof APIError && error.status === 404) {
      return false;
    }
    throw error;
  }
}
async getPullRequests(sourceBranch?: string) {
const sourceQuery = sourceBranch
? `source.branch.name = "${sourceBranch}"`

View File

@ -1,10 +1,9 @@
import semaphore, { Semaphore } from 'semaphore';
import { flow, trimStart } from 'lodash';
import { trimStart } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
filterByPropExtension,
then,
filterByExtension,
unsentRequest,
basename,
getBlobSHA,
@ -36,8 +35,10 @@ import {
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
localForage,
allEntriesByFolder,
} from 'netlify-cms-lib-util';
import NetlifyAuthenticator from 'netlify-cms-lib-auth';
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import { GitLfsClient } from './git-lfs-client';
@ -106,6 +107,10 @@ export default class BitbucketBackend implements Implementation {
this.lock = asyncLock();
}
// Marks this backend as git-based — presumably used to enable git-only
// features such as commit author/date sort fields (hidden on non-git
// backends per this commit's notes); confirm against the Implementation
// interface in lib-util.
isGitBackend() {
return true;
}
authComponent() {
return AuthenticationPage;
}
@ -121,12 +126,11 @@ export default class BitbucketBackend implements Implementation {
});
}
requestFunction = (req: ApiRequest) =>
this.getToken()
.then(
token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
)
.then(unsentRequest.performRequest);
// Attach the current OAuth token as a Bearer header, then dispatch the
// request.
requestFunction = async (req: ApiRequest) => {
  const withAuth = unsentRequest.withHeaders(
    { Authorization: `Bearer ${await this.getToken()}` },
    req,
  );
  return unsentRequest.performRequest(withAuth);
};
restoreUser(user: User) {
return this.authenticate(user);
@ -199,6 +203,7 @@ export default class BitbucketBackend implements Implementation {
// eslint-disable-next-line @typescript-eslint/camelcase
this.refreshToken = refresh_token;
this.refreshedTokenPromise = undefined;
// eslint-disable-next-line @typescript-eslint/camelcase
this.updateUserCredentials({ token, refresh_token });
return token;
@ -225,28 +230,22 @@ export default class BitbucketBackend implements Implementation {
? await this.refreshedTokenPromise
: this.token) as string;
return flow([
unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }) as (
req: ApiRequest,
) => ApiRequest,
unsentRequest.performRequest,
then(async (res: Response) => {
if (res.status === 401) {
const json = await res.json().catch(() => null);
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
const newToken = await this.getRefreshedAccessToken();
const reqWithNewToken = unsentRequest.withHeaders(
{
Authorization: `Bearer ${newToken}`,
},
req,
) as ApiRequest;
return unsentRequest.performRequest(reqWithNewToken);
}
}
return res;
}),
])(req);
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
const response: Response = await unsentRequest.performRequest(authorizedRequest);
if (response.status === 401) {
const json = await response.json().catch(() => null);
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
const newToken = await this.getRefreshedAccessToken();
const reqWithNewToken = unsentRequest.withHeaders(
{
Authorization: `Bearer ${newToken}`,
},
req,
) as ApiRequest;
return unsentRequest.performRequest(reqWithNewToken);
}
}
return response;
};
async entriesByFolder(folder: string, extension: string, depth: number) {
@ -255,10 +254,20 @@ export default class BitbucketBackend implements Implementation {
const listFiles = () =>
this.api!.listFiles(folder, depth).then(({ entries, cursor: c }) => {
cursor = c.mergeMeta({ extension });
return filterByPropExtension(extension, 'path')(entries);
return entries.filter(e => filterByExtension(e, extension));
});
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), 'BitBucket');
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
@ -266,16 +275,45 @@ export default class BitbucketBackend implements Implementation {
return files;
}
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const listFiles = () =>
this.api!.listAllFiles(folder, depth).then(filterByPropExtension(extension, 'path'));
// List every file under `folder` (down to `depth`), keeping only entries
// whose path matches `extension`.
async listAllFiles(folder: string, extension: string, depth: number) {
  const allFiles = await this.api!.listAllFiles(folder, depth);
  return allFiles.filter(f => filterByExtension(f, extension));
}
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), 'BitBucket');
// Load every entry in `folder`. Appears to support local tree caching
// (localForage + isShaExistsInBranch + getDifferences are wired through to
// lib-util's allEntriesByFolder) — confirm semantics in netlify-cms-lib-util.
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const head = await this.api!.defaultBranchCommitSha();
// Pin reads to the branch head so all files come from one consistent tree.
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const files = await allEntriesByFolder({
listAllFiles: () => this.listAllFiles(folder, extension, depth),
readFile,
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
apiName: API_NAME,
branch: this.branch,
localForage,
folder,
extension,
depth,
// The configured branch plus the head SHA fetched above stand in for a
// separate default-branch lookup.
getDefaultBranch: () => Promise.resolve({ name: this.branch, sha: head }),
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
getDifferences: (source, destination) => this.api!.getDifferences(source, destination),
getFileId: path => Promise.resolve(this.api!.getFileId(head, path)),
filterFile: file => filterByExtension(file, extension),
});
return files;
}
// Read the given files (content plus commit metadata) at the current branch
// head so all reads come from one consistent tree.
async entriesByFiles(files: ImplementationFile[]) {
  const head = await this.api!.defaultBranchCommitSha();
  const readFile = (path: string, id: string | null | undefined) =>
    this.api!.readFile(path, id, { head }) as Promise<string>;
  return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
}
getEntry(path: string) {
@ -403,15 +441,22 @@ export default class BitbucketBackend implements Implementation {
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
const extension = cursor.meta?.get('extension');
if (extension) {
entries = filterByPropExtension(extension as string, 'path')(entries);
entries = entries.filter(e => filterByExtension(e, extension));
newCursor = newCursor.mergeMeta({ extension });
}
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const entriesWithData = await entriesByFiles(
entries,
readFile,
this.api!.readFileMetadata.bind(this.api)!,
API_NAME,
);
return {
entries: await Promise.all(
entries.map(file =>
this.api!.readFile(file.path, file.id).then(data => ({ file, data: data as string })),
),
),
entries: entriesWithData,
cursor: newCursor,
};
});

View File

@ -1,3 +1,5 @@
declare module 'what-the-diff' {
export const parse: (rawDiff: string) => { newPath: string; binary: boolean; status: string }[];
export const parse: (
rawDiff: string,
) => { oldPath?: string; newPath?: string; binary: boolean; status: string }[];
}

View File

@ -1,7 +1,6 @@
import { flow } from 'lodash';
import { API as GitlabAPI } from 'netlify-cms-backend-gitlab';
import { Config as GitHubConfig, CommitAuthor } from 'netlify-cms-backend-gitlab/src/API';
import { unsentRequest, then, ApiRequest } from 'netlify-cms-lib-util';
import { unsentRequest, ApiRequest } from 'netlify-cms-lib-util';
type Config = GitHubConfig & { tokenPromise: () => Promise<string>; commitAuthor: CommitAuthor };
@ -15,16 +14,15 @@ export default class API extends GitlabAPI {
this.repoURL = '';
}
authenticateRequest = async (req: ApiRequest) =>
unsentRequest.withHeaders(
withAuthorizationHeaders = async (req: ApiRequest) => {
const token = await this.tokenPromise();
return unsentRequest.withHeaders(
{
Authorization: `Bearer ${await this.tokenPromise()}`,
Authorization: `Bearer ${token}`,
},
req,
);
request = async (req: ApiRequest) =>
flow([this.buildRequest, this.authenticateRequest, then(unsentRequest.performRequest)])(req);
};
hasWriteAccess = () => Promise.resolve(true);
}

View File

@ -133,6 +133,10 @@ export default class GitGateway implements Implementation {
this.backend = null;
}
isGitBackend() {
return true;
}
requestFunction = (req: ApiRequest) =>
this.tokenPromise!()
.then(
@ -357,7 +361,12 @@ export default class GitGateway implements Implementation {
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
const items = await entriesByFiles([{ path, id }], readFile, 'Git-Gateway');
const items = await entriesByFiles(
[{ path, id }],
readFile,
this.api!.readFileMetadata.bind(this.api),
'Git-Gateway',
);
const entry = items[0];
const pointerFile = parsePointerFile(entry.data);
if (!pointerFile.sha) {

View File

@ -12,6 +12,7 @@ import {
Entry as LibEntry,
PersistOptions,
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
DEFAULT_PR_BODY,
@ -24,6 +25,9 @@ import {
labelToStatus,
statusToLabel,
contentKeyFromBranch,
requestWithBackoff,
unsentRequest,
ApiRequest,
} from 'netlify-cms-lib-util';
import { Octokit } from '@octokit/rest';
@ -276,21 +280,31 @@ export default class API {
throw new APIError(error.message, responseStatus, API_NAME);
}
// Identity hook — presumably part of the interface requestWithBackoff
// expects (see Bitbucket/GitLab APIs, which do real work here); GitHub
// requests are already fully built before reaching it. TODO confirm in
// lib-util.
buildRequest(req: ApiRequest) {
return req;
}
// Perform a GitHub API request with backoff/retry via requestWithBackoff.
// `parser` converts the raw Response; any error is funneled through
// handleRequestError together with the last known HTTP status.
async request(
path: string,
options: Options = {},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
parser = (response: Response) => this.parseResponse(response),
) {
const headers = await this.requestHeaders(options.headers || {});
const url = this.urlFor(path, options);
// Default to 500 in case the failure happens before any response arrives.
let responseStatus = 500;
try {
const req = (unsentRequest.fromFetchArguments(url, {
...options,
headers,
}) as unknown) as ApiRequest;
const response = await requestWithBackoff(this, req);
responseStatus = response.status;
const parsedResponse = await parser(response);
return parsedResponse;
} catch (error) {
return this.handleRequestError(error, responseStatus);
}
}
nextUrlProcessor() {
@ -580,6 +594,28 @@ export default class API {
return content;
}
// Fetch the author and date of the last commit touching `path`, caching
// the result (keyed by blob `sha`) via the shared readFileMetadata helper.
async readFileMetadata(path: string, sha: string) {
const fetchFileMetadata = async () => {
try {
const result: Octokit.ReposListCommitsResponse = await this.request(
`${this.originRepoURL}/commits`,
{
params: { path, sha: this.branch },
},
);
const { commit } = result[0];
return {
author: commit.author.name || commit.author.email,
updatedOn: commit.author.date,
};
} catch (e) {
// Metadata is best effort — blank values rather than a failed load.
return { author: '', updatedOn: '' };
}
};
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
return fileMetadata;
}
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
const result: Octokit.GitGetBlobResponse = await this.request(`${repoURL}/git/blobs/${sha}`);

View File

@ -219,10 +219,12 @@ describe('github backend implementation', () => {
describe('entriesByFolder', () => {
const listFiles = jest.fn();
const readFile = jest.fn();
const readFileMetadata = jest.fn(() => Promise.resolve({ author: '', updatedOn: '' }));
const mockAPI = {
listFiles,
readFile,
readFileMetadata,
originRepoURL: 'originRepoURL',
};
@ -245,7 +247,7 @@ describe('github backend implementation', () => {
const expectedEntries = files
.slice(0, 20)
.map(({ id, path }) => ({ data: id, file: { path, id } }));
.map(({ id, path }) => ({ data: id, file: { path, id, author: '', updatedOn: '' } }));
const expectedCursor = Cursor.create({
actions: ['next', 'last'],
@ -267,11 +269,13 @@ describe('github backend implementation', () => {
describe('traverseCursor', () => {
const listFiles = jest.fn();
const readFile = jest.fn((path, id) => Promise.resolve(`${id}`));
const readFileMetadata = jest.fn(() => Promise.resolve({}));
const mockAPI = {
listFiles,
readFile,
originRepoURL: 'originRepoURL',
readFileMetadata,
};
const files = [];

View File

@ -20,7 +20,7 @@ import {
getMediaDisplayURL,
getMediaAsBlob,
Credentials,
filterByPropExtension,
filterByExtension,
Config,
ImplementationFile,
getPreviewStatus,
@ -104,6 +104,10 @@ export default class GitHub implements Implementation {
this.lock = asyncLock();
}
isGitBackend() {
return true;
}
authComponent() {
const wrappedAuthenticationPage = (props: Record<string, unknown>) => (
<AuthenticationPage {...props} backend={this} />
@ -319,7 +323,7 @@ export default class GitHub implements Implementation {
repoURL,
depth,
}).then(files => {
const filtered = filterByPropExtension(extension, 'path')(files);
const filtered = files.filter(file => filterByExtension(file, extension));
const result = this.getCursorAndFiles(filtered, 1);
cursor = result.cursor;
return result.files;
@ -328,7 +332,12 @@ export default class GitHub implements Implementation {
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
const files = await entriesByFolder(listFiles, readFile, API_NAME);
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
@ -342,14 +351,18 @@ export default class GitHub implements Implementation {
this.api!.listFiles(folder, {
repoURL,
depth,
}).then(files => {
return filterByPropExtension(extension, 'path')(files);
});
}).then(files => files.filter(file => filterByExtension(file, extension)));
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { repoURL }) as Promise<string>;
};
const files = await entriesByFolder(listFiles, readFile, API_NAME);
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
return files;
}
@ -359,7 +372,7 @@ export default class GitHub implements Implementation {
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }).catch(() => '') as Promise<string>;
return entriesByFiles(files, readFile, 'GitHub');
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
}
// Fetches a single entry.
@ -470,17 +483,20 @@ export default class GitHub implements Implementation {
}
}
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL: this.api!.originRepoURL }).catch(() => '') as Promise<
string
>;
const entries = await entriesByFiles(
result.files,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
return {
entries: await Promise.all(
result.files.map(file =>
this.api!.readFile(file.path, file.id, { repoURL: this.api!.originRepoURL }).then(
data => ({
file,
data: data as string,
}),
),
),
),
entries,
cursor: result.cursor,
};
}

View File

@ -22,11 +22,13 @@ import {
PreviewState,
parseContentKey,
branchFromContentKey,
requestWithBackoff,
readFileMetadata,
FetchError,
} from 'netlify-cms-lib-util';
import { Base64 } from 'js-base64';
import { Map, Set } from 'immutable';
import { Map } from 'immutable';
import { flow, partial, result, trimStart } from 'lodash';
import { CursorStore } from 'netlify-cms-lib-util/src/Cursor';
export const API_NAME = 'GitLab';
@ -75,6 +77,8 @@ type GitLabCommitDiff = {
new_path: string;
old_path: string;
new_file: boolean;
renamed_file: boolean;
deleted_file: boolean;
};
enum GitLabCommitStatuses {
@ -135,8 +139,31 @@ type GitLabRepo = {
};
type GitLabBranch = {
name: string;
developers_can_push: boolean;
developers_can_merge: boolean;
commit: {
id: string;
};
};
type GitLabCommitRef = {
type: string;
name: string;
};
type GitLabCommit = {
id: string;
short_id: string;
title: string;
author_name: string;
author_email: string;
authored_date: string;
committer_name: string;
committer_email: string;
committed_date: string;
created_at: string;
message: string;
};
export const getMaxAccess = (groups: { group_access_level: number }[]) => {
@ -169,22 +196,28 @@ export default class API {
this.initialWorkflowStatus = config.initialWorkflowStatus;
}
withAuthorizationHeaders = (req: ApiRequest) =>
unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${this.token}` } : {}, req);
// Add the Bearer token header when a token is available; resolves to the
// decorated request so it slots into the async buildRequest pipeline.
withAuthorizationHeaders = (req: ApiRequest) => {
  const headers = this.token ? { Authorization: `Bearer ${this.token}` } : {};
  return Promise.resolve(unsentRequest.withHeaders(headers, req) as ApiRequest);
};
buildRequest = (req: ApiRequest) =>
flow([
unsentRequest.withRoot(this.apiRoot),
this.withAuthorizationHeaders,
unsentRequest.withTimestamp,
])(req);
// Prepare an outgoing request: prefix the API root, attach authorization
// headers, then stamp it with a timestamp.
buildRequest = async (req: ApiRequest) => {
  const rooted = unsentRequest.withRoot(this.apiRoot)(req) as ApiRequest;
  const authorized = await this.withAuthorizationHeaders(rooted);
  return unsentRequest.withTimestamp(authorized) as ApiRequest;
};
request = async (req: ApiRequest): Promise<Response> =>
flow([
this.buildRequest,
unsentRequest.performRequest,
p => p.catch((err: Error) => Promise.reject(new APIError(err.message, null, API_NAME))),
])(req);
// Perform an API request with automatic backoff/retry for rate limiting.
// Any failure is surfaced as an APIError tagged with this backend's name.
request = async (req: ApiRequest): Promise<Response> => {
  try {
    // `await` is required here: without it a rejected promise escapes the
    // try/catch and is never wrapped in an APIError.
    return await requestWithBackoff(this, req);
  } catch (err) {
    throw new APIError(err.message, null, API_NAME);
  }
};
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
@ -204,6 +237,7 @@ export default class API {
shared_with_groups: sharedWithGroups,
permissions,
}: GitLabRepo = await this.requestJSON(this.repoURL);
const { project_access: projectAccess, group_access: groupAccess } = permissions;
if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {
return true;
@ -221,11 +255,13 @@ export default class API {
// developer access
if (maxAccess.group_access_level >= this.WRITE_ACCESS) {
// check permissions to merge and push
const branch: GitLabBranch = await this.requestJSON(
`${this.repoURL}/repository/branches/${this.branch}`,
).catch(() => ({}));
if (branch.developers_can_merge && branch.developers_can_push) {
return true;
try {
const branch = await this.getDefaultBranch();
if (branch.developers_can_merge && branch.developers_can_push) {
return true;
}
} catch (e) {
console.log('Failed getting default branch', e);
}
}
}
@ -250,27 +286,46 @@ export default class API {
return content;
};
// Resolve the last-commit author and date for `path` on the current branch,
// cached by `sha` through the shared readFileMetadata helper.
async readFileMetadata(path: string, sha: string) {
const fetchFileMetadata = async () => {
try {
const result: GitLabCommit[] = await this.requestJSON({
url: `${this.repoURL}/repository/commits`,
// eslint-disable-next-line @typescript-eslint/camelcase
params: { path, ref_name: this.branch },
});
const commit = result[0];
return {
author: commit.author_name || commit.author_email,
updatedOn: commit.authored_date,
};
} catch (e) {
// Metadata is best effort — blank values rather than a failed load.
return { author: '', updatedOn: '' };
}
};
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
return fileMetadata;
}
getCursorFromHeaders = (headers: Headers) => {
// indices and page counts are assumed to be zero-based, but the
// indices and page counts returned from GitLab are one-based
const index = parseInt(headers.get('X-Page') as string, 10) - 1;
const pageCount = parseInt(headers.get('X-Total-Pages') as string, 10) - 1;
const page = parseInt(headers.get('X-Page') as string, 10);
const pageCount = parseInt(headers.get('X-Total-Pages') as string, 10);
const pageSize = parseInt(headers.get('X-Per-Page') as string, 10);
const count = parseInt(headers.get('X-Total') as string, 10);
const links = parseLinkHeader(headers.get('Link') as string);
const links = parseLinkHeader(headers.get('Link'));
const actions = Map(links)
.keySeq()
.flatMap(key =>
(key === 'prev' && index > 0) ||
(key === 'next' && index < pageCount) ||
(key === 'first' && index > 0) ||
(key === 'last' && index < pageCount)
(key === 'prev' && page > 1) ||
(key === 'next' && page < pageCount) ||
(key === 'first' && page > 1) ||
(key === 'last' && page < pageCount)
? [key]
: [],
);
return Cursor.create({
actions,
meta: { index, count, pageSize, pageCount },
meta: { page, count, pageSize, pageCount },
data: { links },
});
};
@ -291,56 +346,28 @@ export default class API {
flow([
unsentRequest.withMethod('GET'),
this.request,
p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]),
p =>
Promise.all([
p.then(this.getCursor),
p.then(this.responseToJSON).catch((e: FetchError) => {
if (e.status === 404) {
return [];
} else {
throw e;
}
}),
]),
then(([cursor, entries]: [Cursor, {}[]]) => ({ cursor, entries })),
])(req);
reversableActions = Map({
first: 'last',
last: 'first',
next: 'prev',
prev: 'next',
});
reverseCursor = (cursor: Cursor) => {
const pageCount = cursor.meta!.get('pageCount', 0) as number;
const currentIndex = cursor.meta!.get('index', 0) as number;
const newIndex = pageCount - currentIndex;
const links = cursor.data!.get('links', Map()) as Map<string, string>;
const reversedLinks = links.mapEntries(tuple => {
const [k, v] = tuple as string[];
return [this.reversableActions.get(k) || k, v];
});
const reversedActions = cursor.actions!.map(
action => this.reversableActions.get(action as string) || (action as string),
);
return cursor.updateStore((store: CursorStore) =>
store!
.setIn(['meta', 'index'], newIndex)
.setIn(['data', 'links'], reversedLinks)
.set('actions', (reversedActions as unknown) as Set<string>),
);
};
// The exported listFiles and traverseCursor reverse the direction
// of the cursors, since GitLab's pagination sorts the opposite way
// we want to sort by default (it sorts by filename _descending_,
// while the CMS defaults to sorting by filename _ascending_, at
// least in the current GitHub backend). This should eventually be
// refactored.
listFiles = async (path: string, recursive = false) => {
const firstPageCursor = await this.fetchCursor({
const { entries, cursor } = await this.fetchCursorAndEntries({
url: `${this.repoURL}/repository/tree`,
params: { path, ref: this.branch, recursive },
});
const lastPageLink = firstPageCursor.data.getIn(['links', 'last']);
const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink);
return {
files: entries.filter(({ type }) => type === 'blob').reverse(),
cursor: this.reverseCursor(cursor),
files: entries.filter(({ type }) => type === 'blob'),
cursor,
};
};
@ -348,8 +375,8 @@ export default class API {
const link = cursor.data!.getIn(['links', action]);
const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link);
return {
entries: entries.filter(({ type }) => type === 'blob').reverse(),
cursor: this.reverseCursor(newCursor),
entries: entries.filter(({ type }) => type === 'blob'),
cursor: newCursor,
};
};
@ -527,19 +554,39 @@ export default class API {
return mergeRequests[0];
}
async getDifferences(to: string) {
async getDifferences(to: string, from = this.branch) {
if (to === from) {
return [];
}
const result: { diffs: GitLabCommitDiff[] } = await this.requestJSON({
url: `${this.repoURL}/repository/compare`,
params: {
from: this.branch,
from,
to,
},
});
return result.diffs.map(d => ({
...d,
binary: d.diff.startsWith('Binary') || /.svg$/.test(d.new_path),
}));
if (result.diffs.length >= 1000) {
throw new APIError('Diff limit reached', null, API_NAME);
}
return result.diffs.map(d => {
let status = 'modified';
if (d.new_file) {
status = 'added';
} else if (d.deleted_file) {
status = 'deleted';
} else if (d.renamed_file) {
status = 'renamed';
}
return {
status,
oldPath: d.old_path,
newPath: d.new_path,
newFile: d.new_file,
binary: d.diff.startsWith('Binary') || /.svg$/.test(d.new_path),
};
});
}
async retrieveMetadata(contentKey: string) {
@ -547,15 +594,15 @@ export default class API {
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
const diff = await this.getDifferences(mergeRequest.sha);
const { old_path: path, new_file: newFile } = diff.find(d => !d.binary) as {
old_path: string;
new_file: boolean;
const { oldPath: path, newFile: newFile } = diff.find(d => !d.binary) as {
oldPath: string;
newFile: boolean;
};
const mediaFiles = await Promise.all(
diff
.filter(d => d.old_path !== path)
.filter(d => d.oldPath !== path)
.map(async d => {
const path = d.new_path;
const path = d.newPath;
const id = await this.getFileId(path, branch);
return { path, id };
}),
@ -662,8 +709,8 @@ export default class API {
// mark files for deletion
for (const diff of diffs) {
if (!items.some(item => item.path === diff.new_path)) {
items.push({ action: CommitAction.DELETE, path: diff.new_path });
if (!items.some(item => item.path === diff.newPath)) {
items.push({ action: CommitAction.DELETE, path: diff.newPath });
}
}
@ -730,6 +777,23 @@ export default class API {
});
}
async getDefaultBranch() {
const branch: GitLabBranch = await this.requestJSON(
`${this.repoURL}/repository/branches/${encodeURIComponent(this.branch)}`,
);
return branch;
}
async isShaExistsInBranch(branch: string, sha: string) {
const refs: GitLabCommitRef[] = await this.requestJSON({
url: `${this.repoURL}/repository/commits/${sha}/refs`,
params: {
type: 'branch',
},
});
return refs.some(r => r.name === branch);
}
async deleteBranch(branch: string) {
await this.request({
method: 'DELETE',

View File

@ -2,6 +2,8 @@ import API, { getMaxAccess } from '../API';
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));
jest.spyOn(console, 'log').mockImplementation(() => undefined);
describe('GitLab API', () => {
beforeEach(() => {
jest.resetAllMocks();
@ -132,9 +134,14 @@ describe('GitLab API', () => {
permissions: { project_access: null, group_access: null },
shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
});
api.requestJSON.mockRejectedValue(new Error('Not Found'));
const error = new Error('Not Found');
api.requestJSON.mockRejectedValue(error);
await expect(api.hasWriteAccess()).resolves.toBe(false);
expect(console.log).toHaveBeenCalledTimes(1);
expect(console.log).toHaveBeenCalledWith('Failed getting default branch', error);
});
});

View File

@ -93,6 +93,14 @@ const resp = {
id: 1,
},
},
branch: {
success: {
name: 'master',
commit: {
id: 1,
},
},
},
project: {
success: {
permissions: {
@ -190,6 +198,14 @@ describe('gitlab backend', () => {
.reply(200, projectResponse || resp.project.success);
}
function interceptBranch(backend, { branch = 'master' } = {}) {
const api = mockApi(backend);
api
.get(`${expectedRepoUrl}/repository/branches/${encodeURIComponent(branch)}`)
.query(true)
.reply(200, resp.branch.success);
}
function parseQuery(uri) {
const query = uri.split('?')[1];
if (!query) {
@ -273,6 +289,17 @@ describe('gitlab backend', () => {
.get(url)
.query(true)
.reply(200, mockRepo.files[path]);
api
.get(`${expectedRepoUrl}/repository/commits`)
.query(({ path }) => path === path)
.reply(200, [
{
author_name: 'author_name',
author_email: 'author_email',
authored_date: 'authored_date',
},
]);
}
function sharedSetup() {
@ -397,6 +424,7 @@ describe('gitlab backend', () => {
expect(entries).toEqual({
cursor: expect.any(Cursor),
pagination: 1,
entries: expect.arrayContaining(
tree.map(file => expect.objectContaining({ path: file.path })),
),
@ -406,6 +434,7 @@ describe('gitlab backend', () => {
it('returns all entries from folder collection', async () => {
const tree = mockRepo.tree[collectionManyEntriesConfig.folder];
interceptBranch(backend);
tree.forEach(file => interceptFiles(backend, file.path));
interceptCollection(backend, collectionManyEntriesConfig, { repeat: 5 });
@ -431,11 +460,11 @@ describe('gitlab backend', () => {
expect(entries.entries).toHaveLength(2);
});
it('returns last page from paginated folder collection tree', async () => {
it('returns first page from paginated folder collection tree', async () => {
const tree = mockRepo.tree[collectionManyEntriesConfig.folder];
const pageTree = tree.slice(-20);
const pageTree = tree.slice(0, 20);
pageTree.forEach(file => interceptFiles(backend, file.path));
interceptCollection(backend, collectionManyEntriesConfig, { page: 25 });
interceptCollection(backend, collectionManyEntriesConfig, { page: 1 });
const entries = await backend.listEntries(fromJS(collectionManyEntriesConfig));
expect(entries.entries).toEqual(
@ -450,13 +479,13 @@ describe('gitlab backend', () => {
it('returns complete last page of paginated tree', async () => {
const tree = mockRepo.tree[collectionManyEntriesConfig.folder];
tree.slice(-20).forEach(file => interceptFiles(backend, file.path));
interceptCollection(backend, collectionManyEntriesConfig, { page: 25 });
tree.slice(0, 20).forEach(file => interceptFiles(backend, file.path));
interceptCollection(backend, collectionManyEntriesConfig, { page: 1 });
const entries = await backend.listEntries(fromJS(collectionManyEntriesConfig));
const nextPageTree = tree.slice(-40, -20);
const nextPageTree = tree.slice(20, 40);
nextPageTree.forEach(file => interceptFiles(backend, file.path));
interceptCollection(backend, collectionManyEntriesConfig, { page: 24 });
interceptCollection(backend, collectionManyEntriesConfig, { page: 2 });
const nextPage = await backend.traverseCursor(entries.cursor, 'next');
expect(nextPage.entries).toEqual(
@ -466,15 +495,16 @@ describe('gitlab backend', () => {
);
expect(nextPage.entries).toHaveLength(20);
const prevPageTree = tree.slice(-20);
const lastPageTree = tree.slice(-20);
lastPageTree.forEach(file => interceptFiles(backend, file.path));
interceptCollection(backend, collectionManyEntriesConfig, { page: 25 });
const prevPage = await backend.traverseCursor(nextPage.cursor, 'prev');
expect(prevPage.entries).toEqual(
const lastPage = await backend.traverseCursor(nextPage.cursor, 'last');
expect(lastPage.entries).toEqual(
expect.arrayContaining(
prevPageTree.map(file => expect.objectContaining({ path: file.path })),
lastPageTree.map(file => expect.objectContaining({ path: file.path })),
),
);
expect(prevPage.entries).toHaveLength(20);
expect(lastPage.entries).toHaveLength(20);
});
});

View File

@ -29,6 +29,9 @@ import {
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
localForage,
allEntriesByFolder,
filterByExtension,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
@ -80,6 +83,10 @@ export default class GitLab implements Implementation {
this.lock = asyncLock();
}
isGitBackend() {
return true;
}
authComponent() {
return AuthenticationPage;
}
@ -136,7 +143,7 @@ export default class GitLab implements Implementation {
) {
// gitlab paths include the root folder
const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
return file.name.endsWith('.' + extension) && fileFolder.split('/').length <= depth;
return filterByExtension(file, extension) && fileFolder.split('/').length <= depth;
}
async entriesByFolder(folder: string, extension: string, depth: number) {
@ -148,25 +155,52 @@ export default class GitLab implements Implementation {
return files.filter(file => this.filterFile(folder, file, extension, depth));
});
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), API_NAME);
const files = await entriesByFolder(
listFiles,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return files;
}
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const listFiles = () =>
this.api!.listAllFiles(folder, depth > 1).then(files =>
files.filter(file => this.filterFile(folder, file, extension, depth)),
);
async listAllFiles(folder: string, extension: string, depth: number) {
const files = await this.api!.listAllFiles(folder, depth > 1);
const filtered = files.filter(file => this.filterFile(folder, file, extension, depth));
return filtered;
}
const files = await entriesByFolder(listFiles, this.api!.readFile.bind(this.api!), API_NAME);
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const files = await allEntriesByFolder({
listAllFiles: () => this.listAllFiles(folder, extension, depth),
readFile: this.api!.readFile.bind(this.api!),
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
apiName: API_NAME,
branch: this.branch,
localForage,
folder,
extension,
depth,
getDefaultBranch: () =>
this.api!.getDefaultBranch().then(b => ({ name: b.name, sha: b.commit.id })),
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
getDifferences: (to, from) => this.api!.getDifferences(to, from),
getFileId: path => this.api!.getFileId(path, this.branch),
filterFile: file => this.filterFile(folder, file, extension, depth),
});
return files;
}
entriesByFiles(files: ImplementationFile[]) {
return entriesByFiles(files, this.api!.readFile.bind(this.api!), API_NAME);
return entriesByFiles(
files,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
}
// Fetches a single entry.
@ -258,12 +292,14 @@ export default class GitLab implements Implementation {
entries = entries.filter(f => this.filterFile(folder, f, extension, depth));
newCursor = newCursor.mergeMeta({ folder, extension, depth });
}
const entriesWithData = await entriesByFiles(
entries,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api)!,
API_NAME,
);
return {
entries: await Promise.all(
entries.map(file =>
this.api!.readFile(file.path, file.id).then(data => ({ file, data: data as string })),
),
),
entries: entriesWithData,
cursor: newCursor,
};
});

View File

@ -58,6 +58,10 @@ export default class ProxyBackend implements Implementation {
this.options = options;
}
isGitBackend() {
return false;
}
authComponent() {
return AuthenticationPage;
}

View File

@ -97,6 +97,10 @@ export default class TestBackend implements Implementation {
this.options = options;
}
isGitBackend() {
return false;
}
authComponent() {
return AuthenticationPage;
}

View File

@ -186,6 +186,7 @@ describe('Backend', () => {
expect(result).toEqual({
entry: {
author: '',
mediaFiles: [],
collection: 'posts',
slug: 'slug',
@ -196,6 +197,7 @@ describe('Backend', () => {
label: null,
metaData: null,
isModification: null,
updatedOn: '',
},
});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
@ -224,6 +226,7 @@ describe('Backend', () => {
expect(result).toEqual({
entry: {
author: '',
mediaFiles: [{ id: '1' }],
collection: 'posts',
slug: 'slug',
@ -234,6 +237,7 @@ describe('Backend', () => {
label: null,
metaData: null,
isModification: null,
updatedOn: '',
},
});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
@ -367,6 +371,7 @@ describe('Backend', () => {
const result = await backend.unpublishedEntry(state, collection, slug);
expect(result).toEqual({
author: '',
collection: 'posts',
slug: '',
path: 'path',
@ -377,6 +382,7 @@ describe('Backend', () => {
metaData: {},
isModification: true,
mediaFiles: [{ id: '1', draft: true }],
updatedOn: '',
});
});
});

View File

@ -2,6 +2,11 @@ import { fromJS } from 'immutable';
import { applyDefaults, detectProxyServer, handleLocalBackend } from '../config';
jest.spyOn(console, 'log').mockImplementation(() => {});
jest.mock('coreSrc/backend', () => {
return {
currentBackend: jest.fn(() => ({ isGitBackend: jest.fn(() => true) })),
};
});
describe('config', () => {
describe('applyDefaults', () => {

View File

@ -42,6 +42,7 @@ describe('entries', () => {
expect(actions[0]).toEqual({
payload: {
author: '',
collection: undefined,
data: {},
isModification: null,
@ -52,6 +53,7 @@ describe('entries', () => {
path: '',
raw: '',
slug: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
});
@ -71,6 +73,7 @@ describe('entries', () => {
expect(actions[0]).toEqual({
payload: {
author: '',
collection: undefined,
data: { title: 'title', boolean: true },
isModification: null,
@ -81,6 +84,7 @@ describe('entries', () => {
path: '',
raw: '',
slug: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
});
@ -102,6 +106,7 @@ describe('entries', () => {
expect(actions[0]).toEqual({
payload: {
author: '',
collection: undefined,
data: { title: '&lt;script&gt;alert(&#039;hello&#039;)&lt;/script&gt;' },
isModification: null,
@ -112,6 +117,7 @@ describe('entries', () => {
path: '',
raw: '',
slug: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
});

View File

@ -0,0 +1,108 @@
import { fromJS } from 'immutable';
import { searchEntries } from '../search';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
jest.mock('../../reducers');
jest.mock('../../backend');
jest.mock('../../integrations');
describe('search', () => {
describe('searchEntries', () => {
const { currentBackend } = require('../../backend');
const { selectIntegration } = require('../../reducers');
const { getIntegrationProvider } = require('../../integrations');
beforeEach(() => {
jest.resetAllMocks();
});
it('should search entries using integration', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: fromJS({}),
});
selectIntegration.mockReturnValue('search_integration');
currentBackend.mockReturnValue({});
const response = { entries: [{ name: '1' }, { name: '' }], pagination: 1 };
const integration = { search: jest.fn().mockResolvedValue(response) };
getIntegrationProvider.mockReturnValue(integration);
await store.dispatch(searchEntries('find me'));
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'SEARCH_ENTRIES_REQUEST',
payload: {
searchTerm: 'find me',
page: 0,
},
});
expect(actions[1]).toEqual({
type: 'SEARCH_ENTRIES_SUCCESS',
payload: {
searchTerm: 'find me',
entries: response.entries,
page: response.pagination,
},
});
expect(integration.search).toHaveBeenCalledTimes(1);
expect(integration.search).toHaveBeenCalledWith(['posts', 'pages'], 'find me', 0);
});
it('should search entries using backend', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: fromJS({}),
});
const response = { entries: [{ name: '1' }, { name: '' }], pagination: 1 };
const backend = { search: jest.fn().mockResolvedValue(response) };
currentBackend.mockReturnValue(backend);
await store.dispatch(searchEntries('find me'));
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'SEARCH_ENTRIES_REQUEST',
payload: {
searchTerm: 'find me',
page: 0,
},
});
expect(actions[1]).toEqual({
type: 'SEARCH_ENTRIES_SUCCESS',
payload: {
searchTerm: 'find me',
entries: response.entries,
page: response.pagination,
},
});
expect(backend.search).toHaveBeenCalledTimes(1);
expect(backend.search).toHaveBeenCalledWith(
[fromJS({ name: 'posts' }), fromJS({ name: 'pages' })],
'find me',
);
});
it('should ignore identical search', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: fromJS({ isFetching: true, term: 'find me' }),
});
await store.dispatch(searchEntries('find me'));
const actions = store.getActions();
expect(actions).toHaveLength(0);
});
});
});

View File

@ -4,6 +4,8 @@ import { trimStart, get, isPlainObject } from 'lodash';
import { authenticateUser } from 'Actions/auth';
import * as publishModes from 'Constants/publishModes';
import { validateConfig } from 'Constants/configSchema';
import { selectDefaultSortableFields } from '../reducers/collections';
import { currentBackend } from 'coreSrc/backend';
export const CONFIG_REQUEST = 'CONFIG_REQUEST';
export const CONFIG_SUCCESS = 'CONFIG_SUCCESS';
@ -71,18 +73,26 @@ export function applyDefaults(config) {
if (collection.has('media_folder') && !collection.has('public_folder')) {
collection = collection.set('public_folder', collection.get('media_folder'));
}
return collection.set('folder', trimStart(folder, '/'));
collection = collection.set('folder', trimStart(folder, '/'));
}
const files = collection.get('files');
if (files) {
return collection.set(
collection = collection.set(
'files',
files.map(file => {
return file.set('file', trimStart(file.get('file'), '/'));
}),
);
}
if (!collection.has('sortableFields')) {
const backend = currentBackend(config);
const defaultSortable = selectDefaultSortableFields(collection, backend);
collection = collection.set('sortableFields', fromJS(defaultSortable));
}
return collection;
}),
);
});

View File

@ -1,22 +1,31 @@
import { fromJS, List, Map, Set } from 'immutable';
import { isEqual } from 'lodash';
import { isEqual, orderBy } from 'lodash';
import { actions as notifActions } from 'redux-notifications';
import { serializeValues } from '../lib/serializeEntryValues';
import { currentBackend, Backend } from '../backend';
import { getIntegrationProvider } from '../integrations';
import { selectIntegration, selectPublishedSlugs } from '../reducers';
import { selectFields, updateFieldByKey } from '../reducers/collections';
import { selectFields, updateFieldByKey, selectSortDataPath } from '../reducers/collections';
import { selectCollectionEntriesCursor } from '../reducers/cursors';
import { Cursor, ImplementationMediaFile } from 'netlify-cms-lib-util';
import { createEntry, EntryValue } from '../valueObjects/Entry';
import AssetProxy, { createAssetProxy } from '../valueObjects/AssetProxy';
import ValidationErrorTypes from '../constants/validationErrorTypes';
import { addAssets, getAsset } from './media';
import { Collection, EntryMap, State, EntryFields, EntryField } from '../types/redux';
import {
Collection,
EntryMap,
State,
EntryFields,
EntryField,
SortDirection,
} from '../types/redux';
import { ThunkDispatch } from 'redux-thunk';
import { AnyAction } from 'redux';
import { waitForMediaLibraryToLoad, loadMedia } from './mediaLibrary';
import { waitUntil } from './waitUntil';
import { selectIsFetching, selectEntriesSortFields } from '../reducers/entries';
const { notifSend } = notifActions;
@ -31,6 +40,10 @@ export const ENTRIES_REQUEST = 'ENTRIES_REQUEST';
export const ENTRIES_SUCCESS = 'ENTRIES_SUCCESS';
export const ENTRIES_FAILURE = 'ENTRIES_FAILURE';
export const SORT_ENTRIES_REQUEST = 'SORT_ENTRIES_REQUEST';
export const SORT_ENTRIES_SUCCESS = 'SORT_ENTRIES_SUCCESS';
export const SORT_ENTRIES_FAILURE = 'SORT_ENTRIES_FAILURE';
export const DRAFT_CREATE_FROM_ENTRY = 'DRAFT_CREATE_FROM_ENTRY';
export const DRAFT_CREATE_EMPTY = 'DRAFT_CREATE_EMPTY';
export const DRAFT_DISCARD = 'DRAFT_DISCARD';
@ -124,6 +137,69 @@ export function entriesFailed(collection: Collection, error: Error) {
};
}
export function sortByField(
collection: Collection,
key: string,
direction: SortDirection = SortDirection.Ascending,
) {
return async (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const backend = currentBackend(state.config);
// if we're already fetching we update the sort key, but skip loading entries
const isFetching = selectIsFetching(state.entries, collection.get('name'));
dispatch({
type: SORT_ENTRIES_REQUEST,
payload: {
collection: collection.get('name'),
key,
direction,
},
});
if (isFetching) {
return;
}
try {
const integration = selectIntegration(state, collection.get('name'), 'listEntries');
const provider: Backend = integration
? getIntegrationProvider(state.integrations, backend.getToken, integration)
: backend;
let entries = await provider.listAllEntries(collection);
const sortFields = selectEntriesSortFields(getState().entries, collection.get('name'));
if (sortFields && sortFields.length > 0) {
const keys = sortFields.map(v => selectSortDataPath(collection, v.get('key')));
const orders = sortFields.map(v =>
v.get('direction') === SortDirection.Ascending ? 'asc' : 'desc',
);
entries = orderBy(entries, keys, orders);
}
dispatch({
type: SORT_ENTRIES_SUCCESS,
payload: {
collection: collection.get('name'),
key,
direction,
entries,
},
});
} catch (error) {
dispatch({
type: SORT_ENTRIES_FAILURE,
payload: {
collection: collection.get('name'),
key,
direction,
error,
},
});
}
};
}
export function entryPersisting(collection: Collection, entry: EntryMap) {
return {
type: ENTRY_PERSIST_REQUEST,
@ -383,11 +459,17 @@ const addAppendActionsToCursor = (cursor: Cursor) => {
};
export function loadEntries(collection: Collection, page = 0) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
return async (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
if (collection.get('isFetching')) {
return;
}
const state = getState();
const sortFields = selectEntriesSortFields(state.entries, collection.get('name'));
if (sortFields && sortFields.length > 0) {
const field = sortFields[0];
return dispatch(sortByField(collection, field.get('key'), field.get('direction')));
}
const backend = currentBackend(state.config);
const integration = selectIntegration(state, collection.get('name'), 'listEntries');
const provider = integration
@ -395,11 +477,15 @@ export function loadEntries(collection: Collection, page = 0) {
: backend;
const append = !!(page && !isNaN(page) && page > 0);
dispatch(entriesLoading(collection));
provider
.listEntries(collection, page)
.then((response: { cursor: typeof Cursor }) => ({
...response,
try {
let response: {
cursor: Cursor;
pagination: number;
entries: EntryValue[];
} = await provider.listEntries(collection, page);
response = {
...response,
// The only existing backend using the pagination system is the
// Algolia integration, which is also the only integration used
// to list entries. Thus, this checking for an integration can
@ -413,33 +499,32 @@ export function loadEntries(collection: Collection, page = 0) {
data: { nextPage: page + 1 },
})
: Cursor.create(response.cursor),
}))
.then((response: { cursor: Cursor; pagination: number; entries: EntryValue[] }) =>
dispatch(
entriesLoaded(
collection,
response.cursor.meta!.get('usingOldPaginationAPI')
? response.entries.reverse()
: response.entries,
response.pagination,
addAppendActionsToCursor(response.cursor),
append,
),
};
dispatch(
entriesLoaded(
collection,
response.cursor.meta!.get('usingOldPaginationAPI')
? response.entries.reverse()
: response.entries,
response.pagination,
addAppendActionsToCursor(response.cursor),
append,
),
)
.catch((err: Error) => {
dispatch(
notifSend({
message: {
details: err,
key: 'ui.toast.onFailToLoadEntries',
},
kind: 'danger',
dismissAfter: 8000,
}),
);
return Promise.reject(dispatch(entriesFailed(collection, err)));
});
);
} catch (err) {
dispatch(
notifSend({
message: {
details: err,
key: 'ui.toast.onFailToLoadEntries',
},
kind: 'danger',
dismissAfter: 8000,
}),
);
return Promise.reject(dispatch(entriesFailed(collection, err)));
}
};
}
@ -473,10 +558,10 @@ export function traverseCollectionCursor(collection: Collection, action: string)
try {
dispatch(entriesLoading(collection));
const { entries, cursor: newCursor } = await traverseCursor(backend, cursor, realAction);
// Pass null for the old pagination argument - this will
// eventually be removed.
const pagination = newCursor.meta?.get('page');
return dispatch(
entriesLoaded(collection, entries, null, addAppendActionsToCursor(newCursor), append),
entriesLoaded(collection, entries, pagination, addAppendActionsToCursor(newCursor), append),
);
} catch (err) {
console.error(err);
@ -484,7 +569,7 @@ export function traverseCollectionCursor(collection: Collection, action: string)
notifSend({
message: {
details: err,
key: 'ui.toast.onFailToPersist',
key: 'ui.toast.onFailToLoadEntries',
},
kind: 'danger',
dismissAfter: 8000,

View File

@ -1,6 +1,10 @@
import { currentBackend } from 'coreSrc/backend';
import { getIntegrationProvider } from 'Integrations';
import { selectIntegration } from 'Reducers';
import { ThunkDispatch } from 'redux-thunk';
import { AnyAction } from 'redux';
import { State } from '../types/redux';
import { currentBackend } from '../backend';
import { getIntegrationProvider } from '../integrations';
import { selectIntegration } from '../reducers';
import { EntryValue } from '../valueObjects/Entry';
/*
* Constant Declarations
@ -19,14 +23,14 @@ export const SEARCH_CLEAR = 'SEARCH_CLEAR';
* Simple Action Creators (Internal)
* We still need to export them for tests
*/
export function searchingEntries(searchTerm) {
export function searchingEntries(searchTerm: string, page: number) {
return {
type: SEARCH_ENTRIES_REQUEST,
payload: { searchTerm },
payload: { searchTerm, page },
};
}
export function searchSuccess(searchTerm, entries, page) {
export function searchSuccess(searchTerm: string, entries: EntryValue[], page: number) {
return {
type: SEARCH_ENTRIES_SUCCESS,
payload: {
@ -37,7 +41,7 @@ export function searchSuccess(searchTerm, entries, page) {
};
}
export function searchFailure(searchTerm, error) {
export function searchFailure(searchTerm: string, error: Error) {
return {
type: SEARCH_ENTRIES_FAILURE,
payload: {
@ -47,7 +51,12 @@ export function searchFailure(searchTerm, error) {
};
}
export function querying(namespace, collection, searchFields, searchTerm) {
export function querying(
namespace: string,
collection: string,
searchFields: string[],
searchTerm: string,
) {
return {
type: QUERY_REQUEST,
payload: {
@ -59,7 +68,18 @@ export function querying(namespace, collection, searchFields, searchTerm) {
};
}
export function querySuccess(namespace, collection, searchFields, searchTerm, response) {
type Response = {
entries: EntryValue[];
pagination: number;
};
export function querySuccess(
namespace: string,
collection: string,
searchFields: string[],
searchTerm: string,
response: Response,
) {
return {
type: QUERY_SUCCESS,
payload: {
@ -72,7 +92,13 @@ export function querySuccess(namespace, collection, searchFields, searchTerm, re
};
}
export function queryFailure(namespace, collection, searchFields, searchTerm, error) {
export function queryFailure(
namespace: string,
collection: string,
searchFields: string[],
searchTerm: string,
error: Error,
) {
return {
type: QUERY_FAILURE,
payload: {
@ -98,17 +124,27 @@ export function clearSearch() {
*/
// SearchEntries will search for complete entries in all collections.
export function searchEntries(searchTerm, page = 0) {
return (dispatch, getState) => {
dispatch(searchingEntries(searchTerm));
export function searchEntries(searchTerm: string, page = 0) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
const state = getState();
const { search } = state;
const backend = currentBackend(state.config);
const allCollections = state.collections.keySeq().toArray();
const collections = allCollections.filter(collection =>
selectIntegration(state, collection, 'search'),
selectIntegration(state, collection as string, 'search'),
);
const integration = selectIntegration(state, collections[0], 'search');
const integration = selectIntegration(state, collections[0] as string, 'search');
// avoid duplicate searches
if (
search.get('isFetching') === true &&
search.get('term') === searchTerm &&
// if an integration doesn't exist, 'page' is not used
(search.get('page') === page || !integration)
) {
return;
}
dispatch(searchingEntries(searchTerm, page));
const searchPromise = integration
? getIntegrationProvider(state.integrations, backend.getToken, integration).search(
@ -119,16 +155,22 @@ export function searchEntries(searchTerm, page = 0) {
: backend.search(state.collections.valueSeq().toArray(), searchTerm);
return searchPromise.then(
response => dispatch(searchSuccess(searchTerm, response.entries, response.pagination)),
error => dispatch(searchFailure(searchTerm, error)),
(response: Response) =>
dispatch(searchSuccess(searchTerm, response.entries, response.pagination)),
(error: Error) => dispatch(searchFailure(searchTerm, error)),
);
};
}
// Instead of searching for complete entries, query will search for specific fields
// in specific collections and return raw data (no entries).
export function query(namespace, collectionName, searchFields, searchTerm) {
return (dispatch, getState) => {
export function query(
namespace: string,
collectionName: string,
searchFields: string[],
searchTerm: string,
) {
return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
dispatch(querying(namespace, collectionName, searchFields, searchTerm));
const state = getState();
@ -147,9 +189,10 @@ export function query(namespace, collectionName, searchFields, searchTerm) {
: backend.query(collection, searchFields, searchTerm);
return queryPromise.then(
response =>
(response: Response) =>
dispatch(querySuccess(namespace, collectionName, searchFields, searchTerm, response)),
error => dispatch(queryFailure(namespace, collectionName, searchFields, searchTerm, error)),
(error: Error) =>
dispatch(queryFailure(namespace, collectionName, searchFields, searchTerm, error)),
);
};
}

View File

@ -184,6 +184,10 @@ export class Backend {
return Promise.resolve(null);
}
isGitBackend() {
return this.implementation.isGitBackend?.() || false;
}
updateUserCredentials = (updatedCredentials: Credentials) => {
const storedUser = this.authStore!.retrieve();
if (storedUser && storedUser.backendName === this.backendName) {
@ -273,7 +277,12 @@ export class Backend {
collection.get('name'),
selectEntrySlug(collection, loadedEntry.file.path),
loadedEntry.file.path,
{ raw: loadedEntry.data || '', label: loadedEntry.file.label },
{
raw: loadedEntry.data || '',
label: loadedEntry.file.label,
author: loadedEntry.file.author,
updatedOn: loadedEntry.file.updatedOn,
},
),
);
const formattedEntries = entries.map(this.entryWithFormat(collection));
@ -284,7 +293,7 @@ export class Backend {
return filteredEntries;
}
listEntries(collection: Collection) {
async listEntries(collection: Collection) {
const extension = selectFolderEntryExtension(collection);
let listMethod: () => Promise<ImplementationEntry[]>;
const collectionType = collection.get('type');
@ -307,20 +316,23 @@ export class Backend {
} else {
throw new Error(`Unknown collection type: ${collectionType}`);
}
return listMethod().then((loadedEntries: ImplementationEntry[]) => ({
entries: this.processEntries(loadedEntries, collection),
/*
const loadedEntries = await listMethod();
/*
Wrap cursors so we can tell which collection the cursor is
from. This is done to prevent traverseCursor from requiring a
`collection` argument.
*/
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
cursor: Cursor.create(loadedEntries[CURSOR_COMPATIBILITY_SYMBOL]).wrapData({
cursorType: 'collectionEntries',
collection,
}),
}));
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
const cursor = Cursor.create(loadedEntries[CURSOR_COMPATIBILITY_SYMBOL]).wrapData({
cursorType: 'collectionEntries',
collection,
});
return {
entries: this.processEntries(loadedEntries, collection),
pagination: cursor.meta?.get('page'),
cursor,
};
}
// The same as listEntries, except that if a cursor with the "next"

View File

@ -3,12 +3,17 @@ import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { connect } from 'react-redux';
import { translate } from 'react-polyglot';
import { lengths } from 'netlify-cms-ui-default';
import { getNewEntryUrl } from 'Lib/urlHelper';
import Sidebar from './Sidebar';
import CollectionTop from './CollectionTop';
import EntriesCollection from './Entries/EntriesCollection';
import EntriesSearch from './Entries/EntriesSearch';
import CollectionControls from './CollectionControls';
import { sortByField } from 'Actions/entries';
import { selectSortableFields } from 'Reducers/collections';
import { selectEntriesSort } from 'Reducers/entries';
import { VIEW_STYLE_LIST } from 'Constants/collectionViews';
const CollectionContainer = styled.div`
@ -26,6 +31,9 @@ class Collection extends React.Component {
isSearchResults: PropTypes.bool,
collection: ImmutablePropTypes.map.isRequired,
collections: ImmutablePropTypes.orderedMap.isRequired,
sortableFields: PropTypes.array,
sort: ImmutablePropTypes.orderedMap,
onSortClick: PropTypes.func.isRequired,
};
state = {
@ -49,21 +57,33 @@ class Collection extends React.Component {
};
render() {
const { collection, collections, collectionName, isSearchResults, searchTerm } = this.props;
const {
collection,
collections,
collectionName,
isSearchResults,
searchTerm,
sortableFields,
onSortClick,
sort,
} = this.props;
const newEntryUrl = collection.get('create') ? getNewEntryUrl(collectionName) : '';
return (
<CollectionContainer>
<Sidebar collections={collections} searchTerm={searchTerm} />
<CollectionMain>
{isSearchResults ? null : (
<CollectionTop
collectionLabel={collection.get('label')}
collectionLabelSingular={collection.get('label_singular')}
collectionDescription={collection.get('description')}
newEntryUrl={newEntryUrl}
viewStyle={this.state.viewStyle}
onChangeViewStyle={this.handleChangeViewStyle}
/>
<>
<CollectionTop collection={collection} newEntryUrl={newEntryUrl} />
<CollectionControls
collection={collection}
viewStyle={this.state.viewStyle}
onChangeViewStyle={this.handleChangeViewStyle}
sortableFields={sortableFields}
onSortClick={onSortClick}
sort={sort}
/>
</>
)}
{isSearchResults ? this.renderEntriesSearch() : this.renderEntriesCollection()}
</CollectionMain>
@ -74,10 +94,36 @@ class Collection extends React.Component {
function mapStateToProps(state, ownProps) {
const { collections } = state;
const { isSearchResults, match } = ownProps;
const { isSearchResults, match, t } = ownProps;
const { name, searchTerm } = match.params;
const collection = name ? collections.get(name) : collections.first();
return { collection, collections, collectionName: name, isSearchResults, searchTerm };
const sort = selectEntriesSort(state.entries, collection.get('name'));
const sortableFields = selectSortableFields(collection, t);
return {
collection,
collections,
collectionName: name,
isSearchResults,
searchTerm,
sort,
sortableFields,
};
}
export default connect(mapStateToProps)(Collection);
const mapDispatchToProps = {
sortByField,
};
const mergeProps = (stateProps, dispatchProps, ownProps) => {
return {
...stateProps,
...ownProps,
onSortClick: (key, direction) =>
dispatchProps.sortByField(stateProps.collection, key, direction),
};
};
const ConnectedCollection = connect(mapStateToProps, mapDispatchToProps, mergeProps)(Collection);
export default translate()(ConnectedCollection);

View File

@ -0,0 +1,40 @@
import React from 'react';
import styled from '@emotion/styled';
import ViewStyleControl from './ViewStyleControl';
import SortControl from './SortControl';
import { lengths } from 'netlify-cms-ui-default';
const CollectionControlsContainer = styled.div`
display: flex;
align-items: center;
flex-direction: row-reverse;
margin-top: 22px;
width: ${lengths.topCardWidth};
& > div {
margin-left: 6px;
}
`;
const CollectionControls = ({
collection,
viewStyle,
onChangeViewStyle,
sortableFields,
onSortClick,
sort,
}) => (
<CollectionControlsContainer>
<ViewStyleControl viewStyle={viewStyle} onChangeViewStyle={onChangeViewStyle} />
{sortableFields.length > 0 && (
<SortControl
fields={sortableFields}
collection={collection}
sort={sort}
onSortClick={onSortClick}
/>
)}
</CollectionControlsContainer>
);
export default CollectionControls;

View File

@ -1,20 +1,20 @@
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import React from 'react';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { Link } from 'react-router-dom';
import { Icon, components, buttons, shadows, colors } from 'netlify-cms-ui-default';
import { VIEW_STYLE_LIST, VIEW_STYLE_GRID } from 'Constants/collectionViews';
import { components, buttons, shadows } from 'netlify-cms-ui-default';
const CollectionTopContainer = styled.div`
${components.cardTop};
margin-bottom: 22px;
`;
const CollectionTopRow = styled.div`
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: 20px;
`;
const CollectionTopHeading = styled.h1`
@ -32,47 +32,27 @@ const CollectionTopNewButton = styled(Link)`
const CollectionTopDescription = styled.p`
${components.cardTopDescription};
margin-bottom: 0;
`;
const ViewControls = styled.div`
display: flex;
align-items: center;
justify-content: flex-end;
margin-top: 24px;
`;
const getCollectionProps = collection => {
const collectionLabel = collection.get('label');
const collectionLabelSingular = collection.get('label_singular');
const collectionDescription = collection.get('description');
const ViewControlsText = styled.span`
font-size: 14px;
color: ${colors.text};
margin-right: 12px;
`;
return {
collectionLabel,
collectionLabelSingular,
collectionDescription,
};
};
const ViewControlsButton = styled.button`
${buttons.button};
color: ${props => (props.isActive ? colors.active : '#b3b9c4')};
background-color: transparent;
display: block;
padding: 0;
margin: 0 4px;
const CollectionTop = ({ collection, newEntryUrl, t }) => {
const { collectionLabel, collectionLabelSingular, collectionDescription } = getCollectionProps(
collection,
t,
);
&:last-child {
margin-right: 0;
}
${Icon} {
display: block;
}
`;
const CollectionTop = ({
collectionLabel,
collectionLabelSingular,
collectionDescription,
viewStyle,
onChangeViewStyle,
newEntryUrl,
t,
}) => {
return (
<CollectionTopContainer>
<CollectionTopRow>
@ -88,31 +68,12 @@ const CollectionTop = ({
{collectionDescription ? (
<CollectionTopDescription>{collectionDescription}</CollectionTopDescription>
) : null}
<ViewControls>
<ViewControlsText>{t('collection.collectionTop.viewAs')}:</ViewControlsText>
<ViewControlsButton
isActive={viewStyle === VIEW_STYLE_LIST}
onClick={() => onChangeViewStyle(VIEW_STYLE_LIST)}
>
<Icon type="list" />
</ViewControlsButton>
<ViewControlsButton
isActive={viewStyle === VIEW_STYLE_GRID}
onClick={() => onChangeViewStyle(VIEW_STYLE_GRID)}
>
<Icon type="grid" />
</ViewControlsButton>
</ViewControls>
</CollectionTopContainer>
);
};
CollectionTop.propTypes = {
collectionLabel: PropTypes.string.isRequired,
collectionLabelSingular: PropTypes.string,
collectionDescription: PropTypes.string,
viewStyle: PropTypes.oneOf([VIEW_STYLE_LIST, VIEW_STYLE_GRID]).isRequired,
onChangeViewStyle: PropTypes.func.isRequired,
collection: ImmutablePropTypes.map.isRequired,
newEntryUrl: PropTypes.string,
t: PropTypes.func.isRequired,
};

View File

@ -1,10 +1,21 @@
import PropTypes from 'prop-types';
import React from 'react';
import styled from '@emotion/styled';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { translate } from 'react-polyglot';
import { Loader } from 'netlify-cms-ui-default';
import { Loader, lengths } from 'netlify-cms-ui-default';
import EntryListing from './EntryListing';
const PaginationMessage = styled.div`
width: ${lengths.topCardWidth};
padding: 16px;
text-align: center;
`;
const NoEntriesMessage = styled(PaginationMessage)`
margin-top: 16px;
`;
const Entries = ({
collections,
entries,
@ -13,6 +24,7 @@ const Entries = ({
cursor,
handleCursorActions,
t,
page,
}) => {
const loadingMessages = [
t('collection.entries.loadingEntries'),
@ -20,27 +32,32 @@ const Entries = ({
t('collection.entries.longerLoading'),
];
if (entries) {
return (
<EntryListing
collections={collections}
entries={entries}
viewStyle={viewStyle}
cursor={cursor}
handleCursorActions={handleCursorActions}
/>
);
}
if (isFetching) {
if (isFetching && page === undefined) {
return <Loader active>{loadingMessages}</Loader>;
}
return <div className="nc-collectionPage-noEntries">No Entries</div>;
if (entries && entries.size > 0) {
return (
<>
<EntryListing
collections={collections}
entries={entries}
viewStyle={viewStyle}
cursor={cursor}
handleCursorActions={handleCursorActions}
/>
{isFetching && page !== undefined ? (
<PaginationMessage>{t('collection.entries.loadingEntries')}</PaginationMessage>
) : null}
</>
);
}
return <NoEntriesMessage>{t('collection.entries.noEntries')}</NoEntriesMessage>;
};
Entries.propTypes = {
collections: ImmutablePropTypes.map.isRequired,
collections: ImmutablePropTypes.iterable.isRequired,
entries: ImmutablePropTypes.list,
page: PropTypes.number,
isFetching: PropTypes.bool,

View File

@ -8,13 +8,14 @@ import {
loadEntries as actionLoadEntries,
traverseCollectionCursor as actionTraverseCollectionCursor,
} from 'Actions/entries';
import { selectEntries } from 'Reducers';
import { selectEntries, selectEntriesLoaded, selectIsFetching } from '../../../reducers/entries';
import { selectCollectionEntriesCursor } from 'Reducers/cursors';
import Entries from './Entries';
class EntriesCollection extends React.Component {
static propTypes = {
collection: ImmutablePropTypes.map.isRequired,
page: PropTypes.number,
entries: ImmutablePropTypes.list,
isFetching: PropTypes.bool.isRequired,
viewStyle: PropTypes.string,
@ -44,7 +45,7 @@ class EntriesCollection extends React.Component {
};
render() {
const { collection, entries, isFetching, viewStyle, cursor } = this.props;
const { collection, entries, isFetching, viewStyle, cursor, page } = this.props;
return (
<Entries
@ -55,6 +56,7 @@ class EntriesCollection extends React.Component {
viewStyle={viewStyle}
cursor={cursor}
handleCursorActions={partial(this.handleCursorActions, cursor)}
page={page}
/>
);
}
@ -64,9 +66,9 @@ function mapStateToProps(state, ownProps) {
const { collection, viewStyle } = ownProps;
const page = state.entries.getIn(['pages', collection.get('name'), 'page']);
const entries = selectEntries(state, collection.get('name'));
const entriesLoaded = !!state.entries.getIn(['pages', collection.get('name')]);
const isFetching = state.entries.getIn(['pages', collection.get('name'), 'isFetching'], false);
const entries = selectEntries(state.entries, collection.get('name'));
const entriesLoaded = selectEntriesLoaded(state.entries, collection.get('name'));
const isFetching = selectIsFetching(state.entries, collection.get('name'));
const rawCursor = selectCollectionEntriesCursor(state.cursors, collection.get('name'));
const cursor = Cursor.create(rawCursor).clearData();

View File

@ -13,7 +13,7 @@ const ListCard = styled.li`
${components.card};
width: ${lengths.topCardWidth};
margin-left: 12px;
margin-bottom: 16px;
margin-bottom: 10px;
overflow: hidden;
`;

View File

@ -0,0 +1,69 @@
import React from 'react';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { buttons, Dropdown, DropdownItem, StyledDropdownButton } from 'netlify-cms-ui-default';
import { SortDirection } from '../../types/redux';
const SortButton = styled(StyledDropdownButton)`
${buttons.button};
${buttons.medium};
${buttons.grayText};
font-size: 14px;
&:after {
top: 11px;
}
`;
function nextSortDirection(direction) {
switch (direction) {
case SortDirection.Ascending:
return SortDirection.Descending;
case SortDirection.Descending:
return SortDirection.None;
default:
return SortDirection.Ascending;
}
}
function sortIconProps(sortDir) {
return {
icon: 'chevron',
iconDirection: sortIconDirections[sortDir],
iconSmall: true,
};
}
const sortIconDirections = {
[SortDirection.Ascending]: 'up',
[SortDirection.Descending]: 'down',
};
const SortControl = ({ t, fields, onSortClick, sort }) => {
return (
<Dropdown
renderButton={() => <SortButton>{t('collection.collectionTop.sortBy')}</SortButton>}
closeOnSelection={false}
dropdownTopOverlap="30px"
dropdownWidth="160px"
dropdownPosition="left"
>
{fields.map(field => {
const sortDir = sort?.getIn([field.key, 'direction']);
const isActive = sortDir && sortDir !== SortDirection.None;
const nextSortDir = nextSortDirection(sortDir);
return (
<DropdownItem
key={field.key}
label={field.label}
onClick={() => onSortClick(field.key, nextSortDir)}
isActive={isActive}
{...(isActive && sortIconProps(sortDir))}
/>
);
})}
</Dropdown>
);
};
export default translate()(SortControl);

View File

@ -0,0 +1,49 @@
import React from 'react';
import styled from '@emotion/styled';
import { Icon, buttons, colors } from 'netlify-cms-ui-default';
import { VIEW_STYLE_LIST, VIEW_STYLE_GRID } from 'Constants/collectionViews';
const ViewControlsSection = styled.div`
display: flex;
align-items: center;
justify-content: flex-end;
max-width: 500px;
`;
const ViewControlsButton = styled.button`
${buttons.button};
color: ${props => (props.isActive ? colors.active : '#b3b9c4')};
background-color: transparent;
display: block;
padding: 0;
margin: 0 4px;
&:last-child {
margin-right: 0;
}
${Icon} {
display: block;
}
`;
const ViewStyleControl = ({ viewStyle, onChangeViewStyle }) => {
return (
<ViewControlsSection>
<ViewControlsButton
isActive={viewStyle === VIEW_STYLE_LIST}
onClick={() => onChangeViewStyle(VIEW_STYLE_LIST)}
>
<Icon type="list" />
</ViewControlsButton>
<ViewControlsButton
isActive={viewStyle === VIEW_STYLE_GRID}
onClick={() => onChangeViewStyle(VIEW_STYLE_GRID)}
>
<Icon type="grid" />
</ViewControlsButton>
</ViewControlsSection>
);
};
export default ViewStyleControl;

View File

@ -164,5 +164,23 @@ describe('config', () => {
validateConfig(merge(validConfig, { collections: [{ publish: false }] }));
}).not.toThrowError();
});
it('should throw if collections sortableFields is not a boolean or a string array', () => {
expect(() => {
validateConfig(merge({}, validConfig, { collections: [{ sortableFields: 'title' }] }));
}).toThrowError("'collections[0].sortableFields' should be array");
});
it('should allow sortableFields to be a string array', () => {
expect(() => {
validateConfig(merge({}, validConfig, { collections: [{ sortableFields: ['title'] }] }));
}).not.toThrow();
});
it('should allow sortableFields to be a an empty array', () => {
expect(() => {
validateConfig(merge({}, validConfig, { collections: [{ sortableFields: [] }] }));
}).not.toThrow();
});
});
});

View File

@ -135,6 +135,12 @@ const getConfigSchema = () => ({
},
},
fields: fieldsConfig,
sortableFields: {
type: 'array',
items: {
type: 'string',
},
},
},
required: ['name', 'label'],
oneOf: [{ required: ['files'] }, { required: ['folder', 'fields'] }],

View File

@ -2,12 +2,14 @@ import React from 'react';
export const IDENTIFIER_FIELDS = ['title', 'path'];
export const SORTABLE_FIELDS = ['title', 'date', 'author', 'description'];
export const INFERABLE_FIELDS = {
title: {
type: 'string',
secondaryTypes: [],
synonyms: ['title', 'name', 'label', 'headline', 'header'],
defaultPreview: value => <h1>{value}</h1>, // eslint-disable-line react/display-name
defaultPreview: (value: React.ReactNode) => <h1>{value}</h1>, // eslint-disable-line react/display-name
fallbackToFirstField: true,
showError: true,
},
@ -15,7 +17,7 @@ export const INFERABLE_FIELDS = {
type: 'string',
secondaryTypes: [],
synonyms: ['short_title', 'shortTitle', 'short'],
defaultPreview: value => <h2>{value}</h2>, // eslint-disable-line react/display-name
defaultPreview: (value: React.ReactNode) => <h2>{value}</h2>, // eslint-disable-line react/display-name
fallbackToFirstField: false,
showError: false,
},
@ -23,7 +25,7 @@ export const INFERABLE_FIELDS = {
type: 'string',
secondaryTypes: [],
synonyms: ['author', 'name', 'by', 'byline', 'owner'],
defaultPreview: value => <strong>{value}</strong>, // eslint-disable-line react/display-name
defaultPreview: (value: React.ReactNode) => <strong>{value}</strong>, // eslint-disable-line react/display-name
fallbackToFirstField: false,
showError: false,
},
@ -31,7 +33,7 @@ export const INFERABLE_FIELDS = {
type: 'datetime',
secondaryTypes: ['date'],
synonyms: ['date', 'publishDate', 'publish_date'],
defaultPreview: value => value,
defaultPreview: (value: React.ReactNode) => value,
fallbackToFirstField: false,
showError: false,
},
@ -51,7 +53,7 @@ export const INFERABLE_FIELDS = {
'bio',
'summary',
],
defaultPreview: value => value,
defaultPreview: (value: React.ReactNode) => value,
fallbackToFirstField: false,
showError: false,
},
@ -69,7 +71,7 @@ export const INFERABLE_FIELDS = {
'hero',
'logo',
],
defaultPreview: value => value,
defaultPreview: (value: React.ReactNode) => value,
fallbackToFirstField: false,
showError: false,
},

View File

@ -129,6 +129,37 @@ export default class Algolia {
}
}
async listAllEntries(collection) {
const params = {
hitsPerPage: 1000,
};
let response = await this.request(
`${this.searchURL}/indexes/${this.indexPrefix}${collection.get('name')}`,
{ params },
);
let { nbPages = 0, hits, page } = response;
page = page + 1;
while (page < nbPages) {
response = await this.request(
`${this.searchURL}/indexes/${this.indexPrefix}${collection.get('name')}`,
{
params: { ...params, page },
},
);
hits = [...hits, ...response.hits];
page = page + 1;
}
const entries = hits.map(hit => {
const slug = selectEntrySlug(collection, hit.path);
return createEntry(collection.get('name'), slug, hit.path, {
data: hit.data,
partial: true,
});
});
return entries;
}
getEntry(collection, slug) {
return this.searchBy('slug', collection.get('name'), slug).then(response => {
const entry = response.hits.filter(hit => hit.slug === slug)[0];

View File

@ -7,7 +7,7 @@ import {
SLUG_MISSING_REQUIRED_DATE,
keyToPathArray,
} from './stringTemplate';
import { selectIdentifier } from '../reducers/collections';
import { selectIdentifier, selectField, COMMIT_AUTHOR, COMMIT_DATE } from '../reducers/collections';
import { Collection, SlugConfig, Config, EntryMap } from '../types/redux';
import { stripIndent } from 'common-tags';
import { basename, fileExtension } from 'netlify-cms-lib-util';
@ -205,6 +205,13 @@ export const summaryFormatter = (
const identifier = entryData.getIn(keyToPathArray(selectIdentifier(collection) as string));
entryData = addFileTemplateFields(entry.get('path'), entryData);
// allow commit information in summary template
if (entry.get('author') && !selectField(collection, COMMIT_AUTHOR)) {
entryData = entryData.set(COMMIT_AUTHOR, entry.get('author'));
}
if (entry.get('updatedOn') && !selectField(collection, COMMIT_DATE)) {
entryData = entryData.set(COMMIT_DATE, entry.get('updatedOn'));
}
const summary = compileStringTemplate(summaryTemplate, date, identifier, entryData);
return summary;
};

View File

@ -3,7 +3,7 @@ import { get, escapeRegExp } from 'lodash';
import consoleError from '../lib/consoleError';
import { CONFIG_SUCCESS } from '../actions/config';
import { FILES, FOLDER } from '../constants/collectionTypes';
import { INFERABLE_FIELDS, IDENTIFIER_FIELDS } from '../constants/fieldInference';
import { INFERABLE_FIELDS, IDENTIFIER_FIELDS, SORTABLE_FIELDS } from '../constants/fieldInference';
import { formatExtensions } from '../formats/formats';
import {
CollectionsAction,
@ -15,6 +15,7 @@ import {
} from '../types/redux';
import { selectMediaFolder } from './entries';
import { keyToPathArray } from '../lib/stringTemplate';
import { Backend } from '../backend';
const collections = (state = null, action: CollectionsAction) => {
switch (action.type) {
@ -288,6 +289,7 @@ export const selectIdentifier = (collection: Collection) => {
fieldNames.find(name => name?.toLowerCase().trim() === id.toLowerCase().trim()),
);
};
export const selectInferedField = (collection: Collection, fieldName: string) => {
if (fieldName === 'title' && collection.get('identifier_field')) {
return selectIdentifier(collection);
@ -337,4 +339,56 @@ export const selectInferedField = (collection: Collection, fieldName: string) =>
return null;
};
export const COMMIT_AUTHOR = 'commit_author';
export const COMMIT_DATE = 'commit_date';
export const selectDefaultSortableFields = (collection: Collection, backend: Backend) => {
let defaultSortable = SORTABLE_FIELDS.map((type: string) => {
const field = selectInferedField(collection, type);
if (backend.isGitBackend() && type === 'author' && !field) {
// default to commit author if not author field is found
return COMMIT_AUTHOR;
}
return field;
}).filter(Boolean);
if (backend.isGitBackend()) {
// always have commit date by default
defaultSortable = [COMMIT_DATE, ...defaultSortable];
}
return defaultSortable as string[];
};
export const selectSortableFields = (collection: Collection, t: (key: string) => string) => {
const fields = collection
.get('sortableFields')
.toArray()
.map(key => {
if (key === COMMIT_DATE) {
return { key, field: { name: key, label: t('collection.defaultFields.updatedOn.label') } };
}
const field = selectField(collection, key);
if (key === COMMIT_AUTHOR && !field) {
return { key, field: { name: key, label: t('collection.defaultFields.author.label') } };
}
return { key, field: field?.toJS() };
})
.filter(item => !!item.field)
.map(item => ({ ...item.field, key: item.key }));
return fields;
};
export const selectSortDataPath = (collection: Collection, key: string) => {
if (key === COMMIT_DATE) {
return 'updatedOn';
} else if (key === COMMIT_AUTHOR && !selectField(collection, key)) {
return 'author';
} else {
return `data.${key}`;
}
};
export default collections;

View File

@ -1,6 +1,6 @@
import { fromJS } from 'immutable';
import { Cursor } from 'netlify-cms-lib-util';
import { ENTRIES_SUCCESS } from 'Actions/entries';
import { ENTRIES_SUCCESS, SORT_ENTRIES_SUCCESS } from 'Actions/entries';
// Since pagination can be used for a variety of views (collections
// and searches are the most common examples), we namespace cursors by
@ -16,7 +16,9 @@ const cursors = (state = fromJS({ cursorsByType: { collectionEntries: {} } }), a
Cursor.create(action.payload.cursor).store,
);
}
case SORT_ENTRIES_SUCCESS: {
return state.deleteIn(['cursorsByType', 'collectionEntries', action.payload.collection]);
}
default:
return state;
}

View File

@ -1,4 +1,4 @@
import { Map, List, fromJS } from 'immutable';
import { Map, List, fromJS, OrderedMap } from 'immutable';
import { dirname, join } from 'path';
import {
ENTRY_REQUEST,
@ -8,6 +8,9 @@ import {
ENTRIES_SUCCESS,
ENTRIES_FAILURE,
ENTRY_DELETE_SUCCESS,
SORT_ENTRIES_REQUEST,
SORT_ENTRIES_SUCCESS,
SORT_ENTRIES_FAILURE,
} from '../actions/entries';
import { SEARCH_ENTRIES_SUCCESS } from '../actions/search';
import {
@ -26,10 +29,17 @@ import {
EntryMap,
EntryField,
CollectionFiles,
EntriesSortRequestPayload,
EntriesSortSuccessPayload,
EntriesSortFailurePayload,
SortMap,
SortObject,
Sort,
SortDirection,
} from '../types/redux';
import { folderFormatter } from '../lib/formatters';
import { isAbsolutePath, basename } from 'netlify-cms-lib-util';
import { trim } from 'lodash';
import { trim, once, sortBy, set } from 'lodash';
let collection: string;
let loadedEntries: EntryObject[];
@ -37,7 +47,60 @@ let append: boolean;
let page: number;
let slug: string;
const entries = (state = Map({ entities: Map(), pages: Map() }), action: EntriesAction) => {
const storageSortKey = 'netlify-cms.entries.sort';
type StorageSortObject = SortObject & { index: number };
type StorageSort = { [collection: string]: { [key: string]: StorageSortObject } };
const loadSort = once(() => {
const sortString = localStorage.getItem(storageSortKey);
if (sortString) {
try {
const sort: StorageSort = JSON.parse(sortString);
let map = Map() as Sort;
Object.entries(sort).forEach(([collection, sort]) => {
let orderedMap = OrderedMap() as SortMap;
sortBy(Object.values(sort), ['index']).forEach(value => {
const { key, direction } = value;
orderedMap = orderedMap.set(key, fromJS({ key, direction }));
});
map = map.set(collection, orderedMap);
});
return map;
} catch (e) {
return Map() as Sort;
}
}
return Map() as Sort;
});
const clearSort = () => {
localStorage.removeItem(storageSortKey);
};
const persistSort = (sort: Sort | undefined) => {
if (sort) {
const storageSort: StorageSort = {};
sort.keySeq().forEach(key => {
const collection = key as string;
const sortObjects = (sort
.get(collection)
.valueSeq()
.toJS() as SortObject[]).map((value, index) => ({ ...value, index }));
sortObjects.forEach(value => {
set(storageSort, [collection, value.key], value);
});
});
localStorage.setItem(storageSortKey, JSON.stringify(storageSort));
} else {
clearSort();
}
};
const entries = (
state = Map({ entities: Map(), pages: Map(), sort: loadSort() }),
action: EntriesAction,
) => {
switch (action.type) {
case ENTRY_REQUEST: {
const payload = action.payload as EntryRequestPayload;
@ -59,7 +122,13 @@ const entries = (state = Map({ entities: Map(), pages: Map() }), action: Entries
case ENTRIES_REQUEST: {
const payload = action.payload as EntriesRequestPayload;
return state.setIn(['pages', payload.collection, 'isFetching'], true);
const newState = state.withMutations(map => {
map.deleteIn(['sort', payload.collection]);
map.setIn(['pages', payload.collection, 'isFetching'], true);
});
clearSort();
return newState;
}
case ENTRIES_SUCCESS: {
@ -123,11 +192,74 @@ const entries = (state = Map({ entities: Map(), pages: Map() }), action: Entries
});
}
case SORT_ENTRIES_REQUEST: {
const payload = action.payload as EntriesSortRequestPayload;
const { collection, key, direction } = payload;
const newState = state.withMutations(map => {
const sort = OrderedMap({ [key]: Map({ key, direction }) });
map.setIn(['sort', collection], sort);
map.setIn(['pages', collection, 'isFetching'], true);
map.deleteIn(['pages', collection, 'page']);
});
persistSort(newState.get('sort') as Sort);
return newState;
}
case SORT_ENTRIES_SUCCESS: {
const payload = action.payload as EntriesSortSuccessPayload;
const { collection, entries } = payload;
loadedEntries = entries;
const newState = state.withMutations(map => {
loadedEntries.forEach(entry =>
map.setIn(
['entities', `${entry.collection}.${entry.slug}`],
fromJS(entry).set('isFetching', false),
),
);
map.setIn(['pages', collection, 'isFetching'], false);
const ids = List(loadedEntries.map(entry => entry.slug));
map.setIn(
['pages', collection],
Map({
page: 1,
ids,
}),
);
});
return newState;
}
case SORT_ENTRIES_FAILURE: {
const payload = action.payload as EntriesSortFailurePayload;
const { collection, key } = payload;
const newState = state.withMutations(map => {
map.deleteIn(['sort', collection, key]);
map.setIn(['pages', collection, 'isFetching'], false);
});
persistSort(newState.get('sort') as Sort);
return newState;
}
default:
return state;
}
};
export const selectEntriesSort = (entries: Entries, collection: string) => {
const sort = entries.get('sort') as Sort | undefined;
return sort?.get(collection);
};
export const selectEntriesSortFields = (entries: Entries, collection: string) => {
const sort = selectEntriesSort(entries, collection);
const values =
sort
?.valueSeq()
.filter(v => v?.get('direction') !== SortDirection.None)
.toArray() || [];
return values;
};
export const selectEntry = (state: Entries, collection: string, slug: string) =>
state.getIn(['entities', `${collection}.${slug}`]);
@ -136,7 +268,18 @@ export const selectPublishedSlugs = (state: Entries, collection: string) =>
export const selectEntries = (state: Entries, collection: string) => {
const slugs = selectPublishedSlugs(state, collection);
return slugs && slugs.map(slug => selectEntry(state, collection, slug as string));
const entries =
slugs && (slugs.map(slug => selectEntry(state, collection, slug as string)) as List<EntryMap>);
return entries;
};
export const selectEntriesLoaded = (state: Entries, collection: string) => {
return !!state.getIn(['pages', collection]);
};
export const selectIsFetching = (state: Entries, collection: string) => {
return state.getIn(['pages', collection, 'isFetching'], false);
};
const DRAFT_MEDIA_FILES = 'DRAFT_MEDIA_FILES';

View File

@ -31,6 +31,7 @@ const entries = (state = defaultState, action) => {
return state.withMutations(map => {
map.set('isFetching', true);
map.set('term', action.payload.searchTerm);
map.set('page', action.payload.page);
});
}
return state;

View File

@ -24,8 +24,9 @@ export interface StaticallyTypedRecord<T> {
filter<K extends keyof T>(
predicate: (value: T[K], key: K, iter: this) => boolean,
): StaticallyTypedRecord<T>;
valueSeq<K extends keyof T>(): T[K][];
valueSeq<K extends keyof T>(): T[K][] & { toArray: () => T[K][] };
map<K extends keyof T, V>(
mapFunc: (value: T[K]) => V,
): StaticallyTypedRecord<{ [key: string]: V }>;
keySeq<K extends keyof T>(): { toArray: () => K[] };
}

View File

@ -1,6 +1,6 @@
import { Action } from 'redux';
import { StaticallyTypedRecord } from './immutable';
import { Map, List } from 'immutable';
import { Map, List, OrderedMap } from 'immutable';
import AssetProxy from '../valueObjects/AssetProxy';
import { MediaFile as BackendMediaFile } from '../backend';
@ -52,11 +52,24 @@ type Pages = StaticallyTypedRecord<PagesObject>;
type EntitiesObject = { [key: string]: EntryMap };
export enum SortDirection {
Ascending = 'Ascending',
Descending = 'Descending',
None = 'None',
}
export type SortObject = { key: string; direction: SortDirection };
export type SortMap = OrderedMap<string, StaticallyTypedRecord<SortObject>>;
export type Sort = Map<string, SortMap>;
export type Entities = StaticallyTypedRecord<EntitiesObject>;
export type Entries = StaticallyTypedRecord<{
pages: Pages & PagesObject;
entities: Entities & EntitiesObject;
sort: Sort;
}>;
export type Deploys = StaticallyTypedRecord<{}>;
@ -76,6 +89,8 @@ export type EntryObject = {
mediaFiles: List<MediaFileMap>;
newRecord: boolean;
metaData: { status: string };
author?: string;
updatedOn?: string;
};
export type EntryMap = StaticallyTypedRecord<EntryObject>;
@ -140,6 +155,7 @@ type CollectionObject = {
slug?: string;
label_singular?: string;
label: string;
sortableFields: List<string>;
};
export type Collection = StaticallyTypedRecord<CollectionObject>;
@ -201,7 +217,12 @@ interface SearchItem {
slug: string;
}
export type Search = StaticallyTypedRecord<{ entryIds?: SearchItem[] }>;
export type Search = StaticallyTypedRecord<{
entryIds?: SearchItem[];
isFetching: boolean;
term: string | null;
page: number;
}>;
export type Cursors = StaticallyTypedRecord<{}>;
@ -269,6 +290,18 @@ export interface EntriesSuccessPayload extends EntryPayload {
append: boolean;
page: number;
}
export interface EntriesSortRequestPayload extends EntryPayload {
key: string;
direction: string;
}
export interface EntriesSortSuccessPayload extends EntriesSortRequestPayload {
entries: EntryObject[];
}
export interface EntriesSortFailurePayload extends EntriesSortRequestPayload {
error: Error;
}
export interface EntriesAction extends Action<string> {
payload:

View File

@ -10,6 +10,8 @@ interface Options {
metaData?: unknown | null;
isModification?: boolean | null;
mediaFiles?: MediaFile[] | null;
author?: string;
updatedOn?: string;
}
export interface EntryValue {
@ -24,6 +26,8 @@ export interface EntryValue {
metaData: unknown | null;
isModification: boolean | null;
mediaFiles: MediaFile[];
author: string;
updatedOn: string;
}
export function createEntry(collection: string, slug = '', path = '', options: Options = {}) {
@ -38,6 +42,8 @@ export function createEntry(collection: string, slug = '', path = '', options: O
metaData: options.metaData || null,
isModification: isBoolean(options.isModification) ? options.isModification : null,
mediaFiles: options.mediaFiles || [],
author: options.author || '',
updatedOn: options.updatedOn || '',
};
return returnObj;

View File

@ -7,5 +7,5 @@ declare module 'netlify-cms-lib-auth' {
refresh_token: string;
}) => Promise<{ token: string; refresh_token: string }>;
}
export default NetlifyAuthenticator;
export { NetlifyAuthenticator };
}

View File

@ -1,3 +1,6 @@
import { asyncLock, AsyncLock } from './asyncLock';
import unsentRequest from './unsentRequest';
export const CMS_BRANCH_PREFIX = 'cms';
export const DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
export const MERGE_COMMIT_MESSAGE = 'Automatically generated. Merged on Netlify CMS.';
@ -27,6 +30,93 @@ export interface FetchError extends Error {
status: number;
}
interface API {
rateLimiter?: AsyncLock;
buildRequest: (req: ApiRequest) => ApiRequest | Promise<ApiRequest>;
requestFunction?: (req: ApiRequest) => Promise<Response>;
}
export type ApiRequestObject = {
url: string;
params?: Record<string, string | boolean | number>;
method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
headers?: Record<string, string>;
body?: string | FormData;
cache?: 'no-store';
};
export type ApiRequest = ApiRequestObject | string;
/**
 * Error thrown when a backend reports rate limiting; carries the number of
 * seconds to pause before retrying so the backoff logic can honor it.
 */
class RateLimitError extends Error {
  // Seconds to wait before retrying; clamped to [1 (when negative), 1 hour].
  resetSeconds: number;

  constructor(message: string, resetSeconds: number) {
    super(message);
    // Fix: without an explicit name, instances log as a generic "Error".
    this.name = 'RateLimitError';
    if (resetSeconds < 0) {
      // Guard against clock skew producing a negative window.
      this.resetSeconds = 1;
    } else if (resetSeconds > 60 * 60) {
      // Cap absurdly long windows at one hour.
      this.resetSeconds = 60 * 60;
    } else {
      this.resetSeconds = resetSeconds;
    }
  }
}
/**
 * Performs an API request with retry/backoff on failure.
 *
 * Retries up to 5 attempts. On the first failure a shared rate limiter is
 * installed on `api` so concurrent requests also pause; it is released after
 * the backoff window elapses.
 *
 * @param api backend adapter providing request building/execution and an
 *   optional shared rate limiter.
 * @param req request (URL string or request object) to perform.
 * @param attempt current attempt number (internal; callers omit it).
 * @returns the raw fetch Response.
 * @throws the last error after 5 failed attempts.
 */
export const requestWithBackoff = async (
  api: API,
  req: ApiRequest,
  attempt = 1,
): Promise<Response> => {
  // If a previous failure installed a rate limiter, wait for it to release.
  if (api.rateLimiter) {
    await api.rateLimiter.acquire();
  }
  try {
    const builtRequest = await api.buildRequest(req);
    const requestFunction = api.requestFunction || unsentRequest.performRequest;
    const response: Response = await requestFunction(builtRequest);
    if (response.status === 429) {
      // GitLab/Bitbucket too many requests
      const text = await response.text().catch(() => 'Too many requests');
      throw new Error(text);
    } else if (response.status === 403) {
      // GitHub too many requests
      const { message } = await response.json().catch(() => ({ message: '' }));
      if (message.match('API rate limit exceeded')) {
        const now = new Date();
        // Fix: X-RateLimit-Reset is a UTC epoch timestamp (seconds), so the
        // wait time is the delta from now — passing the raw epoch value made
        // RateLimitError clamp every pause to a full hour. Fall back to 60s
        // when the header is missing.
        const nextWindowInSeconds = response.headers.has('X-RateLimit-Reset')
          ? parseInt(response.headers.get('X-RateLimit-Reset')!, 10) - now.getTime() / 1000
          : 60;
        throw new RateLimitError(message, nextWindowInSeconds);
      }
    }
    return response;
  } catch (err) {
    if (attempt <= 5) {
      if (!api.rateLimiter) {
        // Honor a server-provided reset window when present, otherwise use
        // quadratic backoff based on the attempt number.
        const timeout = err.resetSeconds || attempt * attempt;
        console.log(
          // Fix: pluralize based on the actual timeout, not the attempt count.
          `Pausing requests for ${timeout} ${
            timeout === 1 ? 'second' : 'seconds'
          } due to fetch failures:`,
          err.message,
        );
        api.rateLimiter = asyncLock();
        // Acquire immediately so other requests block until the timeout fires.
        api.rateLimiter.acquire();
        setTimeout(() => {
          api.rateLimiter?.release();
          api.rateLimiter = undefined;
          console.log(`Done pausing requests`);
        }, 1000 * timeout);
      }
      return requestWithBackoff(api, req, attempt + 1);
    } else {
      throw err;
    }
  }
};
export const readFile = async (
id: string | null | undefined,
fetchContent: () => Promise<string | Blob>,
@ -46,6 +136,29 @@ export const readFile = async (
return content;
};
// Commit metadata tracked per file for sorting (author + last-updated date).
export type FileMetadata = {
  author: string;
  updatedOn: string;
};

// Cache key for per-file commit metadata entries.
const getFileMetadataKey = (id: string) => `gh.${id}.meta`;

/**
 * Returns commit metadata for a file, serving from the localForage cache
 * when present and populating the cache on a miss.
 */
export const readFileMetadata = async (
  id: string,
  fetchMetadata: () => Promise<FileMetadata>,
  localForage: LocalForage,
) => {
  const key = getFileMetadataKey(id);
  let metadata = await localForage.getItem<FileMetadata>(key);
  if (!metadata) {
    metadata = await fetchMetadata();
    await localForage.setItem<FileMetadata>(key, metadata);
  }
  return metadata;
};
/**
* Keywords for inferring a status that will provide a deploy preview URL.
*/

View File

@ -1,9 +1,4 @@
import {
parseLinkHeader,
getAllResponses,
getPathDepth,
filterByPropExtension,
} from '../backendUtil';
import { parseLinkHeader, getAllResponses, getPathDepth, filterByExtension } from '../backendUtil';
import { oneLine } from 'common-tags';
import nock from 'nock';
@ -85,13 +80,14 @@ describe('getPathDepth', () => {
});
});
describe('filterByPropExtension', () => {
it('should return filtered array based on extension', () => {
expect(
filterByPropExtension('.html.md', 'path')([{ path: 'file.html.md' }, { path: 'file.json' }]),
).toEqual([{ path: 'file.html.md' }]);
expect(
filterByPropExtension('html.md', 'path')([{ path: 'file.html.md' }, { path: 'file.json' }]),
).toEqual([{ path: 'file.html.md' }]);
describe('filterByExtension', () => {
it('should return true when extension matches', () => {
expect(filterByExtension({ path: 'file.html.md' }, '.html.md')).toBe(true);
expect(filterByExtension({ path: 'file.html.md' }, 'html.md')).toBe(true);
});
it("should return false when extension doesn't match", () => {
expect(filterByExtension({ path: 'file.json' }, '.html.md')).toBe(false);
expect(filterByExtension({ path: 'file.json' }, 'html.md')).toBe(false);
});
});

View File

@ -1,4 +1,4 @@
import { flow, fromPairs, get } from 'lodash';
import { flow, fromPairs } from 'lodash';
import { map } from 'lodash/fp';
import { fromJS } from 'immutable';
import unsentRequest from './unsentRequest';
@ -6,10 +6,10 @@ import APIError from './APIError';
type Formatter = (res: Response) => Promise<string | Blob | unknown>;
export const filterByPropExtension = (extension: string, propName: string) => <T>(arr: T[]) =>
arr.filter(el =>
get(el, propName, '').endsWith(extension.startsWith('.') ? extension : `.${extension}`),
);
// True when the file's path ends with the given extension; a leading dot on
// the extension is optional ('md' and '.md' are equivalent).
export const filterByExtension = (file: { path: string }, extension: string) => {
  const suffix = extension.startsWith('.') ? extension : `.${extension}`;
  return (file?.path || '').endsWith(suffix);
};
const catchFormatErrors = (format: string, formatter: Formatter) => (res: Response) => {
try {
@ -64,18 +64,23 @@ export const responseParser = (options: {
apiName: string;
}) => (res: Response) => parseResponse(res, options);
export const parseLinkHeader = flow([
linksString => linksString.split(','),
map((str: string) => str.trim().split(';')),
map(([linkStr, keyStr]) => [
keyStr.match(/rel="(.*?)"/)[1],
linkStr
.trim()
.match(/<(.*?)>/)[1]
.replace(/\+/g, '%20'),
]),
fromPairs,
]);
/**
 * Parses an HTTP `Link` header into a map of rel name -> URL.
 *
 * @param header raw Link header value, or null when absent.
 * @returns e.g. { next: 'https://…?page=2', last: 'https://…?page=7' };
 *   an empty object for a null/empty header.
 */
export const parseLinkHeader = (header: string | null) => {
  if (!header) {
    return {};
  }
  const pairs: [string, string][] = [];
  for (const segment of header.split(',')) {
    const [linkPart, relPart] = segment.trim().split(';');
    // Fix: skip malformed segments instead of crashing — the previous code
    // dereferenced `match(...)[1]` and threw a TypeError when a segment had
    // no rel="..." attribute or no angle-bracketed URL.
    const relMatch = relPart ? relPart.match(/rel="(.*?)"/) : null;
    const urlMatch = linkPart ? linkPart.trim().match(/<(.*?)>/) : null;
    if (relMatch && urlMatch) {
      // '+' in the URL is normalized to an encoded space, matching the
      // previous behavior.
      pairs.push([relMatch[1], urlMatch[1].replace(/\+/g, '%20')]);
    }
  }
  return Object.fromEntries(pairs);
};
export const getAllResponses = async (
url: string,

View File

@ -1,6 +1,9 @@
import semaphore, { Semaphore } from 'semaphore';
import { unionBy, sortBy } from 'lodash';
import Cursor from './Cursor';
import { AsyncLock } from './asyncLock';
import { FileMetadata } from './API';
import { basename } from './path';
export type DisplayURLObject = { id: string; path: string };
@ -25,7 +28,7 @@ export interface UnpublishedEntryMediaFile {
export interface ImplementationEntry {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
data: string;
file: { path: string; label?: string; id?: string | null };
file: { path: string; label?: string; id?: string | null; author?: string; updatedOn?: string };
slug?: string;
mediaFiles?: ImplementationMediaFile[];
metaData?: { collection: string; status: string };
@ -135,6 +138,8 @@ export interface Implementation {
cursor: Cursor,
action: string,
) => Promise<{ entries: ImplementationEntry[]; cursor: Cursor }>;
isGitBackend?: () => boolean;
}
const MAX_CONCURRENT_DOWNLOADS = 10;
@ -156,28 +161,40 @@ type ReadFile = (
id: string | null | undefined,
options: { parseText: boolean },
) => Promise<string | Blob>;
type ReadFileMetadata = (path: string, id: string) => Promise<FileMetadata>;
type ReadUnpublishedFile = (
key: string,
) => Promise<{ metaData: Metadata; fileData: string; isModification: boolean; slug: string }>;
const fetchFiles = async (files: ImplementationFile[], readFile: ReadFile, apiName: string) => {
const fetchFiles = async (
files: ImplementationFile[],
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
files.forEach(file => {
promises.push(
new Promise(resolve =>
sem.take(() =>
readFile(file.path, file.id, { parseText: true })
.then(data => {
resolve({ file, data: data as string });
sem.leave();
})
.catch((error = true) => {
sem.leave();
console.error(`failed to load file from ${apiName}: ${file.path}`);
resolve({ error });
}),
),
sem.take(async () => {
try {
const [data, fileMetadata] = await Promise.all([
readFile(file.path, file.id, { parseText: true }),
file.id
? readFileMetadata(file.path, file.id)
: Promise.resolve({ author: '', updatedOn: '' }),
]);
resolve({ file: { ...file, ...fileMetadata }, data: data as string });
sem.leave();
} catch (error) {
sem.leave();
console.error(`failed to load file from ${apiName}: ${file.path}`);
resolve({ error: true });
}
}),
),
);
});
@ -230,18 +247,20 @@ const fetchUnpublishedFiles = async (
export const entriesByFolder = async (
listFiles: () => Promise<ImplementationFile[]>,
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
const files = await listFiles();
return fetchFiles(files, readFile, apiName);
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
export const entriesByFiles = async (
files: ImplementationFile[],
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
return fetchFiles(files, readFile, apiName);
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
export const unpublishedEntries = async (
@ -306,3 +325,255 @@ export const runWithLock = async (lock: AsyncLock, func: Function, message: stri
lock.release();
}
};
// localForage key prefix for cached git tree snapshots.
const LOCAL_KEY = 'git.local';

// Snapshot of a branch folder tree: the commit sha it was built from plus its files.
type LocalTree = {
  head: string;
  files: { id: string; name: string; path: string }[];
};

// Parameters that uniquely identify a cached tree snapshot.
type GetKeyArgs = {
  branch: string;
  folder: string;
  extension: string;
  depth: number;
};

// Builds the cache key for a branch/folder/extension/depth combination.
const getLocalKey = ({ branch, folder, extension, depth }: GetKeyArgs) => {
  return [LOCAL_KEY, branch, folder, extension, depth].join('.');
};

type PersistLocalTreeArgs = GetKeyArgs & {
  localForage: LocalForage;
  localTree: LocalTree;
};

type GetLocalTreeArgs = GetKeyArgs & {
  localForage: LocalForage;
};

/**
 * Stores a tree snapshot in localForage under the key derived from the args.
 */
export const persistLocalTree = async ({
  localForage,
  localTree,
  branch,
  folder,
  extension,
  depth,
}: PersistLocalTreeArgs) => {
  const key = getLocalKey({ branch, folder, extension, depth });
  await localForage.setItem<LocalTree>(key, localTree);
};

/**
 * Loads a previously persisted tree snapshot, or null when none is cached.
 */
export const getLocalTree = async ({
  localForage,
  branch,
  folder,
  extension,
  depth,
}: GetLocalTreeArgs) => {
  const key = getLocalKey({ branch, folder, extension, depth });
  return localForage.getItem<LocalTree>(key);
};
// Backend callbacks needed to diff a cached tree against the remote branch.
type GetDiffFromLocalTreeMethods = {
  getDifferences: (
    to: string,
    from: string,
  ) => Promise<
    {
      oldPath: string;
      newPath: string;
      status: string;
      binary: boolean;
    }[]
  >;
  filterFile: (file: { path: string; name: string }) => boolean;
  getFileId: (path: string) => Promise<string>;
};

type GetDiffFromLocalTreeArgs = GetDiffFromLocalTreeMethods & {
  branch: { name: string; sha: string };
  localTree: LocalTree;
  folder: string;
  extension: string;
  depth: number;
};

/**
 * Computes the set of changed files between the cached local tree head and
 * the current branch head, expanding renames into a delete + an add and
 * resolving ids for files that still exist.
 */
const getDiffFromLocalTree = async ({
  branch,
  localTree,
  folder,
  getDifferences,
  filterFile,
  getFileId,
}: GetDiffFromLocalTreeArgs) => {
  const differences = await getDifferences(branch.sha, localTree.head);

  // Keep only non-binary changes touching the collection folder.
  const relevant = differences.filter(
    d => (d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder)) && !d.binary,
  );

  const changedFiles: { path: string; name: string; deleted: boolean }[] = [];
  for (const d of relevant) {
    if (d.status === 'renamed') {
      // A rename removes the old path and introduces the new one.
      changedFiles.push({ path: d.oldPath, name: basename(d.oldPath), deleted: true });
      changedFiles.push({ path: d.newPath, name: basename(d.newPath), deleted: false });
    } else if (d.status === 'deleted') {
      changedFiles.push({ path: d.oldPath, name: basename(d.oldPath), deleted: true });
    } else {
      const path = d.newPath || d.oldPath;
      changedFiles.push({ path, name: basename(path), deleted: false });
    }
  }

  // Only fetch ids for surviving files; deleted ones keep an empty id.
  return Promise.all(
    changedFiles.filter(filterFile).map(async file =>
      file.deleted ? { ...file, id: '' } : { ...file, id: await getFileId(file.path) },
    ),
  );
};
// All inputs needed to list a collection folder with local tree caching:
// cache identity (GetKeyArgs), diff callbacks (GetDiffFromLocalTreeMethods),
// plus backend listing/reading primitives.
type AllEntriesByFolderArgs = GetKeyArgs &
  GetDiffFromLocalTreeMethods & {
    listAllFiles: (
      folder: string,
      extension: string,
      depth: number,
    ) => Promise<ImplementationFile[]>;
    readFile: ReadFile;
    readFileMetadata: ReadFileMetadata;
    getDefaultBranch: () => Promise<{ name: string; sha: string }>;
    isShaExistsInBranch: (branch: string, sha: string) => Promise<boolean>;
    apiName: string;
    localForage: LocalForage;
  };

/**
 * Lists and loads all entries in a folder, using a locally cached tree
 * snapshot to avoid re-listing the whole folder on every call.
 *
 * Flow: load the cached tree for (branch, folder, extension, depth); if it is
 * missing, its head no longer exists on the remote branch (force push), or
 * diffing fails, rebuild the cache from a full listing. Otherwise apply the
 * diff to the cached file list, persist the refreshed copy, and fetch file
 * contents (+ commit metadata) for the resulting files.
 */
export const allEntriesByFolder = async ({
  listAllFiles,
  readFile,
  readFileMetadata,
  apiName,
  branch,
  localForage,
  folder,
  extension,
  depth,
  getDefaultBranch,
  isShaExistsInBranch,
  getDifferences,
  getFileId,
  filterFile,
}: AllEntriesByFolderArgs) => {
  // Full listing fallback: list everything and persist a fresh snapshot
  // keyed by the default branch's current head sha.
  const listAllFilesAndPersist = async () => {
    const files = await listAllFiles(folder, extension, depth);
    // NOTE: this `branch` (a { name, sha } object) shadows the outer string
    // `branch` parameter used for the cache key lookup above.
    const branch = await getDefaultBranch();
    await persistLocalTree({
      localForage,
      localTree: {
        head: branch.sha,
        files: files.map(f => ({ id: f.id!, path: f.path, name: basename(f.path) })),
      },
      branch: branch.name,
      depth,
      extension,
      folder,
    });
    return files;
  };

  const listFiles = async () => {
    const localTree = await getLocalTree({ localForage, branch, folder, extension, depth });
    if (localTree) {
      const branch = await getDefaultBranch();
      // if the branch was forced pushed the local tree sha can be removed from the remote tree
      const localTreeInBranch = await isShaExistsInBranch(branch.name, localTree.head);
      if (!localTreeInBranch) {
        console.log(
          `Can't find local tree head '${localTree.head}' in branch '${branch.name}', rebuilding local tree`,
        );
        return listAllFilesAndPersist();
      }
      // Diff failures degrade to a full rebuild instead of failing the load.
      const diff = await getDiffFromLocalTree({
        branch,
        localTree,
        folder,
        extension,
        depth,
        getDifferences,
        getFileId,
        filterFile,
      }).catch(e => {
        console.log('Failed getting diff from local tree:', e);
        return null;
      });
      if (!diff) {
        console.log(`Diff is null, rebuilding local tree`);
        return listAllFilesAndPersist();
      }
      if (diff.length === 0) {
        // return local copy
        return localTree.files;
      } else {
        // refresh local copy
        const identity = (file: { path: string }) => file.path;
        // Map of path -> whether the diff marks it deleted; used to drop
        // removed files from both the diff and the cached list.
        const deleted = diff.reduce((acc, d) => {
          acc[d.path] = d.deleted;
          return acc;
        }, {} as Record<string, boolean>);
        // unionBy prefers diff entries (fresh ids) over stale cached ones;
        // sortBy keeps the snapshot deterministic by path.
        const newCopy = sortBy(
          unionBy(
            diff.filter(d => !deleted[d.path]),
            localTree.files.filter(f => !deleted[f.path]),
            identity,
          ),
          identity,
        );
        await persistLocalTree({
          localForage,
          localTree: { head: branch.sha, files: newCopy },
          branch: branch.name,
          depth,
          extension,
          folder,
        });
        return newCopy;
      }
    } else {
      return listAllFilesAndPersist();
    }
  };

  const files = await listFiles();
  return fetchFiles(files, readFile, readFileMetadata, apiName);
};

View File

@ -6,7 +6,7 @@ import { isAbsolutePath, basename, fileExtensionWithSeparator, fileExtension } f
import { onlySuccessfulPromises, flowAsync, then } from './promise';
import unsentRequest from './unsentRequest';
import {
filterByPropExtension,
filterByExtension,
getAllResponses,
parseLinkHeader,
parseResponse,
@ -37,9 +37,11 @@ import {
Config as C,
UnpublishedEntryMediaFile as UEMF,
blobToFileObj,
allEntriesByFolder,
} from './implementation';
import {
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
@ -54,6 +56,8 @@ import {
parseContentKey,
branchFromContentKey,
contentKeyFromBranch,
ApiRequest as AR,
requestWithBackoff,
} from './API';
import {
createPointerFile,
@ -77,16 +81,7 @@ export type Entry = E;
export type UnpublishedEntryMediaFile = UEMF;
export type PersistOptions = PO;
export type AssetProxy = AP;
export type ApiRequest =
| {
url: string;
params?: Record<string, string | boolean | number>;
method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
headers?: Record<string, string>;
body?: string | FormData;
cache?: 'no-store';
}
| string;
export type ApiRequest = AR;
export type Config = C;
export type FetchError = FE;
export type PointerFile = PF;
@ -105,7 +100,7 @@ export const NetlifyCmsLibUtil = {
flowAsync,
then,
unsentRequest,
filterByPropExtension,
filterByExtension,
parseLinkHeader,
parseResponse,
responseParser,
@ -118,6 +113,7 @@ export const NetlifyCmsLibUtil = {
getMediaDisplayURL,
getMediaAsBlob,
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
@ -138,6 +134,8 @@ export const NetlifyCmsLibUtil = {
branchFromContentKey,
contentKeyFromBranch,
blobToFileObj,
requestWithBackoff,
allEntriesByFolder,
};
export {
APIError,
@ -153,7 +151,7 @@ export {
flowAsync,
then,
unsentRequest,
filterByPropExtension,
filterByExtension,
parseLinkHeader,
getAllResponses,
parseResponse,
@ -169,6 +167,7 @@ export {
getMediaDisplayURL,
getMediaAsBlob,
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
@ -189,4 +188,6 @@ export {
branchFromContentKey,
contentKeyFromBranch,
blobToFileObj,
requestWithBackoff,
allEntriesByFolder,
};

View File

@ -37,13 +37,25 @@ const en = {
searchAll: 'Search all',
},
collectionTop: {
sortBy: 'Sort by',
viewAs: 'View as',
newButton: 'New %{collectionLabel}',
ascending: 'Ascending',
descending: 'Descending',
},
entries: {
loadingEntries: 'Loading Entries',
cachingEntries: 'Caching Entries',
loadingEntries: 'Loading Entries...',
cachingEntries: 'Caching Entries...',
longerLoading: 'This might take several minutes',
noEntries: 'No Entries',
},
defaultFields: {
author: {
label: 'Author',
},
updatedOn: {
label: 'Updated On',
},
},
},
editor: {

View File

@ -0,0 +1,5 @@
declare module 'semaphore' {
export type Semaphore = { take: (f: Function) => void; leave: () => void };
const semaphore: (count: number) => Semaphore;
export default semaphore;
}

View File

@ -3,7 +3,7 @@ import PropTypes from 'prop-types';
import { css } from '@emotion/core';
import styled from '@emotion/styled';
import { Wrapper, Button as DropdownButton, Menu, MenuItem } from 'react-aria-menubutton';
import { buttons, components, zIndex } from './styles';
import { colors, buttons, components, zIndex } from './styles';
import Icon from './Icon';
const StyledWrapper = styled(Wrapper)`
@ -18,6 +18,7 @@ const StyledDropdownButton = styled(DropdownButton)`
display: block;
padding-left: 20px;
padding-right: 40px;
position: relative;
&:after {
${components.caretDown};
@ -25,7 +26,7 @@ const StyledDropdownButton = styled(DropdownButton)`
display: block;
position: absolute;
top: 16px;
right: 16px;
right: 10px;
color: currentColor;
}
`;
@ -47,18 +48,35 @@ const DropdownList = styled.ul`
`};
`;
const StyledMenuItem = styled(MenuItem)`
${components.dropdownItem};
`;
// Menu item that highlights itself when `isActive`. The :focus/:active/:not
// selector block forces the active background/color in every state so the
// base dropdownItem styles cannot override the selection highlight.
const StyledMenuItem = ({ isActive, ...props }) => (
  <MenuItem
    css={css`
      ${components.dropdownItem};
      &:focus,
      &:active,
      &:not(:focus),
      &:not(:active) {
        background-color: ${isActive ? colors.activeBackground : 'inherit'};
        color: ${isActive ? colors.active : 'inherit'};
      }
      &:hover {
        color: ${colors.active};
        background-color: ${colors.activeBackground};
      }
    `}
    {...props}
  />
);
const MenuItemIconContainer = styled.div`
flex: 1 0 32px;
text-align: right;
position: relative;
top: 2px;
top: ${props => (props.iconSmall ? '0' : '2px')};
`;
const Dropdown = ({
closeOnSelection = true,
renderButton,
dropdownWidth = 'auto',
dropdownPosition = 'left',
@ -67,7 +85,11 @@ const Dropdown = ({
children,
}) => {
return (
<StyledWrapper onSelection={handler => handler()} className={className}>
<StyledWrapper
closeOnSelection={closeOnSelection}
onSelection={handler => handler()}
className={className}
>
{renderButton()}
<Menu>
<DropdownList width={dropdownWidth} top={dropdownTopOverlap} position={dropdownPosition}>
@ -87,12 +109,12 @@ Dropdown.propTypes = {
children: PropTypes.node,
};
const DropdownItem = ({ label, icon, iconDirection, onClick, className }) => (
<StyledMenuItem value={onClick} className={className}>
const DropdownItem = ({ label, icon, iconDirection, iconSmall, isActive, onClick, className }) => (
<StyledMenuItem value={onClick} isActive={isActive} className={className}>
<span>{label}</span>
{icon ? (
<MenuItemIconContainer>
<Icon type={icon} direction={iconDirection} size="small" />
<MenuItemIconContainer iconSmall={iconSmall}>
<Icon type={icon} direction={iconDirection} size={iconSmall ? 'xsmall' : 'small'} />
</MenuItemIconContainer>
) : null}
</StyledMenuItem>

View File

@ -94,7 +94,7 @@ export class Loader extends React.Component {
return (
<LoaderText>
<CSSTransition
classNames={{
className={{
enter: styles.enter,
enterActive: styles.enterActive,
exit: styles.exit,

View File

@ -213,6 +213,10 @@ const buttons = {
background-color: #555a65;
}
`,
grayText: css`
background-color: transparent;
color: ${colorsRaw.gray};
`,
green: css`
background-color: #aae31f;
color: ${colorsRaw.green};
@ -317,7 +321,7 @@ const components = {
color: ${colorsRaw.gray};
font-weight: 500;
border-bottom: 1px solid #eaebf1;
padding: 10px 14px;
padding: 8px 14px;
display: flex;
justify-content: space-between;
align-items: center;
@ -335,6 +339,12 @@ const components = {
background-color: ${colors.activeBackground};
}
`,
viewControlsText: css`
font-size: 14px;
color: ${colors.text};
margin-right: 12px;
white-space: nowrap;
`,
};
const reactSelectStyles = {

View File

@ -27,5 +27,5 @@
"react": "^16.8.4",
"react-dom": "^16.8.4"
},
"incrementToForceBump": 1
"incrementToForceBump": 2
}