Feature/single package (#1)

Authored by Daniel Lautzenheiser on 2022-09-28 20:04:00 -06:00, committed by GitHub
parent 5963227066
commit 0b64464611
1110 changed files with 7842 additions and 257596 deletions

View File

@@ -3,6 +3,198 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [2.55.59](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.58...netlify-cms-core@2.55.59) (2022-09-29)
**Note:** Version bump only for package netlify-cms-core
## [2.55.58](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.57...netlify-cms-core@2.55.58) (2022-09-28)
**Note:** Version bump only for package netlify-cms-core
## [2.55.57](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.56...netlify-cms-core@2.55.57) (2022-09-28)
**Note:** Version bump only for package netlify-cms-core
## [2.55.56](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.55...netlify-cms-core@2.55.56) (2022-09-28)
**Note:** Version bump only for package netlify-cms-core
## [2.55.55](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.54...netlify-cms-core@2.55.55) (2022-09-28)
**Note:** Version bump only for package netlify-cms-core
## [2.55.54](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.53...netlify-cms-core@2.55.54) (2022-09-28)
**Note:** Version bump only for package netlify-cms-core
## [2.55.53](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.52...netlify-cms-core@2.55.53) (2022-09-28)
**Note:** Version bump only for package netlify-cms-core
## [2.55.52](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.51...netlify-cms-core@2.55.52) (2022-09-27)
**Note:** Version bump only for package netlify-cms-core
## [2.55.51](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.50...netlify-cms-core@2.55.51) (2022-09-27)
**Note:** Version bump only for package netlify-cms-core
## [2.55.50](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.49...netlify-cms-core@2.55.50) (2022-09-27)
**Note:** Version bump only for package netlify-cms-core
## [2.55.49](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.48...netlify-cms-core@2.55.49) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.48](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.47...netlify-cms-core@2.55.48) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.47](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.47...netlify-cms-core@2.55.47) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.47](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.46...netlify-cms-core@2.55.47) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.46](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.45...netlify-cms-core@2.55.46) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.45](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.44...netlify-cms-core@2.55.45) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.44](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.43...netlify-cms-core@2.55.44) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.43](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.42...netlify-cms-core@2.55.43) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.42](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.41...netlify-cms-core@2.55.42) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.41](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.40...netlify-cms-core@2.55.41) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.40](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.39...netlify-cms-core@2.55.40) (2022-09-26)
**Note:** Version bump only for package netlify-cms-core
## [2.55.39](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.38...netlify-cms-core@2.55.39) (2022-09-23)
**Note:** Version bump only for package netlify-cms-core
## [2.55.38](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.37...netlify-cms-core@2.55.38) (2022-09-23)
**Note:** Version bump only for package netlify-cms-core
## [2.55.37](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.36...netlify-cms-core@2.55.37) (2022-09-23)
**Note:** Version bump only for package netlify-cms-core
## [2.55.36](https://github.com/netlify/netlify-cms/compare/netlify-cms-core@2.55.35...netlify-cms-core@2.55.36) (2022-09-22)
**Note:** Version bump only for package netlify-cms-core
@@ -2229,7 +2421,7 @@ See [Conventional Commits](https://conventionalcommits.org) for commit guideline
### Features
* **netlify-cms-widget-relation:** use react-select and add support for multiple entries ([#1936](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/1936)) ([518f6fb](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/518f6fb))
* **relation-widget:** use react-select and add support for multiple entries ([#1936](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/1936)) ([518f6fb](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/518f6fb))
@@ -2269,7 +2461,7 @@ See [Conventional Commits](https://conventionalcommits.org) for commit guideline
* **config:** allow config.yml file load to be skipped ([#2053](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/2053)) ([14f94a0](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/14f94a0))
* **netlify-cms-core:** expose loadEntry action to Widgets ([#2010](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/2010)) ([5d8aef1](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/5d8aef1))
* **netlify-cms-widget-map:** add map widget ([#2051](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/2051)) ([18f34d2](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/18f34d2))
* **map-widget:** add map widget ([#2051](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/2051)) ([18f34d2](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/18f34d2))
* **widget-number:** add range validation ([#2049](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/2049)) ([dc44cac](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/dc44cac))
* **workflow:** add deploy preview links ([#2028](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/2028)) ([15d221d](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/15d221d))
@@ -2333,7 +2525,7 @@ See [Conventional Commits](https://conventionalcommits.org) for commit guideline
### Features
* **netlify-cms-widget-select:** add support for multiple selection ([#1901](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/1901)) ([88bf287](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/88bf287))
* **select-widget:** add support for multiple selection ([#1901](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/1901)) ([88bf287](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/88bf287))
* add cloudinary support ([#1932](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/issues/1932)) ([1fc2f50](https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core/commit/1fc2f50))

View File

@@ -9,3 +9,76 @@ In the meantime, you can:
site](https://www.netlifycms.org) for more info.
2. Reach out to the [community chat](https://netlifycms.org/chat/) if you need help.
3. Help out and [write the readme yourself](https://github.com/netlify/netlify-cms/edit/master/packages/netlify-cms-core/README.md)!
# Using Core
```tsx
import React from 'react';
import {
AzureBackend,
BitbucketBackend,
BooleanWidget,
CodeWidget,
ColorStringWidget,
DateTimeWidget,
FileWidget,
GitGatewayBackend,
GitHubBackend,
GitLabBackend,
imageEditorComponent,
ImageWidget,
ListWidget,
MapWidget,
MarkdownWidget,
NetlifyCmsCore as CMS,
NumberWidget,
ObjectWidget,
ProxyBackend,
RelationWidget,
SelectWidget,
StringWidget,
TestBackend,
TextWidget,
locales,
Icon,
images
} from 'netlify-cms-core';
// Register all the things
CMS.registerBackend('git-gateway', GitGatewayBackend);
CMS.registerBackend('azure', AzureBackend);
CMS.registerBackend('github', GitHubBackend);
CMS.registerBackend('gitlab', GitLabBackend);
CMS.registerBackend('bitbucket', BitbucketBackend);
CMS.registerBackend('test-repo', TestBackend);
CMS.registerBackend('proxy', ProxyBackend);
CMS.registerWidget([
StringWidget.Widget(),
NumberWidget.Widget(),
TextWidget.Widget(),
ImageWidget.Widget(),
FileWidget.Widget(),
SelectWidget.Widget(),
MarkdownWidget.Widget(),
ListWidget.Widget(),
ObjectWidget.Widget(),
RelationWidget.Widget(),
BooleanWidget.Widget(),
MapWidget.Widget(),
DateTimeWidget.Widget(),
CodeWidget.Widget(),
ColorStringWidget.Widget(),
]);
CMS.registerEditorComponent(imageEditorComponent);
CMS.registerEditorComponent({
id: 'code-block',
label: 'Code Block',
widget: 'code',
type: 'code-block',
});
CMS.registerLocale('en', locales.en);
Object.keys(images).forEach(iconName => {
CMS.registerIcon(iconName, <Icon type={iconName} />);
});
```
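The `registerWidget` call shown above also accepts a name/control/preview triple, per the updated `CMSApi` typings further down in this commit. Below is a minimal sketch of registering a custom widget that way; the widget name and components are hypothetical, and it assumes the control and preview props expose `value` as in the full `CmsWidgetControlProps`/`CmsWidgetPreviewProps` declarations (only part of them is visible in the hunks below).

```tsx
import React from 'react';
import CMS from 'netlify-cms-core';
import type { CmsWidgetControlProps, CmsWidgetPreviewProps } from 'netlify-cms-core';

// Hypothetical control: a text input that upper-cases whatever the user types.
function ShoutControl({ value, onChange, forID, classNameWrapper }: CmsWidgetControlProps<string>) {
  return (
    <input
      id={forID}
      className={classNameWrapper}
      value={value ?? ''}
      onChange={e => onChange(e.target.value.toUpperCase())}
    />
  );
}

// Hypothetical preview: renders the stored value in bold.
function ShoutPreview({ value }: CmsWidgetPreviewProps<string>) {
  return <strong>{value}</strong>;
}

// String-based registration form: (name, control, preview).
CMS.registerWidget('shout', ShoutControl, ShoutPreview);
```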

View File

@@ -1,7 +1,8 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
declare module 'netlify-cms-core' {
import type { ComponentType, ReactNode } from 'react';
import type { List, Map } from 'immutable';
import type { Iterable as ImmutableIterable, List, Map } from 'immutable';
import type { ComponentType, FocusEventHandler, ReactNode } from 'react';
import type { t } from 'react-polyglot';
import type { Pluggable } from 'unified';
export type CmsBackendType =
@@ -298,6 +299,19 @@ declare module 'netlify-cms-core' {
pattern?: string;
}
export type SortDirection = 'Ascending' | 'Descending' | 'None';
export interface CmsSortableFieldsDefault {
field: string;
direction: SortDirection;
}
export interface CmsSortableFields {
default?: CmsSortableFieldsDefault;
fields: string[];
}
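For reference, a hedged sketch of a collection fragment using the new object form of `sortable_fields`; the collection and field names are illustrative.

```tsx
import type { CmsCollection } from 'netlify-cms-core';

// Illustrative collection fragment: sortable_fields is now an object carrying
// the sortable field names plus an optional default sort.
const posts: Partial<CmsCollection> = {
  name: 'posts',
  sortable_fields: {
    fields: ['title', 'date', 'author'],
    default: { field: 'date', direction: 'Descending' },
  },
};
```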
export interface CmsCollection {
name: string;
icon?: string;
@@ -337,15 +351,10 @@ declare module 'netlify-cms-core' {
path?: string;
media_folder?: string;
public_folder?: string;
sortable_fields?: string[];
sortable_fields?: CmsSortableFields;
view_filters?: ViewFilter[];
view_groups?: ViewGroup[];
i18n?: boolean | CmsI18nConfig;
/**
* @deprecated Use sortable_fields instead
*/
sortableFields?: string[];
}
export interface CmsBackend {
@@ -425,7 +434,14 @@ declare module 'netlify-cms-core' {
widget: string;
}
export interface EditorComponentOptions {
export interface EditorComponentWidgetOptions {
id: string;
label: string;
widget: string;
type: string;
}
export interface EditorComponentManualOptions {
id: string;
label: string;
fields: EditorComponentField[];
@@ -436,6 +452,8 @@ declare module 'netlify-cms-core' {
toPreview: (data: any) => string;
}
export type EditorComponentOptions = EditorComponentManualOptions | EditorComponentWidgetOptions;
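`EditorComponentOptions` is now a union of the widget-based form (used for the `code-block` registration in the README above) and the manual form with its own `fields` and `toPreview`. A small sketch of the widget-based form with the new named type:

```tsx
import CMS from 'netlify-cms-core';
import type { EditorComponentWidgetOptions } from 'netlify-cms-core';

// Widget-based editor component: reuses the registered `code` widget as a block type.
const codeBlock: EditorComponentWidgetOptions = {
  id: 'code-block',
  label: 'Code Block',
  widget: 'code',
  type: 'code-block',
};

CMS.registerEditorComponent(codeBlock);
```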
export interface PreviewStyleOptions {
raw: boolean;
}
@@ -444,7 +462,7 @@ declare module 'netlify-cms-core' {
value: string;
}
export type CmsBackendClass = any; // TODO: type properly
export type CmsBackendClass = Implementation;
export interface CmsRegistryBackend {
init: (args: any) => CmsBackendClass;
@@ -456,6 +474,9 @@ declare module 'netlify-cms-core' {
onChange: (value: T) => void;
forID: string;
classNameWrapper: string;
setActiveStyle: FocusEventHandler;
setInactiveStyle: FocusEventHandler;
t: t;
}
export interface CmsWidgetPreviewProps<T = any> {
@@ -471,12 +492,17 @@ declare module 'netlify-cms-core' {
name: string;
controlComponent: CmsWidgetControlProps<T>;
previewComponent?: CmsWidgetPreviewProps<T>;
validator?: (props: {
field: Map<string, any>;
value: T | undefined | null;
t: t;
}) => boolean | { error: any } | Promise<boolean | { error: any }>;
globalStyles?: any;
}
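The widget object form gains an optional `validator`. A hedged sketch that reuses the string widget's components and adds a length check; the `limited-string` name and the `max_length` field option are assumptions for illustration only.

```tsx
import CMS, { StringWidget } from 'netlify-cms-core';
import type { CmsWidgetParam } from 'netlify-cms-core';

// Hypothetical widget: the string widget's control/preview plus a validator
// that rejects values longer than an assumed `max_length` option on the field.
const limitedString: CmsWidgetParam<string> = {
  ...StringWidget.Widget(),
  name: 'limited-string',
  validator: ({ field, value }) => {
    const max = field.get('max_length');
    if (typeof max === 'number' && typeof value === 'string' && value.length > max) {
      return { error: `Must be at most ${max} characters` };
    }
    return true;
  },
};

CMS.registerWidget(limitedString);
```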
export interface CmsWidget<T = any> {
control: CmsWidgetControlProps<T>;
preview?: CmsWidgetPreviewProps<T>;
control: ComponentType<CmsWidgetControlProps<T>>;
preview?: ComponentType<CmsWidgetPreviewProps<T>>;
globalStyles?: any;
}
@@ -547,7 +573,7 @@ declare module 'netlify-cms-core' {
document: Document;
};
export interface CMS {
export interface CMSApi {
getBackend: (name: string) => CmsRegistryBackend | undefined;
getEditorComponents: () => Map<string, ComponentType<any>>;
getRemarkPlugins: () => Array<Pluggable>;
@@ -574,7 +600,7 @@ declare module 'netlify-cms-core' {
component: ComponentType<PreviewTemplateComponentProps>,
) => void;
registerWidget: (
widget: string | CmsWidgetParam,
widget: string | CmsWidgetParam | CmsWidgetParam[],
control?: ComponentType<CmsWidgetControlProps> | string,
preview?: ComponentType<CmsWidgetPreviewProps>,
) => void;
@@ -584,11 +610,353 @@ declare module 'netlify-cms-core' {
) => void;
registerIcon: (iconName: string, icon: ReactNode) => void;
getIcon: (iconName: string) => ReactNode;
registerAdditionalLink: (id: string, title: string, link: string, iconName?: string) => void;
getAdditionalLinks: () => { title: string, link: string, iconName?: string }[];
registerAdditionalLink: (
id: string,
title: string,
data: string | ComponentType,
iconName?: string,
) => void;
getAdditionalLinks: () => { title: string; data: string | ComponentType; iconName?: string }[];
getAdditionalLink: (
id: string,
) => { title: string; data: string | ComponentType; iconName?: string } | undefined;
}
export const NetlifyCmsCore: CMS;
export const CMS: CMSApi;
export default NetlifyCmsCore;
export default CMS;
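`registerAdditionalLink` now takes either an external URL string or a React component as its `data` argument. A short sketch; the ids, titles, and the component are hypothetical, and the optional icon name is omitted.

```tsx
import React from 'react';
import CMS from 'netlify-cms-core';

// External link: `data` is a plain URL string.
CMS.registerAdditionalLink('docs', 'Documentation', 'https://www.netlifycms.org/docs/');

// Custom entry: `data` is a React component.
function ReportsPage() {
  return <h2>Reports</h2>;
}
CMS.registerAdditionalLink('reports', 'Reports', ReportsPage);

// Registered links can be read back:
console.log(CMS.getAdditionalLinks());
```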
// Backends
export type DisplayURLObject = { id: string; path: string };
export type DisplayURL = DisplayURLObject | string;
export type DataFile = {
path: string;
slug: string;
raw: string;
newPath?: string;
};
export type AssetProxy = {
path: string;
fileObj?: File;
toBase64?: () => Promise<string>;
};
export type Entry = {
dataFiles: DataFile[];
assets: AssetProxy[];
};
export type PersistOptions = {
newEntry?: boolean;
commitMessage: string;
collectionName?: string;
useWorkflow?: boolean;
unpublished?: boolean;
status?: string;
};
export type DeleteOptions = {};
export type Credentials = { token: string | {}; refresh_token?: string };
export type User = Credentials & {
backendName?: string;
login?: string;
name: string;
useOpenAuthoring?: boolean;
};
export interface ImplementationEntry {
data: string;
file: { path: string; label?: string; id?: string | null; author?: string; updatedOn?: string };
}
export type ImplementationFile = {
id?: string | null | undefined;
label?: string;
path: string;
};
export interface ImplementationMediaFile {
name: string;
id: string;
size?: number;
displayURL?: DisplayURL;
path: string;
draft?: boolean;
url?: string;
file?: File;
}
export interface UnpublishedEntryMediaFile {
id: string;
path: string;
}
export interface UnpublishedEntryDiff {
id: string;
path: string;
newFile: boolean;
}
export interface UnpublishedEntry {
pullRequestAuthor?: string;
slug: string;
collection: string;
status: string;
diffs: UnpublishedEntryDiff[];
updatedAt: string;
}
export type CursorStoreObject = {
actions: Set<string>;
data: Map<string, unknown>;
meta: Map<string, unknown>;
};
export type CursorStore = {
get<K extends keyof CursorStoreObject>(
key: K,
defaultValue?: CursorStoreObject[K],
): CursorStoreObject[K];
getIn<V>(path: string[]): V;
set<K extends keyof CursorStoreObject, V extends CursorStoreObject[K]>(
key: K,
value: V,
): CursorStoreObject[K];
setIn(path: string[], value: unknown): CursorStore;
hasIn(path: string[]): boolean;
mergeIn(path: string[], value: unknown): CursorStore;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (...args: any[]) => CursorStore;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
updateIn: (...args: any[]) => CursorStore;
};
export type ActionHandler = (action: string) => unknown;
export class Cursor {
static create(...args: {}[]): Cursor;
updateStore(...args: any[]): Cursor;
updateInStore(...args: any[]): Cursor;
hasAction(action: string): boolean;
addAction(action: string): Cursor;
removeAction(action: string): Cursor;
setActions(actions: Iterable<string>): Cursor;
mergeActions(actions: Set<string>): Cursor;
getActionHandlers(handler: ActionHandler): ImmutableIterable<string, unknown>;
setData(data: {}): Cursor;
mergeData(data: {}): Cursor;
wrapData(data: {}): Cursor;
unwrapData(): [Map<string, unknown>, Cursor];
clearData(): Cursor;
setMeta(meta: {}): Cursor;
mergeMeta(meta: {}): Cursor;
}
class Implementation {
authComponent: () => void;
restoreUser: (user: User) => Promise<User>;
authenticate: (credentials: Credentials) => Promise<User>;
logout: () => Promise<void> | void | null;
getToken: () => Promise<string | null>;
getEntry: (path: string) => Promise<ImplementationEntry>;
entriesByFolder: (
folder: string,
extension: string,
depth: number,
) => Promise<ImplementationEntry[]>;
entriesByFiles: (files: ImplementationFile[]) => Promise<ImplementationEntry[]>;
getMediaDisplayURL?: (displayURL: DisplayURL) => Promise<string>;
getMedia: (folder?: string) => Promise<ImplementationMediaFile[]>;
getMediaFile: (path: string) => Promise<ImplementationMediaFile>;
persistEntry: (entry: Entry, opts: PersistOptions) => Promise<void>;
persistMedia: (file: AssetProxy, opts: PersistOptions) => Promise<ImplementationMediaFile>;
deleteFiles: (paths: string[], commitMessage: string) => Promise<void>;
unpublishedEntries: () => Promise<string[]>;
unpublishedEntry: (args: {
id?: string;
collection?: string;
slug?: string;
}) => Promise<UnpublishedEntry>;
unpublishedEntryDataFile: (
collection: string,
slug: string,
path: string,
id: string,
) => Promise<string>;
unpublishedEntryMediaFile: (
collection: string,
slug: string,
path: string,
id: string,
) => Promise<ImplementationMediaFile>;
updateUnpublishedEntryStatus: (
collection: string,
slug: string,
newStatus: string,
) => Promise<void>;
publishUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
deleteUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
getDeployPreview: (
collectionName: string,
slug: string,
) => Promise<{ url: string; status: string } | null>;
allEntriesByFolder?: (
folder: string,
extension: string,
depth: number,
) => Promise<ImplementationEntry[]>;
traverseCursor?: (
cursor: Cursor,
action: string,
) => Promise<{ entries: ImplementationEntry[]; cursor: Cursor }>;
isGitBackend?: () => boolean;
status: () => Promise<{
auth: { status: boolean };
api: { status: boolean; statusPage: string };
}>;
}
export class AzureBackend extends Implementation {}
export class BitbucketBackend extends Implementation {}
export class GitGatewayBackend extends Implementation {}
export class GitHubBackend extends Implementation {}
export class GitLabBackend extends Implementation {}
export class ProxyBackend extends Implementation {}
export class TestBackend extends Implementation {}
// Widgets
export const BooleanWidget: {
Widget: () => CmsWidgetParam<boolean>;
};
export const CodeWidget: {
Widget: () => CmsWidgetParam<any>;
};
export const ColorStringWidget: {
Widget: () => CmsWidgetParam<string>;
};
export const DateTimeWidget: {
Widget: () => CmsWidgetParam<Date | string>;
};
export const FileWidget: {
Widget: () => CmsWidgetParam<string | string[] | List<string>>;
};
export const ImageWidget: {
Widget: () => CmsWidgetParam<string | string[] | List<string>>;
};
export const ListWidget: {
Widget: () => CmsWidgetParam<List<any>>;
};
export const MapWidget: {
Widget: () => CmsWidgetParam<any>;
};
export const MarkdownWidget: {
Widget: () => CmsWidgetParam<string>;
};
export const NumberWidget: {
Widget: () => CmsWidgetParam<string | number>;
};
export const ObjectWidget: {
Widget: () => CmsWidgetParam<Map<string, any> | Record<string, any>>;
};
export const RelationWidget: {
Widget: () => CmsWidgetParam<any>;
};
export const SelectWidget: {
Widget: () => CmsWidgetParam<string | string[]>;
};
export const StringWidget: {
Widget: () => CmsWidgetParam<string>;
};
export const TextWidget: {
Widget: () => CmsWidgetParam<string>;
};
export const MediaLibraryCloudinary: {
name: string;
init: ({
options,
handleInsert,
}?: {
options?: Record<string, any> | undefined;
handleInsert: any;
}) => Promise<{
show: ({
config,
allowMultiple,
}?: {
config?: Record<string, any> | undefined;
allowMultiple: boolean;
}) => any;
hide: () => any;
enableStandalone: () => boolean;
}>;
};
export const MediaLibraryUploadcare: {
name: string;
init: ({
options,
handleInsert,
}?: {
options?:
| {
config: Record<string, any>;
settings: Record<string, any>;
}
| undefined;
handleInsert: any;
}) => Promise<{
show: ({
value,
config,
allowMultiple,
imagesOnly,
}?: {
value: any;
config?: Record<string, any> | undefined;
allowMultiple: boolean;
imagesOnly?: boolean | undefined;
}) => any;
enableStandalone: () => boolean;
}>;
};
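Both media library integrations expose the same `init`/`show` surface. A hedged sketch of driving Cloudinary through it directly; the option keys and the insert handler are placeholders, since in normal use the CMS wires these up from its configuration.

```tsx
import { MediaLibraryCloudinary } from 'netlify-cms-core';

async function openCloudinary() {
  const library = await MediaLibraryCloudinary.init({
    options: { cloud_name: 'demo', api_key: 'xxxx' }, // assumed Cloudinary settings
    handleInsert: (url: string) => console.log('inserted', url),
  });
  library.show({ allowMultiple: false });
}

openCloudinary();
```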
export const imageEditorComponent: EditorComponentManualOptions;
export const locales: {
en: Record<string, any>;
};
class NetlifyAuthenticator {
constructor(config: Record<string, any>);
refresh: (args: {
provider: string;
refresh_token: string;
}) => Promise<{ token: string; refresh_token: string }>;
}
export { NetlifyAuthenticator };
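`NetlifyAuthenticator` is typed with a `Record<string, any>` config, so the config keys below are assumptions; `refresh` exchanges a provider refresh token for a fresh access token.

```tsx
import { NetlifyAuthenticator } from 'netlify-cms-core';

// Config keys are assumptions; the typings only require Record<string, any>.
const authenticator = new NetlifyAuthenticator({ site_id: 'example.netlify.app' });

async function refreshGitHubToken(refreshToken: string) {
  const { token, refresh_token } = await authenticator.refresh({
    provider: 'github',
    refresh_token: refreshToken,
  });
  return { token, refresh_token };
}
```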
// Images
export interface IconProps {
type: string;
direction?: 'right' | 'down' | 'left' | 'up';
size?: string;
className?: string;
}
export const Icon: React.ComponentType<IconProps>;
export const images: Record<string, ReactNode>;
}

View File

@@ -1,7 +1,7 @@
{
"name": "netlify-cms-core",
"description": "Netlify CMS core application, see netlify-cms package for the main distribution.",
"version": "2.55.36",
"version": "2.55.59",
"repository": "https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-core",
"bugs": "https://github.com/netlify/netlify-cms/issues",
"module": "dist/esm/index.js",
@@ -13,10 +13,10 @@
],
"types": "index.d.ts",
"scripts": {
"develop": "yarn build:esm --watch",
"develop": "webpack serve --hot",
"webpack": "node --max_old_space_size=4096 ../../node_modules/webpack/bin/webpack.js",
"build": "cross-env NODE_ENV=production run-s webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore **/__tests__ --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"keywords": [
"netlify",
@@ -25,78 +25,121 @@
],
"license": "MIT",
"dependencies": {
"@iarna/toml": "2.2.5",
"ajv": "8.1.0",
"ajv-errors": "^3.0.0",
"ajv-keywords": "^5.0.0",
"clean-stack": "^4.1.0",
"copy-text-to-clipboard": "^3.0.0",
"deepmerge": "^4.2.2",
"diacritics": "^1.3.0",
"fuzzy": "^0.1.1",
"gotrue-js": "^0.9.24",
"gray-matter": "^4.0.2",
"history": "^4.7.2",
"immer": "^9.0.0",
"js-base64": "^3.0.0",
"jwt-decode": "^3.0.0",
"node-polyglot": "^2.3.0",
"prop-types": "^15.7.2",
"react": "^16.8.4",
"react-dnd": "^14.0.0",
"react-dnd-html5-backend": "^14.0.0",
"react-dom": "^16.8.4",
"react-frame-component": "^5.2.1",
"react-hot-loader": "^4.8.0",
"react-immutable-proptypes": "^2.1.0",
"react-is": "18.2.0",
"react-markdown": "^6.0.2",
"react-modal": "^3.8.1",
"react-polyglot": "^0.7.0",
"react-redux": "^7.2.0",
"react-router-dom": "^5.2.0",
"react-scroll-sync": "^0.9.0",
"react-sortable-hoc": "^2.0.0",
"react-split-pane": "^0.1.85",
"react-topbar-progress-indicator": "^4.0.0",
"react-virtualized-auto-sizer": "^1.0.2",
"react-waypoint": "^10.0.0",
"react-window": "^1.8.5",
"redux": "^4.0.5",
"redux-devtools-extension": "^2.13.8",
"redux-notifications": "^4.0.1",
"redux-thunk": "^2.3.0",
"remark-gfm": "3.0.1",
"sanitize-filename": "^1.6.1",
"semaphore": "^1.0.5",
"tomlify-j0.4": "^3.0.0-alpha.0",
"url": "^0.11.0",
"url-join": "^4.0.1",
"what-input": "^5.1.4",
"yaml": "^1.8.3"
},
"peerDependencies": {
"@emotion/css": "11.10.0",
"@emotion/react": "11.10.4",
"@emotion/styled": "11.10.4",
"immutable": "^3.7.6",
"lodash": "^4.17.11",
"moment": "^2.24.0",
"netlify-cms-editor-component-image": "^2.6.7",
"netlify-cms-lib-auth": "^2.3.0",
"netlify-cms-lib-util": "^2.12.3",
"netlify-cms-lib-widgets": "^1.6.1",
"netlify-cms-ui-default": "^2.12.1",
"prop-types": "^15.7.2",
"react": "^16.8.4 || ^17.0.0",
"react-dom": "^16.8.4 || ^17.0.0",
"react-immutable-proptypes": "^2.1.0"
"@hot-loader/react-dom": "17.0.2",
"@iarna/toml": "2.2.5",
"@mui/icons-material": "5.10.6",
"@mui/material": "5.10.6",
"@reduxjs/toolkit": "1.8.5",
"ajv": "6.12.6",
"ajv-errors": "1.0.1",
"ajv-keywords": "3.5.2",
"apollo-cache-inmemory": "1.6.6",
"apollo-client": "2.6.10",
"apollo-link-context": "1.0.20",
"apollo-link-http": "1.5.17",
"array-move": "4.0.0",
"clean-stack": "4.2.0",
"codemirror": "5.65.9",
"common-tags": "1.8.1",
"copy-text-to-clipboard": "3.0.1",
"create-react-class": "15.7.0",
"deepmerge": "4.2.2",
"diacritics": "1.3.0",
"dompurify": "2.4.0",
"fuzzy": "0.1.3",
"gotrue-js": "0.9.29",
"graphql": "15.8.0",
"graphql-tag": "2.12.6",
"gray-matter": "4.0.3",
"history": "4.10.1",
"immer": "9.0.15",
"immutable": "3.8.2",
"ini": "2.0.0",
"is-hotkey": "0.2.0",
"js-base64": "3.7.2",
"js-sha256": "0.9.0",
"jwt-decode": "3.1.2",
"localforage": "1.10.0",
"lodash": "4.17.21",
"mdast-util-definitions": "1.2.5",
"mdast-util-to-string": "1.1.0",
"minimatch": "3.0.4",
"moment": "2.29.4",
"node-polyglot": "2.4.2",
"ol": "6.15.1",
"prop-types": "15.8.1",
"react": "17.0.2",
"react-aria-menubutton": "7.0.3",
"react-codemirror2": "7.2.1",
"react-color": "2.19.3",
"react-datetime": "3.1.1",
"react-dnd": "14.0.5",
"react-dnd-html5-backend": "14.1.0",
"react-dom": "17.0.2",
"react-frame-component": "5.2.3",
"react-hot-loader": "4.13.0",
"react-immutable-proptypes": "2.2.0",
"react-is": "18.2.0",
"react-markdown": "6.0.3",
"react-modal": "3.15.1",
"react-polyglot": "0.7.2",
"react-redux": "8.0.4",
"react-router-dom": "5.3.3",
"react-scroll-sync": "0.9.0",
"react-select": "4.3.1",
"react-sortable-hoc": "2.0.0",
"react-split-pane": "0.1.92",
"react-textarea-autosize": "8.3.4",
"react-toggled": "1.2.7",
"react-topbar-progress-indicator": "4.1.1",
"react-transition-group": "4.4.5",
"react-virtualized-auto-sizer": "1.0.7",
"react-waypoint": "10.3.0",
"react-window": "1.8.7",
"rehype-parse": "6.0.2",
"rehype-remark": "8.1.1",
"rehype-stringify": "7.0.0",
"remark-gfm": "3.0.1",
"remark-parse": "6.0.3",
"remark-rehype": "4.0.1",
"remark-stringify": "6.0.4",
"sanitize-filename": "1.6.3",
"semaphore": "1.1.0",
"slate": "0.47.9",
"slate-base64-serializer": "0.2.115",
"slate-plain-serializer": "0.7.13",
"slate-react": "0.22.10",
"slate-soft-break": "0.9.0",
"tomlify-j0.4": "3.0.0",
"unified": "7.1.0",
"unist-builder": "1.0.4",
"unist-util-visit-parents": "2.1.2",
"uploadcare-widget": "3.19.0",
"uploadcare-widget-tab-effects": "1.5.0",
"url": "0.11.0",
"url-join": "4.0.1",
"uuid": "3.4.0",
"validate-color": "2.2.1",
"what-input": "5.2.12",
"what-the-diff": "0.6.0",
"yaml": "1.10.2"
},
"devDependencies": {
"@types/history": "^4.7.8",
"@types/react": "18.0.20",
"@types/history": "4.7.11",
"@types/react": "17.0.50",
"@types/react-dom": "17.0.17",
"@types/react-router-dom": "5.3.3",
"@types/react-scroll-sync": "0.8.4",
"@types/redux-mock-store": "^1.0.2",
"@types/url-join": "^4.0.0",
"redux-mock-store": "^1.5.3"
"@types/url-join": "4.0.1",
"commonmark": "0.30.0",
"commonmark-spec": "0.30.0",
"cross-env": "7.0.3",
"react-svg-loader": "3.0.3",
"slate-hyperscript": "0.13.9",
"webpack": "4.46.0",
"webpack-cli": "4.10.0"
}
}

View File

@@ -1,934 +0,0 @@
import { Map, List, fromJS } from 'immutable';
import {
resolveBackend,
Backend,
extractSearchFields,
expandSearchEntries,
mergeExpandedEntries,
} from '../backend';
import { getBackend } from '../lib/registry';
import { FOLDER, FILES } from '../constants/collectionTypes';
jest.mock('../lib/registry');
jest.mock('netlify-cms-lib-util');
jest.mock('../lib/urlHelper');
describe('Backend', () => {
describe('filterEntries', () => {
let backend;
beforeEach(() => {
getBackend.mockReturnValue({
init: jest.fn(),
});
backend = resolveBackend({
backend: {
name: 'git-gateway',
},
});
});
it('filters string values', () => {
const result = backend.filterEntries(
{
entries: [
{
data: {
testField: 'testValue',
},
},
{
data: {
testField: 'testValue2',
},
},
],
},
Map({ field: 'testField', value: 'testValue' }),
);
expect(result.length).toBe(1);
});
it('filters number values', () => {
const result = backend.filterEntries(
{
entries: [
{
data: {
testField: 42,
},
},
{
data: {
testField: 5,
},
},
],
},
Map({ field: 'testField', value: 42 }),
);
expect(result.length).toBe(1);
});
it('filters boolean values', () => {
const result = backend.filterEntries(
{
entries: [
{
data: {
testField: false,
},
},
{
data: {
testField: true,
},
},
],
},
Map({ field: 'testField', value: false }),
);
expect(result.length).toBe(1);
});
it('filters list values', () => {
const result = backend.filterEntries(
{
entries: [
{
data: {
testField: ['valueOne', 'valueTwo', 'testValue'],
},
},
{
data: {
testField: ['valueThree'],
},
},
],
},
Map({ field: 'testField', value: 'testValue' }),
);
expect(result.length).toBe(1);
});
});
describe('getLocalDraftBackup', () => {
const { localForage, asyncLock } = require('netlify-cms-lib-util');
asyncLock.mockImplementation(() => ({ acquire: jest.fn(), release: jest.fn() }));
beforeEach(() => {
jest.clearAllMocks();
});
it('should return empty object on no item', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
const collection = Map({
name: 'posts',
});
const slug = 'slug';
localForage.getItem.mockReturnValue();
const result = await backend.getLocalDraftBackup(collection, slug);
expect(result).toEqual({});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});
it('should return empty object on item with empty content', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
const collection = Map({
name: 'posts',
});
const slug = 'slug';
localForage.getItem.mockReturnValue({ raw: '' });
const result = await backend.getLocalDraftBackup(collection, slug);
expect(result).toEqual({});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});
it('should return backup entry, empty media files and assets when only raw property was saved', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
const collection = Map({
name: 'posts',
});
const slug = 'slug';
localForage.getItem.mockReturnValue({
raw: '---\ntitle: "Hello World"\n---\n',
});
const result = await backend.getLocalDraftBackup(collection, slug);
expect(result).toEqual({
entry: {
author: '',
mediaFiles: [],
collection: 'posts',
slug: 'slug',
path: '',
partial: false,
raw: '---\ntitle: "Hello World"\n---\n',
data: { title: 'Hello World' },
meta: {},
i18n: {},
label: null,
isModification: null,
status: '',
updatedOn: '',
},
});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});
it('should return backup entry, media files and assets when all were backed up', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
const collection = Map({
name: 'posts',
});
const slug = 'slug';
localForage.getItem.mockReturnValue({
raw: '---\ntitle: "Hello World"\n---\n',
mediaFiles: [{ id: '1' }],
});
const result = await backend.getLocalDraftBackup(collection, slug);
expect(result).toEqual({
entry: {
author: '',
mediaFiles: [{ id: '1' }],
collection: 'posts',
slug: 'slug',
path: '',
partial: false,
raw: '---\ntitle: "Hello World"\n---\n',
data: { title: 'Hello World' },
meta: {},
i18n: {},
label: null,
isModification: null,
status: '',
updatedOn: '',
},
});
expect(localForage.getItem).toHaveBeenCalledTimes(1);
expect(localForage.getItem).toHaveBeenCalledWith('backup.posts.slug');
});
});
describe('persistLocalDraftBackup', () => {
const { localForage } = require('netlify-cms-lib-util');
beforeEach(() => {
jest.clearAllMocks();
});
it('should not persist empty entry', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
backend.entryToRaw = jest.fn().mockReturnValue('');
const collection = Map({
name: 'posts',
});
const slug = 'slug';
const entry = Map({
slug,
});
await backend.persistLocalDraftBackup(entry, collection);
expect(backend.entryToRaw).toHaveBeenCalledTimes(1);
expect(backend.entryToRaw).toHaveBeenCalledWith(collection, entry);
expect(localForage.setItem).toHaveBeenCalledTimes(0);
});
it('should persist non empty entry', async () => {
const implementation = {
init: jest.fn(() => implementation),
};
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
backend.entryToRaw = jest.fn().mockReturnValue('content');
const collection = Map({
name: 'posts',
});
const slug = 'slug';
const entry = Map({
slug,
path: 'content/posts/entry.md',
mediaFiles: List([{ id: '1' }]),
});
await backend.persistLocalDraftBackup(entry, collection);
expect(backend.entryToRaw).toHaveBeenCalledTimes(1);
expect(backend.entryToRaw).toHaveBeenCalledWith(collection, entry);
expect(localForage.setItem).toHaveBeenCalledTimes(2);
expect(localForage.setItem).toHaveBeenCalledWith('backup.posts.slug', {
mediaFiles: [{ id: '1' }],
path: 'content/posts/entry.md',
raw: 'content',
});
expect(localForage.setItem).toHaveBeenCalledWith('backup', 'content');
});
});
describe('persistMedia', () => {
it('should persist media', async () => {
const persistMediaResult = {};
const implementation = {
init: jest.fn(() => implementation),
persistMedia: jest.fn().mockResolvedValue(persistMediaResult),
};
const config = { backend: { name: 'github' } };
const backend = new Backend(implementation, { config, backendName: config.backend.name });
const user = { login: 'login', name: 'name' };
backend.currentUser = jest.fn().mockResolvedValue(user);
const file = { path: 'static/media/image.png' };
const result = await backend.persistMedia(config, file);
expect(result).toBe(persistMediaResult);
expect(implementation.persistMedia).toHaveBeenCalledTimes(1);
expect(implementation.persistMedia).toHaveBeenCalledWith(
{ path: 'static/media/image.png' },
{ commitMessage: 'Upload “static/media/image.png”' },
);
});
});
describe('unpublishedEntry', () => {
it('should return unpublished entry', async () => {
const unpublishedEntryResult = {
diffs: [{ path: 'src/posts/index.md', newFile: false }, { path: 'netlify.png' }],
};
const implementation = {
init: jest.fn(() => implementation),
unpublishedEntry: jest.fn().mockResolvedValue(unpublishedEntryResult),
unpublishedEntryDataFile: jest
.fn()
.mockResolvedValueOnce('---\ntitle: "Hello World"\n---\n'),
unpublishedEntryMediaFile: jest.fn().mockResolvedValueOnce({ id: '1' }),
};
const config = {
media_folder: 'static/images',
};
const backend = new Backend(implementation, { config, backendName: 'github' });
const collection = fromJS({
name: 'posts',
folder: 'src/posts',
fields: [],
});
const state = {
config,
integrations: Map({}),
mediaLibrary: Map({}),
};
const slug = 'slug';
const result = await backend.unpublishedEntry(state, collection, slug);
expect(result).toEqual({
author: '',
collection: 'posts',
slug: '',
path: 'src/posts/index.md',
partial: false,
raw: '---\ntitle: "Hello World"\n---\n',
data: { title: 'Hello World' },
meta: { path: 'src/posts/index.md' },
i18n: {},
label: null,
isModification: true,
mediaFiles: [{ id: '1', draft: true }],
status: '',
updatedOn: '',
});
});
});
describe('generateUniqueSlug', () => {
beforeEach(() => {
jest.resetAllMocks();
});
it("should return unique slug when entry doesn't exist", async () => {
const { sanitizeSlug } = require('../lib/urlHelper');
sanitizeSlug.mockReturnValue('some-post-title');
const implementation = {
init: jest.fn(() => implementation),
getEntry: jest.fn(() => Promise.resolve()),
};
const collection = fromJS({
name: 'posts',
fields: [
{
name: 'title',
},
],
type: FOLDER,
folder: 'posts',
slug: '{{slug}}',
path: 'sub_dir/{{slug}}',
});
const entry = Map({
title: 'some post title',
});
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
await expect(backend.generateUniqueSlug(collection, entry, Map({}), [])).resolves.toBe(
'sub_dir/some-post-title',
);
});
it('should return unique slug when entry exists', async () => {
const { sanitizeSlug, sanitizeChar } = require('../lib/urlHelper');
sanitizeSlug.mockReturnValue('some-post-title');
sanitizeChar.mockReturnValue('-');
const implementation = {
init: jest.fn(() => implementation),
getEntry: jest.fn(),
};
implementation.getEntry.mockResolvedValueOnce({ data: 'data' });
implementation.getEntry.mockResolvedValueOnce();
const collection = fromJS({
name: 'posts',
fields: [
{
name: 'title',
},
],
type: FOLDER,
folder: 'posts',
slug: '{{slug}}',
path: 'sub_dir/{{slug}}',
});
const entry = Map({
title: 'some post title',
});
const backend = new Backend(implementation, { config: {}, backendName: 'github' });
await expect(backend.generateUniqueSlug(collection, entry, Map({}), [])).resolves.toBe(
'sub_dir/some-post-title-1',
);
});
});
describe('extractSearchFields', () => {
it('should extract slug', () => {
expect(extractSearchFields(['slug'])({ slug: 'entry-slug', data: {} })).toEqual(
' entry-slug',
);
});
it('should extract path', () => {
expect(extractSearchFields(['path'])({ path: 'entry-path', data: {} })).toEqual(
' entry-path',
);
});
it('should extract fields', () => {
expect(
extractSearchFields(['title', 'order'])({ data: { title: 'Entry Title', order: 5 } }),
).toEqual(' Entry Title 5');
});
it('should extract nested fields', () => {
expect(
extractSearchFields(['nested.title'])({ data: { nested: { title: 'nested title' } } }),
).toEqual(' nested title');
});
});
describe('search/query', () => {
const collections = [
fromJS({
name: 'posts',
folder: 'posts',
fields: [
{ name: 'title', widget: 'string' },
{ name: 'short_title', widget: 'string' },
{ name: 'author', widget: 'string' },
{ name: 'description', widget: 'string' },
{ name: 'nested', widget: 'object', fields: { name: 'title', widget: 'string' } },
],
}),
fromJS({
name: 'pages',
folder: 'pages',
fields: [
{ name: 'title', widget: 'string' },
{ name: 'short_title', widget: 'string' },
{ name: 'author', widget: 'string' },
{ name: 'description', widget: 'string' },
{ name: 'nested', widget: 'object', fields: { name: 'title', widget: 'string' } },
],
}),
];
const posts = [
{
path: 'posts/find-me.md',
slug: 'find-me',
data: {
title: 'find me by title',
short_title: 'find me by short title',
author: 'find me by author',
description: 'find me by description',
nested: { title: 'find me by nested title' },
},
},
{ path: 'posts/not-me.md', slug: 'not-me', data: { title: 'not me' } },
];
const pages = [
{
path: 'pages/find-me.md',
slug: 'find-me',
data: {
title: 'find me by title',
short_title: 'find me by short title',
author: 'find me by author',
description: 'find me by description',
nested: { title: 'find me by nested title' },
},
},
{ path: 'pages/not-me.md', slug: 'not-me', data: { title: 'not me' } },
];
const files = [
{
path: 'files/file1.md',
slug: 'file1',
data: {
author: 'find me by author',
},
},
{
path: 'files/file2.md',
slug: 'file2',
data: {
other: 'find me by other',
},
},
];
const implementation = {
init: jest.fn(() => implementation),
};
let backend;
beforeEach(() => {
backend = new Backend(implementation, { config: {}, backendName: 'github' });
backend.listAllEntries = jest.fn(collection => {
if (collection.get('name') === 'posts') {
return Promise.resolve(posts);
}
if (collection.get('name') === 'pages') {
return Promise.resolve(pages);
}
if (collection.get('name') === 'files') {
return Promise.resolve(files);
}
return Promise.resolve([]);
});
});
it('should search collections by title', async () => {
const results = await backend.search(collections, 'find me by title');
expect(results).toEqual({
entries: [posts[0], pages[0]],
});
});
it('should search collections by short title', async () => {
const results = await backend.search(collections, 'find me by short title');
expect(results).toEqual({
entries: [posts[0], pages[0]],
});
});
it('should search collections by author', async () => {
const results = await backend.search(collections, 'find me by author');
expect(results).toEqual({
entries: [posts[0], pages[0]],
});
});
it('should search collections by summary description', async () => {
const results = await backend.search(
collections.map(c => c.set('summary', '{{description}}')),
'find me by description',
);
expect(results).toEqual({
entries: [posts[0], pages[0]],
});
});
it('should search in file collection using top level fields', async () => {
const collections = [
fromJS({
name: 'files',
files: [
{
name: 'file1',
fields: [{ name: 'author', widget: 'string' }],
},
{
name: 'file2',
fields: [{ name: 'other', widget: 'string' }],
},
],
type: FILES,
}),
];
expect(await backend.search(collections, 'find me by author')).toEqual({
entries: [files[0]],
});
expect(await backend.search(collections, 'find me by other')).toEqual({
entries: [files[1]],
});
});
it('should query collections by title', async () => {
const results = await backend.query(collections[0], ['title'], 'find me by title');
expect(results).toEqual({
hits: [posts[0]],
query: 'find me by title',
});
});
it('should query collections by slug', async () => {
const results = await backend.query(collections[0], ['slug'], 'find-me');
expect(results).toEqual({
hits: [posts[0]],
query: 'find-me',
});
});
it('should query collections by path', async () => {
const results = await backend.query(collections[0], ['path'], 'posts/find-me.md');
expect(results).toEqual({
hits: [posts[0]],
query: 'posts/find-me.md',
});
});
it('should query collections by nested field', async () => {
const results = await backend.query(
collections[0],
['nested.title'],
'find me by nested title',
);
expect(results).toEqual({
hits: [posts[0]],
query: 'find me by nested title',
});
});
});
describe('expandSearchEntries', () => {
it('should expand entry with list to multiple entries', () => {
const entry = {
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
],
},
},
list: [1, 2],
},
};
expect(expandSearchEntries([entry], ['list.*', 'field.nested.list.*.name'])).toEqual([
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
],
},
},
list: [1, 2],
},
field: 'list.0',
},
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
],
},
},
list: [1, 2],
},
field: 'list.1',
},
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
],
},
},
list: [1, 2],
},
field: 'field.nested.list.0.name',
},
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
],
},
},
list: [1, 2],
},
field: 'field.nested.list.1.name',
},
]);
});
});
describe('mergeExpandedEntries', () => {
it('should merge entries and filter data', () => {
const expanded = [
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
{ id: 3, name: '3' },
{ id: 4, name: '4' },
],
},
},
list: [1, 2],
},
field: 'field.nested.list.0.name',
},
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
{ id: 3, name: '3' },
{ id: 4, name: '4' },
],
},
},
list: [1, 2],
},
field: 'field.nested.list.3.name',
},
];
expect(mergeExpandedEntries(expanded)).toEqual([
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 4, name: '4' },
],
},
},
list: [1, 2],
},
},
]);
});
it('should merge entries and filter data based on different fields', () => {
const expanded = [
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
{ id: 3, name: '3' },
{ id: 4, name: '4' },
],
},
},
list: [1, 2],
},
field: 'field.nested.list.0.name',
},
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
{ id: 3, name: '3' },
{ id: 4, name: '4' },
],
},
},
list: [1, 2],
},
field: 'field.nested.list.3.name',
},
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 2, name: '2' },
{ id: 3, name: '3' },
{ id: 4, name: '4' },
],
},
},
list: [1, 2],
},
field: 'list.1',
},
];
expect(mergeExpandedEntries(expanded)).toEqual([
{
data: {
field: {
nested: {
list: [
{ id: 1, name: '1' },
{ id: 4, name: '4' },
],
},
},
list: [2],
},
},
]);
});
it('should merge entries and keep sort by entry index', () => {
const expanded = [
{
data: {
list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
},
field: 'list.5',
},
{
data: {
list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
},
field: 'list.0',
},
{
data: {
list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
},
field: 'list.11',
},
{
data: {
list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
},
field: 'list.1',
},
];
expect(mergeExpandedEntries(expanded)).toEqual([
{
data: {
list: [5, 0, 11, 1],
},
},
]);
});
});
});

File diff suppressed because it is too large

View File

@@ -1,224 +0,0 @@
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { fromJS } from 'immutable';
import { addAssets } from '../media';
import * as actions from '../editorialWorkflow';
jest.mock('../../backend');
jest.mock('../../valueObjects/AssetProxy');
jest.mock('netlify-cms-lib-util');
jest.mock('uuid/v4', () => {
return jest.fn().mockReturnValue('000000000000000000000');
});
jest.mock('redux-notifications', () => {
const actual = jest.requireActual('redux-notifications');
return {
...actual,
actions: {
notifSend: jest.fn().mockImplementation(payload => ({
type: 'NOTIF_SEND',
...payload,
})),
},
};
});
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
describe('editorialWorkflow actions', () => {
beforeEach(() => {
jest.clearAllMocks();
});
describe('loadUnpublishedEntry', () => {
it('should load unpublished entry', () => {
const { currentBackend } = require('../../backend');
const { createAssetProxy } = require('../../valueObjects/AssetProxy');
const assetProxy = { name: 'name', path: 'path' };
const entry = { mediaFiles: [{ file: { name: 'name' }, id: '1', draft: true }] };
const backend = {
unpublishedEntry: jest.fn().mockResolvedValue(entry),
};
const store = mockStore({
config: fromJS({}),
collections: fromJS({
posts: { name: 'posts' },
}),
mediaLibrary: fromJS({
isLoading: false,
}),
editorialWorkflow: fromJS({
pages: { ids: [] },
}),
});
currentBackend.mockReturnValue(backend);
createAssetProxy.mockResolvedValue(assetProxy);
const slug = 'slug';
const collection = store.getState().collections.get('posts');
return store.dispatch(actions.loadUnpublishedEntry(collection, slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(4);
expect(actions[0]).toEqual({
type: 'UNPUBLISHED_ENTRY_REQUEST',
payload: {
collection: 'posts',
slug,
},
});
expect(actions[1]).toEqual(addAssets([assetProxy]));
expect(actions[2]).toEqual({
type: 'UNPUBLISHED_ENTRY_SUCCESS',
payload: {
collection: 'posts',
entry: { ...entry, mediaFiles: [{ file: { name: 'name' }, id: '1', draft: true }] },
},
});
expect(actions[3]).toEqual({
type: 'DRAFT_CREATE_FROM_ENTRY',
payload: {
entry,
},
});
});
});
});
describe('publishUnpublishedEntry', () => {
it('should publish unpublished entry and report success', () => {
const { currentBackend } = require('../../backend');
const entry = {};
const backend = {
publishUnpublishedEntry: jest.fn().mockResolvedValue(),
getEntry: jest.fn().mockResolvedValue(entry),
getMedia: jest.fn().mockResolvedValue([]),
};
const store = mockStore({
config: fromJS({}),
integrations: fromJS([]),
mediaLibrary: fromJS({
isLoading: false,
}),
collections: fromJS({
posts: { name: 'posts' },
}),
});
currentBackend.mockReturnValue(backend);
const slug = 'slug';
return store.dispatch(actions.publishUnpublishedEntry('posts', slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(8);
expect(actions[0]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_REQUEST',
payload: {
collection: 'posts',
slug,
},
});
expect(actions[1]).toEqual({
type: 'MEDIA_LOAD_REQUEST',
payload: {
page: 1,
},
});
expect(actions[2]).toEqual({
type: 'NOTIF_SEND',
message: { key: 'ui.toast.entryPublished' },
kind: 'success',
dismissAfter: 4000,
});
expect(actions[3]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_SUCCESS',
payload: {
collection: 'posts',
slug,
},
});
expect(actions[4]).toEqual({
type: 'MEDIA_LOAD_SUCCESS',
payload: {
files: [],
},
});
expect(actions[5]).toEqual({
type: 'ENTRY_REQUEST',
payload: {
slug,
collection: 'posts',
},
});
expect(actions[6]).toEqual({
type: 'ENTRY_SUCCESS',
payload: {
entry,
collection: 'posts',
},
});
expect(actions[7]).toEqual({
type: 'DRAFT_CREATE_FROM_ENTRY',
payload: {
entry,
},
});
});
});
it('should publish unpublished entry and report error', () => {
const { currentBackend } = require('../../backend');
const error = new Error('failed to publish entry');
const backend = {
publishUnpublishedEntry: jest.fn().mockRejectedValue(error),
};
const store = mockStore({
config: fromJS({}),
collections: fromJS({
posts: { name: 'posts' },
}),
});
currentBackend.mockReturnValue(backend);
const slug = 'slug';
return store.dispatch(actions.publishUnpublishedEntry('posts', slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(3);
expect(actions[0]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_REQUEST',
payload: {
collection: 'posts',
slug,
},
});
expect(actions[1]).toEqual({
type: 'NOTIF_SEND',
message: { key: 'ui.toast.onFailToPublishEntry', details: error },
kind: 'danger',
dismissAfter: 8000,
});
expect(actions[2]).toEqual({
type: 'UNPUBLISHED_ENTRY_PUBLISH_FAILURE',
payload: {
collection: 'posts',
slug,
},
});
});
});
});
});

View File

@@ -1,575 +0,0 @@
import { fromJS, Map } from 'immutable';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import {
createEmptyDraft,
createEmptyDraftData,
retrieveLocalBackup,
persistLocalBackup,
getMediaAssets,
validateMetaField,
} from '../entries';
import AssetProxy from '../../valueObjects/AssetProxy';
jest.mock('../../backend');
jest.mock('netlify-cms-lib-util');
jest.mock('../mediaLibrary');
jest.mock('../../reducers/entries');
jest.mock('../../reducers/entryDraft');
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
describe('entries', () => {
describe('createEmptyDraft', () => {
const { currentBackend } = require('../../backend');
const backend = {
processEntry: jest.fn((_state, _collection, entry) => Promise.resolve(entry)),
};
currentBackend.mockReturnValue(backend);
beforeEach(() => {
jest.clearAllMocks();
});
it('should dispatch draft created action', () => {
const store = mockStore({ mediaLibrary: fromJS({ files: [] }) });
const collection = fromJS({
fields: [{ name: 'title' }],
});
return store.dispatch(createEmptyDraft(collection, '')).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(1);
expect(actions[0]).toEqual({
payload: {
author: '',
collection: undefined,
data: {},
meta: {},
i18n: {},
isModification: null,
label: null,
mediaFiles: [],
partial: false,
path: '',
raw: '',
slug: '',
status: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
});
});
});
it('should populate draft entry from URL param', () => {
const store = mockStore({ mediaLibrary: fromJS({ files: [] }) });
const collection = fromJS({
fields: [{ name: 'title' }, { name: 'boolean' }],
});
return store.dispatch(createEmptyDraft(collection, '?title=title&boolean=True')).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(1);
expect(actions[0]).toEqual({
payload: {
author: '',
collection: undefined,
data: { title: 'title', boolean: true },
meta: {},
i18n: {},
isModification: null,
label: null,
mediaFiles: [],
partial: false,
path: '',
raw: '',
slug: '',
status: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
});
});
});
it('should html escape URL params', () => {
const store = mockStore({ mediaLibrary: fromJS({ files: [] }) });
const collection = fromJS({
fields: [{ name: 'title' }],
});
return store
.dispatch(createEmptyDraft(collection, "?title=<script>alert('hello')</script>"))
.then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(1);
expect(actions[0]).toEqual({
payload: {
author: '',
collection: undefined,
data: { title: '&lt;script&gt;alert(&#039;hello&#039;)&lt;/script&gt;' },
meta: {},
i18n: {},
isModification: null,
label: null,
mediaFiles: [],
partial: false,
path: '',
raw: '',
slug: '',
status: '',
updatedOn: '',
},
type: 'DRAFT_CREATE_EMPTY',
});
});
});
});
describe('createEmptyDraftData', () => {
it('should allow an empty array as list default for a single field list', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
default: [],
field: { name: 'url', widget: 'text' },
},
]);
expect(createEmptyDraftData(fields)).toEqual({ images: fromJS([]) });
});
it('should allow a complex array as list default for a single field list', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
default: [
{
url: 'https://image.png',
},
],
field: { name: 'url', widget: 'text' },
},
]);
expect(createEmptyDraftData(fields)).toEqual({
images: fromJS([
{
url: 'https://image.png',
},
]),
});
});
it('should allow an empty array as list default for a fields list', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
default: [],
fields: [
{ name: 'title', widget: 'text' },
{ name: 'url', widget: 'text' },
],
},
]);
expect(createEmptyDraftData(fields)).toEqual({ images: fromJS([]) });
});
it('should allow a complex array as list default for a fields list', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
default: [
{
title: 'default image',
url: 'https://image.png',
},
],
fields: [
{ name: 'title', widget: 'text' },
{ name: 'url', widget: 'text' },
],
},
]);
expect(createEmptyDraftData(fields)).toEqual({
images: fromJS([
{
title: 'default image',
url: 'https://image.png',
},
]),
});
});
it('should use field default when no list default is provided', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
field: { name: 'url', widget: 'text', default: 'https://image.png' },
},
]);
expect(createEmptyDraftData(fields)).toEqual({ images: [{ url: 'https://image.png' }] });
});
it('should use fields default when no list default is provided', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
fields: [
{ name: 'title', widget: 'text', default: 'default image' },
{ name: 'url', widget: 'text', default: 'https://image.png' },
],
},
]);
expect(createEmptyDraftData(fields)).toEqual({
images: [{ title: 'default image', url: 'https://image.png' }],
});
});
it('should not set empty value for list fields widget', () => {
const fields = fromJS([
{
name: 'images',
widget: 'list',
fields: [
{ name: 'title', widget: 'text' },
{ name: 'url', widget: 'text' },
],
},
]);
expect(createEmptyDraftData(fields)).toEqual({});
});
it('should set default value for object field widget', () => {
const fields = fromJS([
{
name: 'post',
widget: 'object',
field: { name: 'image', widget: 'text', default: 'https://image.png' },
},
]);
expect(createEmptyDraftData(fields)).toEqual({ post: { image: 'https://image.png' } });
});
it('should set default values for object fields widget', () => {
const fields = fromJS([
{
name: 'post',
widget: 'object',
fields: [
{ name: 'title', widget: 'text', default: 'default title' },
{ name: 'url', widget: 'text', default: 'https://image.png' },
],
},
]);
expect(createEmptyDraftData(fields)).toEqual({
post: { title: 'default title', url: 'https://image.png' },
});
});
it('should not set empty value for object fields widget', () => {
const fields = fromJS([
{
name: 'post',
widget: 'object',
fields: [
{ name: 'title', widget: 'text' },
{ name: 'url', widget: 'text' },
],
},
]);
expect(createEmptyDraftData(fields)).toEqual({});
});
it('should populate nested fields', () => {
const fields = fromJS([
{
name: 'names',
widget: 'list',
field: {
name: 'object',
widget: 'object',
fields: [
{ name: 'first', widget: 'string', default: 'first' },
{ name: 'second', widget: 'string', default: 'second' },
],
},
},
]);
expect(createEmptyDraftData(fields)).toEqual({
names: [{ object: { first: 'first', second: 'second' } }],
});
});
});
describe('persistLocalBackup', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should persist local backup with media files', () => {
const { currentBackend } = require('../../backend');
const backend = {
persistLocalDraftBackup: jest.fn(() => Promise.resolve()),
};
const store = mockStore({
config: Map(),
});
currentBackend.mockReturnValue(backend);
const collection = Map();
const mediaFiles = [{ path: 'static/media/image.png' }];
const entry = fromJS({ mediaFiles });
return store.dispatch(persistLocalBackup(entry, collection)).then(() => {
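// No Redux actions are expected: persistLocalBackup only delegates to backend.persistLocalDraftBackup.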
const actions = store.getActions();
expect(actions).toHaveLength(0);
expect(backend.persistLocalDraftBackup).toHaveBeenCalledTimes(1);
expect(backend.persistLocalDraftBackup).toHaveBeenCalledWith(entry, collection);
});
});
});
describe('retrieveLocalBackup', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should retrieve media files with local backup', () => {
const { currentBackend } = require('../../backend');
const { createAssetProxy } = require('../../valueObjects/AssetProxy');
const backend = {
getLocalDraftBackup: jest.fn((...args) => args),
};
const store = mockStore({
config: Map(),
});
currentBackend.mockReturnValue(backend);
const collection = Map({
name: 'collection',
});
const slug = 'slug';
const file = new File([], 'image.png');
const mediaFiles = [{ path: 'static/media/image.png', url: 'url', file }];
const asset = createAssetProxy(mediaFiles[0]);
const entry = { mediaFiles };
backend.getLocalDraftBackup.mockReturnValue({ entry });
return store.dispatch(retrieveLocalBackup(collection, slug)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'ADD_ASSETS',
payload: [asset],
});
expect(actions[1]).toEqual({
type: 'DRAFT_LOCAL_BACKUP_RETRIEVED',
payload: { entry },
});
});
});
});
describe('getMediaAssets', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should map mediaFiles to assets', () => {
const mediaFiles = fromJS([{ path: 'path1' }, { path: 'path2', draft: true }]);
const entry = Map({ mediaFiles });
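// Only media files flagged as draft are mapped to assets, so just 'path2' is expected below.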
expect(getMediaAssets({ entry })).toEqual([new AssetProxy({ path: 'path2' })]);
});
});
describe('validateMetaField', () => {
const state = {
config: {
slug: {
encoding: 'unicode',
clean_accents: false,
sanitize_replacement: '-',
},
},
entries: fromJS([]),
};
const collection = fromJS({
folder: 'folder',
type: 'folder_based_collection',
name: 'name',
});
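// Mock the translation function to echo its key and args so message payloads can be asserted structurally.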
const t = jest.fn((key, args) => ({ key, args }));
const { selectCustomPath } = require('../../reducers/entryDraft');
const { selectEntryByPath } = require('../../reducers/entries');
beforeEach(() => {
jest.clearAllMocks();
});
it('should not return error on non meta field', () => {
expect(validateMetaField(null, null, fromJS({}), null, t)).toEqual({ error: false });
});
it('should not return error on meta path field', () => {
expect(validateMetaField(null, null, fromJS({ meta: true, name: 'other' }), null, t)).toEqual(
{ error: false },
);
});
it('should return error on empty path', () => {
expect(validateMetaField(null, null, fromJS({ meta: true, name: 'path' }), null, t)).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: null },
},
type: 'CUSTOM',
},
});
expect(
validateMetaField(null, null, fromJS({ meta: true, name: 'path' }), undefined, t),
).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: undefined },
},
type: 'CUSTOM',
},
});
expect(validateMetaField(null, null, fromJS({ meta: true, name: 'path' }), '', t)).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: '' },
},
type: 'CUSTOM',
},
});
});
it('should return error on invalid path', () => {
expect(
validateMetaField(state, null, fromJS({ meta: true, name: 'path' }), 'invalid path', t),
).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.invalidPath',
args: { path: 'invalid path' },
},
type: 'CUSTOM',
},
});
});
it('should return error on existing path', () => {
selectCustomPath.mockReturnValue('existing-path');
selectEntryByPath.mockReturnValue(fromJS({ path: 'existing-path' }));
expect(
validateMetaField(
{
...state,
entryDraft: fromJS({
entry: {},
}),
},
collection,
fromJS({ meta: true, name: 'path' }),
'existing-path',
t,
),
).toEqual({
error: {
message: {
key: 'editor.editorControlPane.widget.pathExists',
args: { path: 'existing-path' },
},
type: 'CUSTOM',
},
});
expect(selectCustomPath).toHaveBeenCalledTimes(1);
expect(selectCustomPath).toHaveBeenCalledWith(
collection,
fromJS({ entry: { meta: { path: 'existing-path' } } }),
);
expect(selectEntryByPath).toHaveBeenCalledTimes(1);
expect(selectEntryByPath).toHaveBeenCalledWith(
state.entries,
collection.get('name'),
'existing-path',
);
});
it('should not return error on non existing path for new entry', () => {
selectCustomPath.mockReturnValue('non-existing-path');
selectEntryByPath.mockReturnValue(undefined);
expect(
validateMetaField(
{
...state,
entryDraft: fromJS({
entry: {},
}),
},
collection,
fromJS({ meta: true, name: 'path' }),
'non-existing-path',
t,
),
).toEqual({
error: false,
});
});
it('should not return error on existing path for existing entry', () => {
selectCustomPath.mockReturnValue('existing-path');
selectEntryByPath.mockReturnValue(fromJS({ path: 'existing-path' }));
expect(
validateMetaField(
{
...state,
entryDraft: fromJS({
entry: { path: 'existing-path' },
}),
},
collection,
fromJS({ meta: true, name: 'path' }),
'existing-path',
t,
),
).toEqual({
error: false,
});
});
});
});



@ -1,171 +0,0 @@
import { Map } from 'immutable';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { mocked } from 'ts-jest/utils';
import { getAsset, ADD_ASSET, LOAD_ASSET_REQUEST } from '../media';
import { selectMediaFilePath } from '../../reducers/entries';
import AssetProxy from '../../valueObjects/AssetProxy';
import type { State } from '../../types/redux';
import type { AnyAction } from 'redux';
import type { ThunkDispatch } from 'redux-thunk';
const middlewares = [thunk];
const mockStore = configureMockStore<Partial<State>, ThunkDispatch<State, {}, AnyAction>>(
middlewares,
);
const mockedSelectMediaFilePath = mocked(selectMediaFilePath);
jest.mock('../../reducers/entries');
jest.mock('../mediaLibrary');
describe('media', () => {
const emptyAsset = new AssetProxy({
path: 'empty.svg',
file: new File([`<svg xmlns="http://www.w3.org/2000/svg"></svg>`], 'empty.svg', {
type: 'image/svg+xml',
}),
});
describe('getAsset', () => {
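// URL.createObjectURL is not provided by the jsdom test environment, so stub it for AssetProxy usage.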
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
global.URL = { createObjectURL: jest.fn() };
beforeEach(() => {
jest.resetAllMocks();
});
it('should return empty asset for null path', () => {
const store = mockStore({});
const payload = { collection: null, entryPath: null, entry: null, path: null };
// TODO change to proper payload when immutable is removed
// from 'collections' and 'entries' state slices
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = store.dispatch(getAsset(payload));
const actions = store.getActions();
expect(actions).toHaveLength(0);
expect(result).toEqual(emptyAsset);
});
it('should return asset from medias state', () => {
const path = 'static/media/image.png';
const asset = new AssetProxy({ file: new File([], 'empty'), path });
const store = mockStore({
// TODO change to proper store data when immutable is removed
// from 'config' state slice
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
config: Map(),
medias: {
[path]: { asset, isLoading: false, error: null },
},
});
mockedSelectMediaFilePath.mockReturnValue(path);
const payload = { collection: Map(), entry: Map({ path: 'entryPath' }), path };
// TODO change to proper payload when immutable is removed
// from 'collections' and 'entries' state slices
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = store.dispatch(getAsset(payload));
const actions = store.getActions();
expect(actions).toHaveLength(0);
expect(result).toBe(asset);
expect(mockedSelectMediaFilePath).toHaveBeenCalledTimes(1);
expect(mockedSelectMediaFilePath).toHaveBeenCalledWith(
store.getState().config,
payload.collection,
payload.entry,
path,
undefined,
);
});
it('should create asset for absolute path when not in medias state', () => {
const path = 'https://asset.netlify.com/image.png';
const asset = new AssetProxy({ url: path, path });
const store = mockStore({
medias: {},
});
mockedSelectMediaFilePath.mockReturnValue(path);
const payload = { collection: null, entryPath: null, path };
// TODO change to proper payload when immutable is removed
// from 'collections' state slice
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = store.dispatch(getAsset(payload));
const actions = store.getActions();
expect(actions).toHaveLength(1);
expect(actions[0]).toEqual({
type: ADD_ASSET,
payload: asset,
});
expect(result).toEqual(asset);
});
it('should return empty asset and initiate load when not in medias state', () => {
const path = 'static/media/image.png';
const store = mockStore({
medias: {},
});
mockedSelectMediaFilePath.mockReturnValue(path);
const payload = { path };
// TODO change to proper payload when immutable is removed
// from 'collections' and 'entries' state slices
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = store.dispatch(getAsset(payload));
const actions = store.getActions();
expect(actions).toHaveLength(1);
expect(actions[0]).toEqual({
type: LOAD_ASSET_REQUEST,
payload: { path },
});
expect(result).toEqual(emptyAsset);
});
it('should return asset with original path on load error', () => {
const path = 'static/media/image.png';
const resolvePath = 'resolvePath';
const store = mockStore({
medias: {
[resolvePath]: {
asset: undefined,
error: new Error('test'),
isLoading: false,
},
},
});
mockedSelectMediaFilePath.mockReturnValue(resolvePath);
const payload = { path };
// TODO change to proper payload when immutable is removed
// from 'collections' and 'entries' state slices
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = store.dispatch(getAsset(payload));
const actions = store.getActions();
const asset = new AssetProxy({ url: path, path: resolvePath });
expect(actions).toHaveLength(1);
expect(actions[0]).toEqual({
type: ADD_ASSET,
payload: asset,
});
expect(result).toEqual(asset);
});
});
});


@ -1,327 +0,0 @@
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { List, Map } from 'immutable';
import { insertMedia, persistMedia, deleteMedia } from '../mediaLibrary';
jest.mock('../../backend');
jest.mock('../waitUntil');
jest.mock('netlify-cms-lib-util', () => {
const lib = jest.requireActual('netlify-cms-lib-util');
return {
...lib,
getBlobSHA: jest.fn(),
};
});
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
describe('mediaLibrary', () => {
describe('insertMedia', () => {
it('should return mediaPath as string when string is given', () => {
const store = mockStore({
config: {
public_folder: '/media',
},
collections: Map({
posts: Map({ name: 'posts' }),
}),
entryDraft: Map({
entry: Map({ isPersisting: false, collection: 'posts' }),
}),
});
store.dispatch(insertMedia('foo.png'));
expect(store.getActions()[0]).toEqual({
type: 'MEDIA_INSERT',
payload: { mediaPath: '/media/foo.png' },
});
});
it('should return mediaPath as array of strings when array of strings is given', () => {
const store = mockStore({
config: {
public_folder: '/media',
},
collections: Map({
posts: Map({ name: 'posts' }),
}),
entryDraft: Map({
entry: Map({ isPersisting: false, collection: 'posts' }),
}),
});
store.dispatch(insertMedia(['foo.png']));
expect(store.getActions()[0]).toEqual({
type: 'MEDIA_INSERT',
payload: { mediaPath: ['/media/foo.png'] },
});
});
});
const { currentBackend } = require('../../backend');
const backend = {
persistMedia: jest.fn(() => ({ id: 'id' })),
deleteMedia: jest.fn(),
};
currentBackend.mockReturnValue(backend);
describe('persistMedia', () => {
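// Stub URL.createObjectURL (not implemented in jsdom) so persisted assets can expose a display URL.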
global.URL = { createObjectURL: jest.fn().mockReturnValue('displayURL') };
beforeEach(() => {
jest.clearAllMocks();
});
it('should not persist media when editing draft', () => {
const { getBlobSHA } = require('netlify-cms-lib-util');
getBlobSHA.mockReturnValue('000000000000000');
const store = mockStore({
config: {
media_folder: 'static/media',
slug: {
encoding: 'unicode',
clean_accents: false,
sanitize_replacement: '-',
},
},
collections: Map({
posts: Map({ name: 'posts' }),
}),
integrations: Map(),
mediaLibrary: Map({
files: List(),
}),
entryDraft: Map({
entry: Map({ isPersisting: false, collection: 'posts' }),
}),
});
const file = new File([''], 'name.png');
return store.dispatch(persistMedia(file)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0].type).toEqual('ADD_ASSET');
expect(actions[0].payload).toEqual(
expect.objectContaining({
path: 'static/media/name.png',
}),
);
expect(actions[1].type).toEqual('ADD_DRAFT_ENTRY_MEDIA_FILE');
expect(actions[1].payload).toEqual(
expect.objectContaining({
draft: true,
id: '000000000000000',
path: 'static/media/name.png',
size: file.size,
name: file.name,
}),
);
expect(getBlobSHA).toHaveBeenCalledTimes(1);
expect(getBlobSHA).toHaveBeenCalledWith(file);
expect(backend.persistMedia).toHaveBeenCalledTimes(0);
});
});
it('should persist media when not editing draft', () => {
const store = mockStore({
config: {
media_folder: 'static/media',
slug: {
encoding: 'unicode',
clean_accents: false,
sanitize_replacement: '-',
},
},
collections: Map({
posts: Map({ name: 'posts' }),
}),
integrations: Map(),
mediaLibrary: Map({
files: List(),
}),
entryDraft: Map({
entry: Map(),
}),
});
const file = new File([''], 'name.png');
return store.dispatch(persistMedia(file)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(3);
expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
expect(actions[1].type).toEqual('ADD_ASSET');
expect(actions[1].payload).toEqual(
expect.objectContaining({
path: 'static/media/name.png',
}),
);
expect(actions[2]).toEqual({
type: 'MEDIA_PERSIST_SUCCESS',
payload: {
file: { id: 'id' },
},
});
expect(backend.persistMedia).toHaveBeenCalledTimes(1);
expect(backend.persistMedia).toHaveBeenCalledWith(
store.getState().config,
expect.objectContaining({
path: 'static/media/name.png',
}),
);
});
});
it('should sanitize media name if needed when persisting', () => {
const store = mockStore({
config: {
media_folder: 'static/media',
slug: {
encoding: 'ascii',
clean_accents: true,
sanitize_replacement: '_',
},
},
collections: Map({
posts: Map({ name: 'posts' }),
}),
integrations: Map(),
mediaLibrary: Map({
files: List(),
}),
entryDraft: Map({
entry: Map(),
}),
});
const file = new File([''], 'abc DEF éâçÖ $;, .png');
return store.dispatch(persistMedia(file)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(3);
expect(actions[0]).toEqual({ type: 'MEDIA_PERSIST_REQUEST' });
expect(actions[1].type).toEqual('ADD_ASSET');
expect(actions[1].payload).toEqual(
expect.objectContaining({
path: 'static/media/abc_def_eaco_.png',
}),
);
expect(actions[2]).toEqual({
type: 'MEDIA_PERSIST_SUCCESS',
payload: {
file: { id: 'id' },
},
});
expect(backend.persistMedia).toHaveBeenCalledTimes(1);
expect(backend.persistMedia).toHaveBeenCalledWith(
store.getState().config,
expect.objectContaining({
path: 'static/media/abc_def_eaco_.png',
}),
);
});
});
});
describe('deleteMedia', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should delete non draft file', () => {
const store = mockStore({
config: {
publish_mode: 'editorial_workflow',
},
collections: Map(),
integrations: Map(),
mediaLibrary: Map({
files: List(),
}),
entryDraft: Map({
entry: Map({ isPersisting: false }),
}),
});
const file = { name: 'name.png', id: 'id', path: 'static/media/name.png', draft: false };
return store.dispatch(deleteMedia(file)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(4);
expect(actions[0]).toEqual({ type: 'MEDIA_DELETE_REQUEST' });
expect(actions[1]).toEqual({
type: 'REMOVE_ASSET',
payload: 'static/media/name.png',
});
expect(actions[2]).toEqual({
type: 'MEDIA_DELETE_SUCCESS',
payload: { file },
});
expect(actions[3]).toEqual({
type: 'REMOVE_DRAFT_ENTRY_MEDIA_FILE',
payload: { id: 'id' },
});
expect(backend.deleteMedia).toHaveBeenCalledTimes(1);
expect(backend.deleteMedia).toHaveBeenCalledWith(
store.getState().config,
'static/media/name.png',
);
});
});
it('should not delete a draft file', () => {
const store = mockStore({
config: {
publish_mode: 'editorial_workflow',
},
collections: Map(),
integrations: Map(),
mediaLibrary: Map({
files: List(),
}),
entryDraft: Map({
entry: Map({ isPersisting: false }),
}),
});
const file = { name: 'name.png', id: 'id', path: 'static/media/name.png', draft: true };
return store.dispatch(deleteMedia(file)).then(() => {
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'REMOVE_ASSET',
payload: 'static/media/name.png',
});
expect(actions[1]).toEqual({
type: 'REMOVE_DRAFT_ENTRY_MEDIA_FILE',
payload: { id: 'id' },
});
expect(backend.deleteMedia).toHaveBeenCalledTimes(0);
});
});
});
});


@ -1,209 +0,0 @@
import { fromJS } from 'immutable';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { searchEntries } from '../search';
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
jest.mock('../../reducers');
jest.mock('../../backend');
jest.mock('../../integrations');
describe('search', () => {
describe('searchEntries', () => {
const { currentBackend } = require('../../backend');
const { selectIntegration } = require('../../reducers');
const { getIntegrationProvider } = require('../../integrations');
beforeEach(() => {
jest.resetAllMocks();
});
it('should search entries in all collections using integration', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: {},
});
selectIntegration.mockReturnValue('search_integration');
currentBackend.mockReturnValue({});
const response = { entries: [{ name: '1' }, { name: '' }], pagination: 1 };
const integration = { search: jest.fn().mockResolvedValue(response) };
getIntegrationProvider.mockReturnValue(integration);
await store.dispatch(searchEntries('find me'));
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'SEARCH_ENTRIES_REQUEST',
payload: {
searchTerm: 'find me',
searchCollections: ['posts', 'pages'],
page: 0,
},
});
expect(actions[1]).toEqual({
type: 'SEARCH_ENTRIES_SUCCESS',
payload: {
entries: response.entries,
page: response.pagination,
},
});
expect(integration.search).toHaveBeenCalledTimes(1);
expect(integration.search).toHaveBeenCalledWith(['posts', 'pages'], 'find me', 0);
});
it('should search entries in a subset of collections using integration', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: {},
});
selectIntegration.mockReturnValue('search_integration');
currentBackend.mockReturnValue({});
const response = { entries: [{ name: '1' }, { name: '' }], pagination: 1 };
const integration = { search: jest.fn().mockResolvedValue(response) };
getIntegrationProvider.mockReturnValue(integration);
await store.dispatch(searchEntries('find me', ['pages']));
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'SEARCH_ENTRIES_REQUEST',
payload: {
searchTerm: 'find me',
searchCollections: ['pages'],
page: 0,
},
});
expect(actions[1]).toEqual({
type: 'SEARCH_ENTRIES_SUCCESS',
payload: {
entries: response.entries,
page: response.pagination,
},
});
expect(integration.search).toHaveBeenCalledTimes(1);
expect(integration.search).toHaveBeenCalledWith(['pages'], 'find me', 0);
});
it('should search entries in all collections using backend', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: {},
});
const response = { entries: [{ name: '1' }, { name: '' }], pagination: 1 };
const backend = { search: jest.fn().mockResolvedValue(response) };
currentBackend.mockReturnValue(backend);
await store.dispatch(searchEntries('find me'));
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'SEARCH_ENTRIES_REQUEST',
payload: {
searchTerm: 'find me',
searchCollections: ['posts', 'pages'],
page: 0,
},
});
expect(actions[1]).toEqual({
type: 'SEARCH_ENTRIES_SUCCESS',
payload: {
entries: response.entries,
page: response.pagination,
},
});
expect(backend.search).toHaveBeenCalledTimes(1);
expect(backend.search).toHaveBeenCalledWith(
[fromJS({ name: 'posts' }), fromJS({ name: 'pages' })],
'find me',
);
});
it('should search entries in a subset of collections using backend', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: {},
});
const response = { entries: [{ name: '1' }, { name: '' }], pagination: 1 };
const backend = { search: jest.fn().mockResolvedValue(response) };
currentBackend.mockReturnValue(backend);
await store.dispatch(searchEntries('find me', ['pages']));
const actions = store.getActions();
expect(actions).toHaveLength(2);
expect(actions[0]).toEqual({
type: 'SEARCH_ENTRIES_REQUEST',
payload: {
searchTerm: 'find me',
searchCollections: ['pages'],
page: 0,
},
});
expect(actions[1]).toEqual({
type: 'SEARCH_ENTRIES_SUCCESS',
payload: {
entries: response.entries,
page: response.pagination,
},
});
expect(backend.search).toHaveBeenCalledTimes(1);
expect(backend.search).toHaveBeenCalledWith([fromJS({ name: 'pages' })], 'find me');
});
it('should ignore identical search in all collections', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: { isFetching: true, term: 'find me', collections: ['posts', 'pages'] },
});
await store.dispatch(searchEntries('find me'));
const actions = store.getActions();
expect(actions).toHaveLength(0);
});
it('should ignore identical search in a subset of collections', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: { isFetching: true, term: 'find me', collections: ['pages'] },
});
await store.dispatch(searchEntries('find me', ['pages']));
const actions = store.getActions();
expect(actions).toHaveLength(0);
});
it('should not ignore same search term in different search collections', async () => {
const store = mockStore({
collections: fromJS({ posts: { name: 'posts' }, pages: { name: 'pages' } }),
search: { isFetching: true, term: 'find me', collections: ['pages'] },
});
const backend = { search: jest.fn().mockResolvedValue({}) };
currentBackend.mockReturnValue(backend);
await store.dispatch(searchEntries('find me', ['posts', 'pages']));
expect(backend.search).toHaveBeenCalledTimes(1);
expect(backend.search).toHaveBeenCalledWith(
[fromJS({ name: 'posts' }), fromJS({ name: 'pages' })],
'find me',
);
});
});
});


@ -1,13 +1,11 @@
import { actions as notifActions } from 'redux-notifications';
import { currentBackend } from '../backend';
import { addSnackbar } from '../store/slices/snackbars';
import type { Credentials, User } from 'netlify-cms-lib-util';
import type { Credentials, User } from '../lib/util';
import type { ThunkDispatch } from 'redux-thunk';
import type { AnyAction } from 'redux';
import type { State } from '../types/redux';
const { notifSend, notifClear } = notifActions;
import type { t } from 'react-polyglot';
export const AUTH_REQUEST = 'AUTH_REQUEST';
export const AUTH_SUCCESS = 'AUTH_SUCCESS';
@ -96,13 +94,12 @@ export function loginUser(credentials: Credentials) {
.catch((error: Error) => {
console.error(error);
dispatch(
notifSend({
addSnackbar({
type: 'warning',
message: {
details: error.message,
key: 'ui.toast.onFailToAuth',
message: error.message,
},
kind: 'warning',
dismissAfter: 8000,
}),
);
dispatch(authError(error));
@ -116,7 +113,6 @@ export function logoutUser() {
const backend = currentBackend(state.config);
Promise.resolve(backend.logout()).then(() => {
dispatch(logout());
dispatch(notifClear());
});
};
}


@ -14,8 +14,8 @@ import { FILES, FOLDER } from '../constants/collectionTypes';
import type { ThunkDispatch } from 'redux-thunk';
import type { AnyAction } from 'redux';
import type {
CmsCollection,
import type { State } from '../types/redux';
import {
CmsConfig,
CmsField,
CmsFieldBase,
@ -24,8 +24,8 @@ import type {
CmsI18nConfig,
CmsPublishMode,
CmsLocalBackend,
State,
} from '../types/redux';
CmsCollection,
} from '../interface';
export const CONFIG_REQUEST = 'CONFIG_REQUEST';
export const CONFIG_SUCCESS = 'CONFIG_SUCCESS';
@ -64,7 +64,7 @@ function getConfigUrl() {
};
const configLinkEl = document.querySelector<HTMLLinkElement>('link[rel="cms-config-url"]');
if (configLinkEl && validTypes[configLinkEl.type] && configLinkEl.href) {
console.log(`Using config file path: "${configLinkEl.href}"`);
console.info(`Using config file path: "${configLinkEl.href}"`);
return configLinkEl.href;
}
return 'config.yml';
@ -100,7 +100,7 @@ function setSnakeCaseConfig<T extends CmsField>(field: T) {
console.warn(
`Field ${field.name} is using a deprecated configuration '${camel}'. Please use '${snake}'`,
);
return { [snake]: (field as Record<string, unknown>)[camel] };
return { [snake]: (field as unknown as Record<string, unknown>)[camel] };
});
return Object.assign({}, field, ...snakeValues) as T;
@ -189,15 +189,6 @@ export function normalizeConfig(config: CmsConfig) {
normalizedCollection = { ...normalizedCollection, files: normalizedFiles };
}
if (normalizedCollection.sortableFields) {
const { sortableFields, ...rest } = normalizedCollection;
normalizedCollection = { ...rest, sortable_fields: sortableFields };
console.warn(
`Collection ${collection.name} is using a deprecated configuration 'sortableFields'. Please use 'sortable_fields'`,
);
}
return normalizedCollection;
});
@ -332,12 +323,14 @@ export function applyDefaults(originalConfig: CmsConfig) {
}
if (!collection.sortable_fields) {
collection.sortable_fields = selectDefaultSortableFields(
// TODO remove fromJS when Immutable is removed from the collections state slice
fromJS(collection),
backend,
hasIntegration(config, collection),
);
collection.sortable_fields = {
fields: selectDefaultSortableFields(
// TODO remove fromJS when Immutable is removed from the collections state slice
fromJS(collection),
backend,
hasIntegration(config, collection),
),
};
}
collection.view_filters = (view_filters || []).map(filter => {
@ -388,7 +381,7 @@ async function getConfigYaml(file: string, hasManualConfig: boolean) {
const contentType = response.headers.get('Content-Type') || 'Not-Found';
const isYaml = contentType.indexOf('yaml') !== -1;
if (!isYaml) {
console.log(`Response for ${file} was not yaml. (Content-Type: ${contentType})`);
console.info(`Response for ${file} was not yaml. (Content-Type: ${contentType})`);
if (hasManualConfig) {
return {};
}
@ -435,7 +428,7 @@ export async function detectProxyServer(localBackend?: boolean | CmsLocalBackend
: localBackend.url || defaultUrl.replace('localhost', location.hostname);
try {
console.log(`Looking for Netlify CMS Proxy Server at '${proxyUrl}'`);
console.info(`Looking for Netlify CMS Proxy Server at '${proxyUrl}'`);
const res = await fetch(`${proxyUrl}`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@ -447,14 +440,14 @@ export async function detectProxyServer(localBackend?: boolean | CmsLocalBackend
type?: string;
};
if (typeof repo === 'string' && Array.isArray(publish_modes) && typeof type === 'string') {
console.log(`Detected Netlify CMS Proxy Server at '${proxyUrl}' with repo: '${repo}'`);
console.info(`Detected Netlify CMS Proxy Server at '${proxyUrl}' with repo: '${repo}'`);
return { proxyUrl, publish_modes, type };
} else {
console.log(`Netlify CMS Proxy Server not detected at '${proxyUrl}'`);
console.info(`Netlify CMS Proxy Server not detected at '${proxyUrl}'`);
return {};
}
} catch {
console.log(`Netlify CMS Proxy Server not detected at '${proxyUrl}'`);
console.info(`Netlify CMS Proxy Server not detected at '${proxyUrl}'`);
return {};
}
}
@ -462,7 +455,7 @@ export async function detectProxyServer(localBackend?: boolean | CmsLocalBackend
function getPublishMode(config: CmsConfig, publishModes?: CmsPublishMode[], backendType?: string) {
if (config.publish_mode && publishModes && !publishModes.includes(config.publish_mode)) {
const newPublishMode = publishModes[0];
console.log(
console.info(
`'${config.publish_mode}' is not supported by '${backendType}' backend, switching to '${newPublishMode}'`,
);
return newPublishMode;
@ -526,7 +519,7 @@ export function loadConfig(manualConfig: Partial<CmsConfig> = {}, onLoad: () =>
if (typeof onLoad === 'function') {
onLoad();
}
} catch (err) {
} catch (err: any) {
dispatch(configFailed(err));
throw err;
}


@ -1,14 +1,12 @@
import { actions as notifActions } from 'redux-notifications';
import { currentBackend } from '../backend';
import { selectDeployPreview } from '../reducers';
import { addSnackbar } from '../store/slices/snackbars';
import type { ThunkDispatch } from 'redux-thunk';
import type { t } from 'react-polyglot';
import type { AnyAction } from 'redux';
import type { ThunkDispatch } from 'redux-thunk';
import type { Collection, Entry, State } from '../types/redux';
const { notifSend } = notifActions;
export const DEPLOY_PREVIEW_REQUEST = 'DEPLOY_PREVIEW_REQUEST';
export const DEPLOY_PREVIEW_SUCCESS = 'DEPLOY_PREVIEW_SUCCESS';
export const DEPLOY_PREVIEW_FAILURE = 'DEPLOY_PREVIEW_FAILURE';
@ -85,16 +83,12 @@ export function loadDeployPreview(
return dispatch(deployPreviewLoaded(collectionName, slug, deploy));
}
return dispatch(deployPreviewError(collectionName, slug));
} catch (error) {
} catch (error: any) {
console.error(error);
dispatch(
notifSend({
message: {
details: error.message,
key: 'ui.toast.onFailToLoadDeployPreview',
},
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToLoadDeployPreview', details: error.message },
}),
);
dispatch(deployPreviewError(collectionName, slug));


@ -1,45 +1,43 @@
import { List, Map } from 'immutable';
import { get } from 'lodash';
import { actions as notifActions } from 'redux-notifications';
import { Map, List } from 'immutable';
import { EDITORIAL_WORKFLOW_ERROR } from 'netlify-cms-lib-util';
import { currentBackend, slugFromCustomPath } from '../backend';
import { EDITORIAL_WORKFLOW, status } from '../constants/publishModes';
import ValidationErrorTypes from '../constants/validationErrorTypes';
import { EDITORIAL_WORKFLOW_ERROR } from '../lib/util';
import {
selectPublishedSlugs,
selectUnpublishedSlugs,
selectEntry,
selectPublishedSlugs,
selectUnpublishedEntry,
selectUnpublishedSlugs,
} from '../reducers';
import { selectEditingDraft } from '../reducers/entries';
import { EDITORIAL_WORKFLOW, status } from '../constants/publishModes';
import { navigateToEntry } from '../routing/history';
import { addSnackbar } from '../store/slices/snackbars';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import {
loadEntry,
createDraftFromEntry,
entryDeleted,
getMediaAssets,
createDraftFromEntry,
loadEntries,
getSerializedEntry,
loadEntries,
loadEntry,
} from './entries';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import { addAssets } from './media';
import { loadMedia } from './mediaLibrary';
import ValidationErrorTypes from '../constants/validationErrorTypes';
import { navigateToEntry } from '../routing/history';
import type { AnyAction } from 'redux';
import type { ThunkDispatch } from 'redux-thunk';
import type { Status } from '../constants/publishModes';
import type {
Collection,
EntryMap,
State,
Collections,
EntryDraft,
EntryMap,
MediaFile,
State,
} from '../types/redux';
import type { AnyAction } from 'redux';
import type { EntryValue } from '../valueObjects/Entry';
import type { Status } from '../constants/publishModes';
import type { ThunkDispatch } from 'redux-thunk';
const { notifSend } = notifActions;
/*
* Constant Declarations
@ -271,19 +269,15 @@ export function loadUnpublishedEntry(collection: Collection, slug: string) {
dispatch(addAssets(assetProxies));
dispatch(unpublishedEntryLoaded(collection, entry));
dispatch(createDraftFromEntry(entry));
} catch (error) {
} catch (error: any) {
if (error.name === EDITORIAL_WORKFLOW_ERROR && error.notUnderEditorialWorkflow) {
dispatch(unpublishedEntryRedirected(collection, slug));
dispatch(loadEntry(collection, slug));
} else {
dispatch(
notifSend({
message: {
key: 'ui.toast.onFailToLoadEntries',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToLoadEntries', details: error },
}),
);
}
@ -307,13 +301,9 @@ export function loadUnpublishedEntries(collections: Collections) {
.then(response => dispatch(unpublishedEntriesLoaded(response.entries, response.pagination)))
.catch((error: Error) => {
dispatch(
notifSend({
message: {
key: 'ui.toast.onFailToLoadEntries',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToLoadEntries', details: error },
}),
);
dispatch(unpublishedEntriesFailed(error));
@ -343,12 +333,9 @@ export function persistUnpublishedEntry(collection: Collection, existingUnpublis
if (hasPresenceErrors) {
dispatch(
notifSend({
message: {
key: 'ui.toast.missingRequiredField',
},
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: { key: 'ui.toast.missingRequiredField' },
}),
);
}
@ -378,12 +365,9 @@ export function persistUnpublishedEntry(collection: Collection, existingUnpublis
usedSlugs,
});
dispatch(
notifSend({
message: {
key: 'ui.toast.entrySaved',
},
kind: 'success',
dismissAfter: 4000,
addSnackbar({
type: 'success',
message: { key: 'ui.toast.entrySaved' },
}),
);
dispatch(unpublishedEntryPersisted(collection, serializedEntry));
@ -392,15 +376,11 @@ export function persistUnpublishedEntry(collection: Collection, existingUnpublis
dispatch(loadUnpublishedEntry(collection, newSlug));
navigateToEntry(collection.get('name'), newSlug);
}
} catch (error) {
} catch (error: any) {
dispatch(
notifSend({
message: {
key: 'ui.toast.onFailToPersist',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToPersist', details: error },
}),
);
return Promise.reject(
@ -425,25 +405,18 @@ export function updateUnpublishedEntryStatus(
.updateUnpublishedEntryStatus(collection, slug, newStatus)
.then(() => {
dispatch(
notifSend({
message: {
key: 'ui.toast.entryUpdated',
},
kind: 'success',
dismissAfter: 4000,
addSnackbar({
type: 'success',
message: { key: 'ui.toast.entryUpdated' },
}),
);
dispatch(unpublishedEntryStatusChangePersisted(collection, slug, newStatus));
})
.catch((error: Error) => {
dispatch(
notifSend({
message: {
key: 'ui.toast.onFailToUpdateStatus',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToUpdateStatus', details: error },
}),
);
dispatch(unpublishedEntryStatusChangeError(collection, slug));
@ -460,20 +433,18 @@ export function deleteUnpublishedEntry(collection: string, slug: string) {
.deleteUnpublishedEntry(collection, slug)
.then(() => {
dispatch(
notifSend({
addSnackbar({
type: 'success',
message: { key: 'ui.toast.onDeleteUnpublishedChanges' },
kind: 'success',
dismissAfter: 4000,
}),
);
dispatch(unpublishedEntryDeleted(collection, slug));
})
.catch((error: Error) => {
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onDeleteUnpublishedChanges', details: error },
kind: 'danger',
dismissAfter: 8000,
}),
);
dispatch(unpublishedEntryDeleteError(collection, slug));
@ -493,10 +464,9 @@ export function publishUnpublishedEntry(collectionName: string, slug: string) {
// re-load media after entry was published
dispatch(loadMedia());
dispatch(
notifSend({
addSnackbar({
type: 'success',
message: { key: 'ui.toast.entryPublished' },
kind: 'success',
dismissAfter: 4000,
}),
);
dispatch(unpublishedEntryPublished(collectionName, slug));
@ -513,10 +483,9 @@ export function publishUnpublishedEntry(collectionName: string, slug: string) {
}
} catch (error) {
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToPublishEntry', details: error },
kind: 'danger',
dismissAfter: 8000,
}),
);
dispatch(unpublishedEntryPublishError(collectionName, slug));
@ -548,19 +517,17 @@ export function unpublishPublishedEntry(collection: Collection, slug: string) {
dispatch(entryDeleted(collection, slug));
dispatch(loadUnpublishedEntry(collection, slug));
dispatch(
notifSend({
addSnackbar({
type: 'success',
message: { key: 'ui.toast.entryUnpublished' },
kind: 'success',
dismissAfter: 4000,
}),
);
})
.catch((error: Error) => {
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onFailToUnpublishEntry', details: error },
kind: 'danger',
dismissAfter: 8000,
}),
);
dispatch(unpublishedEntryPersistedFail(error, collection, entry.get('slug')));


@ -1,46 +1,36 @@
import { fromJS, List, Map } from 'immutable';
import { isEqual } from 'lodash';
import { actions as notifActions } from 'redux-notifications';
import { Cursor } from 'netlify-cms-lib-util';
import { selectCollectionEntriesCursor } from '../reducers/cursors';
import { selectFields, updateFieldByKey } from '../reducers/collections';
import { selectIntegration, selectPublishedSlugs } from '../reducers';
import { getIntegrationProvider } from '../integrations';
import { currentBackend } from '../backend';
import { serializeValues } from '../lib/serializeEntryValues';
import { createEntry } from '../valueObjects/Entry';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import ValidationErrorTypes from '../constants/validationErrorTypes';
import { addAssets, getAsset } from './media';
import { SortDirection } from '../types/redux';
import { waitForMediaLibraryToLoad, loadMedia } from './mediaLibrary';
import { waitUntil } from './waitUntil';
import { selectIsFetching, selectEntriesSortFields, selectEntryByPath } from '../reducers/entries';
import { getIntegrationProvider } from '../integrations';
import { SortDirection } from '../interface';
import { getProcessSegment } from '../lib/formatters';
import { duplicateDefaultI18nFields, hasI18n, I18N, I18N_FIELD, serializeI18n } from '../lib/i18n';
import { serializeValues } from '../lib/serializeEntryValues';
import { Cursor } from '../lib/util';
import { selectIntegration, selectPublishedSlugs } from '../reducers';
import { selectFields, updateFieldByKey } from '../reducers/collections';
import { selectCollectionEntriesCursor } from '../reducers/cursors';
import { selectEntriesSortFields, selectEntryByPath, selectIsFetching } from '../reducers/entries';
import { selectCustomPath } from '../reducers/entryDraft';
import { navigateToEntry } from '../routing/history';
import { getProcessSegment } from '../lib/formatters';
import { hasI18n, duplicateDefaultI18nFields, serializeI18n, I18N, I18N_FIELD } from '../lib/i18n';
import { addSnackbar } from '../store/slices/snackbars';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import { createEntry } from '../valueObjects/Entry';
import { addAssets, getAsset } from './media';
import { loadMedia, waitForMediaLibraryToLoad } from './mediaLibrary';
import { waitUntil } from './waitUntil';
import type { ImplementationMediaFile } from 'netlify-cms-lib-util';
import type { Set } from 'immutable';
import type { AnyAction } from 'redux';
import type { ThunkDispatch } from 'redux-thunk';
import type {
Collection,
EntryMap,
State,
EntryFields,
EntryField,
ViewFilter,
ViewGroup,
Entry,
} from '../types/redux';
import type { EntryValue } from '../valueObjects/Entry';
import type { Backend } from '../backend';
import type { ViewFilter, ViewGroup } from '../interface';
import type { ImplementationMediaFile } from '../lib/util';
import type { Collection, Entry, EntryField, EntryFields, EntryMap, State } from '../types/redux';
import type AssetProxy from '../valueObjects/AssetProxy';
import type { Set } from 'immutable';
const { notifSend } = notifActions;
import type { EntryValue } from '../valueObjects/Entry';
/*
* Constant Declarations
@ -534,16 +524,15 @@ export function loadEntry(collection: Collection, slug: string) {
const loadedEntry = await tryLoadEntry(getState(), collection, slug);
dispatch(entryLoaded(collection, loadedEntry));
dispatch(createDraftFromEntry(loadedEntry));
} catch (error) {
} catch (error: any) {
console.error(error);
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: {
details: error.message,
key: 'ui.toast.onFailToLoadEntries',
details: error.message,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
dispatch(entryLoadError(error, collection, slug));
@ -630,15 +619,14 @@ export function loadEntries(collection: Collection, page = 0) {
append,
),
);
} catch (err) {
} catch (err: any) {
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: {
details: err,
key: 'ui.toast.onFailToLoadEntries',
details: err,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
return Promise.reject(dispatch(entriesFailed(collection, err)));
@ -681,16 +669,15 @@ export function traverseCollectionCursor(collection: Collection, action: string)
return dispatch(
entriesLoaded(collection, entries, pagination, addAppendActionsToCursor(newCursor), append),
);
} catch (err) {
} catch (err: any) {
console.error(err);
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: {
details: err,
key: 'ui.toast.onFailToLoadEntries',
details: err,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
return Promise.reject(dispatch(entriesFailed(collection, err)));
@ -894,12 +881,11 @@ export function persistEntry(collection: Collection) {
if (hasPresenceErrors) {
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: {
key: 'ui.toast.missingRequiredField',
},
kind: 'danger',
dismissAfter: 8000,
}),
);
}
@ -926,12 +912,11 @@ export function persistEntry(collection: Collection) {
})
.then(async (newSlug: string) => {
dispatch(
notifSend({
addSnackbar({
type: 'success',
message: {
key: 'ui.toast.entrySaved',
},
kind: 'success',
dismissAfter: 4000,
}),
);
@ -951,13 +936,12 @@ export function persistEntry(collection: Collection) {
.catch((error: Error) => {
console.error(error);
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: {
details: error,
key: 'ui.toast.onFailToPersist',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
return Promise.reject(dispatch(entryPersistFail(collection, serializedEntry, error)));
@ -978,13 +962,12 @@ export function deleteEntry(collection: Collection, slug: string) {
})
.catch((error: Error) => {
dispatch(
notifSend({
addSnackbar({
type: 'error',
message: {
details: error,
key: 'ui.toast.onFailToDelete',
details: error,
},
kind: 'danger',
dismissAfter: 8000,
}),
);
console.error(error);


@ -1,5 +1,4 @@
import { isAbsolutePath } from 'netlify-cms-lib-util';
import { isAbsolutePath } from '../lib/util';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import { selectMediaFilePath } from '../reducers/entries';
import { selectMediaFileByPath } from '../reducers/mediaLibrary';
@ -60,7 +59,7 @@ export function loadAsset(resolvedPath: string) {
dispatch(addAsset(asset));
}
dispatch(loadAssetSuccess(resolvedPath));
} catch (e) {
} catch (e: any) {
dispatch(loadAssetFailure(resolvedPath, e));
}
};


@ -1,35 +1,34 @@
import { Map } from 'immutable';
import { actions as notifActions } from 'redux-notifications';
import { basename, getBlobSHA } from 'netlify-cms-lib-util';
import { currentBackend } from '../backend';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import confirm from '../components/UI/Confirm';
import { getIntegrationProvider } from '../integrations';
import { sanitizeSlug } from '../lib/urlHelper';
import { basename, getBlobSHA } from '../lib/util';
import { selectIntegration } from '../reducers';
import {
selectEditingDraft,
selectMediaFilePath,
selectMediaFilePublicPath,
selectEditingDraft,
} from '../reducers/entries';
import { selectMediaDisplayURL, selectMediaFiles } from '../reducers/mediaLibrary';
import { getIntegrationProvider } from '../integrations';
import { addAsset, removeAsset } from './media';
import { addSnackbar } from '../store/slices/snackbars';
import { createAssetProxy } from '../valueObjects/AssetProxy';
import { addDraftEntryMediaFile, removeDraftEntryMediaFile } from './entries';
import { sanitizeSlug } from '../lib/urlHelper';
import { addAsset, removeAsset } from './media';
import { waitUntilWithTimeout } from './waitUntil';
import type {
State,
MediaFile,
DisplayURLState,
MediaLibraryInstance,
EntryField,
} from '../types/redux';
import type { AnyAction } from 'redux';
import type { ThunkDispatch } from 'redux-thunk';
import type { ImplementationMediaFile } from '../lib/util';
import type {
DisplayURLState,
EntryField,
MediaFile,
MediaLibraryInstance,
State,
} from '../types/redux';
import type AssetProxy from '../valueObjects/AssetProxy';
import type { ImplementationMediaFile } from 'netlify-cms-lib-util';
const { notifSend } = notifActions;
export const MEDIA_LIBRARY_OPEN = 'MEDIA_LIBRARY_OPEN';
export const MEDIA_LIBRARY_CLOSE = 'MEDIA_LIBRARY_CLOSE';
@ -169,7 +168,7 @@ export function loadMedia(
.catch((error: { status?: number }) => {
console.error(error);
if (error.status === 404) {
console.log('This 404 was expected and handled appropriately.');
console.info('This 404 was expected and handled appropriately.');
dispatch(mediaLoaded([]));
} else {
dispatch(mediaLoadFailed());
@ -231,7 +230,16 @@ export function persistMedia(file: File, opts: MediaOptions = {}) {
* may not be unique, so we forego this check.
*/
if (!integration && existingFile) {
if (!window.confirm(`${existingFile.name} already exists. Do you want to replace it?`)) {
if (
!(await confirm({
title: 'mediaLibrary.mediaLibrary.alreadyExistsTitle',
body: {
key: 'mediaLibrary.mediaLibrary.alreadyExistsBody',
options: { filename: existingFile.name },
},
color: 'error',
}))
) {
return;
} else {
await dispatch(deleteMedia(existingFile, { privateUpload }));
@ -299,10 +307,12 @@ export function persistMedia(file: File, opts: MediaOptions = {}) {
} catch (error) {
console.error(error);
dispatch(
notifSend({
message: `Failed to persist media: ${error}`,
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: {
key: 'ui.toast.onFailToPersistMedia',
details: error,
},
}),
);
return dispatch(mediaPersistFailed({ privateUpload }));
@ -323,13 +333,15 @@ export function deleteMedia(file: MediaFile, opts: MediaOptions = {}) {
try {
await provider.delete(file.id);
return dispatch(mediaDeleted(file, { privateUpload }));
} catch (error) {
} catch (error: any) {
console.error(error);
dispatch(
notifSend({
message: `Failed to delete media: ${error.message}`,
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: {
key: 'ui.toast.onFailToDeleteMedia',
details: error.message,
},
}),
);
return dispatch(mediaDeleteFailed({ privateUpload }));
@ -353,13 +365,15 @@ export function deleteMedia(file: MediaFile, opts: MediaOptions = {}) {
dispatch(removeDraftEntryMediaFile({ id: file.id }));
}
}
} catch (error) {
} catch (error: any) {
console.error(error);
dispatch(
notifSend({
message: `Failed to delete media: ${error.message}`,
kind: 'danger',
dismissAfter: 8000,
addSnackbar({
type: 'error',
message: {
key: 'ui.toast.onFailToDeleteMedia',
details: error.message,
},
}),
);
return dispatch(mediaDeleteFailed());
@ -401,7 +415,7 @@ export function loadMediaDisplayURL(file: MediaFile) {
} else {
throw new Error('No display URL was returned!');
}
} catch (err) {
} catch (err: any) {
console.error(err);
dispatch(mediaDisplayURLFailure(id, err));
}


@ -1,13 +1,10 @@
import { actions as notifActions } from 'redux-notifications';
import { currentBackend } from '../backend';
import { addSnackbar, removeSnackbarById } from '../store/slices/snackbars';
import type { ThunkDispatch } from 'redux-thunk';
import type { AnyAction } from 'redux';
import type { ThunkDispatch } from 'redux-thunk';
import type { State } from '../types/redux';
const { notifSend, notifDismiss } = notifActions;
export const STATUS_REQUEST = 'STATUS_REQUEST';
export const STATUS_SUCCESS = 'STATUS_SUCCESS';
export const STATUS_FAILURE = 'STATUS_FAILURE';
@ -48,17 +45,16 @@ export function checkBackendStatus() {
const status = await backend.status();
const backendDownKey = 'ui.toast.onBackendDown';
const previousBackendDownNotifs = state.notifs.filter(n => n.message?.key === backendDownKey);
const previousBackendDownNotifs = state.snackbar.messages.filter(
n => n.message?.key === backendDownKey,
);
if (status.api.status === false) {
if (previousBackendDownNotifs.length === 0) {
dispatch(
notifSend({
message: {
details: status.api.statusPage,
key: 'ui.toast.onBackendDown',
},
kind: 'danger',
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onBackendDown', details: status.api.statusPage },
}),
);
}
@ -66,21 +62,19 @@ export function checkBackendStatus() {
} else if (status.api.status === true && previousBackendDownNotifs.length > 0) {
// If backend is up, clear all the danger messages
previousBackendDownNotifs.forEach(notif => {
dispatch(notifDismiss(notif.id));
dispatch(removeSnackbarById(notif.id));
});
}
const authError = status.auth.status === false;
if (authError) {
const key = 'ui.toast.onLoggedOut';
const existingNotification = state.notifs.find(n => n.message?.key === key);
const existingNotification = state.snackbar.messages.find(n => n.message?.key === key);
if (!existingNotification) {
dispatch(
notifSend({
message: {
key: 'ui.toast.onLoggedOut',
},
kind: 'danger',
addSnackbar({
type: 'error',
message: { key: 'ui.toast.onLoggedOut' },
}),
);
}


@ -1,6 +1,6 @@
import { WAIT_UNTIL_ACTION } from '../redux/middleware/waitUntilAction';
import { WAIT_UNTIL_ACTION } from '../store/middleware/waitUntilAction';
import type { WaitActionArgs } from '../redux/middleware/waitUntilAction';
import type { WaitActionArgs } from '../store/middleware/waitUntilAction';
import type { ThunkDispatch } from 'redux-thunk';
import type { AnyAction } from 'redux';
import type { State } from '../types/redux';


@ -1,78 +1,78 @@
import { attempt, flatten, isError, uniq, trim, sortBy, get, set } from 'lodash';
import { List, fromJS, Set } from 'immutable';
import * as fuzzy from 'fuzzy';
import { fromJS, List, Set } from 'immutable';
import { attempt, flatten, get, isError, set, sortBy, trim, uniq } from 'lodash';
import { basename, dirname, extname, join } from 'path';
import { FILES, FOLDER } from './constants/collectionTypes';
import { status } from './constants/publishModes';
import { resolveFormat } from './formats/formats';
import { commitMessageFormatter, previewUrlFormatter, slugFormatter } from './lib/formatters';
import {
localForage,
formatI18nBackup,
getFilePaths,
getI18nBackup,
getI18nDataFiles,
getI18nEntry,
getI18nFiles,
getI18nFilesDepth,
groupEntries,
hasI18n,
} from './lib/i18n';
import { getBackend, invokeEvent } from './lib/registry';
import { sanitizeChar } from './lib/urlHelper';
import {
asyncLock,
blobToFileObj,
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
getPathDepth,
blobToFileObj,
asyncLock,
EDITORIAL_WORKFLOW_ERROR,
} from 'netlify-cms-lib-util';
import { basename, join, extname, dirname } from 'path';
import { stringTemplate } from 'netlify-cms-lib-widgets';
import { resolveFormat } from './formats/formats';
import { selectUseWorkflow } from './reducers/config';
import { selectMediaFilePath, selectEntry } from './reducers/entries';
import { selectIntegration } from './reducers/integrations';
getPathDepth,
localForage,
} from './lib/util';
import { stringTemplate } from './lib/widgets';
import {
selectEntrySlug,
selectEntryPath,
selectFileEntryLabel,
selectAllowNewEntries,
selectAllowDeletion,
selectAllowNewEntries,
selectEntryPath,
selectEntrySlug,
selectFieldsComments,
selectFileEntryLabel,
selectFolderEntryExtension,
selectHasMetaPath,
selectInferedField,
selectMediaFolders,
selectFieldsComments,
selectHasMetaPath,
} from './reducers/collections';
import { createEntry } from './valueObjects/Entry';
import { sanitizeChar } from './lib/urlHelper';
import { getBackend, invokeEvent } from './lib/registry';
import { commitMessageFormatter, slugFormatter, previewUrlFormatter } from './lib/formatters';
import { status } from './constants/publishModes';
import { FOLDER, FILES } from './constants/collectionTypes';
import { selectUseWorkflow } from './reducers/config';
import { selectEntry, selectMediaFilePath } from './reducers/entries';
import { selectCustomPath } from './reducers/entryDraft';
import {
getI18nFilesDepth,
getI18nFiles,
hasI18n,
getFilePaths,
getI18nEntry,
groupEntries,
getI18nDataFiles,
getI18nBackup,
formatI18nBackup,
} from './lib/i18n';
import { selectIntegration } from './reducers/integrations';
import { createEntry } from './valueObjects/Entry';
import type AssetProxy from './valueObjects/AssetProxy';
import type { Map } from 'immutable';
import type { CmsConfig } from './interface';
import type {
CmsConfig,
AsyncLock,
Credentials,
DataFile,
DisplayURL,
Implementation as BackendImplementation,
ImplementationEntry,
UnpublishedEntry,
UnpublishedEntryDiff,
User,
} from './lib/util';
import type {
Collection,
CollectionFile,
Collections,
EntryDraft,
EntryField,
EntryMap,
FilterRule,
EntryDraft,
Collection,
Collections,
CollectionFile,
State,
EntryField,
} from './types/redux';
import type AssetProxy from './valueObjects/AssetProxy';
import type { EntryValue } from './valueObjects/Entry';
import type {
Implementation as BackendImplementation,
DisplayURL,
ImplementationEntry,
Credentials,
User,
AsyncLock,
UnpublishedEntry,
DataFile,
UnpublishedEntryDiff,
} from 'netlify-cms-lib-util';
import type { Map } from 'immutable';
const { extractTemplateVars, dateParsers, expandPath } = stringTemplate;
@ -399,7 +399,7 @@ export class Backend {
async logout() {
try {
await this.implementation.logout();
} catch (e) {
} catch (e: any) {
console.warn('Error during logout', e.message);
} finally {
this.user = null;


@ -0,0 +1,794 @@
import { Base64 } from 'js-base64';
import { partial, result, trim, trimStart } from 'lodash';
import { dirname, basename } from 'path';
import {
localForage,
APIError,
unsentRequest,
requestWithBackoff,
responseParser,
readFile,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
generateContentKey,
parseContentKey,
labelToStatus,
isCMSLabel,
EditorialWorkflowError,
statusToLabel,
PreviewState,
readFileMetadata,
branchFromContentKey,
} from '../../lib/util';
import type { ApiRequest, AssetProxy, PersistOptions, DataFile } from '../../lib/util';
import type { Map } from 'immutable';
export const API_NAME = 'Azure DevOps';
const API_VERSION = 'api-version';
type AzureUser = {
coreAttributes?: {
Avatar?: { value?: { value?: string } };
DisplayName?: { value?: string };
EmailAddress?: { value?: string };
};
};
type AzureGitItem = {
objectId: string;
gitObjectType: AzureObjectType;
path: string;
};
// https://docs.microsoft.com/en-us/rest/api/azure/devops/git/pull%20requests/get%20pull%20request?view=azure-devops-rest-6.1#gitpullrequest
type AzureWebApiTagDefinition = {
active: boolean;
id: string;
name: string;
url: string;
};
type AzurePullRequest = {
title: string;
artifactId: string;
closedDate: string;
creationDate: string;
isDraft: string;
status: AzurePullRequestStatus;
lastMergeSourceCommit: AzureGitChangeItem;
mergeStatus: AzureAsyncPullRequestStatus;
pullRequestId: number;
labels: AzureWebApiTagDefinition[];
sourceRefName: string;
createdBy?: {
displayName?: string;
uniqueName: string;
};
};
type AzurePullRequestCommit = { commitId: string };
enum AzureCommitStatusState {
ERROR = 'error',
FAILED = 'failed',
NOT_APPLICABLE = 'notApplicable',
NOT_SET = 'notSet',
PENDING = 'pending',
SUCCEEDED = 'succeeded',
}
type AzureCommitStatus = {
context: { genre?: string | null; name: string };
state: AzureCommitStatusState;
targetUrl: string;
};
// This does not match the Azure documentation, but it is what some calls actually return:
// Pull Requests, for example, is documented as returning PullRequest[], but the array comes
// back wrapped in this `value` prop of the JSON response.
interface AzureArray<T> {
value: T[];
}
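// Illustrative (hypothetical) example of the wrapper: a GET .../pullrequests response looks like
//   { "count": 1, "value": [ { "pullRequestId": 7, "title": "...", ... } ] }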
enum AzureCommitChangeType {
ADD = 'add',
DELETE = 'delete',
RENAME = 'rename',
EDIT = 'edit',
}
enum AzureItemContentType {
BASE64 = 'base64encoded',
}
enum AzurePullRequestStatus {
ACTIVE = 'active',
COMPLETED = 'completed',
ABANDONED = 'abandoned',
}
enum AzureAsyncPullRequestStatus {
CONFLICTS = 'conflicts',
FAILURE = 'failure',
QUEUED = 'queued',
REJECTED = 'rejectedByPolicy',
SUCCEEDED = 'succeeded',
}
enum AzureObjectType {
BLOB = 'blob',
TREE = 'tree',
}
// https://docs.microsoft.com/en-us/rest/api/azure/devops/git/diffs/get?view=azure-devops-rest-6.1#gitcommitdiffs
interface AzureGitCommitDiffs {
changes: AzureGitChange[];
}
// https://docs.microsoft.com/en-us/rest/api/azure/devops/git/diffs/get?view=azure-devops-rest-6.1#gitchange
interface AzureGitChange {
changeId: number;
item: AzureGitChangeItem;
changeType: AzureCommitChangeType;
originalPath: string;
url: string;
}
interface AzureGitChangeItem {
objectId: string;
originalObjectId: string;
gitObjectType: string;
commitId: string;
path: string;
isFolder: string;
url: string;
}
type AzureRef = {
name: string;
objectId: string;
};
type AzureCommit = {
author: {
date: string;
email: string;
name: string;
};
};
function delay(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms));
}
function getChangeItem(item: AzureCommitItem) {
switch (item.action) {
case AzureCommitChangeType.ADD:
return {
changeType: AzureCommitChangeType.ADD,
item: { path: item.path },
newContent: {
content: item.base64Content,
contentType: AzureItemContentType.BASE64,
},
};
case AzureCommitChangeType.EDIT:
return {
changeType: AzureCommitChangeType.EDIT,
item: { path: item.path },
newContent: {
content: item.base64Content,
contentType: AzureItemContentType.BASE64,
},
};
case AzureCommitChangeType.DELETE:
return {
changeType: AzureCommitChangeType.DELETE,
item: { path: item.path },
};
case AzureCommitChangeType.RENAME:
return {
changeType: AzureCommitChangeType.RENAME,
item: { path: item.path },
sourceServerItem: item.oldPath,
};
default:
return {};
}
}
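// A minimal sketch of the mapping above, using hypothetical paths:
//   getChangeItem({ action: AzureCommitChangeType.RENAME, path: 'content/new.md', oldPath: 'content/old.md' })
//   // => { changeType: 'rename', item: { path: 'content/new.md' }, sourceServerItem: 'content/old.md' }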
type AzureCommitItem = {
action: AzureCommitChangeType;
base64Content?: string;
text?: string;
path: string;
oldPath?: string;
};
interface AzureApiConfig {
apiRoot: string;
repo: { org: string; project: string; repoName: string };
branch: string;
squashMerges: boolean;
initialWorkflowStatus: string;
cmsLabelPrefix: string;
apiVersion: string;
}
export default class API {
apiVersion: string;
token: string;
branch: string;
mergeStrategy: string;
endpointUrl: string;
initialWorkflowStatus: string;
cmsLabelPrefix: string;
constructor(config: AzureApiConfig, token: string) {
const { repo } = config;
const apiRoot = trim(config.apiRoot, '/');
this.endpointUrl = `${apiRoot}/${repo.org}/${repo.project}/_apis/git/repositories/${repo.repoName}`;
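    // e.g. with the default apiRoot 'https://dev.azure.com' and a hypothetical repo
    // { org: 'my-org', project: 'my-project', repoName: 'my-repo' }, this resolves to
    // 'https://dev.azure.com/my-org/my-project/_apis/git/repositories/my-repo'.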
this.token = token;
this.branch = config.branch;
this.mergeStrategy = config.squashMerges ? 'squash' : 'noFastForward';
this.initialWorkflowStatus = config.initialWorkflowStatus;
this.apiVersion = config.apiVersion;
this.cmsLabelPrefix = config.cmsLabelPrefix;
}
withHeaders = (req: ApiRequest) => {
const withHeaders = unsentRequest.withHeaders(
{
Authorization: `Bearer ${this.token}`,
'Content-Type': 'application/json; charset=utf-8',
},
req,
);
return withHeaders;
};
withAzureFeatures = (req: Map<string, Map<string, string>>) => {
if (req.hasIn(['params', API_VERSION])) {
return req;
}
const withParams = unsentRequest.withParams(
{
[API_VERSION]: `${this.apiVersion}`,
},
req,
);
return withParams;
};
buildRequest = (req: ApiRequest) => {
const withHeaders = this.withHeaders(req);
const withAzureFeatures = this.withAzureFeatures(withHeaders);
if (withAzureFeatures.has('cache')) {
return withAzureFeatures;
} else {
const withNoCache = unsentRequest.withNoCache(withAzureFeatures);
return withNoCache;
}
};
request = (req: ApiRequest): Promise<Response> => {
try {
return requestWithBackoff(this, req);
} catch (err: any) {
throw new APIError(err.message, null, API_NAME);
}
};
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
responseToText = responseParser({ format: 'text', apiName: API_NAME });
requestJSON = <T>(req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<T>;
requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;
toBase64 = (str: string) => Promise.resolve(Base64.encode(str));
fromBase64 = (str: string) => Base64.decode(str);
branchToRef = (branch: string): string => `refs/heads/${branch}`;
refToBranch = (ref: string): string => ref.slice('refs/heads/'.length);
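  // For example (illustrative branch names): branchToRef('cms/posts/my-slug') yields
  // 'refs/heads/cms/posts/my-slug', and refToBranch('refs/heads/main') yields 'main'.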
user = async () => {
const result = await this.requestJSON<AzureUser>({
url: 'https://app.vssps.visualstudio.com/_apis/profile/profiles/me',
params: { [API_VERSION]: '6.1-preview.2' },
});
const name = result.coreAttributes?.DisplayName?.value;
const email = result.coreAttributes?.EmailAddress?.value;
const url = result.coreAttributes?.Avatar?.value?.value;
const user = {
name: name || email || '',
avatar_url: `data:image/png;base64,${url}`,
email,
};
return user;
};
async readFileMetadata(
path: string,
sha: string | null | undefined,
{ branch = this.branch } = {},
) {
const fetchFileMetadata = async () => {
try {
const { value } = await this.requestJSON<AzureArray<AzureCommit>>({
url: `${this.endpointUrl}/commits/`,
params: {
'searchCriteria.itemPath': path,
'searchCriteria.itemVersion.version': branch,
'searchCriteria.$top': 1,
},
});
const [commit] = value;
return {
author: commit.author.name || commit.author.email,
updatedOn: commit.author.date,
};
} catch (error) {
return { author: '', updatedOn: '' };
}
};
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
return fileMetadata;
}
readFile = (
path: string,
sha?: string | null,
{ parseText = true, branch = this.branch } = {},
) => {
const fetchContent = () => {
return this.request({
url: `${this.endpointUrl}/items/`,
params: { version: branch, path },
cache: 'no-store',
}).then<Blob | string>(parseText ? this.responseToText : this.responseToBlob);
};
return readFile(sha, fetchContent, localForage, parseText);
};
listFiles = async (path: string, recursive: boolean, branch = this.branch) => {
try {
const { value: items } = await this.requestJSON<AzureArray<AzureGitItem>>({
url: `${this.endpointUrl}/items/`,
params: {
version: branch,
scopePath: path,
recursionLevel: recursive ? 'full' : 'oneLevel',
},
});
const files = items
.filter(item => item.gitObjectType === AzureObjectType.BLOB)
.map(file => ({
id: file.objectId,
path: trimStart(file.path, '/'),
name: basename(file.path),
}));
return files;
} catch (err: any) {
if (err && err.status === 404) {
console.info('This 404 was expected and handled appropriately.');
return [];
} else {
throw err;
}
}
};
async getRef(branch: string = this.branch) {
const { value: refs } = await this.requestJSON<AzureArray<AzureRef>>({
url: `${this.endpointUrl}/refs`,
params: {
$top: '1', // There's only one ref, so keep the payload small
filter: 'heads/' + branch,
},
});
return refs.find(b => b.name == this.branchToRef(branch))!;
}
async deleteRef(ref: AzureRef): Promise<void> {
const deleteBranchPayload = [
{
name: ref.name,
oldObjectId: ref.objectId,
newObjectId: '0000000000000000000000000000000000000000',
},
];
await this.requestJSON({
method: 'POST',
url: `${this.endpointUrl}/refs`,
body: JSON.stringify(deleteBranchPayload),
});
}
async uploadAndCommit(
items: AzureCommitItem[],
comment: string,
branch: string,
newBranch: boolean,
) {
const ref = await this.getRef(newBranch ? this.branch : branch);
const refUpdate = [
{
name: this.branchToRef(branch),
oldObjectId: ref.objectId,
},
];
const changes = items.map(item => getChangeItem(item));
const commits = [{ comment, changes }];
const push = {
refUpdates: refUpdate,
commits,
};
return this.requestJSON({
url: `${this.endpointUrl}/pushes`,
method: 'POST',
body: JSON.stringify(push),
});
}
async retrieveUnpublishedEntryData(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const diffs = await this.getDifferences(pullRequest.sourceRefName);
const diffsWithIds = await Promise.all(
diffs.map(async d => {
const path = trimStart(d.item.path, '/');
const newFile = d.changeType === AzureCommitChangeType.ADD;
const id = d.item.objectId;
return { id, path, newFile };
}),
);
const label = pullRequest.labels.find(l => isCMSLabel(l.name, this.cmsLabelPrefix));
const labelName = label && label.name ? label.name : this.cmsLabelPrefix;
const status = labelToStatus(labelName, this.cmsLabelPrefix);
// Uses creationDate, as we do not have direct access to the updated date
const updatedAt = pullRequest.closedDate ? pullRequest.closedDate : pullRequest.creationDate;
const pullRequestAuthor =
pullRequest.createdBy?.displayName || pullRequest.createdBy?.uniqueName;
return {
collection,
slug,
status,
diffs: diffsWithIds,
updatedAt,
pullRequestAuthor,
};
}
  async getPullRequestStatuses(pullRequest: AzurePullRequest) {
const { value: commits } = await this.requestJSON<AzureArray<AzurePullRequestCommit>>({
url: `${this.endpointUrl}/pullrequests/${pullRequest.pullRequestId}/commits`,
params: {
$top: 1,
},
});
const { value: statuses } = await this.requestJSON<AzureArray<AzureCommitStatus>>({
url: `${this.endpointUrl}/commits/${commits[0].commitId}/statuses`,
params: { latestOnly: true },
});
return statuses;
}
async getStatuses(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
    const statuses = await this.getPullRequestStatuses(pullRequest);
return statuses.map(({ context, state, targetUrl }) => ({
context: context.name,
state: state === AzureCommitStatusState.SUCCEEDED ? PreviewState.Success : PreviewState.Other,
target_url: targetUrl,
}));
}
async getCommitItems(files: { path: string; newPath?: string }[], branch: string) {
const items = await Promise.all(
files.map(async file => {
const [base64Content, fileExists] = await Promise.all([
result(file, 'toBase64', partial(this.toBase64, (file as DataFile).raw)),
this.isFileExists(file.path, branch),
]);
const path = file.newPath || file.path;
const oldPath = file.path;
const renameOrEdit =
path !== oldPath ? AzureCommitChangeType.RENAME : AzureCommitChangeType.EDIT;
const action = fileExists ? renameOrEdit : AzureCommitChangeType.ADD;
return {
action,
base64Content,
path,
oldPath,
} as AzureCommitItem;
}),
);
// move children
for (const item of items.filter(i => i.oldPath && i.action === AzureCommitChangeType.RENAME)) {
const sourceDir = dirname(item.oldPath as string);
const destDir = dirname(item.path);
const children = await this.listFiles(sourceDir, true, branch);
children
.filter(file => file.path !== item.oldPath)
.forEach(file => {
items.push({
action: AzureCommitChangeType.RENAME,
path: file.path.replace(sourceDir, destDir),
oldPath: file.path,
});
});
}
return items;
}
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
const files = [...dataFiles, ...mediaFiles];
if (options.useWorkflow) {
const slug = dataFiles[0].slug;
return this.editorialWorkflowGit(files, slug, options);
} else {
const items = await this.getCommitItems(files, this.branch);
return this.uploadAndCommit(items, options.commitMessage, this.branch, true);
}
}
async deleteFiles(paths: string[], comment: string) {
const ref = await this.getRef(this.branch);
const refUpdate = {
name: ref.name,
oldObjectId: ref.objectId,
};
const changes = paths.map(path =>
getChangeItem({ action: AzureCommitChangeType.DELETE, path }),
);
const commits = [{ comment, changes }];
const push = {
refUpdates: [refUpdate],
commits,
};
return this.requestJSON({
url: `${this.endpointUrl}/pushes`,
method: 'POST',
body: JSON.stringify(push),
});
}
async getPullRequests(sourceBranch?: string) {
const { value: pullRequests } = await this.requestJSON<AzureArray<AzurePullRequest>>({
url: `${this.endpointUrl}/pullrequests`,
params: {
'searchCriteria.status': 'active',
'searchCriteria.targetRefName': this.branchToRef(this.branch),
'searchCriteria.includeLinks': false,
...(sourceBranch ? { 'searchCriteria.sourceRefName': this.branchToRef(sourceBranch) } : {}),
},
});
const filtered = pullRequests.filter(pr => {
return pr.labels.some(label => isCMSLabel(label.name, this.cmsLabelPrefix));
});
return filtered;
}
async listUnpublishedBranches(): Promise<string[]> {
const pullRequests = await this.getPullRequests();
const branches = pullRequests.map(pr => this.refToBranch(pr.sourceRefName));
return branches;
}
async isFileExists(path: string, branch: string) {
try {
await this.requestText({
url: `${this.endpointUrl}/items/`,
params: { version: branch, path },
cache: 'no-store',
});
return true;
} catch (error) {
if (error instanceof APIError && error.status === 404) {
return false;
}
throw error;
}
}
async createPullRequest(branch: string, commitMessage: string, status: string) {
const pr = {
sourceRefName: this.branchToRef(branch),
targetRefName: this.branchToRef(this.branch),
title: commitMessage,
description: DEFAULT_PR_BODY,
labels: [
{
name: statusToLabel(status, this.cmsLabelPrefix),
},
],
};
await this.requestJSON({
method: 'POST',
url: `${this.endpointUrl}/pullrequests`,
params: {
supportsIterations: false,
},
body: JSON.stringify(pr),
});
}
async getBranchPullRequest(branch: string) {
const pullRequests = await this.getPullRequests(branch);
if (pullRequests.length <= 0) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
return pullRequests[0];
}
async getDifferences(to: string) {
const result = await this.requestJSON<AzureGitCommitDiffs>({
url: `${this.endpointUrl}/diffs/commits`,
params: {
baseVersion: this.branch,
targetVersion: this.refToBranch(to),
},
});
return result.changes.filter(
d =>
d.item.gitObjectType === AzureObjectType.BLOB &&
Object.values(AzureCommitChangeType).includes(d.changeType),
);
}
async editorialWorkflowGit(
files: (DataFile | AssetProxy)[],
slug: string,
options: PersistOptions,
) {
const contentKey = generateContentKey(options.collectionName as string, slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
const items = await this.getCommitItems(files, this.branch);
await this.uploadAndCommit(items, options.commitMessage, branch, true);
await this.createPullRequest(
branch,
options.commitMessage,
options.status || this.initialWorkflowStatus,
);
} else {
const items = await this.getCommitItems(files, branch);
await this.uploadAndCommit(items, options.commitMessage, branch, false);
}
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const nonCmsLabels = pullRequest.labels
.filter(label => !isCMSLabel(label.name, this.cmsLabelPrefix))
.map(label => label.name);
const labels = [...nonCmsLabels, statusToLabel(newStatus, this.cmsLabelPrefix)];
await this.updatePullRequestLabels(pullRequest, labels);
}
async deleteUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.abandonPullRequest(pullRequest);
}
async publishUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.completePullRequest(pullRequest);
}
async updatePullRequestLabels(pullRequest: AzurePullRequest, labels: string[]) {
const cmsLabels = pullRequest.labels.filter(l => isCMSLabel(l.name, this.cmsLabelPrefix));
await Promise.all(
cmsLabels.map(l => {
return this.requestText({
method: 'DELETE',
url: `${this.endpointUrl}/pullrequests/${encodeURIComponent(
pullRequest.pullRequestId,
)}/labels/${encodeURIComponent(l.id)}`,
});
}),
);
await Promise.all(
labels.map(l => {
return this.requestText({
method: 'POST',
url: `${this.endpointUrl}/pullrequests/${encodeURIComponent(
pullRequest.pullRequestId,
)}/labels`,
body: JSON.stringify({ name: l }),
});
}),
);
}
async completePullRequest(pullRequest: AzurePullRequest) {
const pullRequestCompletion = {
status: AzurePullRequestStatus.COMPLETED,
lastMergeSourceCommit: pullRequest.lastMergeSourceCommit,
completionOptions: {
deleteSourceBranch: true,
mergeCommitMessage: MERGE_COMMIT_MESSAGE,
mergeStrategy: this.mergeStrategy,
},
};
let response = await this.requestJSON<AzurePullRequest>({
method: 'PATCH',
url: `${this.endpointUrl}/pullrequests/${encodeURIComponent(pullRequest.pullRequestId)}`,
body: JSON.stringify(pullRequestCompletion),
});
    // We need to wait for Azure to actually complete the pull request merge.
    // Sometimes this is instant, but frequently it takes 1-3 seconds.
const DELAY_MILLISECONDS = 500;
const MAX_ATTEMPTS = 10;
let attempt = 1;
while (response.mergeStatus === AzureAsyncPullRequestStatus.QUEUED && attempt <= MAX_ATTEMPTS) {
await delay(DELAY_MILLISECONDS);
response = await this.requestJSON({
url: `${this.endpointUrl}/pullrequests/${encodeURIComponent(pullRequest.pullRequestId)}`,
});
attempt = attempt + 1;
}
}
async abandonPullRequest(pullRequest: AzurePullRequest) {
const pullRequestAbandon = {
status: AzurePullRequestStatus.ABANDONED,
};
await this.requestJSON({
method: 'PATCH',
url: `${this.endpointUrl}/pullrequests/${encodeURIComponent(pullRequest.pullRequestId)}`,
body: JSON.stringify(pullRequestAbandon),
});
await this.deleteRef({
name: pullRequest.sourceRefName,
objectId: pullRequest.lastMergeSourceCommit.commitId,
});
}
}

View File

@ -0,0 +1,84 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { AuthenticationPage, Icon } from '../../ui';
import { ImplicitAuthenticator } from '../../lib/auth';
import alert from '../../components/UI/Alert';
const LoginButtonIcon = styled(Icon)`
margin-right: 18px;
`;
export default class AzureAuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
t: PropTypes.func.isRequired,
};
state = {};
componentDidMount() {
this.auth = new ImplicitAuthenticator({
base_url: `https://login.microsoftonline.com/${this.props.config.backend.tenant_id}`,
auth_endpoint: 'oauth2/authorize',
app_id: this.props.config.backend.app_id,
clearHash: this.props.clearHash,
});
    // Complete implicit authentication if we were redirected back from the provider.
this.auth.completeAuth((err, data) => {
if (err) {
alert({
title: 'auth.errors.authTitle',
body: { key: 'auth.errors.authBody', options: { details: err } },
});
return;
}
this.props.onLogin(data);
});
}
handleLogin = e => {
e.preventDefault();
this.auth.authenticate(
{
scope: 'vso.code_full,user.read',
resource: '499b84ac-1321-427f-aa17-267ca6975798',
prompt: 'select_account',
},
(err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
this.props.onLogin(data);
},
);
};
render() {
const { inProgress, config, t } = this.props;
return (
<AuthenticationPage
onLogin={this.handleLogin}
loginDisabled={inProgress}
loginErrorMessage={this.state.loginError}
logoUrl={config.logo_url}
renderButtonContent={() => (
<React.Fragment>
<LoginButtonIcon type="azure" />
{inProgress ? t('auth.loggingIn') : t('auth.loginWithAzure')}
</React.Fragment>
)}
t={t}
/>
);
}
}

View File

@ -0,0 +1,383 @@
import { trimStart, trim } from 'lodash';
import semaphore from 'semaphore';
import {
basename,
getMediaDisplayURL,
generateContentKey,
getMediaAsBlob,
getPreviewStatus,
asyncLock,
runWithLock,
unpublishedEntries,
entriesByFiles,
filterByExtension,
branchFromContentKey,
entriesByFolder,
contentKeyFromBranch,
getBlobSHA,
} from '../../lib/util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import type { Semaphore } from 'semaphore';
import type {
Credentials,
Implementation,
ImplementationFile,
ImplementationMediaFile,
DisplayURL,
Entry,
AssetProxy,
PersistOptions,
Config,
AsyncLock,
User,
UnpublishedEntryMediaFile,
} from '../../lib/util';
const MAX_CONCURRENT_DOWNLOADS = 10;
function parseAzureRepo(config: Config) {
const { repo } = config.backend;
if (typeof repo !== 'string') {
throw new Error('The Azure backend needs a "repo" in the backend configuration.');
}
const parts = repo.split('/');
if (parts.length !== 3) {
    throw new Error('The Azure backend "repo" must be in the format {org}/{project}/{repo}.');
}
const [org, project, repoName] = parts;
return {
org,
project,
repoName,
};
}
export default class Azure implements Implementation {
lock: AsyncLock;
api?: API;
options: {
initialWorkflowStatus: string;
};
repo: {
org: string;
project: string;
repoName: string;
};
branch: string;
apiRoot: string;
apiVersion: string;
token: string | null;
squashMerges: boolean;
cmsLabelPrefix: string;
mediaFolder: string;
previewContext: string;
_mediaDisplayURLSem?: Semaphore;
constructor(config: Config, options = {}) {
this.options = {
initialWorkflowStatus: '',
...options,
};
this.repo = parseAzureRepo(config);
this.branch = config.backend.branch || 'master';
this.apiRoot = config.backend.api_root || 'https://dev.azure.com';
this.apiVersion = config.backend.api_version || '6.1-preview';
this.token = '';
this.squashMerges = config.backend.squash_merges || false;
this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
this.mediaFolder = trim(config.media_folder, '/');
this.previewContext = config.backend.preview_context || '';
this.lock = asyncLock();
}
isGitBackend() {
return true;
}
async status() {
const auth =
(await this.api!.user()
.then(user => !!user)
.catch(e => {
console.warn('Failed getting Azure user', e);
return false;
})) || false;
return { auth: { status: auth }, api: { status: true, statusPage: '' } };
}
authComponent() {
return AuthenticationPage;
}
restoreUser(user: User) {
return this.authenticate(user);
}
async authenticate(state: Credentials) {
this.token = state.token as string;
this.api = new API(
{
apiRoot: this.apiRoot,
apiVersion: this.apiVersion,
repo: this.repo,
branch: this.branch,
squashMerges: this.squashMerges,
cmsLabelPrefix: this.cmsLabelPrefix,
initialWorkflowStatus: this.options.initialWorkflowStatus,
},
this.token,
);
const user = await this.api.user();
return { token: state.token as string, ...user };
}
/**
* Log the user out by forgetting their access token.
* TODO: *Actual* logout by redirecting to:
* https://login.microsoftonline.com/{tenantId}/oauth2/logout?client_id={clientId}&post_logout_redirect_uri={baseUrl}
*/
logout() {
this.token = null;
return;
}
getToken() {
return Promise.resolve(this.token);
}
async entriesByFolder(folder: string, extension: string, depth: number) {
const listFiles = async () => {
const files = await this.api!.listFiles(folder, depth > 1);
const filtered = files.filter(file => filterByExtension({ path: file.path }, extension));
return filtered.map(file => ({
id: file.id,
path: file.path,
}));
};
const entries = await entriesByFolder(
listFiles,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
return entries;
}
entriesByFiles(files: ImplementationFile[]) {
return entriesByFiles(
files,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
}
async getEntry(path: string) {
const data = (await this.api!.readFile(path)) as string;
return {
file: { path },
data,
};
}
async getMedia() {
const files = await this.api!.listFiles(this.mediaFolder, false);
const mediaFiles = await Promise.all(
files.map(async ({ id, path, name }) => {
const blobUrl = await this.getMediaDisplayURL({ id, path });
return { id, name, displayURL: blobUrl, path };
}),
);
return mediaFiles;
}
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
async getMediaFile(path: string) {
const name = basename(path);
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const fileObj = new File([blob], name);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(blob);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
async persistEntry(entry: Entry, options: PersistOptions): Promise<void> {
const mediaFiles: AssetProxy[] = entry.assets;
await this.api!.persistFiles(entry.dataFiles, mediaFiles, options);
}
async persistMedia(
mediaFile: AssetProxy,
options: PersistOptions,
): Promise<ImplementationMediaFile> {
const fileObj = mediaFile.fileObj as File;
const [id] = await Promise.all([
getBlobSHA(fileObj),
this.api!.persistFiles([], [mediaFile], options),
]);
const { path } = mediaFile;
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path: trimStart(path, '/'),
name: fileObj!.name,
size: fileObj!.size,
file: fileObj,
url,
id: id as string,
};
}
async deleteFiles(paths: string[], commitMessage: string) {
await this.api!.deleteFiles(paths, commitMessage);
}
async loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
const blob = await getMediaAsBlob(file.path, null, readFile);
const name = basename(file.path);
const fileObj = new File([blob], name);
return {
id: file.path,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
}
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
}
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => contentKeyFromBranch(branch)),
);
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const contentKey = generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(contentKey);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
getBranch(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
return branch;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(branch, { path, id });
return mediaFile;
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
async getDeployPreview(collection: string, slug: string) {
try {
const statuses = await this.api!.getStatuses(collection, slug);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
} catch (e) {
return null;
}
}
}

View File

@ -0,0 +1,10 @@
import AzureBackend from './implementation';
import API from './API';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendAzure = {
AzureBackend,
API,
AuthenticationPage,
};
export { AzureBackend, API, AuthenticationPage };
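// Hypothetical consumer-side sketch (not part of this module): the backend is typically
// registered with the CMS core, e.g. CMS.registerBackend('azure', AzureBackend);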

View File

@ -0,0 +1,803 @@
import { flow, get } from 'lodash';
import { dirname } from 'path';
import { oneLine } from 'common-tags';
import { parse } from 'what-the-diff';
import {
localForage,
unsentRequest,
responseParser,
then,
basename,
Cursor,
APIError,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
labelToStatus,
isCMSLabel,
EditorialWorkflowError,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
PreviewState,
parseContentKey,
branchFromContentKey,
requestWithBackoff,
readFileMetadata,
throwOnConflictingBranches,
} from '../../lib/util';
import type {
ApiRequest,
AssetProxy,
PersistOptions,
FetchError,
DataFile,
} from '../../lib/util';
interface Config {
apiRoot?: string;
token?: string;
branch?: string;
repo?: string;
requestFunction?: (req: ApiRequest) => Promise<Response>;
hasWriteAccess?: () => Promise<boolean>;
squashMerges: boolean;
initialWorkflowStatus: string;
cmsLabelPrefix: string;
}
interface CommitAuthor {
name: string;
email: string;
}
enum BitBucketPullRequestState {
MERGED = 'MERGED',
SUPERSEDED = 'SUPERSEDED',
OPEN = 'OPEN',
DECLINED = 'DECLINED',
}
type BitBucketPullRequest = {
description: string;
id: number;
title: string;
state: BitBucketPullRequestState;
updated_on: string;
summary: {
raw: string;
};
source: {
commit: {
hash: string;
};
branch: {
name: string;
};
};
destination: {
commit: {
hash: string;
};
branch: {
name: string;
};
};
author: BitBucketUser;
};
type BitBucketPullRequests = {
size: number;
page: number;
pagelen: number;
next: string;
preview: string;
values: BitBucketPullRequest[];
};
type BitBucketPullComment = {
content: {
raw: string;
};
};
type BitBucketPullComments = {
size: number;
page: number;
pagelen: number;
next: string;
preview: string;
values: BitBucketPullComment[];
};
enum BitBucketPullRequestStatusState {
Successful = 'SUCCESSFUL',
Failed = 'FAILED',
InProgress = 'INPROGRESS',
Stopped = 'STOPPED',
}
type BitBucketPullRequestStatus = {
uuid: string;
name: string;
key: string;
refname: string;
url: string;
description: string;
state: BitBucketPullRequestStatusState;
};
type BitBucketPullRequestStatuses = {
size: number;
page: number;
pagelen: number;
next: string;
preview: string;
values: BitBucketPullRequestStatus[];
};
type DeleteEntry = {
path: string;
delete: true;
};
type BitBucketFile = {
id: string;
type: string;
path: string;
commit?: { hash: string };
};
type BitBucketSrcResult = {
size: number;
page: number;
pagelen: number;
next: string;
previous: string;
values: BitBucketFile[];
};
type BitBucketUser = {
username: string;
display_name: string;
nickname: string;
links: {
avatar: {
href: string;
};
};
};
type BitBucketBranch = {
name: string;
target: { hash: string };
};
type BitBucketCommit = {
hash: string;
author: {
raw: string;
user: {
display_name: string;
nickname: string;
};
};
date: string;
};
export const API_NAME = 'Bitbucket';
const APPLICATION_JSON = 'application/json; charset=utf-8';
function replace404WithEmptyResponse(err: FetchError) {
if (err && err.status === 404) {
console.info('This 404 was expected and handled appropriately.');
return { size: 0, values: [] as BitBucketFile[] } as BitBucketSrcResult;
} else {
return Promise.reject(err);
}
}
export default class API {
apiRoot: string;
branch: string;
repo: string;
requestFunction: (req: ApiRequest) => Promise<Response>;
repoURL: string;
commitAuthor?: CommitAuthor;
mergeStrategy: string;
initialWorkflowStatus: string;
cmsLabelPrefix: string;
constructor(config: Config) {
this.apiRoot = config.apiRoot || 'https://api.bitbucket.org/2.0';
this.branch = config.branch || 'master';
this.repo = config.repo || '';
this.requestFunction = config.requestFunction || unsentRequest.performRequest;
// Allow overriding this.hasWriteAccess
this.hasWriteAccess = config.hasWriteAccess || this.hasWriteAccess;
this.repoURL = this.repo ? `/repositories/${this.repo}` : '';
this.mergeStrategy = config.squashMerges ? 'squash' : 'merge_commit';
this.initialWorkflowStatus = config.initialWorkflowStatus;
this.cmsLabelPrefix = config.cmsLabelPrefix;
}
buildRequest = (req: ApiRequest) => {
const withRoot = unsentRequest.withRoot(this.apiRoot)(req);
if (withRoot.has('cache')) {
return withRoot;
} else {
const withNoCache = unsentRequest.withNoCache(withRoot);
return withNoCache;
}
};
request = (req: ApiRequest): Promise<Response> => {
try {
return requestWithBackoff(this, req);
} catch (err: any) {
throw new APIError(err.message, null, API_NAME);
}
};
responseToJSON = responseParser({ format: 'json', apiName: API_NAME });
responseToBlob = responseParser({ format: 'blob', apiName: API_NAME });
responseToText = responseParser({ format: 'text', apiName: API_NAME });
// eslint-disable-next-line @typescript-eslint/no-explicit-any
requestJSON = (req: ApiRequest) => this.request(req).then(this.responseToJSON) as Promise<any>;
requestText = (req: ApiRequest) => this.request(req).then(this.responseToText) as Promise<string>;
user = () => this.requestJSON('/user') as Promise<BitBucketUser>;
hasWriteAccess = async () => {
const response = await this.request(this.repoURL);
if (response.status === 404) {
throw Error('Repo not found');
}
return response.ok;
};
getBranch = async (branchName: string) => {
const branch: BitBucketBranch = await this.requestJSON(
`${this.repoURL}/refs/branches/${branchName}`,
);
return branch;
};
branchCommitSha = async (branch: string) => {
const {
target: { hash: branchSha },
}: BitBucketBranch = await this.getBranch(branch);
return branchSha;
};
defaultBranchCommitSha = () => {
return this.branchCommitSha(this.branch);
};
isFile = ({ type }: BitBucketFile) => type === 'commit_file';
getFileId = (commitHash: string, path: string) => {
return `${commitHash}/${path}`;
};
processFile = (file: BitBucketFile) => ({
id: file.id,
type: file.type,
path: file.path,
name: basename(file.path),
// BitBucket does not return file SHAs, but it does give us the
// commit SHA. Since the commit SHA will change if any files do,
// we can construct an ID using the commit SHA and the file path
// that will help with caching (though not as well as a normal
// SHA, since it will change even if the individual file itself
// doesn't.)
...(file.commit && file.commit.hash ? { id: this.getFileId(file.commit.hash, file.path) } : {}),
});
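  // e.g. getFileId('a1b2c3d', 'content/posts/post.md') === 'a1b2c3d/content/posts/post.md' (hypothetical values).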
processFiles = (files: BitBucketFile[]) => files.filter(this.isFile).map(this.processFile);
readFile = async (
path: string,
sha?: string | null,
{ parseText = true, branch = this.branch, head = '' } = {},
): Promise<string | Blob> => {
const fetchContent = async () => {
const node = head ? head : await this.branchCommitSha(branch);
const content = await this.request({
url: `${this.repoURL}/src/${node}/${path}`,
cache: 'no-store',
}).then<string | Blob>(parseText ? this.responseToText : this.responseToBlob);
return content;
};
const content = await readFile(sha, fetchContent, localForage, parseText);
return content;
};
async readFileMetadata(path: string, sha: string | null | undefined) {
const fetchFileMetadata = async () => {
try {
const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
url: `${this.repoURL}/commits`,
params: { path, include: this.branch },
});
const commit = values[0];
return {
author: commit.author.user
? commit.author.user.display_name || commit.author.user.nickname
: commit.author.raw,
updatedOn: commit.date,
};
} catch (e) {
return { author: '', updatedOn: '' };
}
};
const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
return fileMetadata;
}
async isShaExistsInBranch(branch: string, sha: string) {
const { values }: { values: BitBucketCommit[] } = await this.requestJSON({
url: `${this.repoURL}/commits`,
params: { include: branch, pagelen: 100 },
}).catch(e => {
console.info(`Failed getting commits for branch '${branch}'`, e);
return [];
});
return values.some(v => v.hash === sha);
}
getEntriesAndCursor = (jsonResponse: BitBucketSrcResult) => {
const {
size: count,
page,
pagelen: pageSize,
next,
previous: prev,
values: entries,
} = jsonResponse;
const pageCount = pageSize && count ? Math.ceil(count / pageSize) : undefined;
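    // e.g. with count = 250 entries and pageSize = 100, pageCount = Math.ceil(250 / 100) = 3 (illustrative numbers).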
return {
entries,
cursor: Cursor.create({
actions: [...(next ? ['next'] : []), ...(prev ? ['prev'] : [])],
meta: { page, count, pageSize, pageCount },
data: { links: { next, prev } },
}),
};
};
listFiles = async (path: string, depth = 1, pagelen: number, branch: string) => {
const node = await this.branchCommitSha(branch);
const result: BitBucketSrcResult = await this.requestJSON({
url: `${this.repoURL}/src/${node}/${path}`,
params: {
max_depth: depth,
pagelen,
},
}).catch(replace404WithEmptyResponse);
const { entries, cursor } = this.getEntriesAndCursor(result);
return { entries: this.processFiles(entries), cursor: cursor as Cursor };
};
traverseCursor = async (
cursor: Cursor,
action: string,
): Promise<{
cursor: Cursor;
entries: { path: string; name: string; type: string; id: string }[];
}> =>
flow([
this.requestJSON,
then(this.getEntriesAndCursor),
then<
{ cursor: Cursor; entries: BitBucketFile[] },
{ cursor: Cursor; entries: BitBucketFile[] }
>(({ cursor: newCursor, entries }) => ({
cursor: newCursor,
entries: this.processFiles(entries),
})),
])(cursor.data!.getIn(['links', action]));
listAllFiles = async (path: string, depth: number, branch: string) => {
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(
path,
depth,
100,
branch,
);
const entries = [...initialEntries];
let currentCursor = initialCursor;
while (currentCursor && currentCursor.actions!.has('next')) {
const { cursor: newCursor, entries: newEntries } = await this.traverseCursor(
currentCursor,
'next',
);
entries.push(...newEntries);
currentCursor = newCursor;
}
return this.processFiles(entries);
};
async uploadFiles(
files: { path: string; newPath?: string; delete?: boolean }[],
{
commitMessage,
branch,
parentSha,
}: { commitMessage: string; branch: string; parentSha?: string },
) {
const formData = new FormData();
const toMove: { from: string; to: string; contentBlob: Blob }[] = [];
files.forEach(file => {
if (file.delete) {
// delete the file
formData.append('files', file.path);
} else if (file.newPath) {
const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
toMove.push({ from: file.path, to: file.newPath, contentBlob });
} else {
// add/modify the file
const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
// Third param is filename header, in case path is `message`, `branch`, etc.
formData.append(file.path, contentBlob, basename(file.path));
}
});
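    // Sketch of the resulting multipart form for a hypothetical commit that edits one file and
    // deletes another: 'content/posts/hello.md' => <blob>, 'files' => 'content/old.md',
    // plus 'message', 'branch' and optionally 'author'/'parents' appended below.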
for (const { from, to, contentBlob } of toMove) {
const sourceDir = dirname(from);
const destDir = dirname(to);
const filesBranch = parentSha ? this.branch : branch;
const files = await this.listAllFiles(sourceDir, 100, filesBranch);
for (const file of files) {
// to move a file in Bitbucket we need to delete the old path
// and upload the file content to the new path
// NOTE: this is very wasteful, and also the Bitbucket `diff` API
// reports these files as deleted+added instead of renamed
// delete current path
formData.append('files', file.path);
// create in new path
const content =
file.path === from
? contentBlob
: await this.readFile(file.path, null, {
branch: filesBranch,
parseText: false,
});
formData.append(file.path.replace(sourceDir, destDir), content, basename(file.path));
}
}
if (commitMessage) {
formData.append('message', commitMessage);
}
if (this.commitAuthor) {
const { name, email } = this.commitAuthor;
formData.append('author', `${name} <${email}>`);
}
formData.append('branch', branch);
if (parentSha) {
formData.append('parents', parentSha);
}
try {
await this.requestText({
url: `${this.repoURL}/src`,
method: 'POST',
body: formData,
});
} catch (error: any) {
const message = error.message || '';
// very descriptive message from Bitbucket
if (parentSha && message.includes('Something went wrong')) {
await throwOnConflictingBranches(branch, name => this.getBranch(name), API_NAME);
}
throw error;
}
return files;
}
async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
const files = [...dataFiles, ...mediaFiles];
if (options.useWorkflow) {
const slug = dataFiles[0].slug;
return this.editorialWorkflowGit(files, slug, options);
} else {
return this.uploadFiles(files, { commitMessage: options.commitMessage, branch: this.branch });
}
}
async addPullRequestComment(pullRequest: BitBucketPullRequest, comment: string) {
await this.requestJSON({
method: 'POST',
url: `${this.repoURL}/pullrequests/${pullRequest.id}/comments`,
headers: { 'Content-Type': APPLICATION_JSON },
body: JSON.stringify({
content: {
raw: comment,
},
}),
});
}
async getPullRequestLabel(id: number) {
const comments: BitBucketPullComments = await this.requestJSON({
url: `${this.repoURL}/pullrequests/${id}/comments`,
params: {
pagelen: 100,
},
});
return comments.values.map(c => c.content.raw)[comments.values.length - 1];
}
async createPullRequest(branch: string, commitMessage: string, status: string) {
const pullRequest: BitBucketPullRequest = await this.requestJSON({
method: 'POST',
url: `${this.repoURL}/pullrequests`,
headers: { 'Content-Type': APPLICATION_JSON },
body: JSON.stringify({
title: commitMessage,
source: {
branch: {
name: branch,
},
},
destination: {
branch: {
name: this.branch,
},
},
description: DEFAULT_PR_BODY,
close_source_branch: true,
}),
});
// use comments for status labels
await this.addPullRequestComment(pullRequest, statusToLabel(status, this.cmsLabelPrefix));
}
async getDifferences(source: string, destination: string = this.branch) {
if (source === destination) {
return [];
}
const rawDiff = await this.requestText({
url: `${this.repoURL}/diff/${source}..${destination}`,
params: {
binary: false,
},
});
const diffs = parse(rawDiff).map(d => {
const oldPath = d.oldPath?.replace(/b\//, '') || '';
const newPath = d.newPath?.replace(/b\//, '') || '';
const path = newPath || (oldPath as string);
return {
oldPath,
newPath,
status: d.status,
newFile: d.status === 'added',
path,
binary: d.binary || /.svg$/.test(path),
};
});
return diffs;
}
async editorialWorkflowGit(
files: (DataFile | AssetProxy)[],
slug: string,
options: PersistOptions,
) {
const contentKey = generateContentKey(options.collectionName as string, slug);
const branch = branchFromContentKey(contentKey);
const unpublished = options.unpublished || false;
if (!unpublished) {
const defaultBranchSha = await this.branchCommitSha(this.branch);
await this.uploadFiles(files, {
commitMessage: options.commitMessage,
branch,
parentSha: defaultBranchSha,
});
await this.createPullRequest(
branch,
options.commitMessage,
options.status || this.initialWorkflowStatus,
);
} else {
// mark files for deletion
const diffs = await this.getDifferences(branch);
const toDelete: DeleteEntry[] = [];
for (const diff of diffs.filter(d => d.binary && d.status !== 'deleted')) {
if (!files.some(file => file.path === diff.path)) {
toDelete.push({ path: diff.path, delete: true });
}
}
await this.uploadFiles([...files, ...toDelete], {
commitMessage: options.commitMessage,
branch,
});
}
}
deleteFiles = (paths: string[], message: string) => {
const body = new FormData();
paths.forEach(path => {
body.append('files', path);
});
body.append('branch', this.branch);
if (message) {
body.append('message', message);
}
if (this.commitAuthor) {
const { name, email } = this.commitAuthor;
body.append('author', `${name} <${email}>`);
}
return flow([unsentRequest.withMethod('POST'), unsentRequest.withBody(body), this.request])(
`${this.repoURL}/src`,
);
};
async getPullRequests(sourceBranch?: string) {
const sourceQuery = sourceBranch
? `source.branch.name = "${sourceBranch}"`
: `source.branch.name ~ "${CMS_BRANCH_PREFIX}/"`;
const pullRequests: BitBucketPullRequests = await this.requestJSON({
url: `${this.repoURL}/pullrequests`,
params: {
pagelen: 50,
q: oneLine`
source.repository.full_name = "${this.repo}"
AND state = "${BitBucketPullRequestState.OPEN}"
AND destination.branch.name = "${this.branch}"
AND comment_count > 0
AND ${sourceQuery}
`,
},
});
const labels = await Promise.all(
pullRequests.values.map(pr => this.getPullRequestLabel(pr.id)),
);
return pullRequests.values.filter((_, index) => isCMSLabel(labels[index], this.cmsLabelPrefix));
}
async getBranchPullRequest(branch: string) {
const pullRequests = await this.getPullRequests(branch);
if (pullRequests.length <= 0) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
return pullRequests[0];
}
async listUnpublishedBranches() {
console.info(
'%c Checking for Unpublished entries',
'line-height: 30px;text-align: center;font-weight: bold',
);
const pullRequests = await this.getPullRequests();
const branches = pullRequests.map(mr => mr.source.branch.name);
return branches;
}
async retrieveUnpublishedEntryData(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const diffs = await this.getDifferences(branch);
const label = await this.getPullRequestLabel(pullRequest.id);
const status = labelToStatus(label, this.cmsLabelPrefix);
const updatedAt = pullRequest.updated_on;
const pullRequestAuthor = pullRequest.author.display_name;
return {
collection,
slug,
status,
// TODO: get real id
diffs: diffs
.filter(d => d.status !== 'deleted')
.map(d => ({ path: d.path, newFile: d.newFile, id: '' })),
updatedAt,
pullRequestAuthor,
};
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.addPullRequestComment(pullRequest, statusToLabel(newStatus, this.cmsLabelPrefix));
}
async mergePullRequest(pullRequest: BitBucketPullRequest) {
await this.requestJSON({
method: 'POST',
url: `${this.repoURL}/pullrequests/${pullRequest.id}/merge`,
headers: { 'Content-Type': APPLICATION_JSON },
body: JSON.stringify({
message: MERGE_COMMIT_MESSAGE,
close_source_branch: true,
merge_strategy: this.mergeStrategy,
}),
});
}
async publishUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.mergePullRequest(pullRequest);
}
async declinePullRequest(pullRequest: BitBucketPullRequest) {
await this.requestJSON({
method: 'POST',
url: `${this.repoURL}/pullrequests/${pullRequest.id}/decline`,
});
}
async deleteBranch(branch: string) {
await this.request({
method: 'DELETE',
url: `${this.repoURL}/refs/branches/${branch}`,
});
}
async deleteUnpublishedEntry(collectionName: string, slug: string) {
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
await this.declinePullRequest(pullRequest);
await this.deleteBranch(branch);
}
async getPullRequestStatuses(pullRequest: BitBucketPullRequest) {
    const statuses: BitBucketPullRequestStatuses = await this.requestJSON({
url: `${this.repoURL}/pullrequests/${pullRequest.id}/statuses`,
params: {
pagelen: 100,
},
});
return statuses.values;
}
async getStatuses(collectionName: string, slug: string) {
const contentKey = generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const statuses = await this.getPullRequestStatuses(pullRequest);
return statuses.map(({ key, state, url }) => ({
context: key,
state:
state === BitBucketPullRequestStatusState.Successful
? PreviewState.Success
: PreviewState.Other,
target_url: url,
}));
}
async getUnpublishedEntrySha(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
return pullRequest.destination.commit.hash;
}
}

View File

@ -0,0 +1,95 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { AuthenticationPage, Icon } from '../../ui';
import { NetlifyAuthenticator, ImplicitAuthenticator } from '../../lib/auth';
const LoginButtonIcon = styled(Icon)`
margin-right: 18px;
`;
export default class BitbucketAuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
t: PropTypes.func.isRequired,
};
state = {};
componentDidMount() {
const { auth_type: authType = '' } = this.props.config.backend;
if (authType === 'implicit') {
const {
base_url = 'https://bitbucket.org',
auth_endpoint = 'site/oauth2/authorize',
app_id = '',
} = this.props.config.backend;
this.auth = new ImplicitAuthenticator({
base_url,
auth_endpoint,
app_id,
clearHash: this.props.clearHash,
});
      // Complete implicit authentication if we were redirected back from the provider.
this.auth.completeAuth((err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
this.props.onLogin(data);
});
this.authSettings = { scope: 'repository:write' };
} else {
this.auth = new NetlifyAuthenticator({
base_url: this.props.base_url,
site_id:
document.location.host.split(':')[0] === 'localhost'
? 'cms.netlify.com'
: this.props.siteId,
auth_endpoint: this.props.authEndpoint,
});
this.authSettings = { provider: 'bitbucket', scope: 'repo' };
}
}
handleLogin = e => {
e.preventDefault();
this.auth.authenticate(this.authSettings, (err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
this.props.onLogin(data);
});
};
render() {
const { inProgress, config, t } = this.props;
return (
<AuthenticationPage
onLogin={this.handleLogin}
loginDisabled={inProgress}
loginErrorMessage={this.state.loginError}
logoUrl={config.logo_url}
siteUrl={config.site_url}
renderButtonContent={() => (
<React.Fragment>
<LoginButtonIcon type="bitbucket" />
{inProgress ? t('auth.loggingIn') : t('auth.loginWithBitbucket')}
</React.Fragment>
)}
t={t}
/>
);
}
}

View File

@ -0,0 +1,103 @@
import minimatch from 'minimatch';
import { unsentRequest } from '../../lib/util';
import type { ApiRequest, PointerFile } from '../../lib/util';
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
interface LfsBatchAction {
href: string;
header?: { [key: string]: string };
expires_in?: number;
expires_at?: string;
}
interface LfsBatchObject {
oid: string;
size: number;
}
interface LfsBatchObjectUpload extends LfsBatchObject {
actions?: {
upload: LfsBatchAction;
verify?: LfsBatchAction;
};
}
interface LfsBatchObjectError extends LfsBatchObject {
error: {
code: number;
message: string;
};
}
interface LfsBatchUploadResponse {
transfer?: string;
objects: (LfsBatchObjectUpload | LfsBatchObjectError)[];
}
export class GitLfsClient {
private static defaultContentHeaders = {
Accept: 'application/vnd.git-lfs+json',
['Content-Type']: 'application/vnd.git-lfs+json',
};
constructor(
public enabled: boolean,
public rootURL: string,
public patterns: string[],
private makeAuthorizedRequest: MakeAuthorizedRequest,
) {}
matchPath(path: string) {
return this.patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
}
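  // For example, with hypothetical patterns ['*.png', '*.jpg'], matchPath('static/img/logo.png') is true,
  // since matchBase lets extension-only globs match against the file's basename.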
async uploadResource(pointer: PointerFile, resource: Blob): Promise<string> {
const requests = await this.getResourceUploadRequests([pointer]);
for (const request of requests) {
await this.doUpload(request.actions!.upload, resource);
if (request.actions!.verify) {
await this.doVerify(request.actions!.verify, request);
}
}
return pointer.sha;
}
private async doUpload(upload: LfsBatchAction, resource: Blob) {
await unsentRequest.fetchWithTimeout(decodeURI(upload.href), {
method: 'PUT',
body: resource,
headers: upload.header,
});
}
private async doVerify(verify: LfsBatchAction, object: LfsBatchObject) {
this.makeAuthorizedRequest({
url: decodeURI(verify.href),
method: 'POST',
headers: { ...GitLfsClient.defaultContentHeaders, ...verify.header },
body: JSON.stringify({ oid: object.oid, size: object.size }),
});
}
private async getResourceUploadRequests(objects: PointerFile[]): Promise<LfsBatchObjectUpload[]> {
const response = await this.makeAuthorizedRequest({
url: `${this.rootURL}/objects/batch`,
method: 'POST',
headers: GitLfsClient.defaultContentHeaders,
body: JSON.stringify({
operation: 'upload',
transfers: ['basic'],
objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
}),
});
return ((await response.json()) as LfsBatchUploadResponse).objects.filter(object => {
if ('error' in object) {
console.error(object.error);
return false;
}
return object.actions;
});
}
}

View File

@ -0,0 +1,630 @@
import semaphore from 'semaphore';
import { trimStart } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
filterByExtension,
unsentRequest,
basename,
getBlobSHA,
entriesByFolder,
entriesByFiles,
getMediaDisplayURL,
getMediaAsBlob,
unpublishedEntries,
runWithLock,
asyncLock,
getPreviewStatus,
getLargeMediaPatternsFromGitAttributesFile,
getPointerFileForMediaFileObj,
getLargeMediaFilteredMediaFiles,
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
localForage,
allEntriesByFolder,
AccessTokenError,
branchFromContentKey,
} from '../../lib/util';
import { NetlifyAuthenticator } from '../../lib/auth';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import { GitLfsClient } from './git-lfs-client';
import type {
Entry,
ApiRequest,
Cursor,
AssetProxy,
PersistOptions,
DisplayURL,
Implementation,
User,
Credentials,
Config,
ImplementationFile,
AsyncLock,
FetchError,
} from '../../lib/util';
import type { Semaphore } from 'semaphore';
const MAX_CONCURRENT_DOWNLOADS = 10;
const STATUS_PAGE = 'https://bitbucket.status.atlassian.com';
const BITBUCKET_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
const BITBUCKET_OPERATIONAL_UNITS = ['API', 'Authentication and user management', 'Git LFS'];
type BitbucketStatusComponent = {
id: string;
name: string;
status: string;
};
// Implementation wrapper class
export default class BitbucketBackend implements Implementation {
lock: AsyncLock;
api: API | null;
updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
options: {
proxied: boolean;
API: API | null;
updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
initialWorkflowStatus: string;
};
repo: string;
branch: string;
apiRoot: string;
baseUrl: string;
siteId: string;
token: string | null;
mediaFolder: string;
refreshToken?: string;
refreshedTokenPromise?: Promise<string>;
authenticator?: NetlifyAuthenticator;
_mediaDisplayURLSem?: Semaphore;
squashMerges: boolean;
cmsLabelPrefix: string;
previewContext: string;
largeMediaURL: string;
_largeMediaClientPromise?: Promise<GitLfsClient>;
authType: string;
constructor(config: Config, options = {}) {
this.options = {
proxied: false,
API: null,
updateUserCredentials: async () => null,
initialWorkflowStatus: '',
...options,
};
if (
!this.options.proxied &&
(config.backend.repo === null || config.backend.repo === undefined)
) {
throw new Error('The BitBucket backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.updateUserCredentials = this.options.updateUserCredentials;
this.repo = config.backend.repo || '';
this.branch = config.backend.branch || 'master';
this.apiRoot = config.backend.api_root || 'https://api.bitbucket.org/2.0';
this.baseUrl = config.base_url || '';
this.siteId = config.site_id || '';
this.largeMediaURL =
config.backend.large_media_url || `https://bitbucket.org/${config.backend.repo}/info/lfs`;
this.token = '';
this.mediaFolder = config.media_folder;
this.squashMerges = config.backend.squash_merges || false;
this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
this.previewContext = config.backend.preview_context || '';
this.lock = asyncLock();
this.authType = config.backend.auth_type || '';
}
isGitBackend() {
return true;
}
async status() {
const api = await fetch(BITBUCKET_STATUS_ENDPOINT)
.then(res => res.json())
.then(res => {
return res['components']
.filter((statusComponent: BitbucketStatusComponent) =>
BITBUCKET_OPERATIONAL_UNITS.includes(statusComponent.name),
)
.every(
(statusComponent: BitbucketStatusComponent) => statusComponent.status === 'operational',
);
})
.catch(e => {
console.warn('Failed getting BitBucket status', e);
return true;
});
let auth = false;
// no need to check auth if api is down
if (api) {
auth =
(await this.api
?.user()
.then(user => !!user)
.catch(e => {
console.warn('Failed getting Bitbucket user', e);
return false;
})) || false;
}
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
}
authComponent() {
return AuthenticationPage;
}
setUser(user: { token: string }) {
this.token = user.token;
this.api = new API({
requestFunction: this.apiRequestFunction,
branch: this.branch,
repo: this.repo,
squashMerges: this.squashMerges,
cmsLabelPrefix: this.cmsLabelPrefix,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
}
requestFunction = async (req: ApiRequest) => {
const token = await this.getToken();
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
return unsentRequest.performRequest(authorizedRequest);
};
restoreUser(user: User) {
return this.authenticate(user);
}
async authenticate(state: Credentials) {
this.token = state.token as string;
this.refreshToken = state.refresh_token;
this.api = new API({
requestFunction: this.apiRequestFunction,
branch: this.branch,
repo: this.repo,
apiRoot: this.apiRoot,
squashMerges: this.squashMerges,
cmsLabelPrefix: this.cmsLabelPrefix,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const isCollab = await this.api.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a Bitbucket account with access.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your BitBucket user account does not have access to this repo.');
}
const user = await this.api.user();
// Authorized user
return {
...user,
name: user.display_name,
login: user.username,
token: state.token,
avatar_url: user.links.avatar.href,
refresh_token: state.refresh_token,
};
}
getRefreshedAccessToken() {
if (this.authType === 'implicit') {
throw new AccessTokenError(`Can't refresh access token when using implicit auth`);
}
if (this.refreshedTokenPromise) {
return this.refreshedTokenPromise;
}
// instantiating a new Authenticator on each refresh isn't ideal, so create it once and reuse it
if (!this.authenticator) {
const cfg = {
base_url: this.baseUrl,
site_id: this.siteId,
};
this.authenticator = new NetlifyAuthenticator(cfg);
}
this.refreshedTokenPromise = this.authenticator!.refresh({
provider: 'bitbucket',
refresh_token: this.refreshToken as string,
}).then(({ token, refresh_token }) => {
this.token = token;
this.refreshToken = refresh_token;
this.refreshedTokenPromise = undefined;
this.updateUserCredentials({ token, refresh_token });
return token;
});
return this.refreshedTokenPromise;
}
logout() {
this.token = null;
return;
}
getToken() {
if (this.refreshedTokenPromise) {
return this.refreshedTokenPromise;
}
return Promise.resolve(this.token);
}
apiRequestFunction = async (req: ApiRequest) => {
const token = (
this.refreshedTokenPromise ? await this.refreshedTokenPromise : this.token
) as string;
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
const response: Response = await unsentRequest.performRequest(authorizedRequest);
if (response.status === 401) {
const json = await response.json().catch(() => null);
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
const newToken = await this.getRefreshedAccessToken();
const reqWithNewToken = unsentRequest.withHeaders(
{
Authorization: `Bearer ${newToken}`,
},
req,
) as ApiRequest;
return unsentRequest.performRequest(reqWithNewToken);
}
}
return response;
};
async entriesByFolder(folder: string, extension: string, depth: number) {
let cursor: Cursor;
const listFiles = () =>
this.api!.listFiles(folder, depth, 20, this.branch).then(({ entries, cursor: c }) => {
cursor = c.mergeMeta({ extension });
return entries.filter(e => filterByExtension(e, extension));
});
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return files;
}
async listAllFiles(folder: string, extension: string, depth: number) {
const files = await this.api!.listAllFiles(folder, depth, this.branch);
const filtered = files.filter(file => filterByExtension(file, extension));
return filtered;
}
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const files = await allEntriesByFolder({
listAllFiles: () => this.listAllFiles(folder, extension, depth),
readFile,
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
apiName: API_NAME,
branch: this.branch,
localForage,
folder,
extension,
depth,
getDefaultBranch: () => Promise.resolve({ name: this.branch, sha: head }),
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
getDifferences: (source, destination) => this.api!.getDifferences(source, destination),
getFileId: path => Promise.resolve(this.api!.getFileId(head, path)),
filterFile: file => filterByExtension(file, extension),
});
return files;
}
async entriesByFiles(files: ImplementationFile[]) {
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
}
getEntry(path: string) {
return this.api!.readFile(path).then(data => ({
file: { path, id: null },
data: data as string,
}));
}
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listAllFiles(mediaFolder, 1, this.branch).then(files =>
files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
);
}
getLargeMediaClient() {
if (!this._largeMediaClientPromise) {
this._largeMediaClientPromise = (async (): Promise<GitLfsClient> => {
const patterns = await this.api!.readFile('.gitattributes')
.then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
.catch((err: FetchError) => {
if (err.status === 404) {
console.info('This 404 was expected and handled appropriately.');
} else {
console.error(err);
}
return [];
});
return new GitLfsClient(
!!(this.largeMediaURL && patterns.length > 0),
this.largeMediaURL,
patterns,
this.requestFunction,
);
})();
}
return this._largeMediaClientPromise;
}
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
async getMediaFile(path: string) {
const name = basename(path);
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const fileObj = blobToFileObj(name, blob);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(fileObj);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
async persistEntry(entry: Entry, options: PersistOptions) {
const client = await this.getLargeMediaClient();
// persistEntry is a transactional operation
return runWithLock(
this.lock,
async () =>
this.api!.persistFiles(
entry.dataFiles,
client.enabled
? await getLargeMediaFilteredMediaFiles(client, entry.assets)
: entry.assets,
options,
),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const { fileObj, path } = mediaFile;
const displayURL = URL.createObjectURL(fileObj as Blob);
const client = await this.getLargeMediaClient();
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
if (!client.enabled || !client.matchPath(fixedPath)) {
return this._persistMedia(mediaFile, options);
}
const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
return {
...(await this._persistMedia(persistMediaArgument, options)),
displayURL,
};
}
async _persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const fileObj = mediaFile.fileObj as File;
const [id] = await Promise.all([
getBlobSHA(fileObj),
this.api!.persistFiles([], [mediaFile], options),
]);
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path: trimStart(mediaFile.path, '/'),
name: fileObj!.name,
size: fileObj!.size,
id,
file: fileObj,
url,
};
}
deleteFiles(paths: string[], commitMessage: string) {
return this.api!.deleteFiles(paths, commitMessage);
}
traverseCursor(cursor: Cursor, action: string) {
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
const extension = cursor.meta?.get('extension');
if (extension) {
entries = entries.filter(e => filterByExtension(e, extension));
newCursor = newCursor.mergeMeta({ extension });
}
const head = await this.api!.defaultBranchCommitSha();
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { head }) as Promise<string>;
};
const entriesWithData = await entriesByFiles(
entries,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
return {
entries: entriesWithData,
cursor: newCursor,
};
});
}
async loadMediaFile(path: string, id: string, { branch }: { branch: string }) {
const readFile = async (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => {
const content = await this.api!.readFile(path, id, { branch, parseText });
return content;
};
const blob = await getMediaAsBlob(path, id, readFile);
const name = basename(path);
const fileObj = blobToFileObj(name, blob);
return {
id: path,
displayURL: URL.createObjectURL(fileObj),
path,
name,
size: fileObj.size,
file: fileObj,
};
}
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => contentKeyFromBranch(branch)),
);
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const entryId = generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
getBranch(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
return branch;
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(path, id, { branch });
return mediaFile;
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
async deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
async publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
async getDeployPreview(collection: string, slug: string) {
try {
const statuses = await this.api!.getStatuses(collection, slug);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
} catch (e) {
return null;
}
}
}
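
For reference, a hedged sketch of the configuration keys the constructor above reads; the values are placeholders and only keys consumed in this file are shown.

const exampleBitbucketConfig = {
  backend: {
    repo: 'owner/repo',                        // required unless the backend is proxied
    branch: 'main',                            // falls back to 'master'
    api_root: 'https://api.bitbucket.org/2.0', // default shown above
    large_media_url: 'https://bitbucket.org/owner/repo/info/lfs',
    squash_merges: false,
    cms_label_prefix: '',
    preview_context: '',
    auth_type: '',                             // 'implicit' disables token refresh
  },
  base_url: 'https://example.netlify.app',
  site_id: 'example.netlify.app',
  media_folder: 'static/img',
};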

View File

@ -0,0 +1,10 @@
import BitbucketBackend from './implementation';
import API from './API';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendBitbucket = {
BitbucketBackend,
API,
AuthenticationPage,
};
export { BitbucketBackend, API, AuthenticationPage };

View File

@ -0,0 +1,5 @@
declare module 'semaphore' {
export type Semaphore = { take: (f: Function) => void; leave: () => void };
const semaphore: (count: number) => Semaphore;
export default semaphore;
}
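
A hedged usage sketch for the Semaphore shape declared above: each task takes a slot before running and releases it when finished, which is how the backends cap concurrent media downloads. The limit and wrapper name are illustrative.

import semaphore from 'semaphore';

// at most 10 tasks in flight, mirroring MAX_CONCURRENT_DOWNLOADS in the implementation
const sem = semaphore(10);

function withSemaphore<T>(task: () => Promise<T>): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    sem.take(() => {
      task().then(
        value => {
          sem.leave(); // release the slot on success
          resolve(value);
        },
        error => {
          sem.leave(); // release the slot on failure too
          reject(error);
        },
      );
    });
  });
}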

View File

@ -0,0 +1,5 @@
declare module 'what-the-diff' {
export const parse: (
rawDiff: string,
) => { oldPath?: string; newPath?: string; binary: boolean; status: string }[];
}
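
A hedged example for the what-the-diff surface declared above, parsing a small unified diff; the diff text and file paths are made up.

import { parse } from 'what-the-diff';

// a tiny unified diff; real input would come from a git provider API
const rawDiff = [
  'diff --git a/posts/hello.md b/posts/hello.md',
  'index 83db48f..bf269f4 100644',
  '--- a/posts/hello.md',
  '+++ b/posts/hello.md',
  '@@ -1 +1 @@',
  '-hello',
  '+world',
].join('\n');

for (const file of parse(rawDiff)) {
  console.log(file.oldPath, '->', file.newPath, file.binary ? 'binary' : file.status);
}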

View File

@ -0,0 +1,230 @@
import PropTypes from 'prop-types';
import React from 'react';
import styled from '@emotion/styled';
import { partial } from 'lodash';
import {
AuthenticationPage,
buttons,
shadows,
colors,
colorsRaw,
lengths,
zIndex,
} from '../../ui';
const LoginButton = styled.button`
${buttons.button};
${shadows.dropDeep};
${buttons.default};
${buttons.gray};
padding: 0 30px;
display: block;
margin-top: 20px;
margin-left: auto;
`;
const AuthForm = styled.form`
width: 350px;
margin-top: -80px;
`;
const AuthInput = styled.input`
background-color: ${colorsRaw.white};
border-radius: ${lengths.borderRadius};
font-size: 14px;
padding: 10px;
margin-bottom: 15px;
margin-top: 6px;
width: 100%;
position: relative;
z-index: ${zIndex.zIndex1};
border: 1px solid ${colorsRaw.gray};
&:focus {
outline: none;
box-shadow: inset 0 0 0 2px ${colors.active};
border: 1px solid transparent;
}
`;
const ErrorMessage = styled.p`
color: ${colors.errorText};
`;
let component = null;
if (window.netlifyIdentity) {
window.netlifyIdentity.on('login', user => {
component && component.handleIdentityLogin(user);
});
window.netlifyIdentity.on('logout', () => {
component && component.handleIdentityLogout();
});
window.netlifyIdentity.on('error', err => {
component && component.handleIdentityError(err);
});
}
export default class GitGatewayAuthenticationPage extends React.Component {
static authClient;
constructor(props) {
super(props);
component = this;
}
componentDidMount() {
if (!this.loggedIn && window.netlifyIdentity && window.netlifyIdentity.currentUser()) {
this.props.onLogin(window.netlifyIdentity.currentUser());
window.netlifyIdentity.close();
}
}
componentWillUnmount() {
component = null;
}
handleIdentityLogin = user => {
this.props.onLogin(user);
window.netlifyIdentity.close();
};
handleIdentityLogout = () => {
window.netlifyIdentity.open();
};
handleIdentityError = err => {
if (err?.message?.match(/^Failed to load settings from.+\.netlify\/identity$/)) {
window.netlifyIdentity.close();
this.setState({
errors: { identity: this.props.t('auth.errors.identitySettings') },
});
}
};
handleIdentity = () => {
const user = window.netlifyIdentity.currentUser();
if (user) {
this.props.onLogin(user);
} else {
window.netlifyIdentity.open();
}
};
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool.isRequired,
error: PropTypes.node,
config: PropTypes.object.isRequired,
t: PropTypes.func.isRequired,
};
state = { email: '', password: '', errors: {} };
handleChange = (name, e) => {
this.setState({ ...this.state, [name]: e.target.value });
};
handleLogin = async e => {
e.preventDefault();
const { email, password } = this.state;
const { t } = this.props;
const errors = {};
if (!email) {
errors.email = t('auth.errors.email');
}
if (!password) {
errors.password = t('auth.errors.password');
}
if (Object.keys(errors).length > 0) {
this.setState({ errors });
return;
}
try {
const client = await GitGatewayAuthenticationPage.authClient();
const user = await client.login(this.state.email, this.state.password, true);
this.props.onLogin(user);
} catch (error) {
this.setState({
errors: { server: error.description || error.msg || error },
loggingIn: false,
});
}
};
render() {
const { errors } = this.state;
const { error, inProgress, config, t } = this.props;
if (window.netlifyIdentity) {
if (errors.identity) {
return (
<AuthenticationPage
logoUrl={config.logo_url}
siteUrl={config.site_url}
onLogin={this.handleIdentity}
renderPageContent={() => (
<a
href="https://docs.netlify.com/visitor-access/git-gateway/#setup-and-settings"
target="_blank"
rel="noopener noreferrer"
>
{errors.identity}
</a>
)}
t={t}
/>
);
} else {
return (
<AuthenticationPage
logoUrl={config.logo_url}
siteUrl={config.site_url}
onLogin={this.handleIdentity}
renderButtonContent={() => t('auth.loginWithNetlifyIdentity')}
t={t}
/>
);
}
}
return (
<AuthenticationPage
logoUrl={config.logo_url}
siteUrl={config.site_url}
renderPageContent={() => (
<AuthForm onSubmit={this.handleLogin}>
{!error ? null : <ErrorMessage>{error}</ErrorMessage>}
{!errors.server ? null : <ErrorMessage>{String(errors.server)}</ErrorMessage>}
<ErrorMessage>{errors.email || null}</ErrorMessage>
<AuthInput
type="text"
name="email"
placeholder="Email"
value={this.state.email}
onChange={partial(this.handleChange, 'email')}
/>
<ErrorMessage>{errors.password || null}</ErrorMessage>
<AuthInput
type="password"
name="password"
placeholder="Password"
value={this.state.password}
onChange={partial(this.handleChange, 'password')}
/>
<LoginButton disabled={inProgress}>
{inProgress ? t('auth.loggingIn') : t('auth.login')}
</LoginButton>
</AuthForm>
)}
t={t}
/>
);
}
}

View File

@ -0,0 +1,130 @@
import { APIError } from '../../lib/util';
import { API as GithubAPI } from '../github';
import type { Config as GitHubConfig, Diff } from '../github/API';
import type { FetchError } from '../../lib/util';
import type { Octokit } from '@octokit/rest';
type Config = GitHubConfig & {
apiRoot: string;
tokenPromise: () => Promise<string>;
commitAuthor: { name: string };
isLargeMedia: (filename: string) => Promise<boolean>;
};
export default class API extends GithubAPI {
tokenPromise: () => Promise<string>;
commitAuthor: { name: string };
isLargeMedia: (filename: string) => Promise<boolean>;
constructor(config: Config) {
super(config);
this.apiRoot = config.apiRoot;
this.tokenPromise = config.tokenPromise;
this.commitAuthor = config.commitAuthor;
this.isLargeMedia = config.isLargeMedia;
this.repoURL = '';
this.originRepoURL = '';
}
hasWriteAccess() {
return this.getDefaultBranch()
.then(() => true)
.catch((error: FetchError) => {
if (error.status === 401) {
if (error.message === 'Bad credentials') {
throw new APIError(
'Git Gateway Error: Please ask your site administrator to reissue the Git Gateway token.',
error.status,
'Git Gateway',
);
} else {
return false;
}
} else if (
error.status === 404 &&
(error.message === undefined || error.message === 'Unable to locate site configuration')
) {
throw new APIError(
`Git Gateway Error: Please make sure Git Gateway is enabled on your site.`,
error.status,
'Git Gateway',
);
} else {
console.error('Problem fetching repo data from Git Gateway');
throw error;
}
});
}
requestHeaders(headers = {}) {
return this.tokenPromise().then(jwtToken => {
const baseHeader = {
Authorization: `Bearer ${jwtToken}`,
'Content-Type': 'application/json; charset=utf-8',
...headers,
};
return baseHeader;
});
}
handleRequestError(error: FetchError & { msg: string }, responseStatus: number) {
throw new APIError(error.message || error.msg, responseStatus, 'Git Gateway');
}
user() {
return Promise.resolve({ login: '', ...this.commitAuthor });
}
async getHeadReference(head: string) {
if (!this.repoOwner) {
// get the repo owner from the branch url
// this is required for returning the full head reference, e.g. owner:head
// when filtering pull requests based on the head
const branch = await this.getDefaultBranch();
const self = branch._links.self;
const regex = new RegExp('https?://.+?/repos/(.+?)/');
const owner = self.match(regex);
this.repoOwner = owner ? owner[1] : '';
}
return super.getHeadReference(head);
}
commit(message: string, changeTree: { parentSha?: string; sha: string }) {
const commitParams: {
message: string;
tree: string;
parents: string[];
author?: { name: string; date: string };
} = {
message,
tree: changeTree.sha,
parents: changeTree.parentSha ? [changeTree.parentSha] : [],
};
if (this.commitAuthor) {
commitParams.author = {
...this.commitAuthor,
date: new Date().toISOString(),
};
}
return this.request('/git/commits', {
method: 'POST',
body: JSON.stringify(commitParams),
});
}
nextUrlProcessor() {
return (url: string) => url.replace(/^(?:[a-z]+:\/\/.+?\/.+?\/.+?\/)/, `${this.apiRoot}/`);
}
async diffFromFile(file: Octokit.ReposCompareCommitsResponseFilesItem): Promise<Diff> {
const diff = await super.diffFromFile(file);
return {
...diff,
binary: diff.binary || (await this.isLargeMedia(file.filename)),
};
}
}

View File

@ -0,0 +1,30 @@
import { unsentRequest } from '../../lib/util';
import { API as GitlabAPI } from '../gitlab';
import type { Config as GitLabConfig, CommitAuthor } from '../gitlab/API';
import type { ApiRequest } from '../../lib/util';
type Config = GitLabConfig & { tokenPromise: () => Promise<string>; commitAuthor: CommitAuthor };
export default class API extends GitlabAPI {
tokenPromise: () => Promise<string>;
constructor(config: Config) {
super(config);
this.tokenPromise = config.tokenPromise;
this.commitAuthor = config.commitAuthor;
this.repoURL = '';
}
withAuthorizationHeaders = async (req: ApiRequest) => {
const token = await this.tokenPromise();
return unsentRequest.withHeaders(
{
Authorization: `Bearer ${token}`,
},
req,
);
};
hasWriteAccess = () => Promise.resolve(true);
}

View File

@ -0,0 +1,627 @@
import GoTrue from 'gotrue-js';
import jwtDecode from 'jwt-decode';
import { get, pick, intersection } from 'lodash';
import ini from 'ini';
import {
APIError,
unsentRequest,
basename,
entriesByFiles,
parsePointerFile,
getLargeMediaPatternsFromGitAttributesFile,
getPointerFileForMediaFileObj,
getLargeMediaFilteredMediaFiles,
AccessTokenError,
PreviewState,
} from '../../lib/util';
import { GitHubBackend } from '../github';
import { GitLabBackend } from '../gitlab';
import { BitbucketBackend, API as BitBucketAPI } from '../bitbucket';
import GitHubAPI from './GitHubAPI';
import GitLabAPI from './GitLabAPI';
import AuthenticationPage from './AuthenticationPage';
import { getClient } from './netlify-lfs-client';
import type { Client } from './netlify-lfs-client';
import type {
ApiRequest,
AssetProxy,
PersistOptions,
Entry,
Cursor,
Implementation,
DisplayURL,
User,
Credentials,
Config,
ImplementationFile,
DisplayURLObject,
} from '../../lib/util';
const STATUS_PAGE = 'https://www.netlifystatus.com';
const GIT_GATEWAY_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
const GIT_GATEWAY_OPERATIONAL_UNITS = ['Git Gateway'];
type GitGatewayStatus = {
id: string;
name: string;
status: string;
};
type NetlifyIdentity = {
logout: () => void;
currentUser: () => User;
on: (event: string, args: unknown) => void;
init: () => void;
store: { user: unknown; modal: { page: string }; saving: boolean };
};
type AuthClient = {
logout: () => void;
currentUser: () => unknown;
login?(email: string, password: string, remember?: boolean): Promise<unknown>;
clearStore: () => void;
};
declare global {
interface Window {
netlifyIdentity?: NetlifyIdentity;
}
}
const localHosts: Record<string, boolean> = {
localhost: true,
'127.0.0.1': true,
'0.0.0.0': true,
};
const defaults = {
identity: '/.netlify/identity',
gateway: '/.netlify/git',
largeMedia: '/.netlify/large-media',
};
function getEndpoint(endpoint: string, netlifySiteURL: string | null) {
if (
localHosts[document.location.host.split(':').shift() as string] &&
netlifySiteURL &&
endpoint.match(/^\/\.netlify\//)
) {
const parts = [];
if (netlifySiteURL) {
parts.push(netlifySiteURL);
if (!netlifySiteURL.match(/\/$/)) {
parts.push('/');
}
}
parts.push(endpoint.replace(/^\//, ''));
return parts.join('');
}
return endpoint;
}
// wait for identity widget to initialize
// force init on timeout
let initPromise = Promise.resolve() as Promise<unknown>;
if (window.netlifyIdentity) {
let initialized = false;
initPromise = Promise.race([
new Promise<void>(resolve => {
window.netlifyIdentity?.on('init', () => {
initialized = true;
resolve();
});
}),
new Promise(resolve => setTimeout(resolve, 2500)).then(() => {
if (!initialized) {
console.info('Manually initializing identity widget');
window.netlifyIdentity?.init();
}
}),
]);
}
interface NetlifyUser extends Credentials {
jwt: () => Promise<string>;
email: string;
user_metadata: { full_name: string; avatar_url: string };
}
async function apiGet(path: string) {
const apiRoot = 'https://api.netlify.com/api/v1/sites';
const response = await fetch(`${apiRoot}/${path}`).then(res => res.json());
return response;
}
export default class GitGateway implements Implementation {
config: Config;
api?: GitHubAPI | GitLabAPI | BitBucketAPI;
branch: string;
squashMerges: boolean;
cmsLabelPrefix: string;
mediaFolder: string;
transformImages: boolean;
gatewayUrl: string;
netlifyLargeMediaURL: string;
backendType: string | null;
apiUrl: string;
authClient?: AuthClient;
backend: GitHubBackend | GitLabBackend | BitbucketBackend | null;
acceptRoles?: string[];
tokenPromise?: () => Promise<string>;
_largeMediaClientPromise?: Promise<Client>;
options: {
proxied: boolean;
API: GitHubAPI | GitLabAPI | BitBucketAPI | null;
initialWorkflowStatus: string;
};
constructor(config: Config, options = {}) {
this.options = {
proxied: true,
API: null,
initialWorkflowStatus: '',
...options,
};
this.config = config;
this.branch = config.backend.branch?.trim() || 'master';
this.squashMerges = config.backend.squash_merges || false;
this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
this.mediaFolder = config.media_folder;
const { use_large_media_transforms_in_media_library: transformImages = true } = config.backend;
this.transformImages = transformImages;
const netlifySiteURL = localStorage.getItem('netlifySiteURL');
this.apiUrl = getEndpoint(config.backend.identity_url || defaults.identity, netlifySiteURL);
this.gatewayUrl = getEndpoint(config.backend.gateway_url || defaults.gateway, netlifySiteURL);
this.netlifyLargeMediaURL = getEndpoint(
config.backend.large_media_url || defaults.largeMedia,
netlifySiteURL,
);
const backendTypeRegex = /\/(github|gitlab|bitbucket)\/?$/;
const backendTypeMatches = this.gatewayUrl.match(backendTypeRegex);
if (backendTypeMatches) {
this.backendType = backendTypeMatches[1];
this.gatewayUrl = this.gatewayUrl.replace(backendTypeRegex, '');
} else {
this.backendType = null;
}
this.backend = null;
AuthenticationPage.authClient = () => this.getAuthClient();
}
isGitBackend() {
return true;
}
async status() {
const api = await fetch(GIT_GATEWAY_STATUS_ENDPOINT)
.then(res => res.json())
.then(res => {
return res['components']
.filter((statusComponent: GitGatewayStatus) =>
GIT_GATEWAY_OPERATIONAL_UNITS.includes(statusComponent.name),
)
.every((statusComponent: GitGatewayStatus) => statusComponent.status === 'operational');
})
.catch(e => {
console.warn('Failed getting Git Gateway status', e);
return true;
});
let auth = false;
// no need to check auth if api is down
if (api) {
auth =
(await this.tokenPromise?.()
.then(token => !!token)
.catch(e => {
console.warn('Failed getting Identity token', e);
return false;
})) || false;
}
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
}
async getAuthClient() {
if (this.authClient) {
return this.authClient;
}
await initPromise;
if (window.netlifyIdentity) {
this.authClient = {
logout: () => window.netlifyIdentity?.logout(),
currentUser: () => window.netlifyIdentity?.currentUser(),
clearStore: () => {
const store = window.netlifyIdentity?.store;
if (store) {
store.user = null;
store.modal.page = 'login';
store.saving = false;
}
},
};
} else {
const goTrue = new GoTrue({ APIUrl: this.apiUrl });
this.authClient = {
logout: () => {
const user = goTrue.currentUser();
if (user) {
return user.logout();
}
},
currentUser: () => goTrue.currentUser(),
login: goTrue.login.bind(goTrue),
clearStore: () => undefined,
};
}
return this.authClient;
}
requestFunction = (req: ApiRequest) =>
this.tokenPromise!()
.then(
token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
)
.then(unsentRequest.performRequest);
authenticate(credentials: Credentials) {
const user = credentials as NetlifyUser;
this.tokenPromise = async () => {
try {
const func = user.jwt.bind(user);
const token = await func();
return token;
} catch (error: any) {
throw new AccessTokenError(`Failed getting access token: ${error.message}`);
}
};
return this.tokenPromise!().then(async token => {
if (!this.backendType) {
const {
github_enabled: githubEnabled,
gitlab_enabled: gitlabEnabled,
bitbucket_enabled: bitbucketEnabled,
roles,
} = await unsentRequest
.fetchWithTimeout(`${this.gatewayUrl}/settings`, {
headers: { Authorization: `Bearer ${token}` },
})
.then(async res => {
const contentType = res.headers.get('Content-Type') || '';
if (!contentType.includes('application/json') && !contentType.includes('text/json')) {
throw new APIError(
`Your Git Gateway backend is not returning valid settings. Please make sure it is enabled.`,
res.status,
'Git Gateway',
);
}
const body = await res.json();
if (!res.ok) {
throw new APIError(
`Git Gateway Error: ${body.message ? body.message : body}`,
res.status,
'Git Gateway',
);
}
return body;
});
this.acceptRoles = roles;
if (githubEnabled) {
this.backendType = 'github';
} else if (gitlabEnabled) {
this.backendType = 'gitlab';
} else if (bitbucketEnabled) {
this.backendType = 'bitbucket';
}
}
if (this.acceptRoles && this.acceptRoles.length > 0) {
const userRoles = get(jwtDecode(token), 'app_metadata.roles', []);
const validRole = intersection(userRoles, this.acceptRoles).length > 0;
if (!validRole) {
throw new Error("You don't have sufficient permissions to access Netlify CMS");
}
}
const userData = {
name: user.user_metadata.full_name || user.email.split('@').shift()!,
email: user.email,
avatar_url: user.user_metadata.avatar_url,
metadata: user.user_metadata,
};
const apiConfig = {
apiRoot: `${this.gatewayUrl}/${this.backendType}`,
branch: this.branch,
tokenPromise: this.tokenPromise!,
commitAuthor: pick(userData, ['name', 'email']),
isLargeMedia: (filename: string) => this.isLargeMediaFile(filename),
squashMerges: this.squashMerges,
cmsLabelPrefix: this.cmsLabelPrefix,
initialWorkflowStatus: this.options.initialWorkflowStatus,
};
if (this.backendType === 'github') {
this.api = new GitHubAPI(apiConfig);
this.backend = new GitHubBackend(this.config, { ...this.options, API: this.api });
} else if (this.backendType === 'gitlab') {
this.api = new GitLabAPI(apiConfig);
this.backend = new GitLabBackend(this.config, { ...this.options, API: this.api });
} else if (this.backendType === 'bitbucket') {
this.api = new BitBucketAPI({
...apiConfig,
requestFunction: this.requestFunction,
hasWriteAccess: async () => true,
});
this.backend = new BitbucketBackend(this.config, { ...this.options, API: this.api });
}
if (!(await this.api!.hasWriteAccess())) {
throw new Error("You don't have sufficient permissions to access Netlify CMS");
}
return { name: userData.name, login: userData.email } as User;
});
}
async restoreUser() {
const client = await this.getAuthClient();
const user = client.currentUser();
if (!user) return Promise.reject();
return this.authenticate(user as Credentials);
}
authComponent() {
return AuthenticationPage;
}
async logout() {
const client = await this.getAuthClient();
try {
client.logout();
} catch (e) {
// due to a bug in the identity widget (gotrue-js actually) the store is not reset if logout fails
// TODO: remove after https://github.com/netlify/gotrue-js/pull/83 is merged
client.clearStore();
}
}
getToken() {
return this.tokenPromise!();
}
async entriesByFolder(folder: string, extension: string, depth: number) {
return this.backend!.entriesByFolder(folder, extension, depth);
}
allEntriesByFolder(folder: string, extension: string, depth: number) {
return this.backend!.allEntriesByFolder(folder, extension, depth);
}
entriesByFiles(files: ImplementationFile[]) {
return this.backend!.entriesByFiles(files);
}
getEntry(path: string) {
return this.backend!.getEntry(path);
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
return this.backend!.unpublishedEntryDataFile(collection, slug, path, id);
}
async isLargeMediaFile(path: string) {
const client = await this.getLargeMediaClient();
return client.enabled && client.matchPath(path);
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const isLargeMedia = await this.isLargeMediaFile(path);
if (isLargeMedia) {
const branch = this.backend!.getBranch(collection, slug);
const { url, blob } = await this.getLargeMediaDisplayURL({ path, id }, branch);
const name = basename(path);
return {
id,
name,
path,
url,
displayURL: url,
file: new File([blob], name),
size: blob.size,
};
} else {
return this.backend!.unpublishedEntryMediaFile(collection, slug, path, id);
}
}
getMedia(mediaFolder = this.mediaFolder) {
return this.backend!.getMedia(mediaFolder);
}
// this method memoizes this._getLargeMediaClient so that there can
// only be one client at a time
getLargeMediaClient() {
if (this._largeMediaClientPromise) {
return this._largeMediaClientPromise;
}
this._largeMediaClientPromise = this._getLargeMediaClient();
return this._largeMediaClientPromise;
}
_getLargeMediaClient() {
const netlifyLargeMediaEnabledPromise = this.api!.readFile('.lfsconfig')
.then(config => ini.decode<{ lfs: { url: string } }>(config as string))
.then(({ lfs: { url } }) => new URL(url))
.then(lfsURL => ({
enabled: lfsURL.hostname.endsWith('netlify.com') || lfsURL.hostname.endsWith('netlify.app'),
}))
.catch((err: Error) => ({ enabled: false, err }));
const lfsPatternsPromise = this.api!.readFile('.gitattributes')
.then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
.then((patterns: string[]) => ({ err: null, patterns }))
.catch((err: Error) => {
if (err.message.includes('404')) {
console.info('This 404 was expected and handled appropriately.');
return { err: null, patterns: [] as string[] };
} else {
return { err, patterns: [] as string[] };
}
});
return Promise.all([netlifyLargeMediaEnabledPromise, lfsPatternsPromise]).then(
([{ enabled: maybeEnabled }, { patterns, err: patternsErr }]) => {
const enabled = maybeEnabled && !patternsErr;
// We expect LFS patterns to exist when the .lfsconfig states
// that we're using Netlify Large Media
if (maybeEnabled && patternsErr) {
console.error(patternsErr);
}
return getClient({
enabled,
rootURL: this.netlifyLargeMediaURL,
makeAuthorizedRequest: this.requestFunction,
patterns,
transformImages: this.transformImages ? { nf_resize: 'fit', w: 560, h: 320 } : false,
});
},
);
}
async getLargeMediaDisplayURL(
{ path, id }: { path: string; id: string | null },
branch = this.branch,
) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
const items = await entriesByFiles(
[{ path, id }],
readFile,
this.api!.readFileMetadata.bind(this.api),
'Git-Gateway',
);
const entry = items[0];
const pointerFile = parsePointerFile(entry.data);
if (!pointerFile.sha) {
console.warn(`Failed parsing pointer file ${path}`);
return { url: path, blob: new Blob() };
}
const client = await this.getLargeMediaClient();
const { url, blob } = await client.getDownloadURL(pointerFile);
return { url, blob };
}
async getMediaDisplayURL(displayURL: DisplayURL) {
const { path, id } = displayURL as DisplayURLObject;
const isLargeMedia = await this.isLargeMediaFile(path);
if (isLargeMedia) {
const { url } = await this.getLargeMediaDisplayURL({ path, id });
return url;
}
if (typeof displayURL === 'string') {
return displayURL;
}
const url = await this.backend!.getMediaDisplayURL(displayURL);
return url;
}
async getMediaFile(path: string) {
const isLargeMedia = await this.isLargeMediaFile(path);
if (isLargeMedia) {
const { url, blob } = await this.getLargeMediaDisplayURL({ path, id: null });
const name = basename(path);
return {
id: url,
name,
path,
url,
displayURL: url,
file: new File([blob], name),
size: blob.size,
};
}
return this.backend!.getMediaFile(path);
}
async persistEntry(entry: Entry, options: PersistOptions) {
const client = await this.getLargeMediaClient();
if (client.enabled) {
const assets = await getLargeMediaFilteredMediaFiles(client, entry.assets);
return this.backend!.persistEntry({ ...entry, assets }, options);
} else {
return this.backend!.persistEntry(entry, options);
}
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const { fileObj, path } = mediaFile;
const displayURL = URL.createObjectURL(fileObj as Blob);
const client = await this.getLargeMediaClient();
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
const isLargeMedia = await this.isLargeMediaFile(fixedPath);
if (isLargeMedia) {
const persistMediaArgument = await getPointerFileForMediaFileObj(
client,
fileObj as File,
path,
);
return {
...(await this.backend!.persistMedia(persistMediaArgument, options)),
displayURL,
};
}
return await this.backend!.persistMedia(mediaFile, options);
}
deleteFiles(paths: string[], commitMessage: string) {
return this.backend!.deleteFiles(paths, commitMessage);
}
async getDeployPreview(collection: string, slug: string) {
let preview = await this.backend!.getDeployPreview(collection, slug);
if (!preview) {
try {
// if the commit doesn't have a status, try to use Netlify API directly
// this is useful when builds are queued up in Netlify and don't have a commit status yet
// and only works with public logs at the moment
// TODO: get Netlify API Token and use it to access private logs
const siteId = new URL(localStorage.getItem('netlifySiteURL') || '').hostname;
const site = await apiGet(siteId);
const deploys: { state: string; commit_ref: string; deploy_url: string }[] = await apiGet(
`${site.id}/deploys?per_page=100`,
);
if (deploys.length > 0) {
const ref = await this.api!.getUnpublishedEntrySha(collection, slug);
const deploy = deploys.find(d => d.commit_ref === ref);
if (deploy) {
preview = {
status: deploy.state === 'ready' ? PreviewState.Success : PreviewState.Other,
url: deploy.deploy_url,
};
}
}
// eslint-disable-next-line no-empty
} catch (e) {}
}
return preview;
}
unpublishedEntries() {
return this.backend!.unpublishedEntries();
}
unpublishedEntry({ id, collection, slug }: { id?: string; collection?: string; slug?: string }) {
return this.backend!.unpublishedEntry({ id, collection, slug });
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
return this.backend!.updateUnpublishedEntryStatus(collection, slug, newStatus);
}
deleteUnpublishedEntry(collection: string, slug: string) {
return this.backend!.deleteUnpublishedEntry(collection, slug);
}
publishUnpublishedEntry(collection: string, slug: string) {
return this.backend!.publishUnpublishedEntry(collection, slug);
}
traverseCursor(cursor: Cursor, action: string) {
return this.backend!.traverseCursor!(cursor, action);
}
}

View File

@ -0,0 +1,8 @@
import GitGatewayBackend from './implementation';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendGitGateway = {
GitGatewayBackend,
AuthenticationPage,
};
export { GitGatewayBackend, AuthenticationPage };

View File

@ -0,0 +1,181 @@
import { flow, fromPairs, map } from 'lodash/fp';
import { isPlainObject, isEmpty } from 'lodash';
import minimatch from 'minimatch';
import { unsentRequest } from '../../lib/util';
import type { ApiRequest, PointerFile } from '../../lib/util';
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
type ImageTransformations = { nf_resize: string; w: number; h: number };
type ClientConfig = {
rootURL: string;
makeAuthorizedRequest: MakeAuthorizedRequest;
patterns: string[];
enabled: boolean;
transformImages: ImageTransformations | boolean;
};
export function matchPath({ patterns }: ClientConfig, path: string) {
return patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
}
//
// API interactions
const defaultContentHeaders = {
Accept: 'application/vnd.git-lfs+json',
['Content-Type']: 'application/vnd.git-lfs+json',
};
async function resourceExists(
{ rootURL, makeAuthorizedRequest }: ClientConfig,
{ sha, size }: PointerFile,
) {
const response = await makeAuthorizedRequest({
url: `${rootURL}/verify`,
method: 'POST',
headers: defaultContentHeaders,
body: JSON.stringify({ oid: sha, size }),
});
if (response.ok) {
return true;
}
if (response.status === 404) {
return false;
}
// TODO: what kind of error to throw here? APIError doesn't seem
// to fit
}
function getTransformationsParams(t: boolean | ImageTransformations) {
if (isPlainObject(t) && !isEmpty(t)) {
const { nf_resize: resize, w, h } = t as ImageTransformations;
return `?nf_resize=${resize}&w=${w}&h=${h}`;
}
return '';
}
async function getDownloadURL(
{ rootURL, transformImages: t, makeAuthorizedRequest }: ClientConfig,
{ sha }: PointerFile,
) {
try {
const transformation = getTransformationsParams(t);
const transformedPromise = makeAuthorizedRequest(`${rootURL}/origin/${sha}${transformation}`);
const [transformed, original] = await Promise.all([
transformedPromise,
// if transformation is defined, we need to load the original so we have the correct metadata
transformation ? makeAuthorizedRequest(`${rootURL}/origin/${sha}`) : transformedPromise,
]);
if (!transformed.ok) {
const error = await transformed.json();
throw new Error(
`Failed getting large media for sha '${sha}': '${error.code} - ${error.msg}'`,
);
}
const transformedBlob = await transformed.blob();
const url = URL.createObjectURL(transformedBlob);
return { url, blob: transformation ? await original.blob() : transformedBlob };
} catch (error) {
console.error(error);
return { url: '', blob: new Blob() };
}
}
function uploadOperation(objects: PointerFile[]) {
return {
operation: 'upload',
transfers: ['basic'],
objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
};
}
async function getResourceUploadURLs(
{
rootURL,
makeAuthorizedRequest,
}: { rootURL: string; makeAuthorizedRequest: MakeAuthorizedRequest },
pointerFiles: PointerFile[],
) {
const response = await makeAuthorizedRequest({
url: `${rootURL}/objects/batch`,
method: 'POST',
headers: defaultContentHeaders,
body: JSON.stringify(uploadOperation(pointerFiles)),
});
const { objects } = await response.json();
const uploadUrls = objects.map(
(object: { error?: { message: string }; actions: { upload: { href: string } } }) => {
if (object.error) {
throw new Error(object.error.message);
}
return object.actions.upload.href;
},
);
return uploadUrls;
}
function uploadBlob(uploadURL: string, blob: Blob) {
return unsentRequest.fetchWithTimeout(uploadURL, {
method: 'PUT',
body: blob,
});
}
async function uploadResource(
clientConfig: ClientConfig,
{ sha, size }: PointerFile,
resource: Blob,
) {
const existingFile = await resourceExists(clientConfig, { sha, size });
if (existingFile) {
return sha;
}
const [uploadURL] = await getResourceUploadURLs(clientConfig, [{ sha, size }]);
await uploadBlob(uploadURL, resource);
return sha;
}
//
// Create Large Media client
function configureFn(config: ClientConfig, fn: Function) {
return (...args: unknown[]) => fn(config, ...args);
}
const clientFns: Record<string, Function> = {
resourceExists,
getResourceUploadURLs,
getDownloadURL,
uploadResource,
matchPath,
};
export type Client = {
resourceExists: (pointer: PointerFile) => Promise<boolean | undefined>;
getResourceUploadURLs: (objects: PointerFile[]) => Promise<string[]>;
getDownloadURL: (pointer: PointerFile) => Promise<{ url: string; blob: Blob }>;
uploadResource: (pointer: PointerFile, blob: Blob) => Promise<string>;
matchPath: (path: string) => boolean;
patterns: string[];
enabled: boolean;
};
export function getClient(clientConfig: ClientConfig) {
return flow([
Object.keys,
map((key: string) => [key, configureFn(clientConfig, clientFns[key])]),
fromPairs,
configuredFns => ({
...configuredFns,
patterns: clientConfig.patterns,
enabled: clientConfig.enabled,
}),
])(clientFns);
}

View File

@ -0,0 +1,4 @@
declare module 'ini' {
const ini: { decode: <T>(ini: string) => T };
export default ini;
}
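
A hedged example for the ini surface declared above, decoding an .lfsconfig snippet the way _getLargeMediaClient does; the URL is a placeholder.

import ini from 'ini';

const lfsconfig = ['[lfs]', 'url = https://example.netlify.app/.netlify/large-media'].join('\n');

const { lfs } = ini.decode<{ lfs: { url: string } }>(lfsconfig);
console.log(new URL(lfs.url).hostname); // "example.netlify.app"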

File diff suppressed because it is too large

View File

@ -0,0 +1,152 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { AuthenticationPage, Icon } from '../../ui';
import { NetlifyAuthenticator } from '../../lib/auth';
const LoginButtonIcon = styled(Icon)`
margin-right: 18px;
`;
const ForkApprovalContainer = styled.div`
display: flex;
flex-flow: column nowrap;
justify-content: space-around;
flex-grow: 0.2;
`;
const ForkButtonsContainer = styled.div`
display: flex;
flex-flow: column nowrap;
justify-content: space-around;
align-items: center;
`;
export default class GitHubAuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
t: PropTypes.func.isRequired,
};
state = {};
getPermissionToFork = () => {
return new Promise((resolve, reject) => {
this.setState({
requestingFork: true,
approveFork: () => {
this.setState({ requestingFork: false });
resolve();
},
refuseFork: () => {
this.setState({ requestingFork: false });
reject();
},
});
});
};
loginWithOpenAuthoring(data) {
const { backend } = this.props;
this.setState({ findingFork: true });
return backend
.authenticateWithFork({ userData: data, getPermissionToFork: this.getPermissionToFork })
.catch(err => {
this.setState({ findingFork: false });
console.error(err);
throw err;
});
}
handleLogin = e => {
e.preventDefault();
const cfg = {
base_url: this.props.base_url,
site_id:
document.location.host.split(':')[0] === 'localhost'
? 'cms.netlify.com'
: this.props.siteId,
auth_endpoint: this.props.authEndpoint,
};
const auth = new NetlifyAuthenticator(cfg);
const { open_authoring: openAuthoring = false, auth_scope: authScope = '' } =
this.props.config.backend;
const scope = authScope || (openAuthoring ? 'public_repo' : 'repo');
auth.authenticate({ provider: 'github', scope }, (err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
if (openAuthoring) {
return this.loginWithOpenAuthoring(data).then(() => this.props.onLogin(data));
}
this.props.onLogin(data);
});
};
renderLoginButton = () => {
const { inProgress, t } = this.props;
return inProgress || this.state.findingFork ? (
t('auth.loggingIn')
) : (
<React.Fragment>
<LoginButtonIcon type="github" />
{t('auth.loginWithGitHub')}
</React.Fragment>
);
};
getAuthenticationPageRenderArgs() {
const { requestingFork } = this.state;
if (requestingFork) {
const { approveFork, refuseFork } = this.state;
return {
renderPageContent: ({ LoginButton, TextButton, showAbortButton }) => (
<ForkApprovalContainer>
<p>
Open Authoring is enabled: we need to use a fork on your GitHub account. (If a fork
already exists, we&#39;ll use that.)
</p>
<ForkButtonsContainer>
<LoginButton onClick={approveFork}>Fork the repo</LoginButton>
{showAbortButton && (
<TextButton onClick={refuseFork}>Don&#39;t fork the repo</TextButton>
)}
</ForkButtonsContainer>
</ForkApprovalContainer>
),
};
}
return {
renderButtonContent: this.renderLoginButton,
};
}
render() {
const { inProgress, config, t } = this.props;
const { loginError, requestingFork, findingFork } = this.state;
return (
<AuthenticationPage
onLogin={this.handleLogin}
loginDisabled={inProgress || findingFork || requestingFork}
loginErrorMessage={loginError}
logoUrl={config.logo_url}
siteUrl={config.site_url}
{...this.getAuthenticationPageRenderArgs()}
t={t}
/>
);
}
}

View File

@ -0,0 +1,709 @@
import { ApolloClient } from 'apollo-client';
import {
InMemoryCache,
defaultDataIdFromObject,
IntrospectionFragmentMatcher,
} from 'apollo-cache-inmemory';
import { createHttpLink } from 'apollo-link-http';
import { setContext } from 'apollo-link-context';
import { trim, trimStart } from 'lodash';
import {
APIError,
readFile,
localForage,
DEFAULT_PR_BODY,
branchFromContentKey,
CMS_BRANCH_PREFIX,
throwOnConflictingBranches,
} from '../../lib/util';
import introspectionQueryResultData from './fragmentTypes';
import API, { API_NAME, PullRequestState, MOCK_PULL_REQUEST } from './API';
import * as queries from './queries';
import * as mutations from './mutations';
import type { Config, BlobArgs } from './API';
import type { NormalizedCacheObject } from 'apollo-cache-inmemory';
import type { QueryOptions, MutationOptions, OperationVariables } from 'apollo-client';
import type { GraphQLError } from 'graphql';
import type { Octokit } from '@octokit/rest';
const NO_CACHE = 'no-cache';
const CACHE_FIRST = 'cache-first';
const fragmentMatcher = new IntrospectionFragmentMatcher({
introspectionQueryResultData,
});
interface TreeEntry {
object?: {
entries: TreeEntry[];
};
type: 'blob' | 'tree';
name: string;
sha: string;
blob?: {
size: number;
};
}
interface TreeFile {
path: string;
id: string;
size: number;
type: string;
name: string;
}
type GraphQLPullRequest = {
id: string;
baseRefName: string;
baseRefOid: string;
body: string;
headRefName: string;
headRefOid: string;
number: number;
state: string;
title: string;
mergedAt: string | null;
updatedAt: string | null;
labels: { nodes: { name: string }[] };
repository: {
id: string;
isFork: boolean;
};
user: GraphQLPullsListResponseItemUser;
};
type GraphQLPullsListResponseItemUser = {
avatar_url: string;
login: string;
url: string;
name: string;
};
function transformPullRequest(pr: GraphQLPullRequest) {
return {
...pr,
labels: pr.labels.nodes,
head: { ref: pr.headRefName, sha: pr.headRefOid, repo: { fork: pr.repository.isFork } },
base: { ref: pr.baseRefName, sha: pr.baseRefOid },
};
}
type Error = GraphQLError & { type: string };
export default class GraphQLAPI extends API {
client: ApolloClient<NormalizedCacheObject>;
constructor(config: Config) {
super(config);
this.client = this.getApolloClient();
}
getApolloClient() {
const authLink = setContext((_, { headers }) => {
return {
headers: {
'Content-Type': 'application/json; charset=utf-8',
...headers,
authorization: this.token ? `token ${this.token}` : '',
},
};
});
const httpLink = createHttpLink({ uri: `${this.apiRoot}/graphql` });
return new ApolloClient({
link: authLink.concat(httpLink),
cache: new InMemoryCache({ fragmentMatcher }),
defaultOptions: {
watchQuery: {
fetchPolicy: NO_CACHE,
errorPolicy: 'ignore',
},
query: {
fetchPolicy: NO_CACHE,
errorPolicy: 'all',
},
},
});
}
reset() {
return this.client.resetStore();
}
async getRepository(owner: string, name: string) {
const { data } = await this.query({
query: queries.repository,
variables: { owner, name },
fetchPolicy: CACHE_FIRST, // repository id doesn't change
});
return data.repository;
}
query(options: QueryOptions<OperationVariables>) {
return this.client.query(options).catch(error => {
throw new APIError(error.message, 500, 'GitHub');
});
}
async mutate(options: MutationOptions<OperationVariables>) {
try {
const result = await this.client.mutate(options);
return result;
} catch (error: any) {
const errors = error.graphQLErrors;
if (Array.isArray(errors) && errors.some(e => e.message === 'Ref cannot be created.')) {
const refName = options?.variables?.createRefInput?.name || '';
const branchName = trimStart(refName, 'refs/heads/');
if (branchName) {
await throwOnConflictingBranches(branchName, name => this.getBranch(name), API_NAME);
}
} else if (
Array.isArray(errors) &&
errors.some(e =>
new RegExp(
`A ref named "refs/heads/${CMS_BRANCH_PREFIX}/.+?" already exists in the repository.`,
).test(e.message),
)
) {
const refName = options?.variables?.createRefInput?.name || '';
const sha = options?.variables?.createRefInput?.oid || '';
const branchName = trimStart(refName, 'refs/heads/');
if (branchName && branchName.startsWith(`${CMS_BRANCH_PREFIX}/`) && sha) {
try {
// this can happen if the branch wasn't deleted when the PR was merged
// we back up the existing branch just in case and re-run the mutation
await this.backupBranch(branchName);
await this.deleteBranch(branchName);
const result = await this.client.mutate(options);
return result;
} catch (e) {
console.error(e);
}
}
}
throw new APIError(error.message, 500, 'GitHub');
}
}
async hasWriteAccess() {
const { repoOwner: owner, repoName: name } = this;
try {
const { data } = await this.query({
query: queries.repoPermission,
variables: { owner, name },
fetchPolicy: CACHE_FIRST, // we can assume permission doesn't change often
});
// https://developer.github.com/v4/enum/repositorypermission/
const { viewerPermission } = data.repository;
return ['ADMIN', 'MAINTAIN', 'WRITE'].includes(viewerPermission);
} catch (error) {
console.error('Problem fetching repo data from GitHub');
throw error;
}
}
async user() {
const { data } = await this.query({
query: queries.user,
fetchPolicy: CACHE_FIRST, // we can assume user details don't change often
});
return data.viewer;
}
async retrieveBlobObject(owner: string, name: string, expression: string, options = {}) {
const { data } = await this.query({
query: queries.blob,
variables: { owner, name, expression },
...options,
});
// https://developer.github.com/v4/object/blob/
if (data.repository.object) {
const { is_binary: isBinary, text } = data.repository.object;
return { isNull: false, isBinary, text };
} else {
return { isNull: true };
}
}
getOwnerAndNameFromRepoUrl(repoURL: string) {
let { repoOwner: owner, repoName: name } = this;
if (repoURL === this.originRepoURL) {
({ originRepoOwner: owner, originRepoName: name } = this);
}
return { owner, name };
}
async readFile(
path: string,
sha?: string | null,
{
branch = this.branch,
repoURL = this.repoURL,
parseText = true,
}: {
branch?: string;
repoURL?: string;
parseText?: boolean;
} = {},
) {
if (!sha) {
sha = await this.getFileSha(path, { repoURL, branch });
}
const fetchContent = () => this.fetchBlobContent({ sha: sha as string, repoURL, parseText });
const content = await readFile(sha, fetchContent, localForage, parseText);
return content;
}
async fetchBlobContent({ sha, repoURL, parseText }: BlobArgs) {
if (!parseText) {
return super.fetchBlobContent({ sha, repoURL, parseText });
}
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { isNull, isBinary, text } = await this.retrieveBlobObject(
owner,
name,
sha,
{ fetchPolicy: CACHE_FIRST }, // blob sha is derived from file content
);
if (isNull) {
throw new APIError('Not Found', 404, 'GitHub');
} else if (!isBinary) {
return text;
} else {
return super.fetchBlobContent({ sha, repoURL, parseText });
}
}
async getPullRequestAuthor(pullRequest: Octokit.PullsListResponseItem) {
const user = pullRequest.user as unknown as GraphQLPullsListResponseItemUser;
return user?.name || user?.login;
}
async getPullRequests(
head: string | undefined,
state: PullRequestState,
predicate: (pr: Octokit.PullsListResponseItem) => boolean,
) {
const { originRepoOwner: owner, originRepoName: name } = this;
let states;
if (state === PullRequestState.Open) {
states = ['OPEN'];
} else if (state === PullRequestState.Closed) {
states = ['CLOSED', 'MERGED'];
} else {
states = ['OPEN', 'CLOSED', 'MERGED'];
}
const { data } = await this.query({
query: queries.pullRequests,
variables: {
owner,
name,
...(head ? { head } : {}),
states,
},
});
const {
pullRequests,
}: {
pullRequests: {
nodes: GraphQLPullRequest[];
};
} = data.repository;
const mapped = pullRequests.nodes.map(transformPullRequest);
return (mapped as unknown as Octokit.PullsListResponseItem[]).filter(
pr => pr.head.ref.startsWith(`${CMS_BRANCH_PREFIX}/`) && predicate(pr),
);
}
async getOpenAuthoringBranches() {
const { repoOwner: owner, repoName: name } = this;
const { data } = await this.query({
query: queries.openAuthoringBranches,
variables: {
owner,
name,
refPrefix: `refs/heads/cms/${this.repo}/`,
},
});
return data.repository.refs.nodes.map(({ name, prefix }: { name: string; prefix: string }) => ({
ref: `${prefix}${name}`,
}));
}
async getStatuses(collectionName: string, slug: string) {
const contentKey = this.generateContentKey(collectionName, slug);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const sha = pullRequest.head.sha;
const { originRepoOwner: owner, originRepoName: name } = this;
const { data } = await this.query({ query: queries.statuses, variables: { owner, name, sha } });
if (data.repository.object) {
const { status } = data.repository.object;
const { contexts } = status || { contexts: [] };
return contexts;
} else {
return [];
}
}
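// Recursively flattens nested tree entries into a flat list of blobs with their full paths and sizes.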
getAllFiles(entries: TreeEntry[], path: string) {
const allFiles: TreeFile[] = entries.reduce((acc, item) => {
if (item.type === 'tree') {
const entries = item.object?.entries || [];
return [...acc, ...this.getAllFiles(entries, `${path}/${item.name}`)];
} else if (item.type === 'blob') {
return [
...acc,
{
name: item.name,
type: item.type,
id: item.sha,
path: `${path}/${item.name}`,
size: item.blob ? item.blob.size : 0,
},
];
}
return acc;
}, [] as TreeFile[]);
return allFiles;
}
async listFiles(path: string, { repoURL = this.repoURL, branch = this.branch, depth = 1 } = {}) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const folder = trim(path, '/');
const { data } = await this.query({
query: queries.files(depth),
variables: { owner, name, expression: `${branch}:${folder}` },
});
if (data.repository.object) {
const allFiles = this.getAllFiles(data.repository.object.entries, folder);
return allFiles;
} else {
return [];
}
}
getBranchQualifiedName(branch: string) {
return `refs/heads/${branch}`;
}
getBranchQuery(branch: string, owner: string, name: string) {
return {
query: queries.branch,
variables: {
owner,
name,
qualifiedName: this.getBranchQualifiedName(branch),
},
};
}
async getDefaultBranch() {
const { data } = await this.query({
...this.getBranchQuery(this.branch, this.originRepoOwner, this.originRepoName),
});
return data.repository.branch;
}
async getBranch(branch: string) {
const { data } = await this.query({
...this.getBranchQuery(branch, this.repoOwner, this.repoName),
fetchPolicy: CACHE_FIRST,
});
if (!data.repository.branch) {
throw new APIError('Branch not found', 404, API_NAME);
}
return data.repository.branch;
}
async patchRef(type: string, name: string, sha: string, opts: { force?: boolean } = {}) {
if (type !== 'heads') {
return super.patchRef(type, name, sha, opts);
}
const force = opts.force || false;
const branch = await this.getBranch(name);
const { data } = await this.mutate({
mutation: mutations.updateBranch,
variables: {
input: { oid: sha, refId: branch.id, force },
},
});
return data!.updateRef.branch;
}
async deleteBranch(branchName: string) {
const branch = await this.getBranch(branchName);
const { data } = await this.mutate({
mutation: mutations.deleteBranch,
variables: {
deleteRefInput: { refId: branch.id },
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (store: any) => store.data.delete(defaultDataIdFromObject(branch)),
});
return data!.deleteRef;
}
getPullRequestQuery(number: number) {
const { originRepoOwner: owner, originRepoName: name } = this;
return {
query: queries.pullRequest,
variables: { owner, name, number },
};
}
async getPullRequest(number: number) {
const { data } = await this.query({
...this.getPullRequestQuery(number),
fetchPolicy: CACHE_FIRST,
});
// https://developer.github.com/v4/enum/pullrequeststate/
// GraphQL state: [CLOSED, MERGED, OPEN]
// REST API state: [closed, open]
const state =
data.repository.pullRequest.state === 'OPEN'
? PullRequestState.Open
: PullRequestState.Closed;
return {
...data.repository.pullRequest,
state,
};
}
getPullRequestAndBranchQuery(branch: string, number: number) {
const { repoOwner: owner, repoName: name } = this;
const { originRepoOwner, originRepoName } = this;
return {
query: queries.pullRequestAndBranch,
variables: {
owner,
name,
originRepoOwner,
originRepoName,
number,
qualifiedName: this.getBranchQualifiedName(branch),
},
};
}
async getPullRequestAndBranch(branch: string, number: number) {
const { data } = await this.query({
...this.getPullRequestAndBranchQuery(branch, number),
fetchPolicy: CACHE_FIRST,
});
const { repository, origin } = data;
return { branch: repository.branch, pullRequest: origin.pullRequest };
}
async openPR(number: number) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
mutation: mutations.reopenPullRequest,
variables: {
reopenPullRequestInput: { pullRequestId: pullRequest.id },
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult!.reopenPullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
...this.getPullRequestQuery(pullRequest.number),
data: pullRequestData,
});
},
});
return data!.reopenPullRequest;
}
async closePR(number: number) {
const pullRequest = await this.getPullRequest(number);
const { data } = await this.mutate({
mutation: mutations.closePullRequest,
variables: {
closePullRequestInput: { pullRequestId: pullRequest.id },
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult!.closePullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
...this.getPullRequestQuery(pullRequest.number),
data: pullRequestData,
});
},
});
return data!.closePullRequest;
}
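// Closes the entry's pull request and deletes its branch in one mutation; entries backed by the
// mock pull request only need their branch deleted. A NOT_FOUND GraphQL error means the branch
// is already gone, so it is swallowed.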
async deleteUnpublishedEntry(collectionName: string, slug: string) {
try {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = branchFromContentKey(contentKey);
const pr = await this.getBranchPullRequest(branchName);
if (pr.number !== MOCK_PULL_REQUEST) {
const { branch, pullRequest } = await this.getPullRequestAndBranch(branchName, pr.number);
const { data } = await this.mutate({
mutation: mutations.closePullRequestAndDeleteBranch,
variables: {
deleteRefInput: { refId: branch.id },
closePullRequestInput: { pullRequestId: pullRequest.id },
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
update: (store: any) => {
store.data.delete(defaultDataIdFromObject(branch));
store.data.delete(defaultDataIdFromObject(pullRequest));
},
});
return data!.closePullRequest;
} else {
return await this.deleteBranch(branchName);
}
} catch (e: any) {
const { graphQLErrors } = e;
if (graphQLErrors && graphQLErrors.length > 0) {
const branchNotFound = graphQLErrors.some((e: Error) => e.type === 'NOT_FOUND');
if (branchNotFound) {
return;
}
}
throw e;
}
}
async createPR(title: string, head: string) {
const [repository, headReference] = await Promise.all([
this.getRepository(this.originRepoOwner, this.originRepoName),
this.useOpenAuthoring ? `${(await this.user()).login}:${head}` : head,
]);
const { data } = await this.mutate({
mutation: mutations.createPullRequest,
variables: {
createPullRequestInput: {
baseRefName: this.branch,
body: DEFAULT_PR_BODY,
title,
headRefName: headReference,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { pullRequest } = mutationResult!.createPullRequest;
const pullRequestData = { repository: { ...pullRequest.repository, pullRequest } };
store.writeQuery({
...this.getPullRequestQuery(pullRequest.number),
data: pullRequestData,
});
},
});
const { pullRequest } = data!.createPullRequest;
return { ...pullRequest, head: { sha: pullRequest.headRefOid } };
}
async createBranch(branchName: string, sha: string) {
const owner = this.repoOwner;
const name = this.repoName;
const repository = await this.getRepository(owner, name);
const { data } = await this.mutate({
mutation: mutations.createBranch,
variables: {
createRefInput: {
name: this.getBranchQualifiedName(branchName),
oid: sha,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { branch } = mutationResult!.createRef;
const branchData = { repository: { ...branch.repository, branch } };
store.writeQuery({
...this.getBranchQuery(branchName, owner, name),
data: branchData,
});
},
});
const { branch } = data!.createRef;
return { ...branch, ref: `${branch.prefix}${branch.name}` };
}
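// Creates the branch and opens the pull request against the origin repo in a single mutation,
// then writes both results into the Apollo cache so follow-up queries can be served from it.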
async createBranchAndPullRequest(branchName: string, sha: string, title: string) {
const owner = this.originRepoOwner;
const name = this.originRepoName;
const repository = await this.getRepository(owner, name);
const { data } = await this.mutate({
mutation: mutations.createBranchAndPullRequest,
variables: {
createRefInput: {
name: this.getBranchQualifiedName(branchName),
oid: sha,
repositoryId: repository.id,
},
createPullRequestInput: {
baseRefName: this.branch,
body: DEFAULT_PR_BODY,
title,
headRefName: branchName,
repositoryId: repository.id,
},
},
update: (store, { data: mutationResult }) => {
const { branch } = mutationResult!.createRef;
const { pullRequest } = mutationResult!.createPullRequest;
const branchData = { repository: { ...branch.repository, branch } };
const pullRequestData = {
repository: { ...pullRequest.repository, branch },
origin: { ...pullRequest.repository, pullRequest },
};
store.writeQuery({
...this.getBranchQuery(branchName, owner, name),
data: branchData,
});
store.writeQuery({
...this.getPullRequestAndBranchQuery(branchName, pullRequest.number),
data: pullRequestData,
});
},
});
const { pullRequest } = data!.createPullRequest;
return transformPullRequest(pullRequest) as unknown as Octokit.PullsCreateResponse;
}
async getFileSha(path: string, { repoURL = this.repoURL, branch = this.branch } = {}) {
const { owner, name } = this.getOwnerAndNameFromRepoUrl(repoURL);
const { data } = await this.query({
query: queries.fileSha,
variables: { owner, name, expression: `${branch}:${path}` },
});
if (data.repository.file) {
return data.repository.file.sha;
}
throw new APIError('Not Found', 404, API_NAME);
}
}

View File

@ -0,0 +1,572 @@
export default {
__schema: {
types: [
{
kind: 'INTERFACE',
name: 'Node',
possibleTypes: [
{ name: 'AddedToProjectEvent' },
{ name: 'App' },
{ name: 'AssignedEvent' },
{ name: 'BaseRefChangedEvent' },
{ name: 'BaseRefForcePushedEvent' },
{ name: 'Blob' },
{ name: 'Bot' },
{ name: 'BranchProtectionRule' },
{ name: 'ClosedEvent' },
{ name: 'CodeOfConduct' },
{ name: 'CommentDeletedEvent' },
{ name: 'Commit' },
{ name: 'CommitComment' },
{ name: 'CommitCommentThread' },
{ name: 'ConvertedNoteToIssueEvent' },
{ name: 'CrossReferencedEvent' },
{ name: 'DemilestonedEvent' },
{ name: 'DeployKey' },
{ name: 'DeployedEvent' },
{ name: 'Deployment' },
{ name: 'DeploymentEnvironmentChangedEvent' },
{ name: 'DeploymentStatus' },
{ name: 'ExternalIdentity' },
{ name: 'Gist' },
{ name: 'GistComment' },
{ name: 'HeadRefDeletedEvent' },
{ name: 'HeadRefForcePushedEvent' },
{ name: 'HeadRefRestoredEvent' },
{ name: 'Issue' },
{ name: 'IssueComment' },
{ name: 'Label' },
{ name: 'LabeledEvent' },
{ name: 'Language' },
{ name: 'License' },
{ name: 'LockedEvent' },
{ name: 'Mannequin' },
{ name: 'MarketplaceCategory' },
{ name: 'MarketplaceListing' },
{ name: 'MentionedEvent' },
{ name: 'MergedEvent' },
{ name: 'Milestone' },
{ name: 'MilestonedEvent' },
{ name: 'MovedColumnsInProjectEvent' },
{ name: 'Organization' },
{ name: 'OrganizationIdentityProvider' },
{ name: 'OrganizationInvitation' },
{ name: 'PinnedEvent' },
{ name: 'Project' },
{ name: 'ProjectCard' },
{ name: 'ProjectColumn' },
{ name: 'PublicKey' },
{ name: 'PullRequest' },
{ name: 'PullRequestCommit' },
{ name: 'PullRequestCommitCommentThread' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
{ name: 'PullRequestReviewThread' },
{ name: 'PushAllowance' },
{ name: 'Reaction' },
{ name: 'ReadyForReviewEvent' },
{ name: 'Ref' },
{ name: 'ReferencedEvent' },
{ name: 'RegistryPackage' },
{ name: 'RegistryPackageDependency' },
{ name: 'RegistryPackageFile' },
{ name: 'RegistryPackageTag' },
{ name: 'RegistryPackageVersion' },
{ name: 'Release' },
{ name: 'ReleaseAsset' },
{ name: 'RemovedFromProjectEvent' },
{ name: 'RenamedTitleEvent' },
{ name: 'ReopenedEvent' },
{ name: 'Repository' },
{ name: 'RepositoryInvitation' },
{ name: 'RepositoryTopic' },
{ name: 'ReviewDismissalAllowance' },
{ name: 'ReviewDismissedEvent' },
{ name: 'ReviewRequest' },
{ name: 'ReviewRequestRemovedEvent' },
{ name: 'ReviewRequestedEvent' },
{ name: 'SavedReply' },
{ name: 'SecurityAdvisory' },
{ name: 'SponsorsListing' },
{ name: 'Sponsorship' },
{ name: 'Status' },
{ name: 'StatusContext' },
{ name: 'SubscribedEvent' },
{ name: 'Tag' },
{ name: 'Team' },
{ name: 'Topic' },
{ name: 'TransferredEvent' },
{ name: 'Tree' },
{ name: 'UnassignedEvent' },
{ name: 'UnlabeledEvent' },
{ name: 'UnlockedEvent' },
{ name: 'UnpinnedEvent' },
{ name: 'UnsubscribedEvent' },
{ name: 'User' },
{ name: 'UserBlockedEvent' },
{ name: 'UserContentEdit' },
{ name: 'UserStatus' },
],
},
{
kind: 'INTERFACE',
name: 'UniformResourceLocatable',
possibleTypes: [
{ name: 'Bot' },
{ name: 'ClosedEvent' },
{ name: 'Commit' },
{ name: 'CrossReferencedEvent' },
{ name: 'Gist' },
{ name: 'Issue' },
{ name: 'Mannequin' },
{ name: 'MergedEvent' },
{ name: 'Milestone' },
{ name: 'Organization' },
{ name: 'PullRequest' },
{ name: 'PullRequestCommit' },
{ name: 'ReadyForReviewEvent' },
{ name: 'Release' },
{ name: 'Repository' },
{ name: 'RepositoryTopic' },
{ name: 'ReviewDismissedEvent' },
{ name: 'User' },
],
},
{
kind: 'INTERFACE',
name: 'Actor',
possibleTypes: [
{ name: 'Bot' },
{ name: 'Mannequin' },
{ name: 'Organization' },
{ name: 'User' },
],
},
{
kind: 'INTERFACE',
name: 'RegistryPackageOwner',
possibleTypes: [{ name: 'Organization' }, { name: 'Repository' }, { name: 'User' }],
},
{
kind: 'INTERFACE',
name: 'ProjectOwner',
possibleTypes: [{ name: 'Organization' }, { name: 'Repository' }, { name: 'User' }],
},
{
kind: 'INTERFACE',
name: 'Closable',
possibleTypes: [
{ name: 'Issue' },
{ name: 'Milestone' },
{ name: 'Project' },
{ name: 'PullRequest' },
],
},
{
kind: 'INTERFACE',
name: 'Updatable',
possibleTypes: [
{ name: 'CommitComment' },
{ name: 'GistComment' },
{ name: 'Issue' },
{ name: 'IssueComment' },
{ name: 'Project' },
{ name: 'PullRequest' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
],
},
{
kind: 'UNION',
name: 'ProjectCardItem',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'INTERFACE',
name: 'Assignable',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'INTERFACE',
name: 'Comment',
possibleTypes: [
{ name: 'CommitComment' },
{ name: 'GistComment' },
{ name: 'Issue' },
{ name: 'IssueComment' },
{ name: 'PullRequest' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
],
},
{
kind: 'INTERFACE',
name: 'UpdatableComment',
possibleTypes: [
{ name: 'CommitComment' },
{ name: 'GistComment' },
{ name: 'Issue' },
{ name: 'IssueComment' },
{ name: 'PullRequest' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
],
},
{
kind: 'INTERFACE',
name: 'Labelable',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'INTERFACE',
name: 'Lockable',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'INTERFACE',
name: 'RegistryPackageSearch',
possibleTypes: [{ name: 'Organization' }, { name: 'User' }],
},
{
kind: 'INTERFACE',
name: 'RepositoryOwner',
possibleTypes: [{ name: 'Organization' }, { name: 'User' }],
},
{
kind: 'INTERFACE',
name: 'MemberStatusable',
possibleTypes: [{ name: 'Organization' }, { name: 'Team' }],
},
{
kind: 'INTERFACE',
name: 'ProfileOwner',
possibleTypes: [{ name: 'Organization' }, { name: 'User' }],
},
{
kind: 'UNION',
name: 'PinnableItem',
possibleTypes: [{ name: 'Gist' }, { name: 'Repository' }],
},
{
kind: 'INTERFACE',
name: 'Starrable',
possibleTypes: [{ name: 'Gist' }, { name: 'Repository' }, { name: 'Topic' }],
},
{ kind: 'INTERFACE', name: 'RepositoryInfo', possibleTypes: [{ name: 'Repository' }] },
{
kind: 'INTERFACE',
name: 'GitObject',
possibleTypes: [{ name: 'Blob' }, { name: 'Commit' }, { name: 'Tag' }, { name: 'Tree' }],
},
{
kind: 'INTERFACE',
name: 'RepositoryNode',
possibleTypes: [
{ name: 'CommitComment' },
{ name: 'CommitCommentThread' },
{ name: 'Issue' },
{ name: 'IssueComment' },
{ name: 'PullRequest' },
{ name: 'PullRequestCommitCommentThread' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
],
},
{
kind: 'INTERFACE',
name: 'Subscribable',
possibleTypes: [
{ name: 'Commit' },
{ name: 'Issue' },
{ name: 'PullRequest' },
{ name: 'Repository' },
{ name: 'Team' },
],
},
{
kind: 'INTERFACE',
name: 'Deletable',
possibleTypes: [
{ name: 'CommitComment' },
{ name: 'GistComment' },
{ name: 'IssueComment' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
],
},
{
kind: 'INTERFACE',
name: 'Reactable',
possibleTypes: [
{ name: 'CommitComment' },
{ name: 'Issue' },
{ name: 'IssueComment' },
{ name: 'PullRequest' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewComment' },
],
},
{
kind: 'INTERFACE',
name: 'GitSignature',
possibleTypes: [
{ name: 'GpgSignature' },
{ name: 'SmimeSignature' },
{ name: 'UnknownSignature' },
],
},
{
kind: 'UNION',
name: 'RequestedReviewer',
possibleTypes: [{ name: 'User' }, { name: 'Team' }, { name: 'Mannequin' }],
},
{
kind: 'UNION',
name: 'PullRequestTimelineItem',
possibleTypes: [
{ name: 'Commit' },
{ name: 'CommitCommentThread' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewThread' },
{ name: 'PullRequestReviewComment' },
{ name: 'IssueComment' },
{ name: 'ClosedEvent' },
{ name: 'ReopenedEvent' },
{ name: 'SubscribedEvent' },
{ name: 'UnsubscribedEvent' },
{ name: 'MergedEvent' },
{ name: 'ReferencedEvent' },
{ name: 'CrossReferencedEvent' },
{ name: 'AssignedEvent' },
{ name: 'UnassignedEvent' },
{ name: 'LabeledEvent' },
{ name: 'UnlabeledEvent' },
{ name: 'MilestonedEvent' },
{ name: 'DemilestonedEvent' },
{ name: 'RenamedTitleEvent' },
{ name: 'LockedEvent' },
{ name: 'UnlockedEvent' },
{ name: 'DeployedEvent' },
{ name: 'DeploymentEnvironmentChangedEvent' },
{ name: 'HeadRefDeletedEvent' },
{ name: 'HeadRefRestoredEvent' },
{ name: 'HeadRefForcePushedEvent' },
{ name: 'BaseRefForcePushedEvent' },
{ name: 'ReviewRequestedEvent' },
{ name: 'ReviewRequestRemovedEvent' },
{ name: 'ReviewDismissedEvent' },
{ name: 'UserBlockedEvent' },
],
},
{
kind: 'UNION',
name: 'Closer',
possibleTypes: [{ name: 'Commit' }, { name: 'PullRequest' }],
},
{
kind: 'UNION',
name: 'ReferencedSubject',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'UNION',
name: 'Assignee',
possibleTypes: [
{ name: 'Bot' },
{ name: 'Mannequin' },
{ name: 'Organization' },
{ name: 'User' },
],
},
{
kind: 'UNION',
name: 'MilestoneItem',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'UNION',
name: 'RenamedTitleSubject',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'UNION',
name: 'PullRequestTimelineItems',
possibleTypes: [
{ name: 'PullRequestCommit' },
{ name: 'PullRequestCommitCommentThread' },
{ name: 'PullRequestReview' },
{ name: 'PullRequestReviewThread' },
{ name: 'PullRequestRevisionMarker' },
{ name: 'BaseRefChangedEvent' },
{ name: 'BaseRefForcePushedEvent' },
{ name: 'DeployedEvent' },
{ name: 'DeploymentEnvironmentChangedEvent' },
{ name: 'HeadRefDeletedEvent' },
{ name: 'HeadRefForcePushedEvent' },
{ name: 'HeadRefRestoredEvent' },
{ name: 'MergedEvent' },
{ name: 'ReviewDismissedEvent' },
{ name: 'ReviewRequestedEvent' },
{ name: 'ReviewRequestRemovedEvent' },
{ name: 'ReadyForReviewEvent' },
{ name: 'IssueComment' },
{ name: 'CrossReferencedEvent' },
{ name: 'AddedToProjectEvent' },
{ name: 'AssignedEvent' },
{ name: 'ClosedEvent' },
{ name: 'CommentDeletedEvent' },
{ name: 'ConvertedNoteToIssueEvent' },
{ name: 'DemilestonedEvent' },
{ name: 'LabeledEvent' },
{ name: 'LockedEvent' },
{ name: 'MentionedEvent' },
{ name: 'MilestonedEvent' },
{ name: 'MovedColumnsInProjectEvent' },
{ name: 'PinnedEvent' },
{ name: 'ReferencedEvent' },
{ name: 'RemovedFromProjectEvent' },
{ name: 'RenamedTitleEvent' },
{ name: 'ReopenedEvent' },
{ name: 'SubscribedEvent' },
{ name: 'TransferredEvent' },
{ name: 'UnassignedEvent' },
{ name: 'UnlabeledEvent' },
{ name: 'UnlockedEvent' },
{ name: 'UserBlockedEvent' },
{ name: 'UnpinnedEvent' },
{ name: 'UnsubscribedEvent' },
],
},
{
kind: 'UNION',
name: 'IssueOrPullRequest',
possibleTypes: [{ name: 'Issue' }, { name: 'PullRequest' }],
},
{
kind: 'UNION',
name: 'IssueTimelineItem',
possibleTypes: [
{ name: 'Commit' },
{ name: 'IssueComment' },
{ name: 'CrossReferencedEvent' },
{ name: 'ClosedEvent' },
{ name: 'ReopenedEvent' },
{ name: 'SubscribedEvent' },
{ name: 'UnsubscribedEvent' },
{ name: 'ReferencedEvent' },
{ name: 'AssignedEvent' },
{ name: 'UnassignedEvent' },
{ name: 'LabeledEvent' },
{ name: 'UnlabeledEvent' },
{ name: 'UserBlockedEvent' },
{ name: 'MilestonedEvent' },
{ name: 'DemilestonedEvent' },
{ name: 'RenamedTitleEvent' },
{ name: 'LockedEvent' },
{ name: 'UnlockedEvent' },
{ name: 'TransferredEvent' },
],
},
{
kind: 'UNION',
name: 'IssueTimelineItems',
possibleTypes: [
{ name: 'IssueComment' },
{ name: 'CrossReferencedEvent' },
{ name: 'AddedToProjectEvent' },
{ name: 'AssignedEvent' },
{ name: 'ClosedEvent' },
{ name: 'CommentDeletedEvent' },
{ name: 'ConvertedNoteToIssueEvent' },
{ name: 'DemilestonedEvent' },
{ name: 'LabeledEvent' },
{ name: 'LockedEvent' },
{ name: 'MentionedEvent' },
{ name: 'MilestonedEvent' },
{ name: 'MovedColumnsInProjectEvent' },
{ name: 'PinnedEvent' },
{ name: 'ReferencedEvent' },
{ name: 'RemovedFromProjectEvent' },
{ name: 'RenamedTitleEvent' },
{ name: 'ReopenedEvent' },
{ name: 'SubscribedEvent' },
{ name: 'TransferredEvent' },
{ name: 'UnassignedEvent' },
{ name: 'UnlabeledEvent' },
{ name: 'UnlockedEvent' },
{ name: 'UserBlockedEvent' },
{ name: 'UnpinnedEvent' },
{ name: 'UnsubscribedEvent' },
],
},
{
kind: 'UNION',
name: 'ReviewDismissalAllowanceActor',
possibleTypes: [{ name: 'User' }, { name: 'Team' }],
},
{
kind: 'UNION',
name: 'PushAllowanceActor',
possibleTypes: [{ name: 'User' }, { name: 'Team' }],
},
{
kind: 'UNION',
name: 'PermissionGranter',
possibleTypes: [{ name: 'Organization' }, { name: 'Repository' }, { name: 'Team' }],
},
{ kind: 'INTERFACE', name: 'Sponsorable', possibleTypes: [{ name: 'User' }] },
{
kind: 'INTERFACE',
name: 'Contribution',
possibleTypes: [
{ name: 'CreatedCommitContribution' },
{ name: 'CreatedIssueContribution' },
{ name: 'CreatedPullRequestContribution' },
{ name: 'CreatedPullRequestReviewContribution' },
{ name: 'CreatedRepositoryContribution' },
{ name: 'JoinedGitHubContribution' },
{ name: 'RestrictedContribution' },
],
},
{
kind: 'UNION',
name: 'CreatedRepositoryOrRestrictedContribution',
possibleTypes: [
{ name: 'CreatedRepositoryContribution' },
{ name: 'RestrictedContribution' },
],
},
{
kind: 'UNION',
name: 'CreatedIssueOrRestrictedContribution',
possibleTypes: [{ name: 'CreatedIssueContribution' }, { name: 'RestrictedContribution' }],
},
{
kind: 'UNION',
name: 'CreatedPullRequestOrRestrictedContribution',
possibleTypes: [
{ name: 'CreatedPullRequestContribution' },
{ name: 'RestrictedContribution' },
],
},
{
kind: 'UNION',
name: 'SearchResultItem',
possibleTypes: [
{ name: 'Issue' },
{ name: 'PullRequest' },
{ name: 'Repository' },
{ name: 'User' },
{ name: 'Organization' },
{ name: 'MarketplaceListing' },
{ name: 'App' },
],
},
{
kind: 'UNION',
name: 'CollectionItemContent',
possibleTypes: [{ name: 'Repository' }, { name: 'Organization' }, { name: 'User' }],
},
],
},
};

View File

@ -0,0 +1,92 @@
import { gql } from 'graphql-tag';
export const repository = gql`
fragment RepositoryParts on Repository {
id
isFork
}
`;
export const blobWithText = gql`
fragment BlobWithTextParts on Blob {
id
text
is_binary: isBinary
}
`;
export const object = gql`
fragment ObjectParts on GitObject {
id
sha: oid
}
`;
export const branch = gql`
fragment BranchParts on Ref {
commit: target {
...ObjectParts
}
id
name
prefix
repository {
...RepositoryParts
}
}
${object}
${repository}
`;
export const pullRequest = gql`
fragment PullRequestParts on PullRequest {
id
baseRefName
baseRefOid
body
headRefName
headRefOid
number
state
title
merged_at: mergedAt
updated_at: updatedAt
user: author {
login
... on User {
name
}
}
repository {
...RepositoryParts
}
labels(last: 100) {
nodes {
name
}
}
}
${repository}
`;
export const treeEntry = gql`
fragment TreeEntryParts on TreeEntry {
path: name
sha: oid
type
mode
}
`;
export const fileEntry = gql`
fragment FileEntryParts on TreeEntry {
name
sha: oid
type
blob: object {
... on Blob {
size: byteSize
}
}
}
`;

View File

@ -0,0 +1,673 @@
import * as React from 'react';
import semaphore from 'semaphore';
import trimStart from 'lodash/trimStart';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
Cursor,
asyncLock,
basename,
getBlobSHA,
entriesByFolder,
entriesByFiles,
unpublishedEntries,
getMediaDisplayURL,
getMediaAsBlob,
filterByExtension,
getPreviewStatus,
runWithLock,
blobToFileObj,
contentKeyFromBranch,
unsentRequest,
branchFromContentKey,
} from '../../lib/util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import GraphQLAPI from './GraphQLAPI';
import type { Octokit } from '@octokit/rest';
import type {
AsyncLock,
Implementation,
AssetProxy,
PersistOptions,
DisplayURL,
User,
Credentials,
Config,
ImplementationFile,
UnpublishedEntryMediaFile,
Entry,
} from '../../lib/util';
import type { Semaphore } from 'semaphore';
type GitHubUser = Octokit.UsersGetAuthenticatedResponse;
const MAX_CONCURRENT_DOWNLOADS = 10;
type ApiFile = { id: string; type: string; name: string; path: string; size: number };
const { fetchWithTimeout: fetch } = unsentRequest;
const STATUS_PAGE = 'https://www.githubstatus.com';
const GITHUB_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
const GITHUB_OPERATIONAL_UNITS = ['API Requests', 'Issues, Pull Requests, Projects'];
type GitHubStatusComponent = {
id: string;
name: string;
status: string;
};
export default class GitHub implements Implementation {
lock: AsyncLock;
api: API | null;
options: {
proxied: boolean;
API: API | null;
useWorkflow?: boolean;
initialWorkflowStatus: string;
};
originRepo: string;
repo?: string;
openAuthoringEnabled: boolean;
useOpenAuthoring?: boolean;
alwaysForkEnabled: boolean;
branch: string;
apiRoot: string;
mediaFolder: string;
previewContext: string;
token: string | null;
squashMerges: boolean;
cmsLabelPrefix: string;
useGraphql: boolean;
_currentUserPromise?: Promise<GitHubUser>;
_userIsOriginMaintainerPromises?: {
[key: string]: Promise<boolean>;
};
_mediaDisplayURLSem?: Semaphore;
constructor(config: Config, options = {}) {
this.options = {
proxied: false,
API: null,
initialWorkflowStatus: '',
...options,
};
if (
!this.options.proxied &&
(config.backend.repo === null || config.backend.repo === undefined)
) {
throw new Error('The GitHub backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.openAuthoringEnabled = config.backend.open_authoring || false;
if (this.openAuthoringEnabled) {
if (!this.options.useWorkflow) {
throw new Error(
'backend.open_authoring is true but publish_mode is not set to editorial_workflow.',
);
}
this.originRepo = config.backend.repo || '';
} else {
this.repo = this.originRepo = config.backend.repo || '';
}
this.alwaysForkEnabled = config.backend.always_fork || false;
this.branch = config.backend.branch?.trim() || 'master';
this.apiRoot = config.backend.api_root || 'https://api.github.com';
this.token = '';
this.squashMerges = config.backend.squash_merges || false;
this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
this.useGraphql = config.backend.use_graphql || false;
this.mediaFolder = config.media_folder;
this.previewContext = config.backend.preview_context || '';
this.lock = asyncLock();
}
isGitBackend() {
return true;
}
async status() {
const api = await fetch(GITHUB_STATUS_ENDPOINT)
.then(res => res.json())
.then(res => {
return res['components']
.filter((statusComponent: GitHubStatusComponent) =>
GITHUB_OPERATIONAL_UNITS.includes(statusComponent.name),
)
.every(
(statusComponent: GitHubStatusComponent) => statusComponent.status === 'operational',
);
})
.catch(e => {
console.warn('Failed getting GitHub status', e);
return true;
});
let auth = false;
// no need to check auth if api is down
if (api) {
auth =
(await this.api
?.getUser()
.then(user => !!user)
.catch(e => {
console.warn('Failed getting GitHub user', e);
return false;
})) || false;
}
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
}
authComponent() {
const wrappedAuthenticationPage = (props: Record<string, unknown>) => (
<AuthenticationPage {...props} backend={this} />
);
wrappedAuthenticationPage.displayName = 'AuthenticationPage';
return wrappedAuthenticationPage;
}
restoreUser(user: User) {
return this.openAuthoringEnabled
? this.authenticateWithFork({ userData: user, getPermissionToFork: () => true }).then(() =>
this.authenticate(user),
)
: this.authenticate(user);
}
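// Polls the GitHub REST API every 250ms until the newly created fork is reported to exist
// (fork creation is asynchronous).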
async pollUntilForkExists({ repo, token }: { repo: string; token: string }) {
const pollDelay = 250; // milliseconds
let repoExists = false;
while (!repoExists) {
repoExists = await fetch(`${this.apiRoot}/repos/${repo}`, {
headers: { Authorization: `token ${token}` },
})
.then(() => true)
.catch(err => {
if (err && err.status === 404) {
console.info('This 404 was expected and handled appropriately.');
return false;
} else {
return Promise.reject(err);
}
});
// wait between polls
if (!repoExists) {
await new Promise(resolve => setTimeout(resolve, pollDelay));
}
}
return Promise.resolve();
}
async currentUser({ token }: { token: string }) {
if (!this._currentUserPromise) {
this._currentUserPromise = fetch(`${this.apiRoot}/user`, {
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
}
return this._currentUserPromise;
}
async userIsOriginMaintainer({
username: usernameArg,
token,
}: {
username?: string;
token: string;
}) {
const username = usernameArg || (await this.currentUser({ token })).login;
this._userIsOriginMaintainerPromises = this._userIsOriginMaintainerPromises || {};
if (!this._userIsOriginMaintainerPromises[username]) {
this._userIsOriginMaintainerPromises[username] = fetch(
`${this.apiRoot}/repos/${this.originRepo}/collaborators/${username}/permission`,
{
headers: {
Authorization: `token ${token}`,
},
},
)
.then(res => res.json())
.then(({ permission }) => permission === 'admin' || permission === 'write');
}
return this._userIsOriginMaintainerPromises[username];
}
async forkExists({ token }: { token: string }) {
try {
const currentUser = await this.currentUser({ token });
const repoName = this.originRepo.split('/')[1];
const repo = await fetch(`${this.apiRoot}/repos/${currentUser.login}/${repoName}`, {
method: 'GET',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
// https://developer.github.com/v3/repos/#get
// The parent and source objects are present when the repository is a fork.
// parent is the repository this repository was forked from, source is the ultimate source for the network.
const forkExists =
repo.fork === true &&
repo.parent &&
repo.parent.full_name.toLowerCase() === this.originRepo.toLowerCase();
return forkExists;
} catch {
return false;
}
}
async authenticateWithFork({
userData,
getPermissionToFork,
}: {
userData: User;
getPermissionToFork: () => Promise<boolean> | boolean;
}) {
if (!this.openAuthoringEnabled) {
throw new Error('Cannot authenticate with fork; Open Authoring is turned off.');
}
const token = userData.token as string;
// Origin maintainers should be able to use the CMS normally. If alwaysFork
// is enabled, we always fork (and avoid the origin maintainer check).
if (!this.alwaysForkEnabled && (await this.userIsOriginMaintainer({ token }))) {
this.repo = this.originRepo;
this.useOpenAuthoring = false;
return Promise.resolve();
}
if (!(await this.forkExists({ token }))) {
await getPermissionToFork();
}
const fork = await fetch(`${this.apiRoot}/repos/${this.originRepo}/forks`, {
method: 'POST',
headers: {
Authorization: `token ${token}`,
},
}).then(res => res.json());
this.useOpenAuthoring = true;
this.repo = fork.full_name;
return this.pollUntilForkExists({ repo: fork.full_name, token });
}
async authenticate(state: Credentials) {
this.token = state.token as string;
const apiCtor = this.useGraphql ? GraphQLAPI : API;
this.api = new apiCtor({
token: this.token,
branch: this.branch,
repo: this.repo,
originRepo: this.originRepo,
apiRoot: this.apiRoot,
squashMerges: this.squashMerges,
cmsLabelPrefix: this.cmsLabelPrefix,
useOpenAuthoring: this.useOpenAuthoring,
initialWorkflowStatus: this.options.initialWorkflowStatus,
});
const user = await this.api!.user();
const isCollab = await this.api!.hasWriteAccess().catch(error => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitHub account with access.
If your repo is under an organization, ensure the organization has granted access to Netlify CMS.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitHub user account does not have access to this repo.');
}
// Authorized user
return { ...user, token: state.token as string, useOpenAuthoring: this.useOpenAuthoring };
}
logout() {
this.token = null;
if (this.api && this.api.reset && typeof this.api.reset === 'function') {
return this.api.reset();
}
}
getToken() {
return Promise.resolve(this.token);
}
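// Paginates an already-fetched file list client-side (20 files per page) and builds a Cursor
// describing the navigation actions available from the current page.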
getCursorAndFiles = (files: ApiFile[], page: number) => {
const pageSize = 20;
const count = files.length;
const pageCount = Math.ceil(files.length / pageSize);
const actions = [] as string[];
if (page > 1) {
actions.push('prev');
actions.push('first');
}
if (page < pageCount) {
actions.push('next');
actions.push('last');
}
const cursor = Cursor.create({
actions,
meta: { page, count, pageSize, pageCount },
data: { files },
});
const pageFiles = files.slice((page - 1) * pageSize, page * pageSize);
return { cursor, files: pageFiles };
};
async entriesByFolder(folder: string, extension: string, depth: number) {
const repoURL = this.api!.originRepoURL;
let cursor: Cursor;
const listFiles = () =>
this.api!.listFiles(folder, {
repoURL,
depth,
}).then(files => {
const filtered = files.filter(file => filterByExtension(file, extension));
const result = this.getCursorAndFiles(filtered, 1);
cursor = result.cursor;
return result.files;
});
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }) as Promise<string>;
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return files;
}
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const repoURL = this.api!.originRepoURL;
const listFiles = () =>
this.api!.listFiles(folder, {
repoURL,
depth,
}).then(files => files.filter(file => filterByExtension(file, extension)));
const readFile = (path: string, id: string | null | undefined) => {
return this.api!.readFile(path, id, { repoURL }) as Promise<string>;
};
const files = await entriesByFolder(
listFiles,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
return files;
}
entriesByFiles(files: ImplementationFile[]) {
const repoURL = this.useOpenAuthoring ? this.api!.originRepoURL : this.api!.repoURL;
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL }).catch(() => '') as Promise<string>;
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
}
// Fetches a single entry.
getEntry(path: string) {
const repoURL = this.api!.originRepoURL;
return this.api!.readFile(path, null, { repoURL })
.then(data => ({
file: { path, id: null },
data: data as string,
}))
.catch(() => ({ file: { path, id: null }, data: '' }));
}
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listFiles(mediaFolder).then(files =>
files.map(({ id, name, size, path }) => {
// load media using getMediaDisplayURL to avoid token expiration with GitHub raw content URLs
// for private repositories
return { id, name, size, displayURL: { id, path }, path };
}),
);
}
async getMediaFile(path: string) {
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const name = basename(path);
const fileObj = blobToFileObj(name, blob);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(blob);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
persistEntry(entry: Entry, options: PersistOptions) {
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
try {
await this.api!.persistFiles([], [mediaFile], options);
const { sha, path, fileObj } = mediaFile as AssetProxy & { sha: string };
const displayURL = URL.createObjectURL(fileObj as Blob);
return {
id: sha,
name: fileObj!.name,
size: fileObj!.size,
displayURL,
path: trimStart(path, '/'),
};
} catch (error) {
console.error(error);
throw error;
}
}
deleteFiles(paths: string[], commitMessage: string) {
return this.api!.deleteFiles(paths, commitMessage);
}
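// Cursor navigation happens client-side: the full file list is stored on the cursor, so each
// action just re-slices it and re-reads the entries for the requested page.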
async traverseCursor(cursor: Cursor, action: string) {
const meta = cursor.meta!;
const files = cursor.data!.get('files')!.toJS() as ApiFile[];
let result: { cursor: Cursor; files: ApiFile[] };
switch (action) {
case 'first': {
result = this.getCursorAndFiles(files, 1);
break;
}
case 'last': {
result = this.getCursorAndFiles(files, meta.get('pageCount'));
break;
}
case 'next': {
result = this.getCursorAndFiles(files, meta.get('page') + 1);
break;
}
case 'prev': {
result = this.getCursorAndFiles(files, meta.get('page') - 1);
break;
}
default: {
result = this.getCursorAndFiles(files, 1);
break;
}
}
const readFile = (path: string, id: string | null | undefined) =>
this.api!.readFile(path, id, { repoURL: this.api!.originRepoURL }).catch(
() => '',
) as Promise<string>;
const entries = await entriesByFiles(
result.files,
readFile,
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
return {
entries,
cursor: result.cursor,
};
}
async loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
const blob = await getMediaAsBlob(file.path, file.id, readFile);
const name = basename(file.path);
const fileObj = blobToFileObj(name, blob);
return {
id: file.id,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
}
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => contentKeyFromBranch(branch)),
);
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const entryId = this.api!.generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
getBranch(collection: string, slug: string) {
const contentKey = this.api!.generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
return branch;
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(branch, { path, id });
return mediaFile;
}
async getDeployPreview(collection: string, slug: string) {
try {
const statuses = await this.api!.getStatuses(collection, slug);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
} catch (e) {
return null;
}
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
}

View File

@ -0,0 +1,10 @@
import GitHubBackend from './implementation';
import API from './API';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendGithub = {
GitHubBackend,
API,
AuthenticationPage,
};
export { GitHubBackend, API, AuthenticationPage };

View File

@ -0,0 +1,110 @@
import { gql } from 'graphql-tag';
import * as fragments from './fragments';
// updateRef only works for branches at the moment
export const updateBranch = gql`
mutation updateRef($input: UpdateRefInput!) {
updateRef(input: $input) {
branch: ref {
...BranchParts
}
}
}
${fragments.branch}
`;
// deleteRef only works for branches at the moment
const deleteRefMutationPart = `
deleteRef(input: $deleteRefInput) {
clientMutationId
}
`;
export const deleteBranch = gql`
mutation deleteRef($deleteRefInput: DeleteRefInput!) {
${deleteRefMutationPart}
}
`;
const closePullRequestMutationPart = `
closePullRequest(input: $closePullRequestInput) {
clientMutationId
pullRequest {
...PullRequestParts
}
}
`;
export const closePullRequest = gql`
mutation closePullRequestAndDeleteBranch($closePullRequestInput: ClosePullRequestInput!) {
${closePullRequestMutationPart}
}
${fragments.pullRequest}
`;
export const closePullRequestAndDeleteBranch = gql`
mutation closePullRequestAndDeleteBranch(
$closePullRequestInput: ClosePullRequestInput!
$deleteRefInput: DeleteRefInput!
) {
${closePullRequestMutationPart}
${deleteRefMutationPart}
}
${fragments.pullRequest}
`;
const createPullRequestMutationPart = `
createPullRequest(input: $createPullRequestInput) {
clientMutationId
pullRequest {
...PullRequestParts
}
}
`;
export const createPullRequest = gql`
mutation createPullRequest($createPullRequestInput: CreatePullRequestInput!) {
${createPullRequestMutationPart}
}
${fragments.pullRequest}
`;
export const createBranch = gql`
mutation createBranch($createRefInput: CreateRefInput!) {
createRef(input: $createRefInput) {
branch: ref {
...BranchParts
}
}
}
${fragments.branch}
`;
// createRef only works for branches at the moment
export const createBranchAndPullRequest = gql`
mutation createBranchAndPullRequest(
$createRefInput: CreateRefInput!
$createPullRequestInput: CreatePullRequestInput!
) {
createRef(input: $createRefInput) {
branch: ref {
...BranchParts
}
}
${createPullRequestMutationPart}
}
${fragments.branch}
${fragments.pullRequest}
`;
export const reopenPullRequest = gql`
mutation reopenPullRequest($reopenPullRequestInput: ReopenPullRequestInput!) {
reopenPullRequest(input: $reopenPullRequestInput) {
clientMutationId
pullRequest {
...PullRequestParts
}
}
}
${fragments.pullRequest}
`;

View File

@ -0,0 +1,213 @@
import { gql } from 'graphql-tag';
import { oneLine } from 'common-tags';
import * as fragments from './fragments';
export const repoPermission = gql`
query repoPermission($owner: String!, $name: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
viewerPermission
}
}
${fragments.repository}
`;
export const user = gql`
query {
viewer {
id
avatar_url: avatarUrl
name
login
}
}
`;
export const blob = gql`
query blob($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
object(expression: $expression) {
... on Blob {
...BlobWithTextParts
}
}
}
}
${fragments.repository}
${fragments.blobWithText}
`;
export const statuses = gql`
query statuses($owner: String!, $name: String!, $sha: GitObjectID!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
object(oid: $sha) {
...ObjectParts
... on Commit {
status {
id
contexts {
id
context
state
target_url: targetUrl
}
}
}
}
}
}
${fragments.repository}
${fragments.object}
`;
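// Builds the nested Tree selection for the files query: each extra level of depth nests another
// `object { ... on Tree { entries ... } }` block so subfolders are fetched in a single request.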
function buildFilesQuery(depth = 1) {
const PLACE_HOLDER = 'PLACE_HOLDER';
let query = oneLine`
...ObjectParts
... on Tree {
entries {
...FileEntryParts
${PLACE_HOLDER}
}
}
`;
for (let i = 0; i < depth - 1; i++) {
query = query.replace(
PLACE_HOLDER,
oneLine`
object {
... on Tree {
entries {
...FileEntryParts
${PLACE_HOLDER}
}
}
}
`,
);
}
query = query.replace(PLACE_HOLDER, '');
return query;
}
export function files(depth: number) {
return gql`
query files($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
object(expression: $expression) {
${buildFilesQuery(depth)}
}
}
}
${fragments.repository}
${fragments.object}
${fragments.fileEntry}
`;
}
const branchQueryPart = `
branch: ref(qualifiedName: $qualifiedName) {
...BranchParts
}
`;
export const branch = gql`
query branch($owner: String!, $name: String!, $qualifiedName: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
${branchQueryPart}
}
}
${fragments.repository}
${fragments.branch}
`;
export const openAuthoringBranches = gql`
query openAuthoringBranches($owner: String!, $name: String!, $refPrefix: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
refs(refPrefix: $refPrefix, last: 100) {
nodes {
...BranchParts
}
}
}
}
${fragments.repository}
${fragments.branch}
`;
export const repository = gql`
query repository($owner: String!, $name: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
}
}
${fragments.repository}
`;
const pullRequestQueryPart = `
pullRequest(number: $number) {
...PullRequestParts
}
`;
export const pullRequest = gql`
query pullRequest($owner: String!, $name: String!, $number: Int!) {
repository(owner: $owner, name: $name) {
id
${pullRequestQueryPart}
}
}
${fragments.pullRequest}
`;
export const pullRequests = gql`
query pullRequests($owner: String!, $name: String!, $head: String, $states: [PullRequestState!]) {
repository(owner: $owner, name: $name) {
id
pullRequests(last: 100, headRefName: $head, states: $states) {
nodes {
...PullRequestParts
}
}
}
}
${fragments.pullRequest}
`;
export const pullRequestAndBranch = gql`
query pullRequestAndBranch($owner: String!, $name: String!, $originRepoOwner: String!, $originRepoName: String!, $qualifiedName: String!, $number: Int!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
${branchQueryPart}
}
origin: repository(owner: $originRepoOwner, name: $originRepoName) {
...RepositoryParts
${pullRequestQueryPart}
}
}
${fragments.repository}
${fragments.branch}
${fragments.pullRequest}
`;
export const fileSha = gql`
query fileSha($owner: String!, $name: String!, $expression: String!) {
repository(owner: $owner, name: $name) {
...RepositoryParts
file: object(expression: $expression) {
...ObjectParts
}
}
}
${fragments.repository}
${fragments.object}
`;

View File

@ -0,0 +1,48 @@
const fetch = require('node-fetch');
const fs = require('fs');
const path = require('path');
const API_HOST = process.env.GITHUB_HOST || 'https://api.github.com';
const API_TOKEN = process.env.GITHUB_API_TOKEN;
if (!API_TOKEN) {
throw new Error('Missing environment variable GITHUB_API_TOKEN');
}
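// Runs a schema introspection query and writes only the union/interface possible-type data to
// src/fragmentTypes.js, which the GraphQL client uses to match fragments on interfaces and unions.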
fetch(`${API_HOST}/graphql`, {
method: 'POST',
headers: { 'Content-Type': 'application/json', Authorization: `bearer ${API_TOKEN}` },
body: JSON.stringify({
variables: {},
query: `
{
__schema {
types {
kind
name
possibleTypes {
name
}
}
}
}
`,
}),
})
.then(result => result.json())
.then(result => {
// here we're filtering out any type information unrelated to unions or interfaces
const filteredData = result.data.__schema.types.filter(type => type.possibleTypes !== null);
result.data.__schema.types = filteredData;
fs.writeFile(
path.join(__dirname, '..', 'src', 'fragmentTypes.js'),
`module.exports = ${JSON.stringify(result.data)}`,
err => {
if (err) {
console.error('Error writing fragmentTypes file', err);
} else {
console.info('Fragment types successfully extracted!');
}
},
);
});

View File

@ -0,0 +1,5 @@
declare module 'semaphore' {
export type Semaphore = { take: (f: Function) => void; leave: () => void };
const semaphore: (count: number) => Semaphore;
export default semaphore;
}

File diff suppressed because it is too large

View File

@ -0,0 +1,104 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { AuthenticationPage, Icon } from '../../ui';
import {
NetlifyAuthenticator,
ImplicitAuthenticator,
PkceAuthenticator,
} from '../../lib/auth';
const LoginButtonIcon = styled(Icon)`
margin-right: 18px;
`;
const clientSideAuthenticators = {
pkce: ({ base_url, auth_endpoint, app_id, auth_token_endpoint }) =>
new PkceAuthenticator({ base_url, auth_endpoint, app_id, auth_token_endpoint }),
implicit: ({ base_url, auth_endpoint, app_id, clearHash }) =>
new ImplicitAuthenticator({ base_url, auth_endpoint, app_id, clearHash }),
};
export default class GitLabAuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
base_url: PropTypes.string,
siteId: PropTypes.string,
authEndpoint: PropTypes.string,
config: PropTypes.object.isRequired,
clearHash: PropTypes.func,
t: PropTypes.func.isRequired,
};
state = {};
componentDidMount() {
const {
auth_type: authType = '',
base_url = 'https://gitlab.com',
auth_endpoint = 'oauth/authorize',
app_id = '',
} = this.props.config.backend;
if (clientSideAuthenticators[authType]) {
this.auth = clientSideAuthenticators[authType]({
base_url,
auth_endpoint,
app_id,
auth_token_endpoint: 'oauth/token',
clearHash: this.props.clearHash,
});
// Complete implicit authentication if we were redirected back from the provider.
this.auth.completeAuth((err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
this.props.onLogin(data);
});
} else {
this.auth = new NetlifyAuthenticator({
base_url: this.props.base_url,
site_id:
document.location.host.split(':')[0] === 'localhost'
? 'cms.netlify.com'
: this.props.siteId,
auth_endpoint: this.props.authEndpoint,
});
}
}
handleLogin = e => {
e.preventDefault();
this.auth.authenticate({ provider: 'gitlab', scope: 'api' }, (err, data) => {
if (err) {
this.setState({ loginError: err.toString() });
return;
}
this.props.onLogin(data);
});
};
render() {
const { inProgress, config, t } = this.props;
return (
<AuthenticationPage
onLogin={this.handleLogin}
loginDisabled={inProgress}
loginErrorMessage={this.state.loginError}
logoUrl={config.logo_url}
siteUrl={config.site_url}
renderButtonContent={() => (
<React.Fragment>
<LoginButtonIcon type="gitlab" />{' '}
{inProgress ? t('auth.loggingIn') : t('auth.loginWithGitLab')}
</React.Fragment>
)}
t={t}
/>
);
}
}

View File

@ -0,0 +1,456 @@
import trimStart from 'lodash/trimStart';
import semaphore from 'semaphore';
import { trim } from 'lodash';
import { stripIndent } from 'common-tags';
import {
CURSOR_COMPATIBILITY_SYMBOL,
basename,
entriesByFolder,
entriesByFiles,
getMediaDisplayURL,
getMediaAsBlob,
unpublishedEntries,
getPreviewStatus,
asyncLock,
runWithLock,
getBlobSHA,
blobToFileObj,
contentKeyFromBranch,
generateContentKey,
localForage,
allEntriesByFolder,
filterByExtension,
branchFromContentKey,
} from '../../lib/util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import type {
Entry,
AssetProxy,
PersistOptions,
Cursor,
Implementation,
DisplayURL,
User,
Credentials,
Config,
ImplementationFile,
UnpublishedEntryMediaFile,
AsyncLock,
} from '../../lib/util';
import type { Semaphore } from 'semaphore';
const MAX_CONCURRENT_DOWNLOADS = 10;
export default class GitLab implements Implementation {
lock: AsyncLock;
api: API | null;
options: {
proxied: boolean;
API: API | null;
initialWorkflowStatus: string;
};
repo: string;
branch: string;
apiRoot: string;
token: string | null;
squashMerges: boolean;
cmsLabelPrefix: string;
mediaFolder: string;
previewContext: string;
useGraphQL: boolean;
graphQLAPIRoot: string;
_mediaDisplayURLSem?: Semaphore;
constructor(config: Config, options = {}) {
this.options = {
proxied: false,
API: null,
initialWorkflowStatus: '',
...options,
};
if (
!this.options.proxied &&
(config.backend.repo === null || config.backend.repo === undefined)
) {
throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
}
this.api = this.options.API || null;
this.repo = config.backend.repo || '';
this.branch = config.backend.branch || 'master';
this.apiRoot = config.backend.api_root || 'https://gitlab.com/api/v4';
this.token = '';
this.squashMerges = config.backend.squash_merges || false;
this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
this.mediaFolder = config.media_folder;
this.previewContext = config.backend.preview_context || '';
this.useGraphQL = config.backend.use_graphql || false;
this.graphQLAPIRoot = config.backend.graphql_api_root || 'https://gitlab.com/api/graphql';
this.lock = asyncLock();
}
isGitBackend() {
return true;
}
async status() {
const auth =
(await this.api
?.user()
.then(user => !!user)
.catch(e => {
console.warn('Failed getting GitLab user', e);
return false;
})) || false;
return { auth: { status: auth }, api: { status: true, statusPage: '' } };
}
authComponent() {
return AuthenticationPage;
}
restoreUser(user: User) {
return this.authenticate(user);
}
async authenticate(state: Credentials) {
this.token = state.token as string;
this.api = new API({
token: this.token,
branch: this.branch,
repo: this.repo,
apiRoot: this.apiRoot,
squashMerges: this.squashMerges,
cmsLabelPrefix: this.cmsLabelPrefix,
initialWorkflowStatus: this.options.initialWorkflowStatus,
useGraphQL: this.useGraphQL,
graphQLAPIRoot: this.graphQLAPIRoot,
});
const user = await this.api.user();
const isCollab = await this.api.hasWriteAccess().catch((error: Error) => {
error.message = stripIndent`
Repo "${this.repo}" not found.
Please ensure the repo information is spelled correctly.
If the repo is private, make sure you're logged into a GitLab account with access.
`;
throw error;
});
// Unauthorized user
if (!isCollab) {
throw new Error('Your GitLab user account does not have access to this repo.');
}
// Authorized user
return { ...user, login: user.username, token: state.token as string };
}
async logout() {
this.token = null;
return;
}
getToken() {
return Promise.resolve(this.token);
}
filterFile(
folder: string,
file: { path: string; name: string },
extension: string,
depth: number,
) {
// GitLab paths include the root folder
const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
return filterByExtension(file, extension) && fileFolder.split('/').length <= depth;
}
async entriesByFolder(folder: string, extension: string, depth: number) {
let cursor: Cursor;
const listFiles = () =>
this.api!.listFiles(folder, depth > 1).then(({ files, cursor: c }) => {
cursor = c.mergeMeta({ folder, extension, depth });
return files.filter(file => this.filterFile(folder, file, extension, depth));
});
const files = await entriesByFolder(
listFiles,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return files;
}
async listAllFiles(folder: string, extension: string, depth: number) {
const files = await this.api!.listAllFiles(folder, depth > 1);
const filtered = files.filter(file => this.filterFile(folder, file, extension, depth));
return filtered;
}
async allEntriesByFolder(folder: string, extension: string, depth: number) {
const files = await allEntriesByFolder({
listAllFiles: () => this.listAllFiles(folder, extension, depth),
readFile: this.api!.readFile.bind(this.api!),
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
apiName: API_NAME,
branch: this.branch,
localForage,
folder,
extension,
depth,
getDefaultBranch: () =>
this.api!.getDefaultBranch().then(b => ({ name: b.name, sha: b.commit.id })),
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
getDifferences: (to, from) => this.api!.getDifferences(to, from),
getFileId: path => this.api!.getFileId(path, this.branch),
filterFile: file => this.filterFile(folder, file, extension, depth),
customFetch: this.useGraphQL ? files => this.api!.readFilesGraphQL(files) : undefined,
});
return files;
}
entriesByFiles(files: ImplementationFile[]) {
return entriesByFiles(
files,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api),
API_NAME,
);
}
// Fetches a single entry.
getEntry(path: string) {
return this.api!.readFile(path).then(data => ({
file: { path, id: null },
data: data as string,
}));
}
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listAllFiles(mediaFolder).then(files =>
files.map(({ id, name, path }) => {
return { id, name, path, displayURL: { id, name, path } };
}),
);
}
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
displayURL,
this.api!.readFile.bind(this.api!),
this._mediaDisplayURLSem,
);
}
async getMediaFile(path: string) {
const name = basename(path);
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
const fileObj = blobToFileObj(name, blob);
const url = URL.createObjectURL(fileObj);
const id = await getBlobSHA(blob);
return {
id,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
async persistEntry(entry: Entry, options: PersistOptions) {
// persistEntry is a transactional operation
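// runWithLock acquires this.lock before calling the API and releases it afterwards, so
// concurrent persist calls are serialized; the trailing string is the message reported
// when the lock cannot be acquired (behavior assumed from lib/util's runWithLock).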
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const fileObj = mediaFile.fileObj as File;
const [id] = await Promise.all([
getBlobSHA(fileObj),
this.api!.persistFiles([], [mediaFile], options),
]);
const { path } = mediaFile;
const url = URL.createObjectURL(fileObj);
return {
displayURL: url,
path: trimStart(path, '/'),
name: fileObj!.name,
size: fileObj!.size,
file: fileObj,
url,
id,
};
}
deleteFiles(paths: string[], commitMessage: string) {
return this.api!.deleteFiles(paths, commitMessage);
}
traverseCursor(cursor: Cursor, action: string) {
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
const [folder, depth, extension] = [
cursor.meta?.get('folder') as string,
cursor.meta?.get('depth') as number,
cursor.meta?.get('extension') as string,
];
if (folder && depth && extension) {
entries = entries.filter(f => this.filterFile(folder, f, extension, depth));
newCursor = newCursor.mergeMeta({ folder, extension, depth });
}
const entriesWithData = await entriesByFiles(
entries,
this.api!.readFile.bind(this.api!),
this.api!.readFileMetadata.bind(this.api!),
API_NAME,
);
return {
entries: entriesWithData,
cursor: newCursor,
};
});
}
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
return getMediaAsBlob(file.path, null, readFile).then(blob => {
const name = basename(file.path);
const fileObj = blobToFileObj(name, blob);
return {
id: file.path,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
}
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => contentKeyFromBranch(branch)),
);
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const entryId = generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
getBranch(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);
return branch;
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(branch, { path, id });
return mediaFile;
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
// updateUnpublishedEntryStatus is a transactional operation
return runWithLock(
this.lock,
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
'Failed to acquire update entry status lock',
);
}
async deleteUnpublishedEntry(collection: string, slug: string) {
// deleteUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.deleteUnpublishedEntry(collection, slug),
'Failed to acquire delete entry lock',
);
}
async publishUnpublishedEntry(collection: string, slug: string) {
// publishUnpublishedEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.publishUnpublishedEntry(collection, slug),
'Failed to acquire publish entry lock',
);
}
async getDeployPreview(collection: string, slug: string) {
try {
const statuses = await this.api!.getStatuses(collection, slug);
const deployStatus = getPreviewStatus(statuses, this.previewContext);
if (deployStatus) {
const { target_url: url, state } = deployStatus;
return { url, status: state };
} else {
return null;
}
} catch (e) {
return null;
}
}
}

View File

@ -0,0 +1,10 @@
import GitLabBackend from './implementation';
import API from './API';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendGitlab = {
GitLabBackend,
API,
AuthenticationPage,
};
export { GitLabBackend, API, AuthenticationPage };

View File

@ -0,0 +1,73 @@
import { gql } from 'graphql-tag';
import { oneLine } from 'common-tags';
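// Note: the `files` query below pages through a tree's blobs with cursor-based pagination;
// the caller is expected to re-run it with `cursor` set to `pageInfo.endCursor` while
// `pageInfo.hasNextPage` is true.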
export const files = gql`
query files($repo: ID!, $branch: String!, $path: String!, $recursive: Boolean!, $cursor: String) {
project(fullPath: $repo) {
repository {
tree(ref: $branch, path: $path, recursive: $recursive) {
blobs(after: $cursor) {
nodes {
type
id: sha
path
name
}
pageInfo {
endCursor
hasNextPage
}
}
}
}
}
}
`;
export const blobs = gql`
query blobs($repo: ID!, $branch: String!, $paths: [String!]!) {
project(fullPath: $repo) {
repository {
blobs(ref: $branch, paths: $paths) {
nodes {
id
data: rawBlob
}
}
}
}
}
`;
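// lastCommits builds a single query that aliases one `tree` field per path (tree0, tree1, ...)
// so last-commit metadata for many paths is fetched in one round trip. Paths are interpolated
// directly into the query string, which assumes they contain no double quotes.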
export function lastCommits(paths: string[]) {
const tree = paths
.map(
(path, index) => oneLine`
tree${index}: tree(ref: $branch, path: "${path}") {
lastCommit {
authorName
authoredDate
author {
id
username
name
publicEmail
}
}
}
`,
)
.join('\n');
const query = gql`
query lastCommits($repo: ID!, $branch: String!) {
project(fullPath: $repo) {
repository {
${tree}
}
}
}
`;
return query;
}

View File

@ -0,0 +1,17 @@
import { AzureBackend } from './azure';
import { BitbucketBackend } from './bitbucket';
import { GitGatewayBackend } from './git-gateway';
import { GitHubBackend } from './github';
import { GitLabBackend } from './gitlab';
import { ProxyBackend } from './proxy';
import { TestBackend } from './test';
export {
AzureBackend,
BitbucketBackend,
GitGatewayBackend,
GitHubBackend,
GitLabBackend,
ProxyBackend,
TestBackend,
};

View File

@ -0,0 +1,63 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { Icon, buttons, shadows, GoBackButton } from '../../ui';
const StyledAuthenticationPage = styled.section`
display: flex;
flex-flow: column nowrap;
align-items: center;
justify-content: center;
height: 100vh;
`;
const PageLogoIcon = styled(Icon)`
color: #c4c6d2;
margin-top: -300px;
`;
const LoginButton = styled.button`
${buttons.button};
${shadows.dropDeep};
${buttons.default};
${buttons.gray};
padding: 0 30px;
margin-top: -40px;
display: flex;
align-items: center;
position: relative;
${Icon} {
margin-right: 18px;
}
`;
export default class AuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
config: PropTypes.object.isRequired,
t: PropTypes.func.isRequired,
};
handleLogin = e => {
e.preventDefault();
this.props.onLogin(this.state);
};
render() {
const { config, inProgress, t } = this.props;
return (
<StyledAuthenticationPage>
<PageLogoIcon size="300px" type="netlify-cms" />
<LoginButton disabled={inProgress} onClick={this.handleLogin}>
{inProgress ? t('auth.loggingIn') : t('auth.login')}
</LoginButton>
{config.site_url && <GoBackButton href={config.site_url} t={t}></GoBackButton>}
</StyledAuthenticationPage>
);
}
}

View File

@ -0,0 +1,262 @@
import {
EditorialWorkflowError,
APIError,
unsentRequest,
blobToFileObj,
} from '../../lib/util';
import AuthenticationPage from './AuthenticationPage';
import type {
Entry,
AssetProxy,
PersistOptions,
User,
Config,
Implementation,
ImplementationFile,
UnpublishedEntry,
} from '../../lib/util';
async function serializeAsset(assetProxy: AssetProxy) {
const base64content = await assetProxy.toBase64!();
return { path: assetProxy.path, content: base64content, encoding: 'base64' };
}
type MediaFile = {
id: string;
content: string;
encoding: string;
name: string;
path: string;
};
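// deserializeMediaFile turns a proxy response back into a browser-side media object:
// base64 content is decoded to bytes, wrapped in a Blob/File, and exposed through an
// object URL used for both `url` and `displayURL`.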
function deserializeMediaFile({ id, content, encoding, path, name }: MediaFile) {
let byteArray = new Uint8Array(0);
if (encoding !== 'base64') {
console.error(`Unsupported encoding '${encoding}' for file '${path}'`);
} else {
const decodedContent = atob(content);
byteArray = new Uint8Array(decodedContent.length);
for (let i = 0; i < decodedContent.length; i++) {
byteArray[i] = decodedContent.charCodeAt(i);
}
}
const blob = new Blob([byteArray]);
const file = blobToFileObj(name, blob);
const url = URL.createObjectURL(file);
return { id, name, path, file, size: file.size, url, displayURL: url };
}
export default class ProxyBackend implements Implementation {
proxyUrl: string;
mediaFolder: string;
options: { initialWorkflowStatus?: string };
branch: string;
cmsLabelPrefix?: string;
constructor(config: Config, options = {}) {
if (!config.backend.proxy_url) {
throw new Error('The Proxy backend needs a "proxy_url" in the backend configuration.');
}
this.branch = config.backend.branch || 'master';
this.proxyUrl = config.backend.proxy_url;
this.mediaFolder = config.media_folder;
this.options = options;
this.cmsLabelPrefix = config.backend.cms_label_prefix;
}
isGitBackend() {
return false;
}
status() {
return Promise.resolve({ auth: { status: true }, api: { status: true, statusPage: '' } });
}
authComponent() {
return AuthenticationPage;
}
restoreUser() {
return this.authenticate();
}
authenticate() {
return Promise.resolve() as unknown as Promise<User>;
}
logout() {
return null;
}
getToken() {
return Promise.resolve('');
}
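// Every backend action becomes a POST to proxy_url with a JSON body of the shape
// { branch, action, params }. Illustrative payload (values are placeholders):
//   { "branch": "master", "action": "getEntry", "params": { "branch": "master", "path": "content/posts/a.md" } }
// Non-OK responses are surfaced as an APIError carrying the proxy's `error` field and status.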
async request(payload: { action: string; params: Record<string, unknown> }) {
const response = await unsentRequest.fetchWithTimeout(this.proxyUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json; charset=utf-8' },
body: JSON.stringify({ branch: this.branch, ...payload }),
});
const json = await response.json();
if (response.ok) {
return json;
} else {
throw new APIError(json.error, response.status, 'Proxy');
}
}
entriesByFolder(folder: string, extension: string, depth: number) {
return this.request({
action: 'entriesByFolder',
params: { branch: this.branch, folder, extension, depth },
});
}
entriesByFiles(files: ImplementationFile[]) {
return this.request({
action: 'entriesByFiles',
params: { branch: this.branch, files },
});
}
getEntry(path: string) {
return this.request({
action: 'getEntry',
params: { branch: this.branch, path },
});
}
unpublishedEntries() {
return this.request({
action: 'unpublishedEntries',
params: { branch: this.branch },
});
}
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
try {
const entry: UnpublishedEntry = await this.request({
action: 'unpublishedEntry',
params: { branch: this.branch, id, collection, slug, cmsLabelPrefix: this.cmsLabelPrefix },
});
return entry;
} catch (e: any) {
if (e.status === 404) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
throw e;
}
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const { data } = await this.request({
action: 'unpublishedEntryDataFile',
params: { branch: this.branch, collection, slug, path, id },
});
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const file = await this.request({
action: 'unpublishedEntryMediaFile',
params: { branch: this.branch, collection, slug, path, id },
});
return deserializeMediaFile(file);
}
deleteUnpublishedEntry(collection: string, slug: string) {
return this.request({
action: 'deleteUnpublishedEntry',
params: { branch: this.branch, collection, slug },
});
}
async persistEntry(entry: Entry, options: PersistOptions) {
const assets = await Promise.all(entry.assets.map(serializeAsset));
return this.request({
action: 'persistEntry',
params: {
branch: this.branch,
dataFiles: entry.dataFiles,
assets,
options: { ...options, status: options.status || this.options.initialWorkflowStatus },
cmsLabelPrefix: this.cmsLabelPrefix,
},
});
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
return this.request({
action: 'updateUnpublishedEntryStatus',
params: {
branch: this.branch,
collection,
slug,
newStatus,
cmsLabelPrefix: this.cmsLabelPrefix,
},
});
}
publishUnpublishedEntry(collection: string, slug: string) {
return this.request({
action: 'publishUnpublishedEntry',
params: { branch: this.branch, collection, slug },
});
}
async getMedia(mediaFolder = this.mediaFolder) {
const files: MediaFile[] = await this.request({
action: 'getMedia',
params: { branch: this.branch, mediaFolder },
});
return files.map(deserializeMediaFile);
}
async getMediaFile(path: string) {
const file = await this.request({
action: 'getMediaFile',
params: { branch: this.branch, path },
});
return deserializeMediaFile(file);
}
async persistMedia(assetProxy: AssetProxy, options: PersistOptions) {
const asset = await serializeAsset(assetProxy);
const file: MediaFile = await this.request({
action: 'persistMedia',
params: { branch: this.branch, asset, options: { commitMessage: options.commitMessage } },
});
return deserializeMediaFile(file);
}
deleteFiles(paths: string[], commitMessage: string) {
return this.request({
action: 'deleteFiles',
params: { branch: this.branch, paths, options: { commitMessage } },
});
}
getDeployPreview(collection: string, slug: string) {
return this.request({
action: 'getDeployPreview',
params: { branch: this.branch, collection, slug },
});
}
}

View File

@ -0,0 +1,8 @@
import ProxyBackend from './implementation';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendProxy = {
ProxyBackend,
AuthenticationPage,
};
export { ProxyBackend, AuthenticationPage };

View File

@ -0,0 +1,73 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { Icon, buttons, shadows, GoBackButton } from '../../ui';
const StyledAuthenticationPage = styled.section`
display: flex;
flex-flow: column nowrap;
align-items: center;
justify-content: center;
height: 100vh;
`;
const PageLogoIcon = styled(Icon)`
color: #c4c6d2;
margin-top: -300px;
`;
const LoginButton = styled.button`
${buttons.button};
${shadows.dropDeep};
${buttons.default};
${buttons.gray};
padding: 0 30px;
margin-top: -40px;
display: flex;
align-items: center;
position: relative;
${Icon} {
margin-right: 18px;
}
`;
export default class AuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
config: PropTypes.object.isRequired,
t: PropTypes.func.isRequired,
};
componentDidMount() {
/**
* Allow login screen to be skipped for demo purposes.
*/
const skipLogin = this.props.config.backend.login === false;
if (skipLogin) {
this.props.onLogin(this.state);
}
}
handleLogin = e => {
e.preventDefault();
this.props.onLogin(this.state);
};
render() {
const { config, inProgress, t } = this.props;
return (
<StyledAuthenticationPage>
<PageLogoIcon size="300px" type="netlify-cms" />
<LoginButton disabled={inProgress} onClick={this.handleLogin}>
{inProgress ? t('auth.loggingIn') : t('auth.login')}
</LoginButton>
{config.site_url && <GoBackButton href={config.site_url} t={t}></GoBackButton>}
</StyledAuthenticationPage>
);
}
}

View File

@ -0,0 +1,430 @@
import { attempt, isError, take, unset, isEmpty } from 'lodash';
import uuid from 'uuid/v4';
import { extname, dirname } from 'path';
import {
EditorialWorkflowError,
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
basename,
} from '../../lib/util';
import AuthenticationPage from './AuthenticationPage';
import type {
Implementation,
Entry,
ImplementationEntry,
AssetProxy,
PersistOptions,
User,
Config,
ImplementationFile,
DataFile,
} from '../../lib/util';
type RepoFile = { path: string; content: string | AssetProxy };
type RepoTree = { [key: string]: RepoFile | RepoTree };
type Diff = {
id: string;
originalPath?: string;
path: string;
newFile: boolean;
status: string;
content: string | AssetProxy;
};
type UnpublishedRepoEntry = {
slug: string;
collection: string;
status: string;
diffs: Diff[];
updatedAt: string;
};
declare global {
interface Window {
repoFiles: RepoTree;
repoFilesUnpublished: { [key: string]: UnpublishedRepoEntry };
}
}
window.repoFiles = window.repoFiles || {};
window.repoFilesUnpublished = window.repoFilesUnpublished || {};
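// window.repoFiles is a nested tree keyed by path segments; for example,
// writeFile('posts/a.md', raw, window.repoFiles) yields
// { posts: { 'a.md': { path: 'posts/a.md', content: raw } } }.
// getFile walks the same segments back down and returns {} when the path is missing.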
function getFile(path: string, tree: RepoTree) {
const segments = path.split('/');
let obj: RepoTree = tree;
while (obj && segments.length) {
obj = obj[segments.shift() as string] as RepoTree;
}
return (obj as unknown as RepoFile) || {};
}
function writeFile(path: string, content: string | AssetProxy, tree: RepoTree) {
const segments = path.split('/');
let obj = tree;
while (segments.length > 1) {
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment] as RepoTree;
}
(obj[segments.shift() as string] as RepoFile) = { content, path };
}
function deleteFile(path: string, tree: RepoTree) {
unset(tree, path.split('/'));
}
const pageSize = 10;
function getCursor(
folder: string,
extension: string,
entries: ImplementationEntry[],
index: number,
depth: number,
) {
const count = entries.length;
const pageCount = Math.floor(count / pageSize);
return Cursor.create({
actions: [
...(index < pageCount ? ['next', 'last'] : []),
...(index > 0 ? ['prev', 'first'] : []),
],
meta: { index, count, pageSize, pageCount },
data: { folder, extension, index, pageCount, depth },
});
}
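// Cursor actions expose 'next'/'last' only while more pages remain and 'prev'/'first' only
// past the first page; `data` round-trips folder/extension/index/pageCount/depth so
// traverseCursor can recompute the slice for the requested page.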
export function getFolderFiles(
tree: RepoTree,
folder: string,
extension: string,
depth: number,
files = [] as RepoFile[],
path = folder,
) {
if (depth <= 0) {
return files;
}
Object.keys(tree[folder] || {}).forEach(key => {
if (extname(key)) {
const file = (tree[folder] as RepoTree)[key] as RepoFile;
if (!extension || key.endsWith(`.${extension}`)) {
files.unshift({ content: file.content, path: `${path}/${key}` });
}
} else {
const subTree = tree[folder] as RepoTree;
return getFolderFiles(subTree, key, extension, depth - 1, files, `${path}/${key}`);
}
});
return files;
}
export default class TestBackend implements Implementation {
mediaFolder: string;
options: { initialWorkflowStatus?: string };
constructor(config: Config, options = {}) {
this.options = options;
this.mediaFolder = config.media_folder;
}
isGitBackend() {
return false;
}
status() {
return Promise.resolve({ auth: { status: true }, api: { status: true, statusPage: '' } });
}
authComponent() {
return AuthenticationPage;
}
restoreUser() {
return this.authenticate();
}
authenticate() {
return Promise.resolve() as unknown as Promise<User>;
}
logout() {
return null;
}
getToken() {
return Promise.resolve('');
}
traverseCursor(cursor: Cursor, action: string) {
const { folder, extension, index, pageCount, depth } = cursor.data!.toObject() as {
folder: string;
extension: string;
index: number;
pageCount: number;
depth: number;
};
const newIndex = (() => {
if (action === 'next') {
return (index as number) + 1;
}
if (action === 'prev') {
return (index as number) - 1;
}
if (action === 'first') {
return 0;
}
if (action === 'last') {
return pageCount;
}
return 0;
})();
// TODO: stop assuming cursors are for collections
const allFiles = getFolderFiles(window.repoFiles, folder, extension, depth);
const allEntries = allFiles.map(f => ({
data: f.content as string,
file: { path: f.path, id: f.path },
}));
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
const newCursor = getCursor(folder, extension, allEntries, newIndex, depth);
return Promise.resolve({ entries, cursor: newCursor });
}
entriesByFolder(folder: string, extension: string, depth: number) {
const files = folder ? getFolderFiles(window.repoFiles, folder, extension, depth) : [];
const entries = files.map(f => ({
data: f.content as string,
file: { path: f.path, id: f.path },
}));
const cursor = getCursor(folder, extension, entries, 0, depth);
const ret = take(entries, pageSize);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return Promise.resolve(ret);
}
entriesByFiles(files: ImplementationFile[]) {
return Promise.all(
files.map(file => ({
file,
data: getFile(file.path, window.repoFiles).content as string,
})),
);
}
getEntry(path: string) {
return Promise.resolve({
file: { path, id: null },
data: getFile(path, window.repoFiles).content as string,
});
}
unpublishedEntries() {
return Promise.resolve(Object.keys(window.repoFilesUnpublished));
}
unpublishedEntry({ id, collection, slug }: { id?: string; collection?: string; slug?: string }) {
if (id) {
const parts = id.split('/');
collection = parts[0];
slug = parts[1];
}
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
if (!entry) {
return Promise.reject(
new EditorialWorkflowError('content is not under editorial workflow', true),
);
}
return Promise.resolve(entry);
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string) {
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
const file = entry.diffs.find(d => d.path === path);
return file?.content as string;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string) {
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
const file = entry.diffs.find(d => d.path === path);
return this.normalizeAsset(file?.content as AssetProxy);
}
deleteUnpublishedEntry(collection: string, slug: string) {
delete window.repoFilesUnpublished[`${collection}/${slug}`];
return Promise.resolve();
}
async addOrUpdateUnpublishedEntry(
key: string,
dataFiles: DataFile[],
assetProxies: AssetProxy[],
slug: string,
collection: string,
status: string,
) {
const diffs: Diff[] = [];
dataFiles.forEach(dataFile => {
const { path, newPath, raw } = dataFile;
const currentDataFile = window.repoFilesUnpublished[key]?.diffs.find(d => d.path === path);
const originalPath = currentDataFile ? currentDataFile.originalPath : path;
diffs.push({
originalPath,
id: newPath || path,
path: newPath || path,
newFile: isEmpty(getFile(originalPath as string, window.repoFiles)),
status: 'added',
content: raw,
});
});
assetProxies.forEach(a => {
const asset = this.normalizeAsset(a);
diffs.push({
id: asset.id,
path: asset.path,
newFile: true,
status: 'added',
content: asset,
});
});
window.repoFilesUnpublished[key] = {
slug,
collection,
status,
diffs,
updatedAt: new Date().toISOString(),
};
}
async persistEntry(entry: Entry, options: PersistOptions) {
if (options.useWorkflow) {
const slug = entry.dataFiles[0].slug;
const key = `${options.collectionName}/${slug}`;
const currentEntry = window.repoFilesUnpublished[key];
const status =
currentEntry?.status || options.status || (this.options.initialWorkflowStatus as string);
await this.addOrUpdateUnpublishedEntry(
key,
entry.dataFiles,
entry.assets,
slug,
options.collectionName as string,
status,
);
return Promise.resolve();
}
entry.dataFiles.forEach(dataFile => {
const { path, raw } = dataFile;
writeFile(path, raw, window.repoFiles);
});
entry.assets.forEach(a => {
writeFile(a.path, a, window.repoFiles);
});
return Promise.resolve();
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
window.repoFilesUnpublished[`${collection}/${slug}`].status = newStatus;
return Promise.resolve();
}
publishUnpublishedEntry(collection: string, slug: string) {
const key = `${collection}/${slug}`;
const unpubEntry = window.repoFilesUnpublished[key];
delete window.repoFilesUnpublished[key];
const tree = window.repoFiles;
unpubEntry.diffs.forEach(d => {
if (d.originalPath && !d.newFile) {
const originalPath = d.originalPath;
const sourceDir = dirname(originalPath);
const destDir = dirname(d.path);
const toMove = getFolderFiles(tree, originalPath.split('/')[0], '', 100).filter(f =>
f.path.startsWith(sourceDir),
);
toMove.forEach(f => {
deleteFile(f.path, tree);
writeFile(f.path.replace(sourceDir, destDir), f.content, tree);
});
}
writeFile(d.path, d.content, tree);
});
return Promise.resolve();
}
getMedia(mediaFolder = this.mediaFolder) {
const files = getFolderFiles(window.repoFiles, mediaFolder.split('/')[0], '', 100).filter(f =>
f.path.startsWith(mediaFolder),
);
const assets = files.map(f => this.normalizeAsset(f.content as AssetProxy));
return Promise.resolve(assets);
}
async getMediaFile(path: string) {
const asset = getFile(path, window.repoFiles).content as AssetProxy;
const url = asset.toString();
const name = basename(path);
const blob = await fetch(url).then(res => res.blob());
const fileObj = new File([blob], name);
return {
id: url,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url,
};
}
normalizeAsset(assetProxy: AssetProxy) {
const fileObj = assetProxy.fileObj as File;
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = {
id: uuid(),
name,
size,
path: assetProxy.path,
url,
displayURL: url,
fileObj,
};
return normalizedAsset;
}
persistMedia(assetProxy: AssetProxy) {
const normalizedAsset = this.normalizeAsset(assetProxy);
writeFile(assetProxy.path, assetProxy, window.repoFiles);
return Promise.resolve(normalizedAsset);
}
deleteFiles(paths: string[]) {
paths.forEach(path => {
deleteFile(path, window.repoFiles);
});
return Promise.resolve();
}
async getDeployPreview() {
return null;
}
}

View File

@ -0,0 +1,8 @@
import TestBackend from './implementation';
import AuthenticationPage from './AuthenticationPage';
export const NetlifyCmsBackendTest = {
TestBackend,
AuthenticationPage,
};
export { TestBackend, AuthenticationPage };

View File

@ -1,11 +1,11 @@
import React from 'react';
import { render } from 'react-dom';
import { Provider, connect } from 'react-redux';
import { Route, Router } from 'react-router-dom';
import { Router } from 'react-router-dom';
import { I18n } from 'react-polyglot';
import { GlobalStyles } from 'netlify-cms-ui-default';
import { store } from './redux';
import { GlobalStyles } from './ui';
import { store } from './store';
import { history } from './routing/history';
import { loadConfig } from './actions/config';
import { authenticateUser } from './actions/auth';
@ -24,7 +24,7 @@ function TranslatedApp({ locale, config }) {
<I18n locale={locale} messages={getPhrases(locale)}>
<ErrorBoundary showBackup config={config}>
<Router history={history}>
<Route component={App} />
<App />
</Router>
</ErrorBoundary>
</I18n>
@ -44,7 +44,7 @@ function bootstrap(opts = {}) {
* Log the version number.
*/
if (typeof NETLIFY_CMS_CORE_VERSION === 'string') {
console.log(`netlify-cms-core ${NETLIFY_CMS_CORE_VERSION}`);
console.info(`netlify-cms-core ${NETLIFY_CMS_CORE_VERSION}`);
}
/**

View File

@ -1,29 +1,31 @@
import styled from '@emotion/styled';
import PropTypes from 'prop-types';
import React from 'react';
import { hot } from 'react-hot-loader';
import { translate } from 'react-polyglot';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { connect } from 'react-redux';
import { Route, Switch, Redirect } from 'react-router-dom';
import { Notifs } from 'redux-notifications';
import TopBarProgress from 'react-topbar-progress-indicator';
import { Loader, colors } from 'netlify-cms-ui-default';
import { Redirect, Route, Switch } from 'react-router-dom';
import { ScrollSync } from 'react-scroll-sync';
import TopBarProgress from 'react-topbar-progress-indicator';
import { loginUser, logoutUser } from '../../actions/auth';
import { currentBackend } from '../../backend';
import { createNewEntry } from '../../actions/collections';
import { openMediaLibrary } from '../../actions/mediaLibrary';
import MediaLibrary from '../MediaLibrary/MediaLibrary';
import { Toast } from '../UI';
import { currentBackend } from '../../backend';
import { EDITORIAL_WORKFLOW, SIMPLE } from '../../constants/publishModes';
import { history } from '../../routing/history';
import { SIMPLE, EDITORIAL_WORKFLOW } from '../../constants/publishModes';
import { colors, Loader } from '../../ui';
import Collection from '../Collection/Collection';
import Workflow from '../Workflow/Workflow';
import Editor from '../Editor/Editor';
import NotFoundPage from './NotFoundPage';
import MediaLibrary from '../MediaLibrary/MediaLibrary';
import Page from '../page/Page';
import Snackbars from '../snackbar/Snackbars';
import { Alert } from '../UI/Alert';
import { Confirm } from '../UI/Confirm';
import Workflow from '../Workflow/Workflow';
import Header from './Header';
import NotFoundPage from './NotFoundPage';
TopBarProgress.config({
barColors: {
@ -39,7 +41,7 @@ const AppRoot = styled.div`
min-width: 1200px;
height: 100vh;
position: relative;
overflow: hidden;
overflow-y: auto;
`;
const AppWrapper = styled.div`
@ -65,7 +67,13 @@ const ErrorCodeBlock = styled.pre`
`;
function getDefaultPath(collections) {
const first = collections.filter(collection => collection.get('hide') !== true && (!collection.has('files') || collection.get('files').size > 1)).first();
const first = collections
.filter(
collection =>
collection.get('hide') !== true &&
(!collection.has('files') || collection.get('files').size > 1),
)
.first();
if (first) {
return `/collections/${first.get('name')}`;
} else {
@ -175,7 +183,6 @@ class App extends React.Component {
return (
<div>
<Notifs CustomComponent={Toast} />
{React.createElement(backend.authComponent(), {
onLogin: this.handleLogin.bind(this),
error: auth.error,
@ -234,7 +241,7 @@ class App extends React.Component {
<ScrollSync enabled={scrollSyncEnabled}>
<AppRoot id="cms-root">
<AppWrapper className="cms-wrapper">
<Notifs CustomComponent={Toast} />
<Snackbars />
<Header
user={user}
collections={collections}
@ -302,9 +309,12 @@ class App extends React.Component {
return <Redirect to={`/collections/${name}/entries/${entryName}`} />;
}}
/>
<Route path="/page/:id" render={props => <Page {...props} />} />
<Route component={NotFoundPage} />
</Switch>
{useMediaLibrary ? <MediaLibrary /> : null}
<Alert />
<Confirm />
</AppMainContainer>
</AppWrapper>
</AppRoot>

View File

@ -5,6 +5,8 @@ import styled from '@emotion/styled';
import { css } from '@emotion/react';
import { translate } from 'react-polyglot';
import { NavLink } from 'react-router-dom';
import { connect } from 'react-redux';
import {
Icon,
Dropdown,
@ -15,9 +17,7 @@ import {
shadows,
buttons,
zIndex,
} from 'netlify-cms-ui-default';
import { connect } from 'react-redux';
} from '../../ui';
import { SettingsDropdown } from '../UI';
import { checkBackendStatus } from '../../actions/status';

View File

@ -1,9 +1,10 @@
import React from 'react';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { lengths } from 'netlify-cms-ui-default';
import PropTypes from 'prop-types';
import { lengths } from '../../ui';
const NotFoundContainer = styled.div`
margin: ${lengths.pageMargin};
`;

View File

@ -1,205 +0,0 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { connect } from 'react-redux';
import { translate } from 'react-polyglot';
import { lengths, components } from 'netlify-cms-ui-default';
import { getNewEntryUrl } from '../../lib/urlHelper';
import Sidebar from './Sidebar';
import CollectionTop from './CollectionTop';
import EntriesCollection from './Entries/EntriesCollection';
import EntriesSearch from './Entries/EntriesSearch';
import CollectionControls from './CollectionControls';
import { sortByField, filterByField, changeViewStyle, groupByField } from '../../actions/entries';
import {
selectSortableFields,
selectViewFilters,
selectViewGroups,
} from '../../reducers/collections';
import {
selectEntriesSort,
selectEntriesFilter,
selectEntriesGroup,
selectViewStyle,
} from '../../reducers/entries';
const CollectionContainer = styled.div`
margin: ${lengths.pageMargin};
`;
const CollectionMain = styled.main`
padding-left: 280px;
`;
const SearchResultContainer = styled.div`
${components.cardTop};
margin-bottom: 22px;
`;
const SearchResultHeading = styled.h1`
${components.cardTopHeading};
`;
export class Collection extends React.Component {
static propTypes = {
searchTerm: PropTypes.string,
collectionName: PropTypes.string,
isSearchResults: PropTypes.bool,
isSingleSearchResult: PropTypes.bool,
collection: ImmutablePropTypes.map.isRequired,
collections: ImmutablePropTypes.map.isRequired,
sortableFields: PropTypes.array,
sort: ImmutablePropTypes.orderedMap,
onSortClick: PropTypes.func.isRequired,
};
renderEntriesCollection = () => {
const { collection, filterTerm, viewStyle } = this.props;
return (
<EntriesCollection collection={collection} viewStyle={viewStyle} filterTerm={filterTerm} />
);
};
renderEntriesSearch = () => {
const { searchTerm, collections, collection, isSingleSearchResult } = this.props;
return (
<EntriesSearch
collections={isSingleSearchResult ? collections.filter(c => c === collection) : collections}
searchTerm={searchTerm}
/>
);
};
render() {
const {
collection,
collections,
collectionName,
isSearchEnabled,
isSearchResults,
isSingleSearchResult,
searchTerm,
sortableFields,
onSortClick,
sort,
viewFilters,
viewGroups,
filterTerm,
t,
onFilterClick,
onGroupClick,
filter,
group,
onChangeViewStyle,
viewStyle,
} = this.props;
let newEntryUrl = collection.get('create') ? getNewEntryUrl(collectionName) : '';
if (newEntryUrl && filterTerm) {
newEntryUrl = getNewEntryUrl(collectionName);
if (filterTerm) {
newEntryUrl = `${newEntryUrl}?path=${filterTerm}`;
}
}
const searchResultKey =
'collection.collectionTop.searchResults' + (isSingleSearchResult ? 'InCollection' : '');
return (
<CollectionContainer>
<Sidebar
collections={collections}
collection={(!isSearchResults || isSingleSearchResult) && collection}
isSearchEnabled={isSearchEnabled}
searchTerm={searchTerm}
filterTerm={filterTerm}
/>
<CollectionMain>
{isSearchResults ? (
<SearchResultContainer>
<SearchResultHeading>
{t(searchResultKey, { searchTerm, collection: collection.get('label') })}
</SearchResultHeading>
</SearchResultContainer>
) : (
<>
<CollectionTop collection={collection} newEntryUrl={newEntryUrl} />
<CollectionControls
viewStyle={viewStyle}
onChangeViewStyle={onChangeViewStyle}
sortableFields={sortableFields}
onSortClick={onSortClick}
sort={sort}
viewFilters={viewFilters}
viewGroups={viewGroups}
t={t}
onFilterClick={onFilterClick}
onGroupClick={onGroupClick}
filter={filter}
group={group}
/>
</>
)}
{isSearchResults ? this.renderEntriesSearch() : this.renderEntriesCollection()}
</CollectionMain>
</CollectionContainer>
);
}
}
function mapStateToProps(state, ownProps) {
const { collections } = state;
const isSearchEnabled = state.config && state.config.search != false;
const { isSearchResults, match, t } = ownProps;
const { name, searchTerm = '', filterTerm = '' } = match.params;
const collection = name ? collections.get(name) : collections.first();
const sort = selectEntriesSort(state.entries, collection.get('name'));
const sortableFields = selectSortableFields(collection, t);
const viewFilters = selectViewFilters(collection);
const viewGroups = selectViewGroups(collection);
const filter = selectEntriesFilter(state.entries, collection.get('name'));
const group = selectEntriesGroup(state.entries, collection.get('name'));
const viewStyle = selectViewStyle(state.entries);
return {
collection,
collections,
collectionName: name,
isSearchEnabled,
isSearchResults,
searchTerm,
filterTerm,
sort,
sortableFields,
viewFilters,
viewGroups,
filter,
group,
viewStyle,
};
}
const mapDispatchToProps = {
sortByField,
filterByField,
changeViewStyle,
groupByField,
};
function mergeProps(stateProps, dispatchProps, ownProps) {
return {
...stateProps,
...ownProps,
onSortClick: (key, direction) =>
dispatchProps.sortByField(stateProps.collection, key, direction),
onFilterClick: filter => dispatchProps.filterByField(stateProps.collection, filter),
onGroupClick: group => dispatchProps.groupByField(stateProps.collection, group),
onChangeViewStyle: viewStyle => dispatchProps.changeViewStyle(viewStyle),
};
}
const ConnectedCollection = connect(mapStateToProps, mapDispatchToProps, mergeProps)(Collection);
export default translate()(ConnectedCollection);

View File

@ -0,0 +1,236 @@
import styled from '@emotion/styled';
import React, { useCallback, useEffect, useMemo } from 'react';
import { translate } from 'react-polyglot';
import { connect } from 'react-redux';
import { changeViewStyle, filterByField, groupByField, sortByField } from '../../actions/entries';
import { SortDirection } from '../../interface';
import { getNewEntryUrl } from '../../lib/urlHelper';
import {
selectSortableFields,
selectViewFilters,
selectViewGroups,
} from '../../reducers/collections';
import {
selectEntriesFilter,
selectEntriesGroup,
selectEntriesSort,
selectViewStyle,
} from '../../reducers/entries';
import { components, lengths } from '../../ui';
import CollectionControls from './CollectionControls';
import CollectionTop from './CollectionTop';
import EntriesCollection from './Entries/EntriesCollection';
import EntriesSearch from './Entries/EntriesSearch';
import Sidebar from './Sidebar';
import type { RouteComponentProps } from 'react-router-dom';
import type {
CmsSortableFieldsDefault,
TranslatedProps,
ViewFilter,
ViewGroup,
} from '../../interface';
import type { Collection, State } from '../../types/redux';
import type { StaticallyTypedRecord } from '../../types/immutable';
const CollectionContainer = styled.div`
margin: ${lengths.pageMargin};
`;
const CollectionMain = styled.main`
padding-left: 280px;
`;
const SearchResultContainer = styled.div`
${components.cardTop};
margin-bottom: 22px;
`;
const SearchResultHeading = styled.h1`
${components.cardTopHeading};
`;
interface CollectionRouterParams {
name: string;
searchTerm?: string;
filterTerm?: string;
}
interface CollectionViewProps extends RouteComponentProps<CollectionRouterParams> {
isSearchResults?: boolean;
isSingleSearchResult?: boolean;
}
const CollectionView = ({
collection,
collections,
collectionName,
isSearchEnabled,
isSearchResults,
isSingleSearchResult,
searchTerm,
sortableFields,
onSortClick,
sort,
viewFilters,
viewGroups,
filterTerm,
t,
onFilterClick,
onGroupClick,
filter,
group,
onChangeViewStyle,
viewStyle,
}: ReturnType<typeof mergeProps>) => {
const newEntryUrl = useMemo(() => {
let url = collection.get('create') ? getNewEntryUrl(collectionName) : '';
if (url && filterTerm) {
url = `${url}?path=${filterTerm}`;
}
return url;
}, [collection, collectionName, filterTerm]);
const searchResultKey = useMemo(
() => `collection.collectionTop.searchResults${isSingleSearchResult ? 'InCollection' : ''}`,
[isSingleSearchResult],
);
const renderEntriesCollection = useCallback(() => {
return (
<EntriesCollection collection={collection} viewStyle={viewStyle} filterTerm={filterTerm} />
);
}, [collection, filterTerm, viewStyle]);
const renderEntriesSearch = useCallback(() => {
return (
<EntriesSearch
collections={isSingleSearchResult ? collections.filter(c => c === collection) : collections}
searchTerm={searchTerm}
/>
);
}, [searchTerm, collections, collection, isSingleSearchResult]);
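// Apply the collection's configured default sort once, but only when no sort key has been
// chosen yet; the early returns below skip collections without a `sortable_fields.default`.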
useEffect(() => {
if (sort?.first()?.get('key')) {
return;
}
const defaultSort = collection.getIn(['sortable_fields', 'default']) as
| StaticallyTypedRecord<CmsSortableFieldsDefault>
| undefined;
if (!defaultSort || !defaultSort.get('field')) {
return;
}
onSortClick(defaultSort.get('field'), defaultSort.get('direction') ?? SortDirection.Ascending);
}, [collection]);
return (
<CollectionContainer>
<Sidebar
collections={collections}
collection={(!isSearchResults || isSingleSearchResult) && collection}
isSearchEnabled={isSearchEnabled}
searchTerm={searchTerm}
filterTerm={filterTerm}
/>
<CollectionMain>
{isSearchResults ? (
<SearchResultContainer>
<SearchResultHeading>
{t(searchResultKey, { searchTerm, collection: collection.get('label') })}
</SearchResultHeading>
</SearchResultContainer>
) : (
<>
<CollectionTop collection={collection} newEntryUrl={newEntryUrl} />
<CollectionControls
viewStyle={viewStyle}
onChangeViewStyle={onChangeViewStyle}
sortableFields={sortableFields}
onSortClick={onSortClick}
sort={sort}
viewFilters={viewFilters}
viewGroups={viewGroups}
t={t}
onFilterClick={onFilterClick}
onGroupClick={onGroupClick}
filter={filter}
group={group}
/>
</>
)}
{isSearchResults ? renderEntriesSearch() : renderEntriesCollection()}
</CollectionMain>
</CollectionContainer>
);
};
function mapStateToProps(state: State, ownProps: TranslatedProps<CollectionViewProps>) {
const { collections } = state;
const isSearchEnabled = state.config && state.config.search != false;
const { isSearchResults, match, t } = ownProps;
const { name, searchTerm = '', filterTerm = '' } = match.params;
const collection: Collection = name ? collections.get(name) : collections.first();
const sort = selectEntriesSort(state.entries, collection.get('name'));
const sortableFields = selectSortableFields(collection, t);
const viewFilters = selectViewFilters(collection);
const viewGroups = selectViewGroups(collection);
const filter = selectEntriesFilter(state.entries, collection.get('name'));
const group = selectEntriesGroup(state.entries, collection.get('name'));
const viewStyle = selectViewStyle(state.entries);
return {
collection,
collections,
collectionName: name,
isSearchEnabled,
isSearchResults,
searchTerm,
filterTerm,
sort,
sortableFields,
viewFilters,
viewGroups,
filter,
group,
viewStyle,
};
}
const mapDispatchToProps = {
sortByField,
filterByField,
changeViewStyle,
groupByField,
};
function mergeProps(
stateProps: ReturnType<typeof mapStateToProps>,
dispatchProps: typeof mapDispatchToProps,
ownProps: TranslatedProps<CollectionViewProps>,
) {
return {
...stateProps,
...ownProps,
onSortClick: (key: string, direction: SortDirection) =>
dispatchProps.sortByField(stateProps.collection, key, direction),
onFilterClick: (filter: ViewFilter) =>
dispatchProps.filterByField(stateProps.collection, filter),
onGroupClick: (group: ViewGroup) => dispatchProps.groupByField(stateProps.collection, group),
onChangeViewStyle: (viewStyle: string) => dispatchProps.changeViewStyle(viewStyle),
};
}
const ConnectedCollection = connect(
mapStateToProps,
mapDispatchToProps,
mergeProps,
)(CollectionView);
export default translate()(ConnectedCollection);

View File

@ -1,7 +1,7 @@
import React from 'react';
import styled from '@emotion/styled';
import { lengths } from 'netlify-cms-ui-default';
import { lengths } from '../../ui';
import ViewStyleControl from './ViewStyleControl';
import SortControl from './SortControl';
import FilterControl from './FilterControl';

View File

@ -1,10 +1,11 @@
import React from 'react';
import styled from '@emotion/styled';
import { colorsRaw, colors, Icon, lengths, zIndex } from 'netlify-cms-ui-default';
import { translate } from 'react-polyglot';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { colorsRaw, colors, Icon, lengths, zIndex } from '../../ui';
const SearchContainer = styled.div`
margin: 0 12px;
position: relative;

View File

@ -4,7 +4,8 @@ import React from 'react';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { Link } from 'react-router-dom';
import { components, buttons, shadows } from 'netlify-cms-ui-default';
import { components, buttons, shadows } from '../../ui';
const CollectionTopContainer = styled.div`
${components.cardTop};

View File

@ -1,7 +1,8 @@
import React from 'react';
import { css } from '@emotion/react';
import styled from '@emotion/styled';
import { buttons, StyledDropdownButton, colors } from 'netlify-cms-ui-default';
import { buttons, StyledDropdownButton, colors } from '../../ui';
const Button = styled(StyledDropdownButton)`
${buttons.button};

View File

@ -3,8 +3,8 @@ import React from 'react';
import styled from '@emotion/styled';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { translate } from 'react-polyglot';
import { Loader, lengths } from 'netlify-cms-ui-default';
import { Loader, lengths } from '../../../ui';
import EntryListing from './EntryListing';
const PaginationMessage = styled.div`

View File

@ -5,9 +5,9 @@ import { connect } from 'react-redux';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { partial } from 'lodash';
import { Cursor } from 'netlify-cms-lib-util';
import { colors } from 'netlify-cms-ui-default';
import { colors } from '../../../ui';
import { Cursor } from '../../../lib/util';
import {
loadEntries as actionLoadEntries,
traverseCollectionCursor as actionTraverseCollectionCursor,
@ -56,7 +56,7 @@ function withGroups(groups, entries, EntriesToRender, t) {
});
}
export class EntriesCollection extends React.Component {
class EntriesCollection extends React.Component {
static propTypes = {
collection: ImmutablePropTypes.map.isRequired,
page: PropTypes.number,

View File

@ -3,8 +3,8 @@ import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { connect } from 'react-redux';
import { isEqual } from 'lodash';
import { Cursor } from 'netlify-cms-lib-util';
import { Cursor } from '../../../lib/util';
import { selectSearchedEntries } from '../../../reducers';
import {
searchEntries as actionSearchEntries,

View File

@ -2,8 +2,8 @@ import React from 'react';
import styled from '@emotion/styled';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { colors, colorsRaw, components, lengths, zIndex } from 'netlify-cms-ui-default';
import { colors, colorsRaw, components, lengths, zIndex } from '../../../ui';
import { boundGetAsset } from '../../../actions/media';
import { VIEW_STYLE_LIST, VIEW_STYLE_GRID } from '../../../constants/collectionViews';
import { selectIsLoadingAsset } from '../../../reducers/medias';

View File

@ -1,155 +0,0 @@
import React from 'react';
import { render } from '@testing-library/react';
import { fromJS } from 'immutable';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import ConnectedEntriesCollection, {
EntriesCollection,
filterNestedEntries,
} from '../EntriesCollection';
jest.mock('../Entries', () => 'mock-entries');
const middlewares = [];
const mockStore = configureStore(middlewares);
function renderWithRedux(component, { store } = {}) {
function Wrapper({ children }) {
return <Provider store={store}>{children}</Provider>;
}
return render(component, { wrapper: Wrapper });
}
function toEntriesState(collection, entriesArray) {
const entries = entriesArray.reduce(
(acc, entry) => {
acc.entities[`${collection.get('name')}.${entry.slug}`] = entry;
acc.pages[collection.get('name')].ids.push(entry.slug);
return acc;
},
{ pages: { [collection.get('name')]: { ids: [] } }, entities: {} },
);
return fromJS(entries);
}
describe('filterNestedEntries', () => {
it('should return only immediate children for non root path', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];
const entries = fromJS(entriesArray);
expect(filterNestedEntries('dir3', 'src/pages', entries).toJS()).toEqual([
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
]);
});
it('should return immediate children and root for root path', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];
const entries = fromJS(entriesArray);
expect(filterNestedEntries('', 'src/pages', entries).toJS()).toEqual([
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
]);
});
});
describe('EntriesCollection', () => {
const collection = fromJS({ name: 'pages', label: 'Pages', folder: 'src/pages' });
const props = {
t: jest.fn(),
loadEntries: jest.fn(),
traverseCollectionCursor: jest.fn(),
isFetching: false,
cursor: {},
collection,
};
it('should render with entries', () => {
const entries = fromJS([{ slug: 'index' }]);
const { asFragment } = render(<EntriesCollection {...props} entries={entries} />);
expect(asFragment()).toMatchSnapshot();
});
it('should render connected component', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir2/index', path: 'src/pages/dir2/index.md', data: { title: 'File 2' } },
];
const store = mockStore({
entries: toEntriesState(collection, entriesArray),
cursors: fromJS({}),
});
const { asFragment } = renderWithRedux(<ConnectedEntriesCollection collection={collection} />, {
store,
});
expect(asFragment()).toMatchSnapshot();
});
it('should render show only immediate children for nested collection', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];
const store = mockStore({
entries: toEntriesState(collection, entriesArray),
cursors: fromJS({}),
});
const { asFragment } = renderWithRedux(
<ConnectedEntriesCollection collection={collection.set('nested', fromJS({ depth: 10 }))} />,
{
store,
},
);
expect(asFragment()).toMatchSnapshot();
});
it('should render apply filter term for nested collections', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'dir1/index', path: 'src/pages/dir1/index.md', data: { title: 'File 1' } },
{ slug: 'dir1/dir2/index', path: 'src/pages/dir1/dir2/index.md', data: { title: 'File 2' } },
{ slug: 'dir3/index', path: 'src/pages/dir3/index.md', data: { title: 'File 3' } },
{ slug: 'dir3/dir4/index', path: 'src/pages/dir3/dir4/index.md', data: { title: 'File 4' } },
];
const store = mockStore({
entries: toEntriesState(collection, entriesArray),
cursors: fromJS({}),
});
const { asFragment } = renderWithRedux(
<ConnectedEntriesCollection
collection={collection.set('nested', fromJS({ depth: 10 }))}
filterTerm="dir3/dir4"
/>,
{
store,
},
);
expect(asFragment()).toMatchSnapshot();
});
});

View File

@ -1,49 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`EntriesCollection should render apply filter term for nested collections 1`] = `
<DocumentFragment>
<mock-entries
collectionname="Pages"
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\", \\"nested\\": Map { \\"depth\\": 10 } }"
cursor="[object Object]"
entries="List []"
isfetching="false"
/>
</DocumentFragment>
`;
exports[`EntriesCollection should render connected component 1`] = `
<DocumentFragment>
<mock-entries
collectionname="Pages"
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\" }"
cursor="[object Object]"
entries="List [ Map { \\"slug\\": \\"index\\", \\"path\\": \\"src/pages/index.md\\", \\"data\\": Map { \\"title\\": \\"Root\\" } }, Map { \\"slug\\": \\"dir1/index\\", \\"path\\": \\"src/pages/dir1/index.md\\", \\"data\\": Map { \\"title\\": \\"File 1\\" } }, Map { \\"slug\\": \\"dir2/index\\", \\"path\\": \\"src/pages/dir2/index.md\\", \\"data\\": Map { \\"title\\": \\"File 2\\" } } ]"
isfetching="false"
/>
</DocumentFragment>
`;
exports[`EntriesCollection should render show only immediate children for nested collection 1`] = `
<DocumentFragment>
<mock-entries
collectionname="Pages"
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\", \\"nested\\": Map { \\"depth\\": 10 } }"
cursor="[object Object]"
entries="List [ Map { \\"slug\\": \\"index\\", \\"path\\": \\"src/pages/index.md\\", \\"data\\": Map { \\"title\\": \\"Root\\" } }, Map { \\"slug\\": \\"dir1/index\\", \\"path\\": \\"src/pages/dir1/index.md\\", \\"data\\": Map { \\"title\\": \\"File 1\\" } }, Map { \\"slug\\": \\"dir3/index\\", \\"path\\": \\"src/pages/dir3/index.md\\", \\"data\\": Map { \\"title\\": \\"File 3\\" } } ]"
isfetching="false"
/>
</DocumentFragment>
`;
exports[`EntriesCollection should render with entries 1`] = `
<DocumentFragment>
<mock-entries
collectionname="Pages"
collections="Map { \\"name\\": \\"pages\\", \\"label\\": \\"Pages\\", \\"folder\\": \\"src/pages\\" }"
cursor="[object Object]"
entries="List [ Map { \\"slug\\": \\"index\\" } ]"
isfetching="false"
/>
</DocumentFragment>
`;

View File

@ -1,7 +1,7 @@
import React from 'react';
import { translate } from 'react-polyglot';
import { Dropdown, DropdownCheckedItem } from 'netlify-cms-ui-default';
import { Dropdown, DropdownCheckedItem } from '../../ui';
import { ControlButton } from './ControlButton';
function FilterControl({ viewFilters, t, onFilterClick, filter }) {

View File

@ -1,7 +1,7 @@
import React from 'react';
import { translate } from 'react-polyglot';
import { Dropdown, DropdownItem } from 'netlify-cms-ui-default';
import { Dropdown, DropdownItem } from '../../ui';
import { ControlButton } from './ControlButton';
function GroupControl({ viewGroups, t, onGroupClick, group }) {

View File

@ -5,12 +5,12 @@ import styled from '@emotion/styled';
import { connect } from 'react-redux';
import { NavLink } from 'react-router-dom';
import { dirname, sep } from 'path';
import { stringTemplate } from 'netlify-cms-lib-widgets';
import { Icon, colors, components } from 'netlify-cms-ui-default';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { sortBy } from 'lodash';
import { Icon, colors, components } from '../../ui';
import { stringTemplate } from '../../lib/widgets';
import { selectEntries } from '../../reducers/entries';
import { selectEntryCollectionTitle } from '../../reducers/collections';

View File

@ -5,8 +5,8 @@ import styled from '@emotion/styled';
import { css } from '@emotion/react';
import { translate } from 'react-polyglot';
import { NavLink } from 'react-router-dom';
import { Icon, components, colors } from 'netlify-cms-ui-default';
import { Icon, components, colors } from '../../ui';
import { searchCollections } from '../../actions/collections';
import CollectionSearch from './CollectionSearch';
import NestedCollection from './NestedCollection';
@ -97,7 +97,7 @@ const IconWrapper = styled.div`
margin-right: 8px;
`;
export class Sidebar extends React.Component {
class Sidebar extends React.Component {
static propTypes = {
collections: ImmutablePropTypes.map.isRequired,
collection: ImmutablePropTypes.map,
@ -142,7 +142,7 @@ export class Sidebar extends React.Component {
);
};
renderAdditionalLink = ({ title, url, iconName }) => {
renderAdditionalLink = ({ id, title, data, iconName }) => {
let icon = <Icon type="write" />;
if (iconName) {
const storedIcon = getIcon(iconName);
@ -150,12 +150,25 @@ export class Sidebar extends React.Component {
icon = storedIcon;
}
}
const content = (
<>
<IconWrapper>{icon}</IconWrapper>
{title}
</>
);
return (
<li key={title}>
<AdditionalLink href={url} target="_blank" rel="noopener">
<IconWrapper>{icon}</IconWrapper>
{title}
</AdditionalLink>
{typeof data === 'string' ? (
<AdditionalLink href={data} target="_blank" rel="noopener">
{content}
</AdditionalLink>
) : (
<SidebarNavLink to={`/page/${id}`} activeClassName="sidebar-active">
{content}
</SidebarNavLink>
)}
</li>
);
};
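For readers skimming the hunk above: each additional-link entry now carries `{ id, title, data, iconName }`, and rendering branches on the type of `data`. A minimal standalone sketch of that dispatch (the helper name is illustrative, not an export of the CMS):

```js
// Illustrative helper only — mirrors the branch in renderAdditionalLink above.
// A string `data` is treated as an external URL; any other value means the
// link points at a custom page served under /page/<id>.
function resolveAdditionalLinkTarget({ id, data }) {
  if (typeof data === 'string') {
    return { kind: 'external', href: data };
  }
  return { kind: 'page', to: `/page/${id}` };
}
```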

View File

@ -1,8 +1,8 @@
import React from 'react';
import { translate } from 'react-polyglot';
import { Dropdown, DropdownItem } from 'netlify-cms-ui-default';
import { SortDirection } from '../../types/redux';
import { SortDirection } from '../../interface';
import { Dropdown, DropdownItem } from '../../ui';
import { ControlButton } from './ControlButton';
function nextSortDirection(direction) {

View File

@ -1,7 +1,7 @@
import React from 'react';
import styled from '@emotion/styled';
import { Icon, buttons, colors } from 'netlify-cms-ui-default';
import { Icon, buttons, colors } from '../../ui';
import { VIEW_STYLE_LIST, VIEW_STYLE_GRID } from '../../constants/collectionViews';
const ViewControlsSection = styled.div`

View File

@ -1,75 +0,0 @@
import React from 'react';
import { render } from '@testing-library/react';
import { fromJS } from 'immutable';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import ConnectedCollection, { Collection } from '../Collection';
jest.mock('../Entries/EntriesCollection', () => 'mock-entries-collection');
jest.mock('../CollectionTop', () => 'mock-collection-top');
jest.mock('../CollectionControls', () => 'mock-collection-controls');
jest.mock('../Sidebar', () => 'mock-sidebar');
const middlewares = [];
const mockStore = configureStore(middlewares);
function renderWithRedux(component, { store } = {}) {
function Wrapper({ children }) {
return <Provider store={store}>{children}</Provider>;
}
return render(component, { wrapper: Wrapper });
}
describe('Collection', () => {
const collection = fromJS({
name: 'pages',
sortable_fields: [],
view_filters: [],
view_groups: [],
});
const props = {
collections: fromJS([collection]).toOrderedMap(),
collection,
collectionName: collection.get('name'),
t: jest.fn(key => key),
onSortClick: jest.fn(),
};
it('should render with collection without create url', () => {
const { asFragment } = render(
<Collection {...props} collection={collection.set('create', false)} />,
);
expect(asFragment()).toMatchSnapshot();
});
it('should render with collection with create url', () => {
const { asFragment } = render(
<Collection {...props} collection={collection.set('create', true)} />,
);
expect(asFragment()).toMatchSnapshot();
});
it('should render with collection with create url and path', () => {
const { asFragment } = render(
<Collection {...props} collection={collection.set('create', true)} filterTerm="dir1/dir2" />,
);
expect(asFragment()).toMatchSnapshot();
});
it('should render connected component', () => {
const store = mockStore({
collections: props.collections,
entries: fromJS({}),
});
const { asFragment } = renderWithRedux(<ConnectedCollection match={{ params: {} }} />, {
store,
});
expect(asFragment()).toMatchSnapshot();
});
});

View File

@ -1,442 +0,0 @@
import React from 'react';
import { MemoryRouter } from 'react-router-dom';
import { render, fireEvent } from '@testing-library/react';
import { fromJS } from 'immutable';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import ConnectedNestedCollection, {
NestedCollection,
getTreeData,
walk,
updateNode,
} from '../NestedCollection';
jest.mock('netlify-cms-ui-default', () => {
const actual = jest.requireActual('netlify-cms-ui-default');
return {
...actual,
Icon: 'mocked-icon',
};
});
const middlewares = [];
const mockStore = configureStore(middlewares);
function renderWithRedux(component, { store } = {}) {
function Wrapper({ children }) {
return <Provider store={store}>{children}</Provider>;
}
return render(component, { wrapper: Wrapper });
}
describe('NestedCollection', () => {
const collection = fromJS({
name: 'pages',
label: 'Pages',
folder: 'src/pages',
fields: [{ name: 'title', widget: 'string' }],
});
it('should render correctly with no entries', () => {
const entries = fromJS([]);
const { asFragment, getByTestId } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
expect(getByTestId('/')).toHaveTextContent('Pages');
expect(getByTestId('/')).toHaveAttribute('href', '/collections/pages');
expect(asFragment()).toMatchSnapshot();
});
it('should render correctly with nested entries', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/b/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
{ path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
]);
const { asFragment, getByTestId } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
// expand the tree
fireEvent.click(getByTestId('/'));
expect(getByTestId('/a')).toHaveTextContent('File 1');
expect(getByTestId('/a')).toHaveAttribute('href', '/collections/pages/filter/a');
expect(getByTestId('/b')).toHaveTextContent('File 2');
expect(getByTestId('/b')).toHaveAttribute('href', '/collections/pages/filter/b');
expect(asFragment()).toMatchSnapshot();
});
it('should keep expanded nodes on re-render', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/b/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
{ path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
]);
const { getByTestId, rerender } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
fireEvent.click(getByTestId('/'));
fireEvent.click(getByTestId('/a'));
expect(getByTestId('/a')).toHaveTextContent('File 1');
const newEntries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/b/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
{ path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
{ path: 'src/pages/c/index.md', data: { title: 'File 5' } },
{ path: 'src/pages/c/a/index.md', data: { title: 'File 6' } },
]);
rerender(
<MemoryRouter>
<NestedCollection collection={collection} entries={newEntries} />
</MemoryRouter>,
);
expect(getByTestId('/a')).toHaveTextContent('File 1');
});
it('should expand nodes based on filterTerm', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
]);
const { getByTestId, queryByTestId, rerender } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
expect(queryByTestId('/a/a')).toBeNull();
rerender(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} filterTerm={'a/a'} />
</MemoryRouter>,
);
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
});
it('should ignore filterTerm once a user toggles a node', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
]);
const { getByTestId, queryByTestId, rerender } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
rerender(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} filterTerm={'a/a'} />
</MemoryRouter>,
);
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
fireEvent.click(getByTestId('/a'));
rerender(
<MemoryRouter>
<NestedCollection
collection={collection}
entries={fromJS(entries.toJS())}
filterTerm={'a/a'}
/>
</MemoryRouter>,
);
expect(queryByTestId('/a/a')).toBeNull();
});
it('should not collapse an unselected node when clicked', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
{ path: 'src/pages/a/a/a/a/index.md', data: { title: 'File 4' } },
]);
const { getByTestId } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
fireEvent.click(getByTestId('/'));
fireEvent.click(getByTestId('/a'));
fireEvent.click(getByTestId('/a/a'));
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
fireEvent.click(getByTestId('/a'));
expect(getByTestId('/a/a')).toHaveTextContent('File 2');
});
it('should collapse a selected node when clicked', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ path: 'src/pages/a/a/index.md', data: { title: 'File 2' } },
{ path: 'src/pages/a/a/a/index.md', data: { title: 'File 3' } },
{ path: 'src/pages/a/a/a/a/index.md', data: { title: 'File 4' } },
]);
const { getByTestId, queryByTestId } = render(
<MemoryRouter>
<NestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
);
fireEvent.click(getByTestId('/'));
fireEvent.click(getByTestId('/a'));
fireEvent.click(getByTestId('/a/a'));
expect(getByTestId('/a/a/a')).toHaveTextContent('File 3');
fireEvent.click(getByTestId('/a/a'));
expect(queryByTestId('/a/a/a')).toBeNull();
});
it('should render connected component', () => {
const entriesArray = [
{ slug: 'index', path: 'src/pages/index.md', data: { title: 'Root' } },
{ slug: 'a/index', path: 'src/pages/a/index.md', data: { title: 'File 1' } },
{ slug: 'b/index', path: 'src/pages/b/index.md', data: { title: 'File 2' } },
{ slug: 'a/a/index', path: 'src/pages/a/a/index.md', data: { title: 'File 3' } },
{ slug: 'b/a/index', path: 'src/pages/b/a/index.md', data: { title: 'File 4' } },
];
const entries = entriesArray.reduce(
(acc, entry) => {
acc.entities[`${collection.get('name')}.${entry.slug}`] = entry;
acc.pages[collection.get('name')].ids.push(entry.slug);
return acc;
},
{ pages: { [collection.get('name')]: { ids: [] } }, entities: {} },
);
const store = mockStore({ entries: fromJS(entries) });
const { asFragment, getByTestId } = renderWithRedux(
<MemoryRouter>
<ConnectedNestedCollection collection={collection} entries={entries} />
</MemoryRouter>,
{ store },
);
// expand the root
fireEvent.click(getByTestId('/'));
expect(getByTestId('/a')).toHaveTextContent('File 1');
expect(getByTestId('/a')).toHaveAttribute('href', '/collections/pages/filter/a');
expect(getByTestId('/b')).toHaveTextContent('File 2');
expect(getByTestId('/b')).toHaveAttribute('href', '/collections/pages/filter/b');
expect(asFragment()).toMatchSnapshot();
});
describe('getTreeData', () => {
it('should return nested tree data from entries', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/intro/index.md', data: { title: 'intro index' } },
{ path: 'src/pages/intro/category/index.md', data: { title: 'intro category index' } },
{ path: 'src/pages/compliance/index.md', data: { title: 'compliance index' } },
]);
const treeData = getTreeData(collection, entries);
expect(treeData).toEqual([
{
title: 'Pages',
path: '/',
isDir: true,
isRoot: true,
children: [
{
title: 'intro',
path: '/intro',
isDir: true,
isRoot: false,
children: [
{
title: 'category',
path: '/intro/category',
isDir: true,
isRoot: false,
children: [
{
path: '/intro/category/index.md',
data: { title: 'intro category index' },
title: 'intro category index',
isDir: false,
isRoot: false,
children: [],
},
],
},
{
path: '/intro/index.md',
data: { title: 'intro index' },
title: 'intro index',
isDir: false,
isRoot: false,
children: [],
},
],
},
{
title: 'compliance',
path: '/compliance',
isDir: true,
isRoot: false,
children: [
{
path: '/compliance/index.md',
data: { title: 'compliance index' },
title: 'compliance index',
isDir: false,
isRoot: false,
children: [],
},
],
},
{
path: '/index.md',
data: { title: 'Root' },
title: 'Root',
isDir: false,
isRoot: false,
children: [],
},
],
},
]);
});
it('should ignore collection summary', () => {
const entries = fromJS([{ path: 'src/pages/index.md', data: { title: 'Root' } }]);
const treeData = getTreeData(collection, entries);
expect(treeData).toEqual([
{
title: 'Pages',
path: '/',
isDir: true,
isRoot: true,
children: [
{
path: '/index.md',
data: { title: 'Root' },
title: 'Root',
isDir: false,
isRoot: false,
children: [],
},
],
},
]);
});
it('should use nested collection summary for title', () => {
const entries = fromJS([{ path: 'src/pages/index.md', data: { title: 'Root' } }]);
const treeData = getTreeData(
collection.setIn(['nested', 'summary'], '{{filename}}'),
entries,
);
expect(treeData).toEqual([
{
title: 'Pages',
path: '/',
isDir: true,
isRoot: true,
children: [
{
path: '/index.md',
data: { title: 'Root' },
title: 'index',
isDir: false,
isRoot: false,
children: [],
},
],
},
]);
});
});
describe('walk', () => {
it('should visit every tree node', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/dir1/index.md', data: { title: 'Dir1 File' } },
{ path: 'src/pages/dir2/index.md', data: { title: 'Dir2 File' } },
]);
const treeData = getTreeData(collection, entries);
const callback = jest.fn();
walk(treeData, callback);
expect(callback).toHaveBeenCalledTimes(6);
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/' }));
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/index.md' }));
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir1' }));
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir2' }));
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir1/index.md' }));
expect(callback).toHaveBeenCalledWith(expect.objectContaining({ path: '/dir2/index.md' }));
});
});
describe('updateNode', () => {
it('should update node', () => {
const entries = fromJS([
{ path: 'src/pages/index.md', data: { title: 'Root' } },
{ path: 'src/pages/dir1/index.md', data: { title: 'Dir1 File' } },
{ path: 'src/pages/dir2/index.md', data: { title: 'Dir2 File' } },
]);
const treeData = getTreeData(collection, entries);
expect(treeData[0].children[0].children[0].expanded).toBeUndefined();
const callback = jest.fn(node => ({ ...node, expanded: true }));
const node = { path: '/dir1/index.md' };
updateNode(treeData, node, callback);
expect(callback).toHaveBeenCalledTimes(1);
expect(callback).toHaveBeenCalledWith(node);
expect(treeData[0].children[0].children[0].expanded).toEqual(true);
});
});
});

View File

@ -1,87 +0,0 @@
import React from 'react';
import { MemoryRouter } from 'react-router-dom';
import { render } from '@testing-library/react';
import { fromJS } from 'immutable';
import { Sidebar } from '../Sidebar';
jest.mock('netlify-cms-ui-default', () => {
const actual = jest.requireActual('netlify-cms-ui-default');
return {
...actual,
Icon: 'mocked-icon',
};
});
jest.mock('../NestedCollection', () => 'nested-collection');
jest.mock('../CollectionSearch', () => 'collection-search');
jest.mock('../../../actions/collections');
describe('Sidebar', () => {
const props = {
searchTerm: '',
isSearchEnabled: true,
t: jest.fn(key => key),
};
it('should render sidebar with a simple collection', () => {
const collections = fromJS([{ name: 'posts', label: 'Posts' }]).toOrderedMap();
const { asFragment, getByTestId } = render(
<MemoryRouter>
<Sidebar {...props} collections={collections} />
</MemoryRouter>,
);
expect(getByTestId('posts')).toHaveTextContent('Posts');
expect(getByTestId('posts')).toHaveAttribute('href', '/collections/posts');
expect(asFragment()).toMatchSnapshot();
});
it('should not render a hidden collection', () => {
const collections = fromJS([{ name: 'posts', label: 'Posts', hide: true }]).toOrderedMap();
const { queryByTestId } = render(
<MemoryRouter>
<Sidebar {...props} collections={collections} />
</MemoryRouter>,
);
expect(queryByTestId('posts')).toBeNull();
});
it('should render sidebar with a nested collection', () => {
const collections = fromJS([
{ name: 'posts', label: 'Posts', nested: { depth: 10 } },
]).toOrderedMap();
const { asFragment } = render(
<MemoryRouter>
<Sidebar {...props} collections={collections} />
</MemoryRouter>,
);
expect(asFragment()).toMatchSnapshot();
});
it('should render nested collection with filterTerm', () => {
const collections = fromJS([
{ name: 'posts', label: 'Posts', nested: { depth: 10 } },
]).toOrderedMap();
const { asFragment } = render(
<MemoryRouter>
<Sidebar {...props} collections={collections} filterTerm="dir1/dir2" />
</MemoryRouter>,
);
expect(asFragment()).toMatchSnapshot();
});
it('should render sidebar without search', () => {
const collections = fromJS([{ name: 'posts', label: 'Posts' }]).toOrderedMap();
const { asFragment } = render(
<MemoryRouter>
<Sidebar {...props} collections={collections} isSearchEnabled={false} />
</MemoryRouter>,
);
expect(asFragment()).toMatchSnapshot();
});
});

View File

@ -1,144 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Collection should render connected component 1`] = `
<DocumentFragment>
.emotion-2 {
margin: 28px 18px;
}
.emotion-0 {
padding-left: 280px;
}
<div
class="emotion-2 emotion-3"
>
<mock-sidebar
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] }"
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] } }"
filterterm=""
searchterm=""
/>
<main
class="emotion-0 emotion-1"
>
<mock-collection-top
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] }"
newentryurl=""
/>
<mock-collection-controls
filter="Map {}"
group="Map {}"
sortablefields=""
viewfilters=""
viewgroups=""
/>
<mock-entries-collection
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] }"
filterterm=""
/>
</main>
</div>
</DocumentFragment>
`;
exports[`Collection should render with collection with create url 1`] = `
<DocumentFragment>
.emotion-2 {
margin: 28px 18px;
}
.emotion-0 {
padding-left: 280px;
}
<div
class="emotion-2 emotion-3"
>
<mock-sidebar
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": true }"
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] } }"
/>
<main
class="emotion-0 emotion-1"
>
<mock-collection-top
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": true }"
newentryurl="/collections/pages/new"
/>
<mock-collection-controls />
<mock-entries-collection
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": true }"
/>
</main>
</div>
</DocumentFragment>
`;
exports[`Collection should render with collection with create url and path 1`] = `
<DocumentFragment>
.emotion-2 {
margin: 28px 18px;
}
.emotion-0 {
padding-left: 280px;
}
<div
class="emotion-2 emotion-3"
>
<mock-sidebar
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": true }"
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] } }"
filterterm="dir1/dir2"
/>
<main
class="emotion-0 emotion-1"
>
<mock-collection-top
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": true }"
newentryurl="/collections/pages/new?path=dir1/dir2"
/>
<mock-collection-controls />
<mock-entries-collection
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": true }"
filterterm="dir1/dir2"
/>
</main>
</div>
</DocumentFragment>
`;
exports[`Collection should render with collection without create url 1`] = `
<DocumentFragment>
.emotion-2 {
margin: 28px 18px;
}
.emotion-0 {
padding-left: 280px;
}
<div
class="emotion-2 emotion-3"
>
<mock-sidebar
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": false }"
collections="OrderedMap { 0: Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [] } }"
/>
<main
class="emotion-0 emotion-1"
>
<mock-collection-top
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": false }"
newentryurl=""
/>
<mock-collection-controls />
<mock-entries-collection
collection="Map { \\"name\\": \\"pages\\", \\"sortable_fields\\": List [], \\"view_filters\\": List [], \\"view_groups\\": List [], \\"create\\": false }"
/>
</main>
</div>
</DocumentFragment>
`;

View File

@ -1,550 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`NestedCollection should render connected component 1`] = `
<DocumentFragment>
.emotion-6 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 12px;
border-left: 2px solid #fff;
}
.emotion-6 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-6:hover,
.emotion-6:active,
.emotion-6.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-2 {
position: relative;
top: 2px;
color: #fff;
width: 0;
height: 0;
border: 5px solid transparent;
border-radius: 2px;
border-top: 6px solid currentColor;
border-bottom: 0;
color: currentColor;
}
<a
aria-current="page"
class="emotion-6 emotion-7 sidebar-active"
data-testid="/"
depth="0"
href="/collections/pages"
>
<mocked-icon
type="write"
/>
<div
class="emotion-4 emotion-5"
>
<div
class="emotion-0 emotion-1"
>
Pages
</div>
<div
class="emotion-2 emotion-3"
/>
</div>
</a>
.emotion-2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 32px;
border-left: 2px solid #fff;
}
.emotion-4 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-4:hover,
.emotion-4:active,
.emotion-4.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
<a
class="emotion-4 emotion-5"
data-testid="/a"
depth="1"
href="/collections/pages/filter/a"
>
<mocked-icon
type="write"
/>
<div
class="emotion-2 emotion-3"
>
<div
class="emotion-0 emotion-1"
>
File 1
</div>
</div>
</a>
.emotion-2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 32px;
border-left: 2px solid #fff;
}
.emotion-4 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-4:hover,
.emotion-4:active,
.emotion-4.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
<a
class="emotion-4 emotion-5"
data-testid="/b"
depth="1"
href="/collections/pages/filter/b"
>
<mocked-icon
type="write"
/>
<div
class="emotion-2 emotion-3"
>
<div
class="emotion-0 emotion-1"
>
File 2
</div>
</div>
</a>
</DocumentFragment>
`;
exports[`NestedCollection should render correctly with nested entries 1`] = `
<DocumentFragment>
.emotion-6 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 12px;
border-left: 2px solid #fff;
}
.emotion-6 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-6:hover,
.emotion-6:active,
.emotion-6.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-2 {
position: relative;
top: 2px;
color: #fff;
width: 0;
height: 0;
border: 5px solid transparent;
border-radius: 2px;
border-top: 6px solid currentColor;
border-bottom: 0;
color: currentColor;
}
<a
aria-current="page"
class="emotion-6 emotion-7 sidebar-active"
data-testid="/"
depth="0"
href="/collections/pages"
>
<mocked-icon
type="write"
/>
<div
class="emotion-4 emotion-5"
>
<div
class="emotion-0 emotion-1"
>
Pages
</div>
<div
class="emotion-2 emotion-3"
/>
</div>
</a>
.emotion-2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 32px;
border-left: 2px solid #fff;
}
.emotion-4 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-4:hover,
.emotion-4:active,
.emotion-4.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
<a
class="emotion-4 emotion-5"
data-testid="/a"
depth="1"
href="/collections/pages/filter/a"
>
<mocked-icon
type="write"
/>
<div
class="emotion-2 emotion-3"
>
<div
class="emotion-0 emotion-1"
>
File 1
</div>
</div>
</a>
.emotion-2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 32px;
border-left: 2px solid #fff;
}
.emotion-4 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-4:hover,
.emotion-4:active,
.emotion-4.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
<a
class="emotion-4 emotion-5"
data-testid="/b"
depth="1"
href="/collections/pages/filter/b"
>
<mocked-icon
type="write"
/>
<div
class="emotion-2 emotion-3"
>
<div
class="emotion-0 emotion-1"
>
File 2
</div>
</div>
</a>
</DocumentFragment>
`;
exports[`NestedCollection should render correctly with no entries 1`] = `
<DocumentFragment>
.emotion-6 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px;
padding-left: 12px;
border-left: 2px solid #fff;
}
.emotion-6 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-6:hover,
.emotion-6:active,
.emotion-6.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
.emotion-4 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
}
.emotion-0 {
margin-right: 4px;
}
.emotion-2 {
position: relative;
top: 2px;
color: #fff;
width: 0;
height: 0;
border: 5px solid transparent;
border-radius: 2px;
border-left: 6px solid currentColor;
border-right: 0;
color: currentColor;
left: 2px;
}
<a
class="emotion-6 emotion-7"
data-testid="/"
depth="0"
href="/collections/pages"
>
<mocked-icon
type="write"
/>
<div
class="emotion-4 emotion-5"
>
<div
class="emotion-0 emotion-1"
>
Pages
</div>
<div
class="emotion-2 emotion-3"
/>
</div>
</a>
</DocumentFragment>
`;

View File

@ -1,308 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Sidebar should render nested collection with filterTerm 1`] = `
<DocumentFragment>
.emotion-4 {
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
border-radius: 5px;
background-color: #fff;
width: 250px;
padding: 8px 0 12px;
position: fixed;
max-height: calc(100vh - 112px);
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-flex-direction: column;
-ms-flex-direction: column;
flex-direction: column;
}
.emotion-0 {
font-size: 23px;
font-weight: 600;
padding: 0;
margin: 18px 12px 12px;
color: #313d3e;
}
.emotion-2 {
margin: 16px 0 0;
list-style: none;
overflow: auto;
}
<aside
class="emotion-4 emotion-5"
>
<h2
class="emotion-0 emotion-1"
>
collection.sidebar.collections
</h2>
<collection-search
collections="OrderedMap { 0: Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } } }"
searchterm=""
/>
<ul
class="emotion-2 emotion-3"
>
<li>
<nested-collection
collection="Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } }"
data-testid="posts"
filterterm="dir1/dir2"
/>
</li>
</ul>
</aside>
</DocumentFragment>
`;
exports[`Sidebar should render sidebar with a nested collection 1`] = `
<DocumentFragment>
.emotion-4 {
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
border-radius: 5px;
background-color: #fff;
width: 250px;
padding: 8px 0 12px;
position: fixed;
max-height: calc(100vh - 112px);
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-flex-direction: column;
-ms-flex-direction: column;
flex-direction: column;
}
.emotion-0 {
font-size: 23px;
font-weight: 600;
padding: 0;
margin: 18px 12px 12px;
color: #313d3e;
}
.emotion-2 {
margin: 16px 0 0;
list-style: none;
overflow: auto;
}
<aside
class="emotion-4 emotion-5"
>
<h2
class="emotion-0 emotion-1"
>
collection.sidebar.collections
</h2>
<collection-search
collections="OrderedMap { 0: Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } } }"
searchterm=""
/>
<ul
class="emotion-2 emotion-3"
>
<li>
<nested-collection
collection="Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\", \\"nested\\": Map { \\"depth\\": 10 } }"
data-testid="posts"
/>
</li>
</ul>
</aside>
</DocumentFragment>
`;
exports[`Sidebar should render sidebar with a simple collection 1`] = `
<DocumentFragment>
.emotion-6 {
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
border-radius: 5px;
background-color: #fff;
width: 250px;
padding: 8px 0 12px;
position: fixed;
max-height: calc(100vh - 112px);
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-flex-direction: column;
-ms-flex-direction: column;
flex-direction: column;
}
.emotion-0 {
font-size: 23px;
font-weight: 600;
padding: 0;
margin: 18px 12px 12px;
color: #313d3e;
}
.emotion-4 {
margin: 16px 0 0;
list-style: none;
overflow: auto;
}
.emotion-2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px 12px;
border-left: 2px solid #fff;
z-index: -1;
}
.emotion-2 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-2:hover,
.emotion-2:active,
.emotion-2.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
<aside
class="emotion-6 emotion-7"
>
<h2
class="emotion-0 emotion-1"
>
collection.sidebar.collections
</h2>
<collection-search
collections="OrderedMap { 0: Map { \\"name\\": \\"posts\\", \\"label\\": \\"Posts\\" } }"
searchterm=""
/>
<ul
class="emotion-4 emotion-5"
>
<li>
<a
class="emotion-2 emotion-3"
data-testid="posts"
href="/collections/posts"
>
<mocked-icon
type="write"
/>
Posts
</a>
</li>
</ul>
</aside>
</DocumentFragment>
`;
exports[`Sidebar should render sidebar without search 1`] = `
<DocumentFragment>
.emotion-6 {
box-shadow: 0 2px 6px 0 rgba(68,74,87,0.05),0 1px 3px 0 rgba(68,74,87,0.1);
border-radius: 5px;
background-color: #fff;
width: 250px;
padding: 8px 0 12px;
position: fixed;
max-height: calc(100vh - 112px);
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-flex-direction: column;
-ms-flex-direction: column;
flex-direction: column;
}
.emotion-0 {
font-size: 23px;
font-weight: 600;
padding: 0;
margin: 18px 12px 12px;
color: #313d3e;
}
.emotion-4 {
margin: 16px 0 0;
list-style: none;
overflow: auto;
}
.emotion-2 {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
font-size: 14px;
font-weight: 500;
-webkit-align-items: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
padding: 8px 12px;
border-left: 2px solid #fff;
z-index: -1;
}
.emotion-2 mocked-icon {
margin-right: 8px;
-webkit-flex-shrink: 0;
-ms-flex-negative: 0;
flex-shrink: 0;
}
.emotion-2:hover,
.emotion-2:active,
.emotion-2.sidebar-active {
color: #3a69c7;
background-color: #e8f5fe;
border-left-color: #4863c6;
}
<aside
class="emotion-6 emotion-7"
>
<h2
class="emotion-0 emotion-1"
>
collection.sidebar.collections
</h2>
<ul
class="emotion-4 emotion-5"
>
<li>
<a
class="emotion-2 emotion-3"
data-testid="posts"
href="/collections/posts"
>
<mocked-icon
type="write"
/>
Posts
</a>
</li>
</ul>
</aside>
</DocumentFragment>
`;

View File

@ -1,5 +1,4 @@
import { debounce } from 'lodash';
import { Loader } from 'netlify-cms-ui-default';
import PropTypes from 'prop-types';
import React from 'react';
import ImmutablePropTypes from 'react-immutable-proptypes';
@ -34,6 +33,9 @@ import { EDITORIAL_WORKFLOW, status } from '../../constants/publishModes';
import { selectDeployPreview, selectEntry, selectUnpublishedEntry } from '../../reducers';
import { selectFields } from '../../reducers/collections';
import { history, navigateToCollection, navigateToNewEntry } from '../../routing/history';
import { Loader } from '../../ui';
import alert from '../UI/Alert';
import confirm from '../UI/Confirm';
import EditorInterface from './EditorInterface';
import withWorkflow from './withWorkflow';
@ -167,21 +169,32 @@ export class Editor extends React.Component {
}
}
componentDidUpdate(prevProps) {
if (!prevProps.localBackup && this.props.localBackup) {
const confirmLoadBackup = window.confirm(this.props.t('editor.editor.confirmLoadBackup'));
async checkLocalBackup(prevProps) {
const { t, hasChanged, localBackup, loadLocalBackup, entryDraft, collection } = this.props;
if (!prevProps.localBackup && localBackup) {
const confirmLoadBackup = await confirm({
title: 'editor.editor.confirmLoadBackupTitle',
body: 'editor.editor.confirmLoadBackupBody',
});
if (confirmLoadBackup) {
this.props.loadLocalBackup();
loadLocalBackup();
} else {
this.deleteBackup();
}
}
if (this.props.hasChanged) {
this.createBackup(this.props.entryDraft.get('entry'), this.props.collection);
if (hasChanged) {
this.createBackup(entryDraft.get('entry'), collection);
}
}
if (prevProps.entry === this.props.entry) return;
componentDidUpdate(prevProps) {
this.checkLocalBackup(prevProps);
if (prevProps.entry === this.props.entry) {
return;
}
const { newEntry, collection } = this.props;
@ -206,10 +219,13 @@ export class Editor extends React.Component {
};
handleChangeStatus = newStatusName => {
const { entryDraft, updateUnpublishedEntryStatus, collection, slug, currentStatus, t } =
const { entryDraft, updateUnpublishedEntryStatus, collection, slug, currentStatus } =
this.props;
if (entryDraft.get('hasChanged')) {
window.alert(t('editor.editor.onUpdatingWithUnsavedChanges'));
alert({
title: 'editor.editor.onUpdatingWithUnsavedChangesTitle',
body: 'editor.editor.onUpdatingWithUnsavedChangesBody',
});
return;
}
const newStatus = status.get(newStatusName);
@ -256,15 +272,25 @@ export class Editor extends React.Component {
collection,
slug,
currentStatus,
t,
} = this.props;
if (currentStatus !== status.last()) {
window.alert(t('editor.editor.onPublishingNotReady'));
alert({
title: 'editor.editor.onPublishingNotReadyTitle',
body: 'editor.editor.onPublishingNotReadyBody',
});
return;
} else if (entryDraft.get('hasChanged')) {
window.alert(t('editor.editor.onPublishingWithUnsavedChanges'));
alert({
title: 'editor.editor.onPublishingWithUnsavedChangesTitle',
body: 'editor.editor.onPublishingWithUnsavedChangesBody',
});
return;
} else if (!window.confirm(t('editor.editor.onPublishing'))) {
} else if (
!(await confirm({
title: 'editor.editor.onPublishingTitle',
body: 'editor.editor.onPublishingBody',
}))
) {
return;
}
@ -280,8 +306,16 @@ export class Editor extends React.Component {
};
handleUnpublishEntry = async () => {
const { unpublishPublishedEntry, collection, slug, t } = this.props;
if (!window.confirm(t('editor.editor.onUnpublishing'))) return;
const { unpublishPublishedEntry, collection, slug } = this.props;
if (
!(await confirm({
title: 'editor.editor.onUnpublishingTitle',
body: 'editor.editor.onUnpublishingBody',
color: 'error',
}))
) {
return;
}
await unpublishPublishedEntry(collection, slug);
@ -295,15 +329,28 @@ export class Editor extends React.Component {
createDraftDuplicateFromEntry(entryDraft.get('entry'));
};
handleDeleteEntry = () => {
const { entryDraft, newEntry, collection, deleteEntry, slug, t } = this.props;
handleDeleteEntry = async () => {
const { entryDraft, newEntry, collection, deleteEntry, slug } = this.props;
if (entryDraft.get('hasChanged')) {
if (!window.confirm(t('editor.editor.onDeleteWithUnsavedChanges'))) {
if (
!(await confirm({
title: 'editor.editor.onDeleteWithUnsavedChangesTitle',
body: 'editor.editor.onDeleteWithUnsavedChangesBody',
color: 'error',
}))
) {
return;
}
} else if (!window.confirm(t('editor.editor.onDeletePublishedEntry'))) {
} else if (
!(await confirm({
title: 'editor.editor.onDeletePublishedEntryTitle',
body: 'editor.editor.onDeletePublishedEntryBody',
color: 'error',
}))
) {
return;
}
if (newEntry) {
return navigateToCollection(collection.get('name'));
}
@ -316,14 +363,24 @@ export class Editor extends React.Component {
};
handleDeleteUnpublishedChanges = async () => {
const { entryDraft, collection, slug, deleteUnpublishedEntry, loadEntry, isModification, t } =
const { entryDraft, collection, slug, deleteUnpublishedEntry, loadEntry, isModification } =
this.props;
if (
entryDraft.get('hasChanged') &&
!window.confirm(t('editor.editor.onDeleteUnpublishedChangesWithUnsavedChanges'))
!(await confirm({
title: 'editor.editor.onDeleteUnpublishedChangesWithUnsavedChangesTitle',
body: 'editor.editor.onDeleteUnpublishedChangesWithUnsavedChangesBody',
color: 'error',
}))
) {
return;
} else if (!window.confirm(t('editor.editor.onDeleteUnpublishedChanges'))) {
} else if (
!(await confirm({
title: 'editor.editor.onDeleteUnpublishedChangesTitle',
body: 'editor.editor.onDeleteUnpublishedChangesBody',
color: 'error',
}))
) {
return;
}
await deleteUnpublishedEntry(collection.get('name'), slug);
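The Editor hunks above swap `window.confirm`/`window.alert` for the promise-based `confirm`/`alert` dialogs imported from `../UI`, which resolve to a boolean and take translation keys rather than pre-translated strings. A condensed sketch of the guard pattern they follow (the function name is illustrative):

```js
// Sketch only: `confirm` is the dialog imported above as
// `import confirm from '../UI/Confirm';`.
import confirm from '../UI/Confirm';

async function guardedDelete({ hasChanged, onDelete }) {
  const ok = await confirm({
    title: hasChanged
      ? 'editor.editor.onDeleteWithUnsavedChangesTitle'
      : 'editor.editor.onDeletePublishedEntryTitle',
    body: hasChanged
      ? 'editor.editor.onDeleteWithUnsavedChangesBody'
      : 'editor.editor.onDeletePublishedEntryBody',
    color: 'error',
  });
  if (!ok) {
    return; // user cancelled, keep the entry as-is
  }
  await onDelete();
}
```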

View File

@ -7,16 +7,16 @@ import { ClassNames, Global, css as coreCss } from '@emotion/react';
import styled from '@emotion/styled';
import { partial, uniqueId } from 'lodash';
import { connect } from 'react-redux';
import ReactMarkdown from 'react-markdown';
import gfm from 'remark-gfm';
import {
FieldLabel,
colors,
transitions,
lengths,
borders,
} from 'netlify-cms-ui-default';
import ReactMarkdown from 'react-markdown';
import gfm from 'remark-gfm';
} from '../../../ui';
import { resolveWidget, getEditorComponents } from '../../../lib/registry';
import { clearFieldErrors, tryLoadEntry, validateMetaField } from '../../../actions/entries';
import { addAsset, boundGetAsset } from '../../../actions/media';
@ -289,6 +289,7 @@ class EditorControl extends React.Component {
${styleStrings.labelActive};
`}
controlComponent={widget.control}
validator={widget.validator}
entry={entry}
collection={collection}
config={config}

View File

@ -3,15 +3,8 @@ import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { css } from '@emotion/react';
import styled from '@emotion/styled';
import {
buttons,
colors,
Dropdown,
DropdownItem,
StyledDropdownButton,
text,
} from 'netlify-cms-ui-default';
import { buttons, colors, Dropdown, DropdownItem, StyledDropdownButton, text } from '../../../ui';
import EditorControl from './EditorControl';
import {
getI18nInfo,
@ -116,13 +109,15 @@ export default class ControlPane extends React.Component {
copyFromOtherLocale =
({ targetLocale, t }) =>
sourceLocale => {
async sourceLocale => {
if (
!window.confirm(
t('editor.editorControlPane.i18n.copyFromLocaleConfirm', {
locale: sourceLocale.toUpperCase(),
}),
)
!(await confirm({
title: 'editor.editorControlPane.i18n.copyFromLocaleConfirmTitle',
body: {
key: 'editor.editorControlPane.i18n.copyFromLocaleConfirmBody',
options: { locale: sourceLocale.toUpperCase() },
},
}))
) {
return;
}

View File

@ -23,7 +23,8 @@ function isEmpty(value) {
export default class Widget extends Component {
static propTypes = {
controlComponent: PropTypes.func.isRequired,
controlComponent: PropTypes.oneOfType([PropTypes.func, PropTypes.object]).isRequired,
validator: PropTypes.func,
field: ImmutablePropTypes.map.isRequired,
hasActiveStyle: PropTypes.bool,
setActiveStyle: PropTypes.func.isRequired,
@ -99,8 +100,6 @@ export default class Widget extends Component {
*/
this.innerWrappedControl = ref.getWrappedInstance ? ref.getWrappedInstance() : ref;
this.wrappedControlValid = this.innerWrappedControl.isValid || truthy;
/**
* Get the `shouldComponentUpdate` method from the wrapped control, and
* provide the control instance as the `this` binding.
@ -180,44 +179,38 @@ export default class Widget extends Component {
};
validateWrappedControl = field => {
const { t, parentIds } = this.props;
if (typeof this.wrappedControlValid !== 'function') {
throw new Error(oneLine`
this.wrappedControlValid is not a function. Are you sure widget
"${field.get('widget')}" is registered?
`);
}
const { t, parentIds, validator, value } = this.props;
const response = validator?.({ value, field, t });
if (response !== undefined) {
if (typeof response === 'boolean') {
return { error: !response };
} else if (Object.prototype.hasOwnProperty.call(response, 'error')) {
return response;
} else if (response instanceof Promise) {
response.then(
() => {
this.validate({ error: false });
},
err => {
const error = {
type: ValidationErrorTypes.CUSTOM,
message: `${field.get('label', field.get('name'))} - ${err}.`,
};
const response = this.wrappedControlValid();
if (typeof response === 'boolean') {
const isValid = response;
return { error: !isValid };
} else if (Object.prototype.hasOwnProperty.call(response, 'error')) {
return response;
} else if (response instanceof Promise) {
response.then(
() => {
this.validate({ error: false });
},
err => {
const error = {
type: ValidationErrorTypes.CUSTOM,
message: `${field.get('label', field.get('name'))} - ${err}.`,
};
this.validate({ error });
},
);
this.validate({ error });
},
);
const error = {
type: ValidationErrorTypes.CUSTOM,
parentIds,
message: t('editor.editorControlPane.widget.processing', {
fieldLabel: field.get('label', field.get('name')),
}),
};
const error = {
type: ValidationErrorTypes.CUSTOM,
parentIds,
message: t('editor.editorControlPane.widget.processing', {
fieldLabel: field.get('label', field.get('name')),
}),
};
return { error };
return { error };
}
}
return { error: false };
};
@ -315,7 +308,6 @@ export default class Widget extends Component {
onRemoveInsertedMedia,
getAsset,
forID: uniqueFieldId,
ref: this.processInnerControlRef,
validate: this.validate,
classNameWrapper,
classNameWidget,
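With the new `validator` prop, a widget can supply its own check; `validateWrappedControl` above accepts a boolean, an object with an `error` key, or a Promise, and treats `undefined` as "no custom validation". A hypothetical validator matching that contract (the `max_length` option name is an assumption, not an official field option):

```js
// Hypothetical custom validator. `field` is an Immutable.js Map, as in the
// Widget component above; returning `true` is mapped to { error: false }.
function maxLengthValidator({ field, value }) {
  const max = field.get('max_length', 120); // assumed config key, for illustration
  if (typeof value !== 'string' || value.length <= max) {
    return true;
  }
  return {
    error: {
      message: `${field.get('label', field.get('name'))} must be ${max} characters or fewer.`,
    },
  };
}
```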

View File

@ -3,6 +3,8 @@ import React, { Component } from 'react';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { css, Global } from '@emotion/react';
import styled from '@emotion/styled';
import { ScrollSync, ScrollSyncPane } from 'react-scroll-sync';
import {
colors,
colorsRaw,
@ -10,9 +12,7 @@ import {
transitions,
IconButton,
zIndex,
} from 'netlify-cms-ui-default';
import { ScrollSync, ScrollSyncPane } from 'react-scroll-sync';
} from '../../ui';
import EditorControlPane from './EditorControlPane/EditorControlPane';
import EditorPreviewPane from './EditorPreviewPane/EditorPreviewPane';
import EditorToolbar from './EditorToolbar';

View File

@ -3,9 +3,9 @@ import React from 'react';
import styled from '@emotion/styled';
import { List, Map } from 'immutable';
import ImmutablePropTypes from 'react-immutable-proptypes';
import { lengths } from 'netlify-cms-ui-default';
import { connect } from 'react-redux';
import { lengths } from '../../../ui';
import {
resolveWidget,
getPreviewTemplate,

View File

@ -5,6 +5,7 @@ import { css } from '@emotion/react';
import styled from '@emotion/styled';
import { translate } from 'react-polyglot';
import { Link } from 'react-router-dom';
import {
Icon,
Dropdown,
@ -15,8 +16,7 @@ import {
components,
buttons,
zIndex,
} from 'netlify-cms-ui-default';
} from '../../ui';
import { status } from '../../constants/publishModes';
import { SettingsDropdown } from '../UI';

View File

@ -1,219 +0,0 @@
import React from 'react';
import { render } from '@testing-library/react';
import { fromJS } from 'immutable';
import { Editor } from '../Editor';
jest.mock('lodash/debounce', () => {
const flush = jest.fn();
return func => {
func.flush = flush;
return func;
};
});
// eslint-disable-next-line react/display-name
jest.mock('../EditorInterface', () => props => <mock-editor-interface {...props} />);
jest.mock('netlify-cms-ui-default', () => {
return {
// eslint-disable-next-line react/display-name
Loader: props => <mock-loader {...props} />,
};
});
jest.mock('../../../routing/history');
describe('Editor', () => {
const props = {
boundGetAsset: jest.fn(),
changeDraftField: jest.fn(),
changeDraftFieldValidation: jest.fn(),
collection: fromJS({ name: 'posts' }),
createDraftDuplicateFromEntry: jest.fn(),
createEmptyDraft: jest.fn(),
discardDraft: jest.fn(),
entry: fromJS({}),
entryDraft: fromJS({}),
loadEntry: jest.fn(),
persistEntry: jest.fn(),
deleteEntry: jest.fn(),
showDelete: true,
fields: fromJS([]),
slug: 'slug',
newEntry: true,
updateUnpublishedEntryStatus: jest.fn(),
publishUnpublishedEntry: jest.fn(),
deleteUnpublishedEntry: jest.fn(),
logoutUser: jest.fn(),
loadEntries: jest.fn(),
deployPreview: fromJS({}),
loadDeployPreview: jest.fn(),
user: fromJS({}),
t: jest.fn(key => key),
localBackup: fromJS({}),
retrieveLocalBackup: jest.fn(),
persistLocalBackup: jest.fn(),
location: { search: '?title=title' },
};
beforeEach(() => {
jest.clearAllMocks();
});
it('should render loader when entryDraft is null', () => {
// suppress prop type error
jest.spyOn(console, 'error').mockImplementation(() => {});
const { asFragment } = render(<Editor {...props} entryDraft={null} />);
expect(asFragment()).toMatchSnapshot();
expect(console.error).toHaveBeenCalledTimes(1);
expect(console.error).toHaveBeenCalledWith(
'Warning: Failed prop type: Required prop `entryDraft` was not specified in `Editor`.\n in Editor',
);
});
it('should render loader when entryDraft entry is undefined', () => {
const { asFragment } = render(<Editor {...props} entryDraft={fromJS({})} />);
expect(asFragment()).toMatchSnapshot();
});
it('should render loader when entry is fetching', () => {
const { asFragment } = render(
<Editor {...props} entryDraft={fromJS({ entry: {} })} entry={fromJS({ isFetching: true })} />,
);
expect(asFragment()).toMatchSnapshot();
});
it('should render editor interface when entry is not fetching', () => {
const { asFragment } = render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' } })}
entry={fromJS({ isFetching: false })}
/>,
);
expect(asFragment()).toMatchSnapshot();
});
it('should call retrieveLocalBackup on mount', () => {
render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' } })}
entry={fromJS({ isFetching: false })}
/>,
);
expect(props.retrieveLocalBackup).toHaveBeenCalledTimes(1);
expect(props.retrieveLocalBackup).toHaveBeenCalledWith(props.collection, props.slug);
});
it('should create new draft on new entry when mounting', () => {
render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' } })}
entry={fromJS({ isFetching: false })}
newEntry={true}
/>,
);
expect(props.createEmptyDraft).toHaveBeenCalledTimes(1);
expect(props.createEmptyDraft).toHaveBeenCalledWith(props.collection, '?title=title');
expect(props.loadEntry).toHaveBeenCalledTimes(0);
});
it('should load entry on existing entry when mounting', () => {
render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' } })}
entry={fromJS({ isFetching: false })}
newEntry={false}
/>,
);
expect(props.createEmptyDraft).toHaveBeenCalledTimes(0);
expect(props.loadEntry).toHaveBeenCalledTimes(1);
expect(props.loadEntry).toHaveBeenCalledWith(props.collection, 'slug');
});
it('should load entries when entries are not loaded when mounting', () => {
render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' } })}
entry={fromJS({ isFetching: false })}
collectionEntriesLoaded={false}
/>,
);
expect(props.loadEntries).toHaveBeenCalledTimes(1);
expect(props.loadEntries).toHaveBeenCalledWith(props.collection);
});
it('should not load entries when entries are loaded when mounting', () => {
render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' } })}
entry={fromJS({ isFetching: false })}
collectionEntriesLoaded={true}
/>,
);
expect(props.loadEntries).toHaveBeenCalledTimes(0);
});
it('should flush debounced createBackup, discard draft and remove exit blocker on unmount', () => {
window.removeEventListener = jest.fn();
const debounce = require('lodash/debounce');
const flush = debounce({}).flush;
const { unmount } = render(
<Editor
{...props}
entryDraft={fromJS({ entry: { slug: 'slug' }, hasChanged: true })}
entry={fromJS({ isFetching: false })}
/>,
);
jest.clearAllMocks();
unmount();
expect(flush).toHaveBeenCalledTimes(1);
expect(props.discardDraft).toHaveBeenCalledTimes(1);
expect(window.removeEventListener).toHaveBeenCalledWith('beforeunload', expect.any(Function));
const callback = window.removeEventListener.mock.calls.find(
call => call[0] === 'beforeunload',
)[1];
const event = {};
callback(event);
expect(event).toEqual({ returnValue: 'editor.editor.onLeavePage' });
});
it('should persist backup when changed', () => {
const { rerender } = render(
<Editor
{...props}
entryDraft={fromJS({ entry: {} })}
entry={fromJS({ isFetching: false })}
/>,
);
jest.clearAllMocks();
rerender(
<Editor
{...props}
entryDraft={fromJS({ entry: { mediaFiles: [{ id: '1' }] } })}
entry={fromJS({ isFetching: false, data: {} })}
hasChanged={true}
/>,
);
expect(props.persistLocalBackup).toHaveBeenCalledTimes(1);
expect(props.persistLocalBackup).toHaveBeenCalledWith(
fromJS({ mediaFiles: [{ id: '1' }] }),
props.collection,
);
});
});

View File

@ -1,120 +0,0 @@
import React from 'react';
import { render } from '@testing-library/react';
import { fromJS } from 'immutable';
import { EditorToolbar } from '../EditorToolbar';
jest.mock('../../UI', () => ({
// eslint-disable-next-line react/display-name
SettingsDropdown: props => <mock-settings-dropdown {...props} />,
}));
jest.mock('react-router-dom', () => {
return {
// eslint-disable-next-line react/display-name
Link: props => <mock-link {...props} />,
};
});
describe('EditorToolbar', () => {
const props = {
isPersisting: false,
isPublishing: false,
isUpdatingStatus: false,
isDeleting: false,
onPersist: jest.fn(),
onPersistAndNew: jest.fn(),
onPersistAndDuplicate: jest.fn(),
showDelete: true,
onDelete: jest.fn(),
onDeleteUnpublishedChanges: jest.fn(),
onChangeStatus: jest.fn(),
onPublish: jest.fn(),
unPublish: jest.fn(),
onDuplicate: jest.fn(),
onPublishAndNew: jest.fn(),
onPublishAndDuplicate: jest.fn(),
hasChanged: false,
collection: fromJS({ name: 'posts' }),
hasWorkflow: false,
useOpenAuthoring: false,
hasUnpublishedChanges: false,
isNewEntry: false,
isModification: false,
onLogoutClick: jest.fn(),
loadDeployPreview: jest.fn(),
t: jest.fn(key => key),
editorBackLink: '',
};
beforeEach(() => {
jest.clearAllMocks();
});
it('should render with default props', () => {
const { asFragment } = render(<EditorToolbar {...props} />);
expect(asFragment()).toMatchSnapshot();
});
[false, true].forEach(useOpenAuthoring => {
it(`should render with workflow controls hasUnpublishedChanges=true,isNewEntry=false,isModification=true,useOpenAuthoring=${useOpenAuthoring}`, () => {
const { asFragment } = render(
<EditorToolbar
{...props}
hasWorkflow={true}
hasUnpublishedChanges={true}
isNewEntry={false}
isModification={true}
useOpenAuthoring={useOpenAuthoring}
/>,
);
expect(asFragment()).toMatchSnapshot();
});
it(`should render with workflow controls hasUnpublishedChanges=true,isNewEntry=false,isModification=false,useOpenAuthoring=${useOpenAuthoring}`, () => {
const { asFragment } = render(
<EditorToolbar
{...props}
hasWorkflow={true}
hasUnpublishedChanges={true}
isNewEntry={false}
isModification={false}
useOpenAuthoring={useOpenAuthoring}
/>,
);
expect(asFragment()).toMatchSnapshot();
});
it(`should render with workflow controls hasUnpublishedChanges=false,isNewEntry=false,isModification=false,useOpenAuthoring=${useOpenAuthoring}`, () => {
const { asFragment } = render(
<EditorToolbar
{...props}
hasWorkflow={true}
hasUnpublishedChanges={false}
isNewEntry={false}
isModification={false}
useOpenAuthoring={useOpenAuthoring}
/>,
);
expect(asFragment()).toMatchSnapshot();
});
['draft', 'pending_review', 'pending_publish'].forEach(status => {
it(`should render with status=${status},useOpenAuthoring=${useOpenAuthoring}`, () => {
const { asFragment } = render(
<EditorToolbar
{...props}
hasWorkflow={true}
currentStatus={status}
useOpenAuthoring={useOpenAuthoring}
/>,
);
expect(asFragment()).toMatchSnapshot();
});
});
it(`should render normal save button`, () => {
const { asFragment } = render(<EditorToolbar {...props} hasChanged={true} />);
expect(asFragment()).toMatchSnapshot();
});
});
});

View File

@ -1,45 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Editor should render editor interface when entry is not fetching 1`] = `
<DocumentFragment>
<mock-editor-interface
collection="Map { \\"name\\": \\"posts\\" }"
deploypreview="Map {}"
entry="Map { \\"slug\\": \\"slug\\" }"
fields="List []"
isnewentry="true"
showdelete="true"
user="Map {}"
/>
</DocumentFragment>
`;
exports[`Editor should render loader when entry is fetching 1`] = `
<DocumentFragment>
<mock-loader
active="true"
>
editor.editor.loadingEntry
</mock-loader>
</DocumentFragment>
`;
exports[`Editor should render loader when entryDraft entry is undefined 1`] = `
<DocumentFragment>
<mock-loader
active="true"
>
editor.editor.loadingEntry
</mock-loader>
</DocumentFragment>
`;
exports[`Editor should render loader when entryDraft is null 1`] = `
<DocumentFragment>
<mock-loader
active="true"
>
editor.editor.loadingEntry
</mock-loader>
</DocumentFragment>
`;

Some files were not shown because too many files have changed in this diff.