refactor: monorepo setup with lerna ()

This commit is contained in:
Daniel Lautzenheiser
2022-12-15 13:44:49 -05:00
committed by GitHub
parent dac29fbf3c
commit 504d95c34f
706 changed files with 16571 additions and 142 deletions
.github/workflows
.gitignore, CONTRIBUTING.md, lerna.json, nx.json, package.json
packages
app
core
.editorconfig.eslintignore.eslintrc.js.gitignore.prettierignore.prettierrcbabel.config.js
dev-test
jest.config.integration.jsjest.config.jspackage.json
src
__mocks__
actions
backend.ts
backends
bootstrap.tsx
components
constants.ts
constants
extensions.ts
formats
index.tsinterface.ts
lib
locales
media-libraries
cloudinary
index.tsx
uploadcare
mediaLibrary.ts
reducers
routing
store
tsconfig.json
types
valueObjects
widgets
boolean
code
colorstring
datetime
file
image
index.tsx
list
map
markdown
MarkdownPreview.tsxindex.ts
mdx
plate
PlateEditor.tsx
components
editableProps.ts
hooks
index.tsplateTypes.ts
plugins
serialization
tests-util
schema.tswithMarkdownControl.tsx
mdx
number
object
relation
select
string
text
test
tsconfig.base.json, tsconfig.dev.json, tsconfig.json, webpack.config.js, yarn.lock
docs
.editorconfig.eslintignore.eslintrc.js.eslintrc.json.gitignore.prettierignore.prettierrc
content
design
netlify.tomlnext.config.jspackage.json
public
src
tsconfig.jsonyarn.lock
renovate.jsonyarn.lock

@ -0,0 +1,83 @@
import trim from 'lodash/trim';
import trimEnd from 'lodash/trimEnd';
import { createNonce, isInsecureProtocol, validateNonce } from './utils';
import type { User, AuthenticatorConfig } from '@staticcms/core/interface';
import type { NetlifyError } from './netlify-auth';
/**
 * OAuth 2.0 implicit-grant authenticator: redirects the browser to the
 * provider's authorization endpoint and reads the access token back from the
 * URL hash after the provider redirects to us.
 */
export default class ImplicitAuthenticator {
  /** Fully-qualified authorization endpoint (`<base_url>/<auth_endpoint>`). */
  private auth_url: string;
  /** OAuth client id registered with the provider. */
  private appID: string;
  /** Strips the token fragment from the URL after it has been consumed. */
  private clearHash: () => void;

  constructor(config: AuthenticatorConfig = {}) {
    const baseURL = trimEnd(config.base_url, '/');
    const authEndpoint = trim(config.auth_endpoint, '/');
    this.auth_url = `${baseURL}/${authEndpoint}`;
    this.appID = config.app_id ?? '';
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    this.clearHash = config.clearHash ?? (() => {});
  }

  /**
   * Start the implicit flow by navigating to the provider's authorization URL.
   * `cb` is only invoked (with an error) when the flow cannot even begin.
   */
  authenticate(
    options: { scope: string; prompt?: string | null; resource?: string | null },
    cb: (error: Error | NetlifyError | null, data?: User) => void,
  ) {
    if (isInsecureProtocol()) {
      return cb(new Error('Cannot authenticate over insecure protocol!'));
    }

    const authURL = new URL(this.auth_url);
    authURL.searchParams.set('client_id', this.appID);
    authURL.searchParams.set('redirect_uri', document.location.origin + document.location.pathname);
    authURL.searchParams.set('response_type', 'token');
    authURL.searchParams.set('scope', options.scope);

    // `!= null` covers both null and undefined; the original redundantly
    // checked both explicitly.
    if (options.prompt != null) {
      authURL.searchParams.set('prompt', options.prompt);
    }

    if (options.resource != null) {
      authURL.searchParams.set('resource', options.resource);
    }

    const state = JSON.stringify({ auth_type: 'implicit', nonce: createNonce() });
    authURL.searchParams.set('state', state);

    document.location.assign(authURL.href);
  }

  /**
   * Complete authentication if we were redirected back to from the provider.
   */
  completeAuth(cb: (error: Error | NetlifyError | null, data?: User) => void) {
    const hashParams = new URLSearchParams(document.location.hash.replace(/^#?\/?/, ''));
    if (!hashParams.has('access_token') && !hashParams.has('error')) {
      return;
    }

    // Remove tokens from hash so that token does not remain in browser history.
    this.clearHash();

    const params = [...hashParams.entries()].reduce((acc, [key, value]) => {
      acc[key] = value;
      return acc;
    }, {} as Record<string, string>);

    // NOTE(review): JSON.parse throws when `state` is absent/malformed —
    // preserved from the original; confirm upstream callers tolerate this.
    const { nonce } = JSON.parse(params.state ?? '');
    const validNonce = validateNonce(nonce);
    if (!validNonce) {
      return cb(new Error('Invalid nonce'));
    }

    // Bug fix: the original tested `'error' in hashParams`, but `hashParams`
    // is a URLSearchParams instance, so `in` inspected its object properties
    // (never the query keys) and this branch was unreachable. Check the plain
    // `params` record instead, mirroring the `access_token` check below.
    if ('error' in params) {
      return cb(new Error(`${params.error}: ${params.error_description}`));
    }

    if ('access_token' in params) {
      const { access_token: token, ...data } = params;
      cb(null, { token, ...data } as User);
    }
  }
}

@ -0,0 +1,3 @@
// Public entry point for the OAuth authenticator implementations:
// popup-based Netlify flow, implicit grant, and PKCE (authorization code).
export { default as NetlifyAuthenticator } from './netlify-auth';
export { default as ImplicitAuthenticator } from './implicit-oauth';
export { default as PkceAuthenticator } from './pkce-oauth';

@ -0,0 +1,201 @@
import trim from 'lodash/trim';
import trimEnd from 'lodash/trimEnd';
import type { User, AuthenticatorConfig } from '@staticcms/core/interface';
// Defaults used when the config supplies no base_url / auth_endpoint.
const NETLIFY_API = 'https://api.netlify.com';
const AUTH_ENDPOINT = 'auth';

/**
 * Wrapper that carries a provider error through the common
 * `(error, data)` callback shape used by all authenticators.
 */
export class NetlifyError {
  private err: Error;
  constructor(err: Error) {
    this.err = err;
  }
  // Delegates to the wrapped error's message.
  toString() {
    return this.err && this.err.message;
  }
}

// Auth popup dimensions per provider (used to size and center the window).
const PROVIDERS = {
  github: {
    width: 960,
    height: 600,
  },
  gitlab: {
    width: 960,
    height: 600,
  },
  bitbucket: {
    width: 960,
    height: 500,
  },
  email: {
    width: 500,
    height: 400,
  },
} as const;
/**
 * Popup-based authenticator for the Netlify auth service.
 *
 * Flow: `authenticate` opens a provider popup and listens for a
 * `authorizing:<provider>` handshake message; once acknowledged, a second
 * listener waits for `authorization:<provider>:success|error:<payload>`
 * and completes via the callback. The two listeners are swapped in order,
 * so the message choreography below is sequence-sensitive.
 */
class Authenticator {
  private site_id: string | null;
  private base_url: string;
  private auth_endpoint: string;
  // Popup opened by `authenticate`; messaged and closed during the handshake.
  private authWindow: Window | null;

  constructor(config: AuthenticatorConfig = {}) {
    this.site_id = config.site_id || null;
    this.base_url = trimEnd(config.base_url, '/') || NETLIFY_API;
    this.auth_endpoint = trim(config.auth_endpoint, '/') || AUTH_ENDPOINT;
    this.authWindow = null;
  }

  /**
   * Stage 1 of the handshake: when the popup announces
   * `authorizing:<provider>` from the expected origin, replace this listener
   * with the authorize listener and echo the message back to the popup.
   */
  handshakeCallback(
    options: { provider?: keyof typeof PROVIDERS },
    cb: (error: Error | NetlifyError | null, data?: User) => void,
  ) {
    const fn = (e: { data: string; origin: string }) => {
      if (e.data === 'authorizing:' + options.provider && e.origin === this.base_url) {
        window.removeEventListener('message', fn, false);
        window.addEventListener('message', this.authorizeCallback(options, cb), false);
        return this.authWindow?.postMessage(e.data, e.origin);
      }
    };
    return fn;
  }

  /**
   * Stage 2: parse `authorization:<provider>:success:<json>` or
   * `authorization:<provider>:error:<json>` messages, close the popup and
   * invoke `cb` with the parsed payload. Messages from other origins are
   * ignored.
   */
  authorizeCallback(
    options: { provider?: keyof typeof PROVIDERS },
    cb: (error: Error | NetlifyError | null, data?: User) => void,
  ) {
    const fn = (e: { data: string; origin: string }) => {
      if (e.origin !== this.base_url) {
        return;
      }

      if (e.data.indexOf('authorization:' + options.provider + ':success:') === 0) {
        // Payload is the JSON after the message prefix.
        const data = JSON.parse(
          e.data.match(new RegExp('^authorization:' + options.provider + ':success:(.+)$'))?.[1] ??
            '',
        );
        window.removeEventListener('message', fn, false);
        this.authWindow?.close();
        cb(null, data);
      }

      if (e.data.indexOf('authorization:' + options.provider + ':error:') === 0) {
        const err = JSON.parse(
          e.data.match(new RegExp('^authorization:' + options.provider + ':error:(.+)$'))?.[1] ??
            '',
        );
        window.removeEventListener('message', fn, false);
        this.authWindow?.close();
        cb(new NetlifyError(err));
      }
    };
    return fn;
  }

  /**
   * Configured site id, or the current host; localhost is mapped to
   * 'cms.netlify.com'.
   */
  getSiteID() {
    if (this.site_id) {
      return this.site_id;
    }
    const host = document.location.host.split(':')[0];
    return host === 'localhost' ? 'cms.netlify.com' : host;
  }

  /**
   * Open the provider's auth popup and register the handshake listener.
   * Errors (missing provider/site id) are reported through `cb`.
   */
  authenticate(
    options: {
      provider?: keyof typeof PROVIDERS;
      scope?: string;
      login?: boolean;
      beta_invite?: string;
      invite_code?: string;
    },
    cb: (error: Error | NetlifyError | null, data?: User) => void,
  ) {
    const { provider } = options;
    const siteID = this.getSiteID();

    if (!provider) {
      return cb(
        new NetlifyError(
          new Error('You must specify a provider when calling netlify.authenticate'),
        ),
      );
    }

    if (!siteID) {
      return cb(
        new NetlifyError(
          new Error(
            "You must set a site_id with netlify.configure({site_id: 'your-site-id'}) to make authentication work from localhost",
          ),
        ),
      );
    }

    // Center the popup on screen using the provider's preferred dimensions.
    const conf = PROVIDERS[provider] || PROVIDERS.github;
    const left = screen.width / 2 - conf.width / 2;
    const top = screen.height / 2 - conf.height / 2;

    window.addEventListener('message', this.handshakeCallback(options, cb), false);

    let url = `${this.base_url}/${this.auth_endpoint}?provider=${options.provider}&site_id=${siteID}`;
    if (options.scope) {
      url += '&scope=' + options.scope;
    }
    if (options.login === true) {
      url += '&login=true';
    }
    if (options.beta_invite) {
      url += '&beta_invite=' + options.beta_invite;
    }
    if (options.invite_code) {
      url += '&invite_code=' + options.invite_code;
    }

    this.authWindow = window.open(
      url,
      'Netlify Authorization',
      `width=${conf.width}, height=${conf.height}, top=${top}, left=${left}`,
    );
    this.authWindow?.focus();
  }

  /**
   * Exchange a refresh token for fresh credentials via the auth service.
   * Returns a promise when `cb` is omitted; otherwise reports through `cb`.
   */
  refresh(
    options: {
      provider: keyof typeof PROVIDERS;
      refresh_token?: string;
    },
    cb?: (error: Error | NetlifyError | null, data?: User) => void,
  ) {
    const { provider, refresh_token } = options;
    const siteID = this.getSiteID();
    // Without a callback, validation errors surface as rejected promises.
    const onError = cb || Promise.reject.bind(Promise);

    if (!provider || !refresh_token) {
      return onError(
        new NetlifyError(
          new Error('You must specify a provider and refresh token when calling netlify.refresh'),
        ),
      );
    }

    if (!siteID) {
      return onError(
        new NetlifyError(
          new Error(
            "You must set a site_id with netlify.configure({site_id: 'your-site-id'}) to make token refresh work from localhost",
          ),
        ),
      );
    }

    const url = `${this.base_url}/${this.auth_endpoint}/refresh?provider=${provider}&site_id=${siteID}&refresh_token=${refresh_token}`;
    const refreshPromise = fetch(url, { method: 'POST', body: '' }).then(res => res.json());

    // Return a promise if a callback wasn't provided
    if (!cb) {
      return refreshPromise;
    }

    // Otherwise, use the provided callback.
    refreshPromise.then(data => cb(null, data)).catch(cb);
  }
}

export default Authenticator;

@ -0,0 +1,139 @@
import trim from 'lodash/trim';
import trimEnd from 'lodash/trimEnd';
import { createNonce, isInsecureProtocol, validateNonce } from './utils';
import type { User, AuthenticatorConfig } from '@staticcms/core/interface';
import type { NetlifyError } from './netlify-auth';
/**
 * SHA-256 hash of `text`, returned as a raw binary string (one character per
 * byte) so it can be fed straight into `btoa`.
 */
async function sha256(text: string) {
  const digest = await window.crypto.subtle.digest('SHA-256', new TextEncoder().encode(text));
  return String.fromCharCode(...new Uint8Array(digest));
}

// based on https://github.com/auth0/auth0-spa-js/blob/9a83f698127eae7da72691b0d4b1b847567687e3/src/utils.ts#L147
function generateVerifierCode() {
  // characters that can be used for codeVerifer
  // excludes _~ as if included would cause an uneven distribution as char.length would no longer be a factor of 256
  const chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-.';
  const randomBytes = window.crypto.getRandomValues(new Uint8Array(128));
  let code = '';
  for (const byte of randomBytes) {
    code += chars[byte % chars.length];
  }
  return code;
}

/** Derive the S256 code challenge (unpadded base64url) from a verifier. */
async function createCodeChallenge(codeVerifier: string) {
  const hashed = await sha256(codeVerifier);
  // https://tools.ietf.org/html/rfc7636#appendix-A
  const unpadded = btoa(hashed).split('=')[0];
  return unpadded.replace(/\+/g, '-').replace(/\//g, '_');
}

const CODE_VERIFIER_STORAGE_KEY = 'static-cms-pkce-verifier-code';

/** Create a fresh verifier and persist it for the post-redirect exchange. */
function createCodeVerifier() {
  const verifier = generateVerifierCode();
  window.sessionStorage.setItem(CODE_VERIFIER_STORAGE_KEY, verifier);
  return verifier;
}

/** Verifier stored before the redirect, or `null` when absent. */
function getCodeVerifier() {
  return window.sessionStorage.getItem(CODE_VERIFIER_STORAGE_KEY);
}

/** Drop the stored verifier once the token exchange has consumed it. */
function clearCodeVerifier() {
  window.sessionStorage.removeItem(CODE_VERIFIER_STORAGE_KEY);
}
export default class PkceAuthenticator {
private auth_url: string;
private auth_token_url: string;
private appID: string;
constructor(config: AuthenticatorConfig = {}) {
const baseURL = trimEnd(config.base_url, '/');
const authEndpoint = trim(config.auth_endpoint, '/');
const authTokenEndpoint = trim(config.auth_token_endpoint, '/');
this.auth_url = `${baseURL}/${authEndpoint}`;
this.auth_token_url = `${baseURL}/${authTokenEndpoint}`;
this.appID = config.app_id ?? '';
}
async authenticate(
options: { scope: string; prompt?: string | null; resource?: string | null },
cb: (error: Error | NetlifyError | null, data?: User) => void,
) {
if (isInsecureProtocol()) {
return cb(new Error('Cannot authenticate over insecure protocol!'));
}
const authURL = new URL(this.auth_url);
authURL.searchParams.set('client_id', this.appID);
authURL.searchParams.set('redirect_uri', document.location.origin + document.location.pathname);
authURL.searchParams.set('response_type', 'code');
authURL.searchParams.set('scope', options.scope);
const state = JSON.stringify({ auth_type: 'pkce', nonce: createNonce() });
authURL.searchParams.set('state', state);
authURL.searchParams.set('code_challenge_method', 'S256');
const codeVerifier = createCodeVerifier();
const codeChallenge = await createCodeChallenge(codeVerifier);
authURL.searchParams.set('code_challenge', codeChallenge);
document.location.assign(authURL.href);
}
/**
* Complete authentication if we were redirected back to from the provider.
*/
async completeAuth(cb: (error: Error | NetlifyError | null, data?: User) => void) {
const searchParams = new URLSearchParams(document.location.search);
const params = [...searchParams.entries()].reduce((acc, [key, value]) => {
acc[key] = value;
return acc;
}, {} as Record<string, string>);
// Remove code from url
window.history.replaceState(null, '', document.location.pathname);
if (!('code' in params) && !('error' in params)) {
return;
}
const { nonce } = JSON.parse(params.state ?? '');
const validNonce = validateNonce(nonce);
if (!validNonce) {
return cb(new Error('Invalid nonce'));
}
if ('error' in params) {
return cb(new Error(`${params.error}: ${params.error_description}`));
}
if ('code' in params) {
const code = params.code;
const authURL = new URL(this.auth_token_url);
authURL.searchParams.set('client_id', this.appID);
authURL.searchParams.set('code', code ?? '');
authURL.searchParams.set('grant_type', 'authorization_code');
authURL.searchParams.set(
'redirect_uri',
document.location.origin + document.location.pathname,
);
authURL.searchParams.set('code_verifier', getCodeVerifier() ?? '');
//no need for verifier code so remove
clearCodeVerifier();
const response = await fetch(authURL.href, { method: 'POST' });
const data = await response.json();
cb(null, { token: data.access_token, ...data });
}
}
}

@ -0,0 +1,24 @@
import { v4 as uuid } from 'uuid';
/**
 * Generate a fresh nonce for an OAuth `state` payload and persist it in
 * session storage so it can be verified after the provider redirects back.
 */
export function createNonce() {
  const nonce = uuid();
  window.sessionStorage.setItem('static-cms-auth', JSON.stringify({ nonce }));
  return nonce;
}

/**
 * Compare `check` against the nonce stored by `createNonce`.
 * The stored nonce is single-use: it is removed regardless of the outcome.
 */
export function validateNonce(check: string) {
  const auth = window.sessionStorage.getItem('static-cms-auth');
  const valid = auth && (JSON.parse(auth).nonce as string);
  // Bug fix: the nonce is written to sessionStorage in `createNonce`, but the
  // original removed it from localStorage, so stale nonces were never cleared
  // from session storage.
  window.sessionStorage.removeItem('static-cms-auth');
  return check === valid;
}
/**
 * True when the page is served over plain HTTP from a non-local host.
 * Plain-HTTP localhost is tolerated: the token never leaves the machine,
 * assuming the auth URL itself is secure.
 */
export function isInsecureProtocol() {
  const { protocol, hostname } = document.location;
  const isLocalHost = hostname === 'localhost' || hostname === '127.0.0.1';
  return protocol !== 'https:' && !isLocalHost;
}

@ -0,0 +1,7 @@
/**
 * Print a styled error banner (bold title plus description) to the console
 * using `%c` CSS directives.
 */
export default function consoleError(title: string, description: string) {
  const message = `%c ⛔ ${title}\n` + `%c${description}\n\n`;
  const titleStyle = 'color: black; font-weight: bold; font-size: 16px; line-height: 50px;';
  const bodyStyle = 'color: black;';
  console.error(message, titleStyle, bodyStyle);
}

@ -0,0 +1,160 @@
import flow from 'lodash/flow';
import get from 'lodash/get';
import partialRight from 'lodash/partialRight';
import { COMMIT_AUTHOR, COMMIT_DATE } from '../constants/commitProps';
import { sanitizeSlug } from './urlHelper';
import { selectIdentifier, selectInferedField } from './util/collection.util';
import { selectField } from './util/field.util';
import { set } from './util/object.util';
import {
addFileTemplateFields,
compileStringTemplate,
keyToPathArray,
parseDateFromEntry,
} from './widgets/stringTemplate';
import type { Collection, Config, Entry, EntryData, Slug } from '../interface';
/** Default commit message templates; overridable via `backend.commit_messages`. */
const commitMessageTemplates = {
  create: 'Create {{collection}} “{{slug}}”',
  update: 'Update {{collection}} “{{slug}}”',
  delete: 'Delete {{collection}} “{{slug}}”',
  uploadMedia: 'Upload “{{path}}”',
  deleteMedia: 'Delete “{{path}}”',
} as const;

// Matches `{{variable}}` placeholders inside a template.
const variableRegex = /\{\{([^}]+)\}\}/g;

type Options = {
  slug?: string;
  path?: string;
  collection?: Collection;
  authorLogin?: string;
  authorName?: string;
};

/**
 * Render the commit message for `type`, substituting `{{…}}` placeholders
 * from `options`. Unknown placeholders are logged and replaced with ''.
 */
export function commitMessageFormatter(
  type: keyof typeof commitMessageTemplates,
  config: Config,
  { slug, path, collection, authorLogin, authorName }: Options,
) {
  const templates = { ...commitMessageTemplates, ...(config.backend.commit_messages || {}) };

  // Map avoids accidental prototype hits (e.g. a `{{toString}}` placeholder).
  const substitutions = new Map<string, string>([
    ['slug', slug || ''],
    ['path', path || ''],
    ['collection', collection ? collection.label_singular || collection.label : ''],
    ['author-login', authorLogin || ''],
    ['author-name', authorName || ''],
  ]);

  return templates[type].replace(variableRegex, (_, variable) => {
    const replacement = substitutions.get(variable);
    if (replacement !== undefined) {
      return replacement;
    }
    console.warn(`Ignoring unknown variable “${variable}” in commit message template.`);
    return '';
  });
}
/**
 * Normalize a slug candidate: trim whitespace, lower-case it, strip single
 * quotes, and turn periods into dashes.
 */
export function prepareSlug(slug: string) {
  const trimmed = slug.trim();
  const lowered = trimmed.toLocaleLowerCase();
  const withoutQuotes = lowered.replace(/[']/g, '');
  return withoutQuotes.replace(/[.]/g, '-');
}
/**
 * Build the per-segment slug processor used during template compilation.
 * Values listed in `ignoreValues` pass through untouched; everything else is
 * stringified, normalized with `prepareSlug`, then sanitized per `slugConfig`.
 */
export function getProcessSegment(slugConfig?: Slug, ignoreValues?: string[]) {
  return (value: string) => {
    if (ignoreValues && ignoreValues.includes(value)) {
      return value;
    }
    // Direct composition replaces the lodash `flow`/`partialRight` chain.
    return sanitizeSlug(prepareSlug(String(value)), slugConfig);
  };
}
/**
 * Compute an entry's slug from the collection's slug template and the entry's
 * identifier field. For collections with a `path` template, the slug is then
 * nested inside the compiled path.
 *
 * @throws when the entry has no usable identifier field.
 */
export function slugFormatter(collection: Collection, entryData: EntryData, slugConfig?: Slug) {
  const identifier = get(entryData, keyToPathArray(selectIdentifier(collection)));
  if (!identifier) {
    throw new Error(
      'Collection must have a field name that is a valid entry identifier, or must have `identifier_field` set',
    );
  }

  const slugTemplate = collection.slug || '{{slug}}';
  const processSegment = getProcessSegment(slugConfig);
  const now = new Date();
  const slug = compileStringTemplate(slugTemplate, now, identifier, entryData, processSegment);

  if (!('path' in collection)) {
    return slug;
  }

  // Nest the computed slug inside the collection's path template; the slug
  // itself is already processed, so it is passed through unchanged.
  const pathTemplate = prepareSlug(collection.path as string);
  return compileStringTemplate(pathTemplate, now, slug, entryData, (value: string) =>
    value === slug ? value : processSegment(value),
  );
}
/**
 * Render an entry's list summary from `summaryTemplate`.
 * Commit author/date are exposed to the template unless the collection
 * already defines fields with those names.
 */
export function summaryFormatter(summaryTemplate: string, entry: Entry, collection: Collection) {
  const date = parseDateFromEntry(entry, selectInferedField(collection, 'date')) || null;
  const identifier = get(entry.data, keyToPathArray(selectIdentifier(collection)));

  let entryData =
    addFileTemplateFields(entry.path, entry.data, 'folder' in collection ? collection.folder : '') ??
    {};

  // allow commit information in summary template
  if (entry.author && !selectField(collection, COMMIT_AUTHOR)) {
    entryData = set(entryData, COMMIT_AUTHOR, entry.author);
  }
  if (entry.updatedOn && !selectField(collection, COMMIT_DATE)) {
    entryData = set(entryData, COMMIT_DATE, entry.updatedOn);
  }

  return compileStringTemplate(summaryTemplate, date, identifier, entryData);
}
/**
 * Compile a media/public folder template for an entry. Exposes the default
 * folder under `folderKey` plus file-derived fields (dirname etc.) to the
 * template; segments matching the default folder or dirname are left as-is.
 */
export function folderFormatter(
  folderTemplate: string,
  entry: Entry | undefined,
  collection: Collection,
  defaultFolder: string,
  folderKey: string,
  slugConfig?: Slug,
) {
  // Without entry data there is nothing to substitute.
  if (!entry || !entry.data) {
    return folderTemplate;
  }

  const withFolder = set(entry.data, folderKey, defaultFolder) as EntryData;
  const fields = addFileTemplateFields(
    entry.path,
    withFolder,
    'folder' in collection ? collection.folder : '',
  );

  const date = parseDateFromEntry(entry, selectInferedField(collection, 'date')) || null;
  const identifier = get(fields, keyToPathArray(selectIdentifier(collection)));
  const processSegment = getProcessSegment(slugConfig, [defaultFolder, fields?.dirname as string]);

  return compileStringTemplate(folderTemplate, date, identifier, fields, processSegment);
}

@ -0,0 +1,22 @@
import { useEffect, useState } from 'react';
/**
 * Debounce `value`: the hook re-renders with the latest value only after it
 * has been stable for `delay` ms. A delay of 0 passes the value through
 * synchronously.
 */
export default function useDebounce<T>(value: T, delay: number): T {
  const [debounced, setDebounced] = useState(value);

  useEffect(() => {
    if (delay === 0) {
      setDebounced(value);
      return;
    }
    const timer = setTimeout(() => setDebounced(value), delay);
    return () => clearTimeout(timer);
  }, [value, delay]);

  return delay === 0 ? value : debounced;
}

@ -0,0 +1,25 @@
import { useCallback, useRef } from 'react';
/**
 * Return a debounced wrapper around `func`: invocations are delayed by
 * `wait` ms and any pending invocation is cancelled by a newer call.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const useDebouncedCallback = <T extends (...args: any) => any>(func: T, wait: number) => {
  // A ref keeps the pending timer across renders without causing re-renders.
  const pending = useRef<NodeJS.Timeout>();

  return useCallback(
    (...args: Parameters<T>) => {
      clearTimeout(pending.current);
      pending.current = setTimeout(() => {
        clearTimeout(pending.current);
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        func(...args);
      }, wait);
    },
    [func, wait],
  );
};

export default useDebouncedCallback;

@ -0,0 +1,30 @@
import { useEffect, useState } from 'react';
import { emptyAsset, getAsset } from '@staticcms/core/actions/media';
import { useAppDispatch } from '@staticcms/core/store/hooks';
import { isNotEmpty } from '../util/string.util';
import type { Collection, Entry, FileOrImageField, MarkdownField } from '@staticcms/core/interface';
/**
 * Whether `url` resolves to a non-empty media asset with a backing file
 * object in the store. Starts as `false` and flips once the lookup settles.
 */
export default function useIsMediaAsset<T extends FileOrImageField | MarkdownField>(
  url: string,
  collection: Collection<T>,
  field: T,
  entry: Entry,
): boolean {
  const dispatch = useAppDispatch();
  const [exists, setExists] = useState(false);

  useEffect(() => {
    const verify = async () => {
      const asset = await dispatch(getAsset<T>(collection, entry, url, field));
      const found = Boolean(
        asset && asset !== emptyAsset && isNotEmpty(asset.toString()) && asset.fileObj,
      );
      setExists(found);
    };

    verify();
  }, [collection, dispatch, entry, field, url]);

  return exists;
}

@ -0,0 +1,28 @@
import { useEffect, useState } from 'react';
import { getAsset } from '@staticcms/core/actions/media';
import { useAppDispatch } from '@staticcms/core/store/hooks';
import { isNotEmpty } from '../util/string.util';
import type { Collection, Entry, FileOrImageField, MarkdownField } from '@staticcms/core/interface';
/**
 * Resolve `url` to its displayable asset source via the media store.
 * Returns the raw `url` until (and unless) a non-empty resolution arrives.
 */
export default function useMediaAsset<T extends FileOrImageField | MarkdownField>(
  url: string,
  collection: Collection<T>,
  field: T,
  entry: Entry,
): string {
  const dispatch = useAppDispatch();
  const [assetSource, setAssetSource] = useState(url);

  useEffect(() => {
    const resolve = async () => {
      const resolved = await dispatch(getAsset<T>(collection, entry, url, field));
      setAssetSource(resolved?.toString() ?? '');
    };

    resolve();
  }, [collection, dispatch, entry, field, url]);

  return isNotEmpty(assetSource) ? assetSource : url;
}

@ -0,0 +1,61 @@
import { useCallback, useEffect, useMemo } from 'react';
import { v4 as uuid } from 'uuid';
import { selectMediaPath } from '@staticcms/core/reducers/mediaLibrary';
import { useAppDispatch, useAppSelector } from '@staticcms/core/store/hooks';
import { openMediaLibrary, removeInsertedMedia } from '@staticcms/core/actions/mediaLibrary';
import type { MouseEvent } from 'react';
import type { FileOrImageField, MarkdownField } from '@staticcms/core/interface';
/**
 * Wire a widget to the media library: returns a click handler that opens the
 * library for this control, and invokes `callback` with the chosen media path
 * once a selection lands in the store.
 */
export default function useMediaInsert<T extends string | string[]>(
  value: T,
  options: { field?: FileOrImageField | MarkdownField; controlID?: string; forImage?: boolean },
  callback: (newValue: T) => void,
): (e?: MouseEvent) => void {
  const dispatch = useAppDispatch();
  const { controlID, field, forImage = false } = options;

  // Stable id for this control; generated once when none is supplied.
  const finalControlID = useMemo(() => controlID ?? uuid(), [controlID]);
  const mediaPathSelector = useMemo(() => selectMediaPath(finalControlID), [finalControlID]);
  const mediaPath = useAppSelector(mediaPathSelector);

  const mediaLibraryFieldOptions = useMemo(() => {
    return field?.media_library ?? {};
  }, [field?.media_library]);

  const config = useMemo(
    () => ('config' in mediaLibraryFieldOptions ? mediaLibraryFieldOptions.config : undefined),
    [mediaLibraryFieldOptions],
  );

  // When a selection for this control appears in the store, hand it to the
  // caller, then clear it on the next tick so the effect does not re-fire.
  useEffect(() => {
    if (mediaPath && mediaPath !== value) {
      callback(mediaPath as T);
      setTimeout(() => {
        dispatch(removeInsertedMedia(finalControlID));
      });
    }
  }, [callback, finalControlID, dispatch, mediaPath, value]);

  // Click handler that opens the media library scoped to this control.
  const handleOpenMediaLibrary = useCallback(
    (e?: MouseEvent, { replaceIndex }: { replaceIndex?: number } = {}) => {
      e?.preventDefault();
      dispatch(
        openMediaLibrary({
          controlID: finalControlID,
          forImage,
          value,
          replaceIndex,
          allowMultiple: false,
          config,
          field,
        }),
      );
    },
    [dispatch, finalControlID, forImage, value, config, field],
  );

  return handleOpenMediaLibrary;
}

@ -0,0 +1,23 @@
import { useEffect, useRef } from 'react';
/**
 * Memoize `next` across renders using a caller-supplied equality check:
 * keeps returning the previous value while `compare` reports it equal, so
 * referential identity is preserved for dependency arrays.
 */
export default function useMemoCompare<T>(next: T, compare: (prev: T, next: T) => boolean): T {
  // Ref for storing previous value
  const previousRef = useRef<T>(next);
  const previous = previousRef.current;

  // Pass previous and next value to compare function
  // to determine whether to consider them equal.
  const isEqual = compare(previous, next);

  // If not equal update previousRef to next value.
  // We only update if not equal so that this hook continues to return
  // the same old value if compare keeps returning true.
  // (Runs after render, so `previous` above reflects the prior commit.)
  useEffect(() => {
    if (!isEqual) {
      previousRef.current = next;
    }
  });

  // Finally, if equal then return the previous value
  return isEqual ? previous : next;
}

@ -0,0 +1,6 @@
import { useMemo } from 'react';
import { v4 as uuid } from 'uuid';
/** Stable unique id for the lifetime of the component (memoized uuid v4). */
export default function useUUID() {
  return useMemo(() => uuid(), []);
}

@ -0,0 +1,437 @@
import escapeRegExp from 'lodash/escapeRegExp';
import get from 'lodash/get';
import groupBy from 'lodash/groupBy';
import { selectEntrySlug } from './util/collection.util';
import { set } from './util/object.util';
import type { Field, Collection, Entry, EntryData, i18nCollection, I18nInfo } from '../interface';
import type { EntryDraftState } from '../reducers/entryDraft';
// Key under which a collection declares its i18n configuration.
export const I18N = 'i18n';

/** How localized entries are laid out on disk. */
export enum I18N_STRUCTURE {
  MULTIPLE_FOLDERS = 'multiple_folders',
  MULTIPLE_FILES = 'multiple_files',
  SINGLE_FILE = 'single_file',
}

/** Per-field i18n behavior. */
export enum I18N_FIELD {
  TRANSLATE = 'translate',
  DUPLICATE = 'duplicate',
  NONE = 'none',
}
/** Type guard: does this collection declare an `i18n` configuration? */
export function hasI18n(collection: Collection | i18nCollection): collection is i18nCollection {
  return I18N in collection;
}

export function getI18nInfo(collection: i18nCollection): I18nInfo;
export function getI18nInfo(collection: Collection): I18nInfo | null;
/** The collection's i18n config, or `null` when absent or not an object. */
export function getI18nInfo(collection: Collection | i18nCollection): I18nInfo | null {
  if (hasI18n(collection) && typeof collection[I18N] === 'object') {
    return collection.i18n;
  }
  return null;
}
/** Multi-folder i18n nests entries one directory deeper than `depth`. */
export function getI18nFilesDepth(collection: Collection, depth: number) {
  const { structure } = getI18nInfo(collection) as I18nInfo;
  return structure === I18N_STRUCTURE.MULTIPLE_FOLDERS ? depth + 1 : depth;
}
/** A field is independently translated only outside the default locale. */
export function isFieldTranslatable(field: Field, locale?: string, defaultLocale?: string) {
  return locale !== defaultLocale && field.i18n === I18N_FIELD.TRANSLATE;
}

/** A duplicated field mirrors the default-locale value in other locales. */
export function isFieldDuplicate(field: Field, locale?: string, defaultLocale?: string) {
  return locale !== defaultLocale && field.i18n === I18N_FIELD.DUPLICATE;
}

/** Fields marked `none` are hidden for non-default locales. */
export function isFieldHidden(field: Field, locale?: string, defaultLocale?: string) {
  return locale !== defaultLocale && field.i18n === I18N_FIELD.NONE;
}
/** Object path to a locale's data within an entry: ['i18n', locale, 'data']. */
export function getLocaleDataPath(locale: string) {
  return [I18N, locale, 'data'];
}

/** Default-locale data lives at ['data']; other locales under their i18n path. */
export function getDataPath(locale: string, defaultLocale: string) {
  return locale === defaultLocale ? ['data'] : getLocaleDataPath(locale);
}
/**
 * Localize an entry path for the given i18n structure:
 * multiple folders → `…/<locale>/<slug>…`; multiple files →
 * `…<slug>.<locale>.<ext>`; single file → unchanged.
 */
export function getFilePath(
  structure: I18N_STRUCTURE,
  extension: string,
  path: string,
  slug: string,
  locale: string,
) {
  if (structure === I18N_STRUCTURE.MULTIPLE_FOLDERS) {
    return path.replace(`/${slug}`, `/${locale}/${slug}`);
  }
  if (structure === I18N_STRUCTURE.MULTIPLE_FILES) {
    // Swap the trailing extension for `<locale>.<extension>`.
    return path.replace(new RegExp(`${escapeRegExp(extension)}$`), `${locale}.${extension}`);
  }
  // SINGLE_FILE (and any unknown structure) keeps one path for all locales.
  return path;
}
/**
 * Extract the locale encoded in a localized entry path; '' for single-file
 * structures (where no locale is encoded in the path).
 */
export function getLocaleFromPath(structure: I18N_STRUCTURE, extension: string, path: string) {
  if (structure === I18N_STRUCTURE.MULTIPLE_FOLDERS) {
    // The locale is the directory immediately containing the file.
    const segments = path.split('/');
    segments.pop(); // drop the filename
    return segments.pop(); // locale directory
  }
  if (structure === I18N_STRUCTURE.MULTIPLE_FILES) {
    // The locale is the last dotted segment before the extension.
    const withoutExtension = path.slice(0, -`.${extension}`.length);
    return withoutExtension.split('.').pop();
  }
  return '';
}
/**
 * All on-disk paths for an entry: one per locale for multi-file/folder
 * structures, or just the single shared path.
 */
export function getFilePaths(
  collection: Collection,
  extension: string,
  path: string,
  slug: string,
) {
  const { structure, locales } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    return [path];
  }

  return locales.map(locale =>
    getFilePath(structure as I18N_STRUCTURE, extension, path, slug, locale),
  );
}
/**
 * Strip the locale marker from a localized path, producing the canonical
 * (default) entry path for grouping.
 */
export function normalizeFilePath(structure: I18N_STRUCTURE, path: string, locale: string) {
  if (structure === I18N_STRUCTURE.MULTIPLE_FOLDERS) {
    // NOTE(review): removes the first `<locale>/` occurrence anywhere in the
    // path — assumes the locale directory is the first such match; verify for
    // unusual folder layouts.
    return path.replace(`${locale}/`, '');
  }
  if (structure === I18N_STRUCTURE.MULTIPLE_FILES) {
    return path.replace(`.${locale}`, '');
  }
  return path;
}
/**
 * Serialize an entry draft into the file set to persist for its i18n
 * structure: a single combined document (SINGLE_FILE) or one file per locale
 * with non-empty data.
 *
 * NOTE: mutates `entryDraft.data` in place while serializing each locale, so
 * the draft should not be reused by the caller afterwards.
 */
export function getI18nFiles(
  collection: Collection,
  extension: string,
  entryDraft: Entry,
  entryToRaw: (entryDraft: Entry) => string,
  path: string,
  slug: string,
  newPath?: string,
) {
  const {
    structure = I18N_STRUCTURE.SINGLE_FILE,
    defaultLocale,
    locales,
  } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    // Fold every locale's data into one object keyed by locale.
    const data = locales.reduce((map, locale) => {
      const dataPath = getDataPath(locale, defaultLocale);
      if (map) {
        map[locale] = get(entryDraft, dataPath);
      }
      return map;
    }, {} as EntryData);

    entryDraft.data = data;

    return [
      {
        path: getFilePath(structure, extension, path, slug, locales[0]),
        slug,
        raw: entryToRaw(entryDraft),
        ...(newPath && {
          newPath: getFilePath(structure, extension, newPath, slug, locales[0]),
        }),
      },
    ];
  }

  // One file per locale; locales that serialize to empty raw are dropped.
  const dataFiles = locales
    .map(locale => {
      const dataPath = getDataPath(locale, defaultLocale);
      // Point the draft at this locale's data before serializing it.
      entryDraft.data = get(entryDraft, dataPath);
      return {
        path: getFilePath(structure, extension, path, slug, locale),
        slug,
        raw: entryDraft.data ? entryToRaw(entryDraft) : '',
        ...(newPath && {
          newPath: getFilePath(structure, extension, newPath, slug, locale),
        }),
      };
    })
    .filter(dataFile => dataFile.raw);
  return dataFiles;
}
/**
 * Serialize each non-default locale's data to raw text for draft backup,
 * keyed by locale. Locales without data are skipped.
 *
 * NOTE: mutates `entry.data` in place for each serialized locale; callers
 * should not rely on `entry.data` afterwards.
 */
export function getI18nBackup(
  collection: Collection,
  entry: Entry,
  entryToRaw: (entry: Entry) => string,
) {
  const { locales, defaultLocale } = getI18nInfo(collection) as I18nInfo;

  const i18nBackup = locales
    .filter(l => l !== defaultLocale)
    .reduce((acc, locale) => {
      const dataPath = getDataPath(locale, defaultLocale);
      const data = get(entry, dataPath);
      if (!data) {
        return acc;
      }
      // Point the entry at this locale's data before serializing.
      entry.data = data;
      return { ...acc, [locale]: { raw: entryToRaw(entry) } };
    }, {} as Record<string, { raw: string }>);

  return i18nBackup;
}
/**
 * Convert raw-text locale backups into `{ [locale]: { data } }` form by
 * parsing each raw document with `formatRawData`.
 */
export function formatI18nBackup(
  i18nBackup: Record<string, { raw: string }>,
  formatRawData: (raw: string) => Entry,
) {
  return Object.fromEntries(
    Object.entries(i18nBackup).map(([locale, { raw }]) => {
      const { data } = formatRawData(raw);
      return [locale, { data }];
    }),
  );
}
/**
 * Merge per-locale entry values (multiple-files / multiple-folders
 * structures) into one entry: the default locale's value is the base, and
 * every other locale's data is nested under `i18n.<locale>.data`.
 */
function mergeValues(
  collection: Collection,
  structure: I18N_STRUCTURE,
  defaultLocale: string,
  values: { locale: string; value: Entry }[],
) {
  let defaultEntry = values.find(e => e.locale === defaultLocale);
  if (!defaultEntry) {
    // Fall back to the first available locale so merging can still proceed.
    defaultEntry = values[0];
    // Fixed typo in the original message ("Could not locale entry").
    console.warn(`Could not locate entry for default locale '${defaultLocale}'`);
  }

  const i18n = values
    .filter(e => e.locale !== defaultEntry!.locale)
    .reduce((acc, { locale, value }) => {
      const dataPath = getLocaleDataPath(locale);
      return set(acc, dataPath.join('.'), value.data);
    }, {});

  const path = normalizeFilePath(structure, defaultEntry.value.path, defaultLocale);
  const slug = selectEntrySlug(collection, path) as string;

  const entryValue: Entry = {
    ...defaultEntry.value,
    raw: '',
    ...i18n,
    path,
    slug,
  };

  return entryValue;
}
/**
 * Split a single-file i18n document into base data (the default locale) plus
 * an `i18n` map for every other locale that has data.
 */
function mergeSingleFileValue(entryValue: Entry, defaultLocale: string, locales: string[]): Entry {
  const data = (entryValue.data?.[defaultLocale] ?? {}) as EntryData;

  const i18n: Record<string, { data: unknown }> = {};
  for (const locale of locales) {
    if (locale === defaultLocale) {
      continue;
    }
    const localeData = entryValue.data?.[locale];
    if (localeData) {
      i18n[locale] = { data: localeData };
    }
  }

  return {
    ...entryValue,
    data,
    i18n,
    raw: '',
  };
}
/**
 * Load an entry with all of its locales resolved: either split one combined
 * document (SINGLE_FILE) or fetch every locale file in parallel and merge
 * them, tolerating locales whose file is missing.
 */
export async function getI18nEntry(
  collection: Collection,
  extension: string,
  path: string,
  slug: string,
  getEntryValue: (path: string) => Promise<Entry>,
) {
  const {
    structure = I18N_STRUCTURE.SINGLE_FILE,
    locales,
    defaultLocale,
  } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    return mergeSingleFileValue(await getEntryValue(path), defaultLocale, locales);
  }

  // One file per locale: fetch all concurrently; a failed fetch yields null.
  const perLocale = await Promise.all(
    locales.map(async locale => {
      const localePath = getFilePath(structure, extension, path, slug, locale);
      const value = await getEntryValue(localePath).catch(() => null);
      return { value, locale };
    }),
  );

  const available = perLocale.filter(e => e.value !== null) as {
    value: Entry;
    locale: string;
  }[];

  return mergeValues(collection, structure, defaultLocale, available);
}
/**
 * Collapse a flat list of per-locale entries into one merged entry per
 * logical document. Single-file structures merge in place; file/folder
 * structures are grouped by their locale-normalized path first.
 */
export function groupEntries(collection: Collection, extension: string, entries: Entry[]): Entry[] {
  const {
    structure = I18N_STRUCTURE.SINGLE_FILE,
    defaultLocale,
    locales,
  } = getI18nInfo(collection) as I18nInfo;

  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    return entries.map(entry => mergeSingleFileValue(entry, defaultLocale, locales));
  }

  // Tag each entry with the locale encoded in its path, then bucket entries
  // of the same document together via the locale-independent path.
  const tagged = entries.map(entry => ({
    locale: getLocaleFromPath(structure, extension, entry.path) as string,
    value: entry,
  }));
  const grouped = groupBy(tagged, ({ locale, value }) =>
    normalizeFilePath(structure, value.path, locale),
  );
  return Object.values(grouped).map(values =>
    mergeValues(collection, structure, defaultLocale, values),
  );
}
/**
 * Map each expected locale file path to its entry in `diffFiles`, inserting a
 * placeholder (`id: ''`, `newFile: false`) for locale files absent from the
 * diff. Single-file structures pass the diff through untouched.
 */
export function getI18nDataFiles(
  collection: Collection,
  extension: string,
  path: string,
  slug: string,
  diffFiles: { path: string; id: string; newFile: boolean }[],
) {
  const { structure } = getI18nInfo(collection) as I18nInfo;
  if (structure === I18N_STRUCTURE.SINGLE_FILE) {
    return diffFiles;
  }
  const localePaths = getFilePaths(collection, extension, path, slug);
  return localePaths.map(
    localePath =>
      diffFiles.find(file => file.path === localePath) ?? {
        path: localePath,
        id: '',
        newFile: false,
      },
  );
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function duplicateDefaultI18nFields(collection: Collection, dataFields: any) {
  // Give every non-default locale a copy of (a reference to) the default
  // locale's data fields.
  const { locales, defaultLocale } = getI18nInfo(collection) as I18nInfo;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const i18nFields: Record<string, { data: any }> = {};
  for (const locale of locales) {
    if (locale !== defaultLocale) {
      i18nFields[locale] = { data: dataFields };
    }
  }
  return i18nFields;
}
export function duplicateI18nFields(
entryDraft: EntryDraftState,
field: Field,
locales: string[],
defaultLocale: string,
fieldPath: string[] = [field.name],
) {
const value = get(entryDraft, ['entry', 'data', ...fieldPath]);
if (field.i18n === I18N_FIELD.DUPLICATE) {
locales
.filter(l => l !== defaultLocale)
.forEach(l => {
entryDraft = get(
entryDraft,
['entry', ...getDataPath(l, defaultLocale), ...fieldPath],
value,
);
});
}
if ('fields' in field && !Array.isArray(value)) {
field.fields?.forEach(field => {
entryDraft = duplicateI18nFields(entryDraft, field, locales, defaultLocale, [
...fieldPath,
field.name,
]);
});
}
return entryDraft;
}
/**
 * Select which locale's data the preview should show. The default locale (or
 * no locale) previews the entry unchanged; any other locale swaps that
 * locale's i18n data into `entry.data`.
 * NOTE: mutates and returns the entry that was passed in.
 */
export function getPreviewEntry(
  entry: Entry,
  locale: string | undefined,
  defaultLocale: string | undefined,
) {
  const isDefaultLocale = !locale || locale === defaultLocale;
  if (!isDefaultLocale) {
    entry.data = entry.i18n?.[locale as string]?.data as EntryData;
  }
  return entry;
}
/**
 * Run the widget value serializers over every non-default locale's data
 * (`i18n.<locale>.data`), returning the updated entry. The default locale's
 * data is expected to be serialized separately by the caller.
 */
export function serializeI18n(
  collection: Collection,
  entry: Entry,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  serializeValues: (data: any) => any,
) {
  const { locales, defaultLocale } = getI18nInfo(collection) as I18nInfo;
  const nonDefaultLocales = locales.filter(locale => locale !== defaultLocale);
  for (const locale of nonDefaultLocales) {
    const dataPath = getLocaleDataPath(locale);
    entry = set(entry, dataPath.join('.'), serializeValues(get(entry, dataPath)));
  }
  return entry;
}

@ -0,0 +1,3 @@
export * from './auth';
export * from './util';
export * from './widgets';

@ -0,0 +1,9 @@
/* eslint-disable import/prefer-default-export */
import merge from 'lodash/merge';
import { getLocale } from './registry';
/**
 * Build the phrase map for a locale by layering it over the English locale,
 * so any key missing a translation falls back to its English phrase.
 */
export function getPhrases(locale: string) {
  return merge({}, getLocale('en'), getLocale(locale));
}

@ -0,0 +1,391 @@
import { oneLine } from 'common-tags';
import type {
AdditionalLink,
BackendClass,
BackendInitializer,
BackendInitializerOptions,
BaseField,
Config,
CustomIcon,
Entry,
EntryData,
EventData,
EventListener,
Field,
LocalePhrasesRoot,
MediaLibraryExternalLibrary,
MediaLibraryOptions,
PreviewStyle,
PreviewStyleOptions,
ShortcodeConfig,
TemplatePreviewComponent,
UnknownField,
Widget,
WidgetOptions,
WidgetParam,
WidgetValueSerializer,
} from '../interface';
/** Lifecycle events that listeners may be attached to via `registerEventListener`. */
export const allowedEvents = ['prePublish', 'postPublish', 'preSave', 'postSave'] as const;
export type AllowedEvent = typeof allowedEvents[number];
// Seed an empty handler list for every allowed event.
const eventHandlers = allowedEvents.reduce((acc, e) => {
  acc[e] = [];
  return acc;
}, {} as Record<AllowedEvent, { handler: EventListener['handler']; options: Record<string, unknown> }[]>);
/** Shape of the single module-level registry holding every extension point. */
interface Registry {
  backends: Record<string, BackendInitializer>;
  templates: Record<string, TemplatePreviewComponent<EntryData>>;
  widgets: Record<string, Widget>;
  icons: Record<string, CustomIcon>;
  additionalLinks: Record<string, AdditionalLink>;
  widgetValueSerializers: Record<string, WidgetValueSerializer>;
  mediaLibraries: (MediaLibraryExternalLibrary & { options: MediaLibraryOptions })[];
  locales: Record<string, LocalePhrasesRoot>;
  eventHandlers: typeof eventHandlers;
  previewStyles: PreviewStyle[];
  /** Markdown editor */
  shortcodes: Record<string, ShortcodeConfig>;
}
/**
 * Global Registry Object
 *
 * Mutable module-level singleton; all register*/get* functions below read
 * and write this object.
 */
const registry: Registry = {
  backends: {},
  templates: {},
  widgets: {},
  icons: {},
  additionalLinks: {},
  widgetValueSerializers: {},
  mediaLibraries: [],
  locales: {},
  eventHandlers,
  previewStyles: [],
  shortcodes: {},
};
// Aggregated default export exposing the registry's public API in one object.
export default {
  registerPreviewTemplate,
  getPreviewTemplate,
  registerWidget,
  getWidget,
  getWidgets,
  resolveWidget,
  registerWidgetValueSerializer,
  getWidgetValueSerializer,
  registerBackend,
  getBackend,
  registerMediaLibrary,
  getMediaLibrary,
  registerLocale,
  getLocale,
  registerEventListener,
  removeEventListener,
  getEventListeners,
  invokeEvent,
  registerIcon,
  getIcon,
  registerAdditionalLink,
  getAdditionalLinks,
  registerPreviewStyle,
  getPreviewStyles,
  registerShortcode,
  getShortcode,
  getShortcodes,
};
/**
 * Preview Styles
 *
 * Valid options:
 * - raw {boolean} if `true`, `style` value is expected to be a CSS string
 */
// When `raw` is false (default), `style` is treated as a stylesheet URL.
export function registerPreviewStyle(style: string, { raw = false }: PreviewStyleOptions = {}) {
  registry.previewStyles.push({ value: style, raw });
}
export function getPreviewStyles() {
  return registry.previewStyles;
}
/**
 * Preview Templates
 */
// Registering under an existing name silently replaces the previous template.
export function registerPreviewTemplate<T>(name: string, component: TemplatePreviewComponent<T>) {
  registry.templates[name] = component as TemplatePreviewComponent<EntryData>;
}
// Returns undefined when no template is registered under `name`.
export function getPreviewTemplate(name: string): TemplatePreviewComponent<EntryData> {
  return registry.templates[name];
}
/**
 * Editor Widgets
 */
// Overload: register a list of widget definition objects at once.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function registerWidget(widgets: WidgetParam<any, any>[]): void;
// Overload: register a single widget definition object.
export function registerWidget(widget: WidgetParam): void;
// Overload: register by name with an explicit control (or the name of an
// already-registered widget whose control should be reused), plus optional
// preview component and options.
export function registerWidget<T = unknown, F extends BaseField = UnknownField>(
  name: string,
  control: string | Widget<T, F>['control'],
  preview?: Widget<T, F>['preview'],
  options?: WidgetOptions<T, F>,
): void;
export function registerWidget<T = unknown, F extends BaseField = UnknownField>(
  nameOrWidgetOrWidgets: string | WidgetParam<T, F> | WidgetParam[],
  control?: string | Widget<T, F>['control'],
  preview?: Widget<T, F>['preview'],
  {
    schema,
    validator = () => false,
    getValidValue = (value: T | null | undefined) => value,
    getDefaultValue,
  }: WidgetOptions<T, F> = {},
): void {
  if (Array.isArray(nameOrWidgetOrWidgets)) {
    // List form: register each element; non-objects are reported and skipped.
    nameOrWidgetOrWidgets.forEach(widget => {
      if (typeof widget !== 'object') {
        console.error(`Cannot register widget: ${widget}`);
      } else {
        registerWidget(widget);
      }
    });
  } else if (typeof nameOrWidgetOrWidgets === 'string') {
    // A registered widget control can be reused by a new widget, allowing
    // multiple copies with different previews.
    const newControl = (
      typeof control === 'string' ? registry.widgets[control]?.control : control
    ) as Widget['control'];
    // If `control` names a widget that does not exist, nothing is registered.
    if (newControl) {
      registry.widgets[nameOrWidgetOrWidgets] = {
        control: newControl,
        preview: preview as Widget['preview'],
        validator: validator as Widget['validator'],
        getValidValue: getValidValue as Widget['getValidValue'],
        getDefaultValue: getDefaultValue as Widget['getDefaultValue'],
        schema,
      };
    }
  } else if (typeof nameOrWidgetOrWidgets === 'object') {
    // Definition-object form: unpack the widget param, applying the same
    // defaults as the positional form.
    const {
      name: widgetName,
      controlComponent: control,
      previewComponent: preview,
      options: {
        validator = () => false,
        getValidValue = (value: T | undefined | null) => value,
        getDefaultValue,
        schema,
      } = {},
    } = nameOrWidgetOrWidgets;
    if (registry.widgets[widgetName]) {
      console.warn(oneLine`
        Multiple widgets registered with name "${widgetName}". Only the last widget registered with
        this name will be used.
      `);
    }
    if (!control) {
      throw Error(`Widget "${widgetName}" registered without \`controlComponent\`.`);
    }
    registry.widgets[widgetName] = {
      control,
      preview,
      validator,
      getValidValue,
      getDefaultValue,
      schema,
    } as unknown as Widget;
  } else {
    console.error('`registerWidget` failed, called with incorrect arguments.');
  }
}
/** Look up a widget by name; undefined (cast) when not registered. */
export function getWidget<T = unknown, F extends Field = Field>(name: string): Widget<T, F> {
  return registry.widgets[name] as unknown as Widget<T, F>;
}

/** Flatten the widget registry into a list of widgets tagged with their names. */
export function getWidgets(): ({
  name: string;
} & Widget<unknown>)[] {
  const entries = Object.entries(registry.widgets) as [string, Widget<unknown>][];
  return entries.map(([name, widget]) => ({ name, ...widget }));
}

/** Resolve a (possibly missing) widget name, falling back to 'string', then 'unknown'. */
export function resolveWidget<T = unknown, F extends Field = Field>(name?: string): Widget<T, F> {
  return getWidget(name || 'string') || getWidget('unknown');
}
/**
 * Widget Serializers
 */
// Registering under an existing widget name replaces the previous serializer.
export function registerWidgetValueSerializer(
  widgetName: string,
  serializer: WidgetValueSerializer,
) {
  registry.widgetValueSerializers[widgetName] = serializer;
}
export function getWidgetValueSerializer(widgetName: string): WidgetValueSerializer | undefined {
  return registry.widgetValueSerializers[widgetName];
}
/**
 * Backends
 */
/**
 * Register a backend class under `name`. Invalid arguments or a duplicate
 * name are reported via `console.error` and ignored rather than thrown.
 */
export function registerBackend<
  T extends { new (config: Config, options: BackendInitializerOptions): BackendClass },
>(name: string, BackendClass: T) {
  if (!name || !BackendClass) {
    console.error(
      "Backend parameters invalid. example: CMS.registerBackend('myBackend', BackendClass)",
    );
    return;
  }
  if (registry.backends[name]) {
    console.error(`Backend [${name}] already registered. Please choose a different name.`);
    return;
  }
  // Store a lazy initializer so the backend is only constructed when used.
  registry.backends[name] = {
    init: (config: Config, options: BackendInitializerOptions) => new BackendClass(config, options),
  };
}

export function getBackend(name: string): BackendInitializer {
  return registry.backends[name];
}
/**
 * Media Libraries
 */
/**
 * Register an external media library with its options. Unlike most registry
 * setters, a duplicate name here throws.
 */
export function registerMediaLibrary(
  mediaLibrary: MediaLibraryExternalLibrary,
  options: MediaLibraryOptions = {},
) {
  const alreadyRegistered = registry.mediaLibraries.some(ml => ml.name === mediaLibrary.name);
  if (alreadyRegistered) {
    throw new Error(`A media library named ${mediaLibrary.name} has already been registered.`);
  }
  registry.mediaLibraries.push({ ...mediaLibrary, options });
}

export function getMediaLibrary(
  name: string,
): (MediaLibraryExternalLibrary & { options: MediaLibraryOptions }) | undefined {
  return registry.mediaLibraries.find(ml => ml.name === name);
}
/**
 * Event Handlers
 */
// Throws on unknown event names so registration bugs surface immediately.
function validateEventName(name: string) {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  if (!allowedEvents.includes(name as any)) {
    throw new Error(`Invalid event name '${name}'`);
  }
}
// Returns a shallow copy so callers cannot mutate the registry's list.
export function getEventListeners(name: AllowedEvent) {
  validateEventName(name);
  return [...registry.eventHandlers[name]];
}
/** Attach `handler` (with optional per-listener options) to the named event. */
export function registerEventListener(
  { name, handler }: EventListener,
  options: Record<string, unknown> = {},
) {
  validateEventName(name);
  registry.eventHandlers[name].push({ handler, options });
}
/**
 * Invoke every handler registered for `name`, sequentially and in
 * registration order. A handler may return replacement entry data; when it
 * does, subsequent handlers see an entry carrying that data. Returns the
 * final entry data after all handlers have run.
 */
export async function invokeEvent({ name, data }: { name: AllowedEvent; data: EventData }) {
  validateEventName(name);
  const handlers = registry.eventHandlers[name];
  let _data = { ...data };
  // Sequential on purpose: each handler sees the previous handler's result.
  for (const { handler, options } of handlers) {
    const result = await handler(_data, options);
    if (result !== undefined) {
      const entry = {
        ..._data.entry,
        data: result,
      } as Entry;
      // `_data` only ever differs from `data` by its entry, so rebuilding
      // from `data` here is equivalent to spreading `_data`.
      _data = { ...data, entry };
    }
  }
  return _data.entry.data;
}
/**
 * Detach a listener from the named event. With a handler, only that handler
 * is removed; without one, every listener for the event is cleared.
 */
export function removeEventListener({ name, handler }: EventListener) {
  validateEventName(name);
  const current = registry.eventHandlers[name];
  registry.eventHandlers[name] = handler ? current.filter(item => item.handler !== handler) : [];
}
/**
 * Locales
 */
/** Register a phrase map for a locale; invalid arguments are reported and ignored. */
export function registerLocale(locale: string, phrases: LocalePhrasesRoot) {
  if (!locale || !phrases) {
    console.error("Locale parameters invalid. example: CMS.registerLocale('locale', phrases)");
    return;
  }
  registry.locales[locale] = phrases;
}

export function getLocale(locale: string): LocalePhrasesRoot | undefined {
  return registry.locales[locale];
}

/**
 * Icons
 */
/** Register a custom icon component under `name`, replacing any previous one. */
export function registerIcon(name: string, icon: CustomIcon) {
  registry.icons[name] = icon;
}

/** Look up a registered icon; null when none is registered under `name`. */
export function getIcon(name: string): CustomIcon | null {
  const icon = registry.icons[name];
  return icon !== undefined ? icon : null;
}
/**
 * Additional Links
 */
// Links are keyed (and deduplicated) by their id; re-registering replaces.
export function registerAdditionalLink(link: AdditionalLink) {
  registry.additionalLinks[link.id] = link;
}
export function getAdditionalLinks(): Record<string, AdditionalLink> {
  return registry.additionalLinks;
}
// Returns undefined when no link is registered under `id`.
export function getAdditionalLink(id: string): AdditionalLink | undefined {
  return registry.additionalLinks[id];
}
/**
 * Markdown editor shortcodes
 */
/** Register a markdown shortcode; a duplicate name is reported and ignored. */
export function registerShortcode(name: string, config: ShortcodeConfig) {
  // BUG FIX: the duplicate check previously read `registry.backends[name]`,
  // so duplicate shortcodes were silently allowed and a backend sharing the
  // name would wrongly block shortcode registration.
  if (registry.shortcodes[name]) {
    console.error(`Shortcode [${name}] already registered. Please choose a different name.`);
    return;
  }
  registry.shortcodes[name] = config;
}

export function getShortcode(name: string): ShortcodeConfig {
  return registry.shortcodes[name];
}

export function getShortcodes(): Record<string, ShortcodeConfig> {
  return registry.shortcodes;
}

@ -0,0 +1,88 @@
import merge from 'lodash/merge';
import { getWidgetValueSerializer } from './registry';
import { isNullish } from './util/null.util';
import type { EntryData, Field, ObjectValue } from '../interface';
/**
 * Methods for serializing/deserializing entry field values. Most widgets don't
 * require this for their values, and those that do can typically serialize/
 * deserialize on every change from within the widget. The serialization
 * handlers here are for widgets whose values require heavy serialization that
 * would hurt performance if run for every change.
 * An example of this is the markdown widget, whose value is stored as a
 * markdown string. Instead of stringifying on every change of that field, a
 * deserialization method is registered from the widget's control module that
 * converts the stored markdown string to an AST, and that AST serves as the
 * widget model during editing.
 *
 * Serialization handlers should be registered for each widget that requires
 * them, and the registration method is exposed through the registry. Any
 * registered deserialization handlers run on entry load, and serialization
 * handlers run on persist.
 */
/**
 * Walk `fields` over `values`, applying each widget's registered serializer
 * (in the direction given by `method`) and recursing into nested object and
 * list fields. Unknown fields (present in `values` but not in `fields`) are
 * preserved by the final merge.
 */
function runSerializer(
  values: EntryData,
  fields: Field[] | undefined,
  method: 'serialize' | 'deserialize',
) {
  /**
   * Reduce the list of fields to a map where keys are field names and values
   * are field values, serializing the values of fields whose widgets have
   * registered serializers. If the field is a list or object, call recursively
   * for nested fields.
   */
  let serializedData =
    fields?.reduce((acc, field) => {
      const fieldName = field.name;
      const value = values?.[fieldName];
      const serializer =
        'widget' in field && field.widget ? getWidgetValueSerializer(field.widget) : undefined;
      const nestedFields = 'fields' in field ? field.fields : undefined;
      // Call recursively for fields within lists
      if (nestedFields && Array.isArray(value)) {
        // NOTE(review): each object item overwrites acc[fieldName], so only
        // the last object element's serialized result is kept — confirm this
        // is the intended handling of list values.
        for (const val of value) {
          if (typeof val === 'object') {
            acc[fieldName] = runSerializer(val as Record<string, EntryData>, nestedFields, method);
          }
        }
        return acc;
      }
      // Call recursively for fields within objects
      if (nestedFields && typeof value === 'object') {
        acc[fieldName] = runSerializer(value as Record<string, EntryData>, nestedFields, method);
        return acc;
      }
      // Run serialization method on value if not null or undefined
      if (serializer && !isNullish(value)) {
        acc[fieldName] = serializer[method](value);
        return acc;
      }
      // If no serializer is registered for the field's widget, use the field as is
      if (!isNullish(value)) {
        acc[fieldName] = value;
        return acc;
      }
      return acc;
    }, {} as ObjectValue) ?? {};
  //preserve unknown fields value
  // NOTE(review): lodash `merge` mutates its first argument, so `values` is
  // modified in place here in addition to acting as the merge base — verify
  // no caller relies on `values` remaining untouched.
  serializedData = merge(values, serializedData);
  return serializedData;
}
/** Apply registered widget serializers to `values` (run on persist). */
export function serializeValues(values: EntryData, fields: Field[] | undefined) {
  return runSerializer(values, fields, 'serialize');
}
/** Apply registered widget deserializers to `values` (run on entry load). */
export function deserializeValues(values: EntryData, fields: Field[] | undefined) {
  return runSerializer(values, fields, 'deserialize');
}

@ -0,0 +1,12 @@
/* eslint-disable import/prefer-default-export */
/**
 * Deterministically derive a six-digit uppercase hex color code from a
 * string (e.g. for avatar/tag colors). Empty input yields black ('000000').
 */
export function stringToRGB(str: string) {
  if (!str) {
    return '000000';
  }
  // Classic djb2-style rolling hash over UTF-16 code units.
  const hash = str.split('').reduce((acc, ch) => ch.charCodeAt(0) + ((acc << 5) - acc), 0);
  const hex = (hash & 0x00ffffff).toString(16).toUpperCase();
  return hex.padStart(6, '0');
}

@ -0,0 +1,128 @@
import url from 'url';
import urlJoin from 'url-join';
import diacritics from 'diacritics';
import sanitizeFilename from 'sanitize-filename';
import isString from 'lodash/isString';
import escapeRegExp from 'lodash/escapeRegExp';
import flow from 'lodash/flow';
import partialRight from 'lodash/partialRight';
import type { Slug } from '../interface';
/** Optionally prefix an app route with '/#' so it addresses the hash router. */
function getUrl(urlString: string, direct?: boolean) {
  return direct ? `/#${urlString}` : urlString;
}

/** Route to a collection's listing page. */
export function getCollectionUrl(collectionName: string, direct?: boolean) {
  return getUrl(`/collections/${collectionName}`, direct);
}

/** Route to the "new entry" page of a collection. */
export function getNewEntryUrl(collectionName: string, direct?: boolean) {
  return getUrl(`/collections/${collectionName}/new`, direct);
}
/**
 * Append query parameters to a URL, preserving any parameters it already has.
 */
export function addParams(urlString: string, params: Record<string, string>) {
  const parsedUrl = url.parse(urlString, true);
  parsedUrl.query = { ...parsedUrl.query, ...params };
  // BUG FIX: legacy `url.format` prefers the pre-parsed `search` string over
  // the `query` object, so without clearing it the merged params were dropped
  // whenever the input URL already contained a query string.
  parsedUrl.search = null;
  return url.format(parsedUrl);
}
/** Remove everything through '//' (e.g. 'https://') when present. */
export function stripProtocol(urlString: string) {
  const markerIndex = urlString.indexOf('//');
  if (markerIndex === -1) {
    return urlString;
  }
  return urlString.slice(markerIndex + 2);
}
/* See https://www.w3.org/International/articles/idn-and-iri/#path.
 * According to the new IRI (Internationalized Resource Identifier) spec, RFC 3987,
 * ASCII chars should be kept the same way as in standard URIs (letters digits _ - . ~).
 * Non-ASCII chars (unless they are not in the allowed "ucschars" list) should be percent-encoded.
 * If the string is not encoded in Unicode, it should be converted to UTF-8 and normalized first,
 * but JS stores strings as UTF-16/UCS-2 internally, so we should not normalize or re-encode.
 */
const uriChars = /[\w\-.~]/i;
const ucsChars =
  /[\xA0-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}]/u;

/** True when `char` is a URI-safe ASCII character. */
function validURIChar(char: string) {
  return uriChars.test(char);
}

/** True when `char` is URI-safe ASCII or an allowed IRI "ucschar". */
function validIRIChar(char: string) {
  return uriChars.test(char) || ucsChars.test(char);
}

/**
 * Build a per-character sanitizer: safe characters pass through, everything
 * else becomes `replacement`. Throws on an unknown encoding or when the
 * replacement contains unsafe characters itself.
 */
export function getCharReplacer(encoding: string, replacement: string) {
  let validChar: (char: string) => boolean;
  switch (encoding) {
    case 'unicode':
      validChar = validIRIChar;
      break;
    case 'ascii':
      validChar = validURIChar;
      break;
    default:
      throw new Error('`options.encoding` must be "unicode" or "ascii".');
  }
  // Check and make sure the replacement character is actually a safe char itself.
  const replacementIsSafe = Array.from(replacement).every(validChar);
  if (!replacementIsSafe) {
    throw new Error('The replacement character(s) (options.replacement) is itself unsafe.');
  }
  return (char: string) => (validChar(char) ? char : replacement);
}
// `sanitizeURI` does not actually URI-encode the chars (that is the browser's and server's job), just removes the ones that are not allowed.
export function sanitizeURI(
  str: string,
  options?: { replacement: Slug['sanitize_replacement']; encoding: Slug['encoding'] },
) {
  const { replacement = '', encoding = 'unicode' } = options || {};
  if (!isString(str)) {
    throw new Error('The input slug must be a string.');
  }
  if (!isString(replacement)) {
    throw new Error('`options.replacement` must be a string.');
  }
  // `Array.from` is used instead of `String.split` so astral characters
  // (e.g. emojis) stay whole instead of being split into surrogate pairs.
  const chars = Array.from(str);
  const replaceChar = getCharReplacer(encoding, replacement);
  return chars.map(replaceChar).join('');
}

/** Sanitize a single character using the slug config's encoding/replacement. */
export function sanitizeChar(char: string, options?: Slug) {
  const { encoding = 'unicode', sanitize_replacement: replacement = '' } = options || {};
  return getCharReplacer(encoding, replacement)(char);
}
/**
 * Produce a URL- and filename-safe slug: optionally strip diacritics, drop
 * disallowed characters, then collapse repeated replacement characters and
 * trim them from both ends.
 */
export function sanitizeSlug(str: string, options?: Slug) {
  if (!isString(str)) {
    throw new Error('The input slug must be a string.');
  }
  const {
    encoding,
    clean_accents: stripDiacritics,
    sanitize_replacement: replacement,
  } = options || {};
  // Run the raw string through each sanitizer in turn.
  let sanitizedSlug: string = str;
  if (stripDiacritics) {
    sanitizedSlug = diacritics.remove(sanitizedSlug);
  }
  sanitizedSlug = sanitizeURI(sanitizedSlug, { replacement, encoding });
  sanitizedSlug = sanitizeFilename(sanitizedSlug, { replacement });
  // Remove any doubled or leading/trailing replacement characters (that were added in the sanitizers).
  // NOTE(review): with an undefined `replacement` these regexes degrade to
  // empty patterns — presumably callers always pass a normalized config;
  // verify against the config defaults.
  const doubleReplacement = new RegExp(`(?:${escapeRegExp(replacement)})+`, 'g');
  const trailingReplacement = new RegExp(`${escapeRegExp(replacement)}$`);
  const leadingReplacement = new RegExp(`^${escapeRegExp(replacement)}`);
  return sanitizedSlug
    .replace(doubleReplacement, replacement)
    .replace(leadingReplacement, '')
    .replace(trailingReplacement, '');
}
/** Concatenate path segments onto `base`; delegates to the `url-join` package. */
export function joinUrlPath(base: string, ...path: string[]) {
  return urlJoin(base, ...path);
}

@ -0,0 +1,183 @@
import { asyncLock } from './asyncLock';
import unsentRequest from './unsentRequest';
import APIError from './APIError';
import type { AsyncLock } from './asyncLock';
import type { FileMetadata } from '@staticcms/core/interface';
/** Error raised for failed HTTP responses, carrying the response status code. */
export class FetchError extends Error {
  status: number;
  constructor(message: string, status: number) {
    super(message);
    this.status = status;
  }
}
/** Minimal surface `requestWithBackoff` requires of a backend API client. */
interface API {
  // Lock shared across requests; set while paused due to rate limiting.
  rateLimiter?: AsyncLock;
  buildRequest: (req: ApiRequest) => ApiRequest | Promise<ApiRequest>;
  // Custom fetch implementation; defaults to `unsentRequest.performRequest`.
  requestFunction?: (req: ApiRequest) => Promise<Response>;
}
export interface ApiRequestURL {
  url: string;
  params?: Record<string, string>;
}
export type ApiRequestObject = RequestInit & ApiRequestURL;
/** A request is either a bare URL string or a RequestInit carrying URL details. */
export type ApiRequest = ApiRequestObject | string;
/**
 * Error signalling a rate-limited response. `resetSeconds` is the wait before
 * retrying: negative inputs fall back to one second, and the wait is capped
 * at one hour.
 */
class RateLimitError extends Error {
  resetSeconds: number;

  constructor(message: string, resetSeconds: number) {
    super(message);
    if (resetSeconds < 0) {
      this.resetSeconds = 1;
    } else {
      this.resetSeconds = Math.min(resetSeconds, 60 * 60);
    }
  }
}
/**
 * Perform `req` through the API client, retrying on failure (up to five
 * attempts). Rate-limited responses — 429 (GitLab/Bitbucket) or 403 with an
 * "API rate limit exceeded" message (GitHub) — pause all requests sharing
 * `api.rateLimiter` until the reported reset window elapses.
 */
export async function requestWithBackoff(
  api: API,
  req: ApiRequest,
  attempt = 1,
): Promise<Response> {
  // If a previous attempt installed a rate limiter, wait until it releases.
  if (api.rateLimiter) {
    await api.rateLimiter.acquire();
  }
  try {
    const builtRequest = await api.buildRequest(req);
    const requestFunction = api.requestFunction || unsentRequest.performRequest;
    const response: Response = await requestFunction(builtRequest);
    if (response.status === 429) {
      // GitLab/Bitbucket too many requests
      const text = await response.text().catch(() => 'Too many requests');
      throw new Error(text);
    } else if (response.status === 403) {
      // GitHub too many requests
      const json = await response.json().catch(() => ({ message: '' }));
      if (json.message.match('API rate limit exceeded')) {
        const now = new Date();
        // Wait until the reported reset time; default to one minute from now.
        const nextWindowInSeconds = response.headers.has('X-RateLimit-Reset')
          ? parseInt(response.headers.get('X-RateLimit-Reset') ?? '0')
          : now.getTime() / 1000 + 60;
        throw new RateLimitError(json.message, nextWindowInSeconds);
      }
      // Body already consumed above; re-expose the parsed JSON to callers.
      response.json = () => Promise.resolve(json);
    }
    return response;
  } catch (error: unknown) {
    if (error instanceof Error) {
      if (attempt > 5 || error.message === "Can't refresh access token when using implicit auth") {
        throw error;
      } else if (error instanceof RateLimitError) {
        // Install a shared lock so concurrent requests also pause; released
        // by the timer below once the rate-limit window has passed.
        if (!api.rateLimiter) {
          const timeout = error.resetSeconds || attempt * attempt;
          console.info(
            `Pausing requests for ${timeout} ${
              attempt === 1 ? 'second' : 'seconds'
            } due to fetch failures:`,
            error.message,
          );
          api.rateLimiter = asyncLock();
          api.rateLimiter.acquire();
          setTimeout(() => {
            api.rateLimiter?.release();
            api.rateLimiter = undefined;
            console.info(`Done pausing requests`);
          }, 1000 * timeout);
        }
        return requestWithBackoff(api, req, attempt + 1);
      }
    }
    throw error;
  }
}
/**
 * Read file content through a localForage cache. Cache keys are `gh.<id>` for
 * text and `gh.<id>.blob` for binary content; without an id the cache is
 * bypassed entirely.
 */
export async function readFile(
  id: string | null | undefined,
  fetchContent: () => Promise<string | Blob>,
  localForage: LocalForage,
  isText: boolean,
) {
  const key = id ? (isText ? `gh.${id}` : `gh.${id}.blob`) : null;
  if (key) {
    const cached = await localForage.getItem<string | Blob>(key);
    if (cached) {
      return cached;
    }
  }
  const content = await fetchContent();
  if (key) {
    await localForage.setItem(key, content);
  }
  return content;
}

/** Cache key under which a file's metadata is stored. */
function getFileMetadataKey(id: string) {
  return `gh.${id}.meta`;
}

/**
 * Read file metadata through the localForage cache; same caching contract as
 * `readFile` but keyed with a `.meta` suffix.
 */
export async function readFileMetadata(
  id: string | null | undefined,
  fetchMetadata: () => Promise<FileMetadata>,
  localForage: LocalForage,
) {
  const key = id ? getFileMetadataKey(id) : null;
  if (key) {
    const cached = await localForage.getItem<FileMetadata>(key);
    if (cached) {
      return cached;
    }
  }
  const metadata = await fetchMetadata();
  if (key) {
    await localForage.setItem<FileMetadata>(key, metadata);
  }
  return metadata;
}
/**
 * List the branch names that would conflict with creating `branchName`:
 * every proper prefix of its slash-separated path.
 */
function getConflictingBranches(branchName: string) {
  // for cms/posts/post-1, conflicting branches are cms/posts, cms
  const parts = branchName.split('/');
  parts.pop();
  return parts.map((_, index) => parts.slice(0, index + 1).join('/'));
}

/**
 * Throw an APIError when any prefix of `branchName` already exists as a
 * branch (git forbids a branch under a name that is itself a branch).
 * Lookup failures are treated as "branch does not exist".
 */
export async function throwOnConflictingBranches(
  branchName: string,
  getBranch: (name: string) => Promise<{ name: string }>,
  apiName: string,
) {
  const candidates = getConflictingBranches(branchName);
  const existingNames = await Promise.all(
    candidates.map(async candidate => {
      try {
        const branch = await getBranch(candidate);
        return branch.name;
      } catch {
        return '';
      }
    }),
  );
  const conflictingBranch = existingNames.find(Boolean);
  if (conflictingBranch) {
    throw new APIError(
      `Failed creating branch '${branchName}' since there is already a branch named '${conflictingBranch}'. Please delete the '${conflictingBranch}' branch and try again`,
      500,
      apiName,
    );
  }
}

@ -0,0 +1,17 @@
export const API_ERROR = 'API_ERROR';
/**
 * Generic backend API error: carries the HTTP status (nullable when no
 * status applies), the name of the API that produced it, and optional
 * metadata for error reporting.
 */
export default class APIError extends Error {
  message: string;
  status: null | number;
  api: string;
  meta: {};
  constructor(message: string, status: null | number, api: string, meta = {}) {
    super(message);
    this.message = message;
    this.status = status;
    this.api = api;
    // `name` is fixed so errors can be identified by comparison with API_ERROR.
    this.name = API_ERROR;
    this.meta = meta;
  }
}

@ -0,0 +1,8 @@
/** Build a content key namespacing a slug by its collection: "<collection>/<slug>". */
export function generateContentKey(collectionName: string, slug: string) {
  return [collectionName, slug].join('/');
}

/** Split a content key on its FIRST '/' only — slugs themselves may contain '/'. */
export function parseContentKey(contentKey: string) {
  const separatorIndex = contentKey.indexOf('/');
  return {
    collection: contentKey.slice(0, separatorIndex),
    slug: contentKey.slice(separatorIndex + 1),
  };
}

@ -0,0 +1,11 @@
export const ACCESS_TOKEN_ERROR = 'ACCESS_TOKEN_ERROR';
/** Error raised for access-token failures; identified via its fixed `name`. */
export default class AccessTokenError extends Error {
  message: string;
  constructor(message: string) {
    super(message);
    this.message = message;
    this.name = ACCESS_TOKEN_ERROR;
  }
}

@ -0,0 +1,195 @@
/** Backing data of a Cursor: available actions plus arbitrary data and meta maps. */
export interface CursorStore {
  actions: Set<string>;
  data: Record<string, unknown>;
  meta: Record<string, unknown>;
}

type ActionHandler = (action: string) => unknown;

// Meta keys the cursor understands; anything else is dropped on creation.
const knownMetaKeys = [
  'index',
  'page',
  'count',
  'pageSize',
  'pageCount',
  'usingOldPaginationAPI',
  'extension',
  'folder',
  'depth',
];

/** Keep only meta entries whose keys appear in `knownMetaKeys`. */
function filterUnknownMetaKeys(meta: Record<string, unknown>) {
  const filtered: Record<string, unknown> = {};
  for (const key of Object.keys(meta ?? {})) {
    if (knownMetaKeys.includes(key)) {
      filtered[key] = meta[key];
    }
  }
  return filtered;
}
/*
  createCursorMap takes one of three signatures:
  - () -> cursor with empty actions, data, and meta
  - (cursorMap: <object/Record with optional actions, data, and meta keys>) -> cursor
  - (actions: <array/List>, data: <object/Record>, meta: <optional object/Record>) -> cursor
*/
function createCursorStore(...args: unknown[]) {
  const { actions, data, meta } =
    args.length === 1
      ? ((args[0] ?? { actions: new Set<string>(), data: {}, meta: {} }) as CursorStore)
      : ({ actions: args[0], data: args[1], meta: args[2] } as CursorStore);
  return {
    // actions are a Set, rather than a List, to ensure an efficient .has
    actions: new Set([...actions]),
    // data and meta are Maps
    data,
    meta: filterUnknownMetaKeys(meta),
  } as CursorStore;
}

/** Whether the store offers the given action. */
function hasAction(store: CursorStore, action: string) {
  return store.actions.has(action);
}

/** Invoke `handler` once per action available on the store. */
function getActionHandlers(store: CursorStore, handler: ActionHandler) {
  // BUG FIX: this previously used `for..in`, which enumerates object
  // properties — nothing at all for a native Set — so the handler was never
  // called. `for..of` iterates the Set's values.
  for (const action of store.actions) {
    handler(action);
  }
}
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface
// Every mutator below returns a NEW Cursor built from an updated store; the
// receiver is never modified in place.
export default class Cursor {
  store: CursorStore;
  // Convenience aliases into the store's fields.
  actions: Set<string>;
  data: Record<string, unknown>;
  meta: Record<string, unknown>;
  // NOTE(review): `{}` as a parameter type means "any non-nullish value";
  // presumably these args mirror createCursorStore's signatures — verify.
  static create(...args: {}[]) {
    return new Cursor(...args);
  }
  constructor(...args: {}[]) {
    // Copy-construct from an existing cursor without rebuilding the store.
    if (args[0] instanceof Cursor) {
      this.store = args[0].store;
      this.actions = args[0].actions;
      this.data = args[0].data;
      this.meta = args[0].meta;
      return;
    }
    this.store = createCursorStore(...args);
    this.actions = this.store.actions;
    this.data = this.store.data;
    this.meta = this.store.meta;
  }
  // Core combinator: derive a new Cursor from a transformed store.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  updateStore(update: (store: CursorStore) => CursorStore) {
    return new Cursor(update(this.store));
  }
  hasAction(action: string) {
    return hasAction(this.store, action);
  }
  addAction(action: string) {
    return this.updateStore(store => ({
      ...store,
      actions: new Set([...store.actions, action]),
    }));
  }
  removeAction(action: string) {
    return this.updateStore(store => {
      const newActions = new Set([...store.actions]);
      newActions.delete(action);
      return {
        ...store,
        actions: newActions,
      };
    });
  }
  // Replace the action set entirely.
  setActions(actions: Iterable<string>) {
    return this.updateStore(store => ({
      ...store,
      actions: new Set(actions),
    }));
  }
  // Union the given actions into the existing set.
  mergeActions(actions: Set<string>) {
    return this.updateStore(store => ({
      ...store,
      actions: new Set([...store.actions, ...actions]),
    }));
  }
  getActionHandlers(handler: ActionHandler) {
    return getActionHandlers(this.store, handler);
  }
  setData(data: Record<string, unknown>) {
    return this.updateStore(store => ({
      ...store,
      data,
    }));
  }
  // Shallow-merge `data` over the existing data map.
  mergeData(data: Record<string, unknown>) {
    return this.updateStore(store => ({
      ...store,
      data: { ...store.data, ...data },
    }));
  }
  // Stash the current data under `wrapped_cursor_data` so it can be restored
  // later via unwrapData().
  wrapData(data: Record<string, unknown>) {
    return this.updateStore(store => ({
      ...store,
      data: {
        ...data,
        wrapped_cursor_data: store.data,
      },
    }));
  }
  // Inverse of wrapData: returns the outer data and a cursor whose data is
  // the previously wrapped inner data.
  unwrapData(): [CursorStore['data'], Cursor] {
    return [
      this.store.data,
      this.updateStore(store => ({
        ...store,
        data: store.data.wrapped_cursor_data as Record<string, unknown>,
      })),
    ];
  }
  clearData() {
    return this.updateStore(store => ({
      ...store,
      data: {},
    }));
  }
  setMeta(meta: Record<string, unknown>) {
    return this.updateStore(store => ({
      ...store,
      meta,
    }));
  }
  // Shallow-merge `meta` over the existing meta map.
  mergeMeta(meta: Record<string, unknown>) {
    return this.updateStore(store => ({
      ...store,
      meta: { ...store.meta, ...meta },
    }));
  }
}
// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol('cursor key for compatibility with old backends');

@ -0,0 +1,159 @@
import { set } from '../object.util';
// Unit tests for the immutable `set` helper: writing a value at a
// dot-separated path must return an updated copy and leave the input
// object (and nested arrays) untouched.
describe('object.util', () => {
  describe('set', () => {
    describe('simple object', () => {
      test('existing key', () => {
        const testObject = {
          something: '12345',
          somethingElse: 5,
        };

        const updatedObject = set(testObject, 'something', '54321');

        // original is unchanged; only the returned copy carries the new value
        expect(testObject.something).toBe('12345');
        expect(updatedObject.something).toBe('54321');
      });

      test('new key', () => {
        const testObject = {
          something: '12345',
          somethingElse: 5,
        } as {
          something: string;
          somethingElse: number;
          somethingNew?: string;
        };

        const updatedObject = set(testObject, 'somethingNew', 'aNewValue');

        expect(testObject.somethingNew).toBeUndefined();
        expect(updatedObject.somethingNew).toBe('aNewValue');
      });
    });

    describe('nested object', () => {
      test('existing key', () => {
        const testObject = {
          something: '12345',
          somethingElse: {
            nestedValue: 65,
          },
        };

        const updatedObject = set(testObject, 'somethingElse.nestedValue', 125);

        expect(testObject.somethingElse.nestedValue).toBe(65);
        expect(updatedObject.somethingElse.nestedValue).toBe(125);
      });

      test('new key', () => {
        // setting a deep path must create every missing intermediate layer
        const testObject = {
          something: '12345',
          somethingElse: {
            nestedValue: 65,
          },
        } as {
          something: string;
          somethingElse: {
            nestedValue: number;
          };
          somethingNew?: {
            nestedLayer: {
              anotherNestedLayer: string;
            };
          };
        };

        const updatedObject = set(
          testObject,
          'somethingNew.nestedLayer.anotherNestedLayer',
          'aNewNestedValue',
        );

        expect(testObject.somethingNew?.nestedLayer.anotherNestedLayer).toBeUndefined();
        expect(updatedObject.somethingNew?.nestedLayer.anotherNestedLayer).toBe('aNewNestedValue');
      });
    });

    describe('simple array', () => {
      test('existing key', () => {
        const testObject = {
          something: '12345',
          somethingElse: [6, 5, 3],
        };

        const updatedObject = set(testObject, 'somethingElse.1', 13);

        expect(updatedObject.somethingElse).toStrictEqual([6, 13, 3]);
      });

      test('new index should be ignored', () => {
        // out-of-bounds indexes do not grow the array
        const testObject = {
          something: '12345',
          somethingElse: [6, 5, 3],
        };

        const updatedObject = set(testObject, 'somethingElse.3', 84);

        expect(updatedObject.somethingElse).toStrictEqual([6, 5, 3]);
      });
    });

    describe('object array', () => {
      test('existing key', () => {
        const testObject = {
          something: '12345',
          somethingElse: [
            { name: 'one', value: '11111' },
            { name: 'two', value: '22222' },
            { name: 'three', value: '33333' },
          ],
        };

        const updatedObject = set(testObject, 'somethingElse.1.value', 'aNewValue');

        expect(testObject.somethingElse[1].value).toBe('22222');
        expect(updatedObject.somethingElse[1].value).toBe('aNewValue');
      });

      test('new index should be ignored', () => {
        const testObject = {
          something: '12345',
          somethingElse: [
            { name: 'one', value: '11111' },
            { name: 'two', value: '22222' },
            { name: 'three', value: '33333' },
          ],
        };

        const updatedObject = set(testObject, 'somethingElse.3.value', 'valueToBeIgnored');

        expect(updatedObject.somethingElse.length).toBe(3);
      });

      test('new key inside existing index', () => {
        const testObject = {
          something: '12345',
          somethingElse: [
            { name: 'one', value: '11111' },
            { name: 'two', value: '22222' },
            { name: 'three', value: '33333' },
          ],
        } as {
          something: string;
          somethingElse: {
            name: string;
            value: string;
            newKey?: string;
          }[];
        };

        const updatedObject = set(testObject, 'somethingElse.1.newKey', 'newValueToBeAdded');

        expect(testObject.somethingElse[1].newKey).toBeUndefined();
        expect(updatedObject.somethingElse[1].newKey).toBe('newValueToBeAdded');
      });
    });
  });
});

@ -0,0 +1,43 @@
import semaphore from 'semaphore';
// Promise-based mutual exclusion built on a binary semaphore.
export type AsyncLock = { release: () => void; acquire: () => Promise<boolean> };

export function asyncLock(): AsyncLock {
  // `let` because the semaphore is re-created after a timeout or an
  // over-released lock (see below).
  let lock = semaphore(1);

  /**
   * Resolve true once the lock is held, or false if it could not be obtained
   * within `timeout` ms. On timeout the semaphore is replaced so future
   * callers are not blocked by a stuck holder.
   */
  function acquire(timeout = 15000) {
    const promise = new Promise<boolean>(resolve => {
      // this makes sure a caller doesn't get stuck forever awaiting the lock
      const timeoutId = setTimeout(() => {
        // we reset the lock in that case to allow future consumers to use it without being blocked
        lock = semaphore(1);
        resolve(false);
      }, timeout);

      lock.take(() => {
        clearTimeout(timeoutId);
        resolve(true);
      });
    });

    return promise;
  }

  function release() {
    try {
      // suppress too many calls to leave error
      lock.leave();
    } catch (e: unknown) {
      // calling 'leave' too many times might not be good behavior
      // but there is no reason to completely fail on it
      if (e instanceof Error && e.message !== 'leave called too many times.') {
        throw e;
      } else {
        // reset to a fresh semaphore so the lock stays usable
        console.warn('leave called too many times.');
        lock = semaphore(1);
      }
    }
  }

  return { acquire, release };
}

@ -0,0 +1,143 @@
import flow from 'lodash/flow';
import fromPairs from 'lodash/fromPairs';
import { map } from 'lodash/fp';
import unsentRequest from './unsentRequest';
import APIError from './APIError';
/**
 * True when `file.path` ends with `extension` (a leading dot is added to the
 * extension when missing). A missing/empty path never matches.
 */
export function filterByExtension(file: { path: string }, extension: string) {
  const suffix = extension.startsWith('.') ? extension : `.${extension}`;
  const path = file?.path || '';
  return path.endsWith(suffix);
}
// Base response formatters; also serves as the type source for
// catchFormatErrors/responseFormatters below.
const formatters = {
  json: async (res: Response) => {
    // reject non-JSON bodies early with a clear message
    const contentType = res.headers.get('Content-Type') || '';
    if (!contentType.startsWith('application/json') && !contentType.startsWith('text/json')) {
      throw new Error(`${contentType} is not a valid JSON Content-Type`);
    }
    return res.json();
  },
  text: async (res: Response) => res.text(),
  blob: async (res: Response) => res.blob(),
} as const;
/**
 * Wrap a response formatter so any failure surfaces as an error naming the
 * expected format.
 *
 * Fix: the formatters are async, so the previous synchronous try/catch could
 * never observe their rejections — a raw rejected promise escaped unwrapped.
 * Awaiting the formatter routes both sync throws and async rejections
 * through the same catch block.
 */
function catchFormatErrors<T extends keyof typeof formatters>(
  format: T,
  formatter: typeof formatters[T],
) {
  return async (res: Response): Promise<Awaited<ReturnType<typeof formatters[T]>>> => {
    try {
      return await formatter(res);
    } catch (error: unknown) {
      if (error instanceof Error) {
        throw new Error(
          `Response cannot be parsed into the expected format (${format}): ${error.message}`,
        );
      }
      throw error;
    }
  };
}
const responseFormatters = {
json: catchFormatErrors('json', async (res: Response) => {
const contentType = res.headers.get('Content-Type') || '';
if (!contentType.startsWith('application/json') && !contentType.startsWith('text/json')) {
throw new Error(`${contentType} is not a valid JSON Content-Type`);
}
return res.json();
}),
text: catchFormatErrors('text', async (res: Response) => res.text()),
blob: catchFormatErrors('blob', async (res: Response) => res.blob()),
} as const;
interface ParseResponseOptions {
  // when true (default), a non-2xx status is turned into an APIError
  expectingOk?: boolean;
  // body format to parse; defaults to 'text'
  format?: keyof typeof responseFormatters;
  // API name included in thrown APIErrors
  apiName?: string;
}

/**
 * Parse `res` with the requested formatter, converting both parse failures
 * and (optionally) non-OK statuses into APIError.
 */
export async function parseResponse<T extends keyof typeof responseFormatters = 'text'>(
  res: Response,
  { expectingOk = true, format = 'text', apiName = '' }: ParseResponseOptions,
): Promise<Awaited<ReturnType<typeof responseFormatters[T]>>> {
  let body: Awaited<ReturnType<typeof responseFormatters[T]>>;
  try {
    const formatter = responseFormatters[format] ?? false;
    if (!formatter) {
      throw new Error(`${format} is not a supported response format.`);
    }
    body = await formatter(res);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
  } catch (err: any) {
    throw new APIError(err.message, res.status, apiName);
  }
  if (expectingOk && !res.ok) {
    // for JSON errors, prefer the server-provided message fields
    const isJSON = format === 'json';
    const message = isJSON ? body.message || body.msg || body.error?.message : body;
    throw new APIError(isJSON && message ? message : body, res.status, apiName);
  }
  return body;
}
/** Curry parseResponse: capture the options once, return a Response handler. */
export function responseParser<T extends keyof typeof responseFormatters = 'text'>(options: {
  expectingOk?: boolean;
  format: T;
  apiName: string;
}) {
  return function parse(res: Response) {
    return parseResponse<T>(res, options);
  };
}
/**
 * Parse an RFC 5988 `Link` header into a map of rel name -> URL
 * (with `+` decoded to `%20`, as the previous implementation did).
 *
 * Rewritten without the lodash FP pipeline: the old version dereferenced
 * `match(...)` results unchecked and threw a TypeError on any malformed
 * segment; malformed segments are now skipped.
 */
export function parseLinkHeader(header: string | null) {
  if (!header) {
    return {};
  }
  const pairs: [string, string][] = [];
  for (const segment of header.split(',')) {
    const [linkPart, relPart] = segment.trim().split(';');
    const relMatch = relPart?.match(/rel="(.*?)"/);
    const urlMatch = linkPart?.trim().match(/<(.*?)>/);
    if (!relMatch || !urlMatch) {
      continue; // skip malformed segments instead of crashing
    }
    pairs.push([relMatch[1], urlMatch[1].replace(/\+/g, '%20')]);
  }
  return Object.fromEntries(pairs);
}
/**
 * Follow a paginated API by walking the `Link` response header, collecting up
 * to 30 page responses. `linkHeaderRelName` selects which rel to follow
 * (e.g. 'next'); `nextUrlProcessor` may rewrite each next URL before fetching.
 */
export async function getAllResponses(
  url: string,
  options: { headers?: {} } = {}, // NOTE(review): `{}` header type is loose — presumably Record<string, string>; confirm callers
  linkHeaderRelName: string,
  nextUrlProcessor: (url: string) => string,
) {
  // hard cap to avoid looping forever on a misbehaving API
  const maxResponses = 30;
  let responseCount = 1;

  let req = unsentRequest.fromFetchArguments(url, options);

  const pageResponses = [];

  while (req && responseCount < maxResponses) {
    const pageResponse = await unsentRequest.performRequest(req);
    const linkHeader = pageResponse.headers.get('Link');
    const nextURL = linkHeader && parseLinkHeader(linkHeader)[linkHeaderRelName];

    // only headers are forwarded to follow-up requests
    const { headers = {} } = options;
    // falsy nextURL ends the loop (req becomes falsy)
    req = nextURL && unsentRequest.fromFetchArguments(nextUrlProcessor(nextURL), { headers });
    pageResponses.push(pageResponse);
    responseCount++;
  }

  return pageResponses;
}
/** Number of '/'-separated segments in `path` (e.g. 'a/b/c' -> 3, '' -> 1). */
export function getPathDepth(path: string) {
  return path.split('/').length;
}

@ -0,0 +1,414 @@
import get from 'lodash/get';
import { useMemo } from 'react';
import { COMMIT_AUTHOR, COMMIT_DATE } from '@staticcms/core/constants/commitProps';
import {
IDENTIFIER_FIELDS,
INFERABLE_FIELDS,
SORTABLE_FIELDS,
} from '@staticcms/core/constants/fieldInference';
import { formatExtensions } from '@staticcms/core/formats/formats';
import consoleError from '../consoleError';
import { summaryFormatter } from '../formatters';
import { keyToPathArray } from '../widgets/stringTemplate';
import { selectField } from './field.util';
import { selectMediaFolder } from './media.util';
import type { Backend } from '@staticcms/core/backend';
import type { InferredField } from '@staticcms/core/constants/fieldInference';
import type {
Collection,
Config,
Entry,
Field,
FilesCollection,
ObjectField,
SortableField,
} from '@staticcms/core/interface';
/**
 * Pick the file entry of a files collection matching `slug`; without a slug
 * the first file is returned. Undefined when there is no match / no files.
 */
function fileForEntry(collection: FilesCollection, slug?: string) {
  const { files } = collection;
  if (!slug) {
    return files?.[0];
  }
  return files?.find(f => f?.name === slug);
}
/** Fields of a folder collection, or of the file matching `slug` in a files collection. */
export function selectFields(collection: Collection, slug?: string) {
  if ('fields' in collection) {
    return collection.fields;
  }
  return fileForEntry(collection, slug)?.fields;
}

/** Entry file extension for a folder collection, without the leading dot. */
export function selectFolderEntryExtension(collection: Collection) {
  const extension = collection.extension || formatExtensions[collection.format ?? 'frontmatter'];
  return extension.replace(/^\./, '');
}

/** Label of the file matching `slug`; undefined for folder collections. */
export function selectFileEntryLabel(collection: Collection, slug: string) {
  if ('fields' in collection) {
    return undefined;
  }
  return fileForEntry(collection, slug)?.label;
}

/** Repository path for the entry identified by `slug`. */
export function selectEntryPath(collection: Collection, slug: string) {
  if ('fields' in collection) {
    const folder = collection.folder.replace(/\/$/, '');
    const extension = selectFolderEntryExtension(collection);
    return `${folder}/${slug}.${extension}`;
  }
  return fileForEntry(collection, slug)?.file;
}
/** Derive the slug back from an entry `path` (inverse of selectEntryPath). */
export function selectEntrySlug(collection: Collection, path: string) {
  if ('fields' in collection) {
    const folder = (collection.folder as string).replace(/\/$/, '');
    const tail = path.split(folder + '/').pop();
    return tail?.replace(new RegExp(`\\.${selectFolderEntryExtension(collection)}$`), '');
  }
  const file = collection.files.find(f => f?.file === path);
  return file?.name;
}
/** Whether new entries may be created; files collections never allow it. */
export function selectAllowNewEntries(collection: Collection) {
  if (!('fields' in collection)) {
    return false;
  }
  return collection.create ?? true;
}

/** Whether entries may be deleted; files collections never allow it. */
export function selectAllowDeletion(collection: Collection) {
  if (!('fields' in collection)) {
    return false;
  }
  return collection.delete ?? true;
}

/** Template name: the collection name for folder collections, else the file slug. */
export function selectTemplateName(collection: Collection, slug: string) {
  return 'fields' in collection ? collection.name : slug;
}
/**
 * Human-readable title for `entry`, resolved in priority order:
 * summary template -> file label -> inferred title field -> literal 'title'.
 */
export function selectEntryCollectionTitle(collection: Collection, entry: Entry): string {
  // prefer formatted summary over everything else
  const summaryTemplate = collection.summary;
  if (summaryTemplate) {
    return summaryFormatter(summaryTemplate, entry, collection);
  }

  // if the collection is a file collection return the label of the entry
  if ('files' in collection && collection.files) {
    const label = selectFileEntryLabel(collection, entry.slug);
    if (label) {
      return label;
    }
  }

  // try to infer a title field from the entry data
  const entryData = entry.data;
  const titleField = selectInferedField(collection, 'title');
  const result = titleField && get(entryData, keyToPathArray(titleField));

  // if the custom field does not yield a result, fallback to 'title'
  if (!result && titleField !== 'title') {
    return get(entryData, keyToPathArray('title'));
  }
  return result;
}
/**
 * Default sortable fields inferred from the collection; git backends fall
 * back to commit metadata (author, and always commit date first).
 */
export function selectDefaultSortableFields(collection: Collection, backend: Backend) {
  const inferred = SORTABLE_FIELDS.map((type: string) => {
    const field = selectInferedField(collection, type);
    if (!field && type === 'author' && backend.isGitBackend()) {
      // default to commit author if no author field is found
      return COMMIT_AUTHOR;
    }
    return field;
  }).filter(Boolean);

  // always have commit date by default on git backends
  const defaultSortable = backend.isGitBackend() ? [COMMIT_DATE, ...inferred] : inferred;
  return defaultSortable as string[];
}
/**
 * Resolve the configured sortable field keys into SortableField objects,
 * synthesizing labels for the virtual commit date/author fields and dropping
 * keys that match no real field.
 */
export function selectSortableFields(
  collection: Collection,
  t: (key: string) => string,
): SortableField[] {
  const fields = (collection.sortable_fields?.fields ?? [])
    .map(key => {
      // commit date is virtual — synthesize its field descriptor
      if (key === COMMIT_DATE) {
        return { key, field: { name: key, label: t('collection.defaultFields.updatedOn.label') } };
      }
      const field = selectField(collection, key);
      // commit author is virtual only when no real author field exists
      if (key === COMMIT_AUTHOR && !field) {
        return { key, field: { name: key, label: t('collection.defaultFields.author.label') } };
      }

      return { key, field };
    })
    .filter(item => !!item.field)
    .map(item => ({ ...item.field, key: item.key })) as SortableField[];

  return fields;
}
/** Configured view filters for the collection, if any. */
export function selectViewFilters(collection: Collection) {
  return collection.view_filters;
}

/** Configured view groups for the collection, if any. */
export function selectViewGroups(collection: Collection) {
  return collection.view_groups;
}
/**
 * Map of dotted field name -> configured `comment` for every commented field
 * reachable from the collection (or from the file matching the entry slug).
 */
export function selectFieldsComments(collection: Collection, entryMap: Entry) {
  let fields: Field[] = [];
  if ('folder' in collection) {
    fields = collection.fields;
  } else if ('files' in collection) {
    // non-null assertion: 'files' in collection implies the array exists here
    const file = collection.files!.find(f => f?.name === entryMap.slug);
    if (file) {
      fields = file.fields;
    }
  }
  const comments: Record<string, string> = {};
  const names = getFieldsNames(fields);
  names.forEach(name => {
    const field = selectField(collection, name);
    if (field && 'comment' in field) {
      comments[name] = field.comment!;
    }
  });

  return comments;
}
/**
 * Recursively collect every field (including nested `fields`/`types`
 * members) that declares its own `media_folder`.
 */
function getFieldsWithMediaFolders(fields: Field[]) {
  const result: Field[] = [];
  for (const field of fields) {
    if ('media_folder' in field) {
      result.push(field);
    }
    if ('fields' in field) {
      result.push(...getFieldsWithMediaFolders(field.fields ?? []));
    } else if ('types' in field) {
      result.push(...getFieldsWithMediaFolders(field.types ?? []));
    }
  }
  return result;
}
/** File entry of a files collection whose name equals `slug`, if any. */
export function getFileFromSlug(collection: FilesCollection, slug: string) {
  const files = collection.files ?? [];
  return files.find(f => f.name === slug);
}
/**
 * Fields with their own media_folder for the collection (folder collections
 * use the collection fields; files collections use the file matching `slug`).
 *
 * Fix: removed the unreachable trailing `return [];` — both branches of the
 * previous if/else already returned.
 */
export function selectFieldsWithMediaFolders(collection: Collection, slug: string) {
  if ('folder' in collection) {
    return getFieldsWithMediaFolders(collection.fields);
  }
  const fields = getFileFromSlug(collection, slug)?.fields || [];
  return getFieldsWithMediaFolders(fields);
}
/**
 * All distinct media folders relevant to `entry`: per-field folders plus the
 * file/collection level folder when configured.
 *
 * Fix: the return used `new Set(...folders)`, which spreads the FIRST folder
 * string as the Set constructor's iterable — producing a set of its
 * characters. `new Set(folders)` dedupes the folder paths themselves.
 */
export function selectMediaFolders(config: Config, collection: Collection, entry: Entry) {
  const fields = selectFieldsWithMediaFolders(collection, entry.slug);
  const folders = fields.map(f => selectMediaFolder(config, collection, entry, f));
  if ('files' in collection) {
    const file = getFileFromSlug(collection, entry.slug);
    if (file) {
      folders.unshift(selectMediaFolder(config, collection, entry, undefined));
    }
  } else if ('media_folder' in collection) {
    // stop evaluating media folders at collection level
    const newCollection = { ...collection };
    folders.unshift(selectMediaFolder(config, newCollection, entry, undefined));
  }

  return [...new Set(folders)];
}
/**
 * Dotted names of all fields, depth-first: top-level names first, then each
 * field's nested `fields`/`types` prefixed with its own dotted name.
 */
export function getFieldsNames(fields: Field[] | undefined, prefix = '') {
  const names = (fields ?? []).map(f => `${prefix}${f.name}`);

  (fields ?? []).forEach((f, index) => {
    if ('fields' in f) {
      names.push(...getFieldsNames(f.fields, `${names[index]}.`));
    } else if ('types' in f) {
      names.push(...getFieldsNames(f.types, `${names[index]}.`));
    }
  });

  return names;
}
/**
 * Map `updater` over every field, recursing into `fields`/`types`, until
 * `done()` returns true (short-circuits further traversal).
 *
 * NOTE(review): nested `field.fields`/`field.types` are reassigned on the
 * object returned by `updater` — if the updater returns the same object,
 * this mutates it in place; confirm callers expect that.
 */
export function traverseFields(
  fields: Field[],
  updater: (field: Field) => Field,
  done = () => false,
) {
  if (done()) {
    return fields;
  }

  return fields.map(f => {
    const field = updater(f as Field);
    if (done()) {
      return field;
    } else if ('fields' in field) {
      field.fields = traverseFields(field.fields ?? [], updater, done);
      return field;
    } else if ('types' in field) {
      field.types = traverseFields(field.types ?? [], updater, done) as ObjectField[];
      return field;
    } else {
      return field;
    }
  });
}
/**
 * Apply `updater` to the single field addressed by dotted `key`. Identity
 * comparison with the selected field plus the `updated` flag stops the
 * traversal after the first hit.
 *
 * NOTE(review): reassigns `collection.fields` on the passed-in collection —
 * this mutates the argument rather than returning a fresh copy.
 */
export function updateFieldByKey(
  collection: Collection,
  key: string,
  updater: (field: Field) => Field,
): Collection {
  const selected = selectField(collection, key);
  if (!selected) {
    return collection;
  }

  let updated = false;

  function updateAndBreak(f: Field) {
    const field = f as Field;
    if (field === selected) {
      updated = true;
      return updater(field);
    } else {
      return field;
    }
  }

  if ('fields' in collection) {
    collection.fields = traverseFields(collection.fields ?? [], updateAndBreak, () => updated);
  }

  return collection;
}
export function selectIdentifier(collection: Collection) {
const identifier = collection.identifier_field;
const identifierFields = identifier ? [identifier, ...IDENTIFIER_FIELDS] : [...IDENTIFIER_FIELDS];
const fieldNames = getFieldsNames('fields' in collection ? collection.fields ?? [] : []);
return identifierFields.find(id =>
fieldNames.find(name => name.toLowerCase().trim() === id.toLowerCase().trim()),
);
}
/**
 * Infer which field of `collection` should serve as `fieldName` ('title',
 * 'author', ...) using the synonym/type tables in INFERABLE_FIELDS. Returns
 * the matching field name, or null when nothing can be inferred.
 *
 * Fixes: redundant `Field | Field` union in the filter parameter; the short
 * console error title was missing its closing quote.
 */
export function selectInferedField(collection: Collection, fieldName: string) {
  // an explicit identifier_field always wins for titles
  if (fieldName === 'title' && collection.identifier_field) {
    return selectIdentifier(collection);
  }
  const inferableField = (
    INFERABLE_FIELDS as Record<
      string,
      {
        type: string;
        synonyms: string[];
        secondaryTypes: string[];
        fallbackToFirstField: boolean;
        showError: boolean;
      }
    >
  )[fieldName];
  const fields = 'fields' in collection ? collection.fields ?? [] : undefined;
  let field;

  // If collection has no fields or fieldName is not defined within inferables list, return null
  if (!fields || !inferableField) {
    return null;
  }
  // Try to return a field of the specified type with one of the synonyms
  const mainTypeFields = fields
    .filter((f: Field) => (f.widget ?? 'string') === inferableField.type)
    .map(f => f?.name);
  field = mainTypeFields.filter(f => inferableField.synonyms.indexOf(f as string) !== -1);
  if (field && field.length > 0) {
    return field[0];
  }

  // Try to return a field for each of the specified secondary types
  const secondaryTypeFields = fields
    .filter(f => inferableField.secondaryTypes.indexOf(f.widget ?? 'string') !== -1)
    .map(f => f?.name);
  field = secondaryTypeFields.filter(f => inferableField.synonyms.indexOf(f as string) !== -1);
  if (field && field.length > 0) {
    return field[0];
  }

  // Try to return the first field of the specified type
  if (inferableField.fallbackToFirstField && mainTypeFields.length > 0) {
    return mainTypeFields[0];
  }

  // Couldn't infer the field. Show error and return null.
  if (inferableField.showError) {
    consoleError(
      `The Field ${fieldName} is missing for the collection “${collection.name}”`,
      `Static CMS tries to infer the entry ${fieldName} automatically, but one couldn't be found for entries of the collection “${collection.name}”. Please check your site configuration.`,
    );
  }

  return null;
}
/**
 * React hook: memoized map of inferred field name -> InferredField metadata
 * for title, shortTitle and author; recomputed when `collection` changes.
 */
export function useInferedFields(collection: Collection) {
  return useMemo(() => {
    const titleField = selectInferedField(collection, 'title');
    const shortTitleField = selectInferedField(collection, 'shortTitle');
    const authorField = selectInferedField(collection, 'author');

    const iFields: Record<string, InferredField> = {};
    if (titleField) {
      iFields[titleField] = INFERABLE_FIELDS.title;
    }
    if (shortTitleField) {
      iFields[shortTitleField] = INFERABLE_FIELDS.shortTitle;
    }
    if (authorField) {
      iFields[authorField] = INFERABLE_FIELDS.author;
    }
    return iFields;
  }, [collection]);
}

@ -0,0 +1,7 @@
import type { AlertDialogProps } from '@staticcms/core/components/UI/Alert';
/** DOM CustomEvent fired (type 'alert') to ask the UI layer to show an alert dialog. */
export default class AlertEvent extends CustomEvent<AlertDialogProps> {
  constructor(detail: AlertDialogProps) {
    super('alert', { detail });
  }
}

@ -0,0 +1,7 @@
import type { ConfirmDialogProps } from '@staticcms/core/components/UI/Confirm';
/** DOM CustomEvent fired (type 'confirm') to ask the UI layer to show a confirm dialog. */
export default class ConfirmEvent extends CustomEvent<ConfirmDialogProps> {
  constructor(detail: ConfirmDialogProps) {
    super('confirm', { detail });
  }
}

@ -0,0 +1,13 @@
/* eslint-disable import/prefer-default-export */
/**
 * HEAD-check a same-origin URL. Cross-origin URLs are assumed to exist
 * (type 'Unknown') since a HEAD request could be blocked by CORS.
 */
export async function doesUrlFileExist(url: string): Promise<{ type: string; exists: boolean }> {
  const cleanUrl = url.replace(/^blob:/g, '');

  const baseUrl = `${window.location.protocol}//${window.location.host}/`;
  const isSameOrigin = cleanUrl.startsWith('/') || cleanUrl.startsWith(baseUrl);
  if (!isSameOrigin) {
    return { type: 'Unknown', exists: true };
  }

  const response = await fetch(cleanUrl, { method: 'HEAD' });
  return { type: response.headers.get('Content-Type') ?? 'text', exists: response.ok };
}

@ -0,0 +1,67 @@
import { keyToPathArray } from '../widgets/stringTemplate';
import type { t } from 'react-polyglot';
import type { Collection, Field } from '@staticcms/core/interface';
/**
 * Resolve the field addressed by dotted `key` inside the collection, walking
 * nested `fields`/`types` one path segment at a time. Returns the last field
 * found along the path (undefined when the first segment never matches).
 */
export function selectField(collection: Collection, key: string) {
  const array = keyToPathArray(key);
  let name: string | undefined;
  let field: Field | undefined;

  if ('fields' in collection) {
    let fields = collection.fields ?? [];
    // consume one segment per iteration; descend into the matched field
    while ((name = array.shift()) && fields) {
      field = fields.find(f => f.name === name);
      if (field) {
        if ('fields' in field) {
          fields = field?.fields ?? [];
        } else if ('types' in field) {
          fields = field?.types ?? [];
        }
      }
    }
  }

  return field;
}
/** Display label for a field, with a translated "(optional)" suffix when not required. */
export function getFieldLabel(field: Field, t: t) {
  const base = field.label ?? field.name;
  const optionalSuffix =
    field.required === false ? ` (${t('editor.editorControl.field.optional')})` : '';
  return `${base} ${optionalSuffix}`;
}
/**
 * Recursively descend `field.fields` following `path`; returns the field at
 * the end of the path, or null when any step is missing.
 */
function findField(field: Field | undefined, path: string[]): Field | null {
  if (!field) {
    return null;
  }
  if (path.length === 0) {
    return field;
  }
  if (!('fields' in field && field.fields)) {
    return null;
  }
  const [name, ...rest] = path;
  const child = field.fields.find(f => f.name === name);
  return findField(child, rest);
}
/**
 * Look up the field at dotted `path`. A bare field list is wrapped in a
 * synthetic 'root' object field so the same traversal applies.
 */
export function getField(field: Field | Field[], path: string): Field | null {
  const segments = path.split('.');
  if (!Array.isArray(field)) {
    return findField(field, segments);
  }
  return findField({ widget: 'object', name: 'root', fields: field }, segments);
}

@ -0,0 +1,12 @@
import { sha256 } from 'js-sha256';
// SHA-256 hex digest of a Blob's bytes, via FileReader (browser API).
// On read failure the reader is aborted and the promise rejects.
export default (blob: Blob): Promise<string> =>
  new Promise((resolve, reject) => {
    const fr = new FileReader();
    // target.result is an ArrayBuffer here (readAsArrayBuffer below)
    fr.onload = ({ target }) => resolve(sha256(target?.result || ''));
    fr.onerror = err => {
      fr.abort();
      reject(err);
    };
    fr.readAsArrayBuffer(blob);
  });

@ -0,0 +1,133 @@
//
// Pointer file parsing
import { filter, flow, fromPairs, map } from 'lodash/fp';
import getBlobSHA from './getBlobSHA';
import type AssetProxy from '@staticcms/core/valueObjects/AssetProxy';
// Parsed form of a git-lfs pointer file: payload size and sha256 digest.
export interface PointerFile {
  size: number;
  sha: string;
}

/** Split text into lines on '\n'. */
function splitIntoLines(str: string) {
  return str.split('\n');
}

/** Split a line into whitespace-separated words. */
function splitIntoWords(str: string) {
  return str.split(/\s+/g);
}

/** Predicate: keep only non-empty strings. */
function isNonEmptyString(str: string) {
  return str !== '';
}

// Trim every line and drop the empty ones.
const withoutEmptyLines = flow([map((str: string) => str.trim()), filter(isNonEmptyString)]);

// Parse an LFS pointer file ("key value" lines) into a PointerFile:
// the oid line is "oid sha256:<hex>", hence the split on ':'.
export const parsePointerFile: (data: string) => PointerFile = flow([
  splitIntoLines,
  withoutEmptyLines,
  map(splitIntoWords),
  fromPairs,
  ({ size, oid, ...rest }) => ({
    size: parseInt(size),
    sha: oid?.split(':')[1],
    ...rest,
  }),
]);
//
// .gitattributes file parsing
/** Strip a trailing '#' comment from a .gitattributes line. */
function removeGitAttributesCommentsFromLine(line: string) {
  return line.split('#')[0];
}
/**
 * Parse one gitattributes attribute token into a [key, value] pair.
 *
 * There are three kinds of attribute settings:
 * - a key=val pair sets an attribute to a specific value
 * - a key without a value and a leading hyphen sets an attribute to false
 * - a key without a value and no leading hyphen sets an attribute to true
 */
function parseGitPatternAttribute(attributeString: string) {
  if (attributeString.includes('=')) {
    return attributeString.split('=');
  }
  return attributeString.startsWith('-')
    ? [attributeString.slice(1), false]
    : [attributeString, true];
}
// Fold a list of attribute tokens into an attribute-name -> value object.
const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);

// "pattern attr attr ..." line -> [pattern, {attr: value, ...}].
const parseGitAttributesPatternLine = flow([
  splitIntoWords,
  ([pattern, ...attributes]) => [pattern, parseGitPatternAttributes(attributes)],
]);

// Whole .gitattributes file -> list of [pattern, attributes] pairs
// (comments and blank lines removed).
const parseGitAttributesFileToPatternAttributePairs = flow([
  splitIntoLines,
  map(removeGitAttributesCommentsFromLine),
  withoutEmptyLines,
  map(parseGitAttributesPatternLine),
]);

// Patterns whose filter/diff/merge are all 'lfs', i.e. paths tracked by
// git-lfs large media.
export const getLargeMediaPatternsFromGitAttributesFile = flow([
  parseGitAttributesFileToPatternAttributePairs,
  filter(
    ([, attributes]) =>
      attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
  ),
  map(([pattern]) => pattern),
]);

/** Serialize a PointerFile back into the canonical LFS pointer file text. */
export function createPointerFile({ size, sha }: PointerFile) {
  return `\
version https://git-lfs.github.com/spec/v1
oid sha256:${sha}
size ${size}
`;
}
/**
 * Upload `fileObj` to LFS storage via `client`, then build the pointer file
 * that replaces it in the repo. Returns the pointer File plus its size,
 * sha, raw text and target path.
 */
export async function getPointerFileForMediaFileObj(
  client: { uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string> },
  fileObj: File,
  path: string,
) {
  const { name, size } = fileObj;
  // sha of the real payload identifies the LFS object
  const sha = await getBlobSHA(fileObj);
  await client.uploadResource({ sha, size }, fileObj);
  const pointerFileString = createPointerFile({ sha, size });
  const pointerFileBlob = new Blob([pointerFileString]);
  const pointerFile = new File([pointerFileBlob], name, { type: 'text/plain' });
  // sha of the pointer file itself — this is what git will store
  const pointerFileSHA = await getBlobSHA(pointerFile);
  return {
    fileObj: pointerFile,
    size: pointerFileBlob.size,
    sha: pointerFileSHA,
    raw: pointerFileString,
    path,
  };
}
/**
 * Replace every LFS-tracked media file with its pointer-file equivalent;
 * assets whose path does not match the LFS patterns pass through unchanged.
 */
export async function getLargeMediaFilteredMediaFiles(
  client: {
    uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string>;
    matchPath: (path: string) => boolean;
  },
  mediaFiles: AssetProxy[],
) {
  const processed = mediaFiles.map(async mediaFile => {
    const { fileObj, path } = mediaFile;
    // matchPath expects paths without a leading slash
    const fixedPath = path.startsWith('/') ? path.slice(1) : path;
    if (!client.matchPath(fixedPath)) {
      return mediaFile;
    }

    const pointerFileDetails = await getPointerFileForMediaFileObj(client, fileObj as File, path);
    return { ...mediaFile, ...pointerFileDetails };
  });
  return Promise.all(processed);
}

@ -0,0 +1,379 @@
import sortBy from 'lodash/sortBy';
import unionBy from 'lodash/unionBy';
import semaphore from 'semaphore';
import { basename } from './path';
import type { Semaphore } from 'semaphore';
import type {
DisplayURL,
DisplayURLObject,
FileMetadata,
ImplementationEntry,
ImplementationFile,
} from '@staticcms/core/interface';
import type { AsyncLock } from './asyncLock';
// cap on parallel file downloads per fetchFiles call
const MAX_CONCURRENT_DOWNLOADS = 10;

// Read a file's contents (text or blob) from the backend.
type ReadFile = (
  path: string,
  id: string | null | undefined,
  options: { parseText: boolean },
) => Promise<string | Blob>;

// Read commit metadata (author/date) for a file.
type ReadFileMetadata = (path: string, id: string | null | undefined) => Promise<FileMetadata>;

// Optional backend-specific bulk fetcher override.
type CustomFetchFunc = (files: ImplementationFile[]) => Promise<ImplementationEntry[]>;

/**
 * Download contents + metadata for every file, at most
 * MAX_CONCURRENT_DOWNLOADS at a time. Files that fail to load are logged
 * and silently dropped from the result.
 */
async function fetchFiles(
  files: ImplementationFile[],
  readFile: ReadFile,
  readFileMetadata: ReadFileMetadata,
  apiName: string,
) {
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
  files.forEach(file => {
    promises.push(
      new Promise(resolve =>
        sem.take(async () => {
          try {
            const [data, fileMetadata] = await Promise.all([
              readFile(file.path, file.id, { parseText: true }),
              readFileMetadata(file.path, file.id),
            ]);
            resolve({ file: { ...file, ...fileMetadata }, data: data as string });
            sem.leave();
          } catch (error) {
            sem.leave();
            console.error(`failed to load file from ${apiName}: ${file.path}`);
            // resolve (not reject) so Promise.all below still settles
            resolve({ error: true });
          }
        }),
      ),
    );
  });
  return Promise.all(promises).then(loadedEntries =>
    loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
  ) as Promise<ImplementationEntry[]>;
}
/** List a folder's files, then download and parse each one. */
export async function entriesByFolder(
  listFiles: () => Promise<ImplementationFile[]>,
  readFile: ReadFile,
  readFileMetadata: ReadFileMetadata,
  apiName: string,
) {
  const files = await listFiles();
  return fetchFiles(files, readFile, readFileMetadata, apiName);
}
/** Download and parse an explicit list of files (thin wrapper over fetchFiles). */
export async function entriesByFiles(
  files: ImplementationFile[],
  readFile: ReadFile,
  readFileMetadata: ReadFileMetadata,
  apiName: string,
) {
  return fetchFiles(files, readFile, readFileMetadata, apiName);
}
/**
 * Wrap a Blob in a File named `name`, tagging SVGs with their MIME type.
 *
 * Fix: the previous pattern `/.svg$/` had an unescaped dot, so any name
 * ending in "Xsvg" (e.g. "foosvg") was misclassified as SVG; `/\.svg$/`
 * matches only a real ".svg" extension.
 */
export function blobToFileObj(name: string, blob: Blob) {
  const options = name.match(/\.svg$/) ? { type: 'image/svg+xml' } : {};
  return new File([blob], name, options);
}
/**
 * Read a media file as a Blob. SVGs are read as text and re-wrapped so the
 * Blob carries the image/svg+xml type; everything else is read as a raw Blob.
 *
 * Fix: `/.svg$/` (unescaped dot) also matched paths like "asvg"; `/\.svg$/`
 * restricts the text/SVG branch to actual .svg files.
 */
export async function getMediaAsBlob(path: string, id: string | null, readFile: ReadFile) {
  if (path.match(/\.svg$/)) {
    const text = (await readFile(path, id, { parseText: true })) as string;
    return new Blob([text], { type: 'image/svg+xml' });
  }
  return (await readFile(path, id, { parseText: false })) as Blob;
}
/**
 * Resolve a display URL object to an object URL for the media blob,
 * throttled by the shared `semaphore` to bound concurrent downloads.
 * Caller owns the returned object URL (URL.revokeObjectURL when done).
 */
export async function getMediaDisplayURL(
  displayURL: DisplayURL,
  readFile: ReadFile,
  semaphore: Semaphore,
) {
  const { path, id } = displayURL as DisplayURLObject;
  return new Promise<string>((resolve, reject) =>
    semaphore.take(() =>
      getMediaAsBlob(path, id, readFile)
        .then(blob => URL.createObjectURL(blob))
        .then(resolve, reject)
        // always free the semaphore slot, success or failure
        .finally(() => semaphore.leave()),
    ),
  );
}
/**
 * Run `func` while holding `lock`. If the lock cannot be acquired in time the
 * warning `message` is logged and `func` runs anyway; the lock is always
 * released afterwards.
 */
export async function runWithLock(lock: AsyncLock, func: Function, message: string) {
  try {
    const acquired = await lock.acquire();
    if (!acquired) {
      console.warn(message);
    }
    return await func();
  } finally {
    lock.release();
  }
}
// localForage key prefix for cached git trees
const LOCAL_KEY = 'git.local';

// Cached snapshot of a branch folder: head sha + flat file list.
type LocalTree = {
  head: string;
  files: { id: string; name: string; path: string }[];
};

// Parameters that uniquely identify one cached tree.
type GetKeyArgs = {
  branch: string;
  folder: string;
  extension: string;
  depth: number;
};
/** Cache key for one (branch, folder, extension, depth) tree snapshot. */
function getLocalKey({ branch, folder, extension, depth }: GetKeyArgs) {
  return [LOCAL_KEY, branch, folder, extension, depth].join('.');
}
type PersistLocalTreeArgs = GetKeyArgs & {
  localForage: LocalForage;
  localTree: LocalTree;
};

type GetLocalTreeArgs = GetKeyArgs & {
  localForage: LocalForage;
};

/** Store `localTree` in localForage under its derived cache key. */
export async function persistLocalTree({
  localForage,
  localTree,
  branch,
  folder,
  extension,
  depth,
}: PersistLocalTreeArgs) {
  await localForage.setItem<LocalTree>(
    getLocalKey({ branch, folder, extension, depth }),
    localTree,
  );
}
/** Load the cached tree snapshot for the given key parameters (null when absent). */
export async function getLocalTree({
  localForage,
  branch,
  folder,
  extension,
  depth,
}: GetLocalTreeArgs) {
  const key = getLocalKey({ branch, folder, extension, depth });
  return localForage.getItem<LocalTree>(key);
}
// Backend callbacks needed to diff a cached tree against the remote branch.
type GetDiffFromLocalTreeMethods = {
  getDifferences: (
    to: string,
    from: string,
  ) => Promise<
    {
      oldPath: string;
      newPath: string;
      status: string;
    }[]
  >;
  filterFile: (file: { path: string; name: string }) => boolean;
  getFileId: (path: string) => Promise<string>;
};

type GetDiffFromLocalTreeArgs = GetDiffFromLocalTreeMethods & {
  branch: { name: string; sha: string };
  localTree: LocalTree;
  folder: string;
  extension: string;
  depth: number;
};

/**
 * Diff the remote branch head against the cached local tree head and return
 * the changed files inside `folder`, each flagged `deleted` and (for live
 * files) resolved to its current id. Renames contribute both the old path
 * (deleted) and the new path.
 */
async function getDiffFromLocalTree({
  branch,
  localTree,
  folder,
  getDifferences,
  filterFile,
  getFileId,
}: GetDiffFromLocalTreeArgs) {
  const diff = await getDifferences(branch.sha, localTree.head);
  const diffFiles = diff
    .filter(d => d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder))
    .reduce((acc, d) => {
      if (d.status === 'renamed') {
        acc.push({
          path: d.oldPath,
          name: basename(d.oldPath),
          deleted: true,
        });
        acc.push({
          path: d.newPath,
          name: basename(d.newPath),
          deleted: false,
        });
      } else if (d.status === 'deleted') {
        acc.push({
          path: d.oldPath,
          name: basename(d.oldPath),
          deleted: true,
        });
      } else {
        // added/modified: prefer the new path when present
        acc.push({
          path: d.newPath || d.oldPath,
          name: basename(d.newPath || d.oldPath),
          deleted: false,
        });
      }

      return acc;
    }, [] as { path: string; name: string; deleted: boolean }[])

    .filter(filterFile);

  // deleted files get an empty id; live files are resolved via getFileId
  const diffFilesWithIds = await Promise.all(
    diffFiles.map(async file => {
      if (!file.deleted) {
        const id = await getFileId(file.path);
        return { ...file, id };
      } else {
        return { ...file, id: '' };
      }
    }),
  );

  return diffFilesWithIds;
}
// Dependencies for the cached folder listing: file enumeration/reading
// callbacks plus branch-inspection and diffing methods from the backend.
type AllEntriesByFolderArgs = GetKeyArgs &
  GetDiffFromLocalTreeMethods & {
    // Full (uncached) listing of entry files under a folder.
    listAllFiles: (
      folder: string,
      extension: string,
      depth: number,
    ) => Promise<ImplementationFile[]>;
    readFile: ReadFile;
    readFileMetadata: ReadFileMetadata;
    // Current head (name + sha) of the repository's default branch.
    getDefaultBranch: () => Promise<{ name: string; sha: string }>;
    isShaExistsInBranch: (branch: string, sha: string) => Promise<boolean>;
    apiName: string;
    localForage: LocalForage;
    customFetch?: CustomFetchFunc;
  };
/**
 * Lists all entry files under `folder`, using a locally persisted tree
 * snapshot as a cache. When the cached head commit still exists in the
 * branch, only the diff since that commit is applied to the cached file
 * list; otherwise the folder is fully re-listed and the cache rebuilt.
 * The resulting files are fetched via `customFetch` (when provided) or
 * `fetchFiles`.
 */
export async function allEntriesByFolder({
  listAllFiles,
  readFile,
  readFileMetadata,
  apiName,
  branch,
  localForage,
  folder,
  extension,
  depth,
  getDefaultBranch,
  isShaExistsInBranch,
  getDifferences,
  getFileId,
  filterFile,
  customFetch,
}: AllEntriesByFolderArgs) {
  // Full listing: enumerate everything and persist a fresh snapshot keyed by
  // the default branch's current head sha.
  async function listAllFilesAndPersist() {
    const files = await listAllFiles(folder, extension, depth);
    const branch = await getDefaultBranch();
    await persistLocalTree({
      localForage,
      localTree: {
        head: branch.sha,
        files: files.map(f => ({ id: f.id!, path: f.path, name: basename(f.path) })),
      },
      branch: branch.name,
      depth,
      extension,
      folder,
    });
    return files;
  }
  // Cached listing: reuse the persisted snapshot when it is still valid.
  async function listFiles() {
    const localTree = await getLocalTree({ localForage, branch, folder, extension, depth });
    if (localTree) {
      const branch = await getDefaultBranch();
      // if the branch was forced pushed the local tree sha can be removed from the remote tree
      const localTreeInBranch = await isShaExistsInBranch(branch.name, localTree.head);
      if (!localTreeInBranch) {
        console.info(
          `Can't find local tree head '${localTree.head}' in branch '${branch.name}', rebuilding local tree`,
        );
        return listAllFilesAndPersist();
      }
      const diff = await getDiffFromLocalTree({
        branch,
        localTree,
        folder,
        extension,
        depth,
        getDifferences,
        getFileId,
        filterFile,
      }).catch(e => {
        console.info('Failed getting diff from local tree:', e);
        return null;
      });
      if (!diff) {
        console.info(`Diff is null, rebuilding local tree`);
        return listAllFilesAndPersist();
      }
      if (diff.length === 0) {
        // return local copy
        return localTree.files;
      } else {
        // path -> deleted flag from the diff; deleted paths are dropped from
        // both the diff and the cached list before merging.
        const deleted = diff.reduce((acc, d) => {
          acc[d.path] = d.deleted;
          return acc;
        }, {} as Record<string, boolean>);
        // Merge diff entries over the cached entries (diff wins on path
        // collisions) and keep the list sorted by path.
        const newCopy = sortBy(
          unionBy(
            diff.filter(d => !deleted[d.path]),
            localTree.files.filter(f => !deleted[f.path]),
            file => file.path,
          ),
          file => file.path,
        );
        await persistLocalTree({
          localForage,
          localTree: { head: branch.sha, files: newCopy },
          branch: branch.name,
          depth,
          extension,
          folder,
        });
        return newCopy;
      }
    } else {
      return listAllFilesAndPersist();
    }
  }
  const files = await listFiles();
  if (customFetch) {
    return await customFetch(files);
  }
  return await fetchFiles(files, readFile, readFileMetadata, apiName);
}

@ -0,0 +1,41 @@
// Barrel module for the shared backend utility library: re-exports the API
// helpers, cursor/file/path utilities and their public types from a single
// entry point.
export { default as AccessTokenError } from './AccessTokenError';
export { readFile, readFileMetadata, requestWithBackoff, throwOnConflictingBranches } from './API';
export { default as APIError } from './APIError';
export { generateContentKey, parseContentKey } from './APIUtils';
export { asyncLock } from './asyncLock';
export {
  filterByExtension,
  getAllResponses,
  getPathDepth,
  parseLinkHeader,
  parseResponse,
  responseParser,
} from './backendUtil';
export { default as Cursor, CURSOR_COMPATIBILITY_SYMBOL } from './Cursor';
export { default as getBlobSHA } from './getBlobSHA';
export {
  createPointerFile,
  getLargeMediaFilteredMediaFiles,
  getLargeMediaPatternsFromGitAttributesFile,
  getPointerFileForMediaFileObj,
  parsePointerFile,
} from './git-lfs';
export {
  allEntriesByFolder,
  blobToFileObj,
  entriesByFiles,
  entriesByFolder,
  getMediaAsBlob,
  getMediaDisplayURL,
  runWithLock,
} from './implementation';
export { default as loadScript } from './loadScript';
export { default as localForage } from './localForage';
export { basename, fileExtension, fileExtensionWithSeparator, isAbsolutePath } from './path';
export { flowAsync, onlySuccessfulPromises, then } from './promise';
export { default as transientOptions } from './transientOptions';
export { default as unsentRequest } from './unsentRequest';
export type { ApiRequest, FetchError } from './API';
export type { AsyncLock } from './asyncLock';
export type { PointerFile } from './git-lfs';

@ -0,0 +1,17 @@
/**
 * Simple script loader that returns a promise. The promise resolves once the
 * injected <script> element fires `load` and rejects on `error`.
 */
export default function loadScript(url: string) {
  return new Promise<void>((resolve, reject) => {
    const script: HTMLScriptElement = document.createElement('script');
    script.src = url;
    script.onload = () => resolve();
    script.onerror = error => reject(error);
    // Appending to <head> starts the download/execution.
    document.getElementsByTagName('head')[0].appendChild(script);
  });
}

@ -0,0 +1,21 @@
import localForage from 'localforage';
// Smoke-tests storage on module load so quota/availability problems surface
// early as a console warning instead of failing later during a real save.
function localForageTest() {
  const testKey = 'localForageTest';
  localForage
    .setItem(testKey, { expires: Date.now() + 300000 })
    .then(() => {
      // Write succeeded; clean up the probe key.
      localForage.removeItem(testKey);
    })
    .catch(err => {
      // err.code 22 → storage quota exceeded (see the warning message).
      if (err.code === 22) {
        const message = 'Unable to set localStorage key. Quota exceeded! Full disk?';
        console.warn(message);
      }
      console.info(err);
    });
}
localForageTest();
export default localForage;

@ -0,0 +1,293 @@
import { dirname, join } from 'path';
import trim from 'lodash/trim';
import { folderFormatter } from '../formatters';
import { joinUrlPath } from '../urlHelper';
import { basename, isAbsolutePath } from '.';
import type {
Config,
Field,
Collection,
CollectionFile,
Entry,
FileOrImageField,
MarkdownField,
ListField,
ObjectField,
} from '@staticcms/core/interface';
export const DRAFT_MEDIA_FILES = 'DRAFT_MEDIA_FILES';

/** Finds the file config in a files-collection whose name matches the slug. */
function getFileField(collectionFiles: CollectionFile[], slug: string | undefined) {
  for (const file of collectionFiles) {
    if (file?.name === slug) {
      return file;
    }
  }
  return undefined;
}
/** Type guard: true when `field` exposes the given media/public folder key. */
function isMediaField(
  folderKey: 'media_folder' | 'public_folder',
  field: Field | undefined,
): field is FileOrImageField | MarkdownField {
  if (!field) {
    return false;
  }
  return folderKey in field;
}
/**
 * Determines whether a custom media/public folder applies for the given
 * field, file (matched by slug) or collection — checked in that order.
 */
function hasCustomFolder(
  folderKey: 'media_folder' | 'public_folder',
  collection: Collection | undefined | null,
  slug: string | undefined,
  field: Field | undefined,
): field is FileOrImageField | MarkdownField {
  if (!collection) {
    return false;
  }
  if (isMediaField(folderKey, field) && field[folderKey]) {
    return true;
  }
  if ('files' in collection) {
    const file = getFileField(collection.files, slug);
    if (file && file[folderKey]) {
      return true;
    }
  }
  return Boolean(collection[folderKey]);
}
/**
 * Evaluates the media/public folder template chain for a collection (and,
 * for files collections, the file matching the entry slug), then descends
 * into the field hierarchy so the innermost matching field wins. Each level
 * is evaluated relative to the folder produced by the level above it.
 */
function evaluateFolder(
  folderKey: 'media_folder' | 'public_folder',
  config: Config,
  c: Collection,
  entryMap: Entry | undefined,
  field: FileOrImageField | MarkdownField,
) {
  // Start from the global config folder.
  let currentFolder = config[folderKey]!;
  const collection = { ...c };
  // add identity template if doesn't exist
  if (!collection[folderKey]) {
    collection[folderKey] = `{{${folderKey}}}`;
  }
  if ('files' in collection) {
    // files collection evaluate the collection template
    // then move on to the specific file configuration denoted by the slug
    currentFolder = folderFormatter(
      collection[folderKey]!,
      entryMap,
      collection,
      currentFolder,
      folderKey,
      config.slug,
    );
    const f = getFileField(collection.files!, entryMap?.slug);
    if (f) {
      const file = { ...f };
      if (!file[folderKey]) {
        // add identity template if doesn't exist
        file[folderKey] = `{{${folderKey}}}`;
      }
      // evaluate the file template and keep evaluating until we match our field
      currentFolder = folderFormatter(
        file[folderKey]!,
        entryMap,
        collection,
        currentFolder,
        folderKey,
        config.slug,
      );
      if (field) {
        const fieldFolder = traverseFields(
          folderKey,
          config,
          collection,
          entryMap,
          field,
          file.fields! as Field[],
          currentFolder,
        );
        if (fieldFolder !== null) {
          currentFolder = fieldFolder;
        }
      }
    }
  } else {
    // folder collection, evaluate the collection template
    // and keep evaluating until we match our field
    currentFolder = folderFormatter(
      collection[folderKey]!,
      entryMap,
      collection,
      currentFolder,
      folderKey,
      config.slug,
    );
    if (field) {
      const fieldFolder = traverseFields(
        folderKey,
        config,
        collection,
        entryMap,
        field,
        collection.fields! as Field[],
        currentFolder,
      );
      if (fieldFolder !== null) {
        currentFolder = fieldFolder;
      }
    }
  }
  return currentFolder;
}
/**
 * Depth-first search for `field` within `fields`; returns the evaluated
 * folder template for the matched field, or null when the field is not in
 * this subtree. Parent folders are evaluated on the way down so that nested
 * fields resolve relative to their ancestors.
 */
function traverseFields(
  folderKey: 'media_folder' | 'public_folder',
  config: Config,
  collection: Collection,
  entryMap: Entry | undefined,
  field: FileOrImageField | MarkdownField | ListField | ObjectField,
  fields: Field[],
  currentFolder: string,
): string | null {
  // Direct hit at this level (matched by object identity).
  const matchedField = fields.filter(f => f === field)[0] as
    | FileOrImageField
    | MarkdownField
    | ListField
    | ObjectField
    | undefined;
  if (matchedField && isMediaField(folderKey, matchedField)) {
    return folderFormatter(
      matchedField[folderKey] ? matchedField[folderKey]! : `{{${folderKey}}}`,
      entryMap,
      collection,
      currentFolder,
      folderKey,
      config.slug,
    );
  }
  // Otherwise recurse into nested object/list fields.
  for (const f of fields) {
    const childField: Field = { ...f };
    if (isMediaField(folderKey, childField) && !childField[folderKey]) {
      // add identity template if doesn't exist
      childField[folderKey] = `{{${folderKey}}}`;
    }
    const folder = folderFormatter(
      isMediaField(folderKey, childField) ? childField[folderKey] ?? '' : '',
      entryMap,
      collection,
      currentFolder,
      folderKey,
      config.slug,
    );
    let fieldFolder = null;
    if ('fields' in childField && childField.fields) {
      fieldFolder = traverseFields(
        folderKey,
        config,
        collection,
        entryMap,
        childField,
        childField.fields,
        folder,
      );
    } else if ('types' in childField && childField.types) {
      fieldFolder = traverseFields(
        folderKey,
        config,
        collection,
        entryMap,
        childField,
        childField.types,
        folder,
      );
    }
    if (fieldFolder != null) {
      return fieldFolder;
    }
  }
  return null;
}
/**
 * Resolves the media folder for an entry/field. When a custom folder is
 * configured (field/file/collection), its templates are evaluated; absolute
 * results are used as-is while relative ones resolve against the entry's
 * directory (or the collection's draft-media folder when there is no entry
 * path yet). The result is returned without leading/trailing slashes.
 */
export function selectMediaFolder(
  config: Config,
  collection: Collection | undefined | null,
  entryMap: Entry | undefined,
  field: Field | undefined,
) {
  const name = 'media_folder';
  let mediaFolder = config[name];
  if (hasCustomFolder(name, collection, entryMap?.slug, field)) {
    const folder = evaluateFolder(name, config, collection!, entryMap, field);
    if (folder.startsWith('/')) {
      // return absolute paths as is
      mediaFolder = join(folder);
    } else {
      const entryPath = entryMap?.path;
      mediaFolder = entryPath
        ? join(dirname(entryPath), folder)
        : join(collection && 'folder' in collection ? collection.folder : '', DRAFT_MEDIA_FILES);
    }
  }
  return trim(mediaFolder, '/');
}
/**
 * Computes the public (published) path of a media file. Already-absolute
 * media paths pass through unchanged; otherwise the file name is joined to
 * the resolved public folder (URL-joined when the folder itself is absolute).
 */
export function selectMediaFilePublicPath(
  config: Config,
  collection: Collection | null,
  mediaPath: string,
  entryMap: Entry | undefined,
  field: Field | undefined,
) {
  if (isAbsolutePath(mediaPath)) {
    return mediaPath;
  }
  const name = 'public_folder';
  const publicFolder = hasCustomFolder(name, collection, entryMap?.slug, field)
    ? evaluateFolder(name, config, collection!, entryMap, field)
    : config[name]!;
  const fileName = basename(mediaPath);
  if (isAbsolutePath(publicFolder)) {
    return joinUrlPath(publicFolder, fileName);
  }
  return join(publicFolder, fileName);
}

/**
 * Computes the repository path a media file should be stored at: absolute
 * media paths pass through; relative ones are joined to the resolved media
 * folder for this entry/field.
 */
export function selectMediaFilePath(
  config: Config,
  collection: Collection | null,
  entryMap: Entry | undefined,
  mediaPath: string,
  field: Field | undefined,
) {
  if (isAbsolutePath(mediaPath)) {
    return mediaPath;
  }
  const mediaFolder = selectMediaFolder(config, collection, entryMap, field);
  return join(mediaFolder, basename(mediaPath));
}

@ -0,0 +1,11 @@
/** Type guard: true when `value` is neither `null` nor `undefined`. */
export function isNotNullish<T>(value: T | null | undefined): value is T {
  return value != null;
}

/** Type guard: true when `value` is `null` or `undefined`. */
export function isNullish<T>(value: T | null | undefined): value is null | undefined {
  return value == null;
}

/** Copies `value` with nullish entries removed; `[]` when `value` itself is nullish. */
export function filterNullish<T>(value: (T | null | undefined)[] | null | undefined): T[] {
  if (value == null) {
    return [];
  }
  return value.filter(isNotNullish);
}

@ -0,0 +1,45 @@
/* eslint-disable @typescript-eslint/no-explicit-any */

/**
 * Immutably sets `value` at `path` inside `target`, copying every object or
 * array along the way. Array segments must be in-bounds numeric indices;
 * otherwise the (copied) array is returned unchanged.
 */
function setIn(target: any, path: (string | number)[], value: unknown): any {
  if (path.length === 0) {
    return value;
  }
  const [head, ...rest] = path;
  if (Array.isArray(target)) {
    const arrayCopy = [...(target ?? [])];
    const index = +head;
    if (Number.isNaN(index) || index < 0 || index >= arrayCopy.length) {
      // Non-numeric or out-of-range index: leave the array contents as-is.
      return arrayCopy;
    }
    arrayCopy[index] = setIn(arrayCopy[index], rest, value);
    return arrayCopy;
  }
  const objectCopy = target ?? {};
  return {
    ...objectCopy,
    [head]: setIn(objectCopy[head], rest, value),
  };
}

/**
 * Dot-path variant of `setIn`; numeric-looking segments are treated as
 * array indices.
 */
export function set<T>(target: T, path: string | undefined | null, value: unknown): T;
export function set(target: any, path: string | undefined | null, value: unknown): any {
  const segments = (path ?? '')
    .split('.')
    .map(part => (Number.isNaN(+part) ? part : +part));
  return setIn(target, segments, value);
}

@ -0,0 +1,86 @@
// Matches scheme-absolute ('https://...') and protocol-relative ('//...') URLs.
const absolutePath = /^(?:[a-z]+:)?\/\//i;

/** Collapses runs of forward/back slashes into single forward slashes. */
function normalizePath(path: string) {
  return path.split(/[\\/]+/).join('/');
}

/** True when `path` is an absolute URL (with or without a scheme). */
export function isAbsolutePath(path: string) {
  return absolutePath.test(path);
}

/**
 * Return the last portion of a path. Similar to the Unix basename command.
 * @example Usage example
 * path.basename('/foo/bar/baz/asdf/quux.html')
 * // returns
 * 'quux.html'
 *
 * path.basename('/foo/bar/baz/asdf/quux.html', '.html')
 * // returns
 * 'quux'
 */
export function basename(p: string, ext = '') {
  // Normalizing would change '' — bail out early.
  if (p === '') {
    return p;
  }
  const sections = normalizePath(p).split('/');
  const lastPart = sections.pop() as string;
  // A trailing slash ('foo/') means the previous section is the name.
  if (lastPart === '' && sections.length > 0) {
    return sections.pop() as string;
  }
  // Strip the extension when requested and present.
  if (ext.length > 0 && lastPart.endsWith(ext)) {
    return lastPart.slice(0, -ext.length);
  }
  return lastPart;
}

/**
 * Return the extension of the path, from the last '.' to end of string in the
 * last portion of the path. If there is no '.' in the last portion of the path
 * or the first character of it is '.', then it returns an empty string.
 * @example Usage example
 * path.fileExtensionWithSeparator('index.html')
 * // returns
 * '.html'
 */
export function fileExtensionWithSeparator(p: string) {
  const sections = normalizePath(p).split('/');
  let lastPart = sections.pop() as string;
  // Special case: foo/file.ext/ should return '.ext'
  if (lastPart === '' && sections.length > 0) {
    lastPart = sections.pop() as string;
  }
  if (lastPart === '..') {
    return '';
  }
  const dotIndex = lastPart.lastIndexOf('.');
  // No dot, or a leading dot (dotfile): no extension.
  if (dotIndex <= 0) {
    return '';
  }
  return lastPart.slice(dotIndex);
}

/**
 * Return the extension of the path, from after the last '.' to end of string in the
 * last portion of the path. If there is no '.' in the last portion of the path
 * or the first character of it is '.', then it returns an empty string.
 * @example Usage example
 * path.fileExtension('index.html')
 * // returns
 * 'html'
 */
export function fileExtension(p: string) {
  const extWithSeparator = fileExtensionWithSeparator(p);
  return extWithSeparator === '' ? extWithSeparator : extWithSeparator.slice(1);
}

@ -0,0 +1,21 @@
import flow from 'lodash/flow';
/** Lifts `fn` onto a promise: returns a function that awaits `p`, then applies `fn`. */
export function then<T, V>(fn: (r: T) => V) {
  return function (p: Promise<T>) {
    return Promise.resolve(p).then(fn);
  };
}

// Sentinel marking a rejected promise so it can be filtered out below.
const filterPromiseSymbol = Symbol('filterPromiseSymbol');

/** Resolves with the values of the fulfilled promises, discarding rejections. */
export async function onlySuccessfulPromises(promises: Promise<unknown>[]) {
  const settled = await Promise.all(promises.map(p => p.catch(() => filterPromiseSymbol)));
  return settled.filter(result => result !== filterPromiseSymbol);
}

// Wraps a possibly-sync function so it awaits its (possibly promised) argument.
function wrapFlowAsync(fn: Function) {
  return async (arg: unknown) => fn(await arg);
}

/** Composes `fns` left-to-right, awaiting each intermediate result. */
export function flowAsync(fns: Function[]) {
  return flow(fns.map(wrapFlowAsync));
}

@ -0,0 +1,15 @@
/* eslint-disable import/prefer-default-export */
import { COMMIT_AUTHOR, COMMIT_DATE } from '@staticcms/core/constants/commitProps';
import { selectField } from './field.util';
import type { Collection } from '@staticcms/core/interface';
/**
 * Maps a sort key to the path used when sorting entries: commit date sorts
 * by `updatedOn`, commit author by `author` (unless the collection defines a
 * field with that name), anything else by the entry's data field.
 */
export function selectSortDataPath(collection: Collection, key: string) {
  if (key === COMMIT_DATE) {
    return 'updatedOn';
  }
  if (key === COMMIT_AUTHOR && !selectField(collection, key)) {
    return 'author';
  }
  return `data.${key}`;
}

@ -0,0 +1,24 @@
import { isNotNullish, isNullish } from './null.util';
/** True for `null`, `undefined` or the empty string. */
export function isEmpty(value: string | null | undefined): value is null | undefined {
  return value == null || value === '';
}

/** True for a non-nullish, non-empty string. */
export function isNotEmpty(value: string | null | undefined): value is string {
  return value != null && value !== '';
}

/** Capitalizes the first character and lowercases the rest. */
export function toTitleCase(str: string): string {
  const first = str.charAt(0);
  const rest = str.slice(1);
  return first.toUpperCase() + rest.toLowerCase();
}

/** Converts a snake_case key into a Title Case label. */
export function toTitleCaseFromKey(str: string) {
  return str.split('_').join(' ').replace(/\w\S*/g, toTitleCase);
}

/** Converts a camelCase variable name into a Title Case label. */
export function toTitleCaseFromVariableName(str: string) {
  const spaced = str.split(/(?=[A-Z])/).join(' ');
  return spaced.replace(/\w\S*/g, toTitleCase);
}

@ -0,0 +1,7 @@
import type { CreateStyled } from '@emotion/styled';
/**
 * Emotion `shouldForwardProp` configuration that filters out transient props
 * (those prefixed with `$`) so they are never forwarded to the DOM element.
 */
const transientOptions: Parameters<CreateStyled>[1] = {
  shouldForwardProp: (propName: string) => propName.charAt(0) !== '$',
};
export default transientOptions;

@ -0,0 +1,149 @@
import type { ApiRequest, ApiRequestObject, ApiRequestURL } from './API';
/** Feature-detects AbortController (false in non-browser/SSR environments without it). */
function isAbortControllerSupported() {
  return typeof window !== 'undefined' && Boolean(window.AbortController);
}
// Timeout (seconds) applied to every request issued through this module.
const timeout = 60;
/**
 * `fetch` wrapper that aborts after `timeout` seconds via an AbortController.
 * Falls back to a plain fetch when the caller supplies its own signal or the
 * environment has no AbortController.
 */
function fetchWithTimeout(
  input: RequestInfo | URL,
  init?: RequestInit | undefined,
): Promise<Response> {
  if ((init && init.signal) || !isAbortControllerSupported()) {
    return fetch(input, init);
  }
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeout * 1000);
  return fetch(input, { ...init, signal: controller.signal })
    .then(res => {
      clearTimeout(timeoutId);
      return res;
    })
    .catch((e: unknown) => {
      if (e instanceof DOMException) {
        // NOTE(review): abort errors carry name 'AbortError'; the extra
        // `e.name === 'DOMException'` comparison looks ineffective — confirm
        // whether any supported browser actually reports that name.
        if (e.name === 'AbortError' || e.name === 'DOMException') {
          throw new Error(`Request timed out after ${timeout} seconds`);
        }
      }
      throw e;
    });
}
/**
 * Parses a query string into a params object. Both key and value are
 * URI-decoded (symmetric with `encodeParams` below, which encodes both
 * sides); a bare key with no '=' yields an empty-string value.
 */
function decodeParams(paramsString: string): Record<string, string> {
  return paramsString
    .split('&')
    .map(s => s.split('='))
    .reduce((acc, [key, value]) => {
      // Fix: decode the key as well — previously only the value was decoded,
      // which was asymmetric with encodeParams. A missing value ('flag' with
      // no '=') previously produced the literal string 'undefined'; it now
      // yields '' (matching URLSearchParams semantics).
      acc[decodeURIComponent(key)] = value === undefined ? '' : decodeURIComponent(value);
      return acc;
    }, {} as Record<string, string>);
}

/** Splits a URL string into its base URL and (decoded) query params. */
function fromURL(wholeURL: string): ApiRequestURL {
  const [url, allParamsString] = wholeURL.split('?');
  return { url, ...(allParamsString ? { params: decodeParams(allParamsString) } : {}) };
}

/** Builds a request object from fetch-style (url, options) arguments. */
function fromFetchArguments(wholeURL: string, options?: RequestInit): ApiRequestObject {
  return {
    ...fromURL(wholeURL),
    ...(options ? options : {}),
  };
}
/** Serializes params into a query string, URI-encoding keys and values. */
function encodeParams(params: Required<ApiRequestURL>['params']): string {
  const pairs: string[] = [];
  for (const [key, value] of Object.entries(params)) {
    pairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);
  }
  return pairs.join('&');
}

/** Renders a request URL object back into a string URL. */
function toURL(req: ApiRequestURL): string {
  if (!req.params) {
    return req.url;
  }
  return `${req.url}?${encodeParams(req.params)}`;
}

/** Splits a request object into fetch's (input, init) argument pair. */
function toFetchArguments(req: ApiRequestObject): {
  input: RequestInfo | URL;
  init?: RequestInit | undefined;
} {
  const { url, params, ...rest } = req;
  return { input: toURL({ url, params }), init: rest };
}
/** Normalizes a request argument: URL strings become request objects. */
function maybeRequestArg(req: ApiRequest): ApiRequestObject {
  return typeof req === 'string' ? fromURL(req) : req;
}

/** Adapts a request-object function so it also accepts plain URL strings. */
function ensureRequestArg(func: (req: ApiRequestObject) => Promise<Response>) {
  return (req: ApiRequest) => func(maybeRequestArg(req));
}
// This actually performs the built request object
// (accepts either a URL string or a request object — see ensureRequestArg —
// and issues it via the timeout-guarded fetch above).
const performRequest = ensureRequestArg((req: ApiRequestObject) => {
  const { input, init } = toFetchArguments(req);
  return fetchWithTimeout(input, init);
});
// withRoot sets a root URL, unless the URL is already absolute
const absolutePath = /^(?:[a-z]+:)?\/\//i;
const getAbsoluteRoot = (root: string, url: string) => {
  if (absolutePath.test(url)) {
    return url;
  }
  // Insert a separator only when neither side already provides one.
  const needsSlash = Boolean(root) && Boolean(url) && !url.startsWith('/') && !root.endsWith('/');
  return needsSlash ? `${root}/${url}` : `${root}${url}`;
};
// Curried helper: returns a setter for one ApiRequestObject property.
// For 'headers' the new value is merged over any existing headers instead of
// replacing them wholesale; string requests are parsed first.
const withWrapper =
  <K extends keyof ApiRequestObject>(key: K) =>
  (value: ApiRequestObject[K], req: ApiRequest): ApiRequestObject => {
    if (typeof req === 'string') {
      return fromFetchArguments(req, { [key]: value });
    }
    if (key === 'headers') {
      const mergedHeaders = {
        ...(req.headers ?? {}),
        ...(value as HeadersInit),
      } as ApiRequestObject[K];
      return { ...req, [key]: mergedHeaders };
    }
    return { ...req, [key]: value };
  };

// Prefixes relative request URLs with the given root (absolute URLs pass through).
const withRoot = (root: string) => (req: ApiRequest) => {
  return withWrapper('url')(getAbsoluteRoot(root, typeof req === 'string' ? req : req.url), req);
};
const withMethod = withWrapper('method');
const withBody = withWrapper('body');
const withHeaders = withWrapper('headers');
const withParams = withWrapper('params');
const withCache = withWrapper('cache');
const withNoCache = (req: ApiRequest) => withCache('no-cache', req);
// Public, functional request-building API: compose the with* helpers to
// build up a request object, then hand it to performRequest.
export default {
  fetchWithTimeout,
  fromURL,
  toURL,
  fromFetchArguments,
  performRequest,
  getAbsoluteRoot,
  withRoot,
  withMethod,
  withBody,
  withHeaders,
  withParams,
  withNoCache,
};

@ -0,0 +1,102 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import ValidationErrorTypes from '@staticcms/core/constants/validationErrorTypes';
import type { t } from 'react-polyglot';
import type {
Field,
FieldError,
FieldValidationMethod,
FieldValidationMethodProps,
UnknownField,
ValueOrNestedValue,
Widget,
} from '@staticcms/core/interface';
/**
 * True when a widget value is "empty": nullish, an empty array, an empty
 * plain object, or the empty string.
 */
export function isEmpty(value: ValueOrNestedValue) {
  if (value === null || value === undefined) {
    return true;
  }
  if (Array.isArray(value)) {
    return value.length === 0;
  }
  if (value.constructor === Object) {
    return Object.keys(value).length === 0;
  }
  return typeof value === 'string' && value === '';
}
/**
 * Fails with a PRESENCE error when a required field has an empty value.
 * Fields are required by default; set `required: false` to opt out.
 */
export function validatePresence({
  field,
  value,
  t,
}: FieldValidationMethodProps<ValueOrNestedValue>): false | FieldError {
  const isRequired = field.required ?? true;
  if (!isRequired || !isEmpty(value)) {
    return false;
  }
  return {
    type: ValidationErrorTypes.PRESENCE,
    message: t('editor.editorControlPane.widget.required', {
      fieldLabel: field.label ?? field.name,
    }),
  };
}

/**
 * Fails with a PATTERN error when the field defines a `[regex, label]`
 * pattern and the stringified value does not match. Empty values pass.
 */
export function validatePattern({
  field,
  value,
  t,
}: FieldValidationMethodProps<ValueOrNestedValue>): false | FieldError {
  if (isEmpty(value)) {
    return false;
  }
  const pattern = field.pattern ?? false;
  if (!pattern) {
    return false;
  }
  // Non-string values are stringified before matching.
  let valueToCheck: string;
  if (typeof value === 'string') {
    valueToCheck = value;
  } else if (typeof value === 'number' || typeof value === 'boolean') {
    valueToCheck = `${value}`;
  } else {
    valueToCheck = JSON.stringify(value);
  }
  if (!isEmpty(valueToCheck) && !RegExp(pattern[0]).test(valueToCheck)) {
    return {
      type: ValidationErrorTypes.PATTERN,
      message: t('editor.editorControlPane.widget.regexPattern', {
        fieldLabel: field.label ?? field.name,
        pattern: pattern[1],
      }),
    };
  }
  return false;
}

/**
 * Runs presence, pattern and the widget's own validator (in that order)
 * against the widget-normalized value, collecting every error.
 */
export async function validate(
  field: Field,
  value: ValueOrNestedValue,
  widget: Widget<any, any>,
  t: t,
): Promise<FieldError[]> {
  const validValue = widget.getValidValue(value);
  const validations: FieldValidationMethod<ValueOrNestedValue>[] = [
    validatePresence,
    validatePattern,
    widget.validator,
  ];
  const errors: FieldError[] = [];
  // Sequential on purpose: validator order determines error order.
  for (const validation of validations) {
    const response = await validation({ field: field as UnknownField, value: validValue, t });
    if (response) {
      errors.push(response);
    }
  }
  return errors;
}

@ -0,0 +1,30 @@
import { useEffect } from 'react';
import type AlertEvent from './events/AlertEvent';
import type ConfirmEvent from './events/ConfirmEvent';
// Custom events the app dispatches on `window` (see AlertEvent/ConfirmEvent).
interface EventMap {
  alert: AlertEvent;
  confirm: ConfirmEvent;
}
/**
 * Subscribes `callback` to a `window` event for the caller's lifetime.
 * Overloads cover built-in window events as well as the app's custom events.
 * NOTE(review): the listener is re-registered whenever `callback` identity
 * changes — memoize callbacks in callers to avoid subscribe/unsubscribe churn.
 */
export function useWindowEvent<K extends keyof WindowEventMap>(
  eventName: K,
  callback: (event: WindowEventMap[K]) => void,
): void;
export function useWindowEvent<K extends keyof EventMap>(
  eventName: K,
  callback: (event: EventMap[K]) => void,
): void;
export function useWindowEvent(
  eventName: string,
  callback: EventListenerOrEventListenerObject,
): void {
  useEffect(() => {
    window.addEventListener(eventName, callback);
    // Cleanup removes the listener on unmount or dependency change.
    return () => {
      window.removeEventListener(eventName, callback);
    };
  }, [callback, eventName]);
}

@ -0,0 +1,2 @@
// Barrel for the widget utility namespaces.
export * as stringTemplate from './stringTemplate';
export * as validations from './validations';

@ -0,0 +1,276 @@
import get from 'lodash/get';
import trimEnd from 'lodash/trimEnd';
import truncate from 'lodash/truncate';
import moment from 'moment';
import { basename, dirname, extname } from 'path';
import type { Entry, EntryData, ObjectValue } from '@staticcms/core/interface';
// String-template filters, applied via `{{variable | filter}}`. Each entry
// pairs a recognizer regex (run against the filter expression) with the
// transform applied to the rendered string value.
const filters = [
  // `upper` — uppercase the whole value
  { pattern: /^upper$/, transform: (str: string) => str.toUpperCase() },
  {
    // `lower` — lowercase the whole value
    pattern: /^lower$/,
    transform: (str: string) => str.toLowerCase(),
  },
  {
    // `date('FORMAT')` — format the value as a date via moment
    pattern: /^date\('(.+)'\)$/,
    transform: (str: string, match: RegExpMatchArray) => moment(str).format(match[1]),
  },
  {
    // `default('fallback')` — substitute a fallback for falsy values
    pattern: /^default\('(.+)'\)$/,
    transform: (str: string, match: RegExpMatchArray) => (str ? str : match[1]),
  },
  {
    // `ternary('ifTruthy', 'ifFalsy')` — pick one of two literals
    pattern: /^ternary\('(.*)',\s*'(.*)'\)$/,
    transform: (str: string, match: RegExpMatchArray) => (str ? match[1] : match[2]),
  },
  {
    // `truncate(N)` / `truncate(N, 'omission')` — cap at N characters,
    // appending the omission marker (default '...') when truncated
    pattern: /^truncate\(([0-9]+)(?:(?:,\s*['"])([^'"]*)(?:['"]))?\)$/,
    transform: (str: string, match: RegExpMatchArray) => {
      const omission = match[2] || '...';
      const length = parseInt(match[1]) + omission.length;
      return truncate(str, {
        length,
        omission,
      });
    },
  },
  {
    // `split('sep', 'template')` — split the value on `sep` and rebuild it
    // using `$1`, `$2`, ... placeholders in the template
    pattern: /^split\('(.+)',\s*'(.+)'\)$/,
    transform: (str: string, match: RegExpMatchArray) => {
      if (!str || str.trim().length === 0) {
        return '';
      }
      const parts = str.split(match[1]);
      if (parts.length === 0) {
        return '';
      }
      let output = match[2];
      for (let i = 0; i < match[2].length; i++) {
        output = output.replace(new RegExp(`\\$${i + 1}`, 'g'), parts[i]);
      }
      return output;
    },
  },
];
// Prefix for referencing entry fields explicitly, e.g. `{{fields.title}}`.
const FIELD_PREFIX = 'fields.';
// Matches the variable name inside `{{ }}` (no nesting, braces or pipes).
const templateContentPattern = '([^}{|]+)';
// Optionally matches a trailing ` | filter` expression.
const filterPattern = '( \\| ([^}{]+))?';
const templateVariablePattern = `{{${templateContentPattern}${filterPattern}}}`;
// prepends a Zero if the date has only 1 digit
function formatDate(date: number) {
  return `0${date}`.slice(-2);
}
// UTC-based `{{year}}`/`{{month}}`/... replacements used in slug templates;
// all two-digit parts are zero-padded via formatDate.
export const dateParsers: Record<string, (date: Date) => string> = {
  year: (date: Date) => `${date.getUTCFullYear()}`,
  month: (date: Date) => formatDate(date.getUTCMonth() + 1),
  day: (date: Date) => formatDate(date.getUTCDate()),
  hour: (date: Date) => formatDate(date.getUTCHours()),
  minute: (date: Date) => formatDate(date.getUTCMinutes()),
  second: (date: Date) => formatDate(date.getUTCSeconds()),
};
/**
 * Reads a date from the entry's data. Date instances pass through; string or
 * number values are parsed with moment. Returns undefined for a missing field
 * name or an unparseable value.
 */
export function parseDateFromEntry(entry: Entry, dateFieldName?: string | null) {
  if (!dateFieldName) {
    return;
  }
  const dateValue = entry.data?.[dateFieldName];
  if (dateValue instanceof Date) {
    return dateValue;
  }
  if (typeof dateValue !== 'string' && typeof dateValue !== 'number') {
    return;
  }
  const parsed = moment(dateValue);
  return parsed.isValid() ? parsed.toDate() : undefined;
}
export const SLUG_MISSING_REQUIRED_DATE = 'SLUG_MISSING_REQUIRED_DATE';

/**
 * Splits a lookup key like `a.b[0].c` into its path segments
 * (`['a', 'b', '0', 'c']`); `[`, `]` and `.` all act as separators.
 */
export function keyToPathArray(key?: string) {
  if (!key) {
    return [];
  }
  const parts: string[] = [];
  let buffer = '';
  for (const char of key) {
    if (char === '[' || char === ']' || char === '.') {
      if (buffer.length > 0) {
        parts.push(buffer);
      }
      buffer = '';
    } else {
      buffer += char;
    }
  }
  if (buffer.length > 0) {
    parts.push(buffer);
  }
  return parts;
}
/**
 * Expands a wildcard path like `sections.*.title` into one concrete path per
 * array element present in `data` (`sections.0.title`, `sections.1.title`,
 * ...). Appends to and returns the `paths` accumulator (mutated in place).
 */
export function expandPath({
  data,
  path,
  paths = [],
}: {
  data: EntryData;
  path: string;
  paths?: string[];
}) {
  // Normalize a trailing `.*` so it splits like an interior wildcard.
  if (path.endsWith('.*')) {
    path = path + '.';
  }
  const sep = '.*.';
  const parts = path.split(sep);
  if (parts.length === 1) {
    // No wildcard left: the path is concrete.
    paths.push(path);
  } else {
    const partialPath = parts[0];
    const value = get(data, partialPath);
    if (Array.isArray(value)) {
      // Recurse once per array element, substituting the index for the `*`.
      value.forEach((_, index) => {
        expandPath({
          data,
          path: trimEnd(`${partialPath}.${index}.${parts.slice(1).join(sep)}`, '.'),
          paths,
        });
      });
    }
  }
  return paths;
}
// Allow `fields.` prefix in placeholder to override built in replacements
// like "slug" and "year" with values from fields of the same name.
function getExplicitFieldReplacement(key: string, data: ObjectValue | undefined | null) {
if (!key.startsWith(FIELD_PREFIX)) {
return;
}
const fieldName = key.slice(FIELD_PREFIX.length);
const value = get(data, keyToPathArray(fieldName));
if (typeof value === 'object' && value !== null) {
return JSON.stringify(value);
}
return value;
}
function getFilterFunction(filterStr: string) {
if (filterStr) {
let match: RegExpMatchArray | null = null;
const filter = filters.find(filter => {
match = filterStr.match(filter.pattern);
return !!match;
});
if (filter) {
return (str: string) => filter.transform(str, match as RegExpMatchArray);
}
}
return null;
}
/**
 * Replaces `{{...}}` variables in `template` using, in order of precedence:
 * explicit `fields.*` lookups, date parts (from `date`), the entry `slug`
 * (`identifier`) and finally entry data fields. When `processor` is given it
 * is applied to every replacement instead of any `| filter`. Throws an error
 * named SLUG_MISSING_REQUIRED_DATE when a date part is referenced but no
 * date is available (unless date processing was disabled by passing null).
 */
export function compileStringTemplate(
  template: string,
  date: Date | undefined | null,
  identifier = '',
  data: ObjectValue | undefined | null = {},
  processor?: (value: string) => string,
) {
  let missingRequiredDate;
  // Turn off date processing (support for replacements like `{{year}}`), by passing in
  // `null` as the date arg.
  const useDate = date !== null;
  const compiledString = template.replace(
    RegExp(templateVariablePattern, 'g'),
    (_full, key: string, _part, filter: string) => {
      let replacement;
      const explicitFieldReplacement = getExplicitFieldReplacement(key, data);
      // NOTE(review): this truthiness check means a falsy field value
      // ('' or 0) falls through to the generic replacement paths below —
      // confirm that is intended.
      if (explicitFieldReplacement) {
        replacement = explicitFieldReplacement;
      } else if (dateParsers[key] && !date) {
        missingRequiredDate = true;
        return '';
      } else if (dateParsers[key]) {
        replacement = dateParsers[key](date as Date);
      } else if (key === 'slug') {
        replacement = identifier;
      } else {
        replacement = get(data, keyToPathArray(key), '') as string;
      }
      if (processor) {
        return processor(replacement);
      } else {
        const filterFunction = getFilterFunction(filter);
        if (filterFunction) {
          replacement = filterFunction(replacement);
        }
      }
      return replacement;
    },
  );
  if (useDate && missingRequiredDate) {
    const err = new Error();
    err.name = SLUG_MISSING_REQUIRED_DATE;
    throw err;
  } else {
    return compiledString;
  }
}
/**
 * Lists the variable expressions referenced by a template, e.g.
 * `extractTemplateVars('{{year}}-{{slug}}')` → `['year', 'slug']`.
 */
export function extractTemplateVars(template: string) {
  const variableRegexp = RegExp(templateVariablePattern, 'g');
  const contentRegexp = RegExp(templateContentPattern, 'g');
  const found = template.match(variableRegexp) ?? [];
  return found.map(variable => {
    const content = variable.match(contentRegexp);
    return content ? content[0] : '';
  });
}
/**
 * Appends `dirname`, `filename` and `extension` to the provided `fields` map.
 * @param entryPath
 * @param fields
 * @param folder - optionally include a folder that the dirname will be relative to.
 * eg: `addFileTemplateFields('foo/bar/baz.ext', fields, 'foo')`
 * will result in: `{ dirname: 'bar', filename: 'baz', extension: 'ext' }`
 */
export function addFileTemplateFields(entryPath: string, fields: EntryData, folder = '') {
  if (!entryPath) {
    return fields;
  }
  const extension = extname(entryPath);
  const filename = basename(entryPath, extension);
  // Strip the leading folder (if any) from the entry's directory, keeping a
  // leading slash when one was present.
  const folderPrefix = new RegExp(`^(/?)${folder}/?`);
  const dirnameExcludingFolder = dirname(entryPath).replace(folderPrefix, '$1');
  return {
    ...fields,
    dirname: dirnameExcludingFolder,
    filename,
    extension: extension ? extension.slice(1) : extension,
  };
}

@ -0,0 +1,38 @@
/* eslint-disable import/prefer-default-export */
import isNumber from 'lodash/isNumber';
export function validateMinMax(
t: (key: string, options: unknown) => string,
fieldLabel: string,
value?: string | number | (string | number)[] | undefined | null,
min?: number,
max?: number,
) {
function minMaxError(messageKey: string) {
return {
type: 'RANGE',
message: t(`editor.editorControlPane.widget.${messageKey}`, {
fieldLabel,
minCount: min,
maxCount: max,
count: min,
}),
};
}
if (typeof value === 'string' || typeof value === 'number') {
return false;
}
const length = value?.length ?? 0;
if ([min, max, length].every(isNumber) && (length < min! || length > max!)) {
return minMaxError(min === max ? 'rangeCountExact' : 'rangeCount');
} else if (isNumber(min) && min > 0 && length < min) {
return minMaxError('rangeMin');
} else if (isNumber(max) && length > max) {
return minMaxError('rangeMax');
}
return false;
}