feat(backend-bitbucket): Add Git-LFS support (#3118)

This commit is contained in:
Erez Rokah 2020-01-21 18:57:36 +02:00 committed by GitHub
parent 0755f90142
commit a48c02d852
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
36 changed files with 17763 additions and 7501 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,28 @@
import fixture from './common/media_library';
import { entry1 } from './common/entries';
import * as specUtils from './common/spec_utils';
// Exercise the shared media-library fixture against the BitBucket backend
// with Git LFS ("Large Media") enabled.
const backend = 'bitbucket';
const lfs = true;

describe('BitBucket Backend Media Library - Large Media', () => {
  // Mutable holder the spec utilities populate (e.g. the authenticated user).
  const taskResult = { data: {} };

  before(() => {
    specUtils.before(taskResult, { lfs }, backend);
  });

  after(() => {
    specUtils.after(taskResult, backend);
  });

  beforeEach(() => {
    specUtils.beforeEach(taskResult, backend);
  });

  afterEach(() => {
    specUtils.afterEach(taskResult, backend);
  });

  fixture({ entries: [entry1], getUser: () => taskResult.data.user });
});

View File

@ -82,7 +82,7 @@ function del(token, path) {
});
}
async function prepareTestGitLabRepo() {
async function prepareTestBitBucketRepo({ lfs }) {
const { owner, repo, token } = await getEnvs();
// postfix a random string to avoid collisions
@ -113,6 +113,15 @@ async function prepareTestGitLabRepo() {
);
await git.push(['-u', 'origin', 'master']);
if (lfs) {
console.log(`Enabling LFS for repo ${owner}/${repo}`);
await git.addConfig('commit.gpgsign', 'false');
await git.raw(['lfs', 'track', '*.png', '*.jpg']);
await git.add('.gitattributes');
await git.commit('chore: track images files under LFS');
await git.push('origin', 'master');
}
return { owner, repo: testRepoName, tempDir };
}
@ -162,12 +171,13 @@ async function resetRepositories({ owner, repo, tempDir }) {
}
async function setupBitBucket(options) {
const { lfs = false, ...rest } = options;
if (process.env.RECORD_FIXTURES) {
console.log('Running tests in "record" mode - live data with be used!');
const [user, repoData] = await Promise.all([getUser(), prepareTestGitLabRepo()]);
const [user, repoData] = await Promise.all([getUser(), prepareTestBitBucketRepo({ lfs })]);
await updateConfig(config => {
merge(config, options, {
merge(config, rest, {
backend: {
repo: `${repoData.owner}/${repoData.repo}`,
},
@ -179,7 +189,7 @@ async function setupBitBucket(options) {
console.log('Running tests in "playback" mode - local data with be used');
await updateConfig(config => {
merge(config, options, {
merge(config, rest, {
backend: {
repo: `${BITBUCKET_REPO_OWNER_SANITIZED_VALUE}/${BITBUCKET_REPO_NAME_SANITIZED_VALUE}`,
},
@ -225,7 +235,9 @@ const sanitizeString = (str, { owner, repo, token, ownerName }) => {
.replace(
new RegExp('https://secure.gravatar.+?/u/.+?v=\\d', 'g'),
`${FAKE_OWNER_USER.links.avatar.href}`,
);
)
.replace(new RegExp(/\?token=.+?&/g), 'token=fakeToken&')
.replace(new RegExp(/&client=.+?&/g), 'client=fakeClient&');
if (ownerName) {
replaced = replaced.replace(
@ -254,6 +266,16 @@ const transformRecordedData = (expectation, toSanitize) => {
}
} else if (httpRequest.body && httpRequest.body.type === 'STRING' && httpRequest.body.string) {
body = httpRequest.body.string;
} else if (
httpRequest.body &&
httpRequest.body.type === 'BINARY' &&
httpRequest.body.base64Bytes
) {
body = {
encoding: 'base64',
content: httpRequest.body.base64Bytes,
contentType: httpRequest.body.contentType,
};
}
return body;
};

View File

@ -34,7 +34,25 @@ const matchRoute = (route, fetchArgs) => {
const options = fetchArgs[1];
const method = options && options.method ? options.method : 'GET';
const body = options && options.body;
let body = options && options.body;
let routeBody = route.body;
let bodyMatch = false;
if (routeBody?.encoding === 'base64' && ['File', 'Blob'].includes(body?.constructor.name)) {
const blob = new Blob([Buffer.from(routeBody.content, 'base64')], {
type: routeBody.contentType,
});
// size matching is good enough
bodyMatch = blob.size === body.size;
} else if (routeBody && body?.constructor.name === 'FormData') {
bodyMatch = Array.from(body.entries()).some(([key, value]) => {
const val = typeof value === 'string' ? value : '';
const match = routeBody.includes(key) && routeBody.includes(val);
return match;
});
} else {
bodyMatch = body === routeBody;
}
// use pattern matching for the timestamp parameter
const urlRegex = escapeRegExp(decodeURIComponent(route.url)).replace(
@ -43,19 +61,23 @@ const matchRoute = (route, fetchArgs) => {
);
return (
method === route.method &&
body === route.body &&
decodeURIComponent(url).match(new RegExp(`${urlRegex}`))
method === route.method && bodyMatch && decodeURIComponent(url).match(new RegExp(`${urlRegex}`))
);
};
const stubFetch = (win, routes) => {
const fetch = win.fetch;
cy.stub(win, 'fetch').callsFake((...args) => {
const routeIndex = routes.findIndex(r => matchRoute(r, args));
let routeIndex = routes.findIndex(r => matchRoute(r, args));
if (routeIndex >= 0) {
const route = routes.splice(routeIndex, 1)[0];
console.log(`matched ${args[0]} to ${route.url} ${route.method} ${route.status}`);
let route = routes.splice(routeIndex, 1)[0];
const message = `matched ${args[0]} to ${route.url} ${route.method} ${route.status}`;
console.log(message);
if (route.status === 302) {
console.log(`resolving redirect to ${route.headers.Location}`);
routeIndex = routes.findIndex(r => matchRoute(r, [route.headers.Location]));
route = routes.splice(routeIndex, 1)[0];
}
let blob;
if (route.response && route.response.encoding === 'base64') {
@ -76,6 +98,8 @@ const stubFetch = (win, routes) => {
} else if (
args[0].includes('api.github.com') ||
args[0].includes('api.bitbucket.org') ||
args[0].includes('bitbucket.org') ||
args[0].includes('api.media.atlassian.com') ||
args[0].includes('gitlab.com') ||
args[0].includes('netlify.com') ||
args[0].includes('s3.amazonaws.com')

View File

@ -30,6 +30,8 @@ const retrieveRecordedExpectations = async () => {
Host.includes('api.github.com') ||
(Host.includes('gitlab.com') && httpRequest.path.includes('api/v4')) ||
Host.includes('api.bitbucket.org') ||
(Host.includes('bitbucket.org') && httpRequest.path.includes('info/lfs')) ||
Host.includes('api.media.atlassian.com') ||
Host.some(host => host.includes('netlify.com')) ||
Host.some(host => host.includes('s3.amazonaws.com'))
);

View File

@ -0,0 +1,100 @@
import minimatch from 'minimatch';
import { ApiRequest, PointerFile } from 'netlify-cms-lib-util';
// Performs an HTTP request with authorization already applied by the caller-
// supplied function (the backend passes its authenticated request helper).
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;

// One action descriptor from the LFS batch response: where to send the
// follow-up request, plus optional headers and expiry information.
interface LfsBatchAction {
  href: string;
  header?: { [key: string]: string };
  expires_in?: number;
  expires_at?: string;
}

// Identifies a single LFS object by sha-256 digest (`oid`) and byte size.
interface LfsBatchObject {
  oid: string;
  size: number;
}

// Batch entry for an object to upload; `actions` may be absent — presumably
// when the server already stores the object (the upload code skips such
// entries) — TODO confirm against the LFS batch spec.
interface LfsBatchObjectUpload extends LfsBatchObject {
  actions?: {
    upload: LfsBatchAction;
    verify?: LfsBatchAction;
  };
}

// Batch entry for an object the server rejected.
interface LfsBatchObjectError extends LfsBatchObject {
  error: {
    code: number;
    message: string;
  };
}

// Shape of the `objects/batch` response for an "upload" operation.
interface LfsBatchUploadResponse {
  transfer?: string;
  objects: (LfsBatchObjectUpload | LfsBatchObjectError)[];
}
/**
 * Minimal Git LFS ("Large Media") client.
 *
 * Asks the LFS batch API (`<rootURL>/objects/batch`) for upload actions,
 * then performs the upload (and optional verify) requests for each object.
 */
export class GitLfsClient {
  // Content-negotiation headers required by the Git LFS batch API.
  private static defaultContentHeaders = {
    Accept: 'application/vnd.git-lfs+json',
    ['Content-Type']: 'application/vnd.git-lfs+json',
  };

  constructor(
    public enabled: boolean,
    public rootURL: string,
    public patterns: string[],
    private makeAuthorizedRequest: MakeAuthorizedRequest,
  ) {}

  /** True when `path` matches one of the configured LFS track patterns. */
  matchPath(path: string) {
    return this.patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
  }

  /**
   * Uploads `resource` to the LFS server and resolves to the pointer's sha.
   * Batch entries returned without actions are filtered out upstream
   * (presumably the server already has them), so the loop may be a no-op.
   */
  async uploadResource(pointer: PointerFile, resource: Blob): Promise<string> {
    const requests = await this.getResourceUploadRequests([pointer]);
    for (const request of requests) {
      await this.doUpload(request.actions!.upload, resource);
      if (request.actions!.verify) {
        await this.doVerify(request.actions!.verify, request);
      }
    }
    return pointer.sha;
  }

  // PUTs the raw resource to the href the batch API handed back.
  private async doUpload(upload: LfsBatchAction, resource: Blob) {
    await fetch(decodeURI(upload.href), {
      method: 'PUT',
      body: resource,
      headers: upload.header,
    });
  }

  private async doVerify(verify: LfsBatchAction, object: LfsBatchObject) {
    // FIX: await the request so verification failures reject uploadResource
    // instead of leaking as an unhandled promise rejection (this call was
    // previously fire-and-forget).
    await this.makeAuthorizedRequest({
      url: decodeURI(verify.href),
      method: 'POST',
      headers: { ...GitLfsClient.defaultContentHeaders, ...verify.header },
      body: JSON.stringify({ oid: object.oid, size: object.size }),
    });
  }

  /**
   * Requests upload actions for `objects` from the batch endpoint.
   * Error entries are logged and dropped; entries without actions are
   * dropped too (nothing to upload for them).
   */
  private async getResourceUploadRequests(objects: PointerFile[]): Promise<LfsBatchObjectUpload[]> {
    const response = await this.makeAuthorizedRequest({
      url: `${this.rootURL}/objects/batch`,
      method: 'POST',
      headers: GitLfsClient.defaultContentHeaders,
      body: JSON.stringify({
        operation: 'upload',
        transfers: ['basic'],
        // The batch API names the digest field `oid`; PointerFile calls it `sha`.
        objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
      }),
    });
    return ((await response.json()) as LfsBatchUploadResponse).objects.filter(object => {
      if ('error' in object) {
        console.error(object.error);
        return false;
      }
      return object.actions;
    });
  }
}

View File

@ -29,10 +29,15 @@ import {
AsyncLock,
asyncLock,
getPreviewStatus,
getLargeMediaPatternsFromGitAttributesFile,
getPointerFileForMediaFileObj,
getLargeMediaFilteredMediaFiles,
FetchError,
} from 'netlify-cms-lib-util';
import NetlifyAuthenticator from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
import { GitLfsClient } from './git-lfs-client';
const MAX_CONCURRENT_DOWNLOADS = 10;
@ -61,6 +66,8 @@ export default class BitbucketBackend implements Implementation {
_mediaDisplayURLSem?: Semaphore;
squashMerges: boolean;
previewContext: string;
largeMediaURL: string;
_largeMediaClientPromise?: Promise<GitLfsClient>;
constructor(config: Config, options = {}) {
this.options = {
@ -87,6 +94,8 @@ export default class BitbucketBackend implements Implementation {
this.apiRoot = config.backend.api_root || 'https://api.bitbucket.org/2.0';
this.baseUrl = config.base_url || '';
this.siteId = config.site_id || '';
this.largeMediaURL =
config.backend.large_media_url || `https://bitbucket.org/${config.backend.repo}/info/lfs`;
this.token = '';
this.mediaFolder = config.media_folder;
this.squashMerges = config.backend.squash_merges || false;
@ -109,6 +118,13 @@ export default class BitbucketBackend implements Implementation {
});
}
requestFunction = (req: ApiRequest) =>
this.getToken()
.then(
token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
)
.then(unsentRequest.performRequest);
restoreUser(user: User) {
return this.authenticate(user);
}
@ -272,6 +288,31 @@ export default class BitbucketBackend implements Implementation {
);
}
// Lazily builds (and memoizes as a promise) the Git LFS client for this repo.
// Patterns come from the repo's .gitattributes; a 404 (no .gitattributes)
// simply yields no patterns, which leaves the client disabled.
getLargeMediaClient() {
  if (!this._largeMediaClientPromise) {
    this._largeMediaClientPromise = (async (): Promise<GitLfsClient> => {
      const patterns = await this.api!.readFile('.gitattributes')
        .then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
        .catch((err: FetchError) => {
          // Missing .gitattributes is a normal state, not a failure.
          if (err.status === 404) {
            console.log('This 404 was expected and handled appropriately.');
          } else {
            console.error(err);
          }
          return [];
        });
      // Enabled only when a large-media URL is configured AND at least one
      // pattern is LFS-tracked.
      return new GitLfsClient(
        !!(this.largeMediaURL && patterns.length > 0),
        this.largeMediaURL,
        patterns,
        this.requestFunction,
      );
    })();
  }
  return this._largeMediaClientPromise;
}
getMediaDisplayURL(displayURL: DisplayURL) {
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
return getMediaDisplayURL(
@ -300,15 +341,37 @@ export default class BitbucketBackend implements Implementation {
}
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
const client = await this.getLargeMediaClient();
// persistEntry is a transactional operation
return runWithLock(
this.lock,
() => this.api!.persistFiles(entry, mediaFiles, options),
async () =>
this.api!.persistFiles(
entry,
client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
options,
),
'Failed to acquire persist entry lock',
);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const { fileObj, path } = mediaFile;
const displayURL = URL.createObjectURL(fileObj);
const client = await this.getLargeMediaClient();
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
if (!client.enabled || !client.matchPath(fixedPath)) {
return this._persistMedia(mediaFile, options);
}
const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
return {
...(await this._persistMedia(persistMediaArgument, options)),
displayURL,
};
}
async _persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
const fileObj = mediaFile.fileObj as File;
const [id] = await Promise.all([

View File

@ -4,7 +4,6 @@ import { fromPairs, get, pick, intersection, unzip } from 'lodash';
import ini from 'ini';
import {
APIError,
getBlobSHA,
unsentRequest,
basename,
ApiRequest,
@ -20,6 +19,11 @@ import {
Config,
ImplementationFile,
UnpublishedEntryMediaFile,
parsePointerFile,
getLargeMediaPatternsFromGitAttributesFile,
PointerFile,
getPointerFileForMediaFileObj,
getLargeMediaFilteredMediaFiles,
} from 'netlify-cms-lib-util';
import { GitHubBackend } from 'netlify-cms-backend-github';
import { GitLabBackend } from 'netlify-cms-backend-gitlab';
@ -27,14 +31,7 @@ import { BitbucketBackend, API as BitBucketAPI } from 'netlify-cms-backend-bitbu
import GitHubAPI from './GitHubAPI';
import GitLabAPI from './GitLabAPI';
import AuthenticationPage from './AuthenticationPage';
import {
parsePointerFile,
createPointerFile,
getLargeMediaPatternsFromGitAttributesFile,
getClient,
Client,
PointerFile,
} from './netlify-lfs-client';
import { getClient, Client } from './netlify-lfs-client';
declare global {
interface Window {
@ -466,49 +463,13 @@ export default class GitGateway implements Implementation {
return this.backend!.getMediaFile(path);
}
async getPointerFileForMediaFileObj(fileObj: File) {
const client = await this.getLargeMediaClient();
const { name, size } = fileObj;
const sha = await getBlobSHA(fileObj);
await client.uploadResource({ sha, size }, fileObj);
const pointerFileString = createPointerFile({ sha, size });
const pointerFileBlob = new Blob([pointerFileString]);
const pointerFile = new File([pointerFileBlob], name, { type: 'text/plain' });
const pointerFileSHA = await getBlobSHA(pointerFile);
return {
file: pointerFile,
blob: pointerFileBlob,
sha: pointerFileSHA,
raw: pointerFileString,
};
}
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
const client = await this.getLargeMediaClient();
if (!client.enabled) {
return this.backend!.persistEntry(entry, mediaFiles, options);
}
const largeMediaFilteredMediaFiles = await Promise.all(
mediaFiles.map(async mediaFile => {
const { fileObj, path } = mediaFile;
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
if (!client.matchPath(fixedPath)) {
return mediaFile;
}
const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj as File);
return {
...mediaFile,
fileObj: pointerFileDetails.file,
size: pointerFileDetails.blob.size,
sha: pointerFileDetails.sha,
raw: pointerFileDetails.raw,
};
}),
return this.backend!.persistEntry(
entry,
client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
options,
);
return this.backend!.persistEntry(entry, largeMediaFilteredMediaFiles, options);
}
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
@ -520,14 +481,7 @@ export default class GitGateway implements Implementation {
return this.backend!.persistMedia(mediaFile, options);
}
const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj as File);
const persistMediaArgument = {
fileObj: pointerFileDetails.file,
size: pointerFileDetails.blob.size,
path,
sha: pointerFileDetails.sha,
raw: pointerFileDetails.raw,
};
const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
return {
...(await this.backend!.persistMedia(persistMediaArgument, options)),
displayURL,

View File

@ -1,30 +1,6 @@
import { filter, flow, fromPairs, map } from 'lodash/fp';
import { flow, fromPairs, map } from 'lodash/fp';
import minimatch from 'minimatch';
import { ApiRequest } from 'netlify-cms-lib-util';
//
// Pointer file parsing
const splitIntoLines = (str: string) => str.split('\n');
const splitIntoWords = (str: string) => str.split(/\s+/g);
const isNonEmptyString = (str: string) => str !== '';
const withoutEmptyLines = flow([map((str: string) => str.trim()), filter(isNonEmptyString)]);
export const parsePointerFile: (data: string) => PointerFile = flow([
splitIntoLines,
withoutEmptyLines,
map(splitIntoWords),
fromPairs,
({ size, oid, ...rest }) => ({
size: parseInt(size),
sha: oid?.split(':')[1],
...rest,
}),
]);
export type PointerFile = {
size: number;
sha: string;
};
import { ApiRequest, PointerFile } from 'netlify-cms-lib-util';
type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
@ -38,56 +14,6 @@ type ClientConfig = {
transformImages: ImageTransformations | boolean;
};
export const createPointerFile = ({ size, sha }: PointerFile) => `\
version https://git-lfs.github.com/spec/v1
oid sha256:${sha}
size ${size}
`;
//
// .gitattributes file parsing
const removeGitAttributesCommentsFromLine = (line: string) => line.split('#')[0];
const parseGitPatternAttribute = (attributeString: string) => {
// There are three kinds of attribute settings:
// - a key=val pair sets an attribute to a specific value
// - a key without a value and a leading hyphen sets an attribute to false
// - a key without a value and no leading hyphen sets an attribute
// to true
if (attributeString.includes('=')) {
return attributeString.split('=');
}
if (attributeString.startsWith('-')) {
return [attributeString.slice(1), false];
}
return [attributeString, true];
};
const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);
const parseGitAttributesPatternLine = flow([
splitIntoWords,
([pattern, ...attributes]) => [pattern, parseGitPatternAttributes(attributes)],
]);
const parseGitAttributesFileToPatternAttributePairs = flow([
splitIntoLines,
map(removeGitAttributesCommentsFromLine),
withoutEmptyLines,
map(parseGitAttributesPatternLine),
]);
export const getLargeMediaPatternsFromGitAttributesFile = flow([
parseGitAttributesFileToPatternAttributePairs,
filter(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
([_pattern, attributes]) =>
attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
),
map(([pattern]) => pattern),
]);
export const matchPath = ({ patterns }: ClientConfig, path: string) =>
patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));

View File

@ -0,0 +1,118 @@
//
// Pointer file parsing
import { filter, flow, fromPairs, map } from 'lodash/fp';
import getBlobSHA from './getBlobSHA';
import { AssetProxy } from './implementation';
// A parsed Git LFS pointer: the sha-256 digest and byte size of the object
// the pointer file stands in for.
export interface PointerFile {
  size: number;
  sha: string;
}
// Shared tokenizers used by both the pointer-file and .gitattributes parsers.
const splitIntoLines = (str: string) => str.split('\n');
const splitIntoWords = (str: string) => str.split(/\s+/g);
const isNonEmptyString = (str: string) => str !== '';
// Trims each line and drops the ones that end up empty.
const withoutEmptyLines = (lines: string[]) =>
  lines.map((line: string) => line.trim()).filter(isNonEmptyString);

/**
 * Parses a Git LFS pointer file into its size and sha (the part of the `oid`
 * field after the `sha256:` prefix).
 *
 * Implemented with native array methods instead of lodash/fp `flow` pipelines:
 * `flow([...])` returns `any`, which silently disabled type checking here
 * (and `parseInt` previously ran without an explicit radix).
 */
export const parsePointerFile: (data: string) => PointerFile = data => {
  // Each non-empty line is a `key value` pair, e.g. `oid sha256:<digest>`.
  const pairs = withoutEmptyLines(splitIntoLines(data)).map(splitIntoWords);
  const { size, oid, ...rest } = Object.fromEntries(pairs);
  return {
    size: parseInt(size, 10),
    sha: oid?.split(':')[1],
    ...rest,
    // Cast preserves the previous behavior for malformed input (sha may be
    // undefined when the `oid` line is missing).
  } as PointerFile;
};

//
// .gitattributes file parsing

// Everything after `#` on a line is a comment.
const removeGitAttributesCommentsFromLine = (line: string) => line.split('#')[0];

// There are three kinds of attribute settings:
// - a key=val pair sets an attribute to a specific value
// - a key without a value and a leading hyphen sets an attribute to false
// - a key without a value and no leading hyphen sets an attribute to true
const parseGitPatternAttribute = (attributeString: string) => {
  if (attributeString.includes('=')) {
    return attributeString.split('=');
  }
  if (attributeString.startsWith('-')) {
    return [attributeString.slice(1), false];
  }
  return [attributeString, true];
};

const parseGitPatternAttributes = (attributes: string[]) =>
  Object.fromEntries(attributes.map(parseGitPatternAttribute));

// A pattern line is `<pattern> <attr> <attr> ...`.
const parseGitAttributesPatternLine = (line: string): [string, { [key: string]: string | boolean }] => {
  const [pattern, ...attributes] = splitIntoWords(line);
  return [pattern, parseGitPatternAttributes(attributes)];
};

const parseGitAttributesFileToPatternAttributePairs = (data: string) =>
  withoutEmptyLines(splitIntoLines(data).map(removeGitAttributesCommentsFromLine)).map(
    parseGitAttributesPatternLine,
  );

/**
 * Returns the patterns tracked by Git LFS: every .gitattributes pattern whose
 * `filter`, `diff` and `merge` attributes are all set to `lfs`.
 */
export const getLargeMediaPatternsFromGitAttributesFile = (data: string): string[] =>
  parseGitAttributesFileToPatternAttributePairs(data)
    .filter(
      ([, attributes]) =>
        attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
    )
    .map(([pattern]) => pattern);
/**
 * Serializes a Git LFS pointer file (spec v1) for the given object.
 * The output is line-oriented and ends with a trailing newline.
 */
export const createPointerFile = ({ size, sha }: PointerFile) =>
  [
    'version https://git-lfs.github.com/spec/v1',
    `oid sha256:${sha}`,
    `size ${size}`,
    '',
  ].join('\n');
/**
 * Uploads `fileObj` to the LFS server and builds the pointer file that gets
 * committed to the repository in its place.
 *
 * Returns the pointer as a File plus its size, its own blob SHA, its raw
 * text content, and the original media path.
 */
export async function getPointerFileForMediaFileObj(
  client: { uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string> },
  fileObj: File,
  path: string,
) {
  const { name, size } = fileObj;

  // Hash and upload the real media content first.
  const sha = await getBlobSHA(fileObj);
  await client.uploadResource({ sha, size }, fileObj);

  // Build the pointer stand-in that will be committed instead of the media.
  const raw = createPointerFile({ sha, size });
  const pointerBlob = new Blob([raw]);
  const pointerAsFile = new File([pointerBlob], name, { type: 'text/plain' });
  const pointerSha = await getBlobSHA(pointerAsFile);

  return {
    fileObj: pointerAsFile,
    size: pointerBlob.size,
    sha: pointerSha,
    raw,
    path,
  };
}
/**
 * Replaces every LFS-tracked media file in `mediaFiles` with its pointer-file
 * equivalent; files that don't match an LFS pattern pass through untouched.
 */
export async function getLargeMediaFilteredMediaFiles(
  client: {
    uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string>;
    matchPath: (path: string) => boolean;
  },
  mediaFiles: AssetProxy[],
) {
  const toPointerIfTracked = async (mediaFile: AssetProxy) => {
    const { fileObj, path } = mediaFile;
    // matchPath expects repo-relative paths without a leading slash.
    const normalizedPath = path.startsWith('/') ? path.slice(1) : path;
    if (!client.matchPath(normalizedPath)) {
      return mediaFile;
    }
    const pointerFileDetails = await getPointerFileForMediaFileObj(client, fileObj as File, path);
    return { ...mediaFile, ...pointerFileDetails };
  };

  return await Promise.all(mediaFiles.map(toPointerIfTracked));
}

View File

@ -52,6 +52,14 @@ import {
FetchError as FE,
parseContentKey,
} from './API';
import {
createPointerFile,
getLargeMediaFilteredMediaFiles,
getLargeMediaPatternsFromGitAttributesFile,
parsePointerFile,
getPointerFileForMediaFileObj,
PointerFile as PF,
} from './git-lfs';
export type AsyncLock = AL;
export type Implementation = I;
@ -78,6 +86,7 @@ export type ApiRequest =
| string;
export type Config = C;
export type FetchError = FE;
export type PointerFile = PF;
export const NetlifyCmsLibUtil = {
APIError,
@ -118,6 +127,11 @@ export const NetlifyCmsLibUtil = {
runWithLock,
PreviewState,
parseContentKey,
createPointerFile,
getLargeMediaFilteredMediaFiles,
getLargeMediaPatternsFromGitAttributesFile,
parsePointerFile,
getPointerFileForMediaFileObj,
};
export {
APIError,
@ -161,4 +175,9 @@ export {
runWithLock,
PreviewState,
parseContentKey,
createPointerFile,
getLargeMediaFilteredMediaFiles,
getLargeMediaPatternsFromGitAttributesFile,
parsePointerFile,
getPointerFileForMediaFileObj,
};