feat(backend-bitbucket): Add Git-LFS support (#3118)
parent 0755f90142
commit a48c02d852
File diff suppressed because one or more lines are too long (26 files).
@@ -0,0 +1,28 @@
+import fixture from './common/media_library';
+import { entry1 } from './common/entries';
+import * as specUtils from './common/spec_utils';
+
+const backend = 'bitbucket';
+const lfs = true;
+
+describe('BitBucket Backend Media Library - Large Media', () => {
+  let taskResult = { data: {} };
+
+  before(() => {
+    specUtils.before(taskResult, { lfs }, backend);
+  });
+
+  after(() => {
+    specUtils.after(taskResult, backend);
+  });
+
+  beforeEach(() => {
+    specUtils.beforeEach(taskResult, backend);
+  });
+
+  afterEach(() => {
+    specUtils.afterEach(taskResult, backend);
+  });
+
+  fixture({ entries: [entry1], getUser: () => taskResult.data.user });
+});
@@ -82,7 +82,7 @@ function del(token, path) {
   });
 }
 
-async function prepareTestGitLabRepo() {
+async function prepareTestBitBucketRepo({ lfs }) {
   const { owner, repo, token } = await getEnvs();
 
   // postfix a random string to avoid collisions
@@ -113,6 +113,15 @@ async function prepareTestGitLabRepo() {
   );
   await git.push(['-u', 'origin', 'master']);
 
+  if (lfs) {
+    console.log(`Enabling LFS for repo ${owner}/${repo}`);
+    await git.addConfig('commit.gpgsign', 'false');
+    await git.raw(['lfs', 'track', '*.png', '*.jpg']);
+    await git.add('.gitattributes');
+    await git.commit('chore: track images files under LFS');
+    await git.push('origin', 'master');
+  }
+
   return { owner, repo: testRepoName, tempDir };
 }
 
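
For reference, the `git lfs track '*.png' '*.jpg'` call above makes git-lfs write lines of the following form into the test repo's .gitattributes (illustrative content, reproduced here for context, not part of this diff):

    *.png filter=lfs diff=lfs merge=lfs -text
    *.jpg filter=lfs diff=lfs merge=lfs -text

These are the same filter=lfs/diff=lfs/merge=lfs attributes that getLargeMediaPatternsFromGitAttributesFile (added further down in this PR) looks for when deriving the backend's Large Media patterns.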
@@ -162,12 +171,13 @@ async function resetRepositories({ owner, repo, tempDir }) {
 }
 
 async function setupBitBucket(options) {
+  const { lfs = false, ...rest } = options;
   if (process.env.RECORD_FIXTURES) {
     console.log('Running tests in "record" mode - live data with be used!');
-    const [user, repoData] = await Promise.all([getUser(), prepareTestGitLabRepo()]);
+    const [user, repoData] = await Promise.all([getUser(), prepareTestBitBucketRepo({ lfs })]);
 
     await updateConfig(config => {
-      merge(config, options, {
+      merge(config, rest, {
         backend: {
           repo: `${repoData.owner}/${repoData.repo}`,
         },
@@ -179,7 +189,7 @@ async function setupBitBucket(options) {
     console.log('Running tests in "playback" mode - local data with be used');
 
     await updateConfig(config => {
-      merge(config, options, {
+      merge(config, rest, {
         backend: {
           repo: `${BITBUCKET_REPO_OWNER_SANITIZED_VALUE}/${BITBUCKET_REPO_NAME_SANITIZED_VALUE}`,
         },
@@ -225,7 +235,9 @@ const sanitizeString = (str, { owner, repo, token, ownerName }) => {
     .replace(
       new RegExp('https://secure.gravatar.+?/u/.+?v=\\d', 'g'),
       `${FAKE_OWNER_USER.links.avatar.href}`,
-    );
+    )
+    .replace(new RegExp(/\?token=.+?&/g), 'token=fakeToken&')
+    .replace(new RegExp(/&client=.+?&/g), 'client=fakeClient&');
 
   if (ownerName) {
     replaced = replaced.replace(
@@ -254,6 +266,16 @@ const transformRecordedData = (expectation, toSanitize) => {
     }
   } else if (httpRequest.body && httpRequest.body.type === 'STRING' && httpRequest.body.string) {
     body = httpRequest.body.string;
+  } else if (
+    httpRequest.body &&
+    httpRequest.body.type === 'BINARY' &&
+    httpRequest.body.base64Bytes
+  ) {
+    body = {
+      encoding: 'base64',
+      content: httpRequest.body.base64Bytes,
+      contentType: httpRequest.body.contentType,
+    };
   }
   return body;
 };
@@ -34,7 +34,25 @@ const matchRoute = (route, fetchArgs) => {
   const options = fetchArgs[1];
 
   const method = options && options.method ? options.method : 'GET';
-  const body = options && options.body;
+  let body = options && options.body;
+  let routeBody = route.body;
+
+  let bodyMatch = false;
+  if (routeBody?.encoding === 'base64' && ['File', 'Blob'].includes(body?.constructor.name)) {
+    const blob = new Blob([Buffer.from(routeBody.content, 'base64')], {
+      type: routeBody.contentType,
+    });
+    // size matching is good enough
+    bodyMatch = blob.size === body.size;
+  } else if (routeBody && body?.constructor.name === 'FormData') {
+    bodyMatch = Array.from(body.entries()).some(([key, value]) => {
+      const val = typeof value === 'string' ? value : '';
+      const match = routeBody.includes(key) && routeBody.includes(val);
+      return match;
+    });
+  } else {
+    bodyMatch = body === routeBody;
+  }
 
   // use pattern matching for the timestamp parameter
   const urlRegex = escapeRegExp(decodeURIComponent(route.url)).replace(
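
This ties into the transformRecordedData change above: recorded binary bodies are replayed as { encoding: 'base64', content, contentType }, so a File/Blob request body is matched by rebuilding a Blob from the recorded base64 content and comparing sizes only. A minimal sketch with assumed values (uploadedFile is hypothetical):

    // recorded route body, in the shape produced by transformRecordedData above
    const routeBody = { encoding: 'base64', content: 'iVBORw0KGgo=', contentType: 'image/png' };
    // rebuild the recorded payload and compare by size only, as matchRoute does
    const recorded = new Blob([Buffer.from(routeBody.content, 'base64')], { type: routeBody.contentType });
    const bodyMatch = recorded.size === uploadedFile.size;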
@@ -43,19 +61,23 @@ const matchRoute = (route, fetchArgs) => {
   );
 
   return (
-    method === route.method &&
-    body === route.body &&
-    decodeURIComponent(url).match(new RegExp(`${urlRegex}`))
+    method === route.method && bodyMatch && decodeURIComponent(url).match(new RegExp(`${urlRegex}`))
   );
 };
 
 const stubFetch = (win, routes) => {
   const fetch = win.fetch;
   cy.stub(win, 'fetch').callsFake((...args) => {
-    const routeIndex = routes.findIndex(r => matchRoute(r, args));
+    let routeIndex = routes.findIndex(r => matchRoute(r, args));
     if (routeIndex >= 0) {
-      const route = routes.splice(routeIndex, 1)[0];
-      console.log(`matched ${args[0]} to ${route.url} ${route.method} ${route.status}`);
+      let route = routes.splice(routeIndex, 1)[0];
+      const message = `matched ${args[0]} to ${route.url} ${route.method} ${route.status}`;
+      console.log(message);
+      if (route.status === 302) {
+        console.log(`resolving redirect to ${route.headers.Location}`);
+        routeIndex = routes.findIndex(r => matchRoute(r, [route.headers.Location]));
+        route = routes.splice(routeIndex, 1)[0];
+      }
 
       let blob;
       if (route.response && route.response.encoding === 'base64') {
@@ -76,6 +98,8 @@ const stubFetch = (win, routes) => {
     } else if (
       args[0].includes('api.github.com') ||
       args[0].includes('api.bitbucket.org') ||
+      args[0].includes('bitbucket.org') ||
+      args[0].includes('api.media.atlassian.com') ||
       args[0].includes('gitlab.com') ||
       args[0].includes('netlify.com') ||
       args[0].includes('s3.amazonaws.com')
@@ -30,6 +30,8 @@ const retrieveRecordedExpectations = async () => {
       Host.includes('api.github.com') ||
       (Host.includes('gitlab.com') && httpRequest.path.includes('api/v4')) ||
       Host.includes('api.bitbucket.org') ||
+      (Host.includes('bitbucket.org') && httpRequest.path.includes('info/lfs')) ||
+      Host.includes('api.media.atlassian.com') ||
       Host.some(host => host.includes('netlify.com')) ||
       Host.some(host => host.includes('s3.amazonaws.com'))
     );

packages/netlify-cms-backend-bitbucket/src/git-lfs-client.ts (new file, 100 lines)
@@ -0,0 +1,100 @@
+import minimatch from 'minimatch';
+import { ApiRequest, PointerFile } from 'netlify-cms-lib-util';
+
+type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
+
+interface LfsBatchAction {
+  href: string;
+  header?: { [key: string]: string };
+  expires_in?: number;
+  expires_at?: string;
+}
+
+interface LfsBatchObject {
+  oid: string;
+  size: number;
+}
+
+interface LfsBatchObjectUpload extends LfsBatchObject {
+  actions?: {
+    upload: LfsBatchAction;
+    verify?: LfsBatchAction;
+  };
+}
+
+interface LfsBatchObjectError extends LfsBatchObject {
+  error: {
+    code: number;
+    message: string;
+  };
+}
+
+interface LfsBatchUploadResponse {
+  transfer?: string;
+  objects: (LfsBatchObjectUpload | LfsBatchObjectError)[];
+}
+
+export class GitLfsClient {
+  private static defaultContentHeaders = {
+    Accept: 'application/vnd.git-lfs+json',
+    ['Content-Type']: 'application/vnd.git-lfs+json',
+  };
+
+  constructor(
+    public enabled: boolean,
+    public rootURL: string,
+    public patterns: string[],
+    private makeAuthorizedRequest: MakeAuthorizedRequest,
+  ) {}
+
+  matchPath(path: string) {
+    return this.patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
+  }
+
+  async uploadResource(pointer: PointerFile, resource: Blob): Promise<string> {
+    const requests = await this.getResourceUploadRequests([pointer]);
+    for (const request of requests) {
+      await this.doUpload(request.actions!.upload, resource);
+      if (request.actions!.verify) {
+        await this.doVerify(request.actions!.verify, request);
+      }
+    }
+    return pointer.sha;
+  }
+
+  private async doUpload(upload: LfsBatchAction, resource: Blob) {
+    await fetch(decodeURI(upload.href), {
+      method: 'PUT',
+      body: resource,
+      headers: upload.header,
+    });
+  }
+  private async doVerify(verify: LfsBatchAction, object: LfsBatchObject) {
+    this.makeAuthorizedRequest({
+      url: decodeURI(verify.href),
+      method: 'POST',
+      headers: { ...GitLfsClient.defaultContentHeaders, ...verify.header },
+      body: JSON.stringify({ oid: object.oid, size: object.size }),
+    });
+  }
+
+  private async getResourceUploadRequests(objects: PointerFile[]): Promise<LfsBatchObjectUpload[]> {
+    const response = await this.makeAuthorizedRequest({
+      url: `${this.rootURL}/objects/batch`,
+      method: 'POST',
+      headers: GitLfsClient.defaultContentHeaders,
+      body: JSON.stringify({
+        operation: 'upload',
+        transfers: ['basic'],
+        objects: objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha })),
+      }),
+    });
+    return ((await response.json()) as LfsBatchUploadResponse).objects.filter(object => {
+      if ('error' in object) {
+        console.error(object.error);
+        return false;
+      }
+      return object.actions;
+    });
+  }
+}
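
For context, getResourceUploadRequests speaks the standard Git LFS batch API: it POSTs pointer metadata to `${rootURL}/objects/batch` and receives per-object upload (and optional verify) actions, which uploadResource then executes. An illustrative exchange with made-up placeholder values:

    // request body built by getResourceUploadRequests
    { "operation": "upload", "transfers": ["basic"], "objects": [{ "oid": "<sha256-of-the-file>", "size": 12345 }] }

    // typical batch response consumed by uploadResource
    {
      "transfer": "basic",
      "objects": [
        {
          "oid": "<sha256-of-the-file>",
          "size": 12345,
          "actions": {
            "upload": { "href": "https://example-lfs-store/upload/<oid>", "header": { "Authorization": "Bearer <token>" } },
            "verify": { "href": "https://example-lfs-store/verify/<oid>" }
          }
        }
      ]
    }

uploadResource PUTs the raw blob to actions.upload.href and, when a verify action is present, POSTs { oid, size } back to it.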
@@ -29,10 +29,15 @@ import {
   AsyncLock,
   asyncLock,
   getPreviewStatus,
+  getLargeMediaPatternsFromGitAttributesFile,
+  getPointerFileForMediaFileObj,
+  getLargeMediaFilteredMediaFiles,
+  FetchError,
 } from 'netlify-cms-lib-util';
 import NetlifyAuthenticator from 'netlify-cms-lib-auth';
 import AuthenticationPage from './AuthenticationPage';
 import API, { API_NAME } from './API';
+import { GitLfsClient } from './git-lfs-client';
 
 const MAX_CONCURRENT_DOWNLOADS = 10;
 
@@ -61,6 +66,8 @@ export default class BitbucketBackend implements Implementation {
   _mediaDisplayURLSem?: Semaphore;
   squashMerges: boolean;
   previewContext: string;
+  largeMediaURL: string;
+  _largeMediaClientPromise?: Promise<GitLfsClient>;
 
   constructor(config: Config, options = {}) {
     this.options = {
@@ -87,6 +94,8 @@ export default class BitbucketBackend implements Implementation {
     this.apiRoot = config.backend.api_root || 'https://api.bitbucket.org/2.0';
     this.baseUrl = config.base_url || '';
     this.siteId = config.site_id || '';
+    this.largeMediaURL =
+      config.backend.large_media_url || `https://bitbucket.org/${config.backend.repo}/info/lfs`;
     this.token = '';
     this.mediaFolder = config.media_folder;
     this.squashMerges = config.backend.squash_merges || false;
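
In user-facing terms, this lets a BitBucket backend point at a custom LFS endpoint via the new large_media_url backend option; when it is omitted, the URL falls back to the repo's own info/lfs endpoint as shown above. An illustrative config.yml snippet (owner/repo is a placeholder):

    backend:
      name: bitbucket
      repo: owner/repo
      # optional; defaults to https://bitbucket.org/owner/repo/info/lfs
      large_media_url: https://bitbucket.org/owner/repo/info/lfs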
@@ -109,6 +118,13 @@ export default class BitbucketBackend implements Implementation {
     });
   }
 
+  requestFunction = (req: ApiRequest) =>
+    this.getToken()
+      .then(
+        token => unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req) as ApiRequest,
+      )
+      .then(unsentRequest.performRequest);
+
   restoreUser(user: User) {
     return this.authenticate(user);
   }
@@ -272,6 +288,31 @@ export default class BitbucketBackend implements Implementation {
     );
   }
 
+  getLargeMediaClient() {
+    if (!this._largeMediaClientPromise) {
+      this._largeMediaClientPromise = (async (): Promise<GitLfsClient> => {
+        const patterns = await this.api!.readFile('.gitattributes')
+          .then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
+          .catch((err: FetchError) => {
+            if (err.status === 404) {
+              console.log('This 404 was expected and handled appropriately.');
+            } else {
+              console.error(err);
+            }
+            return [];
+          });
+
+        return new GitLfsClient(
+          !!(this.largeMediaURL && patterns.length > 0),
+          this.largeMediaURL,
+          patterns,
+          this.requestFunction,
+        );
+      })();
+    }
+    return this._largeMediaClientPromise;
+  }
+
   getMediaDisplayURL(displayURL: DisplayURL) {
     this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
     return getMediaDisplayURL(
@@ -300,15 +341,37 @@ export default class BitbucketBackend implements Implementation {
   }
 
   async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
+    const client = await this.getLargeMediaClient();
     // persistEntry is a transactional operation
     return runWithLock(
       this.lock,
-      () => this.api!.persistFiles(entry, mediaFiles, options),
+      async () =>
+        this.api!.persistFiles(
+          entry,
+          client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
+          options,
+        ),
       'Failed to acquire persist entry lock',
     );
   }
 
   async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
+    const { fileObj, path } = mediaFile;
+    const displayURL = URL.createObjectURL(fileObj);
+    const client = await this.getLargeMediaClient();
+    const fixedPath = path.startsWith('/') ? path.slice(1) : path;
+    if (!client.enabled || !client.matchPath(fixedPath)) {
+      return this._persistMedia(mediaFile, options);
+    }
+
+    const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
+    return {
+      ...(await this._persistMedia(persistMediaArgument, options)),
+      displayURL,
+    };
+  }
+
+  async _persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
     const fileObj = mediaFile.fileObj as File;
 
     const [id] = await Promise.all([
@@ -4,7 +4,6 @@ import { fromPairs, get, pick, intersection, unzip } from 'lodash';
 import ini from 'ini';
 import {
   APIError,
-  getBlobSHA,
   unsentRequest,
   basename,
   ApiRequest,
@@ -20,6 +19,11 @@ import {
   Config,
   ImplementationFile,
   UnpublishedEntryMediaFile,
+  parsePointerFile,
+  getLargeMediaPatternsFromGitAttributesFile,
+  PointerFile,
+  getPointerFileForMediaFileObj,
+  getLargeMediaFilteredMediaFiles,
 } from 'netlify-cms-lib-util';
 import { GitHubBackend } from 'netlify-cms-backend-github';
 import { GitLabBackend } from 'netlify-cms-backend-gitlab';
@@ -27,14 +31,7 @@ import { BitbucketBackend, API as BitBucketAPI } from 'netlify-cms-backend-bitbu
 import GitHubAPI from './GitHubAPI';
 import GitLabAPI from './GitLabAPI';
 import AuthenticationPage from './AuthenticationPage';
-import {
-  parsePointerFile,
-  createPointerFile,
-  getLargeMediaPatternsFromGitAttributesFile,
-  getClient,
-  Client,
-  PointerFile,
-} from './netlify-lfs-client';
+import { getClient, Client } from './netlify-lfs-client';
 
 declare global {
   interface Window {
@@ -466,49 +463,13 @@ export default class GitGateway implements Implementation {
     return this.backend!.getMediaFile(path);
   }
 
-  async getPointerFileForMediaFileObj(fileObj: File) {
-    const client = await this.getLargeMediaClient();
-    const { name, size } = fileObj;
-    const sha = await getBlobSHA(fileObj);
-    await client.uploadResource({ sha, size }, fileObj);
-    const pointerFileString = createPointerFile({ sha, size });
-    const pointerFileBlob = new Blob([pointerFileString]);
-    const pointerFile = new File([pointerFileBlob], name, { type: 'text/plain' });
-    const pointerFileSHA = await getBlobSHA(pointerFile);
-    return {
-      file: pointerFile,
-      blob: pointerFileBlob,
-      sha: pointerFileSHA,
-      raw: pointerFileString,
-    };
-  }
-
   async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
     const client = await this.getLargeMediaClient();
-    if (!client.enabled) {
-      return this.backend!.persistEntry(entry, mediaFiles, options);
-    }
-
-    const largeMediaFilteredMediaFiles = await Promise.all(
-      mediaFiles.map(async mediaFile => {
-        const { fileObj, path } = mediaFile;
-        const fixedPath = path.startsWith('/') ? path.slice(1) : path;
-        if (!client.matchPath(fixedPath)) {
-          return mediaFile;
-        }
-
-        const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj as File);
-        return {
-          ...mediaFile,
-          fileObj: pointerFileDetails.file,
-          size: pointerFileDetails.blob.size,
-          sha: pointerFileDetails.sha,
-          raw: pointerFileDetails.raw,
-        };
-      }),
+    return this.backend!.persistEntry(
+      entry,
+      client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
+      options,
     );
-
-    return this.backend!.persistEntry(entry, largeMediaFilteredMediaFiles, options);
   }
 
   async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
@@ -520,14 +481,7 @@ export default class GitGateway implements Implementation {
       return this.backend!.persistMedia(mediaFile, options);
     }
 
-    const pointerFileDetails = await this.getPointerFileForMediaFileObj(fileObj as File);
-    const persistMediaArgument = {
-      fileObj: pointerFileDetails.file,
-      size: pointerFileDetails.blob.size,
-      path,
-      sha: pointerFileDetails.sha,
-      raw: pointerFileDetails.raw,
-    };
+    const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
     return {
       ...(await this.backend!.persistMedia(persistMediaArgument, options)),
       displayURL,
@@ -1,30 +1,6 @@
-import { filter, flow, fromPairs, map } from 'lodash/fp';
+import { flow, fromPairs, map } from 'lodash/fp';
 import minimatch from 'minimatch';
-import { ApiRequest } from 'netlify-cms-lib-util';
+import { ApiRequest, PointerFile } from 'netlify-cms-lib-util';
 
-//
-// Pointer file parsing
-
-const splitIntoLines = (str: string) => str.split('\n');
-const splitIntoWords = (str: string) => str.split(/\s+/g);
-const isNonEmptyString = (str: string) => str !== '';
-const withoutEmptyLines = flow([map((str: string) => str.trim()), filter(isNonEmptyString)]);
-export const parsePointerFile: (data: string) => PointerFile = flow([
-  splitIntoLines,
-  withoutEmptyLines,
-  map(splitIntoWords),
-  fromPairs,
-  ({ size, oid, ...rest }) => ({
-    size: parseInt(size),
-    sha: oid?.split(':')[1],
-    ...rest,
-  }),
-]);
-
-export type PointerFile = {
-  size: number;
-  sha: string;
-};
-
 type MakeAuthorizedRequest = (req: ApiRequest) => Promise<Response>;
 
@@ -38,56 +14,6 @@ type ClientConfig = {
   transformImages: ImageTransformations | boolean;
 };
 
-export const createPointerFile = ({ size, sha }: PointerFile) => `\
-version https://git-lfs.github.com/spec/v1
-oid sha256:${sha}
-size ${size}
-`;
-
-//
-// .gitattributes file parsing
-
-const removeGitAttributesCommentsFromLine = (line: string) => line.split('#')[0];
-
-const parseGitPatternAttribute = (attributeString: string) => {
-  // There are three kinds of attribute settings:
-  // - a key=val pair sets an attribute to a specific value
-  // - a key without a value and a leading hyphen sets an attribute to false
-  // - a key without a value and no leading hyphen sets an attribute
-  // to true
-  if (attributeString.includes('=')) {
-    return attributeString.split('=');
-  }
-  if (attributeString.startsWith('-')) {
-    return [attributeString.slice(1), false];
-  }
-  return [attributeString, true];
-};
-
-const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);
-
-const parseGitAttributesPatternLine = flow([
-  splitIntoWords,
-  ([pattern, ...attributes]) => [pattern, parseGitPatternAttributes(attributes)],
-]);
-
-const parseGitAttributesFileToPatternAttributePairs = flow([
-  splitIntoLines,
-  map(removeGitAttributesCommentsFromLine),
-  withoutEmptyLines,
-  map(parseGitAttributesPatternLine),
-]);
-
-export const getLargeMediaPatternsFromGitAttributesFile = flow([
-  parseGitAttributesFileToPatternAttributePairs,
-  filter(
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    ([_pattern, attributes]) =>
-      attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
-  ),
-  map(([pattern]) => pattern),
-]);
-
 export const matchPath = ({ patterns }: ClientConfig, path: string) =>
   patterns.some(pattern => minimatch(path, pattern, { matchBase: true }));
 

packages/netlify-cms-lib-util/src/git-lfs.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
+//
+// Pointer file parsing
+
+import { filter, flow, fromPairs, map } from 'lodash/fp';
+import getBlobSHA from './getBlobSHA';
+import { AssetProxy } from './implementation';
+
+export interface PointerFile {
+  size: number;
+  sha: string;
+}
+
+const splitIntoLines = (str: string) => str.split('\n');
+const splitIntoWords = (str: string) => str.split(/\s+/g);
+const isNonEmptyString = (str: string) => str !== '';
+const withoutEmptyLines = flow([map((str: string) => str.trim()), filter(isNonEmptyString)]);
+export const parsePointerFile: (data: string) => PointerFile = flow([
+  splitIntoLines,
+  withoutEmptyLines,
+  map(splitIntoWords),
+  fromPairs,
+  ({ size, oid, ...rest }) => ({
+    size: parseInt(size),
+    sha: oid?.split(':')[1],
+    ...rest,
+  }),
+]);
+
+//
+// .gitattributes file parsing
+
+const removeGitAttributesCommentsFromLine = (line: string) => line.split('#')[0];
+
+const parseGitPatternAttribute = (attributeString: string) => {
+  // There are three kinds of attribute settings:
+  // - a key=val pair sets an attribute to a specific value
+  // - a key without a value and a leading hyphen sets an attribute to false
+  // - a key without a value and no leading hyphen sets an attribute
+  // to true
+  if (attributeString.includes('=')) {
+    return attributeString.split('=');
+  }
+  if (attributeString.startsWith('-')) {
+    return [attributeString.slice(1), false];
+  }
+  return [attributeString, true];
+};
+
+const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);
+
+const parseGitAttributesPatternLine = flow([
+  splitIntoWords,
+  ([pattern, ...attributes]) => [pattern, parseGitPatternAttributes(attributes)],
+]);
+
+const parseGitAttributesFileToPatternAttributePairs = flow([
+  splitIntoLines,
+  map(removeGitAttributesCommentsFromLine),
+  withoutEmptyLines,
+  map(parseGitAttributesPatternLine),
+]);
+
+export const getLargeMediaPatternsFromGitAttributesFile = flow([
+  parseGitAttributesFileToPatternAttributePairs,
+  filter(
+    ([, attributes]) =>
+      attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
+  ),
+  map(([pattern]) => pattern),
+]);
+
+export const createPointerFile = ({ size, sha }: PointerFile) => `\
+version https://git-lfs.github.com/spec/v1
+oid sha256:${sha}
+size ${size}
+`;
+
+export async function getPointerFileForMediaFileObj(
+  client: { uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string> },
+  fileObj: File,
+  path: string,
+) {
+  const { name, size } = fileObj;
+  const sha = await getBlobSHA(fileObj);
+  await client.uploadResource({ sha, size }, fileObj);
+  const pointerFileString = createPointerFile({ sha, size });
+  const pointerFileBlob = new Blob([pointerFileString]);
+  const pointerFile = new File([pointerFileBlob], name, { type: 'text/plain' });
+  const pointerFileSHA = await getBlobSHA(pointerFile);
+  return {
+    fileObj: pointerFile,
+    size: pointerFileBlob.size,
+    sha: pointerFileSHA,
+    raw: pointerFileString,
+    path,
+  };
+}
+
+export async function getLargeMediaFilteredMediaFiles(
+  client: {
+    uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string>;
+    matchPath: (path: string) => boolean;
+  },
+  mediaFiles: AssetProxy[],
+) {
+  return await Promise.all(
+    mediaFiles.map(async mediaFile => {
+      const { fileObj, path } = mediaFile;
+      const fixedPath = path.startsWith('/') ? path.slice(1) : path;
+      if (!client.matchPath(fixedPath)) {
+        return mediaFile;
+      }
+
+      const pointerFileDetails = await getPointerFileForMediaFileObj(client, fileObj as File, path);
+      return { ...mediaFile, ...pointerFileDetails };
+    }),
+  );
+}
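
A minimal usage sketch of the two parsers above, with assumed input strings (the pointer/patterns names are only for illustration):

    import { parsePointerFile, getLargeMediaPatternsFromGitAttributesFile } from 'netlify-cms-lib-util';

    // parsePointerFile turns a Git LFS pointer file into { sha, size, ... }
    const pointer = parsePointerFile(
      'version https://git-lfs.github.com/spec/v1\noid sha256:abc123\nsize 1024\n',
    );
    // -> { version: 'https://git-lfs.github.com/spec/v1', sha: 'abc123', size: 1024 }

    // getLargeMediaPatternsFromGitAttributesFile keeps only patterns whose filter/diff/merge attributes are all 'lfs'
    const patterns = getLargeMediaPatternsFromGitAttributesFile(
      '*.png filter=lfs diff=lfs merge=lfs -text\n*.jpg filter=lfs diff=lfs merge=lfs -text\n',
    );
    // -> ['*.png', '*.jpg']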
@@ -52,6 +52,14 @@ import {
   FetchError as FE,
   parseContentKey,
 } from './API';
+import {
+  createPointerFile,
+  getLargeMediaFilteredMediaFiles,
+  getLargeMediaPatternsFromGitAttributesFile,
+  parsePointerFile,
+  getPointerFileForMediaFileObj,
+  PointerFile as PF,
+} from './git-lfs';
 
 export type AsyncLock = AL;
 export type Implementation = I;
@@ -78,6 +86,7 @@ export type ApiRequest =
   | string;
 export type Config = C;
 export type FetchError = FE;
+export type PointerFile = PF;
 
 export const NetlifyCmsLibUtil = {
   APIError,
@@ -118,6 +127,11 @@ export const NetlifyCmsLibUtil = {
   runWithLock,
   PreviewState,
   parseContentKey,
+  createPointerFile,
+  getLargeMediaFilteredMediaFiles,
+  getLargeMediaPatternsFromGitAttributesFile,
+  parsePointerFile,
+  getPointerFileForMediaFileObj,
 };
 export {
   APIError,
@@ -161,4 +175,9 @@ export {
   runWithLock,
   PreviewState,
   parseContentKey,
+  createPointerFile,
+  getLargeMediaFilteredMediaFiles,
+  getLargeMediaPatternsFromGitAttributesFile,
+  parsePointerFile,
+  getPointerFileForMediaFileObj,
 };