Netlify Large Media integration (#2124)
This commit is contained in:
committed by
Shawn Erquhart
parent
17ae6f3045
commit
da2249c651
@ -127,7 +127,7 @@ export default class GitGatewayAuthenticationPage extends React.Component {
|
||||
return;
|
||||
}
|
||||
|
||||
AuthenticationPage.authClient
|
||||
GitGatewayAuthenticationPage.authClient
|
||||
.login(this.state.email, this.state.password, true)
|
||||
.then(user => {
|
||||
this.props.onLogin(user);
|
||||
|
@ -1,13 +1,20 @@
|
||||
import GoTrue from 'gotrue-js';
|
||||
import jwtDecode from 'jwt-decode';
|
||||
import { get, pick, intersection } from 'lodash';
|
||||
import { APIError, unsentRequest } from 'netlify-cms-lib-util';
|
||||
import { fromPairs, get, pick, intersection, unzip } from 'lodash';
|
||||
import ini from 'ini';
|
||||
import { APIError, getBlobSHA, unsentRequest } from 'netlify-cms-lib-util';
|
||||
import { GitHubBackend } from 'netlify-cms-backend-github';
|
||||
import { GitLabBackend } from 'netlify-cms-backend-gitlab';
|
||||
import { BitBucketBackend, API as BitBucketAPI } from 'netlify-cms-backend-bitbucket';
|
||||
import GitHubAPI from './GitHubAPI';
|
||||
import GitLabAPI from './GitLabAPI';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import {
|
||||
parsePointerFile,
|
||||
createPointerFile,
|
||||
getLargeMediaPatternsFromGitAttributesFile,
|
||||
getClient,
|
||||
} from './netlify-lfs-client';
|
||||
|
||||
const localHosts = {
|
||||
localhost: true,
|
||||
@ -17,6 +24,7 @@ const localHosts = {
|
||||
// Default paths for the Netlify services this backend talks to; each is
// resolved against the site URL by `getEndpoint` unless overridden in config.
const defaults = {
  identity: '/.netlify/identity',
  gateway: '/.netlify/git',
  largeMedia: '/.netlify/large-media',
};
|
||||
|
||||
function getEndpoint(endpoint, netlifySiteURL) {
|
||||
@ -58,7 +66,10 @@ export default class GitGateway {
|
||||
config.getIn(['backend', 'gateway_url'], defaults.gateway),
|
||||
netlifySiteURL,
|
||||
);
|
||||
|
||||
this.netlifyLargeMediaURL = getEndpoint(
|
||||
config.getIn(['backend', 'large_media_url'], defaults.largeMedia),
|
||||
netlifySiteURL,
|
||||
);
|
||||
const backendTypeRegex = /\/(github|gitlab|bitbucket)\/?$/;
|
||||
const backendTypeMatches = this.gatewayUrl.match(backendTypeRegex);
|
||||
if (backendTypeMatches) {
|
||||
@ -196,14 +207,136 @@ export default class GitGateway {
|
||||
getEntry(collection, slug, path) {
|
||||
return this.backend.getEntry(collection, slug, path);
|
||||
}
|
||||
|
||||
getMedia() {
|
||||
return this.backend.getMedia();
|
||||
return Promise.all([this.backend.getMedia(), this.getLargeMediaClient()]).then(
|
||||
async ([mediaFiles, largeMediaClient]) => {
|
||||
if (!largeMediaClient.enabled) {
|
||||
return mediaFiles;
|
||||
}
|
||||
const largeMediaURLThunks = await this.getLargeMedia(mediaFiles);
|
||||
return mediaFiles.map(({ id, url, getDisplayURL, ...rest }) => ({
|
||||
...rest,
|
||||
id,
|
||||
url,
|
||||
urlIsPublicPath: false,
|
||||
getDisplayURL: largeMediaURLThunks[id] ? largeMediaURLThunks[id] : getDisplayURL,
|
||||
}));
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
// this method memoizes this._getLargeMediaClient so that there can
|
||||
// only be one client at a time
|
||||
getLargeMediaClient() {
|
||||
if (this._largeMediaClientPromise) {
|
||||
return this._largeMediaClientPromise;
|
||||
}
|
||||
this._largeMediaClientPromise = this._getLargeMediaClient();
|
||||
return this._largeMediaClientPromise;
|
||||
}
|
||||
_getLargeMediaClient() {
|
||||
const netlifyLargeMediaEnabledPromise = this.api
|
||||
.readFile('.lfsconfig')
|
||||
.then(ini.decode)
|
||||
.then(({ lfs: { url } }) => new URL(url))
|
||||
.then(lfsURL => ({ enabled: lfsURL.hostname.endsWith('netlify.com') }))
|
||||
.catch(err => ({ enabled: false, err }));
|
||||
|
||||
const lfsPatternsPromise = this.api
|
||||
.readFile('.gitattributes')
|
||||
.then(getLargeMediaPatternsFromGitAttributesFile)
|
||||
.then(patterns => ({ patterns }))
|
||||
.catch(err => (err.message.includes('404') ? [] : { err }));
|
||||
|
||||
return Promise.all([netlifyLargeMediaEnabledPromise, lfsPatternsPromise]).then(
|
||||
([{ enabled: maybeEnabled }, { patterns, err: patternsErr }]) => {
|
||||
const enabled = maybeEnabled && !patternsErr;
|
||||
|
||||
// We expect LFS patterns to exist when the .lfsconfig states
|
||||
// that we're using Netlify Large Media
|
||||
if (maybeEnabled && patternsErr) {
|
||||
console.error(patternsErr);
|
||||
}
|
||||
|
||||
return getClient({
|
||||
enabled,
|
||||
rootURL: this.netlifyLargeMediaURL,
|
||||
makeAuthorizedRequest: this.requestFunction,
|
||||
patterns,
|
||||
transformImages: this.config.getIn(
|
||||
['backend', 'use_large_media_transforms_in_media_library'],
|
||||
true,
|
||||
)
|
||||
? { nf_resize: 'fit', w: 280, h: 160 }
|
||||
: false,
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
getLargeMedia(mediaFiles) {
|
||||
return this.getLargeMediaClient().then(client => {
|
||||
const largeMediaItems = mediaFiles
|
||||
.filter(({ path }) => client.matchPath(path))
|
||||
.map(({ id, path }) => ({ path, sha: id }));
|
||||
return this.backend
|
||||
.fetchFiles(largeMediaItems)
|
||||
.then(items =>
|
||||
items.map(({ file: { sha }, data }) => {
|
||||
const parsedPointerFile = parsePointerFile(data);
|
||||
return [
|
||||
{
|
||||
pointerId: sha,
|
||||
resourceId: parsedPointerFile.sha,
|
||||
},
|
||||
parsedPointerFile,
|
||||
];
|
||||
}),
|
||||
)
|
||||
.then(unzip)
|
||||
.then(async ([idMaps, files]) => [
|
||||
idMaps,
|
||||
await client.getResourceDownloadURLThunks(files).then(fromPairs),
|
||||
])
|
||||
.then(([idMaps, resourceMap]) =>
|
||||
idMaps.map(({ pointerId, resourceId }) => [pointerId, resourceMap[resourceId]]),
|
||||
)
|
||||
.then(fromPairs);
|
||||
});
|
||||
}
|
||||
persistEntry(entry, mediaFiles, options) {
|
||||
return this.backend.persistEntry(entry, mediaFiles, options);
|
||||
}
|
||||
persistMedia(mediaFile, options) {
|
||||
return this.backend.persistMedia(mediaFile, options);
|
||||
const { fileObj, path, value } = mediaFile;
|
||||
const { name, size } = fileObj;
|
||||
return this.getLargeMediaClient().then(client => {
|
||||
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
|
||||
if (!client.enabled || !client.matchPath(fixedPath)) {
|
||||
return this.backend.persistMedia(mediaFile, options);
|
||||
}
|
||||
|
||||
return getBlobSHA(fileObj).then(async sha => {
|
||||
await client.uploadResource({ sha, size }, fileObj);
|
||||
const pointerFileString = createPointerFile({ sha, size });
|
||||
const pointerFileBlob = new Blob([pointerFileString]);
|
||||
const pointerFile = new File([pointerFileBlob], name, { type: 'text/plain' });
|
||||
const pointerFileSHA = await getBlobSHA(pointerFile);
|
||||
const persistMediaArgument = {
|
||||
fileObj: pointerFile,
|
||||
size: pointerFileBlob.size,
|
||||
path,
|
||||
sha: pointerFileSHA,
|
||||
raw: pointerFileString,
|
||||
value,
|
||||
};
|
||||
const persistedMediaFile = await this.backend.persistMedia(persistMediaArgument, options);
|
||||
return {
|
||||
...persistedMediaFile,
|
||||
urlIsPublicPath: false,
|
||||
};
|
||||
});
|
||||
});
|
||||
}
|
||||
deleteFile(path, commitMessage, options) {
|
||||
return this.backend.deleteFile(path, commitMessage, options);
|
||||
|
@ -0,0 +1,189 @@
|
||||
import { filter, flow, fromPairs, map } from 'lodash/fp';
|
||||
import minimatch from 'minimatch';
|
||||
|
||||
//
|
||||
// Pointer file parsing
|
||||
|
||||
// Tokenizing helpers shared by the pointer-file and .gitattributes parsers.
const splitIntoLines = text => text.split('\n');
const splitIntoWords = text => text.split(/\s+/g);
const isNonEmptyString = candidate => candidate !== '';
// Trims every line and drops the blank ones.
const withoutEmptyLines = lines => lines.map(line => line.trim()).filter(isNonEmptyString);
|
||||
/**
 * Parses Git LFS pointer file text into an object.
 *
 * Each non-empty line is a whitespace-separated `key value` pair; the
 * result exposes `size` as a number and `sha` (extracted from the
 * `oid sha256:<sha>` field), plus any remaining fields verbatim
 * (e.g. `version`).
 *
 * @param {string} pointerFileContents - raw pointer file text.
 * @returns {{ size: number, sha: string }} parsed pointer data.
 */
export const parsePointerFile = pointerFileContents => {
  const fields = Object.fromEntries(
    pointerFileContents
      .split('\n')
      .map(line => line.trim())
      .filter(line => line !== '')
      .map(line => line.split(/\s+/g)),
  );
  const { size, oid, ...rest } = fields;
  return {
    // FIX: always pass the radix — the original used bare parseInt(size).
    size: parseInt(size, 10),
    // `oid` has the form `sha256:<sha>`; keep only the hash.
    sha: oid.split(':')[1],
    ...rest,
  };
};
|
||||
|
||||
// Serializes `{ size, sha }` into canonical Git LFS pointer file text
// (trailing newline included).
export const createPointerFile = ({ size, sha }) =>
  [
    'version https://git-lfs.github.com/spec/v1',
    `oid sha256:${sha}`,
    `size ${size}`,
    '',
  ].join('\n');
|
||||
|
||||
//
|
||||
// .gitattributes file parsing
|
||||
|
||||
// Strips a trailing `# comment` (keeps everything before the first '#').
const removeGitAttributesCommentsFromLine = line => {
  const commentStart = line.indexOf('#');
  return commentStart === -1 ? line : line.slice(0, commentStart);
};
||||
|
||||
// Parses a single gitattributes attribute token. Settings come in three
// forms:
//   key=value  -> [key, value]   (explicit value)
//   -key       -> [key, false]   (leading hyphen unsets)
//   key        -> [key, true]    (bare key sets)
const parseGitPatternAttribute = attributeString => {
  if (attributeString.includes('=')) {
    return attributeString.split('=');
  }
  return attributeString.startsWith('-')
    ? [attributeString.slice(1), false]
    : [attributeString, true];
};
|
||||
|
||||
// Parses a list of attribute tokens into a { name: value } object.
const parseGitPatternAttributes = attributes =>
  Object.fromEntries(attributes.map(parseGitPatternAttribute));
|
||||
|
||||
// Parses one gitattributes line into [pattern, { attr: value, ... }].
const parseGitAttributesPatternLine = line => {
  const [pattern, ...attributes] = splitIntoWords(line);
  return [pattern, parseGitPatternAttributes(attributes)];
};
|
||||
|
||||
// Parses a whole .gitattributes file into [pattern, attributes] pairs,
// ignoring comments and blank lines.
const parseGitAttributesFileToPatternAttributePairs = fileContents =>
  withoutEmptyLines(
    splitIntoLines(fileContents).map(removeGitAttributesCommentsFromLine),
  ).map(parseGitAttributesPatternLine);
|
||||
|
||||
// Extracts the path patterns that are fully LFS-tracked: a pattern counts
// only when filter, diff, and merge are all set to "lfs".
export const getLargeMediaPatternsFromGitAttributesFile = fileContents =>
  parseGitAttributesFileToPatternAttributePairs(fileContents)
    .filter(
      ([, attributes]) =>
        attributes.filter === 'lfs' && attributes.diff === 'lfs' && attributes.merge === 'lfs',
    )
    .map(([pattern]) => pattern);
|
||||
|
||||
// True when `path` matches any configured LFS pattern (basename matching
// enabled, as git does for patterns without slashes).
export const matchPath = ({ patterns }, path) => {
  for (const pattern of patterns) {
    if (minimatch(path, pattern, { matchBase: true })) {
      return true;
    }
  }
  return false;
};
|
||||
|
||||
//
|
||||
// API interactions
|
||||
|
||||
// Standard Git LFS content negotiation headers.
const defaultContentHeaders = {
  Accept: 'application/vnd.git-lfs+json',
  ['Content-Type']: 'application/vnd.git-lfs+json',
};

/**
 * Checks whether a resource already exists on the LFS server via the
 * `/verify` endpoint.
 *
 * @param {{ rootURL: string, makeAuthorizedRequest: Function }} clientConfig
 * @param {{ sha: string, size: number }} resource descriptor.
 * @returns {Promise<boolean>} true if present, false on 404.
 * @throws {Error} on any other non-ok response status.
 */
const resourceExists = async ({ rootURL, makeAuthorizedRequest }, { sha, size }) => {
  const response = await makeAuthorizedRequest({
    url: `${rootURL}/verify`,
    method: 'POST',
    headers: defaultContentHeaders,
    body: JSON.stringify({ oid: sha, size }),
  });
  if (response.ok) {
    return true;
  }
  if (response.status === 404) {
    return false;
  }

  // FIX: the original fell through here and returned `undefined` (with a
  // TODO about which error to throw), silently treating server errors as
  // "resource missing". Surface them explicitly instead.
  throw new Error(`Unexpected response from Git LFS verify endpoint: ${response.status}`);
};
|
||||
|
||||
// Returns a zero-argument thunk that downloads the resource blob and
// resolves to an object URL for it ('' on any failure, after logging).
const getDownloadURLThunkFromSha = (clientConfig, sha) => () => {
  const { rootURL, makeAuthorizedRequest, transformImages } = clientConfig;
  const hasTransforms = Boolean(transformImages) && Object.keys(transformImages).length > 0;
  const query = hasTransforms
    ? `?nf_resize=${transformImages.nf_resize}&w=${transformImages.w}&h=${transformImages.h}`
    : '';
  return makeAuthorizedRequest(`${rootURL}/origin/${sha}${query}`)
    .then(res => (res.ok ? res : Promise.reject(res)))
    .then(res => res.blob())
    .then(blob => URL.createObjectURL(blob))
    .catch(err => {
      console.error(err);
      return '';
    });
};

// We allow users to get thunks which load the blobs instead of fully
// resolved blob URLs so that media clients can download the blobs
// lazily. This behaves more similarly to the behavior of string
// URLs, which only trigger an image download when the DOM element for
// that image is created.
const getResourceDownloadURLThunks = (clientConfig, objects) => {
  const pairs = objects.map(({ sha }) => [sha, getDownloadURLThunkFromSha(clientConfig, sha)]);
  return Promise.resolve(pairs);
};
|
||||
|
||||
// Eagerly resolves every thunk, yielding [sha, objectURL] pairs.
const getResourceDownloadURLs = (clientConfig, objects) =>
  getResourceDownloadURLThunks(clientConfig, objects).then(pairs =>
    Promise.all(pairs.map(([sha, thunk]) => Promise.all([sha, thunk()]))),
  );
|
||||
|
||||
// Builds a Git LFS Batch API "upload" request body; the API expects the
// hash under `oid`, so `sha` is renamed on the way out.
const uploadOperation = objects => {
  const lfsObjects = objects.map(({ sha, ...rest }) => ({ ...rest, oid: sha }));
  return {
    operation: 'upload',
    transfers: ['basic'],
    objects: lfsObjects,
  };
};
|
||||
|
||||
// Asks the Batch API for upload URLs, one per object, preserving order.
// Throws if the server reports a per-object error.
const getResourceUploadURLs = async ({ rootURL, makeAuthorizedRequest }, objects) => {
  const response = await makeAuthorizedRequest({
    url: `${rootURL}/objects/batch`,
    method: 'POST',
    headers: defaultContentHeaders,
    body: JSON.stringify(uploadOperation(objects)),
  });
  const { objects: responseObjects } = await response.json();
  return responseObjects.map(object => {
    if (object.error) {
      throw new Error(object.error.message);
    }
    return object.actions.upload.href;
  });
};
|
||||
|
||||
// Uploads the blob with a plain PUT. `clientConfig` is accepted (though
// unused) so the function matches the (config, ...args) calling convention
// used throughout this module.
const uploadBlob = (clientConfig, uploadURL, blob) => {
  return fetch(uploadURL, {
    body: blob,
    method: 'PUT',
  });
};
|
||||
|
||||
// Uploads a resource unless the server already has it; resolves to the
// resource's sha either way.
const uploadResource = async (clientConfig, { sha, size }, resource) => {
  const alreadyUploaded = await resourceExists(clientConfig, { sha, size });
  if (alreadyUploaded) {
    return sha;
  }
  const uploadURLs = await getResourceUploadURLs(clientConfig, [{ sha, size }]);
  await uploadBlob(clientConfig, uploadURLs[0], resource);
  return sha;
};
|
||||
|
||||
//
|
||||
// Create Large Media client
|
||||
|
||||
// Partially applies `config` as the first argument of `fn`.
const configureFn = (config, fn) => (...args) => fn(config, ...args);

// The raw client API surface; each function takes the client config as its
// first argument until bound by getClient below.
const clientFns = {
  resourceExists,
  getResourceUploadURLs,
  getResourceDownloadURLs,
  getResourceDownloadURLThunks,
  uploadResource,
  matchPath,
};

// Builds a Large Media client: every clientFns entry pre-bound to
// `clientConfig`, plus the `patterns` and `enabled` flags exposed directly.
export const getClient = clientConfig => {
  const configuredFns = Object.fromEntries(
    Object.keys(clientFns).map(key => [key, configureFn(clientConfig, clientFns[key])]),
  );
  return {
    ...configuredFns,
    patterns: clientConfig.patterns,
    enabled: clientConfig.enabled,
  };
};
|
Reference in New Issue
Block a user