2020-01-15 00:15:14 +02:00
|
|
|
import semaphore, { Semaphore } from 'semaphore';
|
2020-04-01 06:13:27 +03:00
|
|
|
import { trimStart } from 'lodash';
|
2020-01-15 00:15:14 +02:00
|
|
|
import { stripIndent } from 'common-tags';
|
|
|
|
import {
|
|
|
|
CURSOR_COMPATIBILITY_SYMBOL,
|
2020-04-01 06:13:27 +03:00
|
|
|
filterByExtension,
|
2020-01-15 00:15:14 +02:00
|
|
|
unsentRequest,
|
|
|
|
basename,
|
|
|
|
getBlobSHA,
|
|
|
|
Entry,
|
|
|
|
ApiRequest,
|
|
|
|
Cursor,
|
|
|
|
AssetProxy,
|
|
|
|
PersistOptions,
|
|
|
|
DisplayURL,
|
|
|
|
Implementation,
|
|
|
|
entriesByFolder,
|
|
|
|
entriesByFiles,
|
|
|
|
User,
|
|
|
|
Credentials,
|
|
|
|
getMediaDisplayURL,
|
|
|
|
getMediaAsBlob,
|
|
|
|
Config,
|
|
|
|
ImplementationFile,
|
|
|
|
unpublishedEntries,
|
|
|
|
runWithLock,
|
|
|
|
AsyncLock,
|
|
|
|
asyncLock,
|
|
|
|
getPreviewStatus,
|
2020-01-21 18:57:36 +02:00
|
|
|
getLargeMediaPatternsFromGitAttributesFile,
|
|
|
|
getPointerFileForMediaFileObj,
|
|
|
|
getLargeMediaFilteredMediaFiles,
|
|
|
|
FetchError,
|
2020-02-10 18:05:47 +02:00
|
|
|
blobToFileObj,
|
2020-02-24 23:44:10 +01:00
|
|
|
contentKeyFromBranch,
|
|
|
|
generateContentKey,
|
2020-04-01 06:13:27 +03:00
|
|
|
localForage,
|
|
|
|
allEntriesByFolder,
|
2020-06-03 12:44:03 +03:00
|
|
|
AccessTokenError,
|
2020-06-18 10:11:37 +03:00
|
|
|
branchFromContentKey,
|
2020-01-15 00:15:14 +02:00
|
|
|
} from 'netlify-cms-lib-util';
|
2020-04-01 06:13:27 +03:00
|
|
|
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
|
2020-01-15 00:15:14 +02:00
|
|
|
import AuthenticationPage from './AuthenticationPage';
|
|
|
|
import API, { API_NAME } from './API';
|
2020-01-21 18:57:36 +02:00
|
|
|
import { GitLfsClient } from './git-lfs-client';
|
2020-01-15 00:15:14 +02:00
|
|
|
|
|
|
|
// Maximum number of concurrent media-file downloads (size of the display-URL semaphore).
const MAX_CONCURRENT_DOWNLOADS = 10;

// Public Atlassian status page for Bitbucket; surfaced to users by `status()`.
const STATUS_PAGE = 'https://bitbucket.status.atlassian.com';
// JSON endpoint listing per-component health on the status page.
const BITBUCKET_STATUS_ENDPOINT = `${STATUS_PAGE}/api/v2/components.json`;
// The status components that must all report 'operational' for the API to be considered up.
const BITBUCKET_OPERATIONAL_UNITS = ['API', 'Authentication and user management', 'Git LFS'];
// Shape of one entry in the status endpoint's `components` array.
type BitbucketStatusComponent = {
  id: string;
  name: string;
  status: string;
};
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// Implementation wrapper class
|
|
|
|
export default class BitbucketBackend implements Implementation {
|
|
|
|
lock: AsyncLock;
|
|
|
|
api: API | null;
|
|
|
|
updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
|
|
|
|
options: {
|
|
|
|
proxied: boolean;
|
|
|
|
API: API | null;
|
|
|
|
updateUserCredentials: (args: { token: string; refresh_token: string }) => Promise<null>;
|
|
|
|
initialWorkflowStatus: string;
|
|
|
|
};
|
|
|
|
repo: string;
|
|
|
|
branch: string;
|
|
|
|
apiRoot: string;
|
|
|
|
baseUrl: string;
|
|
|
|
siteId: string;
|
|
|
|
token: string | null;
|
|
|
|
mediaFolder: string;
|
|
|
|
refreshToken?: string;
|
|
|
|
refreshedTokenPromise?: Promise<string>;
|
|
|
|
authenticator?: NetlifyAuthenticator;
|
|
|
|
_mediaDisplayURLSem?: Semaphore;
|
|
|
|
squashMerges: boolean;
|
|
|
|
previewContext: string;
|
2020-01-21 18:57:36 +02:00
|
|
|
largeMediaURL: string;
|
|
|
|
_largeMediaClientPromise?: Promise<GitLfsClient>;
|
2020-06-03 12:44:03 +03:00
|
|
|
authType: string;
|
2020-01-15 00:15:14 +02:00
|
|
|
|
|
|
|
constructor(config: Config, options = {}) {
|
|
|
|
this.options = {
|
|
|
|
proxied: false,
|
|
|
|
API: null,
|
|
|
|
updateUserCredentials: async () => null,
|
|
|
|
initialWorkflowStatus: '',
|
|
|
|
...options,
|
|
|
|
};
|
|
|
|
|
|
|
|
if (
|
|
|
|
!this.options.proxied &&
|
|
|
|
(config.backend.repo === null || config.backend.repo === undefined)
|
|
|
|
) {
|
|
|
|
throw new Error('The BitBucket backend needs a "repo" in the backend configuration.');
|
|
|
|
}
|
|
|
|
|
|
|
|
this.api = this.options.API || null;
|
|
|
|
|
|
|
|
this.updateUserCredentials = this.options.updateUserCredentials;
|
|
|
|
|
|
|
|
this.repo = config.backend.repo || '';
|
|
|
|
this.branch = config.backend.branch || 'master';
|
|
|
|
this.apiRoot = config.backend.api_root || 'https://api.bitbucket.org/2.0';
|
|
|
|
this.baseUrl = config.base_url || '';
|
|
|
|
this.siteId = config.site_id || '';
|
2020-01-21 18:57:36 +02:00
|
|
|
this.largeMediaURL =
|
|
|
|
config.backend.large_media_url || `https://bitbucket.org/${config.backend.repo}/info/lfs`;
|
2020-01-15 00:15:14 +02:00
|
|
|
this.token = '';
|
|
|
|
this.mediaFolder = config.media_folder;
|
|
|
|
this.squashMerges = config.backend.squash_merges || false;
|
|
|
|
this.previewContext = config.backend.preview_context || '';
|
|
|
|
this.lock = asyncLock();
|
2020-06-03 12:44:03 +03:00
|
|
|
this.authType = config.backend.auth_type || '';
|
2020-01-15 00:15:14 +02:00
|
|
|
}
|
|
|
|
|
2020-04-01 06:13:27 +03:00
|
|
|
isGitBackend() {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2020-06-03 12:44:03 +03:00
|
|
|
async status() {
|
2020-06-15 10:59:28 -04:00
|
|
|
const api = await fetch(BITBUCKET_STATUS_ENDPOINT)
|
|
|
|
.then(res => res.json())
|
|
|
|
.then(res => {
|
|
|
|
return res['components']
|
|
|
|
.filter((statusComponent: BitbucketStatusComponent) =>
|
|
|
|
BITBUCKET_OPERATIONAL_UNITS.includes(statusComponent.name),
|
|
|
|
)
|
|
|
|
.every(
|
|
|
|
(statusComponent: BitbucketStatusComponent) => statusComponent.status === 'operational',
|
|
|
|
);
|
|
|
|
})
|
|
|
|
.catch(e => {
|
|
|
|
console.warn('Failed getting BitBucket status', e);
|
|
|
|
return true;
|
|
|
|
});
|
|
|
|
|
|
|
|
let auth = false;
|
|
|
|
// no need to check auth if api is down
|
|
|
|
if (api) {
|
|
|
|
auth =
|
|
|
|
(await this.api
|
|
|
|
?.user()
|
|
|
|
.then(user => !!user)
|
|
|
|
.catch(e => {
|
|
|
|
console.warn('Failed getting Bitbucket user', e);
|
|
|
|
return false;
|
|
|
|
})) || false;
|
|
|
|
}
|
2020-06-03 12:44:03 +03:00
|
|
|
|
2020-06-15 10:59:28 -04:00
|
|
|
return { auth: { status: auth }, api: { status: api, statusPage: STATUS_PAGE } };
|
2020-06-03 12:44:03 +03:00
|
|
|
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
authComponent() {
|
|
|
|
return AuthenticationPage;
|
|
|
|
}
|
|
|
|
|
|
|
|
setUser(user: { token: string }) {
|
|
|
|
this.token = user.token;
|
|
|
|
this.api = new API({
|
|
|
|
requestFunction: this.apiRequestFunction,
|
|
|
|
branch: this.branch,
|
|
|
|
repo: this.repo,
|
|
|
|
squashMerges: this.squashMerges,
|
|
|
|
initialWorkflowStatus: this.options.initialWorkflowStatus,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2020-04-01 06:13:27 +03:00
|
|
|
requestFunction = async (req: ApiRequest) => {
|
|
|
|
const token = await this.getToken();
|
|
|
|
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
|
|
|
|
return unsentRequest.performRequest(authorizedRequest);
|
|
|
|
};
|
2020-01-21 18:57:36 +02:00
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
restoreUser(user: User) {
|
|
|
|
return this.authenticate(user);
|
|
|
|
}
|
|
|
|
|
|
|
|
async authenticate(state: Credentials) {
|
|
|
|
this.token = state.token as string;
|
|
|
|
this.refreshToken = state.refresh_token;
|
|
|
|
this.api = new API({
|
|
|
|
requestFunction: this.apiRequestFunction,
|
|
|
|
branch: this.branch,
|
|
|
|
repo: this.repo,
|
|
|
|
apiRoot: this.apiRoot,
|
|
|
|
squashMerges: this.squashMerges,
|
|
|
|
initialWorkflowStatus: this.options.initialWorkflowStatus,
|
|
|
|
});
|
|
|
|
|
|
|
|
const isCollab = await this.api.hasWriteAccess().catch(error => {
|
|
|
|
error.message = stripIndent`
|
|
|
|
Repo "${this.repo}" not found.
|
|
|
|
|
|
|
|
Please ensure the repo information is spelled correctly.
|
|
|
|
|
|
|
|
If the repo is private, make sure you're logged into a Bitbucket account with access.
|
|
|
|
`;
|
|
|
|
throw error;
|
|
|
|
});
|
|
|
|
|
|
|
|
// Unauthorized user
|
|
|
|
if (!isCollab) {
|
|
|
|
throw new Error('Your BitBucket user account does not have access to this repo.');
|
|
|
|
}
|
|
|
|
|
|
|
|
const user = await this.api.user();
|
|
|
|
|
|
|
|
// Authorized user
|
|
|
|
return {
|
|
|
|
...user,
|
|
|
|
name: user.display_name,
|
|
|
|
login: user.username,
|
|
|
|
token: state.token,
|
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
avatar_url: user.links.avatar.href,
|
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
refresh_token: state.refresh_token,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
getRefreshedAccessToken() {
|
2020-06-03 12:44:03 +03:00
|
|
|
if (this.authType === 'implicit') {
|
|
|
|
throw new AccessTokenError(`Can't refresh access token when using implicit auth`);
|
|
|
|
}
|
2020-01-15 00:15:14 +02:00
|
|
|
if (this.refreshedTokenPromise) {
|
|
|
|
return this.refreshedTokenPromise;
|
|
|
|
}
|
|
|
|
|
|
|
|
// instantiating a new Authenticator on each refresh isn't ideal,
|
2020-06-03 12:44:03 +03:00
|
|
|
if (!this.authenticator) {
|
2020-01-15 00:15:14 +02:00
|
|
|
const cfg = {
|
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
base_url: this.baseUrl,
|
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
site_id: this.siteId,
|
|
|
|
};
|
|
|
|
this.authenticator = new NetlifyAuthenticator(cfg);
|
|
|
|
}
|
|
|
|
|
|
|
|
this.refreshedTokenPromise = this.authenticator! // eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
.refresh({ provider: 'bitbucket', refresh_token: this.refreshToken as string })
|
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
.then(({ token, refresh_token }) => {
|
|
|
|
this.token = token;
|
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
this.refreshToken = refresh_token;
|
|
|
|
this.refreshedTokenPromise = undefined;
|
2020-04-01 06:13:27 +03:00
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
// eslint-disable-next-line @typescript-eslint/camelcase
|
|
|
|
this.updateUserCredentials({ token, refresh_token });
|
|
|
|
return token;
|
|
|
|
});
|
|
|
|
|
|
|
|
return this.refreshedTokenPromise;
|
|
|
|
}
|
|
|
|
|
|
|
|
logout() {
|
|
|
|
this.token = null;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
getToken() {
|
|
|
|
if (this.refreshedTokenPromise) {
|
|
|
|
return this.refreshedTokenPromise;
|
|
|
|
}
|
|
|
|
|
|
|
|
return Promise.resolve(this.token);
|
|
|
|
}
|
|
|
|
|
|
|
|
apiRequestFunction = async (req: ApiRequest) => {
|
|
|
|
const token = (this.refreshedTokenPromise
|
|
|
|
? await this.refreshedTokenPromise
|
|
|
|
: this.token) as string;
|
|
|
|
|
2020-04-01 06:13:27 +03:00
|
|
|
const authorizedRequest = unsentRequest.withHeaders({ Authorization: `Bearer ${token}` }, req);
|
|
|
|
const response: Response = await unsentRequest.performRequest(authorizedRequest);
|
|
|
|
if (response.status === 401) {
|
|
|
|
const json = await response.json().catch(() => null);
|
|
|
|
if (json && json.type === 'error' && /^access token expired/i.test(json.error.message)) {
|
|
|
|
const newToken = await this.getRefreshedAccessToken();
|
|
|
|
const reqWithNewToken = unsentRequest.withHeaders(
|
|
|
|
{
|
|
|
|
Authorization: `Bearer ${newToken}`,
|
|
|
|
},
|
|
|
|
req,
|
|
|
|
) as ApiRequest;
|
|
|
|
return unsentRequest.performRequest(reqWithNewToken);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return response;
|
2020-01-15 00:15:14 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
async entriesByFolder(folder: string, extension: string, depth: number) {
|
|
|
|
let cursor: Cursor;
|
|
|
|
|
|
|
|
const listFiles = () =>
|
2020-06-18 10:11:37 +03:00
|
|
|
this.api!.listFiles(folder, depth, 20, this.branch).then(({ entries, cursor: c }) => {
|
2020-02-10 11:44:00 +02:00
|
|
|
cursor = c.mergeMeta({ extension });
|
2020-04-01 06:13:27 +03:00
|
|
|
return entries.filter(e => filterByExtension(e, extension));
|
2020-01-15 00:15:14 +02:00
|
|
|
});
|
|
|
|
|
2020-04-01 06:13:27 +03:00
|
|
|
const head = await this.api!.defaultBranchCommitSha();
|
|
|
|
const readFile = (path: string, id: string | null | undefined) => {
|
|
|
|
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
|
|
|
};
|
|
|
|
|
|
|
|
const files = await entriesByFolder(
|
|
|
|
listFiles,
|
|
|
|
readFile,
|
|
|
|
this.api!.readFileMetadata.bind(this.api),
|
|
|
|
API_NAME,
|
|
|
|
);
|
2020-01-15 00:15:14 +02:00
|
|
|
|
|
|
|
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
|
|
|
|
// @ts-ignore
|
|
|
|
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
|
|
|
return files;
|
|
|
|
}
|
|
|
|
|
2020-04-01 06:13:27 +03:00
|
|
|
async listAllFiles(folder: string, extension: string, depth: number) {
|
2020-06-18 10:11:37 +03:00
|
|
|
const files = await this.api!.listAllFiles(folder, depth, this.branch);
|
2020-04-01 06:13:27 +03:00
|
|
|
const filtered = files.filter(file => filterByExtension(file, extension));
|
|
|
|
return filtered;
|
|
|
|
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
async allEntriesByFolder(folder: string, extension: string, depth: number) {
|
2020-04-01 06:13:27 +03:00
|
|
|
const head = await this.api!.defaultBranchCommitSha();
|
|
|
|
|
|
|
|
const readFile = (path: string, id: string | null | undefined) => {
|
|
|
|
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
|
|
|
};
|
2020-01-15 00:15:14 +02:00
|
|
|
|
2020-04-01 06:13:27 +03:00
|
|
|
const files = await allEntriesByFolder({
|
|
|
|
listAllFiles: () => this.listAllFiles(folder, extension, depth),
|
|
|
|
readFile,
|
|
|
|
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
|
|
|
|
apiName: API_NAME,
|
|
|
|
branch: this.branch,
|
|
|
|
localForage,
|
|
|
|
folder,
|
|
|
|
extension,
|
|
|
|
depth,
|
|
|
|
getDefaultBranch: () => Promise.resolve({ name: this.branch, sha: head }),
|
|
|
|
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
|
|
|
|
getDifferences: (source, destination) => this.api!.getDifferences(source, destination),
|
|
|
|
getFileId: path => Promise.resolve(this.api!.getFileId(head, path)),
|
|
|
|
filterFile: file => filterByExtension(file, extension),
|
|
|
|
});
|
2020-01-15 00:15:14 +02:00
|
|
|
return files;
|
|
|
|
}
|
|
|
|
|
|
|
|
async entriesByFiles(files: ImplementationFile[]) {
|
2020-04-01 06:13:27 +03:00
|
|
|
const head = await this.api!.defaultBranchCommitSha();
|
|
|
|
const readFile = (path: string, id: string | null | undefined) => {
|
|
|
|
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
|
|
|
};
|
|
|
|
|
|
|
|
return entriesByFiles(files, readFile, this.api!.readFileMetadata.bind(this.api), API_NAME);
|
2020-01-15 00:15:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
getEntry(path: string) {
|
|
|
|
return this.api!.readFile(path).then(data => ({
|
|
|
|
file: { path, id: null },
|
|
|
|
data: data as string,
|
|
|
|
}));
|
|
|
|
}
|
|
|
|
|
|
|
|
getMedia(mediaFolder = this.mediaFolder) {
|
2020-06-18 10:11:37 +03:00
|
|
|
return this.api!.listAllFiles(mediaFolder, 1, this.branch).then(files =>
|
2020-01-15 00:15:14 +02:00
|
|
|
files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
2020-01-21 18:57:36 +02:00
|
|
|
getLargeMediaClient() {
|
|
|
|
if (!this._largeMediaClientPromise) {
|
|
|
|
this._largeMediaClientPromise = (async (): Promise<GitLfsClient> => {
|
|
|
|
const patterns = await this.api!.readFile('.gitattributes')
|
|
|
|
.then(attributes => getLargeMediaPatternsFromGitAttributesFile(attributes as string))
|
|
|
|
.catch((err: FetchError) => {
|
|
|
|
if (err.status === 404) {
|
|
|
|
console.log('This 404 was expected and handled appropriately.');
|
|
|
|
} else {
|
|
|
|
console.error(err);
|
|
|
|
}
|
|
|
|
return [];
|
|
|
|
});
|
|
|
|
|
|
|
|
return new GitLfsClient(
|
|
|
|
!!(this.largeMediaURL && patterns.length > 0),
|
|
|
|
this.largeMediaURL,
|
|
|
|
patterns,
|
|
|
|
this.requestFunction,
|
|
|
|
);
|
|
|
|
})();
|
|
|
|
}
|
|
|
|
return this._largeMediaClientPromise;
|
|
|
|
}
|
|
|
|
|
2020-01-15 00:15:14 +02:00
|
|
|
getMediaDisplayURL(displayURL: DisplayURL) {
|
|
|
|
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
|
|
|
|
return getMediaDisplayURL(
|
|
|
|
displayURL,
|
|
|
|
this.api!.readFile.bind(this.api!),
|
|
|
|
this._mediaDisplayURLSem,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async getMediaFile(path: string) {
|
|
|
|
const name = basename(path);
|
|
|
|
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
|
2020-02-10 18:05:47 +02:00
|
|
|
const fileObj = blobToFileObj(name, blob);
|
2020-01-15 00:15:14 +02:00
|
|
|
const url = URL.createObjectURL(fileObj);
|
|
|
|
const id = await getBlobSHA(fileObj);
|
|
|
|
|
|
|
|
return {
|
|
|
|
id,
|
|
|
|
displayURL: url,
|
|
|
|
path,
|
|
|
|
name,
|
|
|
|
size: fileObj.size,
|
|
|
|
file: fileObj,
|
|
|
|
url,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
|
2020-01-21 18:57:36 +02:00
|
|
|
const client = await this.getLargeMediaClient();
|
2020-01-15 00:15:14 +02:00
|
|
|
// persistEntry is a transactional operation
|
|
|
|
return runWithLock(
|
|
|
|
this.lock,
|
2020-01-21 18:57:36 +02:00
|
|
|
async () =>
|
|
|
|
this.api!.persistFiles(
|
|
|
|
entry,
|
|
|
|
client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
|
|
|
|
options,
|
|
|
|
),
|
2020-01-15 00:15:14 +02:00
|
|
|
'Failed to acquire persist entry lock',
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
|
2020-01-21 18:57:36 +02:00
|
|
|
const { fileObj, path } = mediaFile;
|
|
|
|
const displayURL = URL.createObjectURL(fileObj);
|
|
|
|
const client = await this.getLargeMediaClient();
|
|
|
|
const fixedPath = path.startsWith('/') ? path.slice(1) : path;
|
|
|
|
if (!client.enabled || !client.matchPath(fixedPath)) {
|
|
|
|
return this._persistMedia(mediaFile, options);
|
|
|
|
}
|
|
|
|
|
|
|
|
const persistMediaArgument = await getPointerFileForMediaFileObj(client, fileObj as File, path);
|
|
|
|
return {
|
|
|
|
...(await this._persistMedia(persistMediaArgument, options)),
|
|
|
|
displayURL,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
async _persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
|
2020-01-15 00:15:14 +02:00
|
|
|
const fileObj = mediaFile.fileObj as File;
|
|
|
|
|
|
|
|
const [id] = await Promise.all([
|
|
|
|
getBlobSHA(fileObj),
|
|
|
|
this.api!.persistFiles(null, [mediaFile], options),
|
|
|
|
]);
|
|
|
|
|
|
|
|
const url = URL.createObjectURL(fileObj);
|
|
|
|
|
|
|
|
return {
|
|
|
|
displayURL: url,
|
|
|
|
path: trimStart(mediaFile.path, '/k'),
|
|
|
|
name: fileObj!.name,
|
|
|
|
size: fileObj!.size,
|
|
|
|
id,
|
|
|
|
file: fileObj,
|
|
|
|
url,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
deleteFile(path: string, commitMessage: string) {
|
|
|
|
return this.api!.deleteFile(path, commitMessage);
|
|
|
|
}
|
|
|
|
|
|
|
|
traverseCursor(cursor: Cursor, action: string) {
|
2020-02-10 11:44:00 +02:00
|
|
|
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
|
|
|
|
const extension = cursor.meta?.get('extension');
|
|
|
|
if (extension) {
|
2020-04-01 06:13:27 +03:00
|
|
|
entries = entries.filter(e => filterByExtension(e, extension));
|
2020-02-10 11:44:00 +02:00
|
|
|
newCursor = newCursor.mergeMeta({ extension });
|
|
|
|
}
|
2020-04-01 06:13:27 +03:00
|
|
|
const head = await this.api!.defaultBranchCommitSha();
|
|
|
|
const readFile = (path: string, id: string | null | undefined) => {
|
|
|
|
return this.api!.readFile(path, id, { head }) as Promise<string>;
|
|
|
|
};
|
|
|
|
const entriesWithData = await entriesByFiles(
|
|
|
|
entries,
|
|
|
|
readFile,
|
|
|
|
this.api!.readFileMetadata.bind(this.api)!,
|
|
|
|
API_NAME,
|
|
|
|
);
|
|
|
|
|
2020-02-10 11:44:00 +02:00
|
|
|
return {
|
2020-04-01 06:13:27 +03:00
|
|
|
entries: entriesWithData,
|
2020-01-15 00:15:14 +02:00
|
|
|
cursor: newCursor,
|
2020-02-10 11:44:00 +02:00
|
|
|
};
|
|
|
|
});
|
2020-01-15 00:15:14 +02:00
|
|
|
}
|
|
|
|
|
2020-06-18 10:11:37 +03:00
|
|
|
async loadMediaFile(path: string, id: string, { branch }: { branch: string }) {
|
|
|
|
const readFile = async (
|
2020-01-15 00:15:14 +02:00
|
|
|
path: string,
|
|
|
|
id: string | null | undefined,
|
|
|
|
{ parseText }: { parseText: boolean },
|
2020-06-18 10:11:37 +03:00
|
|
|
) => {
|
|
|
|
const content = await this.api!.readFile(path, id, { branch, parseText });
|
|
|
|
return content;
|
|
|
|
};
|
|
|
|
const blob = await getMediaAsBlob(path, id, readFile);
|
|
|
|
const name = basename(path);
|
|
|
|
const fileObj = blobToFileObj(name, blob);
|
|
|
|
return {
|
|
|
|
id: path,
|
|
|
|
displayURL: URL.createObjectURL(fileObj),
|
|
|
|
path,
|
|
|
|
name,
|
|
|
|
size: fileObj.size,
|
|
|
|
file: fileObj,
|
|
|
|
};
|
2020-01-15 00:15:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
async unpublishedEntries() {
|
|
|
|
const listEntriesKeys = () =>
|
|
|
|
this.api!.listUnpublishedBranches().then(branches =>
|
2020-02-24 23:44:10 +01:00
|
|
|
branches.map(branch => contentKeyFromBranch(branch)),
|
2020-01-15 00:15:14 +02:00
|
|
|
);
|
|
|
|
|
2020-06-18 10:11:37 +03:00
|
|
|
const ids = await unpublishedEntries(listEntriesKeys);
|
|
|
|
return ids;
|
|
|
|
}
|
2020-01-15 00:15:14 +02:00
|
|
|
|
2020-06-18 10:11:37 +03:00
|
|
|
async unpublishedEntry({
|
|
|
|
id,
|
|
|
|
collection,
|
|
|
|
slug,
|
|
|
|
}: {
|
|
|
|
id?: string;
|
|
|
|
collection?: string;
|
|
|
|
slug?: string;
|
|
|
|
}) {
|
|
|
|
if (id) {
|
|
|
|
const data = await this.api!.retrieveUnpublishedEntryData(id);
|
|
|
|
return data;
|
|
|
|
} else if (collection && slug) {
|
|
|
|
const entryId = generateContentKey(collection, slug);
|
|
|
|
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
|
|
|
|
return data;
|
|
|
|
} else {
|
|
|
|
throw new Error('Missing unpublished entry id or collection and slug');
|
|
|
|
}
|
2020-01-15 00:15:14 +02:00
|
|
|
}
|
|
|
|
|
2020-06-18 10:11:37 +03:00
|
|
|
getBranch(collection: string, slug: string) {
|
2020-02-24 23:44:10 +01:00
|
|
|
const contentKey = generateContentKey(collection, slug);
|
2020-06-18 10:11:37 +03:00
|
|
|
const branch = branchFromContentKey(contentKey);
|
|
|
|
return branch;
|
|
|
|
}
|
|
|
|
|
|
|
|
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
|
|
|
const branch = this.getBranch(collection, slug);
|
|
|
|
const data = (await this.api!.readFile(path, id, { branch })) as string;
|
|
|
|
return data;
|
|
|
|
}
|
|
|
|
|
|
|
|
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
|
|
|
const branch = this.getBranch(collection, slug);
|
|
|
|
const mediaFile = await this.loadMediaFile(path, id, { branch });
|
|
|
|
return mediaFile;
|
2020-01-15 00:15:14 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
|
|
|
// updateUnpublishedEntryStatus is a transactional operation
|
|
|
|
return runWithLock(
|
|
|
|
this.lock,
|
|
|
|
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
|
|
|
|
'Failed to acquire update entry status lock',
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async deleteUnpublishedEntry(collection: string, slug: string) {
|
|
|
|
// deleteUnpublishedEntry is a transactional operation
|
|
|
|
return runWithLock(
|
|
|
|
this.lock,
|
|
|
|
() => this.api!.deleteUnpublishedEntry(collection, slug),
|
|
|
|
'Failed to acquire delete entry lock',
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async publishUnpublishedEntry(collection: string, slug: string) {
|
|
|
|
// publishUnpublishedEntry is a transactional operation
|
|
|
|
return runWithLock(
|
|
|
|
this.lock,
|
|
|
|
() => this.api!.publishUnpublishedEntry(collection, slug),
|
|
|
|
'Failed to acquire publish entry lock',
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async getDeployPreview(collection: string, slug: string) {
|
|
|
|
try {
|
|
|
|
const statuses = await this.api!.getStatuses(collection, slug);
|
|
|
|
const deployStatus = getPreviewStatus(statuses, this.previewContext);
|
|
|
|
|
|
|
|
if (deployStatus) {
|
|
|
|
const { target_url: url, state } = deployStatus;
|
|
|
|
return { url, status: state };
|
|
|
|
} else {
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
} catch (e) {
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|