import semaphore from 'semaphore';
import { unionBy, sortBy } from 'lodash';

import { basename } from './path';

import type { Semaphore } from 'semaphore';
import type Cursor from './Cursor';
import type { AsyncLock } from './asyncLock';
import type { FileMetadata } from './API';

export type DisplayURLObject = { id: string; path: string };

export type DisplayURL = DisplayURLObject | string;

export interface ImplementationMediaFile {
  name: string;
  id: string;
  size?: number;
  displayURL?: DisplayURL;
  path: string;
  draft?: boolean;
  url?: string;
  file?: File;
}

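// Illustrative values only (hypothetical URLs/ids): a backend may describe media either
// with a plain URL string or with a `DisplayURLObject` that is resolved lazily:
//
//   const byUrl: DisplayURL = 'https://example.com/media/logo.png';
//   const byId: DisplayURL = { id: 'abc123', path: 'static/media/logo.png' };
//
//   const mediaFile: ImplementationMediaFile = {
//     id: 'abc123',
//     name: 'logo.png',
//     path: 'static/media/logo.png',
//     displayURL: byId,
//   };
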
export interface UnpublishedEntryMediaFile {
  id: string;
  path: string;
}

export interface ImplementationEntry {
  data: string;
  file: { path: string; label?: string; id?: string | null; author?: string; updatedOn?: string };
}

export interface UnpublishedEntryDiff {
  id: string;
  path: string;
  newFile: boolean;
}

export interface UnpublishedEntry {
  slug: string;
  collection: string;
  status: string;
  diffs: UnpublishedEntryDiff[];
  updatedAt: string;
}

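// Illustrative shape of an editorial-workflow entry (all values hypothetical):
//
//   const unpublished: UnpublishedEntry = {
//     slug: 'my-first-post',
//     collection: 'posts',
//     status: 'draft',
//     diffs: [{ id: 'abc123', path: 'content/posts/my-first-post.md', newFile: true }],
//     updatedAt: '2021-01-01T00:00:00Z',
//   };
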
export interface Map {
  get: <T>(key: string, defaultValue?: T) => T;
  getIn: <T>(key: string[], defaultValue?: T) => T;
  setIn: <T>(key: string[], value: T) => Map;
  set: <T>(key: string, value: T) => Map;
}

export type DataFile = {
  path: string;
  slug: string;
  raw: string;
  newPath?: string;
};

export type AssetProxy = {
  path: string;
  fileObj?: File;
  toBase64?: () => Promise<string>;
};

export type Entry = {
  dataFiles: DataFile[];
  assets: AssetProxy[];
};

export type PersistOptions = {
  newEntry?: boolean;
  commitMessage: string;
  collectionName?: string;
  useWorkflow?: boolean;
  unpublished?: boolean;
  status?: string;
};

export type DeleteOptions = {};

export type Credentials = { token: string | {}; refresh_token?: string };

export type User = Credentials & {
  backendName?: string;
  login?: string;
  name: string;
  useOpenAuthoring?: boolean;
};

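// Sketch of the arguments a backend's `persistEntry` receives (hypothetical values):
//
//   const entry: Entry = {
//     dataFiles: [{ path: 'content/posts/hello.md', slug: 'hello', raw: '---\ntitle: Hello\n---\n' }],
//     assets: [],
//   };
//   const options: PersistOptions = {
//     newEntry: true,
//     commitMessage: 'Create Post "hello"',
//     collectionName: 'posts',
//     useWorkflow: true,
//     status: 'draft',
//   };
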
export type Config = {
  backend: {
    repo?: string | null;
    open_authoring?: boolean;
    always_fork?: boolean;
    branch?: string;
    api_root?: string;
    squash_merges?: boolean;
    use_graphql?: boolean;
    preview_context?: string;
    identity_url?: string;
    gateway_url?: string;
    large_media_url?: string;
    use_large_media_transforms_in_media_library?: boolean;
    proxy_url?: string;
    auth_type?: string;
    app_id?: string;
    cms_label_prefix?: string;
    api_version?: string;
  };
  media_folder: string;
  base_url?: string;
  site_id?: string;
};

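// A minimal `Config` as a backend might receive it (repo/branch names are hypothetical):
//
//   const config: Config = {
//     backend: {
//       repo: 'owner/repo',
//       branch: 'main',
//       open_authoring: false,
//       squash_merges: true,
//     },
//     media_folder: 'static/media',
//   };
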
export interface Implementation {
  authComponent: () => void;
  restoreUser: (user: User) => Promise<User>;

  authenticate: (credentials: Credentials) => Promise<User>;
  logout: () => Promise<void> | void | null;
  getToken: () => Promise<string | null>;

  getEntry: (path: string) => Promise<ImplementationEntry>;
  entriesByFolder: (
    folder: string,
    extension: string,
    depth: number,
  ) => Promise<ImplementationEntry[]>;
  entriesByFiles: (files: ImplementationFile[]) => Promise<ImplementationEntry[]>;

  getMediaDisplayURL?: (displayURL: DisplayURL) => Promise<string>;
  getMedia: (folder?: string) => Promise<ImplementationMediaFile[]>;
  getMediaFile: (path: string) => Promise<ImplementationMediaFile>;

  persistEntry: (entry: Entry, opts: PersistOptions) => Promise<void>;
  persistMedia: (file: AssetProxy, opts: PersistOptions) => Promise<ImplementationMediaFile>;
  deleteFiles: (paths: string[], commitMessage: string) => Promise<void>;

  unpublishedEntries: () => Promise<string[]>;
  unpublishedEntry: (args: {
    id?: string;
    collection?: string;
    slug?: string;
  }) => Promise<UnpublishedEntry>;
  unpublishedEntryDataFile: (
    collection: string,
    slug: string,
    path: string,
    id: string,
  ) => Promise<string>;
  unpublishedEntryMediaFile: (
    collection: string,
    slug: string,
    path: string,
    id: string,
  ) => Promise<ImplementationMediaFile>;
  updateUnpublishedEntryStatus: (
    collection: string,
    slug: string,
    newStatus: string,
  ) => Promise<void>;
  publishUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
  deleteUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
  getDeployPreview: (
    collectionName: string,
    slug: string,
  ) => Promise<{ url: string; status: string } | null>;

  allEntriesByFolder?: (
    folder: string,
    extension: string,
    depth: number,
  ) => Promise<ImplementationEntry[]>;
  traverseCursor?: (
    cursor: Cursor,
    action: string,
  ) => Promise<{ entries: ImplementationEntry[]; cursor: Cursor }>;

  isGitBackend?: () => boolean;
  status: () => Promise<{
    auth: { status: boolean };
    api: { status: boolean; statusPage: string };
  }>;
}

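// Sketch of a custom backend implementing a subset of this contract. This is not one of
// the backends shipped with the CMS — the class name and method bodies are placeholders,
// and a complete backend must implement every required member of `Implementation`:
//
//   class MyBackend /* implements a subset of Implementation */ {
//     getToken = async () => null;
//     logout = () => undefined;
//     isGitBackend = () => false;
//     status = async () => ({
//       auth: { status: true },
//       api: { status: true, statusPage: '' },
//     });
//     getEntry = async (path: string): Promise<ImplementationEntry> => ({
//       data: '',
//       file: { path },
//     });
//   }
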
const MAX_CONCURRENT_DOWNLOADS = 10;

export type ImplementationFile = {
  id?: string | null | undefined;
  label?: string;
  path: string;
};

type ReadFile = (
  path: string,
  id: string | null | undefined,
  options: { parseText: boolean },
) => Promise<string | Blob>;

type ReadFileMetadata = (path: string, id: string | null | undefined) => Promise<FileMetadata>;

async function fetchFiles(
  files: ImplementationFile[],
  readFile: ReadFile,
  readFileMetadata: ReadFileMetadata,
  apiName: string,
) {
  // Cap the number of files read in parallel so large folders don't flood the API.
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
  files.forEach(file => {
    promises.push(
      new Promise(resolve =>
        sem.take(async () => {
          try {
            const [data, fileMetadata] = await Promise.all([
              readFile(file.path, file.id, { parseText: true }),
              readFileMetadata(file.path, file.id),
            ]);
            resolve({ file: { ...file, ...fileMetadata }, data: data as string });
            sem.leave();
          } catch (error) {
            sem.leave();
            console.error(`failed to load file from ${apiName}: ${file.path}`);
            resolve({ error: true });
          }
        }),
      ),
    );
  });
  // Drop entries that failed to load instead of rejecting the whole batch.
  return Promise.all(promises).then(loadedEntries =>
    loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
  ) as Promise<ImplementationEntry[]>;
}

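// How a Git-based backend typically delegates to the helpers below. The `this.api.*`
// callbacks here are hypothetical backend methods, shown only to illustrate the shape
// of the arguments:
//
//   entriesByFolder(
//     () => this.api.listFiles(folder, depth),
//     (path, id, options) => this.api.readFile(path, id, options),
//     (path, id) => this.api.readFileMetadata(path, id),
//     'GitHub',
//   );
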
export async function entriesByFolder(
  listFiles: () => Promise<ImplementationFile[]>,
  readFile: ReadFile,
  readFileMetadata: ReadFileMetadata,
  apiName: string,
) {
  const files = await listFiles();
  return fetchFiles(files, readFile, readFileMetadata, apiName);
}

export async function entriesByFiles(
  files: ImplementationFile[],
  readFile: ReadFile,
  readFileMetadata: ReadFileMetadata,
  apiName: string,
) {
  return fetchFiles(files, readFile, readFileMetadata, apiName);
}

export async function unpublishedEntries(listEntriesKeys: () => Promise<string[]>) {
  try {
    const keys = await listEntriesKeys();
    return keys;
  } catch (error) {
    if (error.message === 'Not Found') {
      return Promise.resolve([]);
    }
    throw error;
  }
}

export function blobToFileObj(name: string, blob: Blob) {
  // Give SVGs an explicit MIME type so they can be previewed correctly.
  const options = name.match(/\.svg$/) ? { type: 'image/svg+xml' } : {};
  return new File([blob], name, options);
}

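// Example: wrapping a fetched blob so the media library gets a named `File`:
//
//   const fileObj = blobToFileObj('logo.svg', blob);
//   // -> File { name: 'logo.svg', type: 'image/svg+xml' }
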
export async function getMediaAsBlob(path: string, id: string | null, readFile: ReadFile) {
  let blob: Blob;
  if (path.match(/\.svg$/)) {
    // SVGs are read as text so the blob can be tagged with the correct MIME type.
    const text = (await readFile(path, id, { parseText: true })) as string;
    blob = new Blob([text], { type: 'image/svg+xml' });
  } else {
    blob = (await readFile(path, id, { parseText: false })) as Blob;
  }
  return blob;
}

export async function getMediaDisplayURL(
  displayURL: DisplayURL,
  readFile: ReadFile,
  semaphore: Semaphore,
) {
  const { path, id } = displayURL as DisplayURLObject;
  return new Promise<string>((resolve, reject) =>
    semaphore.take(() =>
      getMediaAsBlob(path, id, readFile)
        .then(blob => URL.createObjectURL(blob))
        .then(resolve, reject)
        .finally(() => semaphore.leave()),
    ),
  );
}

export async function runWithLock(lock: AsyncLock, func: Function, message: string) {
  try {
    const acquired = await lock.acquire();
    if (!acquired) {
      // The lock is advisory: if it can't be acquired we log a warning and run anyway.
      console.warn(message);
    }

    const result = await func();
    return result;
  } finally {
    lock.release();
  }
}

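// Typical call site in a backend (the `lock` and `api` fields below are hypothetical):
// guard mutating Git operations so concurrent editorial actions don't interleave commits:
//
//   await runWithLock(
//     this.lock,
//     () => this.api.persistFiles(dataFiles, mediaFiles, options),
//     'Failed to acquire persist entry lock',
//   );
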
const LOCAL_KEY = 'git.local';

type LocalTree = {
  head: string;
  files: { id: string; name: string; path: string }[];
};

type GetKeyArgs = {
  branch: string;
  folder: string;
  extension: string;
  depth: number;
};

function getLocalKey({ branch, folder, extension, depth }: GetKeyArgs) {
  return `${LOCAL_KEY}.${branch}.${folder}.${extension}.${depth}`;
}

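// For example, getLocalKey({ branch: 'main', folder: 'content/posts', extension: 'md', depth: 1 })
// yields 'git.local.main.content/posts.md.1'.
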
type PersistLocalTreeArgs = GetKeyArgs & {
  localForage: LocalForage;
  localTree: LocalTree;
};

type GetLocalTreeArgs = GetKeyArgs & {
  localForage: LocalForage;
};

export async function persistLocalTree({
  localForage,
  localTree,
  branch,
  folder,
  extension,
  depth,
}: PersistLocalTreeArgs) {
  await localForage.setItem<LocalTree>(
    getLocalKey({ branch, folder, extension, depth }),
    localTree,
  );
}

export async function getLocalTree({
  localForage,
  branch,
  folder,
  extension,
  depth,
}: GetLocalTreeArgs) {
  const localTree = await localForage.getItem<LocalTree>(
    getLocalKey({ branch, folder, extension, depth }),
  );
  return localTree;
}

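// Round-trip sketch (assumes a configured localforage instance; `sha` and `files` are
// hypothetical values produced by a backend):
//
//   await persistLocalTree({ localForage, localTree: { head: sha, files }, branch, folder, extension, depth });
//   const cached = await getLocalTree({ localForage, branch, folder, extension, depth });
//   // `cached` is the persisted tree, or null if nothing was stored under this key.
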
type GetDiffFromLocalTreeMethods = {
  getDifferences: (
    to: string,
    from: string,
  ) => Promise<
    {
      oldPath: string;
      newPath: string;
      status: string;
    }[]
  >;
  filterFile: (file: { path: string; name: string }) => boolean;
  getFileId: (path: string) => Promise<string>;
};

type GetDiffFromLocalTreeArgs = GetDiffFromLocalTreeMethods & {
  branch: { name: string; sha: string };
  localTree: LocalTree;
  folder: string;
  extension: string;
  depth: number;
};

async function getDiffFromLocalTree({
  branch,
  localTree,
  folder,
  getDifferences,
  filterFile,
  getFileId,
}: GetDiffFromLocalTreeArgs) {
  const diff = await getDifferences(branch.sha, localTree.head);
  const diffFiles = diff
    .filter(d => d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder))
    .reduce((acc, d) => {
      if (d.status === 'renamed') {
        // A rename is treated as a deletion of the old path plus an addition of the new one.
        acc.push({
          path: d.oldPath,
          name: basename(d.oldPath),
          deleted: true,
        });
        acc.push({
          path: d.newPath,
          name: basename(d.newPath),
          deleted: false,
        });
      } else if (d.status === 'deleted') {
        acc.push({
          path: d.oldPath,
          name: basename(d.oldPath),
          deleted: true,
        });
      } else {
        acc.push({
          path: d.newPath || d.oldPath,
          name: basename(d.newPath || d.oldPath),
          deleted: false,
        });
      }

      return acc;
    }, [] as { path: string; name: string; deleted: boolean }[])
    .filter(filterFile);

  const diffFilesWithIds = await Promise.all(
    diffFiles.map(async file => {
      if (!file.deleted) {
        const id = await getFileId(file.path);
        return { ...file, id };
      } else {
        return { ...file, id: '' };
      }
    }),
  );

  return diffFilesWithIds;
}

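// For example, a single renamed entry from `getDifferences`, such as
// { oldPath: 'content/posts/old.md', newPath: 'content/posts/new.md', status: 'renamed' },
// expands into two records: the old path marked `deleted: true` and the new path marked
// `deleted: false`, with its id resolved via `getFileId`.
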
type AllEntriesByFolderArgs = GetKeyArgs &
  GetDiffFromLocalTreeMethods & {
    listAllFiles: (
      folder: string,
      extension: string,
      depth: number,
    ) => Promise<ImplementationFile[]>;
    readFile: ReadFile;
    readFileMetadata: ReadFileMetadata;
    getDefaultBranch: () => Promise<{ name: string; sha: string }>;
    isShaExistsInBranch: (branch: string, sha: string) => Promise<boolean>;
    apiName: string;
    localForage: LocalForage;
  };

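// Sketch of how a backend might wire up `allEntriesByFolder` below. Every `this.api.*`
// callback is a hypothetical backend/API method, named only to show which argument it
// satisfies:
//
//   allEntriesByFolder({
//     listAllFiles: (folder, extension, depth) => this.api.listAllFiles(folder, extension, depth),
//     readFile: (path, id, options) => this.api.readFile(path, id, options),
//     readFileMetadata: (path, id) => this.api.readFileMetadata(path, id),
//     apiName: 'GitHub',
//     branch: this.branch,
//     localForage,
//     folder,
//     extension,
//     depth,
//     getDefaultBranch: () => this.api.getDefaultBranch(),
//     isShaExistsInBranch: (branch, sha) => this.api.isShaExistsInBranch(branch, sha),
//     getDifferences: (to, from) => this.api.getDifferences(to, from),
//     getFileId: path => this.api.getFileId(path),
//     filterFile: file => file.name.endsWith('.' + extension),
//   });
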
export async function allEntriesByFolder({
  listAllFiles,
  readFile,
  readFileMetadata,
  apiName,
  branch,
  localForage,
  folder,
  extension,
  depth,
  getDefaultBranch,
  isShaExistsInBranch,
  getDifferences,
  getFileId,
  filterFile,
}: AllEntriesByFolderArgs) {
  async function listAllFilesAndPersist() {
    const files = await listAllFiles(folder, extension, depth);
    const branch = await getDefaultBranch();
    await persistLocalTree({
      localForage,
      localTree: {
        head: branch.sha,
        files: files.map(f => ({ id: f.id!, path: f.path, name: basename(f.path) })),
      },
      branch: branch.name,
      depth,
      extension,
      folder,
    });
    return files;
  }

  async function listFiles() {
    const localTree = await getLocalTree({ localForage, branch, folder, extension, depth });
    if (localTree) {
      const branch = await getDefaultBranch();
      // if the branch was force-pushed, the local tree head may no longer exist in the remote branch
      const localTreeInBranch = await isShaExistsInBranch(branch.name, localTree.head);
      if (!localTreeInBranch) {
        console.log(
          `Can't find local tree head '${localTree.head}' in branch '${branch.name}', rebuilding local tree`,
        );
        return listAllFilesAndPersist();
      }
      const diff = await getDiffFromLocalTree({
        branch,
        localTree,
        folder,
        extension,
        depth,
        getDifferences,
        getFileId,
        filterFile,
      }).catch(e => {
        console.log('Failed getting diff from local tree:', e);
        return null;
      });

      if (!diff) {
        console.log(`Diff is null, rebuilding local tree`);
        return listAllFilesAndPersist();
      }

      if (diff.length === 0) {
        // return local copy
        return localTree.files;
      } else {
        const deleted = diff.reduce((acc, d) => {
          acc[d.path] = d.deleted;
          return acc;
        }, {} as Record<string, boolean>);
        const newCopy = sortBy(
          unionBy(
            diff.filter(d => !deleted[d.path]),
            localTree.files.filter(f => !deleted[f.path]),
            file => file.path,
          ),
          file => file.path,
        );

        await persistLocalTree({
          localForage,
          localTree: { head: branch.sha, files: newCopy },
          branch: branch.name,
          depth,
          extension,
          folder,
        });

        return newCopy;
      }
    } else {
      return listAllFilesAndPersist();
    }
  }

  const files = await listFiles();
  return fetchFiles(files, readFile, readFileMetadata, apiName);
}