Commit: wip

ghengeveld committed Dec 13, 2023
1 parent 0ed58ed commit 6392beb
Showing 27 changed files with 402 additions and 282 deletions.
7 changes: 2 additions & 5 deletions node-src/index.test.ts
@@ -98,9 +98,6 @@ vi.mock('node-fetch', () => ({
       }
 
       if (query?.match('PublishBuildMutation')) {
-        if (variables.input.isolatorUrl.startsWith('http://throw-an-error')) {
-          throw new Error('fetch error');
-        }
        publishedBuild = { id: variables.id, ...variables.input };
        return {
          data: {
@@ -132,8 +129,8 @@
            status: 'IN_PROGRESS',
            specCount: 1,
            componentCount: 1,
+           storybookUrl: 'https://5d67dc0374b2e300209c41e7-pfkaemtlit.chromatic.com/',
            webUrl: 'http://test.com',
-           cachedUrl: 'https://5d67dc0374b2e300209c41e7-pfkaemtlit.chromatic.com/iframe.html',
            ...mockBuildFeatures,
            app: {
              account: {
@@ -405,7 +402,7 @@ it('runs in simple situations', async () => {
    storybookViewLayer: 'viewLayer',
    committerEmail: 'test@test.com',
    committerName: 'tester',
-   isolatorUrl: `https://chromatic.com/iframe.html`,
+   storybookUrl: `https://chromatic.com/`,
  });
});

2 changes: 1 addition & 1 deletion node-src/index.ts
@@ -120,7 +120,7 @@ export async function run({
    code: ctx.exitCode,
    url: ctx.build?.webUrl,
    buildUrl: ctx.build?.webUrl,
-   storybookUrl: ctx.build?.cachedUrl?.replace(/iframe\.html.*$/, ''),
+   storybookUrl: ctx.build?.storybookUrl,
    specCount: ctx.build?.specCount,
    componentCount: ctx.build?.componentCount,
    testCount: ctx.build?.testCount,
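Note: the returned `storybookUrl` now comes straight from the build record instead of being derived by stripping `iframe.html` off the old `cachedUrl`. A minimal sketch of reading it through the Node API — the `chromatic/node` entry point and `argv`-style invocation are assumed from other versions of the CLI, not shown in this diff:

    import { run } from 'chromatic/node';

    async function main() {
      // Flag names are assumed; adjust to your project setup.
      const { code, url, storybookUrl } = await run({
        argv: ['--project-token', process.env.CHROMATIC_PROJECT_TOKEN],
      });
      console.log(`Exit code: ${code}`);
      console.log(`Build: ${url}`);
      console.log(`Published Storybook: ${storybookUrl}`);
    }

    main();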
2 changes: 1 addition & 1 deletion node-src/lib/compress.ts
@@ -24,7 +24,7 @@ export default async function makeZipFile(ctx: Context, files: FileDesc[]) {
  archive.pipe(sink);
 
  files.forEach(({ localPath, targetPath: name }) => {
-   ctx.log.debug({ name }, 'Adding file to zip archive');
+   ctx.log.debug(`Adding to zip archive: ${name}`);
    archive.append(createReadStream(localPath), { name });
  });
213 changes: 140 additions & 73 deletions node-src/lib/upload.ts
@@ -1,112 +1,179 @@
 import makeZipFile from './compress';
-import { Context, FileDesc, TargetedFile } from '../types';
+import { Context, FileDesc, TargetInfo } from '../types';
 import { uploadZip, waitForUnpack } from './uploadZip';
 import { uploadFiles } from './uploadFiles';
 
-const GetUploadUrlsMutation = `
-  mutation GetUploadUrlsMutation($buildId: ObjID, $paths: [String!]!) {
-    getUploadUrls(buildId: $buildId, paths: $paths) {
-      domain
-      urls {
-        path
-        url
-        contentType
+const UploadBuildMutation = `
+  mutation UploadBuildMutation($buildId: ObjID!, $files: [FileUploadInput!]!, $zip: Boolean) {
+    uploadBuild(buildId: $buildId, files: $files, zip: $zip) {
+      info {
+        targets {
+          contentType
+          fileKey
+          filePath
+          formAction
+          formFields
+        }
+        zipTarget {
+          contentType
+          fileKey
+          filePath
+          formAction
+          formFields
+          sentinelUrl
+        }
+      }
+      userErrors {
+        ... on UserError {
+          message
+        }
+        ... on MaxFileCountExceededError {
+          maxFileCount
+          fileCount
+        }
+        ... on MaxFileSizeExceededError {
+          maxFileSize
+          filePaths
+        }
       }
     }
   }
 `;
-interface GetUploadUrlsMutationResult {
-  getUploadUrls: {
-    domain: string;
-    urls: {
-      path: string;
-      url: string;
-      contentType: string;
-    }[];
-  };
-}
-
-const GetZipUploadUrlMutation = `
-  mutation GetZipUploadUrlMutation($buildId: ObjID) {
-    getZipUploadUrl(buildId: $buildId) {
-      domain
-      url
-      sentinelUrl
-    }
-  }
-`;
-interface GetZipUploadUrlMutationResult {
-  getZipUploadUrl: {
-    domain: string;
-    url: string;
-    sentinelUrl: string;
+interface UploadBuildMutationResult {
+  uploadBuild: {
+    info?: {
+      targets: TargetInfo[];
+      zipTarget?: TargetInfo & { sentinelUrl: string };
+    };
+    userErrors: {
+      message: string;
+      maxFileCount?: number;
+      maxFileSize?: number;
+      fileCount?: number;
+      filePaths?: string[];
+    }[];
   };
 }
 
-export async function uploadAsIndividualFiles(
+export async function uploadBuild(
   ctx: Context,
   files: FileDesc[],
   options: {
     onStart?: () => void;
     onProgress?: (progress: number, total: number) => void;
-    onComplete?: (uploadedBytes: number, domain?: string) => void;
+    onComplete?: (uploadedBytes: number, uploadedFiles: number) => void;
     onError?: (error: Error, path?: string) => void;
   } = {}
 ) {
-  const { getUploadUrls } = await ctx.client.runQuery<GetUploadUrlsMutationResult>(
-    GetUploadUrlsMutation,
-    { buildId: ctx.announcedBuild.id, paths: files.map(({ targetPath }) => targetPath) }
+  const { uploadBuild } = await ctx.client.runQuery<UploadBuildMutationResult>(
+    UploadBuildMutation,
+    {
+      buildId: ctx.announcedBuild.id,
+      files: files.map(({ contentHash, contentLength, targetPath }) => ({
+        contentHash,
+        contentLength,
+        filePath: targetPath,
+      })),
+      zip: ctx.options.zip,
+    }
   );
-  const { domain, urls } = getUploadUrls;
-  const targets = urls.map<TargetedFile>(({ path, url, contentType }) => {
-    const file = files.find((f) => f.targetPath === path);
-    return { ...file, contentType, targetUrl: url };
+
+  if (uploadBuild.userErrors.length) {
+    uploadBuild.userErrors.forEach((e) => ctx.log.error(e.message));
+    return options.onError?.(new Error('Upload does not meet requirements'));
+  }
+
+  const targets = uploadBuild.info.targets.map((target) => {
+    const file = files.find((f) => f.targetPath === target.filePath);
+    return { ...file, ...target };
   });
+  const total = targets.reduce((acc, { contentLength }) => acc + contentLength, 0);
+
+  if (!targets.length) {
+    ctx.log.debug('No new files to upload, continuing');
+    return options.onComplete?.(0, 0);
+  }
 
   options.onStart?.();
 
-  const total = targets.reduce((acc, { contentLength }) => acc + contentLength, 0);
+  if (uploadBuild.info.zipTarget) {
+    try {
+      const { path, size } = await makeZipFile(ctx, targets);
+      const compressionRate = (total - size) / total;
+      ctx.log.debug(`Compression reduced upload size by ${Math.round(compressionRate * 100)}%`);
+
+      const target = { ...uploadBuild.info.zipTarget, contentLength: size, localPath: path };
+      await uploadZip(ctx, target, (progress) => options.onProgress?.(progress, size));
+      await waitForUnpack(ctx, target.sentinelUrl);
+      return options.onComplete?.(size, targets.length);
+    } catch (err) {
+      ctx.log.debug({ err }, 'Error uploading zip, falling back to uploading individual files');
+    }
+  }
 
   try {
     await uploadFiles(ctx, targets, (progress) => options.onProgress?.(progress, total));
+    return options.onComplete?.(total, targets.length);
   } catch (e) {
     return options.onError?.(e, files.some((f) => f.localPath === e.message) && e.message);
   }
-
-  options.onComplete?.(total, domain);
 }
 
-export async function uploadAsZipFile(
-  ctx: Context,
-  files: FileDesc[],
-  options: {
-    onStart?: () => void;
-    onProgress?: (progress: number, total: number) => void;
-    onComplete?: (uploadedBytes: number, domain?: string) => void;
-    onError?: (error: Error, path?: string) => void;
-  } = {}
-) {
-  const originalSize = files.reduce((acc, { contentLength }) => acc + contentLength, 0);
-  const zipped = await makeZipFile(ctx, files);
-  const { path, size } = zipped;
-
-  if (size > originalSize) throw new Error('Zip file is larger than individual files');
-  ctx.log.debug(`Compression reduced upload size by ${originalSize - size} bytes`);
-
-  const { getZipUploadUrl } = await ctx.client.runQuery<GetZipUploadUrlMutationResult>(
-    GetZipUploadUrlMutation,
-    { buildId: ctx.announcedBuild.id }
-  );
-  const { domain, url, sentinelUrl } = getZipUploadUrl;
-
-  options.onStart?.();
-
-  try {
-    await uploadZip(ctx, path, url, size, (progress) => options.onProgress?.(progress, size));
-  } catch (e) {
-    return options.onError?.(e, path);
-  }
-
-  await waitForUnpack(ctx, sentinelUrl);
-
-  options.onComplete?.(size, domain);
+const UploadMetadataMutation = `
+  mutation UploadMetadataMutation($buildId: ObjID!, $files: [FileUploadInput!]!) {
+    uploadMetadata(buildId: $buildId, files: $files) {
+      info {
+        targets {
+          contentType
+          fileKey
+          filePath
+          formAction
+          formFields
+        }
+      }
+      userErrors {
+        ... on UserError {
+          message
+        }
+      }
+    }
+  }
+`;
+
+interface UploadMetadataMutationResult {
+  uploadMetadata: {
+    info?: {
+      targets: TargetInfo[];
+    };
+    userErrors: {
+      message: string;
+    }[];
+  };
+}
+
+export async function uploadMetadata(ctx: Context, files: FileDesc[]) {
+  const { uploadMetadata } = await ctx.client.runQuery<UploadMetadataMutationResult>(
+    UploadMetadataMutation,
+    {
+      buildId: ctx.announcedBuild.id,
+      files: files.map(({ contentHash, contentLength, targetPath }) => ({
+        contentHash,
+        contentLength,
+        filePath: targetPath,
+      })),
+    }
+  );
+
+  if (uploadMetadata.info) {
+    const targets = uploadMetadata.info.targets.map((target) => {
+      const file = files.find((f) => f.targetPath === target.filePath);
+      return { ...file, ...target };
+    });
+    await uploadFiles(ctx, targets);
+  }
+
+  if (uploadMetadata.userErrors.length) {
+    uploadMetadata.userErrors.forEach((e) => ctx.log.warn(e.message));
+  }
 }
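Note: `uploadBuild` merges the old `uploadAsIndividualFiles` and `uploadAsZipFile` paths into one entry point — the server may advertise an optional `zipTarget`, and a failed zip upload falls back to per-file uploads. A rough usage sketch against the new signature; the `Context` is assumed to be a fully initialized CLI context, and the callback bodies are illustrative:

    import { uploadBuild } from './lib/upload';
    import { Context, FileDesc } from './types';

    export async function uploadStorybook(ctx: Context, files: FileDesc[]) {
      await uploadBuild(ctx, files, {
        onStart: () => ctx.log.debug('Starting upload'),
        onProgress: (progress, total) =>
          ctx.log.debug(`Uploaded ${progress} of ${total} bytes`),
        onComplete: (uploadedBytes, uploadedFiles) =>
          ctx.log.debug(`Uploaded ${uploadedFiles} files (${uploadedBytes} bytes)`),
        onError: (error, path) =>
          ctx.log.error(path ? `Failed to upload ${path}: ${error.message}` : error.message),
      });
    }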
41 changes: 18 additions & 23 deletions node-src/lib/uploadFiles.ts
@@ -2,24 +2,24 @@ import retry from 'async-retry';
 import { createReadStream } from 'fs';
 import pLimit from 'p-limit';
 import progress from 'progress-stream';
-import { Context, TargetedFile } from '../types';
+import { Context, FileDesc, TargetInfo } from '../types';
+import { FormData } from 'node-fetch';
+import { filesize } from 'filesize';
 
 export async function uploadFiles(
   ctx: Context,
-  files: TargetedFile[],
-  onProgress: (progress: number) => void
+  targets: (FileDesc & TargetInfo)[],
+  onProgress?: (progress: number) => void
 ) {
   const { experimental_abortSignal: signal } = ctx.options;
   const limitConcurrency = pLimit(10);
   let totalProgress = 0;
 
   await Promise.all(
-    files.map(({ localPath, targetUrl, contentType, contentLength }) => {
+    targets.map(({ contentLength, filePath, formAction, formFields, localPath }) => {
       let fileProgress = 0; // The bytes uploaded for this particular file
 
-      ctx.log.debug(
-        `Uploading ${contentLength} bytes of ${contentType} for '${localPath}' to '${targetUrl}'`
-      );
+      ctx.log.debug(`Uploading ${filePath} (${filesize(contentLength)})`);
 
       return limitConcurrency(() =>
        retry(
@@ -33,37 +33,32 @@ export async function uploadFiles(
            progressStream.on('progress', ({ delta }) => {
              fileProgress += delta; // We upload multiple files so we only care about the delta
              totalProgress += delta;
-             onProgress(totalProgress);
+             onProgress?.(totalProgress);
            });
 
+           const formData = new FormData();
+           Object.entries(formFields).forEach(([k, v]) => formData.append(k, v));
+           formData.append('file', createReadStream(localPath).pipe(progressStream)); // must be the last one
+
            const res = await ctx.http.fetch(
-             targetUrl,
-             {
-               method: 'PUT',
-               body: createReadStream(localPath).pipe(progressStream),
-               headers: {
-                 'content-type': contentType,
-                 'content-length': contentLength.toString(),
-                 'cache-control': 'max-age=31536000',
-               },
-               signal,
-             },
+             formAction,
+             { body: formData, method: 'POST', signal },
              { retries: 0 } // already retrying the whole operation
            );
 
            if (!res.ok) {
-             ctx.log.debug(`Uploading '${localPath}' failed: %O`, res);
+             ctx.log.debug(`Uploading ${localPath} failed: %O`, res);
              throw new Error(localPath);
            }
-           ctx.log.debug(`Uploaded '${localPath}'.`);
+           ctx.log.debug(`Uploaded ${filePath} (${filesize(contentLength)})`);
          },
          {
            retries: ctx.env.CHROMATIC_RETRIES,
            onRetry: (err: Error) => {
              totalProgress -= fileProgress;
              fileProgress = 0;
-             ctx.log.debug('Retrying upload %s, %O', targetUrl, err);
-             onProgress(totalProgress);
+             ctx.log.debug('Retrying upload for %s, %O', localPath, err);
+             onProgress?.(totalProgress);
            },
          }
        )
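Note: uploads switch from a presigned `PUT` of the raw body (with explicit content headers) to a presigned `POST` of multipart form data, where the signed `formFields` are appended first and the file stream must be the last field. A standalone sketch of that pattern — the `UploadTarget` shape is hypothetical, mirroring `TargetInfo` from the diff, and a strictly spec-compliant `FormData` may require a `Blob` rather than a stream, though the diff appends a stream:

    import fetch, { FormData } from 'node-fetch';
    import { createReadStream } from 'fs';

    interface UploadTarget {
      formAction: string; // presigned POST endpoint
      formFields: Record<string, string>; // signed policy fields (key, signature, etc.)
      localPath: string;
    }

    async function uploadOne({ formAction, formFields, localPath }: UploadTarget) {
      const formData = new FormData();
      Object.entries(formFields).forEach(([k, v]) => formData.append(k, v));
      formData.append('file', createReadStream(localPath)); // must be appended last
      const res = await fetch(formAction, { method: 'POST', body: formData });
      if (!res.ok) throw new Error(`Upload failed with status ${res.status}`);
    }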