Skip to content

Commit

Permalink
Merge branch 'ghengeveld/ap-4034-batch-file-upload-requests-to-handle…
Browse files Browse the repository at this point in the history
…-thousands-of-files' into ghengeveld/ap-3971-cli-pass-content-hashes-to-uploadbuild-mutation
  • Loading branch information
ghengeveld committed Jan 15, 2024
2 parents 96834e6 + a36bcdb commit 9d6269c
Show file tree
Hide file tree
Showing 14 changed files with 293 additions and 67 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@ name: Release

on:
# PRs have their own canary releases which always run but don't publish the GitHub Action.
# Types are configured so this also runs when labels are added or removed.
# To allow debugging the release process, this does not have a paths filter.
pull_request:
branches:
- main
types: [opened, synchronize, reopened, labeled, unlabeled]

# Pushes to main trigger a release and publish the GitHub Action. To allow the occasional update
# to markdown and config files, only changes to actual source code trigger a release.
Expand Down
33 changes: 33 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,36 @@
# v10.2.1 (Wed Jan 10 2024)

#### 🐛 Bug Fix

- Revert "Replace `getUploadUrls` with `uploadBuild` mutation" [#883](https://github.com/chromaui/chromatic-cli/pull/883) ([@JonathanKolnik](https://github.com/JonathanKolnik))
- Run publish-action script as afterShipIt hook [#877](https://github.com/chromaui/chromatic-cli/pull/877) ([@ghengeveld](https://github.com/ghengeveld))

#### Authors: 2

- Gert Hengeveld ([@ghengeveld](https://github.com/ghengeveld))
- Jono Kolnik ([@JonathanKolnik](https://github.com/JonathanKolnik))

---

# v10.2.0 (Thu Dec 21 2023)

#### 🚀 Enhancement

- Replace `getUploadUrls` with `uploadBuild` mutation [#876](https://github.com/chromaui/chromatic-cli/pull/876) ([@ghengeveld](https://github.com/ghengeveld))
- Implement file hashing for to-be-uploaded files [#870](https://github.com/chromaui/chromatic-cli/pull/870) ([@ghengeveld](https://github.com/ghengeveld))

#### 🐛 Bug Fix

- Allow overriding `NODE_ENV` with `STORYBOOK_NODE_ENV` [#879](https://github.com/chromaui/chromatic-cli/pull/879) ([@tmeasday](https://github.com/tmeasday))
- Use code splitting in tsup CJS output [#873](https://github.com/chromaui/chromatic-cli/pull/873) ([@tmeasday](https://github.com/tmeasday))

#### Authors: 2

- Gert Hengeveld ([@ghengeveld](https://github.com/ghengeveld))
- Tom Coleman ([@tmeasday](https://github.com/tmeasday))

---

# v10.1.0 (Thu Dec 07 2023)

#### 🚀 Enhancement
Expand Down
3 changes: 3 additions & 0 deletions node-src/lib/getEnv.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ export interface Env {
STORYBOOK_BUILD_TIMEOUT: number;
STORYBOOK_CLI_FLAGS_BY_VERSION: typeof STORYBOOK_CLI_FLAGS_BY_VERSION;
STORYBOOK_VERIFY_TIMEOUT: number;
STORYBOOK_NODE_ENV: string;
}

const {
Expand All @@ -33,6 +34,7 @@ const {
LOGGLY_CUSTOMER_TOKEN = 'b5e26204-cdc5-4c78-a9cc-c69eb7fabad3',
STORYBOOK_BUILD_TIMEOUT = String(10 * 60 * 1000),
STORYBOOK_VERIFY_TIMEOUT = String(3 * 60 * 1000),
STORYBOOK_NODE_ENV = 'production',
} = process.env;

const ENVIRONMENT_WHITELIST = [/^GERRIT/, /^TRAVIS/];
Expand Down Expand Up @@ -70,4 +72,5 @@ export default () =>
STORYBOOK_BUILD_TIMEOUT: parseInt(STORYBOOK_BUILD_TIMEOUT, 10),
STORYBOOK_CLI_FLAGS_BY_VERSION,
STORYBOOK_VERIFY_TIMEOUT: parseInt(STORYBOOK_VERIFY_TIMEOUT, 10),
STORYBOOK_NODE_ENV,
} as Env);
107 changes: 71 additions & 36 deletions node-src/lib/upload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ import { uploadZip, waitForUnpack } from './uploadZip';
import { uploadFiles } from './uploadFiles';
import { maxFileCountExceeded } from '../ui/messages/errors/maxFileCountExceeded';
import { maxFileSizeExceeded } from '../ui/messages/errors/maxFileSizeExceeded';
import skippingEmptyFiles from '../ui/messages/warnings/skippingEmptyFiles';

// This limit is imposed by the uploadBuild mutation
const MAX_FILES_PER_REQUEST = 1000;

const UploadBuildMutation = `
mutation UploadBuildMutation($buildId: ObjID!, $files: [FileUploadInput!]!, $zip: Boolean) {
Expand Down Expand Up @@ -80,63 +84,94 @@ export async function uploadBuild(
onError?: (error: Error, path?: string) => void;
} = {}
) {
const { uploadBuild } = await ctx.client.runQuery<UploadBuildMutationResult>(
UploadBuildMutation,
{
buildId: ctx.announcedBuild.id,
files: files.map(({ contentHash, contentLength, targetPath }) => ({
contentHash,
contentLength,
filePath: targetPath,
})),
zip: ctx.options.zip,
}
);
ctx.uploadedBytes = 0;
ctx.uploadedFiles = 0;

if (uploadBuild.userErrors.length) {
uploadBuild.userErrors.forEach((e) => {
if (e.__typename === 'MaxFileCountExceededError') {
ctx.log.error(maxFileCountExceeded(e));
} else if (e.__typename === 'MaxFileSizeExceededError') {
ctx.log.error(maxFileSizeExceeded(e));
} else {
ctx.log.error(e.message);
const targets: (TargetInfo & FileDesc)[] = [];
let zipTarget: (TargetInfo & { sentinelUrl: string }) | undefined;

const batches = files.reduce<typeof files[]>((acc, file, fileIndex) => {
const batchIndex = Math.floor(fileIndex / MAX_FILES_PER_REQUEST);
if (!acc[batchIndex]) acc[batchIndex] = [];
acc[batchIndex].push(file);
return acc;
}, []);

// The uploadBuild mutation has to run in batches to avoid hitting request/response payload limits
// or running out of memory. These run sequentially to avoid too many concurrent requests.
// The uploading itself still happens without batching, since it has its own concurrency limiter.
for (const [index, batch] of batches.entries()) {
ctx.log.debug(`Running uploadBuild batch ${index + 1} / ${batches.length}`);

const { uploadBuild } = await ctx.client.runQuery<UploadBuildMutationResult>(
UploadBuildMutation,
{
buildId: ctx.announcedBuild.id,
files: batch.map(({ contentHash, contentLength, targetPath }) => ({
contentHash,
contentLength,
filePath: targetPath,
})),
zip: ctx.options.zip,
}
});
return options.onError?.(new Error('Upload rejected due to user error'));
}
);

const targets = uploadBuild.info.targets.map((target) => {
const file = files.find((f) => f.targetPath === target.filePath);
return { ...file, ...target };
});
if (uploadBuild.userErrors.length) {
uploadBuild.userErrors.forEach((e) => {
if (e.__typename === 'MaxFileCountExceededError') {
ctx.log.error(maxFileCountExceeded(e));
} else if (e.__typename === 'MaxFileSizeExceededError') {
ctx.log.error(maxFileSizeExceeded(e));
} else {
ctx.log.error(e.message);
}
});
return options.onError?.(new Error('Upload rejected due to user error'));
}

targets.push(
...uploadBuild.info.targets.map((target) => {
const file = batch.find((f) => f.targetPath === target.filePath);
return { ...file, ...target };
})
);
zipTarget = uploadBuild.info.zipTarget;
}

if (!targets.length) {
ctx.log.debug('No new files to upload, continuing');
return options.onComplete?.(0, 0);
return;
}

options.onStart?.();
// Uploading zero-length files is valid, so this might add up to 0.
const totalBytes = targets.reduce((sum, { contentLength }) => sum + contentLength, 0);

const total = targets.reduce((acc, { contentLength }) => acc + contentLength, 0);
if (uploadBuild.info.zipTarget) {
if (zipTarget) {
try {
const { path, size } = await makeZipFile(ctx, targets);
const compressionRate = (total - size) / total;
const compressionRate = totalBytes && (totalBytes - size) / totalBytes;
ctx.log.debug(`Compression reduced upload size by ${Math.round(compressionRate * 100)}%`);

const target = { ...uploadBuild.info.zipTarget, contentLength: size, localPath: path };
const target = { ...zipTarget, contentLength: size, localPath: path };
await uploadZip(ctx, target, (progress) => options.onProgress?.(progress, size));
await waitForUnpack(ctx, target.sentinelUrl);
return options.onComplete?.(size, targets.length);
ctx.uploadedBytes += size;
ctx.uploadedFiles += targets.length;
return;
} catch (err) {
ctx.log.debug({ err }, 'Error uploading zip, falling back to uploading individual files');
}
}

try {
await uploadFiles(ctx, targets, (progress) => options.onProgress?.(progress, total));
return options.onComplete?.(total, targets.length);
const nonEmptyFiles = targets.filter(({ contentLength }) => contentLength > 0);
if (nonEmptyFiles.length !== targets.length) {
const emptyFiles = targets.filter(({ contentLength }) => contentLength === 0);
ctx.log.warn(skippingEmptyFiles({ emptyFiles }));
}
await uploadFiles(ctx, nonEmptyFiles, (progress) => options.onProgress?.(progress, totalBytes));
ctx.uploadedBytes += totalBytes;
ctx.uploadedFiles += nonEmptyFiles.length;
} catch (e) {
return options.onError?.(e, files.some((f) => f.localPath === e.message) && e.message);
}
Expand Down
33 changes: 33 additions & 0 deletions node-src/tasks/build.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,16 @@ import mockfs from 'mock-fs';
import { afterEach, describe, expect, it, vi } from 'vitest';

import { buildStorybook, setSourceDir, setBuildCommand } from './build';
import { beforeEach } from 'node:test';

vi.mock('execa');

const command = vi.mocked(execaCommand);

beforeEach(() => {
command.mockClear();
})

afterEach(() => {
mockfs.restore();
});
Expand Down Expand Up @@ -132,4 +137,32 @@ describe('buildStorybook', () => {
await expect(buildStorybook(ctx)).rejects.toThrow('Command failed');
expect(ctx.log.error).toHaveBeenCalledWith(expect.stringContaining('Operation timed out'));
});

// By default the Storybook build subprocess is spawned with NODE_ENV=production.
it('passes NODE_ENV=production', async () => {
  const buildCommand = 'npm run build:storybook --script-args';
  const ctx = {
    buildCommand,
    env: { STORYBOOK_BUILD_TIMEOUT: 1000 },
    log: { debug: vi.fn() },
    options: { storybookLogFile: 'build-storybook.log' },
  } as any;

  await buildStorybook(ctx);

  const expectedEnv = expect.objectContaining({ env: { NODE_ENV: 'production' } });
  expect(command).toHaveBeenCalledWith(buildCommand, expectedEnv);
});

// Setting STORYBOOK_NODE_ENV in the environment overrides the NODE_ENV
// passed to the Storybook build subprocess.
it('allows overriding NODE_ENV with STORYBOOK_NODE_ENV', async () => {
  const buildCommand = 'npm run build:storybook --script-args';
  const ctx = {
    buildCommand,
    env: { STORYBOOK_BUILD_TIMEOUT: 1000, STORYBOOK_NODE_ENV: 'test' },
    log: { debug: vi.fn() },
    options: { storybookLogFile: 'build-storybook.log' },
  } as any;

  await buildStorybook(ctx);

  const expectedEnv = expect.objectContaining({ env: { NODE_ENV: 'test' } });
  expect(command).toHaveBeenCalledWith(buildCommand, expectedEnv);
});
});
2 changes: 1 addition & 1 deletion node-src/tasks/build.ts
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ export const buildStorybook = async (ctx: Context) => {
const subprocess = execaCommand(ctx.buildCommand, {
stdio: [null, logFile, logFile],
signal,
env: { NODE_ENV: 'production' },
env: { NODE_ENV: ctx.env.STORYBOOK_NODE_ENV || 'production' },
});
await Promise.race([subprocess, timeoutAfter(ctx.env.STORYBOOK_BUILD_TIMEOUT)]);
} catch (e) {
Expand Down
90 changes: 90 additions & 0 deletions node-src/tasks/upload.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -404,6 +404,96 @@ describe('uploadStorybook', () => {
});
});

// FIX: this test was committed as `it.only`, which makes vitest skip every
// other test in the suite. Focused tests must never be checked in.
it('batches calls to uploadBuild mutation', async () => {
  const client = { runQuery: vi.fn() };

  // First batch: the mutation returns presigned targets for files 0..999
  // (MAX_FILES_PER_REQUEST is 1000, so 1001 files require two requests).
  client.runQuery.mockReturnValueOnce({
    uploadBuild: {
      info: {
        targets: Array.from({ length: 1000 }, (_, i) => ({
          contentType: 'application/javascript',
          filePath: `${i}.js`,
          formAction: `https://s3.amazonaws.com/presigned?${i}.js`,
          formFields: {},
        })),
      },
      userErrors: [],
    },
  });

  // Second batch: the single overflow file beyond the 1000-file limit.
  client.runQuery.mockReturnValueOnce({
    uploadBuild: {
      info: {
        targets: [
          {
            contentType: 'application/javascript',
            filePath: `1000.js`,
            formAction: `https://s3.amazonaws.com/presigned?1000.js`,
            formFields: {},
          },
        ],
      },
      userErrors: [],
    },
  });

  createReadStreamMock.mockReturnValue({ pipe: vi.fn((x) => x) } as any);
  http.fetch.mockReturnValue({ ok: true });

  // 1001 files where file i has contentLength i; file 0 is empty on purpose.
  const fileInfo = {
    lengths: Array.from({ length: 1001 }, (_, i) => ({ knownAs: `${i}.js`, contentLength: i })),
    paths: Array.from({ length: 1001 }, (_, i) => `${i}.js`),
    total: Array.from({ length: 1001 }, (_, i) => i).reduce((a, v) => a + v),
  };
  const ctx = {
    client,
    env,
    log,
    http,
    sourceDir: '/static/',
    options: {},
    fileInfo,
    announcedBuild: { id: '1' },
  } as any;
  await uploadStorybook(ctx, {} as any);

  // The mutation ran once per batch, with at most 1000 files per request.
  expect(client.runQuery).toHaveBeenCalledTimes(2);
  expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), {
    buildId: '1',
    files: Array.from({ length: 1000 }, (_, i) => ({
      contentHash: undefined,
      contentLength: i,
      filePath: `${i}.js`,
    })),
  });
  expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), {
    buildId: '1',
    files: [{ contentHash: undefined, contentLength: 1000, filePath: `1000.js` }], // 0-based index makes this file #1001
  });

  // Empty files are not uploaded
  expect(http.fetch).not.toHaveBeenCalledWith(
    'https://s3.amazonaws.com/presigned?0.js',
    expect.objectContaining({ body: expect.any(FormData), method: 'POST' }),
    expect.objectContaining({ retries: 0 })
  );
  expect(http.fetch).toHaveBeenCalledWith(
    'https://s3.amazonaws.com/presigned?1.js',
    expect.objectContaining({ body: expect.any(FormData), method: 'POST' }),
    expect.objectContaining({ retries: 0 })
  );
  expect(http.fetch).toHaveBeenCalledWith(
    'https://s3.amazonaws.com/presigned?999.js',
    expect.objectContaining({ body: expect.any(FormData), method: 'POST' }),
    expect.objectContaining({ retries: 0 })
  );
  expect(http.fetch).toHaveBeenCalledWith(
    'https://s3.amazonaws.com/presigned?1000.js',
    expect.objectContaining({ body: expect.any(FormData), method: 'POST' }),
    expect.objectContaining({ retries: 0 })
  );

  // Sum of 0..1000 bytes; 1000 uploaded files (the empty 0.js was skipped).
  expect(ctx.uploadedBytes).toBe(500500);
  expect(ctx.uploadedFiles).toBe(1000);
});

describe('with file hashes', () => {
it('retrieves file upload locations and uploads only returned targets', async () => {
const client = { runQuery: vi.fn() };
Expand Down
Loading

0 comments on commit 9d6269c

Please sign in to comment.