diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3a4f32e43..12462e8bd 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,10 +2,10 @@ name: Release on: # PRs have their own canary releases which always run but don't publish the GitHub Action. + # Types are configured so this also runs when labels are added or removed. # To allow debugging the release process, this does not have a paths filter. pull_request: - branches: - - main + types: [opened, synchronize, reopened, labeled, unlabeled] # Pushes to main trigger a release and publish the GitHub Action. To allow the occasional update # to markdown and config files, only changes to actual source code trigger a release. diff --git a/CHANGELOG.md b/CHANGELOG.md index bd848838e..def1ce0f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,36 @@ +# v10.2.1 (Wed Jan 10 2024) + +#### 🐛 Bug Fix + +- Revert "Replace `getUploadUrls` with `uploadBuild` mutation" [#883](https://github.com/chromaui/chromatic-cli/pull/883) ([@JonathanKolnik](https://github.com/JonathanKolnik)) +- Run publish-action script as afterShipIt hook [#877](https://github.com/chromaui/chromatic-cli/pull/877) ([@ghengeveld](https://github.com/ghengeveld)) + +#### Authors: 2 + +- Gert Hengeveld ([@ghengeveld](https://github.com/ghengeveld)) +- Jono Kolnik ([@JonathanKolnik](https://github.com/JonathanKolnik)) + +--- + +# v10.2.0 (Thu Dec 21 2023) + +#### 🚀 Enhancement + +- Replace `getUploadUrls` with `uploadBuild` mutation [#876](https://github.com/chromaui/chromatic-cli/pull/876) ([@ghengeveld](https://github.com/ghengeveld)) +- Implement file hashing for to-be-uploaded files [#870](https://github.com/chromaui/chromatic-cli/pull/870) ([@ghengeveld](https://github.com/ghengeveld)) + +#### 🐛 Bug Fix + +- Allow overriding `NODE_ENV` with `STORYBOOK_NODE_ENV` [#879](https://github.com/chromaui/chromatic-cli/pull/879) ([@tmeasday](https://github.com/tmeasday)) +- Use code splitting in tsup 
CJS output [#873](https://github.com/chromaui/chromatic-cli/pull/873) ([@tmeasday](https://github.com/tmeasday)) + +#### Authors: 2 + +- Gert Hengeveld ([@ghengeveld](https://github.com/ghengeveld)) +- Tom Coleman ([@tmeasday](https://github.com/tmeasday)) + +--- + # v10.1.0 (Thu Dec 07 2023) #### 🚀 Enhancement diff --git a/node-src/lib/getEnv.ts b/node-src/lib/getEnv.ts index 54f3e3fe5..860313129 100644 --- a/node-src/lib/getEnv.ts +++ b/node-src/lib/getEnv.ts @@ -16,6 +16,7 @@ export interface Env { STORYBOOK_BUILD_TIMEOUT: number; STORYBOOK_CLI_FLAGS_BY_VERSION: typeof STORYBOOK_CLI_FLAGS_BY_VERSION; STORYBOOK_VERIFY_TIMEOUT: number; + STORYBOOK_NODE_ENV: string; } const { @@ -33,6 +34,7 @@ const { LOGGLY_CUSTOMER_TOKEN = 'b5e26204-cdc5-4c78-a9cc-c69eb7fabad3', STORYBOOK_BUILD_TIMEOUT = String(10 * 60 * 1000), STORYBOOK_VERIFY_TIMEOUT = String(3 * 60 * 1000), + STORYBOOK_NODE_ENV = 'production', } = process.env; const ENVIRONMENT_WHITELIST = [/^GERRIT/, /^TRAVIS/]; @@ -70,4 +72,5 @@ export default () => STORYBOOK_BUILD_TIMEOUT: parseInt(STORYBOOK_BUILD_TIMEOUT, 10), STORYBOOK_CLI_FLAGS_BY_VERSION, STORYBOOK_VERIFY_TIMEOUT: parseInt(STORYBOOK_VERIFY_TIMEOUT, 10), + STORYBOOK_NODE_ENV, } as Env); diff --git a/node-src/lib/upload.ts b/node-src/lib/upload.ts index e9d354c93..8d2a01a1d 100644 --- a/node-src/lib/upload.ts +++ b/node-src/lib/upload.ts @@ -4,6 +4,10 @@ import { uploadZip, waitForUnpack } from './uploadZip'; import { uploadFiles } from './uploadFiles'; import { maxFileCountExceeded } from '../ui/messages/errors/maxFileCountExceeded'; import { maxFileSizeExceeded } from '../ui/messages/errors/maxFileSizeExceeded'; +import skippingEmptyFiles from '../ui/messages/warnings/skippingEmptyFiles'; + +// This limit is imposed by the uploadBuild mutation +const MAX_FILES_PER_REQUEST = 1000; const UploadBuildMutation = ` mutation UploadBuildMutation($buildId: ObjID!, $files: [FileUploadInput!]!, $zip: Boolean) { @@ -80,63 +84,94 @@ export async function 
uploadBuild( onError?: (error: Error, path?: string) => void; } = {} ) { - const { uploadBuild } = await ctx.client.runQuery( - UploadBuildMutation, - { - buildId: ctx.announcedBuild.id, - files: files.map(({ contentHash, contentLength, targetPath }) => ({ - contentHash, - contentLength, - filePath: targetPath, - })), - zip: ctx.options.zip, - } - ); + ctx.uploadedBytes = 0; + ctx.uploadedFiles = 0; - if (uploadBuild.userErrors.length) { - uploadBuild.userErrors.forEach((e) => { - if (e.__typename === 'MaxFileCountExceededError') { - ctx.log.error(maxFileCountExceeded(e)); - } else if (e.__typename === 'MaxFileSizeExceededError') { - ctx.log.error(maxFileSizeExceeded(e)); - } else { - ctx.log.error(e.message); + const targets: (TargetInfo & FileDesc)[] = []; + let zipTarget: (TargetInfo & { sentinelUrl: string }) | undefined; + + const batches = files.reduce((acc, file, fileIndex) => { + const batchIndex = Math.floor(fileIndex / MAX_FILES_PER_REQUEST); + if (!acc[batchIndex]) acc[batchIndex] = []; + acc[batchIndex].push(file); + return acc; + }, []); + + // The uploadBuild mutation has to run in batches to avoid hitting request/response payload limits + // or running out of memory. These run sequentially to avoid too many concurrent requests. + // The uploading itself still happens without batching, since it has its own concurrency limiter. 
+ for (const [index, batch] of batches.entries()) { + ctx.log.debug(`Running uploadBuild batch ${index + 1} / ${batches.length}`); + + const { uploadBuild } = await ctx.client.runQuery( + UploadBuildMutation, + { + buildId: ctx.announcedBuild.id, + files: batch.map(({ contentHash, contentLength, targetPath }) => ({ + contentHash, + contentLength, + filePath: targetPath, + })), + zip: ctx.options.zip, } - }); - return options.onError?.(new Error('Upload rejected due to user error')); - } + ); - const targets = uploadBuild.info.targets.map((target) => { - const file = files.find((f) => f.targetPath === target.filePath); - return { ...file, ...target }; - }); + if (uploadBuild.userErrors.length) { + uploadBuild.userErrors.forEach((e) => { + if (e.__typename === 'MaxFileCountExceededError') { + ctx.log.error(maxFileCountExceeded(e)); + } else if (e.__typename === 'MaxFileSizeExceededError') { + ctx.log.error(maxFileSizeExceeded(e)); + } else { + ctx.log.error(e.message); + } + }); + return options.onError?.(new Error('Upload rejected due to user error')); + } + + targets.push( + ...uploadBuild.info.targets.map((target) => { + const file = batch.find((f) => f.targetPath === target.filePath); + return { ...file, ...target }; + }) + ); + zipTarget = uploadBuild.info.zipTarget; + } if (!targets.length) { ctx.log.debug('No new files to upload, continuing'); - return options.onComplete?.(0, 0); + return; } - options.onStart?.(); + // Uploading zero-length files is valid, so this might add up to 0. 
+ const totalBytes = targets.reduce((sum, { contentLength }) => sum + contentLength, 0); - const total = targets.reduce((acc, { contentLength }) => acc + contentLength, 0); - if (uploadBuild.info.zipTarget) { + if (zipTarget) { try { const { path, size } = await makeZipFile(ctx, targets); - const compressionRate = (total - size) / total; + const compressionRate = totalBytes && (totalBytes - size) / totalBytes; ctx.log.debug(`Compression reduced upload size by ${Math.round(compressionRate * 100)}%`); - const target = { ...uploadBuild.info.zipTarget, contentLength: size, localPath: path }; + const target = { ...zipTarget, contentLength: size, localPath: path }; await uploadZip(ctx, target, (progress) => options.onProgress?.(progress, size)); await waitForUnpack(ctx, target.sentinelUrl); - return options.onComplete?.(size, targets.length); + ctx.uploadedBytes += size; + ctx.uploadedFiles += targets.length; + return; } catch (err) { ctx.log.debug({ err }, 'Error uploading zip, falling back to uploading individual files'); } } try { - await uploadFiles(ctx, targets, (progress) => options.onProgress?.(progress, total)); - return options.onComplete?.(total, targets.length); + const nonEmptyFiles = targets.filter(({ contentLength }) => contentLength > 0); + if (nonEmptyFiles.length !== targets.length) { + const emptyFiles = targets.filter(({ contentLength }) => contentLength === 0); + ctx.log.warn(skippingEmptyFiles({ emptyFiles })); + } + await uploadFiles(ctx, nonEmptyFiles, (progress) => options.onProgress?.(progress, totalBytes)); + ctx.uploadedBytes += totalBytes; + ctx.uploadedFiles += nonEmptyFiles.length; } catch (e) { return options.onError?.(e, files.some((f) => f.localPath === e.message) && e.message); } diff --git a/node-src/tasks/build.test.ts b/node-src/tasks/build.test.ts index c5b59741d..ea466c8db 100644 --- a/node-src/tasks/build.test.ts +++ b/node-src/tasks/build.test.ts @@ -3,11 +3,16 @@ import mockfs from 'mock-fs'; import { afterEach, describe, expect, 
it, vi } from 'vitest';
 
 import { buildStorybook, setSourceDir, setBuildCommand } from './build';
+import { beforeEach } from 'vitest';
 
 vi.mock('execa');
 
 const command = vi.mocked(execaCommand);
 
+beforeEach(() => {
+  command.mockClear();
+});
+
 afterEach(() => {
   mockfs.restore();
 });
@@ -132,4 +137,32 @@ describe('buildStorybook', () => {
     await expect(buildStorybook(ctx)).rejects.toThrow('Command failed');
     expect(ctx.log.error).toHaveBeenCalledWith(expect.stringContaining('Operation timed out'));
   });
+
+  it('passes NODE_ENV=production', async () => {
+    const ctx = {
+      buildCommand: 'npm run build:storybook --script-args',
+      env: { STORYBOOK_BUILD_TIMEOUT: 1000 },
+      log: { debug: vi.fn() },
+      options: { storybookLogFile: 'build-storybook.log' },
+    } as any;
+    await buildStorybook(ctx);
+    expect(command).toHaveBeenCalledWith(
+      ctx.buildCommand,
+      expect.objectContaining({ env: { NODE_ENV: 'production'} })
+    );
+  });
+
+  it('allows overriding NODE_ENV with STORYBOOK_NODE_ENV', async () => {
+    const ctx = {
+      buildCommand: 'npm run build:storybook --script-args',
+      env: { STORYBOOK_BUILD_TIMEOUT: 1000, STORYBOOK_NODE_ENV: 'test' },
+      log: { debug: vi.fn() },
+      options: { storybookLogFile: 'build-storybook.log' },
+    } as any;
+    await buildStorybook(ctx);
+    expect(command).toHaveBeenCalledWith(
+      ctx.buildCommand,
+      expect.objectContaining({ env: { NODE_ENV: 'test'} })
+    );
+  });
 });
diff --git a/node-src/tasks/build.ts b/node-src/tasks/build.ts
index 6a17e1d6a..64a7236da 100644
--- a/node-src/tasks/build.ts
+++ b/node-src/tasks/build.ts
@@ -67,7 +67,7 @@ export const buildStorybook = async (ctx: Context) => {
     const subprocess = execaCommand(ctx.buildCommand, {
       stdio: [null, logFile, logFile],
       signal,
-      env: { NODE_ENV: 'production' },
+      env: { NODE_ENV: ctx.env.STORYBOOK_NODE_ENV || 'production' },
     });
     await Promise.race([subprocess, timeoutAfter(ctx.env.STORYBOOK_BUILD_TIMEOUT)]);
   } catch (e) {
diff --git a/node-src/tasks/upload.test.ts 
b/node-src/tasks/upload.test.ts
index 792ddd66f..aac306e68 100644
--- a/node-src/tasks/upload.test.ts
+++ b/node-src/tasks/upload.test.ts
@@ -404,6 +404,96 @@ describe('uploadStorybook', () => {
     });
   });
 
+  it('batches calls to uploadBuild mutation', async () => {
+    const client = { runQuery: vi.fn() };
+    client.runQuery.mockReturnValueOnce({
+      uploadBuild: {
+        info: {
+          targets: Array.from({ length: 1000 }, (_, i) => ({
+            contentType: 'application/javascript',
+            filePath: `${i}.js`,
+            formAction: `https://s3.amazonaws.com/presigned?${i}.js`,
+            formFields: {},
+          })),
+        },
+        userErrors: [],
+      },
+    });
+    client.runQuery.mockReturnValueOnce({
+      uploadBuild: {
+        info: {
+          targets: [
+            {
+              contentType: 'application/javascript',
+              filePath: `1000.js`,
+              formAction: `https://s3.amazonaws.com/presigned?1000.js`,
+              formFields: {},
+            },
+          ],
+        },
+        userErrors: [],
+      },
+    });
+
+    createReadStreamMock.mockReturnValue({ pipe: vi.fn((x) => x) } as any);
+    http.fetch.mockReturnValue({ ok: true });
+
+    const fileInfo = {
+      lengths: Array.from({ length: 1001 }, (_, i) => ({ knownAs: `${i}.js`, contentLength: i })),
+      paths: Array.from({ length: 1001 }, (_, i) => `${i}.js`),
+      total: Array.from({ length: 1001 }, (_, i) => i).reduce((a, v) => a + v),
+    };
+    const ctx = {
+      client,
+      env,
+      log,
+      http,
+      sourceDir: '/static/',
+      options: {},
+      fileInfo,
+      announcedBuild: { id: '1' },
+    } as any;
+    await uploadStorybook(ctx, {} as any);
+
+    expect(client.runQuery).toHaveBeenCalledTimes(2);
+    expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), {
+      buildId: '1',
+      files: Array.from({ length: 1000 }, (_, i) => ({
+        contentHash: undefined,
+        contentLength: i,
+        filePath: `${i}.js`,
+      })),
+    });
+    expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), {
+      buildId: '1',
+      files: [{ contentHash: undefined, contentLength: 1000, filePath: `1000.js` }], // 0-based index makes this file #1001
+    });
+
+    // Empty files 
are not uploaded + expect(http.fetch).not.toHaveBeenCalledWith( + 'https://s3.amazonaws.com/presigned?0.js', + expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), + expect.objectContaining({ retries: 0 }) + ); + expect(http.fetch).toHaveBeenCalledWith( + 'https://s3.amazonaws.com/presigned?1.js', + expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), + expect.objectContaining({ retries: 0 }) + ); + expect(http.fetch).toHaveBeenCalledWith( + 'https://s3.amazonaws.com/presigned?999.js', + expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), + expect.objectContaining({ retries: 0 }) + ); + expect(http.fetch).toHaveBeenCalledWith( + 'https://s3.amazonaws.com/presigned?1000.js', + expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), + expect.objectContaining({ retries: 0 }) + ); + expect(ctx.uploadedBytes).toBe(500500); + expect(ctx.uploadedFiles).toBe(1000); + }); + describe('with file hashes', () => { it('retrieves file upload locations and uploads only returned targets', async () => { const client = { runQuery: vi.fn() }; diff --git a/node-src/tasks/upload.ts b/node-src/tasks/upload.ts index cacb17729..de8e9485b 100644 --- a/node-src/tasks/upload.ts +++ b/node-src/tasks/upload.ts @@ -13,7 +13,6 @@ import { dryRun, validating, invalid, - preparing, tracing, bailed, traced, @@ -202,33 +201,25 @@ export const calculateFileHashes = async (ctx: Context, task: Task) => { export const uploadStorybook = async (ctx: Context, task: Task) => { if (ctx.skip) return; - transitionTo(preparing)(ctx, task); + transitionTo(starting)(ctx, task); - const files = ctx.fileInfo.paths - .map((path) => ({ - ...(ctx.fileInfo.hashes && { contentHash: ctx.fileInfo.hashes[path] }), - contentLength: ctx.fileInfo.lengths.find(({ knownAs }) => knownAs === path).contentLength, - localPath: join(ctx.sourceDir, path), - targetPath: path, - })) - .filter((f) => f.contentLength); + const files = 
ctx.fileInfo.paths.map((path) => ({ + ...(ctx.fileInfo.hashes && { contentHash: ctx.fileInfo.hashes[path] }), + contentLength: ctx.fileInfo.lengths.find(({ knownAs }) => knownAs === path).contentLength, + localPath: join(ctx.sourceDir, path), + targetPath: path, + })); await uploadBuild(ctx, files, { - onStart: () => (task.output = starting().output), onProgress: throttle( (progress, total) => { const percentage = Math.round((progress / total) * 100); task.output = uploading({ percentage }).output; - ctx.options.experimental_onTaskProgress?.({ ...ctx }, { progress, total, unit: 'bytes' }); }, // Avoid spamming the logs with progress updates in non-interactive mode ctx.options.interactive ? 100 : ctx.env.CHROMATIC_OUTPUT_INTERVAL ), - onComplete: (uploadedBytes: number, uploadedFiles: number) => { - ctx.uploadedBytes = uploadedBytes; - ctx.uploadedFiles = uploadedFiles; - }, onError: (error: Error, path?: string) => { throw path === error.message ? new Error(failed({ path }).output) : error; }, diff --git a/node-src/ui/messages/warnings/skippingEmptyFiles.stories.ts b/node-src/ui/messages/warnings/skippingEmptyFiles.stories.ts new file mode 100644 index 000000000..07a66c3bb --- /dev/null +++ b/node-src/ui/messages/warnings/skippingEmptyFiles.stories.ts @@ -0,0 +1,14 @@ +import skippingEmptyFiles from './skippingEmptyFiles'; + +export default { + title: 'CLI/Messages/Warnings', +}; + +export const SkippingEmptyFiles = () => + skippingEmptyFiles({ + emptyFiles: Array.from({ length: 3 }, (_, i) => ({ + contentLength: 0, + localPath: `file${i}.js`, + targetPath: `file${i}.js`, + })), + }); diff --git a/node-src/ui/messages/warnings/skippingEmptyFiles.ts b/node-src/ui/messages/warnings/skippingEmptyFiles.ts new file mode 100644 index 000000000..4601ee271 --- /dev/null +++ b/node-src/ui/messages/warnings/skippingEmptyFiles.ts @@ -0,0 +1,17 @@ +import chalk from 'chalk'; +import pluralize from 'pluralize'; +import { dedent } from 'ts-dedent'; + +import { warning } from 
'../../components/icons'; +import { FileDesc } from '../../../types'; + +export default ({ emptyFiles }: { emptyFiles: FileDesc[] }) => { + const listing = chalk`\n{dim →} ${emptyFiles.map((f) => f.targetPath).join(chalk`\n{dim →} `)}`; + return dedent(chalk` + ${warning} {bold Not uploading empty files} + Found ${pluralize('empty files', emptyFiles.length, true)} in your built Storybook:${listing} + Uploading empty files is not supported except when using a zip file. + You can ignore this warning if your Storybook doesn't need these files. + Otherwise, configure Chromatic with the {bold zip} option. + `); +}; diff --git a/node-src/ui/tasks/upload.stories.ts b/node-src/ui/tasks/upload.stories.ts index b6264d9a2..998c3c314 100644 --- a/node-src/ui/tasks/upload.stories.ts +++ b/node-src/ui/tasks/upload.stories.ts @@ -6,7 +6,6 @@ import { hashing, initial, invalid, - preparing, starting, success, traced, @@ -51,8 +50,6 @@ export const Traced = () => traced({ onlyStoryFiles: Array.from({ length: 5 }) } export const Hashing = () => hashing(); -export const Preparing = () => preparing(); - export const Starting = () => starting(); export const Uploading = () => uploading({ percentage: 42 }); diff --git a/node-src/ui/tasks/upload.ts b/node-src/ui/tasks/upload.ts index 8fc016689..52d5aaba4 100644 --- a/node-src/ui/tasks/upload.ts +++ b/node-src/ui/tasks/upload.ts @@ -81,12 +81,6 @@ export const hashing = () => ({ output: `Calculating file hashes`, }); -export const preparing = () => ({ - status: 'pending', - title: 'Publishing your built Storybook', - output: `Retrieving target location`, -}); - export const starting = () => ({ status: 'pending', title: 'Publishing your built Storybook', diff --git a/package.json b/package.json index a3d1327a6..77546faf6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "chromatic", - "version": "10.1.0", + "version": "10.2.1", "description": "Automate visual testing across browsers. Gather UI feedback. 
Versioned documentation.", "keywords": [ "storybook-addon", @@ -76,7 +76,7 @@ "lint": "yarn lint:js .storybook bin-src node-src test-stories ./isChromatic.js ./isChromatic.mjs", "lint:js": "cross-env NODE_ENV=production eslint --fix --cache --cache-location=.cache/eslint --ext .js,.json,.mjs,.ts,.cjs --report-unused-disable-directives", "lint:package": "sort-package-json", - "release": "yarn run build && auto shipit && yarn run publish-action", + "release": "yarn run build && auto shipit", "publish-action": "./scripts/publish-action.mjs", "trace": "./dist/bin.js trace", "trim-stats": "./dist/bin.js trim-stats-file", @@ -108,6 +108,7 @@ "@actions/core": "^1.10.0", "@actions/github": "^5.0.0", "@antfu/ni": "^0.21.5", + "@auto-it/exec": "^11.0.4", "@discoveryjs/json-ext": "^0.5.7", "@storybook/addon-essentials": "^6.5.6", "@storybook/builder-webpack5": "^6.5.6", @@ -205,7 +206,13 @@ }, "plugins": [ "npm", - "released" + "released", + [ + "exec", + { + "afterShipIt": "yarn run publish-action" + } + ] ] }, "docs": "https://www.chromatic.com/docs/cli", diff --git a/yarn.lock b/yarn.lock index d2dd8ecfe..a551a58c1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -97,6 +97,18 @@ typescript-memoize "^1.0.0-alpha.3" url-join "^4.0.0" +"@auto-it/exec@^11.0.4": + version "11.0.4" + resolved "https://registry.yarnpkg.com/@auto-it/exec/-/exec-11.0.4.tgz#97efbc9fbc14f2b777ef874c029a40fda8be7a3e" + integrity sha512-n/kB8k/dwci9UeTsLCs8mig8ZQuS2dMHcHlsKXBeM18EmcbGHxDxyaPeMHJsnSlfKNmGtCXhHBHBhGVP18Ia+w== + dependencies: + "@auto-it/core" "11.0.4" + endent "^2.1.0" + fp-ts "^2.5.3" + fromentries "^1.2.0" + io-ts "^2.1.2" + tslib "2.1.0" + "@auto-it/npm@11.0.4": version "11.0.4" resolved "https://registry.yarnpkg.com/@auto-it/npm/-/npm-11.0.4.tgz#67b6bf78fb6d3f814365796f15541b014db73fcb"