diff --git a/.changeset/strong-rats-confess.md b/.changeset/strong-rats-confess.md new file mode 100644 index 000000000000..d970e7ab371c --- /dev/null +++ b/.changeset/strong-rats-confess.md @@ -0,0 +1,5 @@ +--- +"wrangler": minor +--- + +feat: support outputting ND-JSON files via an environment variable diff --git a/packages/wrangler/src/__tests__/output.test.ts b/packages/wrangler/src/__tests__/output.test.ts new file mode 100644 index 000000000000..9898ef7260b5 --- /dev/null +++ b/packages/wrangler/src/__tests__/output.test.ts @@ -0,0 +1,245 @@ +import { readdirSync, readFileSync } from "node:fs"; +import { join } from "node:path"; +import { clearOutputFilePath, writeOutput } from "../output"; +import { runInTempDir } from "./helpers/run-in-tmp"; +import type { OutputEntry } from "../output"; + +const originalProcessEnv = process.env; +const { + WRANGLER_OUTPUT_FILE_DIRECTORY: _, + WRANGLER_OUTPUT_FILE_PATH: __, + ...processEnvNoVars +} = originalProcessEnv; + +describe("writeOutput()", () => { + runInTempDir({ homedir: "home" }); + afterEach(clearOutputFilePath); + + it("should do nothing with no env vars set", () => { + try { + process.env = processEnvNoVars; + writeOutput({ + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }); + // No files written + expect(readdirSync(".")).toEqual(["home"]); + } finally { + process.env = originalProcessEnv; + } + }); + + it("should write to the file given by WRANGLER_OUTPUT_FILE_PATH", () => { + try { + const WRANGLER_OUTPUT_FILE_PATH = "output.json"; + process.env = { ...processEnvNoVars, WRANGLER_OUTPUT_FILE_PATH }; + writeOutput({ + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }); + const outputFile = readFileSync(WRANGLER_OUTPUT_FILE_PATH, "utf8"); + expect(outputFile).toContainEntries([ + { + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }, + ]); + } finally { + process.env = originalProcessEnv; + } + }); + + it("should write to the file given by WRANGLER_OUTPUT_FILE_PATH, ignoring WRANGLER_OUTPUT_FILE_DIRECTORY", () => { + try { + const WRANGLER_OUTPUT_FILE_PATH = "output.json"; + process.env = { + ...processEnvNoVars, + WRANGLER_OUTPUT_FILE_PATH, + WRANGLER_OUTPUT_FILE_DIRECTORY: ".", + }; + writeOutput({ + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }); + const outputFile = readFileSync(WRANGLER_OUTPUT_FILE_PATH, "utf8"); + expect(outputFile).toContainEntries([ + { + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }, + ]); + } finally { + process.env = originalProcessEnv; + } + }); + + it("should write multiple entries to the file given by WRANGLER_OUTPUT_FILE_PATH", () => { + try { + const WRANGLER_OUTPUT_FILE_PATH = "output.json"; + process.env = { ...processEnvNoVars, WRANGLER_OUTPUT_FILE_PATH }; + writeOutput({ + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }); + writeOutput({ + type: "deployment", + version: 1, + worker_name: "Worker", + worker_tag: "ABCDE12345", + deployment_id: "1234", + }); + + const outputFile = readFileSync(WRANGLER_OUTPUT_FILE_PATH, 
"utf8"); + expect(outputFile).toContainEntries([ + { + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }, + { + type: "deployment", + version: 1, + worker_name: "Worker", + worker_tag: "ABCDE12345", + deployment_id: "1234", + }, + ]); + } finally { + process.env = originalProcessEnv; + } + }); + + it("should write to a random file in WRANGLER_OUTPUT_FILE_DIRECTORY", () => { + try { + process.env = { + ...processEnvNoVars, + WRANGLER_OUTPUT_FILE_DIRECTORY: "output", + }; + writeOutput({ + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }); + + const outputFilePaths = readdirSync("output"); + expect(outputFilePaths.length).toEqual(1); + expect(outputFilePaths[0]).toMatch(/wrangler-output-.+\.json/); + const outputFile = readFileSync( + join("output", outputFilePaths[0]), + "utf8" + ); + expect(outputFile).toContainEntries([ + { + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }, + ]); + } finally { + process.env = originalProcessEnv; + } + }); + + it("should write multiple entries to the same random file in WRANGLER_OUTPUT_FILE_DIRECTORY", () => { + try { + process.env = { + ...processEnvNoVars, + WRANGLER_OUTPUT_FILE_DIRECTORY: "output", + }; + writeOutput({ + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }); + writeOutput({ + type: "deployment", + version: 1, + worker_name: "Worker", + worker_tag: "ABCDE12345", + deployment_id: "1234", + }); + + const outputFilePaths = readdirSync("output"); + expect(outputFilePaths.length).toEqual(1); + expect(outputFilePaths[0]).toMatch(/wrangler-output-.+\.json/); + const outputFile = readFileSync( + join("output", outputFilePaths[0]), + "utf8" + ); + expect(outputFile).toContainEntries([ + { + type: "wrangler-session", + version: 1, + wrangler_version: "0.0.0.0", + command_line_args: ["--help"], + log_file_path: "some/log/path.log", + }, + { + type: "deployment", + version: 1, + worker_name: "Worker", + worker_tag: "ABCDE12345", + deployment_id: "1234", + }, + ]); + } finally { + process.env = originalProcessEnv; + } + }); +}); + +expect.extend({ + toContainEntries(received: string, expected: OutputEntry[]) { + const actual = received + .trim() + .split("\n") + .map((line) => JSON.parse(line)); + + const stamped = expected.map((entry) => ({ + ...entry, + timestamp: expect.any(String), + })); + + return { + pass: this.equals(actual, stamped), + message: () => `Entries are${this.isNot ? 
"" : " not"} as expected.`, + actual, + expected: stamped, + }; + }, +}); + +interface CustomMatchers { + toContainEntries: (expected: OutputEntry[]) => unknown; +} + +declare module "vitest" { + interface Assertion extends CustomMatchers {} + interface AsymmetricMatchersContaining extends CustomMatchers {} +} diff --git a/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts b/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts index 14f88121f6cb..c4b385f057d6 100644 --- a/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts +++ b/packages/wrangler/src/__tests__/versions/versions.deploy.test.ts @@ -44,7 +44,8 @@ describe("versions deploy", () => { mswListVersions, mswGetVersion(), mswPostNewDeployment, - mswPatchNonVersionedScriptSettings + mswPatchNonVersionedScriptSettings, + ...mswSuccessDeploymentScriptMetadata ); }); diff --git a/packages/wrangler/src/deploy/deploy.ts b/packages/wrangler/src/deploy/deploy.ts index 00154b704939..aeb6537ae08f 100644 --- a/packages/wrangler/src/deploy/deploy.ts +++ b/packages/wrangler/src/deploy/deploy.ts @@ -306,35 +306,44 @@ Update them to point to this script instead?`; return domains.map((domain) => renderRoute(domain)); } -export default async function deploy( - props: Props -): Promise<{ sourceMapSize?: number }> { +export default async function deploy(props: Props): Promise<{ + sourceMapSize?: number; + deploymentId: string | null; + workerTag: string | null; +}> { // TODO: warn if git/hg has uncommitted changes const { config, accountId, name } = props; + let workerTag: string | null = null; + let deploymentId: string | null = null; + if (!props.dispatchNamespace && accountId && name) { try { - const serviceMetaData = await fetchResult( - `/accounts/${accountId}/workers/services/${name}` - ); - const { default_environment } = serviceMetaData as { + const serviceMetaData = await fetchResult<{ default_environment: { - script: { last_deployed_from: "dash" | "wrangler" | "api" }; + script: { + tag: string; + last_deployed_from: "dash" | "wrangler" | "api"; + }; }; - }; + }>(`/accounts/${accountId}/workers/services/${name}`); + const { + default_environment: { script }, + } = serviceMetaData; + workerTag = script.tag; - if (default_environment.script.last_deployed_from === "dash") { + if (script.last_deployed_from === "dash") { logger.warn( `You are about to publish a Workers Service that was last published via the Cloudflare Dashboard.\nEdits that have been made via the dashboard will be overridden by your local code and config.` ); if (!(await confirm("Would you like to continue?"))) { - return {}; + return { deploymentId, workerTag }; } - } else if (default_environment.script.last_deployed_from === "api") { + } else if (script.last_deployed_from === "api") { logger.warn( `You are about to publish a Workers Service that was last updated via the script API.\nEdits that have been made via the script API will be overridden by your local code and config.` ); if (!(await confirm("Would you like to continue?"))) { - return {}; + return { deploymentId, workerTag }; } } } catch (e) { @@ -436,15 +445,13 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m ? 
`/accounts/${accountId}/workers/services/${scriptName}/environments/${envName}` : `/accounts/${accountId}/workers/scripts/${scriptName}`; - let deploymentId: string | null = null; - const { format } = props.entry; if (!props.dispatchNamespace && prod && accountId && scriptName) { const yes = await confirmLatestDeploymentOverwrite(accountId, scriptName); if (!yes) { cancel("Aborting deploy..."); - return {}; + return { deploymentId, workerTag }; } } @@ -828,7 +835,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m if (props.dryRun) { logger.log(`--dry-run: exiting now.`); - return {}; + return { deploymentId, workerTag }; } assert(accountId, "Missing accountId"); @@ -839,7 +846,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m // Early exit for WfP since it doesn't need the below code if (props.dispatchNamespace !== undefined) { deployWfpUserWorker(props.dispatchNamespace, deploymentId); - return {}; + return { deploymentId, workerTag }; } // deploy triggers @@ -850,7 +857,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m logVersionIdChange(); - return { sourceMapSize }; + return { sourceMapSize, deploymentId, workerTag }; } function deployWfpUserWorker( diff --git a/packages/wrangler/src/deploy/index.ts b/packages/wrangler/src/deploy/index.ts index 326e6141ea14..0ab6cb74f5f7 100644 --- a/packages/wrangler/src/deploy/index.ts +++ b/packages/wrangler/src/deploy/index.ts @@ -11,6 +11,7 @@ import { } from "../index"; import { logger } from "../logger"; import * as metrics from "../metrics"; +import { writeOutput } from "../output"; import { getLegacyAssetPaths, getSiteAssetPaths } from "../sites"; import { requireAuth } from "../user"; import { collectKeyValues } from "../utils/collectKeyValues"; @@ -324,10 +325,11 @@ export async function deployHandler( } const beforeUpload = Date.now(); - const { sourceMapSize } = await deploy({ + const name = getScriptName(args, config); + const { sourceMapSize, deploymentId, workerTag } = await deploy({ config, accountId, - name: getScriptName(args, config), + name, rules: getRules(config), entry, env: args.env, @@ -361,6 +363,14 @@ export async function deployHandler( experimentalVersions: args.experimentalVersions, }); + writeOutput({ + type: "deployment", + version: 1, + worker_name: name ?? null, + worker_tag: workerTag, + deployment_id: deploymentId, + }); + await metrics.sendMetricsEvent( "deploy worker script", { diff --git a/packages/wrangler/src/environment-variables/factory.ts b/packages/wrangler/src/environment-variables/factory.ts index 09c08611a941..8caee778d67d 100644 --- a/packages/wrangler/src/environment-variables/factory.ts +++ b/packages/wrangler/src/environment-variables/factory.ts @@ -17,7 +17,9 @@ type VariableNames = | "WRANGLER_LOG_SANITIZE" | "WRANGLER_REVOKE_URL" | "WRANGLER_SEND_METRICS" - | "WRANGLER_TOKEN_URL"; + | "WRANGLER_TOKEN_URL" + | "WRANGLER_OUTPUT_FILE_DIRECTORY" + | "WRANGLER_OUTPUT_FILE_PATH"; type DeprecatedNames = | "CF_ACCOUNT_ID" diff --git a/packages/wrangler/src/environment-variables/misc-variables.ts b/packages/wrangler/src/environment-variables/misc-variables.ts index 67aa18d7f2c5..70da9ef73db5 100644 --- a/packages/wrangler/src/environment-variables/misc-variables.ts +++ b/packages/wrangler/src/environment-variables/misc-variables.ts @@ -58,10 +58,34 @@ export const getCloudflareApiBaseUrl = getEnvironmentVariableFactory({ : "https://api.cloudflare.com/client/v4", }); -// Should we sanitize debug logs? 
By default we do, since debug logs could be added to GitHub issues and shouldn't include sensitive information +/** + * `WRANGLER_LOG_SANITIZE` specifies whether we sanitize debug logs. + * + * By default we do, since debug logs could be added to GitHub issues and shouldn't include sensitive information. + */ export const getSanitizeLogs = getEnvironmentVariableFactory({ variableName: "WRANGLER_LOG_SANITIZE", defaultValue() { return "true"; }, }); + +/** + * `WRANGLER_OUTPUT_FILE_DIRECTORY` specifies a directory where we should write a file containing output data in ND-JSON format. + * + * If this is set a random file will be created in this directory, and certain Wrangler commands will write entries to this file. + * This is overridden by the `WRANGLER_OUTPUT_FILE_PATH` environment variable. + */ +export const getOutputFileDirectoryFromEnv = getEnvironmentVariableFactory({ + variableName: "WRANGLER_OUTPUT_FILE_DIRECTORY", +}); + +/** + * `WRANGLER_OUTPUT_FILE_PATH` specifies a path to a file where we should write output data in ND-JSON format. + * + * If this is set certain Wrangler commands will write entries to this file. + * This overrides the `WRANGLER_OUTPUT_FILE_DIRECTORY` environment variable. + */ +export const getOutputFilePathFromEnv = getEnvironmentVariableFactory({ + variableName: "WRANGLER_OUTPUT_FILE_PATH", +}); diff --git a/packages/wrangler/src/index.ts b/packages/wrangler/src/index.ts index 576a2d9356c5..2cc958785f99 100644 --- a/packages/wrangler/src/index.ts +++ b/packages/wrangler/src/index.ts @@ -42,6 +42,7 @@ import { kvBulk, kvKey, kvNamespace, registerKvSubcommands } from "./kv"; import { logBuildFailure, logger, LOGGER_LEVELS } from "./logger"; import * as metrics from "./metrics"; import { mTlsCertificateCommands } from "./mtls-certificate/cli"; +import { writeOutput } from "./output"; import { pages } from "./pages"; import { APIError, formatMessage, ParseError } from "./parse"; import { pubSubCommands } from "./pubsub/pubsub-commands"; @@ -65,6 +66,7 @@ import { logout, validateScopeKeys, } from "./user"; +import { debugLogFilepath } from "./utils/log-file"; import { vectorize } from "./vectorize/index"; import registerVersionsSubcommands from "./versions"; import registerVersionsDeploymentsSubcommands from "./versions/deployments"; @@ -253,6 +255,16 @@ export function createCLIParser(argv: string[]) { process.env[key] = value; } } + + // Write a session entry to the output file (if there is one). 
+			writeOutput({
+				type: "wrangler-session",
+				version: 1,
+				wrangler_version: wranglerVersion,
+				command_line_args: argv,
+				log_file_path: debugLogFilepath,
+			});
+
 			return true;
 		})
 		.epilogue(
diff --git a/packages/wrangler/src/metrics/metrics-config.ts b/packages/wrangler/src/metrics/metrics-config.ts
index dbc9660f28f6..e36795ca7c31 100644
--- a/packages/wrangler/src/metrics/metrics-config.ts
+++ b/packages/wrangler/src/metrics/metrics-config.ts
@@ -4,7 +4,7 @@ import path from "node:path";
 import { fetchResult } from "../cfetch";
 import { getConfigCache, saveToConfigCache } from "../config-cache";
 import { confirm } from "../dialogs";
-import { getEnvironmentVariableFactory } from "../environment-variables/factory";
+import { getWranglerSendMetricsFromEnv } from "../environment-variables/misc-variables";
 import { getGlobalWranglerConfigPath } from "../global-wrangler-config-path";
 import { CI } from "../is-ci";
 import isInteractive from "../is-interactive";
@@ -23,10 +23,6 @@ import { getAPIToken } from "../user";
 export const CURRENT_METRICS_DATE = new Date(2022, 6, 4);
 export const USER_ID_CACHE_PATH = "user-id.json";
 
-export const getWranglerSendMetricsFromEnv = getEnvironmentVariableFactory({
-	variableName: "WRANGLER_SEND_METRICS",
-});
-
 export interface MetricsConfigOptions {
 	/**
 	 * Defines whether to send metrics to Cloudflare:
diff --git a/packages/wrangler/src/output.ts b/packages/wrangler/src/output.ts
new file mode 100644
index 000000000000..b6f372a9824e
--- /dev/null
+++ b/packages/wrangler/src/output.ts
@@ -0,0 +1,105 @@
+import { randomBytes } from "node:crypto";
+import { appendFileSync } from "node:fs";
+import { resolve } from "node:path";
+import {
+	getOutputFileDirectoryFromEnv,
+	getOutputFilePathFromEnv,
+} from "./environment-variables/misc-variables";
+import { ensureDirectoryExistsSync } from "./utils/filesystem";
+
+/**
+ * Write an entry to the output file.
+ *
+ * Control whether (and where) to write this file by setting either
+ * `WRANGLER_OUTPUT_FILE_DIRECTORY` or `WRANGLER_OUTPUT_FILE_PATH`.
+ */
+export function writeOutput(entry: OutputEntry) {
+	if (outputFilePath === undefined) {
+		outputFilePath = getOutputFilePath();
+	}
+	if (outputFilePath !== null) {
+		ensureDirectoryExistsSync(outputFilePath);
+		const entryJSON = JSON.stringify({
+			...entry,
+			timestamp: new Date().toISOString(),
+		});
+		appendFileSync(outputFilePath, entryJSON + "\n");
+	}
+}
+
+// Only used internally for cleaning up tests
+export function clearOutputFilePath() {
+	outputFilePath = undefined;
+}
+
+let outputFilePath: string | null | undefined = undefined;
+function getOutputFilePath() {
+	const outputFilePathFromEnv = getOutputFilePathFromEnv();
+	if (outputFilePathFromEnv) {
+		return outputFilePathFromEnv;
+	}
+
+	const outputFileDirectoryFromEnv = getOutputFileDirectoryFromEnv();
+	if (outputFileDirectoryFromEnv) {
+		const date = new Date()
+			.toISOString()
+			.replaceAll(":", "-")
+			.replace(".", "_")
+			.replace("T", "_")
+			.replace("Z", "");
+
+		return resolve(
+			outputFileDirectoryFromEnv,
+			`wrangler-output-${date}-${randomBytes(3).toString("hex")}.json`
+		);
+	}
+
+	return null;
+}
+
+interface OutputEntryBase<T extends string> {
+	version: number;
+	type: T;
+}
+
+/**
+ * All the different types of entry you can output.
+ */
+export type OutputEntry =
+	| OutputEntrySession
+	| OutputEntryDeployment
+	| OutputEntryVersionUpload
+	| OutputEntryVersionDeployment;
+
+export type StampedOutputEntry = { timestamp: string } & OutputEntry;
+
+export interface OutputEntrySession
+	extends OutputEntryBase<"wrangler-session"> {
+	version: 1;
+	wrangler_version: string;
+	command_line_args: string[];
+	log_file_path: string;
+}
+
+export interface OutputEntryDeployment extends OutputEntryBase<"deployment"> {
+	version: 1;
+	worker_name: string | null;
+	worker_tag: string | null;
+	deployment_id: string | null;
+}
+
+export interface OutputEntryVersionUpload
+	extends OutputEntryBase<"version-upload"> {
+	version: 1;
+	worker_name: string | null;
+	worker_tag: string | null;
+	version_id: string | null;
+}
+
+export interface OutputEntryVersionDeployment
+	extends OutputEntryBase<"version-deploy"> {
+	version: 1;
+	worker_name: string | null;
+	worker_tag: string | null;
+	version_traffic: Map<string, number>;
+}
diff --git a/packages/wrangler/src/utils/filesystem.ts b/packages/wrangler/src/utils/filesystem.ts
index 786af4808f4c..390eb7d24169 100644
--- a/packages/wrangler/src/utils/filesystem.ts
+++ b/packages/wrangler/src/utils/filesystem.ts
@@ -1,3 +1,4 @@
+import { mkdirSync } from "fs";
 import { mkdir } from "fs/promises";
 import path from "path";
 
@@ -6,3 +7,9 @@ export async function ensureDirectoryExists(filepath: string) {
 
 	await mkdir(dirpath, { recursive: true });
 }
+
+export function ensureDirectoryExistsSync(filepath: string) {
+	const dirpath = path.dirname(filepath);
+
+	mkdirSync(dirpath, { recursive: true });
+}
diff --git a/packages/wrangler/src/versions/deploy.ts b/packages/wrangler/src/versions/deploy.ts
index 5dd27a3a29f7..3f3104cfd2ef 100644
--- a/packages/wrangler/src/versions/deploy.ts
+++ b/packages/wrangler/src/versions/deploy.ts
@@ -8,11 +8,13 @@ import {
 	leftT,
 	spinnerWhile,
 } from "@cloudflare/cli/interactive";
+import { fetchResult } from "../cfetch";
 import { findWranglerToml, readConfig } from "../config";
 import { UserError } from "../errors";
 import { CI } from "../is-ci";
 import isInteractive from "../is-interactive";
 import * as metrics from "../metrics";
+import { writeOutput } from "../output";
 import { APIError } from "../parse";
 import { printWranglerBanner } from "../update-check";
 import { requireAuth } from "../user";
@@ -203,6 +205,23 @@ export async function versionsDeployHandler(args: VersionsDeployArgs) {
 	cli.success(
 		`Deployed ${workerName} ${trafficSummaryString} (${elapsedString})`
 	);
+
+	let workerTag: string | null = null;
+	try {
+		const serviceMetaData = await fetchResult<{
+			default_environment: { script: { tag: string } };
+		}>(`/accounts/${accountId}/workers/services/${workerName}`);
+		workerTag = serviceMetaData.default_environment.script.tag;
+	} catch {
+		// If the fetch fails then we just output a null for the workerTag.
+ } + writeOutput({ + type: "version-deploy", + version: 1, + worker_name: workerName, + worker_tag: workerTag, + version_traffic: confirmedVersionTraffic, + }); } function getConfig( diff --git a/packages/wrangler/src/versions/index.ts b/packages/wrangler/src/versions/index.ts index a541b3a13bc6..15b0cc890bdb 100644 --- a/packages/wrangler/src/versions/index.ts +++ b/packages/wrangler/src/versions/index.ts @@ -11,6 +11,7 @@ import { } from "../index"; import { logger } from "../logger"; import * as metrics from "../metrics"; +import { writeOutput } from "../output"; import { requireAuth } from "../user"; import { collectKeyValues } from "../utils/collectKeyValues"; import { versionsDeployHandler, versionsDeployOptions } from "./deploy"; @@ -241,12 +242,13 @@ export async function versionsUploadHandler( const cliAlias = collectKeyValues(args.alias); const accountId = args.dryRun ? undefined : await requireAuth(config); + const name = getScriptName(args, config); await standardPricingWarning(config); - await versionsUpload({ + const { versionId, workerTag } = await versionsUpload({ config, accountId, - name: getScriptName(args, config), + name, rules: getRules(config), entry, legacyEnv: isLegacyEnv(config), @@ -275,6 +277,14 @@ export async function versionsUploadHandler( tag: args.tag, message: args.message, }); + + writeOutput({ + type: "version-upload", + version: 1, + worker_name: name ?? null, + worker_tag: workerTag, + version_id: versionId, + }); } export default function registerVersionsSubcommands( diff --git a/packages/wrangler/src/versions/upload.ts b/packages/wrangler/src/versions/upload.ts index b80b54a289fe..a68a14a15a57 100644 --- a/packages/wrangler/src/versions/upload.ts +++ b/packages/wrangler/src/versions/upload.ts @@ -108,33 +108,50 @@ function errIsStartupErr(err: unknown): err is ParseError & { code: 10021 } { return false; } -export default async function versionsUpload(props: Props): Promise { +export default async function versionsUpload( + props: Props +): Promise<{ versionId: string | null; workerTag: string | null }> { // TODO: warn if git/hg has uncommitted changes const { config, accountId, name } = props; + let versionId: string | null = null; + let workerTag: string | null = null; + if (accountId && name) { try { - const serviceMetaData = await fetchResult( - `/accounts/${accountId}/workers/services/${name}` // TODO(consider): should this be a /versions endpoint? - ); - const { default_environment } = serviceMetaData as { + const { + default_environment: { script }, + } = await fetchResult<{ default_environment: { - script: { last_deployed_from: "dash" | "wrangler" | "api" }; + script: { + tag: string; + last_deployed_from: "dash" | "wrangler" | "api"; + }; }; - }; + }>( + `/accounts/${accountId}/workers/services/${name}` // TODO(consider): should this be a /versions endpoint? 
+ ); + + workerTag = script.tag; - if (default_environment.script.last_deployed_from === "dash") { + if (script.last_deployed_from === "dash") { logger.warn( `You are about to upload a Worker Version that was last published via the Cloudflare Dashboard.\nEdits that have been made via the dashboard will be overridden by your local code and config.` ); if (!(await confirm("Would you like to continue?"))) { - return; + return { + versionId, + workerTag, + }; } - } else if (default_environment.script.last_deployed_from === "api") { + } else if (script.last_deployed_from === "api") { logger.warn( `You are about to upload a Workers Version that was last updated via the API.\nEdits that have been made via the API will be overridden by your local code and config.` ); if (!(await confirm("Would you like to continue?"))) { - return; + return { + versionId, + workerTag, + }; } } } catch (e) { @@ -454,6 +471,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m bindingsPrinted = true; printBindings({ ...withoutStaticAssets, vars: maskedVars }); logger.log("Worker Version ID:", result.id); + versionId = result.id; } catch (err) { if (!bindingsPrinted) { printBindings({ ...withoutStaticAssets, vars: maskedVars }); @@ -507,7 +525,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m if (props.dryRun) { logger.log(`--dry-run: exiting now.`); - return; + return { versionId, workerTag }; } if (!accountId) { throw new UserError("Missing accountId"); @@ -532,6 +550,8 @@ Changes to non-versioned settings (config properties 'logpush' or 'tail_consumer Changes to triggers (routes, custom domains, cron schedules, etc) must be applied with the command ${cmdTriggersDeploy} `) ); + + return { versionId, workerTag }; } export function helpIfErrorIsSizeOrScriptStartup(
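For context, a minimal sketch of how a CI step might consume the ND-JSON file that these changes produce. The `readWranglerOutput` helper and the `output.json` fallback path are illustrative assumptions rather than part of the patch; the entry shapes follow the `OutputEntry`/`StampedOutputEntry` types added in `packages/wrangler/src/output.ts` above.

```ts
import { readFileSync } from "node:fs";

// Stamped entry shape, mirroring `StampedOutputEntry` from output.ts: every
// line of the ND-JSON file is one JSON object with a `type`, a `version`,
// a `timestamp`, and type-specific fields.
interface StampedEntry {
	type: string;
	version: number;
	timestamp: string;
	[key: string]: unknown;
}

// Parse the ND-JSON output file: one JSON object per non-empty line.
function readWranglerOutput(filePath: string): StampedEntry[] {
	return readFileSync(filePath, "utf8")
		.split("\n")
		.filter((line) => line.trim().length > 0)
		.map((line) => JSON.parse(line) as StampedEntry);
}

// Example: after `WRANGLER_OUTPUT_FILE_PATH=output.json wrangler deploy`,
// read back the deployment ID recorded by the "deployment" entry.
const entries = readWranglerOutput(
	process.env.WRANGLER_OUTPUT_FILE_PATH ?? "output.json"
);
const deployment = entries.find((entry) => entry.type === "deployment");
console.log(deployment?.deployment_id ?? "no deployment entry found");
```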