Skip to content

Commit

Permalink
feat: test coverage
Browse files Browse the repository at this point in the history
  • Loading branch information
erikburt committed Nov 28, 2024
1 parent c9d3250 commit 715026b
Show file tree
Hide file tree
Showing 8 changed files with 259 additions and 139 deletions.
5 changes: 5 additions & 0 deletions apps/go-test-caching/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,11 @@ inputs:
Ignore the difference between the current test indexes, and run all tests.
default: "false"

collect-coverage:
description: |
Collect coverage information for the tests. Required during both build and run steps.
default: "false"

build-flags:
description: "Flags to pass when running go test -c"
default: ""
Expand Down
138 changes: 92 additions & 46 deletions apps/go-test-caching/dist/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -17815,10 +17815,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
command_1.issueCommand("error", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports2.error = error6;
function warning5(message, properties = {}) {
function warning6(message, properties = {}) {
command_1.issueCommand("warning", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports2.warning = warning5;
exports2.warning = warning6;
function notice(message, properties = {}) {
command_1.issueCommand("notice", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
Expand Down Expand Up @@ -106352,7 +106352,7 @@ function logObject(title, obj) {
}
function uploadBuildLogs(directory, key) {
core.info("Uploading build logs");
const buildLogs = getLogFiles(directory, ".compile.log");
const buildLogs = getFilesInDir(directory, ".compile.log");
const runId = github.context.runId;
const artifactName = `build-logs-${key}-${runId}`;
core.debug(`Uploading build logs to ${artifactName}`);
Expand All @@ -106372,7 +106372,7 @@ function uploadBuildLogs(directory, key) {
}
function uploadRunLogs(directory, key) {
core.info("Uploading run logs");
const runLogs = getLogFiles(directory, ".run.log");
const runLogs = getFilesInDir(directory, ".run.log");
const runId = github.context.runId;
const artifactName = `run-logs-${key}-${runId}`;
core.debug(`Uploading run logs to ${artifactName}`);
Expand All @@ -106390,6 +106390,26 @@ function uploadRunLogs(directory, key) {
core.error("Error uploading run logs: " + error6);
}
}
// Upload all *.cover.out coverage profiles in `directory` as a single
// workflow artifact named `coverage-<key>-<runId>` (3-day retention).
// Returns the upload promise, or undefined when there is nothing to upload
// or artifact-client construction fails.
function uploadCoverage(directory, key) {
  core.info("Uploading coverage");
  const coverageFiles = getFilesInDir(directory, ".cover.out");
  const runId = github.context.runId;
  const artifactName = `coverage-${key}-${runId}`;
  core.debug(`Uploading coverage to ${artifactName}`);
  core.debug(`Coverage files: ${coverageFiles.join(", ")}`);
  if (coverageFiles.length === 0) {
    core.debug("No coverage files found, skipping upload.");
    return;
  }
  try {
    const client = new import_artifact.DefaultArtifactClient();
    // uploadArtifact is async: a bare try/catch around the call would miss
    // rejections, so attach a handler that logs upload failures as well.
    return client.uploadArtifact(artifactName, coverageFiles, directory, {
      retentionDays: 3
    }).catch((error6) => {
      core.error("Error uploading coverage logs: " + error6);
    });
  } catch (error6) {
    core.error("Error uploading coverage logs: " + error6);
  }
}
function uploadStateFile(filePath) {
core.debug("Uploading state object");
try {
Expand All @@ -106403,7 +106423,7 @@ function uploadStateFile(filePath) {
core.warning("Error uploading state object: " + error6);
}
}
function getLogFiles(directory, extension) {
function getFilesInDir(directory, extension) {
const files = (0, import_fs.readdirSync)(directory);
return files.filter((file) => file.endsWith(extension)).map((file) => {
return path6.join(directory, file);
Expand Down Expand Up @@ -106711,7 +106731,6 @@ var defaultExecaOptions = {
stdout: "pipe",
stderr: "pipe"
};
new ExecaError();
// A compile result is a success when it carries an `execution` record
// (failures carry an `error` instead).
function isCompilationSuccess(result) {
  return Reflect.has(result, "execution");
}
Expand Down Expand Up @@ -106759,6 +106778,10 @@ async function compileTestBinary(cwd, outputDir, { importPath, directory }, buil
}
async function compileConcurrent(workingDir, outputDir, packages, buildFlags, collectCoverage, maxConcurrency) {
const limit = pLimit(maxConcurrency);
if (collectCoverage) {
core5.info("collect-coverage is true - adding coverage flags to builds.");
buildFlags.push("-cover", "-coverpkg=./...", "-covermode=atomic");
}
const values = Object.values(packages);
const building = /* @__PURE__ */ new Set();
const tasks = values.map(
Expand Down Expand Up @@ -106817,11 +106840,7 @@ function filterForBuiltBinaries(buildDir, successes) {
core5.debug(`Binaries: ${binaries.join("\n")}`);
const compiledPackages = {};
for (const success of successes) {
if (verifyBinaryExistsOrThrow(
success.output.binary,
success.pkg.importPath,
success.execution.stdout
)) {
if ((0, import_fs2.existsSync)(success.output.binary)) {
const value = {
importPath: success.pkg.importPath,
directory: success.pkg.directory,
Expand All @@ -106841,23 +106860,12 @@ function filterForBuiltBinaries(buildDir, successes) {
}
const keys = Object.keys(compiledPackages);
if (keys.length !== binaries.length) {
core5.error(`Expected ${binaries.length} binaries, found ${keys.length}`);
core5.warning(
`Found ${binaries.length} in the output directory, but only found ${keys.length} packages in the results.`
);
}
return compiledPackages;
}
// Check that the compiled test binary exists on disk.
// Returns true when present, false when the package simply has no tests
// (go test prints "? ... [no test files]"), and throws otherwise.
function verifyBinaryExistsOrThrow(binaryPath, importPath, stdout) {
  core5.debug(`Verifying Package: ${importPath}, Binary: ${binaryPath}`);
  const binaryPresent = (0, import_fs2.existsSync)(binaryPath);
  if (binaryPresent) {
    return true;
  }
  const hasNoTestFiles = stdout.startsWith("?") && stdout.includes("[no test files]");
  if (hasNoTestFiles) {
    core5.debug(`No tests for package ${importPath}`);
    return false;
  }
  throw new Error(
    `Binary not found when expected. Package: ${importPath} , Binary: ${binaryPath}`
  );
}

// apps/go-test-caching/src/pipeline/hash.ts
var import_fs3 = require("fs");
Expand Down Expand Up @@ -106888,33 +106896,52 @@ function comparePackagesToIndex(runAllTests, packages, hashIndex) {
var import_fs4 = require("fs");
var path8 = __toESM(require("path"));
var core6 = __toESM(require_core());
// Template execa options for running test binaries: capture a merged
// stdout+stderr stream (`all`) alongside the individual pipes.
// `cwd` and `GOCOVERDIR` hold empty placeholders here — presumably they are
// overridden per invocation via object spread at the call site; confirm
// against the runTestBinary caller.
var defaultExecaOptions2 = {
  cwd: "",
  all: true,
  stdout: "pipe",
  stderr: "pipe",
  env: {
    GOCOVERDIR: ""
  }
};
// A run result is a success when it carries an `execution` record.
function isRunSuccess(result) {
  return Reflect.has(result, "execution");
}
// A run result is a failure when it carries an `error` record.
function isRunFailure(result) {
  return Reflect.has(result, "error");
}
// Spawn `cmd` with `flags` in `cwd`, piping stdout and stderr and exposing
// the interleaved output on the subprocess's `all` stream.
function execCommand(cmd, flags, cwd) {
  core6.debug(`Exec: ${cmd} ${flags.join(" ")} (cwd: ${cwd})`);
  const spawnOptions = {
    cwd,
    all: true,
    stdout: "pipe",
    stderr: "pipe"
  };
  return execa(cmd, flags, spawnOptions);
}
async function runTestBinary(outputDir, pkg, binaryPath, runFlags) {
async function runTestBinary(outputDir, pkg, binaryPath, runFlags, coverage, coverageDir) {
const goCoverDir = path8.join(pkg.directory, `go-cover-${path8.basename(binaryPath)}`);
(0, import_fs4.mkdirSync)(goCoverDir, { recursive: true });
const coveragePath = path8.join(coverageDir, `${path8.basename(binaryPath)}.cover.out`);
const logPath = path8.join(outputDir, path8.basename(binaryPath) + ".run.log");
const outputStream = (0, import_fs4.createWriteStream)(logPath);
try {
const subprocess = execCommand(binaryPath, runFlags, pkg.directory);
let localFlags = [...runFlags];
if (coverage) {
core6.debug(`Collecting coverage for ${pkg.importPath} at ${coveragePath}`);
localFlags.push(`-test.coverprofile=${coveragePath}`);
}
core6.debug(
`Exec: ${binaryPath} ${localFlags.join(" ")} (cwd: ${pkg.directory})`
);
const subprocess = execa(binaryPath, localFlags, {
...defaultExecaOptions2,
cwd: pkg.directory,
env: {
GOCOVERDIR: goCoverDir
}
});
core6.debug(`Logging output to ${logPath}`);
subprocess.all?.pipe(outputStream);
const execution = await subprocess;
return {
pkg,
execution,
output: {
log: logPath
log: logPath,
coverage: coverage ? coveragePath : void 0
}
};
} catch (error6) {
Expand Down Expand Up @@ -106969,7 +106996,7 @@ function filterOutputLogs(logs) {
}
return filteredLines.join("\n");
}
async function runConcurrent(buildDir, packages, flags, maxConcurrency) {
async function runConcurrent(buildDir, packages, flags, coverage, coverageDir, maxConcurrency) {
const limit = pLimit(maxConcurrency);
const allPackages = Object.values(packages);
const pkgsToRun = Object.values(allPackages).filter((pkg) => pkg.shouldRun);
Expand All @@ -106980,7 +107007,7 @@ async function runConcurrent(buildDir, packages, flags, maxConcurrency) {
const tasks = pkgsToRun.map(
(pkg) => limit(() => {
executing.add(pkg.importPath);
return runTestBinary(buildDir, pkg, pkg.compile.binary, flags).finally(
return runTestBinary(buildDir, pkg, pkg.compile.binary, flags, coverage, coverageDir).finally(
() => executing.delete(pkg.importPath)
);
})
Expand Down Expand Up @@ -107049,10 +107076,11 @@ function flattenRunResults(packages, successes) {
core6.warning(`Package ${importPath} not found in packages.`);
continue;
}
const { log } = success.output;
const { log, coverage } = success.output;
const { command, exitCode, cwd, durationMs } = success.execution;
executedPackages[importPath].run = {
log,
coverage,
execution: {
command,
exitCode: exitCode !== void 0 ? exitCode : -1,
Expand Down Expand Up @@ -107136,11 +107164,20 @@ async function runTestBinaries(inputs, packages) {
inputs.buildDirectory,
packages,
[],
inputs.collectCoverage,
inputs.coverageDirectory,
maxRunConcurrency
);
return validateRunResultsOrThrow(packages, runResults);
}
async function maybeUpdateHashIndex(inputs, hashedPackages) {
const wasCoverageEnabled = Object.values(hashedPackages).some(
(pkg) => pkg?.run?.coverage
);
if (inputs.collectCoverage || wasCoverageEnabled) {
core7.warning("Coverage collection is enabled. Skipping hash index update.");
return;
}
logSection("Updating Hash Index");
if (inputs.forceUpdateIndex) {
core7.warning("Force update index is enabled. Skipping branch check.");
Expand Down Expand Up @@ -107180,14 +107217,15 @@ async function maybeUpdateHashIndex(inputs, hashedPackages) {
function setup() {
const pipelineStep = core8.getInput("pipeline-step");
const moduleDirectory = core8.getInput("module-directory") || ".";
const forceUpdateIndex = core8.getInput("force-update-index") || "false";
const runAllTests = core8.getInput("run-all-tests") || "false";
const buildFlagsString = core8.getInput("build-flags");
const hashesBranch = core8.getInput("hashes-branch");
const testSuite = core8.getInput("test-suite") || "placeholder-test-suite";
const buildDirectory = process.env.RUNNER_TEMP || `/tmp/cl/${testSuite}`;
const forceUpdateIndexString = core8.getInput("force-update-index") || "false";
const runAllTestsString = core8.getInput("run-all-tests") || "false";
const collectCoverageString = core8.getInput("collect-coverage") || "false";
const stepsDirectory = path9.join(buildDirectory, "steps");
const collectCoverage = core8.getInput("collect-coverage") || "false";
const coverageDirectory = path9.join(buildDirectory, "coverage");
if (pipelineStep !== "build" && pipelineStep !== "run" && pipelineStep !== "update" && pipelineStep !== "e2e") {
core8.setFailed(
"Invalid pipeline step. Must be 'build','run', or 'update'."
Expand All @@ -107197,22 +107235,29 @@ function setup() {
if (!fs.existsSync(stepsDirectory)) {
fs.mkdirSync(stepsDirectory, { recursive: true });
}
if (!fs.existsSync(coverageDirectory)) {
fs.mkdirSync(coverageDirectory, { recursive: true });
}
let buildFlags = [];
if (buildFlagsString) {
buildFlags = buildFlagsString.split(" ");
}
const collectCoverage = collectCoverageString === "true";
const forceUpdateIndex = forceUpdateIndexString === "true";
const runAllTests = runAllTestsString === "true" || collectCoverage;
return {
pipelineStep,
moduleDirectory,
buildDirectory,
stepsDirectory,
coverageDirectory,
buildFlags,
forceUpdateIndex: forceUpdateIndex === "true",
hashesBranch,
hashesFile: `${testSuite}.json`,
testSuite,
runAllTests: runAllTests === "true",
collectCoverage: collectCoverage === "true"
runAllTests,
collectCoverage,
forceUpdateIndex
};
}
async function run() {
Expand Down Expand Up @@ -107248,6 +107293,7 @@ async function run() {
await uploadBuildLogs(inputs.buildDirectory, artifactKey);
} else if (inputs.pipelineStep === "run") {
await uploadRunLogs(inputs.buildDirectory, artifactKey);
await uploadCoverage(inputs.coverageDirectory, artifactKey);
}
}
}
Expand Down
30 changes: 27 additions & 3 deletions apps/go-test-caching/src/log.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ export function logObject(title: string, obj: Record<string, unknown>) {
export function uploadBuildLogs(directory: string, key: string) {
core.info("Uploading build logs");

const buildLogs = getLogFiles(directory, ".compile.log");
const buildLogs = getFilesInDir(directory, ".compile.log");
const runId = github.context.runId;
const artifactName = `build-logs-${key}-${runId}`;
core.debug(`Uploading build logs to ${artifactName}`);
Expand All @@ -44,7 +44,7 @@ export function uploadBuildLogs(directory: string, key: string) {
export function uploadRunLogs(directory: string, key: string) {
core.info("Uploading run logs");

const runLogs: string[] = getLogFiles(directory, ".run.log");
const runLogs: string[] = getFilesInDir(directory, ".run.log");
const runId = github.context.runId;
const artifactName = `run-logs-${key}-${runId}`;
core.debug(`Uploading run logs to ${artifactName}`);
Expand All @@ -65,6 +65,30 @@ export function uploadRunLogs(directory: string, key: string) {
}
}

/**
 * Uploads all `*.cover.out` coverage profiles found in `directory` as one
 * workflow artifact named `coverage-<key>-<runId>` with 3-day retention.
 *
 * @param directory - directory to scan for coverage profile files
 * @param key - artifact-name discriminator (e.g. the test-suite key)
 * @returns the upload promise, or undefined when there are no coverage
 *          files or artifact-client construction fails
 */
export function uploadCoverage(directory: string, key: string) {
  core.info("Uploading coverage");

  const coverageFiles: string[] = getFilesInDir(directory, ".cover.out");
  const runId = github.context.runId;
  const artifactName = `coverage-${key}-${runId}`;
  core.debug(`Uploading coverage to ${artifactName}`);
  core.debug(`Coverage files: ${coverageFiles.join(", ")}`);

  if (coverageFiles.length === 0) {
    core.debug("No coverage files found, skipping upload.");
    return;
  }

  try {
    const client = new DefaultArtifactClient();
    // uploadArtifact is async: the surrounding try/catch only covers the
    // synchronous client construction, so attach a rejection handler to log
    // upload failures too.
    return client
      .uploadArtifact(artifactName, coverageFiles, directory, {
        retentionDays: 3,
      })
      .catch((error: unknown) => {
        core.error("Error uploading coverage logs: " + error);
      });
  } catch (error) {
    core.error("Error uploading coverage logs: " + error);
  }
}

export function uploadStateFile(filePath: string) {
core.debug("Uploading state object");

Expand All @@ -80,7 +104,7 @@ export function uploadStateFile(filePath: string) {
}
}

function getLogFiles(directory: string, extension: string): string[] {
function getFilesInDir(directory: string, extension: string): string[] {
const files: string[] = readdirSync(directory);
return files
.filter((file) => file.endsWith(extension))
Expand Down
Loading

0 comments on commit 715026b

Please sign in to comment.