chore: improve the test reporter's messaging #5724
Changes from 1 commit
```diff
@@ -4,6 +4,7 @@ import type {
   TestEventSource,
   TestReporter,
   TestReporterResult,
+  TestRunOptions,
 } from "./types.js";
 
 import chalk from "chalk";
 
@@ -22,6 +23,7 @@ import {
 import { annotatePR } from "./github-actions.js";
 import {
   isCancelledByParentError,
+  isParentAlreadyFinishedError,
   isSubtestFailedError,
 } from "./node-test-error-utils.js";
 
@@ -47,11 +49,16 @@ export interface HardhatTestReporterConfig {
  * @param source
  */
-const customReporter: TestReporter = hardhatTestReporter();
+const customReporter: TestReporter = hardhatTestReporter(getTestRunOptions());
 export default customReporter;
 
+function getTestRunOptions(): TestRunOptions {
+  const only = process.execArgv.includes("--test-only");
+  return { only };
+}
+
 export function hardhatTestReporter(
+  options: TestRunOptions,
   config: HardhatTestReporterConfig = {},
 ): TestReporter {
   return async function* (source: TestEventSource): TestReporterResult {
```
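For context on `getTestRunOptions`: Node.js exposes the flags it was launched with (such as `--test-only`) via `process.execArgv`, which is separate from the script arguments in `process.argv`. A minimal sketch of the distinction, assuming a hypothetical `demo.js` entry point:

```ts
// Run as: node --test-only demo.js --verbose
// Flags placed before the script path go to the node binary itself and
// show up in process.execArgv; everything after the script path is a
// script argument and shows up in process.argv instead.
const only: boolean = process.execArgv.includes("--test-only");

console.log(process.execArgv); // ["--test-only"]
console.log(process.argv.slice(2)); // ["--verbose"]
console.log({ only }); // { only: true }
```

This is why the reporter can detect `--test-only` without doing any argument parsing of its own.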
```diff
@@ -102,20 +109,32 @@ export function hardhatTestReporter(
     const preFormattedFailureReasons: string[] = [];
 
     let topLevelFilePassCount = 0;
+    let parentAlreadyFinishedFailureCount = 0;
 
     for await (const event of source) {
       switch (event.type) {
         case "test:diagnostic": {
-          // We need to subtract the number of top-level file passes from the
-          // total number of tests/passes, as we are not printing them.
-          if (
-            event.data.message.startsWith("tests") ||
-            event.data.message.startsWith("pass")
-          ) {
+          // We need to subtract the number of tests/suites that we chose not to
+          // display from the summary
+          if (event.data.message.startsWith("tests")) {
+            const parts = event.data.message.split(" ");
+            const count =
+              parseInt(parts[1], 10) -
+              topLevelFilePassCount -
+              parentAlreadyFinishedFailureCount;
+            event.data.message = `${parts[0]} ${count}`;
+          }
+          if (event.data.message.startsWith("pass")) {
             const parts = event.data.message.split(" ");
             const count = parseInt(parts[1], 10) - topLevelFilePassCount;
             event.data.message = `${parts[0]} ${count}`;
           }
+          if (event.data.message.startsWith("fail")) {
+            const parts = event.data.message.split(" ");
+            const count =
+              parseInt(parts[1], 10) - parentAlreadyFinishedFailureCount;
+            event.data.message = `${parts[0]} ${count}`;
+          }
           diagnostics.push(event.data);
           break;
         }
```
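Node's test runner reports its final summary as `test:diagnostic` events whose messages look like `tests 10`, `pass 8`, `fail 2`, and the hunk above rewrites those counts in place before printing. A hypothetical pure-function version of the same adjustment (the helper name and switch structure are illustrative, not from the PR):

```ts
// Rewrites one summary diagnostic line, subtracting entries the
// reporter never printed: top-level file passes and, under
// --test-only, "parent already finished" false negatives.
function adjustSummaryLine(
  message: string,
  topLevelFilePassCount: number,
  parentAlreadyFinishedFailureCount: number,
): string {
  const [label, value] = message.split(" ");
  const count = parseInt(value, 10);

  switch (label) {
    case "tests":
      return `${label} ${count - topLevelFilePassCount - parentAlreadyFinishedFailureCount}`;
    case "pass":
      return `${label} ${count - topLevelFilePassCount}`;
    case "fail":
      return `${label} ${count - parentAlreadyFinishedFailureCount}`;
    default:
      return message; // e.g. "duration_ms 123.45" passes through untouched
  }
}

// adjustSummaryLine("tests 10", 2, 1) === "tests 7"
// adjustSummaryLine("pass 8", 2, 1)   === "pass 6"
// adjustSummaryLine("fail 2", 2, 1)   === "fail 1"
```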
```diff
@@ -138,6 +157,22 @@ export function hardhatTestReporter(
           break;
         }
 
+        // We do not want to display failures caused by the parent already
+        // having finished when running with the --test-only flag because
+        // they are false negatives.
+        // We still display the failed suites, because failures for the nested
+        // tests (cancelled by parent errors) were already displayed.
+        if (
+          options.only === true &&
+          event.data.details.type !== "suite" &&
+          event.type === "test:fail" &&
+          isParentAlreadyFinishedError(event.data.details.error)
+        ) {
+          parentAlreadyFinishedFailureCount++;
+          stack.pop();
+          break;
+        }
+
         if (event.data.details.type === "suite") {
           // If a suite failed for a reason other than a subtest failing, we
           // want to print its failure.
```

Reviewer: This doesn't work well, as it looks like an error. I think a good trade-off would be to detect that situation and maybe display those canceled tests/suites as a particular case in the summary at the end of the test run, if needed. This is a complex case, so we can continue addressing it in a follow-up PR.

Author: I replied in longer form in #5724 (comment). TL;DR: I removed any attempt to handle that case from this PR, and the newest Node makes the output for these cases a lot better (e.g. in …
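Stepping back to the code: the skip-and-`break` pattern above works because the reporter is an async generator over the runner's event stream; whatever it yields gets printed, and an event it consumes without yielding simply never appears in the output. A simplified sketch of that shape, using local stand-in types instead of the real ones from `./types.js` and a hypothetical `shouldHide` predicate in place of the PR's actual checks:

```ts
// Local stand-ins for the real event/reporter types in ./types.js.
type TestEvent =
  | { type: "test:pass"; data: { name: string } }
  | { type: "test:fail"; data: { name: string } };

// A reporter is a transform over the event stream: it consumes events
// and yields the strings to print. Swallowing an event (continue) is
// the mechanism that hides a failure from the output, analogous to the
// PR's early `break` for "parent already finished" failures.
async function* minimalReporter(
  source: AsyncIterable<TestEvent>,
): AsyncGenerator<string> {
  for await (const event of source) {
    if (event.type === "test:fail" && shouldHide(event)) {
      continue; // nothing yielded, nothing printed
    }
    yield `${event.type}: ${event.data.name}\n`;
  }
}

// Hypothetical predicate; the PR's real check combines options.only,
// the event details, and isParentAlreadyFinishedError.
function shouldHide(event: TestEvent): boolean {
  return event.data.name.startsWith("hidden:");
}
```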
Reviewer: Maybe all this logic should go to `processGlobalDiagnostics`.

Author: That's a good catch. I now moved it to after `processGlobalDiagnostics`, which I think is much cleaner; see 8332321 (#5724).