gtr: Also look at failures in benchmarks when creating a package

When we encounter a failure but there were no failing tests, we create a
failing dummy test in the report. This is to prevent the failure from
being overlooked.

The case where the output contained a failing benchmark was not handled
correctly, resulting in a dummy test failure being added to the report
unnecessarily.
This commit is contained in:
Joël Stemmer 2022-06-13 00:47:28 +01:00
parent d05abd4130
commit 46e0aed494
3 changed files with 17 additions and 16 deletions

View File

@ -236,7 +236,7 @@ func (b *reportBuilder) CreatePackage(name, result string, duration time.Duratio
// If the summary result says we failed, but there were no failing tests
// then something else must have failed.
if parseResult(result) == gtr.Fail && (len(b.tests) > 0 || len(b.benchmarks) > 0) && !b.containsFailingTest() {
if parseResult(result) == gtr.Fail && (len(b.tests) > 0 || len(b.benchmarks) > 0) && !b.containsFailures() {
pkg.RunError = gtr.Error{
Name: name,
Output: b.output.Get(globalID),
@ -348,14 +348,19 @@ func (b *reportBuilder) findBenchmark(name string) (int, bool) {
return 0, false
}
// containsFailingTest return true if the current list of tests contains at
// least one failing test or an unknown result.
func (b *reportBuilder) containsFailingTest() bool {
// containsFailures return true if the current list of tests or benchmarks
// contains at least one failing test or an unknown result.
func (b *reportBuilder) containsFailures() bool {
for _, test := range b.tests {
if test.Result == gtr.Fail || test.Result == gtr.Unknown {
return true
}
}
for _, bm := range b.benchmarks {
if bm.Result == gtr.Fail || bm.Result == gtr.Unknown {
return true
}
}
return false
}

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="4" errors="1" failures="2" skipped="1">
<testsuite name="package/name/benchfail" tests="4" failures="2" errors="1" id="0" hostname="hostname" skipped="1" time="0.002" timestamp="2022-01-01T00:00:00Z">
<testsuites tests="3" failures="2" skipped="1">
<testsuite name="package/name/benchfail" tests="3" failures="2" errors="0" id="0" hostname="hostname" skipped="1" time="0.002" timestamp="2022-01-01T00:00:00Z">
<properties>
<property name="go.version" value="1.0"></property>
</properties>
@ -13,11 +13,9 @@
<testcase name="BenchmarkSkip" classname="package/name/benchfail" time="0.000">
<skipped message="Skipped"><![CDATA[ bench_test.go:14: skip message]]></skipped>
</testcase>
<testcase name="Failure" classname="package/name/benchfail" time="0.000">
<error message="Runtime error"><![CDATA[goos: linux
<system-out><![CDATA[goos: linux
goarch: amd64
pkg: package/name/benchfail
exit status 1]]></error>
</testcase>
exit status 1]]></system-out>
</testsuite>
</testsuites>

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="4" errors="1" failures="2" skipped="1">
<testsuite name="package/name/benchfail" tests="4" failures="2" errors="1" id="0" hostname="hostname" skipped="1" time="0.002" timestamp="2022-01-01T00:00:00Z">
<testsuites tests="3" failures="2" skipped="1">
<testsuite name="package/name/benchfail" tests="3" failures="2" errors="0" id="0" hostname="hostname" skipped="1" time="0.002" timestamp="2022-01-01T00:00:00Z">
<properties>
<property name="go.version" value="1.0"></property>
</properties>
@ -13,11 +13,9 @@
<testcase name="BenchmarkSkip" classname="package/name/benchfail" time="0.000">
<skipped message="Skipped"><![CDATA[ bench_test.go:14: skip message]]></skipped>
</testcase>
<testcase name="Failure" classname="package/name/benchfail" time="0.000">
<error message="Runtime error"><![CDATA[goos: linux
<system-out><![CDATA[goos: linux
goarch: amd64
pkg: package/name/benchfail
exit status 1]]></error>
</testcase>
exit status 1]]></system-out>
</testsuite>
</testsuites>