[ci] New script to generate test reports as Buildkite Annotations (#113447)

The CI builds now write the results of every lit run to a uniquely
named file. This means we can read them all to make a combined report
covering all tests.
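The unique file names come from lit's --use-unique-output-file-name
option; the relevant setting from the CI scripts (see the diffs below) is:

  lit_args="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-output-file-name"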

This report will be shown as an "annotation" in the build results:
https://buildkite.com/docs/agent/v3/cli-annotate#creating-an-annotation

Here is an example:
https://buildkite.com/llvm-project/github-pull-requests/builds/112660
(make sure it is showing "All" instead of "Failures")

This is an alternative to using the existing Buildkite plugin:
https://github.com/buildkite-plugins/junit-annotate-buildkite-plugin

We did not use the plugin because it:
* is specific to Buildkite, and we may move away from Buildkite;
* requires Docker, unless we were to fork it ourselves;
* does not let you customise the report format unless, again,
  we make our own fork.

Annotations use GitHub's flavour of Markdown, so the main code in the
script generates that text. An extra "style" argument is also generated
to make the formatting nicer in Buildkite.
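For example, a report for a run with failures starts like this (the exact
expected text appears in the script's test cases below):

  # Foo
  * 1 test passed
  * 1 test skipped
  * 2 tests failed

  ## Failed tests
  (click to see output)

Each failing test's output then follows in a collapsible <details> block.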

"context" is the name of the annotation that will be created. By using
different context names for Linux and Windows results we get 2 separate
annotations.

The script also handles calling buildkite-agent itself. This makes it
easier to pass extra arguments to the agent than it would be if we piped
the script's output into the agent.
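Concretely, the script ends up running the equivalent of:

  buildkite-agent annotate --context "<context>" --style "<success|error>" "<report text>"

with the style chosen based on whether any tests failed.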

In the future we can remove the agent portion and simply use the report
content directly, either printed to stdout or posted as a comment on the
GitHub PR.
David Spickett, 2024-11-12 13:34:47 +00:00, committed by GitHub
parent f539d92dca
commit e74a002433
4 changed files with 345 additions and 4 deletions

.ci/generate_test_report.py (new file)

@@ -0,0 +1,328 @@
# Script to parse many JUnit XML result files and send a report to the buildkite
# agent as an annotation.
#
# To run the unittests:
# python3 -m unittest discover -p generate_test_report.py
import argparse
import unittest
from io import StringIO
from textwrap import dedent
from subprocess import check_call

from junitparser import JUnitXml, Failure


def junit_from_xml(xml):
    return JUnitXml.fromfile(StringIO(xml))


class TestReports(unittest.TestCase):
def test_title_only(self):
        self.assertEqual(_generate_report("Foo", []), ("", None))

    def test_no_tests_in_testsuite(self):
self.assertEqual(
_generate_report(
"Foo",
[
junit_from_xml(
dedent(
"""\
<?xml version="1.0" encoding="UTF-8"?>
<testsuites time="0.00">
<testsuite name="Empty" tests="0" failures="0" skipped="0" time="0.00">
</testsuite>
</testsuites>"""
)
)
],
),
("", None),
        )

    def test_no_failures(self):
self.assertEqual(
_generate_report(
"Foo",
[
junit_from_xml(
dedent(
"""\
<?xml version="1.0" encoding="UTF-8"?>
<testsuites time="0.00">
<testsuite name="Passed" tests="1" failures="0" skipped="0" time="0.00">
<testcase classname="Bar/test_1" name="test_1" time="0.00"/>
</testsuite>
</testsuites>"""
)
)
],
),
(
dedent(
"""\
# Foo
* 1 test passed"""
),
"success",
),
        )

    def test_report_single_file_single_testsuite(self):
self.assertEqual(
_generate_report(
"Foo",
[
junit_from_xml(
dedent(
"""\
<?xml version="1.0" encoding="UTF-8"?>
<testsuites time="8.89">
<testsuite name="Bar" tests="4" failures="2" skipped="1" time="410.63">
<testcase classname="Bar/test_1" name="test_1" time="0.02"/>
<testcase classname="Bar/test_2" name="test_2" time="0.02">
<skipped message="Reason"/>
</testcase>
<testcase classname="Bar/test_3" name="test_3" time="0.02">
<failure><![CDATA[Output goes here]]></failure>
</testcase>
<testcase classname="Bar/test_4" name="test_4" time="0.02">
<failure><![CDATA[Other output goes here]]></failure>
</testcase>
</testsuite>
</testsuites>"""
)
)
],
),
(
dedent(
"""\
# Foo
* 1 test passed
* 1 test skipped
* 2 tests failed
## Failed tests
(click to see output)
### Bar
<details>
<summary>Bar/test_3/test_3</summary>
```
Output goes here
```
</details>
<details>
<summary>Bar/test_4/test_4</summary>
```
Other output goes here
```
</details>"""
),
"error",
),
        )

    MULTI_SUITE_OUTPUT = (
dedent(
"""\
# ABC and DEF
* 1 test passed
* 1 test skipped
* 2 tests failed
## Failed tests
(click to see output)
### ABC
<details>
<summary>ABC/test_2/test_2</summary>
```
ABC/test_2 output goes here
```
</details>
### DEF
<details>
<summary>DEF/test_2/test_2</summary>
```
DEF/test_2 output goes here
```
</details>"""
),
"error",
    )

    def test_report_single_file_multiple_testsuites(self):
self.assertEqual(
_generate_report(
"ABC and DEF",
[
junit_from_xml(
dedent(
"""\
<?xml version="1.0" encoding="UTF-8"?>
<testsuites time="8.89">
<testsuite name="ABC" tests="2" failures="1" skipped="0" time="410.63">
<testcase classname="ABC/test_1" name="test_1" time="0.02"/>
<testcase classname="ABC/test_2" name="test_2" time="0.02">
<failure><![CDATA[ABC/test_2 output goes here]]></failure>
</testcase>
</testsuite>
<testsuite name="DEF" tests="2" failures="1" skipped="1" time="410.63">
<testcase classname="DEF/test_1" name="test_1" time="0.02">
<skipped message="reason"/>
</testcase>
<testcase classname="DEF/test_2" name="test_2" time="0.02">
<failure><![CDATA[DEF/test_2 output goes here]]></failure>
</testcase>
</testsuite>
</testsuites>"""
)
)
],
),
self.MULTI_SUITE_OUTPUT,
        )

    def test_report_multiple_files_multiple_testsuites(self):
self.assertEqual(
_generate_report(
"ABC and DEF",
[
junit_from_xml(
dedent(
"""\
<?xml version="1.0" encoding="UTF-8"?>
<testsuites time="8.89">
<testsuite name="ABC" tests="2" failures="1" skipped="0" time="410.63">
<testcase classname="ABC/test_1" name="test_1" time="0.02"/>
<testcase classname="ABC/test_2" name="test_2" time="0.02">
<failure><![CDATA[ABC/test_2 output goes here]]></failure>
</testcase>
</testsuite>
</testsuites>"""
)
),
junit_from_xml(
dedent(
"""\
<?xml version="1.0" encoding="UTF-8"?>
<testsuites time="8.89">
<testsuite name="DEF" tests="2" failures="1" skipped="1" time="410.63">
<testcase classname="DEF/test_1" name="test_1" time="0.02">
<skipped message="reason"/>
</testcase>
<testcase classname="DEF/test_2" name="test_2" time="0.02">
<failure><![CDATA[DEF/test_2 output goes here]]></failure>
</testcase>
</testsuite>
</testsuites>"""
)
),
],
),
self.MULTI_SUITE_OUTPUT,
)
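# Builds the report from already-parsed JUnitXml objects, returning a tuple
# of (Markdown report text, annotation style). The style is "success" or
# "error", or None when there is nothing to report.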
def _generate_report(title, junit_objects):
style = None
if not junit_objects:
return ("", style)
failures = {}
tests_run = 0
tests_skipped = 0
tests_failed = 0
for results in junit_objects:
for testsuite in results:
tests_run += testsuite.tests
tests_skipped += testsuite.skipped
tests_failed += testsuite.failures
for test in testsuite:
if (
not test.is_passed
and test.result
and isinstance(test.result[0], Failure)
):
if failures.get(testsuite.name) is None:
failures[testsuite.name] = []
failures[testsuite.name].append(
(test.classname + "/" + test.name, test.result[0].text)
)
if not tests_run:
return ("", style)
style = "error" if tests_failed else "success"
report = [f"# {title}", ""]
tests_passed = tests_run - tests_skipped - tests_failed
def plural(num_tests):
return "test" if num_tests == 1 else "tests"
if tests_passed:
report.append(f"* {tests_passed} {plural(tests_passed)} passed")
if tests_skipped:
report.append(f"* {tests_skipped} {plural(tests_skipped)} skipped")
if tests_failed:
report.append(f"* {tests_failed} {plural(tests_failed)} failed")
if failures:
report.extend(["", "## Failed tests", "(click to see output)"])
        for testsuite_name, testsuite_failures in failures.items():
            report.extend(["", f"### {testsuite_name}"])
            for name, output in testsuite_failures:
report.extend(
[
"<details>",
f"<summary>{name}</summary>",
"",
"```",
output,
"```",
"</details>",
]
)
return "\n".join(report), style
def generate_report(title, junit_files):
    return _generate_report(title, [JUnitXml.fromfile(p) for p in junit_files])


if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"title", help="Title of the test report, without Markdown formatting."
)
parser.add_argument("context", help="Annotation context to write to.")
parser.add_argument("junit_files", help="Paths to JUnit report files.", nargs="*")
args = parser.parse_args()
    report, style = generate_report(args.title, args.junit_files)
    # If there were no results, the report is empty and style is None;
    # skip the annotation rather than passing None to check_call.
    if report:
        check_call(
            [
                "buildkite-agent",
                "annotate",
                "--context",
                args.context,
                "--style",
                style,
                report,
            ]
        )
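As used in the CI scripts below, the invocation looks like:

  python3 .ci/generate_test_report.py ":linux: Linux x64 Test Results" \
    "linux-x64-test-results" "${BUILD_DIR}"/test-results.*.xml

The unit tests can be run with the command given in the header comment:
python3 -m unittest discover -p generate_test_report.py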

.ci/monolithic-linux.sh

@@ -28,11 +28,16 @@ if [[ -n "${CLEAR_CACHE:-}" ]]; then
   ccache --clear
 fi
 
-function show-stats {
+function at-exit {
   mkdir -p artifacts
   ccache --print-stats > artifacts/ccache_stats.txt
+
+  # If building fails there will be no results files.
+  shopt -s nullglob
+  python3 "${MONOREPO_ROOT}"/.ci/generate_test_report.py ":linux: Linux x64 Test Results" \
+    "linux-x64-test-results" "${BUILD_DIR}"/test-results.*.xml
 }
-trap show-stats EXIT
+trap at-exit EXIT
 
 projects="${1}"
 targets="${2}"
@@ -42,6 +47,7 @@ lit_args="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-output-file-name"
 echo "--- cmake"
 pip install -q -r "${MONOREPO_ROOT}"/mlir/python/requirements.txt
 pip install -q -r "${MONOREPO_ROOT}"/lldb/test/requirements.txt
+pip install -q -r "${MONOREPO_ROOT}"/.ci/requirements.txt
 cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \
   -D LLVM_ENABLE_PROJECTS="${projects}" \
   -G Ninja \

.ci/monolithic-windows.sh

@@ -27,17 +27,23 @@ if [[ -n "${CLEAR_CACHE:-}" ]]; then
 fi
 
 sccache --zero-stats
 
-function show-stats {
+function at-exit {
   mkdir -p artifacts
   sccache --show-stats >> artifacts/sccache_stats.txt
+
+  # If building fails there will be no results files.
+  shopt -s nullglob
+  python "${MONOREPO_ROOT}"/.ci/generate_test_report.py ":windows: Windows x64 Test Results" \
+    "windows-x64-test-results" "${BUILD_DIR}"/test-results.*.xml
 }
-trap show-stats EXIT
+trap at-exit EXIT
 
 projects="${1}"
 targets="${2}"
 
 echo "--- cmake"
 pip install -q -r "${MONOREPO_ROOT}"/mlir/python/requirements.txt
+pip install -q -r "${MONOREPO_ROOT}"/.ci/requirements.txt
 # The CMAKE_*_LINKER_FLAGS to disable the manifest come from research
 # on fixing a build reliability issue on the build server, please

.ci/requirements.txt (new file)

@@ -0,0 +1 @@
junitparser==3.2.0