summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPeter Hutterer <peter.hutterer@who-t.net>2019-11-28 09:35:29 +1000
committerBenjamin Tissoires <benjamin.tissoires@gmail.com>2019-11-28 11:52:54 +0100
commit3f051ca9d8b30e80c5783b558d4f575f68d522e1 (patch)
tree85102f2ecf59d4e61b6676a0f7e0317c6261e87d
parent3ac525db195f11c748fecfe8701e042f50300e40 (diff)
gitlab CI: add meson to junit script
This script was written by Emmanuele Bassi, copied from https://gist.github.com/ebassi/e5296ec77ae9e0d3a33fd483b5613b09/. It converts meson test results into a junit file which we can then use to display in the merge request GUI. Note that, as litest writes out junit files as well, some tests are reported twice. Specifically: where litest fails, the failure will be reported once through litest itself and once by meson test. Oh well. Signed-off-by: Peter Hutterer <peter.hutterer@who-t.net>
-rw-r--r--.gitlab-ci.yml16
-rwxr-xr-x.gitlab-ci/meson-junit-report.py106
2 files changed, 118 insertions, 4 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 98f39374..899d5547 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -119,6 +119,8 @@ variables:
expire_in: 1 week
paths:
- $MESON_BUILDDIR/meson-logs
+ reports:
+ junit: $MESON_BUILDDIR/junit-*.xml
# The default build instructions
.default_build:
@@ -127,7 +129,16 @@ variables:
- meson "$MESON_BUILDDIR" $MESON_ARGS
- meson configure "$MESON_BUILDDIR"
- ninja -C "$MESON_BUILDDIR" $NINJA_ARGS
- - if test x"$MESON_TEST_ARGS" != "x"; then echo "Running meson test -C \"$MESON_BUILDDIR\" $MESON_TEST_ARGS"; meson test -C "$MESON_BUILDDIR" $MESON_TEST_ARGS; fi
+ - if test x"$MESON_TEST_ARGS" != "x"; then
+ echo "Running meson test -C \"$MESON_BUILDDIR\" $MESON_TEST_ARGS";
+ meson test -C "$MESON_BUILDDIR" $MESON_TEST_ARGS || touch .failed;
+ ./.gitlab-ci/meson-junit-report.py
+ --project-name=libinput
+ --job-id="$CI_JOB_ID"
+ --output="$MESON_BUILDDIR/junit-$CI_JOB_NAME-report.xml"
+ "$MESON_BUILDDIR/meson-logs/testlog.json";
+ test -f .failed && exit 1;
+ fi
#################################################################
# #
@@ -625,9 +636,6 @@ fedora:31@default-build:
variables:
FEDORA_VERSION: 31
needs: ['fedora:31@container-prep']
- artifacts:
- reports:
- junit: '$MESON_BUILDDIR/junit-*.xml'
fedora:30@default-build:
stage: distro
diff --git a/.gitlab-ci/meson-junit-report.py b/.gitlab-ci/meson-junit-report.py
new file mode 100755
index 00000000..542065be
--- /dev/null
+++ b/.gitlab-ci/meson-junit-report.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python3
+#
+# meson-junit-report.py: Turns a Meson test log into a JUnit report
+#
+# Copyright 2019 GNOME Foundation
+#
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import argparse
+import datetime
+import json
+import sys
+import xml.etree.ElementTree as ET
+
# Command-line interface. Every option has a usable default so the script
# can also run as a plain filter: testlog.json on stdin, JUnit XML on stdout.
aparser = argparse.ArgumentParser(description='Turns a Meson test log into a JUnit report')
aparser.add_argument('--project-name', metavar='NAME',
                     help='The project name',
                     default='unknown')
aparser.add_argument('--job-id', metavar='ID',
                     help='The job ID for the report',
                     default='Unknown')
aparser.add_argument('--branch', metavar='NAME',
                     help='Branch of the project being tested',
                     default='master')
aparser.add_argument('--output', metavar='FILE',
                     help='The output file, stdout by default',
                     type=argparse.FileType('w', encoding='UTF-8'),
                     default=sys.stdout)
# nargs='?' is required for a positional argument to fall back to its
# default: without it argparse treats 'infile' as mandatory and the
# documented "stdin by default" behaviour can never trigger.
aparser.add_argument('infile', metavar='FILE',
                     help='The input testlog.json, stdin by default',
                     type=argparse.FileType('r', encoding='UTF-8'),
                     nargs='?',
                     default=sys.stdin)

args = aparser.parse_args()
+
# Destination stream for the finished report.
outfile = args.output

# Root <testsuites> element; one <testsuite> per Meson suite is attached
# to it further below.
testsuites = ET.Element('testsuites')
for attr, value in (
        ('id', '{}/{}'.format(args.job_id, args.branch)),
        ('package', args.project_name),
        ('timestamp', datetime.datetime.utcnow().isoformat(timespec='minutes')),
):
    testsuites.set(attr, value)
+
# Group the test units by suite. The Meson JSON log carries one JSON object
# per line; each test is named "project:suite / unit".
suites = {}
for record in map(json.loads, args.infile):
    full_suite, unit_name = record['name'].split(' / ')
    _project, suite_name = full_suite.split(':')

    suites.setdefault(suite_name, []).append({
        'suite': suite_name,
        'name': unit_name,
        'duration': record['duration'],
        'returncode': record['returncode'],
        'stdout': record['stdout'],
    })
+
# Emit one <testsuite> per Meson suite, with a <testcase> per unit and a
# <failure> child (carrying the captured stdout) for every failed unit.
# Passing cases are listed before failing ones.
for suite_name, units in suites.items():
    print('Processing suite {} (units: {})'.format(suite_name, len(units)))

    passed = [u for u in units if u['returncode'] == 0]
    failed = [u for u in units if u['returncode'] != 0]
    print(' - {}: {} pass, {} fail'.format(suite_name, len(passed), len(failed)))

    testsuite = ET.SubElement(testsuites, 'testsuite')
    testsuite.set('name', '{}/{}'.format(args.project_name, suite_name))
    testsuite.set('tests', str(len(units)))
    # NOTE(review): 'errors' mirrors the failure count here, although JUnit
    # normally distinguishes errors from failures — confirm this is intended.
    testsuite.set('errors', str(len(failed)))
    testsuite.set('failures', str(len(failed)))

    for unit in passed + failed:
        testcase = ET.SubElement(testsuite, 'testcase')
        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
        testcase.set('name', unit['name'])
        testcase.set('time', str(unit['duration']))

        if unit['returncode'] != 0:
            failure = ET.SubElement(testcase, 'failure')
            failure.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
            failure.set('name', unit['name'])
            failure.set('type', 'error')
            failure.text = unit['stdout']
+
# Serialize the assembled tree as text and write it to the chosen destination.
serialized = ET.tostring(testsuites, encoding='unicode')
outfile.write(serialized)