bpo-34582: Adds JUnit XML output for regression tests (GH-9210)

Steve Dower 2018-09-18 09:10:26 -07:00 committed by GitHub
parent cb5778f00c
commit d0f49d2f50
12 changed files with 329 additions and 33 deletions

View File

@@ -70,6 +70,15 @@ steps:
displayName: 'Run patchcheck.py'
condition: and(succeeded(), ne(variables['DocOnly'], 'true'))
- script: xvfb-run make buildbottest TESTOPTS="-j4 -uall,-cpu"
- script: xvfb-run make buildbottest TESTOPTS="-j4 -uall,-cpu --junit-xml=$(build.binariesDirectory)/test-results.xml"
displayName: 'Tests'
condition: and(succeeded(), ne(variables['DocOnly'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Test Results'
inputs:
testResultsFiles: '$(build.binariesDirectory)/test-results.xml'
mergeTestResults: true
testRunTitle: '$(system.pullRequest.targetBranch)-linux'
platform: linux
condition: and(succeededOrFailed(), ne(variables['DocOnly'], 'true'))

View File

@@ -50,6 +50,15 @@ steps:
displayName: 'Display build info'
condition: and(succeeded(), ne(variables['DocOnly'], 'true'))
- script: make buildbottest TESTOPTS="-j4 -uall,-cpu"
- script: make buildbottest TESTOPTS="-j4 -uall,-cpu --junit-xml=$(build.binariesDirectory)/test-results.xml"
displayName: 'Tests'
condition: and(succeeded(), ne(variables['DocOnly'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Test Results'
inputs:
testResultsFiles: '$(build.binariesDirectory)/test-results.xml'
mergeTestResults: true
testRunTitle: '$(system.pullRequest.targetBranch)-macOS'
platform: macOS
condition: and(succeededOrFailed(), ne(variables['DocOnly'], 'true'))

View File

@@ -54,8 +54,17 @@ steps:
displayName: 'Display build info'
condition: and(succeeded(), ne(variables['DocOnly'], 'true'))
- script: PCbuild\rt.bat -q -uall -u-cpu -rwW --slowest --timeout=1200 -j0
- script: PCbuild\rt.bat -q -uall -u-cpu -rwW --slowest --timeout=1200 -j0 --junit-xml="$(Build.BinariesDirectory)\test-results.xml"
displayName: 'Tests'
env:
PREFIX: $(Py_OutDir)\$(outDirSuffix)
condition: and(succeeded(), ne(variables['DocOnly'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Test Results'
inputs:
testResultsFiles: '$(Build.BinariesDirectory)\test-results.xml'
mergeTestResults: true
testRunTitle: '$(System.PullRequest.TargetBranch)-$(outDirSuffix)'
platform: $(outDirSuffix)
condition: and(succeededOrFailed(), ne(variables['DocOnly'], 'true'))
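All three pipelines (Linux, macOS, and Windows above) follow the same two-step pattern: the test step passes --junit-xml so regrtest writes a single results file, and a PublishTestResults@2 task uploads it. Note that the publish task is conditioned on succeededOrFailed() rather than succeeded(): the results are most valuable precisely when the test step fails. As a hedged illustration (not part of the commit), a local sanity check of the file regrtest produces could look like this, where 'test-results.xml' is an example path rather than the pipeline variable used above:

    import xml.etree.ElementTree as ET

    root = ET.parse('test-results.xml').getroot()
    assert root.tag == 'testsuites'
    print(f"{root.get('tests')} tests, {root.get('failures')} failures, "
          f"{root.get('errors')} errors")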

View File

@@ -52,7 +52,8 @@ class EINTRBaseTest(unittest.TestCase):
# Issue #25277: Use faulthandler to try to debug a hang on FreeBSD
if hasattr(faulthandler, 'dump_traceback_later'):
faulthandler.dump_traceback_later(10 * 60, exit=True)
faulthandler.dump_traceback_later(10 * 60, exit=True,
file=sys.__stderr__)
@classmethod
def stop_alarm(cls):
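The new file=sys.__stderr__ argument matters because of the output buffering introduced elsewhere in this commit: while a test runs under the new result class, sys.stderr is swapped for a StringIO, so a traceback that faulthandler dumps ten minutes later could land in a capture buffer that is about to be discarded. sys.__stderr__ always refers to the original stream. A minimal sketch of the difference (illustrative only):

    import io
    import sys

    sys.stderr = io.StringIO()                   # what a buffering test runner does
    print('may vanish', file=sys.stderr)         # goes into the capture buffer
    print('always shown', file=sys.__stderr__)   # reaches the real console
    sys.stderr = sys.__stderr__                  # restore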

View File

@@ -268,6 +268,10 @@ def _create_parser():
help='if a test file alters the environment, mark '
'the test as failed')
group.add_argument('--junit-xml', dest='xmlpath', metavar='FILENAME',
help='writes JUnit-style XML results to the specified '
'file')
return parser
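With the option in place, a run such as the following illustrative invocation (not taken from the commit) writes all suites into one file:

    ./python -m test test_os test_sys --junit-xml=test-results.xml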

View File

@@ -100,8 +100,11 @@ class Regrtest:
self.next_single_test = None
self.next_single_filename = None
# used by --junit-xml
self.testsuite_xml = None
def accumulate_result(self, test, result):
ok, test_time = result
ok, test_time, xml_data = result
if ok not in (CHILD_ERROR, INTERRUPTED):
self.test_times.append((test_time, test))
if ok == PASSED:
@@ -118,6 +121,15 @@ class Regrtest:
elif ok != INTERRUPTED:
raise ValueError("invalid test result: %r" % ok)
if xml_data:
import xml.etree.ElementTree as ET
for e in xml_data:
try:
self.testsuite_xml.append(ET.fromstring(e))
except ET.ParseError:
print(xml_data, file=sys.__stderr__)
raise
def display_progress(self, test_index, test):
if self.ns.quiet:
return
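The ET.fromstring() call above is the receiving end of a serialization round trip: under -j, worker processes report results as JSON, which cannot carry ElementTree objects, so runtest() (further down) flattens each testsuite element to a string and the main process rebuilds it here. Roughly, as a sketch:

    import xml.etree.ElementTree as ET

    suite = ET.Element('testsuite', tests='1')
    wire = ET.tostring(suite).decode('us-ascii')   # worker side, see runtest.py
    rebuilt = ET.fromstring(wire)                  # main process, as above
    assert rebuilt.get('tests') == '1'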
@@ -164,6 +176,9 @@ class Regrtest:
file=sys.stderr)
ns.findleaks = False
if ns.xmlpath:
support.junit_xml_list = self.testsuite_xml = []
# Strip .py extensions.
removepy(ns.args)
@@ -384,7 +399,7 @@ class Regrtest:
result = runtest(self.ns, test)
except KeyboardInterrupt:
self.interrupted = True
self.accumulate_result(test, (INTERRUPTED, None))
self.accumulate_result(test, (INTERRUPTED, None, None))
break
else:
self.accumulate_result(test, result)
@@ -508,6 +523,31 @@ class Regrtest:
if self.ns.runleaks:
os.system("leaks %d" % os.getpid())
def save_xml_result(self):
if not self.ns.xmlpath and not self.testsuite_xml:
return
import xml.etree.ElementTree as ET
root = ET.Element("testsuites")
# Manually count the totals for the overall summary
totals = {'tests': 0, 'errors': 0, 'failures': 0}
for suite in self.testsuite_xml:
root.append(suite)
for k in totals:
try:
totals[k] += int(suite.get(k, 0))
except ValueError:
pass
for k, v in totals.items():
root.set(k, str(v))
xmlpath = os.path.join(support.SAVEDCWD, self.ns.xmlpath)
with open(xmlpath, 'wb') as f:
for s in ET.tostringlist(root):
f.write(s)
def main(self, tests=None, **kwargs):
global TEMPDIR
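Putting the pieces together, save_xml_result() writes a single document whose <testsuites> root carries the summed counters. With invented values, the output has roughly this shape:

    <testsuites tests="2" errors="0" failures="1">
      <testsuite start="2018-09-18 16:10:26.000000" tests="2" errors="0" failures="1">
        <testcase name="test.test_example.ExampleTests.test_ok"
                  status="run" result="completed" time="0.001000" />
        <testcase name="test.test_example.ExampleTests.test_bad"
                  status="run" result="completed" time="0.002000">
          <failure type="AssertionError" message="...">...</failure>
        </testcase>
      </testsuite>
    </testsuites>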
@@ -570,6 +610,9 @@ class Regrtest:
self.rerun_failed_tests()
self.finalize()
self.save_xml_result()
if self.bad:
sys.exit(2)
if self.interrupted:

View File

@@ -85,8 +85,8 @@ def runtest(ns, test):
ns -- regrtest namespace of options
test -- the name of the test
Returns the tuple (result, test_time), where result is one of the
constants:
Returns the tuple (result, test_time, xml_data), where result is one
of the constants:
INTERRUPTED KeyboardInterrupt when run under -j
RESOURCE_DENIED test skipped because resource denied
@@ -94,6 +94,9 @@ def runtest(ns, test):
ENV_CHANGED test failed because it changed the execution environment
FAILED test failed
PASSED test passed
If ns.xmlpath is not None, xml_data is a list containing each
generated testsuite element.
"""
output_on_failure = ns.verbose3
@@ -106,22 +109,13 @@ def runtest(ns, test):
# reset the environment_altered flag to detect if a test altered
# the environment
support.environment_altered = False
support.junit_xml_list = xml_list = [] if ns.xmlpath else None
if ns.failfast:
support.failfast = True
if output_on_failure:
support.verbose = True
# Reuse the same instance to all calls to runtest(). Some
# tests keep a reference to sys.stdout or sys.stderr
# (eg. test_argparse).
if runtest.stringio is None:
stream = io.StringIO()
runtest.stringio = stream
else:
stream = runtest.stringio
stream.seek(0)
stream.truncate()
stream = io.StringIO()
orig_stdout = sys.stdout
orig_stderr = sys.stderr
try:
@@ -138,12 +132,18 @@ def runtest(ns, test):
else:
support.verbose = ns.verbose # Tell tests to be moderately quiet
result = runtest_inner(ns, test, display_failure=not ns.verbose)
return result
if xml_list:
import xml.etree.ElementTree as ET
xml_data = [ET.tostring(x).decode('us-ascii') for x in xml_list]
else:
xml_data = None
return result + (xml_data,)
finally:
if use_timeout:
faulthandler.cancel_dump_traceback_later()
cleanup_test_droppings(test, ns.verbose)
runtest.stringio = None
support.junit_xml_list = None
def post_test_cleanup():
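Every caller of runtest() now receives a uniform triple. A hedged sketch of the consuming side, with ns standing for the usual regrtest namespace:

    ok, test_time, xml_data = runtest(ns, 'test_os')
    if xml_data is not None:
        # one serialized <testsuite> string per suite the test file produced
        for suite_text in xml_data:
            ...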

View File

@@ -67,7 +67,7 @@ def run_tests_worker(worker_args):
try:
result = runtest(ns, testname)
except KeyboardInterrupt:
result = INTERRUPTED, ''
result = INTERRUPTED, '', None
except BaseException as e:
traceback.print_exc()
result = CHILD_ERROR, str(e)
@@ -122,7 +122,7 @@ class MultiprocessThread(threading.Thread):
self.current_test = None
if retcode != 0:
result = (CHILD_ERROR, "Exit code %s" % retcode)
result = (CHILD_ERROR, "Exit code %s" % retcode, None)
self.output.put((test, stdout.rstrip(), stderr.rstrip(),
result))
return False
@@ -133,6 +133,7 @@ class MultiprocessThread(threading.Thread):
return True
result = json.loads(result)
assert len(result) == 3, f"Invalid result tuple: {result!r}"
self.output.put((test, stdout.rstrip(), stderr.rstrip(),
result))
return False
@@ -195,7 +196,7 @@ def run_tests_multiprocess(regrtest):
regrtest.accumulate_result(test, result)
# Display progress
ok, test_time = result
ok, test_time, xml_data = result
text = format_test_result(test, ok)
if (ok not in (CHILD_ERROR, INTERRUPTED)
and test_time >= PROGRESS_MIN_TIME

View File

@@ -6,6 +6,7 @@ if __name__ != 'test.support':
import asyncio.events
import collections.abc
import contextlib
import datetime
import errno
import faulthandler
import fnmatch
@@ -13,6 +14,7 @@ import functools
import gc
import importlib
import importlib.util
import io
import logging.handlers
import nntplib
import os
@@ -34,6 +36,8 @@ import unittest
import urllib.error
import warnings
from .testresult import get_test_runner
try:
import multiprocessing.process
except ImportError:
@@ -295,6 +299,7 @@ use_resources = None # Flag set to [] by regrtest.py
max_memuse = 0 # Disable bigmem tests (they will still be run with
# small sizes, to make sure they work.)
real_max_memuse = 0
junit_xml_list = None # list of testsuite XML elements
failfast = False
# _original_stdout is meant to hold stdout at the time regrtest began.
@@ -1891,13 +1896,16 @@ def _filter_suite(suite, pred):
def _run_suite(suite):
"""Run tests from a unittest.TestSuite-derived class."""
if verbose:
runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
failfast=failfast)
else:
runner = BasicTestRunner()
runner = get_test_runner(sys.stdout, verbosity=verbose)
# TODO: Remove this before merging (here for easy comparison with old impl)
#runner = unittest.TextTestRunner(sys.stdout, verbosity=2, failfast=failfast)
result = runner.run(suite)
if junit_xml_list is not None:
junit_xml_list.append(result.get_xml_element())
if not result.wasSuccessful():
if len(result.errors) == 1 and not result.failures:
err = result.errors[0][1]
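This is the single collection point: every test module executed through test.support ends up in _run_suite(), so appending result.get_xml_element() here is what populates the junit_xml_list that regrtest serializes above. A hedged sketch of the same flow outside regrtest (SomeTests is a stand-in test case, not part of the commit):

    import sys
    import unittest
    import test.support as support
    from test.support.testresult import get_test_runner

    class SomeTests(unittest.TestCase):
        def test_ok(self):
            self.assertTrue(True)

    support.junit_xml_list = collected = []
    suite = unittest.TestLoader().loadTestsFromTestCase(SomeTests)
    result = get_test_runner(sys.stdout, verbosity=0).run(suite)
    collected.append(result.get_xml_element())   # what _run_suite() does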

View File

@@ -0,0 +1,201 @@
'''Test runner and result class for the regression test suite.
'''
import functools
import io
import sys
import time
import traceback
import unittest
import xml.etree.ElementTree as ET
from datetime import datetime
class RegressionTestResult(unittest.TextTestResult):
separator1 = '=' * 70 + '\n'
separator2 = '-' * 70 + '\n'
def __init__(self, stream, descriptions, verbosity):
super().__init__(stream=stream, descriptions=descriptions, verbosity=0)
self.buffer = True
self.__suite = ET.Element('testsuite')
self.__suite.set('start', datetime.utcnow().isoformat(' '))
self.__e = None
self.__start_time = None
self.__results = []
self.__verbose = bool(verbosity)
@classmethod
def __getId(cls, test):
try:
test_id = test.id
except AttributeError:
return str(test)
try:
return test_id()
except TypeError:
return str(test_id)
return repr(test)
def startTest(self, test):
super().startTest(test)
self.__e = e = ET.SubElement(self.__suite, 'testcase')
self.__start_time = time.perf_counter()
if self.__verbose:
self.stream.write(f'{self.getDescription(test)} ... ')
self.stream.flush()
def _add_result(self, test, capture=False, **args):
e = self.__e
self.__e = None
if e is None:
return
e.set('name', args.pop('name', self.__getId(test)))
e.set('status', args.pop('status', 'run'))
e.set('result', args.pop('result', 'completed'))
if self.__start_time:
e.set('time', f'{time.perf_counter() - self.__start_time:0.6f}')
if capture:
stdout = self._stdout_buffer.getvalue().rstrip()
ET.SubElement(e, 'system-out').text = stdout
stderr = self._stderr_buffer.getvalue().rstrip()
ET.SubElement(e, 'system-err').text = stderr
for k, v in args.items():
if not k or not v:
continue
e2 = ET.SubElement(e, k)
if hasattr(v, 'items'):
for k2, v2 in v.items():
if k2:
e2.set(k2, str(v2))
else:
e2.text = str(v2)
else:
e2.text = str(v)
def __write(self, c, word):
if self.__verbose:
self.stream.write(f'{word}\n')
@classmethod
def __makeErrorDict(cls, err_type, err_value, err_tb):
if isinstance(err_type, type):
if err_type.__module__ == 'builtins':
typename = err_type.__name__
else:
typename = f'{err_type.__module__}.{err_type.__name__}'
else:
typename = repr(err_type)
msg = traceback.format_exception(err_type, err_value, None)
tb = traceback.format_exception(err_type, err_value, err_tb)
return {
'type': typename,
'message': ''.join(msg),
'': ''.join(tb),
}
def addError(self, test, err):
self._add_result(test, True, error=self.__makeErrorDict(*err))
super().addError(test, err)
self.__write('E', 'ERROR')
def addExpectedFailure(self, test, err):
self._add_result(test, True, output=self.__makeErrorDict(*err))
super().addExpectedFailure(test, err)
self.__write('x', 'expected failure')
def addFailure(self, test, err):
self._add_result(test, True, failure=self.__makeErrorDict(*err))
super().addFailure(test, err)
self.__write('F', 'FAIL')
def addSkip(self, test, reason):
self._add_result(test, skipped=reason)
super().addSkip(test, reason)
self.__write('S', f'skipped {reason!r}')
def addSuccess(self, test):
self._add_result(test)
super().addSuccess(test)
self.__write('.', 'ok')
def addUnexpectedSuccess(self, test):
self._add_result(test, outcome='UNEXPECTED_SUCCESS')
super().addUnexpectedSuccess(test)
self.__write('u', 'unexpected success')
def printErrors(self):
if self.__verbose:
self.stream.write('\n')
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavor, errors):
for test, err in errors:
self.stream.write(self.separator1)
self.stream.write(f'{flavor}: {self.getDescription(test)}\n')
self.stream.write(self.separator2)
self.stream.write('%s\n' % err)
def get_xml_element(self):
e = self.__suite
e.set('tests', str(self.testsRun))
e.set('errors', str(len(self.errors)))
e.set('failures', str(len(self.failures)))
return e
class QuietRegressionTestRunner:
def __init__(self, stream):
self.result = RegressionTestResult(stream, None, 0)
def run(self, test):
test(self.result)
return self.result
def get_test_runner_class(verbosity):
if verbosity:
return functools.partial(unittest.TextTestRunner,
resultclass=RegressionTestResult,
buffer=True,
verbosity=verbosity)
return QuietRegressionTestRunner
def get_test_runner(stream, verbosity):
return get_test_runner_class(verbosity)(stream)
if __name__ == '__main__':
class TestTests(unittest.TestCase):
def test_pass(self):
pass
def test_pass_slow(self):
time.sleep(1.0)
def test_fail(self):
print('stdout', file=sys.stdout)
print('stderr', file=sys.stderr)
self.fail('failure message')
def test_error(self):
print('stdout', file=sys.stdout)
print('stderr', file=sys.stderr)
raise RuntimeError('error message')
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestTests))
stream = io.StringIO()
runner_cls = get_test_runner_class(sum(a == '-v' for a in sys.argv))
runner = runner_cls(sys.stdout)
result = runner.run(suite)
print('Output:', stream.getvalue())
print('XML: ', end='')
for s in ET.tostringlist(result.get_xml_element()):
print(s.decode(), end='')
print()
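One subtlety worth calling out: the empty-string key returned by __makeErrorDict() is deliberate. In _add_result(), dict-valued entries become attributes on the child element, except the '' key, which becomes the element's text. A failed test therefore renders roughly as (values invented):

    <error type="RuntimeError" message="RuntimeError: error message">
    Traceback (most recent call last):
      ...
    RuntimeError: error message
    </error>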

View File

@@ -1459,6 +1459,16 @@ class TestFileTypeRepr(TestCase):
type = argparse.FileType('r', 1, errors='replace')
self.assertEqual("FileType('r', 1, errors='replace')", repr(type))
class StdStreamComparer:
def __init__(self, attr):
self.attr = attr
def __eq__(self, other):
return other == getattr(sys, self.attr)
eq_stdin = StdStreamComparer('stdin')
eq_stdout = StdStreamComparer('stdout')
eq_stderr = StdStreamComparer('stderr')
class RFile(object):
seen = {}
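StdStreamComparer exists because this commit's buffered runner replaces sys.stdout and sys.stderr while tests run: an expected Namespace built at module level would otherwise compare against a stream object that is no longer sys.stdout by the time the test executes. The comparer defers the getattr(sys, ...) lookup to comparison time. A quick sketch of the behavior it rescues (illustrative):

    import io
    import sys

    comparer = StdStreamComparer('stdout')   # as defined above
    real = sys.stdout
    sys.stdout = io.StringIO()               # what a buffering runner does
    assert comparer == sys.stdout            # still true: looked up lazily
    sys.stdout = real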
@@ -1497,7 +1507,7 @@ class TestFileTypeR(TempDirMixin, ParserTestCase):
('foo', NS(x=None, spam=RFile('foo'))),
('-x foo bar', NS(x=RFile('foo'), spam=RFile('bar'))),
('bar -x foo', NS(x=RFile('foo'), spam=RFile('bar'))),
('-x - -', NS(x=sys.stdin, spam=sys.stdin)),
('-x - -', NS(x=eq_stdin, spam=eq_stdin)),
('readonly', NS(x=None, spam=RFile('readonly'))),
]
@@ -1537,7 +1547,7 @@ class TestFileTypeRB(TempDirMixin, ParserTestCase):
('foo', NS(x=None, spam=RFile('foo'))),
('-x foo bar', NS(x=RFile('foo'), spam=RFile('bar'))),
('bar -x foo', NS(x=RFile('foo'), spam=RFile('bar'))),
('-x - -', NS(x=sys.stdin, spam=sys.stdin)),
('-x - -', NS(x=eq_stdin, spam=eq_stdin)),
]
@@ -1576,7 +1586,7 @@ class TestFileTypeW(TempDirMixin, ParserTestCase):
('foo', NS(x=None, spam=WFile('foo'))),
('-x foo bar', NS(x=WFile('foo'), spam=WFile('bar'))),
('bar -x foo', NS(x=WFile('foo'), spam=WFile('bar'))),
('-x - -', NS(x=sys.stdout, spam=sys.stdout)),
('-x - -', NS(x=eq_stdout, spam=eq_stdout)),
]
@@ -1591,7 +1601,7 @@ class TestFileTypeWB(TempDirMixin, ParserTestCase):
('foo', NS(x=None, spam=WFile('foo'))),
('-x foo bar', NS(x=WFile('foo'), spam=WFile('bar'))),
('bar -x foo', NS(x=WFile('foo'), spam=WFile('bar'))),
('-x - -', NS(x=sys.stdout, spam=sys.stdout)),
('-x - -', NS(x=eq_stdout, spam=eq_stdout)),
]

View File

@@ -0,0 +1 @@
Add JUnit XML output for regression tests and update Azure DevOps builds.