def dump_junit_report(suite_res: 'MiniHwResult', artifacts: Path):
    """Write a JUnit XML report for *suite_res* into the *artifacts* directory.

    Emits one <testsuite> per task and one <testcase> per case. Does nothing
    (beyond a warning) when the optional junitparser dependency is missing.

    Args:
        suite_res: Aggregated run result with `.tasks`, each holding `.cases`.
        artifacts: Directory in which 'junit_report.xml' is created.
    """
    try:
        import junitparser
    except ImportError:
        LOG.warning("No JUNIT generated - junit parser is not installed")
        return
    reportpath = artifacts / 'junit_report.xml'
    LOG.info(f"[REPORT] Generating JUNIT report: {reportpath}")
    suites = junitparser.JUnitXml()
    for task in suite_res.tasks:
        jsuite = junitparser.TestSuite(name=task.task.name)
        for case in task.cases:
            jcase = junitparser.TestCase(
                name=case.case.name,
                classname='/'.join(case.case.namespace),
                # elapsed appears to be in microseconds -- TODO confirm
                time=case.cmd_result.elapsed / 1000000.0 if case.cmd_result else 0)
            if case.is_fail():
                # NOTE(review): `c.is_fail` is not called here while
                # `case.is_fail()` above is -- confirm it is a property,
                # otherwise every check would be treated as failed.
                jcase.result = [
                    junitparser.Failure(c.fail_msg())
                    for c in case.checks.values() if c.is_fail
                ]
                if case.cmd_result:
                    jcase.system_out = str(case.cmd_result.stdout)
                    jcase.system_err = str(case.cmd_result.stderr)
            elif case.is_skip():
                jcase.result = [junitparser.Skipped()]
            jsuite.add_testcase(jcase)
        # BUG FIX: the suite was built but never attached to the report.
        suites.add_testsuite(jsuite)
    # BUG FIX: the report was never written to disk.
    suites.write(str(reportpath))
def dump_junit_report(suite_res: 'SuiteRunResult', artifacts: Path) -> Optional[Path]:
    """Render *suite_res* as a JUnit XML file inside *artifacts*.

    Returns the path of the generated report, or None when the optional
    junitparser dependency is not installed.
    """
    try:
        import junitparser
    except ImportError:
        LOG.warning("No JUNIT generated - junit parser is not installed")
        return None

    report_path = artifacts / 'junit_report.xml'
    LOG.info(f"[REPORT] Generating JUNIT report: {report_path}")

    junit_suites = junitparser.JUnitXml(suite_res.df.name)
    for unit_res in suite_res.units:
        unit_suite = junitparser.TestSuite(name=unit_res.df.name)
        for test_res in unit_res.tests:
            # elapsed is recorded in microseconds; JUnit expects seconds.
            elapsed_s = test_res.cmd_result.elapsed / 1000000.0 if test_res.cmd_result else 0
            junit_case = junitparser.TestCase(
                name=test_res.df.desc,
                classname=test_res.df.unit.name + '/' + test_res.df.name,
                time=elapsed_s)
            unit_suite.add_testcase(junit_case)
            if test_res.kind.is_pass():
                continue
            # Record one <failure> element per failed check.
            failures = []
            for check in test_res.checks:
                failure = junitparser.Failure(check.message)
                failure.text = "\n" + check.fail_msg()
                failures.append(failure)
            junit_case.result = failures
            if test_res.cmd_result:
                junit_case.system_out = str(test_res.cmd_result.stdout)
                junit_case.system_err = str(test_res.cmd_result.stderr)
        junit_suites.add_testsuite(unit_suite)

    junit_suites.write(str(report_path))
    return report_path
def put(self, request, build_number, fmt=None, **kwargs):
    """Accept an uploaded JUnit XML file and record it for *build_number*.

    Returns HTTP 204 on success and HTTP 400 when the XML cannot be parsed.
    """
    uploaded = request.data['file']
    project = JUnitProject.objects.get(slug=self.kwargs['project_slug'])
    # One report row per (project, build); re-uploads reuse the same row.
    report, _ = JUnitReport.objects.get_or_create(
        project=project,
        build_number=build_number,
    )
    try:
        parsed = junitparser.JUnitXml().fromfile(uploaded)
        handle_junit_report(report, parsed)
    except ParseError:
        return Response(status=400)
    return Response(status=204)
def parse_junit_reports(
        path_to_reports: str) -> typing.List[junitparser.TestCase]:
    """Load JUnit XML report(s) from a file or a directory tree.

    Args:
        path_to_reports: Path to a single report file, or a directory that is
            walked recursively for ``*.xml`` report files.

    Returns:
        The flat list of test cases extracted from every report found.

    Raises:
        FileNotFoundError: If *path_to_reports* does not exist.
    """
    if not os.path.exists(path_to_reports):
        raise FileNotFoundError(f"Path '{path_to_reports}', not found")
    ret_xml = ""
    # Return early if the path provided is just a file
    if os.path.isfile(path_to_reports):
        ret_xml = junitparser.JUnitXml.fromfile(path_to_reports)
    elif os.path.isdir(path_to_reports):
        ret_xml = junitparser.JUnitXml()
        for root, _, files in os.walk(path_to_reports):
            # BUG FIX: match the ".xml" extension, not any filename that
            # merely ends in the letters "xml" (e.g. "fooxml").
            for file in [f for f in files if f.endswith(".xml")]:
                ret_xml += junitparser.JUnitXml.fromfile(
                    os.path.join(root, file))
    return convert_junit_to_testcases(ret_xml)
def write_xml(self, test_case, filename):
    """Write *test_case* as '<output_directory>/<filename>.xml'.

    Any report left over from a previous run at the same path is removed
    first. The '.xml'-suffixed file name doubles as the suite name.
    """
    filename = filename + '.xml'
    xml_path = os.path.join(self._output_directory, filename)
    try:
        os.unlink(xml_path)  # drop a stale report, if any
    except FileNotFoundError:
        pass
    report = junitparser.JUnitXml()
    suite = junitparser.TestSuite(filename)  # file name as suite name
    suite.add_testcase(test_case)
    report.add_testsuite(suite)
    report.write(xml_path)
def run_qtbase_tests(qemu: boot_cheribsd.CheriBSDInstance, args: argparse.Namespace):
    """Run the qtbase auto-tests inside the QEMU instance.

    Runs two smoke-test binaries, then the requested test subset, and writes
    a JUnit XML summary to args.junit_xml. Returns True iff no test failed.
    """
    # TODO: also run the non-corelib tests
    results_xml = junitparser.JUnitXml()
    failed_tests = []
    successful_tests = []
    build_dir = Path(args.build_dir)
    start_time = datetime.datetime.utcnow()
    test_subset = Path(args.test_subset)
    tests_root = Path(build_dir, "tests/auto")
    # Reject subsets that would escape tests/auto (e.g. an absolute path).
    assert Path(tests_root, test_subset).is_relative_to(tests_root), \
        "Invalid path " + str(tests_root / test_subset)
    boot_cheribsd.info("Running qtbase tests for ", test_subset)
    # Start with some basic smoketests:
    qemu.checked_run("/build/tests/auto/corelib/tools/qarraydata/tst_qarraydata")
    qemu.checked_run("/build/tests/auto/corelib/global/qtendian/tst_qtendian")
    run_subdir(qemu, Path(tests_root, test_subset), results_xml,
               build_dir=build_dir, successful_tests=successful_tests,
               failed_tests=failed_tests)
    results_xml.time = (datetime.datetime.utcnow() - start_time).total_seconds()
    results_xml.update_statistics()
    boot_cheribsd.info("JUnit results:", results_xml)
    total_tests = len(successful_tests) + len(failed_tests)
    boot_cheribsd.info("Ran " + str(total_tests), " tests in ",
                       (datetime.datetime.utcnow() - start_time))
    if failed_tests:
        boot_cheribsd.failure("The following ", len(failed_tests),
                              " tests failed:\n\t",
                              "\n\t".join(x.name for x in failed_tests),
                              exit=False)
    # Finally, write the Junit XML file:
    if not boot_cheribsd.PRETEND:
        results_xml.write(args.junit_xml, pretty=True)
        boot_cheribsd.info("Wrote Junit results to ", args.junit_xml)
    return not failed_tests
def get_consolidated_junitxml(result_dir_path, test_name_postfix=''):
    """Merge every '*test*.xml' report under *result_dir_path* into one JUnitXml.

    Handles both root layouts: a <testsuites> container (each contained suite
    is added individually) and a single <testsuite> root (added as-is). An
    optional postfix is appended to every test name unless it is None.
    """
    merged = junitparser.JUnitXml()
    for xml_file in sorted(result_dir_path.glob('*test*.xml')):
        # Mocha junit reporter has testsuites which is incompatible with junitparser
        _fix_testsuites_if_exist(xml_file)
        parsed = junitparser.JUnitXml.fromfile(xml_file)
        contained_suites = list(parsed.testsuites())
        if contained_suites:
            # Root held multiple suites: merge them one by one.
            for suite in contained_suites:
                merged.add_testsuite(suite)
        else:
            # Root is itself a single suite.
            merged.add_testsuite(parsed)
    if test_name_postfix is not None:
        _append_testname_postfix(merged, test_name_postfix)
    return merged
def _to_junit(result, platform=""):
    """Convert result to junit format.

    Args:
        result: Mapping of target name -> {case name -> case result dict};
            each case result dict has a "status" key and optional "details".
        platform: Optional platform label prefixed to each suite name.

    Returns:
        A junit.JUnitXml report with one suite per target.
    """
    report = junit.JUnitXml()
    if not result:
        return report
    # Suite names become "platform.target" when a platform is given.
    platform += platform and "."
    for target, target_result in result.items():
        suite = junit.TestSuite(platform + target)
        for case_name, case_result in target_result.items():
            case = junit.TestCase(case_name)
            if case_result["status"] == "FAIL":
                case.result = junit.Failure(case_result.get("details", ""))
            if case_result["status"] == "IGNORE":
                case.result = junit.Skipped(case_result.get("details", ""))
            # BUG FIX: cases were created but never attached to the suite.
            suite.add_testcase(case)
        # BUG FIX: suites were never attached to the report.
        report.add_testsuite(suite)
    # BUG FIX: the non-empty path returned None.
    return report
def createJunitTestResults(boardToResults, fileName):
    """Create junit xml test result.

    Args:
        boardToResults(dict[str:obj(OtaTestResult)]): Dictionary of the board
            name to it's OtaTestResult.
        fileName: The name of the junit test file to create.
    """
    report = junit.JUnitXml()
    for board, testResults in boardToResults.items():
        # One suite per board, grouped under the OTA end-to-end umbrella.
        suite = junit.TestSuite(board + '.OTAEndToEndTests')
        for otaTestResult in testResults:
            case = junit.TestCase(otaTestResult.testName)
            if otaTestResult.result == OtaTestResult.FAIL:
                case.result = junit.Failure(otaTestResult.summary)
            elif otaTestResult.result == OtaTestResult.ERROR:
                # Errors are reported as skipped cases in the XML output.
                case.result = junit.Skipped(otaTestResult.summary)
            suite.add_testcase(case)
        report.add_testsuite(suite)
    report.update_statistics()
    report.write(fileName, pretty=True)
# NOTE(review): fragment of a larger script -- it begins with a `continue`
# belonging to a loop over modules whose header is outside this view, so it
# cannot be safely restructured here. The visible tail registers per-module
# test suites, creates waveform directories, runs the tests on a thread pool,
# collects the cases into their suites, and writes everything to junit.xml.
# skip if not requested continue # add module test suite in global list if it does not exist if module["name"] not in test_suites: test_suites[module["name"]] = junit.TestSuite(module["name"]) arg_vector.append(module) module_names.append(module["name"]) # contruct waveform directories for module_name in module_names: if not os.path.exists("../waveforms/" + module_name): os.makedirs("../waveforms/" + module_name) pool = ThreadPool() results = pool.map(run_tests, arg_vector) for test_cases, module_name in zip(results, module_names): for test_case in test_cases: test_suites[module_name].add_testcase(test_case) eprint("all tests run") # use junit XML as output format junit_xml = junit.JUnitXml() for name in test_suites: junit_xml.add_testsuite(test_suites[name]) eprint("writing JUnit XML") junit_xml.write("junit.xml")
def _generate_junit_summary(
        self, ctx: Context,
        results: Dict[str, List[Tuple[Decoder, TestSuite]]]) -> None:
    """Write a JUnit XML summary of *results* to ctx.summary_output."""
    # pylint: disable=import-outside-toplevel
    try:
        import junitparser as junitp  # type: ignore
    except ImportError:
        sys.exit(
            "error: junitparser required to use JUnit format. Please install with pip install junitparser."
        )

    def _vector_errors(vector: TestVector) -> List[junitp.Error]:
        # Map each non-success outcome onto the junit element type it yields.
        result_to_element = {
            TestVectorResult.ERROR: junitp.Error,
            TestVectorResult.FAIL: junitp.Failure,
            TestVectorResult.NOT_RUN: junitp.Skipped,
            TestVectorResult.TIMEOUT: junitp.Failure,
        }
        elements = []
        for err in vector.errors:
            element = result_to_element[vector.test_result](
                message=f"FAIL: {err[0]}")
            element.text = "\n".join(err[1:])
            elements.append(element)
        return elements

    def _suites_for(
            item: Tuple[str, List[Tuple[Decoder, TestSuite]]]
    ) -> junitp.TestSuite:
        # Build one junit suite per (decoder, result) pair of this test suite.
        suite_name, decoder_results = item
        built = []
        for decoder, run_result in decoder_results:
            timeouts = 0
            jsuite = junitp.TestSuite(suite_name)
            jsuite.add_property("decoder", decoder.name)
            for vector in run_result.test_vectors.values():
                jcase = junitp.TestCase(vector.name)
                if vector.test_result not in [
                        TestVectorResult.SUCCESS,
                        TestVectorResult.REFERENCE,
                ]:
                    jcase.result = _vector_errors(vector)
                jsuite.add_testcase(jcase)
                # Serial runs (jobs == 1) accumulate the timeout budget per
                # timed-out vector; it is subtracted from the suite time below.
                if vector.test_result is TestVectorResult.TIMEOUT and ctx.jobs == 1:
                    timeouts += ctx.timeout
            jsuite.time = round(run_result.time_taken - timeouts, 3)
            built.append(jsuite)
        return built

    xml = junitp.JUnitXml()
    for jsuite in (suite for item in results.items()
                   for suite in _suites_for(item)):
        xml.add_testsuite(jsuite)
    if ctx.summary_output:
        with open(ctx.summary_output, "w+", encoding="utf-8") as summary_file:
            xml.write(summary_file.name, pretty=True)
async def run_testrig(args: argparse.Namespace, remaining_args: list, output_dir: str):
    """Drive runTestRIG.py over a directory of traces and collect JUnit results.

    Spawns runTestRIG.py as a subprocess, parses its stdout line by line for
    test-boundary / pass / fail / skip marker lines, and writes the aggregated
    JUnit XML to args.xunit_output (unless that path is /dev/null).
    """
    trace_rootdir = Path(args.trace_dir)
    if not trace_rootdir.is_dir():
        sys.exit(str(trace_rootdir) + " does not exist!")
    xunit_output = Path(args.xunit_output)
    if not xunit_output.parent.is_dir():
        sys.exit("invalid xunit output file: " + str(xunit_output))
    global DEBUG
    if args.debug:
        DEBUG = True
    command = [
        str(Path(__file__).parent / "runTestRIG.py"),
        "--trace-dir", str(trace_rootdir),
        "--save-dir", output_dir,
        "-a", args.reference_impl,
        "-b", args.test_impl,
        "--no-shrink",
    ] + remaining_args + ["-v", "1"]  # verbosity 1 required for output
    if args.timeout:
        command += ["--timeout", str(args.timeout)]
    info("Running '", " ".join(shlex.quote(s) for s in command), "'", sep="")
    # stderr is merged into stdout so a single stream can be parsed below.
    process = await asyncio.create_subprocess_exec(
        command[0], *command[1:],
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT)
    xml = junitparser.JUnitXml(name="Regression test " + args.test_impl +
                               " vs " + args.reference_impl)
    testsuite = junitparser.TestSuite(name=args.trace_dir)
    xml.add_testsuite(testsuite)
    # Parser state: the test case currently being read, its captured output,
    # and the moment it started (used to compute the per-case duration).
    current_test = None
    current_output = []  # type: typing.List[str]
    starttime = datetime.datetime.utcnow()

    def add_test_result(junit_result=None):
        # Finalize current_test (result, duration, captured output), append it
        # to the suite, then reset the parser state for the next test.
        nonlocal current_test
        nonlocal current_output
        if current_test is None:
            current_test = junitparser.TestCase("UNKNOWN TEST CASE")
        if junit_result is not None:
            current_test.result = junit_result
        current_test.time = (datetime.datetime.utcnow() - starttime).total_seconds()
        if current_output:
            current_test.system_out = "".join(current_output)
        testsuite.add_testcase(current_test)
        current_test = None
        current_output = []

    async def fatal_error(error_message):
        # Kill the subprocess, drain its remaining output into the current
        # test's captured output, and record the failure as a JUnit <error>.
        process.terminate()
        process.kill()
        remaining_stdout = await process.stdout.read()
        debug("Remaining output: ", remaining_stdout)
        current_output.append(remaining_stdout.decode("utf-8"))
        add_test_result(junit_result=junitparser.Error(message=error_message))
        error(error_message)

    while True:
        # No output for N seconds means something went wrong...
        output = b""
        try:
            output = await asyncio.wait_for(process.stdout.readline(), 60)
        except asyncio.TimeoutError:
            # A progress line may end in '\r' instead of '\n'; allow a short
            # grace period for that before declaring a hang.
            try:
                output = await asyncio.wait_for(
                    process.stdout.readuntil(separator=b'\r'), 10)
            except asyncio.TimeoutError:
                await fatal_error("TIMEOUT!")
                break
        if not output and process.stdout.at_eof():
            info("EOF")
            break
        debug("==>TR: \x1b[1;33m", output.decode("utf-8").rstrip(), "\x1b[0m", sep="")
        if output.startswith(b'Reading trace from '):
            # start of testcase
            assert current_test is None, "Reading new test before last one finished?"
            starttime = datetime.datetime.utcnow()
            trace_file = output[len(b'Reading trace from '):].rstrip().decode(
                "utf-8")
            relpath = Path(trace_file).relative_to(trace_rootdir)
            debug("Starting test", relpath, " from", trace_file)
            info("==== Testing:", relpath, "... ", end="\n" if DEBUG else "")
            current_test = junitparser.TestCase(str(relpath))
            current_output = []
            continue
        if output.startswith(b'+++ OK, passed'):
            # End of testcase; a Failure/Skipped result may already have been
            # attached by the marker handling further below.
            if current_test.result is not None:
                if isinstance(current_test.result, junitparser.Failure):
                    print_coloured("ERROR, but TestRIG reported OK",
                                   colour=AnsiColour.red)
                    continue
                assert isinstance(
                    current_test.result,
                    junitparser.Skipped), "unexpected test result"
                print_coloured("SKIPPED", colour=AnsiColour.yellow)
            else:
                print_coloured("OK", colour=AnsiColour.green)
            assert output == b"+++ OK, passed 1 test.\n", b"output format changed? " + output
            assert current_test is not None
            add_test_result(current_test.result)
            continue
        elif output == b'Failure.\n':
            assert isinstance(
                current_test.result,
                junitparser.Failure), "Didn't see ''*** Failed!' message?"
            add_test_result(current_test.result)
            error("FAILED!")
        # Not a marker message -> add to current test output
        if current_output is not None:
            current_output.append(output.decode("utf-8"))
        # Check if test failed:
        if output.startswith(b"*** Failed!"):
            assert current_test is not None
            current_test.result = junitparser.Failure(
                message=output.strip().decode("utf-8"))
        elif output.startswith(b"Error:"):
            error(output.decode("utf-8"))
            assert current_test is not None
            current_test.result = junitparser.Failure(
                message=output.strip().decode("utf-8"))
            if output.startswith(
                    b"Error: implementation A timeout.") or output.startswith(
                        b"Error: implementation B timeout."):
                # NOTE(review): result was just set to a Failure above, so
                # this None check can never fire -- confirm intended order.
                if current_test.result is None:
                    current_test.result = junitparser.Error(
                        message=output.strip().decode("utf-8"))
                continue
            else:
                # Any other "Error:" line is unrecoverable.
                await fatal_error("Unknown error: " + output.decode("utf-8"))
                break
        elif output.startswith(b"Warning:"):
            if output.startswith(
                    b"Warning: reporting success since implementations not running"
            ):
                debug("implementations not running!")
                if current_test.result is None:
                    current_test.result = junitparser.Skipped(
                        message=output.strip().decode("utf-8"))
                continue
            await fatal_error("Unknown warning!")
            break
    await process.wait()
    xml.update_statistics()
    print("SUMMARY:")
    print("Total tests:", xml.tests)
    print("Successful: ", xml.tests - xml.failures - xml.errors - xml.skipped)
    print("Failed: ", xml.failures)
    print("ERRORS: ", xml.errors)
    print(xml)
    if xml.failures != 0:
        print("Minimized cases: ")
        subprocess.check_call(["find", str(output_dir)], cwd=str(output_dir))
    if str(xunit_output) != "/dev/null":
        xml.write(filepath=str(xunit_output), pretty=True)