def log_errord(test_name, stderr, stdout):
    """Record a test error as a JUnit TestCase (when xml_out is set) or
    print it to the console.

    Relies on module-level names: xml_out, executable, test_cases, TestCase.
    """
    if xml_out:
        # elapsed_sec is 0: no timing information is available here
        tc = TestCase(test_name, executable, 0, stdout, stderr)
        tc.add_error_info(message="Test Error\n", output=stdout)
        test_cases.append(tc)
    else:
        # BUG FIX: this was a Python 2 print *statement*, a syntax error on
        # Python 3. The single-argument parenthesized form works on both.
        print("Test %s failed:\nstderr:\n %s\nstdout:\n%s\n----------\n"
              % (test_name, stderr, stdout))
def write_test_results(results, args):
    """Write NMOS test results to a JUnit XML file and return an exit code.

    results -- dict with "name", "base_url" and a "result" list of
               test-result objects exposing name/state/detail/
               elapsed_time/timestamp.
    args    -- parsed CLI args providing `ignore` (names to skip) and
               `output` (destination file path).

    Returns the worst ExitCodes value encountered.
    """
    exit_code = ExitCodes.OK
    test_cases = []
    for test_result in results["result"]:
        test_case = TestCase(test_result.name, elapsed_sec=test_result.elapsed_time,
                             timestamp=test_result.timestamp)
        # Ignored tests and non-executable states are reported as "skipped"
        if test_result.name in args.ignore or test_result.state in [TestStates.DISABLED,
                                                                    TestStates.UNCLEAR,
                                                                    TestStates.MANUAL,
                                                                    TestStates.NA,
                                                                    TestStates.OPTIONAL]:
            test_case.add_skipped_info(test_result.detail)
        elif test_result.state in [TestStates.WARNING, TestStates.FAIL]:
            test_case.add_failure_info(test_result.detail, failure_type=str(test_result.state))
            # NOTE(review): max() assumes ExitCodes order OK < WARNING < FAIL -- confirm
            if test_result.state == TestStates.FAIL:
                exit_code = max(exit_code, ExitCodes.FAIL)
            elif test_result.state == TestStates.WARNING:
                exit_code = max(exit_code, ExitCodes.WARNING)
        elif test_result.state != TestStates.PASS:
            # Any other non-pass state is treated as a test error
            test_case.add_error_info(test_result.detail, error_type=str(test_result.state))
        test_cases.append(test_case)
    ts = TestSuite(results["name"] + ": " + results["base_url"], test_cases)
    with open(args.output, "w") as f:
        TestSuite.to_file(f, [ts], prettyprint=False)
    print(" * Test results written to file: {}".format(args.output))
    return exit_code
def _gen_cases(n_passes, n_fails, n_skips, n_errors):
    """Build synthetic junit TestCase objects: passes, then skips, fails, errors."""
    cases = []
    for idx in range(n_passes):
        cases.append(TestCase(name='TestPassed%s' % idx,
                              classname='generated.xml.test.case.passes',
                              elapsed_sec=rand_duration()))
    for idx in range(n_skips):
        skipped = TestCase(name='TestSkipped%s' % idx,
                           classname='generated.xml.test.case.skips',
                           elapsed_sec=rand_duration())
        skipped.add_skipped_info(message=rand_string('skipped!'))
        cases.append(skipped)
    for idx in range(n_fails):
        failed = TestCase(name='TestFailed%s' % idx,
                          classname='generated.xml.test.case.fails',
                          elapsed_sec=rand_duration())
        failed.add_failure_info(message=rand_string('failure!'))
        cases.append(failed)
    for idx in range(n_errors):
        errored = TestCase(name='TestErrored%s' % idx,
                           classname='generated.xml.test.case.errors',
                           elapsed_sec=rand_duration())
        errored.add_error_info(message=rand_string('error!'))
        cases.append(errored)
    return cases
class JunitReporter:
    """Event handler that turns compiled suite results into a JUnit XML report
    and publishes it via the EventBroker.
    """

    REPORTS_DIRECTORY = "reports"

    def __init__(self):
        # Holds a synthetic "error" TestCase when the suite itself errs,
        # so the failure still appears in the final report.
        self._dummy_test_case = None

    def activate(self):
        """Register this reporter's handlers with the event system."""
        subscribe_event_handlers(self)

    def on_suite_erred(self, suite_name, exception=None, **kwargs):
        """Record a suite-level error as a dummy TestCase (with traceback, if any)."""
        self._dummy_test_case = TestCase(name=suite_name, status="error")
        if exception:
            self._dummy_test_case.add_error_info(
                message=exception_str(exception),
                output=format_exception(exception))

    def on_suite_results_compiled(self, suite_results, **kwargs):
        """Build the JUnit XML for a finished suite and publish report_created."""
        suite_name = suite_results.suite_name or "NamelessSuite"
        test_cases = convert_tests(suite_results.tests)
        # Append any earlier suite-level error so it is not lost
        if self._dummy_test_case:
            test_cases.append(self._dummy_test_case)
        suite = dependency(TestSuite)(name=infer_package_name() + suite_name,
                                      timestamp=current_time().isoformat(),
                                      test_cases=test_cases)
        xml_report = ElementTree.tostring(
            suite.build_xml_doc(), encoding="utf-8").decode(encoding="utf-8")
        EventBroker.publish(
            event=TestEvent.report_created,
            suite=suite,
            cases=test_cases,
            report_filename=suite_name + ".xml",
            report_content=xml_report,
        )
def main():
    """Generate a dummy JUnit XML results file with randomised outcomes."""
    parser = argparse.ArgumentParser(description='dummy test')
    parser.add_argument('-classes', type=int, default=5, help='number of classes')
    parser.add_argument('-testcases', type=int, default=10, help='number of testcases')
    parser.add_argument('-pass_rate', type=int, default=75, help='pass rate')
    parser.add_argument('-error_rate', type=int, default=20, help='error rate')
    parser.add_argument('-failure_rate', type=int, default=10, help='failure rate')
    parser.add_argument('-skip_rate', type=int, default=10, help='skip rate')
    parser.add_argument('-outputfile', type=str, default='test_results.xml', help='output file')
    parser.add_argument('-print', action='store_true', help='print the test results')
    args = parser.parse_args()
    ts = TestSuite(name='my test suite', hostname=platform.node(), timestamp=datetime.now())
    for i in range(args.classes):
        for j in range(args.testcases):
            tc = TestCase(classname=f"myclass{i}", name=f"mytest{j}",
                          elapsed_sec=random.randint(100, 1000),
                          stdout = "stdout output", stderr = "stderr output")
            # NOTE(review): error/failure/skip info is only attached inside the
            # pass_rate branch, so the rates interact -- confirm this is intended
            if random.randint(0, 100) < args.pass_rate:
                if random.randint(0, 100) < args.error_rate:
                    tc.add_error_info(message=f"error {i} {j}", output="error output message", error_type="ERR1")
                elif random.randint(0, 100) < args.failure_rate:
                    tc.add_failure_info(message=f"failure {i} {j}", output="failure output message", failure_type="FAIL1")
                elif random.randint(0, 100) < args.skip_rate:
                    tc.add_skipped_info(message=f"skipped {i} {j}", output="skipped output message")
            ts.test_cases.append(tc)
    # pretty printing is on by default but can be disabled using prettyprint=False
    if args.print:
        print(TestSuite.to_xml_string([ts]))
    with open(args.outputfile, 'w') as f:
        TestSuite.to_file(f, [ts], prettyprint=True)
def exporter_junit_ioper(self, test_result_ext, test_suite_properties=None):
    """Render interoperability results as a JUnit XML string.

    One TestSuite per platform; each (result, name, scope, description)
    row becomes a TestCase, with FAIL/ERROR/SKIP attached as extra info.
    """
    from junit_xml import TestSuite, TestCase
    suites = []
    for platform in sorted(test_result_ext.keys()):
        # {platform : ['Platform', 'Result', 'Scope', 'Description'])
        cases = []
        for result, name, scope, description in test_result_ext[platform]:
            tc = TestCase(name,
                          'test.ioper.%s.%s.%s' % (platform, name, scope),
                          0,
                          description,
                          '')
            if result == 'FAIL':
                tc.add_failure_info(description, description)
            elif result == 'ERROR':
                tc.add_error_info(description, description)
            elif result == 'SKIP':
                tc.add_skipped_info(description, description)
            cases.append(tc)
        suites.append(TestSuite("test.suite.ioper.%s" % (platform), cases))
    return TestSuite.to_xml_string(suites)
def test_init_utf8():
    """UTF-8 serialization keeps all test-case text intact."""
    case = Case(
        name="Test äöü",
        classname="some.class.name.äöü",
        elapsed_sec=123.345,
        stdout="I am stdöüt!",
        stderr="I am stdärr!",
    )
    case.add_skipped_info(message="Skipped äöü", output="I skippäd!")
    case.add_error_info(message="Skipped error äöü", output="I skippäd with an error!")
    #pylint: disable=unused-variable
    ts, tcs = serialize_and_read(Suite("Test UTF-8", [case]), encoding="utf-8")[0]
    expected_attrs = {
        "name": decode("Test äöü", "utf-8"),
        "classname": decode("some.class.name.äöü", "utf-8"),
        "time": ("%f" % 123.345),
    }
    verify_test_case(
        tcs[0],
        expected_attrs,
        stdout=decode("I am stdöüt!", "utf-8"),
        stderr=decode("I am stdärr!", "utf-8"),
        skipped_message=decode("Skipped äöü", "utf-8"),
        skipped_output=decode("I skippäd!", "utf-8"),
        error_message=decode("Skipped error äöü", "utf-8"),
        error_output=decode("I skippäd with an error!", "utf-8"),
    )
def test_init_error_output():
    """An error added with only output serializes that output."""
    case = Case("Error-Output")
    case.add_error_info(output="I errored!")
    suite = Suite("test", [case])
    #pylint: disable=unused-variable
    ts, tcs = serialize_and_read(suite)[0]
    verify_test_case(tcs[0], {"name": "Error-Output"}, error_output="I errored!")
def test_init_error_message():
    """An error added with only a message serializes that message."""
    case = Case("Error-Message")
    case.add_error_info("error message")
    suite = Suite("test", [case])
    #pylint: disable=unused-variable
    ts, tcs = serialize_and_read(suite)[0]
    verify_test_case(tcs[0], {"name": "Error-Message"}, error_message="error message")
def exporter_junit(test_result_ext, test_suite_properties=None):
    """! Export test results in JUnit XML compliant format
    @details This function will import junit_xml library to perform report conversion
    @return String containing Junit XML formatted test result output
    """
    from junit_xml import TestSuite, TestCase
    suites = []
    for target in sorted(test_result_ext.keys()):
        cases = []
        for test in sorted(test_result_ext[target].keys()):
            res = test_result_ext[target][test]
            tc = TestCase(test,
                          'test.%s.%s' % (target, test),
                          res['elapsed_time'],
                          res['single_test_output'],
                          '')
            # FAIL becomes failure info; any other non-OK verdict becomes an error
            verdict = res['single_test_result']
            if verdict == 'FAIL':
                tc.add_failure_info(verdict, res['single_test_output'])
            elif verdict != 'OK':
                tc.add_error_info(verdict, res['single_test_output'])
            cases.append(tc)
        suites.append(TestSuite("test.suite.%s" % target, cases))
    return TestSuite.to_xml_string(suites)
def test_init_error(self):
    """Error message and output both survive serialization."""
    case = TestCase('Error-Message-and-Output')
    case.add_error_info("error message", "I errored!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(
        self,
        tcs[0],
        {'name': 'Error-Message-and-Output'},
        error_message="error message",
        error_output="I errored!")
def test_init_unicode():
    """Unicode test cases keep their decoded text through serialization."""
    case_name = decode("Test äöü", "utf-8")
    case_classname = decode("some.class.name.äöü", "utf-8")
    case = Case(
        name=case_name,
        classname=case_classname,
        elapsed_sec=123.345,
        stdout=decode("I am stdöüt!", "utf-8"),
        stderr=decode("I am stdärr!", "utf-8"),
    )
    case.add_skipped_info(message=decode("Skipped äöü", "utf-8"),
                          output=decode("I skippäd!", "utf-8"))
    case.add_error_info(message=decode("Skipped error äöü", "utf-8"),
                        output=decode("I skippäd with an error!", "utf-8"))
    ts, tcs = serialize_and_read(Suite("Test Unicode", [case]))[0]
    verify_test_case(
        tcs[0],
        {"name": case_name, "classname": case_classname, "time": ("%f" % 123.345)},
        stdout=decode("I am stdöüt!", "utf-8"),
        stderr=decode("I am stdärr!", "utf-8"),
        skipped_message=decode("Skipped äöü", "utf-8"),
        skipped_output=decode("I skippäd!", "utf-8"),
        error_message=decode("Skipped error äöü", "utf-8"),
        error_output=decode("I skippäd with an error!", "utf-8"),
    )
def test_multiple_errors():
    """Tests multiple errors in one test case"""
    case = Case("Multiple error", allow_multiple_subelements=True)
    case.add_error_info("First error", "First error message")
    _, tcs = serialize_and_read(Suite("test", [case]))[0]
    verify_test_case(tcs[0], {"name": "Multiple error"}, errors=[
        {"message": "First error", "output": "First error message", "type": "error"},
    ])
    case.add_error_info("Second error", "Second error message")
    _, tcs = serialize_and_read(Suite("test", [case]))[0]
    verify_test_case(tcs[0], {"name": "Multiple error"}, errors=[
        {"message": "First error", "output": "First error message", "type": "error"},
        {"message": "Second error", "output": "Second error message", "type": "error"},
    ])
def main():
    """Generate a JUnit-XML style report from a finviz daily CSV.

    Reads the CSV named by -input (defaulting to today's raw_daily_finviz
    file), keeps billion-cap tickers grouped by sector/industry, and writes
    one TestSuite per sector to the -output file.
    """
    parser = argparse.ArgumentParser(description='generate report from finviz daily data')
    parser.add_argument('-input', type=str, help='input file')
    parser.add_argument('-output', type=str, default='daily_report.xml', help='output file')
    args = parser.parse_args()
    if args.input is None:
        filename = '../stock_data/raw_daily_finviz/finviz_' + str(datetime.date.today()) + '.csv'
    else:
        # BUG FIX: `filename` was never assigned when -input was supplied,
        # causing a NameError at read_csv below.
        filename = args.input
    # generate report
    df = pd.read_csv(filename)
    df.set_index('Ticker', inplace=True)
    df.drop_duplicates(inplace=True)
    ts_list = []
    for sector in df.Sector.unique():
        ts = TestSuite(name=sector)
        df_sector = df[df['Sector'] == sector]
        for industry in df_sector.Industry.unique():
            for ticker in df.index[df['Industry'] == industry]:
                # Only report tickers whose Market Cap string contains 'B'
                if df.loc[ticker, 'Market Cap'].find('B') > 0:
                    print(sector, '-', industry, '-', ticker, '-', df.loc[ticker, 'Change'])
                    tc = TestCase(classname=industry, name=ticker,
                                  elapsed_sec=df.loc[ticker, 'Price'],
                                  stdout=df.loc[ticker, 'Change'],
                                  stderr=df.loc[ticker, 'Market Cap'])
                    # A '-' in the daily Change string is flagged as an "error"
                    if df.loc[ticker, 'Change'].find('-') >= 0:
                        tc.add_error_info(message='lower')
                    ts.test_cases.append(tc)
        ts_list.append(ts)
    with open(args.output, 'w') as f:
        TestSuite.to_file(f, ts_list, prettyprint=True)
def run_everything_else(xml = False):
    """Run the remaining suites; write JUnit XML when `xml` is truthy.

    xml -- evaluated via is_true(); truthy writes ./testresults.xml,
           otherwise the raw (suite, results) list is printed.
    """
    mega_suite = []
    tests = [
        run_test_arakoon_changes,
        run_tests_cli,
        run_test_big_object
    ]
    for x in tests:
        r = x()
        mega_suite.append(r)
    if is_true(xml):
        from junit_xml import TestSuite, TestCase
        test_cases = []
        for (suite, results) in mega_suite:
            for (name, result, delta) in results:
                test_case = TestCase(name, suite, elapsed_sec=delta)
                # a falsy result means the test did not pass
                if not result:
                    test_case.add_error_info(message="failed")
                test_cases.append(test_case)
        ts = [TestSuite("run_everything_else", test_cases)]
        with open('./testresults.xml', 'w') as f:
            TestSuite.to_file(f, ts)
    else:
        # BUG FIX: was a Python 2 print statement -> use the print()
        # function so the module parses under Python 3.
        print(mega_suite)
def exporter_junit_ioper(self, test_result_ext, test_suite_properties=None):
    """Build a JUnit XML string from interoperability results.

    Each platform becomes one TestSuite; every result row becomes one
    TestCase with failure/error/skipped info attached as appropriate.
    """
    from junit_xml import TestSuite, TestCase
    all_suites = []
    for platform in sorted(test_result_ext.keys()):
        # {platform : ['Platform', 'Result', 'Scope', 'Description'])
        platform_cases = []
        for tr_result in test_result_ext[platform]:
            result, name, scope, description = tr_result
            case = TestCase(
                name,
                "test.ioper.%s.%s.%s" % (platform, name, scope),
                0,
                description,
                "",
            )
            if result == "FAIL":
                case.add_failure_info(description, description)
            elif result == "ERROR":
                case.add_error_info(description, description)
            elif result in ("SKIP", "NOT_SUPPORTED"):
                case.add_skipped_info(description, description)
            platform_cases.append(case)
        all_suites.append(TestSuite("test.suite.ioper.%s" % (platform), platform_cases))
    return TestSuite.to_xml_string(all_suites)
def print_result_cache_junitxml(dict_synonyms, suspicious_policy, untested_policy):
    """Print mutmut's cached mutants as a JUnit XML suite on stdout.

    dict_synonyms     -- passed through to get_unified_diff.
    suspicious_policy -- 'ignore' or a TestCase info kind ('failure',
                         'error', 'skipped') applied to OK_SUSPICIOUS mutants.
    untested_policy   -- same, for UNTESTED mutants.
    """
    test_cases = []
    l = list(select(x for x in Mutant))
    # NOTE(review): itertools-style groupby only groups adjacent items --
    # presumably the query returns mutants ordered by filename; confirm
    for filename, mutants in groupby(l, key=lambda x: x.line.sourcefile.filename):
        for mutant in mutants:
            tc = TestCase("Mutant #{}".format(mutant.id), file=filename,
                          line=mutant.line.line_number, stdout=mutant.line.line)
            # A surviving mutant is a test-suite failure
            if mutant.status == BAD_SURVIVED:
                tc.add_failure_info(message=mutant.status,
                                    output=get_unified_diff(mutant.id, dict_synonyms))
            # A timed-out mutant is reported as an error
            if mutant.status == BAD_TIMEOUT:
                tc.add_error_info(message=mutant.status, error_type="timeout",
                                  output=get_unified_diff(mutant.id, dict_synonyms))
            if mutant.status == OK_SUSPICIOUS:
                if suspicious_policy != 'ignore':
                    # Policy names map onto TestCase.add_<policy>_info methods
                    func = getattr(tc, 'add_{}_info'.format(suspicious_policy))
                    func(message=mutant.status,
                         output=get_unified_diff(mutant.id, dict_synonyms))
            if mutant.status == UNTESTED:
                if untested_policy != 'ignore':
                    func = getattr(tc, 'add_{}_info'.format(untested_policy))
                    func(message=mutant.status,
                         output=get_unified_diff(mutant.id, dict_synonyms))
            test_cases.append(tc)
    ts = TestSuite("mutmut", test_cases)
    print(TestSuite.to_xml_string([ts]))
def _collect_test_suite(self, scenario_result):
    """Build a junit TestSuite from a scenario result.

    A GeneralError yields a nameless suite holding a single error case;
    otherwise each test result becomes a TestCase whose Failed/Error
    results are attached as failure/error info. Passed results add nothing.
    """
    if isinstance(scenario_result, GeneralError):
        test_case = TestCase("", "")
        test_case.add_error_info(scenario_result.message)
        # NOTE(review): the second TestSuite argument is presumably its
        # test-case list; "" only works here because it is falsy -- confirm
        test_suite = TestSuite("", "")
        test_suite.test_cases.append(test_case)
        return test_suite
    test_suite = TestSuite(scenario_result.name)
    for test_result in scenario_result.test_results:
        # name doubles as the classname
        test_case = TestCase(test_result.name, test_result.name)
        for result in test_result.results:
            if isinstance(result, Failed):
                test_case.add_failure_info("ASSERTION {} failed".format(result.pretty_name),
                                           "EXPECTED {}\nGOT {}".format(result.expected, result.actual))
            elif isinstance(result, (Error, ConnectionError)):
                test_case.add_error_info("ASSERTION {} failed".format(result.pretty_name),
                                         "ERROR {}".format(result.error))
            elif isinstance(result, Passed):
                pass
            # TODO: What to do below?
            else:
                raise Exception("Unknown state")
        test_suite.test_cases.append(test_case)
    return test_suite
def test_init_error_message(self):
    """An error recorded with only a message round-trips that message."""
    case = TestCase('Error-Message')
    case.add_error_info("error message")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Error-Message'},
                     error_message="error message")
def handle_event(self, context: ExecutionContext, event: events.ExecutionEvent) -> None:
    """Accumulate schemathesis execution events into JUnit test cases.

    Initialized    -> remember the run start time.
    AfterExecution -> record one TestCase; deduplicated check failures
                      become failure info, errors carry the traceback.
    Finished       -> write all collected cases as one "schemathesis"
                      suite to self.file_handle.
    """
    if isinstance(event, events.Initialized):
        self.start_time = event.start_time
    if isinstance(event, events.AfterExecution):
        test_case = TestCase(
            f"{event.result.method} {event.result.path}",
            elapsed_sec=event.elapsed_time,
            allow_multiple_subelements=True,
        )
        if event.status == Status.failure:
            checks = deduplicate_failures(event.result.checks)
            for idx, check in enumerate(checks, 1):
                # `check.message` is always not empty for events with `failure` status
                test_case.add_failure_info(
                    message=f"{idx}. {check.message}")
        if event.status == Status.error:
            # Only the last recorded error is reported
            test_case.add_error_info(
                message=event.result.errors[-1].exception,
                output=event.result.errors[-1].exception_with_traceback)
        self.test_cases.append(test_case)
    if isinstance(event, events.Finished):
        test_suites = [
            TestSuite("schemathesis", test_cases=self.test_cases, hostname=platform.node())
        ]
        to_xml_report_file(file_descriptor=self.file_handle, test_suites=test_suites, prettyprint=True)
def test_init_error_output(self):
    """An error recorded with only output round-trips that output."""
    case = TestCase('Error-Output')
    case.add_error_info(output="I errored!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Error-Output'},
                     error_output="I errored!")
def test_init_unicode(self):
    """Unicode attributes survive a serialize/parse round trip."""
    name = decode('Test äöü', 'utf-8')
    classname = decode('some.class.name.äöü', 'utf-8')
    stdout = decode('I am stdöüt!', 'utf-8')
    stderr = decode('I am stdärr!', 'utf-8')
    skipped_message = decode('Skipped äöü', 'utf-8')
    skipped_output = decode('I skippäd!', 'utf-8')
    error_message = decode('Skipped error äöü', 'utf-8')
    error_output = decode('I skippäd with an error!', 'utf-8')
    case = TestCase(name=name, classname=classname, elapsed_sec=123.345,
                    stdout=stdout, stderr=stderr)
    case.add_skipped_info(message=skipped_message, output=skipped_output)
    case.add_error_info(message=error_message, output=error_output)
    ts, tcs = serialize_and_read(TestSuite('Test Unicode', [case]))[0]
    verify_test_case(
        self, tcs[0],
        {'name': name, 'classname': classname, 'time': ("%f" % 123.345)},
        stdout=stdout, stderr=stderr,
        skipped_message=skipped_message, skipped_output=skipped_output,
        error_message=error_message, error_output=error_output)
def simple_report(self):
    """Build a small TestReport: pass, skip, fail, error and pass cases."""
    case_pass = TestCase("testcase1", elapsed_sec=1.5)
    case_skip = TestCase("testcase2", elapsed_sec=0.5)
    case_skip.add_skipped_info("was skipped")
    case_fail = TestCase("testcase3", elapsed_sec=1.0)
    case_fail.add_failure_info("failed")
    case_error = TestCase("testcase4", elapsed_sec=0.25)
    case_error.add_error_info("errored")
    case_pass2 = TestCase("testcase5", elapsed_sec=0.1)
    cases = [case_pass, case_skip, case_fail, case_error, case_pass2]
    suites = [
        TestSuite('testsuite1', cases, timestamp=datetime.datetime.utcnow())
    ]
    return TestReport(NAME, {"module": suites}, BUILD_NUMBER, True)
def test_multiple_errors(self):
    """Tests multiple errors in one test case"""
    case = TestCase('Multiple error', allow_multiple_subelements=True)
    case.add_error_info("First error", "First error message")
    _, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Multiple error'}, errors=[
        {"message": "First error", "output": "First error message", "type": "error"},
    ])
    case.add_error_info("Second error", "Second error message")
    _, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Multiple error'}, errors=[
        {"message": "First error", "output": "First error message", "type": "error"},
        {"message": "Second error", "output": "Second error message", "type": "error"},
    ])
def test_init_skipped_err_output():
    """Skipped output and error output can coexist on one case."""
    case = Case("Skipped-Output")
    case.add_skipped_info(output="I skipped!")
    case.add_error_info(output="I skipped with an error!")
    ts, tcs = serialize_and_read(Suite("test", [case]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Skipped-Output"},
        skipped_output="I skipped!",
        error_output="I skipped with an error!",
    )
def format_test_results(results, endpoints, format, args):
    """Format NMOS test results as "json", "junit" or "console" output.

    results   -- dict with "suite", "urls", "def" and a "result" list of
                 test-result objects.
    endpoints -- endpoint descriptions embedded in the JSON output.
    format    -- "json", "junit" or "console"; anything else returns None.
    args      -- parsed CLI args; `ignore` is honoured when a `suite`
                 attribute is present.
    """
    formatted = None
    total_time = 0
    max_name_len = 0
    ignored_tests = []
    if "suite" in vars(args):
        ignored_tests = args.ignore
    # First pass: validate results and gather totals for the summary output
    for test_result in results["result"]:
        _check_test_result(test_result, results)
        total_time += test_result.elapsed_time
        max_name_len = max(max_name_len, len(test_result.name))
    if format == "json":
        formatted = {
            "suite": results["suite"],
            "timestamp": time.time(),
            "duration": total_time,
            "results": [],
            "config": _export_config(),
            "endpoints": endpoints
        }
        for test_result in results["result"]:
            formatted["results"].append({
                "name": test_result.name,
                "state": str(TestStates.DISABLED if test_result.name in ignored_tests else test_result.state),
                "detail": test_result.detail,
                "duration": test_result.elapsed_time
            })
        formatted = json.dumps(formatted, sort_keys=True, indent=4)
    elif format == "junit":
        test_cases = []
        for test_result in results["result"]:
            test_case = TestCase(test_result.name, classname=results["suite"],
                                 elapsed_sec=test_result.elapsed_time,
                                 timestamp=test_result.timestamp)
            # Ignored tests and non-executable states are reported as "skipped"
            if test_result.name in ignored_tests or test_result.state in [TestStates.DISABLED,
                                                                          TestStates.UNCLEAR,
                                                                          TestStates.MANUAL,
                                                                          TestStates.NA,
                                                                          TestStates.OPTIONAL]:
                test_case.add_skipped_info(test_result.detail)
            elif test_result.state in [TestStates.WARNING, TestStates.FAIL]:
                test_case.add_failure_info(test_result.detail, failure_type=str(test_result.state))
            elif test_result.state != TestStates.PASS:
                test_case.add_error_info(test_result.detail, error_type=str(test_result.state))
            test_cases.append(test_case)
        # Returned as a TestSuite object, not a string, in the junit case
        formatted = TestSuite(results["def"]["name"] + ": " + ", ".join(results["urls"]), test_cases)
    elif format == "console":
        formatted = "\r\nPrinting test results for suite '{}' using API(s) '{}'\r\n" \
                    .format(results["suite"], ", ".join(results["urls"]))
        formatted += "----------------------------\r\n"
        for test_result in results["result"]:
            # Pad with dots so states line up in a column
            num_extra_dots = max_name_len - len(test_result.name)
            test_state = str(TestStates.DISABLED if test_result.name in ignored_tests else test_result.state)
            formatted += "{} ...{} {}\r\n".format(test_result.name, ("." * num_extra_dots), test_state)
        formatted += "----------------------------\r\n"
        formatted += "Ran {} tests in ".format(len(results["result"])) + "{0:.3f}s".format(total_time) + "\r\n"
    return formatted
def test_init_skipped_err_output(self):
    """A case can carry both skipped output and error output."""
    case = TestCase('Skipped-Output')
    case.add_skipped_info(output="I skipped!")
    case.add_error_info(output="I skipped with an error!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Skipped-Output'},
                     skipped_output="I skipped!",
                     error_output="I skipped with an error!")
def test_init_error(self):
    """Message, output and the default "error" type all serialize."""
    case = TestCase('Error-Message-and-Output')
    case.add_error_info("error message", "I errored!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Error-Message-and-Output'},
                     error_message="error message",
                     error_output="I errored!",
                     error_type="error")
def execute(self, log, keyvals, testDef): testDef.logger.verbose_print("JunitXML Reporter") # pickup the options cmds = {} testDef.parseOptions(log, self.options, keyvals, cmds) if cmds['filename'] is not None: self.fh = open(cmds['filename'] if os.path.isabs(cmds['filename']) \ else os.path.join(testDef.options['scratchdir'],cmds['filename']), 'w') if testDef.options['description'] is not None: print(testDef.options['description'], file=self.fh) print(file=self.fh) # Use the Junit classname field to store the list of inifiles try: classname = testDef.log['inifiles'] except KeyError: classname = None # get the entire log of results fullLog = testDef.logger.getLog(None) testCases = [] # TODO: ain't nobody got time for that. 8-). time = 0 for lg in fullLog: if 'stdout' in lg and lg['stdout'] is not None: stdout = "\n".join(lg['stdout']) else: stdout = None if 'stderr' in lg and lg['stderr'] is not None: stderr = "\n".join(lg['stderr']) else: stderr = None if 'time' in lg and lg['time'] is not None: time = lg['time'] else: time = 0 tc = TestCase(lg['section'], classname, time, stdout, stderr) try: if 0 != lg['status']: # Find sections prefixed with 'TestRun' if re.match("TestRun", lg['section']): tc.add_failure_info("Test reported failure") else: tc.add_error_info("Test error") except KeyError: sys.exit(lg['section'] + " is missing status!") testCases.append(tc) # TODO: Pull in the resource manager jobid. jobid = "job1" ts = TestSuite(jobid, testCases) print(TestSuite.to_xml_string([ts]), file=self.fh) if cmds['filename'] is not None: self.fh.close() log['status'] = 0 return
def execute(self, log, keyvals, testDef): testDef.logger.verbose_print("JunitXML Reporter") # pickup the options cmds = {} testDef.parseOptions(log, self.options, keyvals, cmds) if cmds['filename'] is not None: self.fh = open(cmds['filename'] if os.path.isabs(cmds['filename']) \ else os.path.join(cmds['scratch'],cmds['filename']), 'w') if testDef.options['description'] is not None: print(testDef.options['description'], file=self.fh) print(file=self.fh) # Use the Junit classname field to store the list of inifiles try: classname = testDef.log['inifiles'] except KeyError: classname = None # get the entire log of results fullLog = testDef.logger.getLog(None) testCases = [] # TODO: ain't nobody got time for that. 8-). time = 0 for lg in fullLog: if 'stdout' in lg and lg['stdout'] is not None: stdout = "\n".join(lg['stdout']) else: stdout = None if 'stderr' in lg and lg['stderr'] is not None: stderr = "\n".join(lg['stderr']) else: stderr = None if 'time' in lg and lg['time'] is not None: time = lg['time'] else: time = 0 tc = TestCase(lg['section'], classname, time, stdout, stderr) try: if 0 != lg['status']: # Find sections prefixed with 'TestRun' if re.match("TestRun", lg['section']): tc.add_failure_info("Test reported failure") else: tc.add_error_info("Test error") except KeyError: sys.exit(lg['section'] + " is missing status!") testCases.append(tc) # TODO: Pull in the resource manager jobid. jobid = "job1" ts = TestSuite(jobid, testCases) print(TestSuite.to_xml_string([ts]), file=self.fh) if cmds['filename'] is not None: self.fh.close() log['status'] = 0 return
def test_init_skipped_err_output(self):
    """Skipped and error outputs coexist on the same serialized case."""
    case = TestCase('Skipped-Output')
    case.add_skipped_info(output="I skipped!")
    case.add_error_info(output="I skipped with an error!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(
        self,
        tcs[0],
        {'name': 'Skipped-Output'},
        skipped_output="I skipped!",
        error_output="I skipped with an error!")
def build_test_cases(output):
    """Map (test_name, failure_message, exc_info) triples to junit TestCases.

    A truthy failure_message wins over exc_info; exc_info is rendered as a
    formatted traceback in the error output.
    """
    cases = []
    for name, failure_message, exc_info in output:
        case = TestCase(name)
        if failure_message:
            case.add_failure_info(output=failure_message)
        elif exc_info:
            traceback_text = "".join(traceback.format_exception(*exc_info))
            case.add_error_info(output=traceback_text)
        cases.append(case)
    return cases
def test_init_error_type():
    """A bare error_type records nothing until a message is added."""
    case = Case("Error-Type")
    case.add_error_info(error_type="com.example.Error")
    ts, tcs = serialize_and_read(Suite("test", [case]))[0]
    verify_test_case(tcs[0], {"name": "Error-Type"})
    case.add_error_info("error message")
    ts, tcs = serialize_and_read(Suite("test", [case]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Error-Type"},
        error_message="error message",
        error_type="com.example.Error",
    )
def test_init_error():
    """Message, output and the implicit "error" type all round-trip."""
    case = Case("Error-Message-and-Output")
    case.add_error_info("error message", "I errored!")
    ts, tcs = serialize_and_read(Suite("test", [case]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Error-Message-and-Output"},
        error_message="error message",
        error_output="I errored!",
        error_type="error",
    )
def exporter_junit(self, test_result_ext, test_suite_properties=None):
    """ Export test results in JUnit XML compliant format

    test_result_ext is nested as target -> toolchain -> test -> list of
    {test_no: result-dict} entries; one TestSuite is emitted per
    (target, toolchain) pair. Returns the XML as a string.
    """
    from junit_xml import TestSuite, TestCase
    test_suites = []
    test_cases = []
    targets = sorted(test_result_ext.keys())
    for target in targets:
        toolchains = sorted(test_result_ext[target].keys())
        for toolchain in toolchains:
            test_cases = []
            tests = sorted(test_result_ext[target][toolchain].keys())
            for test in tests:
                test_results = test_result_ext[target][toolchain][test]
                for test_res in test_results:
                    test_ids = sorted(test_res.keys())
                    for test_no in test_ids:
                        test_result = test_res[test_no]
                        name = test_result['description']
                        classname = '%s.%s.%s.%s' % (self.package, target, toolchain, test_result['id'])
                        elapsed_sec = test_result['elapsed_time']
                        _stdout = test_result['output']
                        # stderr carries the (preferably unique) target name
                        if 'target_name_unique' in test_result:
                            _stderr = test_result['target_name_unique']
                        else:
                            _stderr = test_result['target_name']
                        # Test case
                        tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                        # Test case extra failure / error info
                        message = test_result['result']
                        if test_result['result'] == 'FAIL':
                            tc.add_failure_info(message, _stdout)
                        elif test_result['result'] == 'SKIP' or test_result["result"] == 'NOT_SUPPORTED':
                            tc.add_skipped_info(message, _stdout)
                        elif test_result['result'] != 'OK':
                            # Any other non-OK verdict is reported as an error
                            tc.add_error_info(message, _stdout)
                        test_cases.append(tc)
            ts = TestSuite("test.suite.%s.%s" % (target, toolchain), test_cases,
                           properties=test_suite_properties[target][toolchain])
            test_suites.append(ts)
    return TestSuite.to_xml_string(test_suites)
def test_init_error_type(self):
    """A bare error_type records nothing until a message is added."""
    case = TestCase('Error-Type')
    case.add_error_info(error_type='com.example.Error')
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Error-Type'})
    case.add_error_info("error message")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Error-Type'},
                     error_message="error message",
                     error_type='com.example.Error')
def generate_test_case(spelling_mistakes, file):
    '''Create a test case for all of the mistakes in a file.

    spelling_mistakes -- iterable of tuples whose first element is the
                         misspelled word; falsy (None or empty) attaches
                         no error info.
    file              -- name used for the TestCase.
    '''
    test_case = TestCase(name=file)
    # Truthiness check (was `is not None`) so an empty mistake list no longer
    # emits a header-only error message.
    if spelling_mistakes:
        # De-duplicate while preserving first-seen order.
        words = list(dict.fromkeys(mistake[0] for mistake in spelling_mistakes))
        # BUG FIX: user-facing message typo "Mispelled" -> "Misspelled".
        test_case.add_error_info(message="Misspelled words:\n" + ", ".join(words))
    return test_case
def test_init_error_type(self):
    """error_type alone adds no error; a later message picks up the stored type."""
    case = TestCase('Error-Type')
    case.add_error_info(error_type='com.example.Error')
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0], {'name': 'Error-Type'})
    case.add_error_info("error message")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(
        self,
        tcs[0],
        {'name': 'Error-Type'},
        error_message="error message",
        error_type='com.example.Error')
def exporter_junit(self, test_result_ext, test_suite_properties=None):
    """ Export test results in JUnit XML compliant format

    Walks target -> toolchain -> test -> [{test_no: result}] and emits one
    TestSuite per (target, toolchain). Returns the XML as a string.
    """
    from junit_xml import TestSuite, TestCase
    test_suites = []
    test_cases = []
    targets = sorted(test_result_ext.keys())
    for target in targets:
        toolchains = sorted(test_result_ext[target].keys())
        for toolchain in toolchains:
            test_cases = []
            tests = sorted(test_result_ext[target][toolchain].keys())
            for test in tests:
                test_results = test_result_ext[target][toolchain][test]
                for test_res in test_results:
                    test_ids = sorted(test_res.keys())
                    for test_no in test_ids:
                        test_result = test_res[test_no]
                        name = test_result["description"]
                        classname = "%s.%s.%s.%s" % (self.package, target, toolchain, test_result["id"])
                        elapsed_sec = test_result["elapsed_time"]
                        _stdout = test_result["output"]
                        # stderr carries the (preferably unique) target name
                        if "target_name_unique" in test_result:
                            _stderr = test_result["target_name_unique"]
                        else:
                            _stderr = test_result["target_name"]
                        # Test case
                        tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                        # Test case extra failure / error info
                        message = test_result["result"]
                        if test_result["result"] == "FAIL":
                            tc.add_failure_info(message, _stdout)
                        elif test_result["result"] == "SKIP" or test_result["result"] == "NOT_SUPPORTED":
                            tc.add_skipped_info(message, _stdout)
                        elif test_result["result"] != "OK":
                            # Any other non-OK verdict is reported as an error
                            tc.add_error_info(message, _stdout)
                        test_cases.append(tc)
            ts = TestSuite(
                "test.suite.%s.%s" % (target, toolchain),
                test_cases,
                properties=test_suite_properties[target][toolchain],
            )
            test_suites.append(ts)
    return TestSuite.to_xml_string(test_suites)
def execute(self, log, keyvals, testDef): testDef.logger.verbose_print("JunitXML Reporter") # pickup the options cmds = {} testDef.parseOptions(log, self.options, keyvals, cmds) if cmds['filename'] is not None: self.fh = open(cmds['filename'] if os.path.isabs(cmds['filename']) \ else os.path.join(testDef.options['scratchdir'],cmds['filename']), 'w') # get the entire log of results fullLog = testDef.logger.getLog(None) testCases = [] time = 0 for lg in fullLog: if 'stdout' in lg and lg['stdout'] is not None: stdout = "\n".join(lg['stdout']) else: stdout = None if 'stderr' in lg and lg['stderr'] is not None: stderr = "\n".join(lg['stderr']) else: stderr = None if 'time' in lg and lg['time'] is not None: time = lg['time'] else: time = 0 # Use the hostname of the system we are running on as root of the classname # Use the filename without the extension as the next layer of the classname hostname = os.uname()[1] classname = hostname + "." + cmds['filename'].split('.')[0] tc = TestCase(lg['section'], classname, time, stdout, stderr) try: if 0 != lg['status']: # Find sections prefixed with 'TestRun' if re.match("TestRun", lg['section']): tc.add_failure_info("Test reported failure") else: tc.add_error_info("Test error") except KeyError: sys.exit(lg['section'] + " is missing status!") testCases.append(tc) # TODO: Pull in the resource manager jobid. jobid = "job1" ts = TestSuite(jobid, testCases) print(TestSuite.to_xml_string([ts]), file=self.fh) if cmds['filename'] is not None: self.fh.close() log['status'] = 0 return
def test_init_utf8(self):
    """Round-trip a UTF-8 test case through serialization and verify fields."""
    case = TestCase('Test äöü', 'some.class.name.äöü', 123.345,
                    'I am stdöüt!', 'I am stdärr!')
    case.add_skipped_info(message='Skipped äöü', output="I skippäd!")
    case.add_error_info(message='Skipped error äöü', output="I skippäd with an error!")
    suite = TestSuite('Test UTF-8', [case])
    (ts, tcs) = serialize_and_read(suite, encoding='utf-8')[0]
    expected_attrs = {
        'name': decode('Test äöü', 'utf-8'),
        'classname': decode('some.class.name.äöü', 'utf-8'),
        'time': ("%f" % 123.345),
    }
    verify_test_case(
        self, tcs[0], expected_attrs,
        stdout=decode('I am stdöüt!', 'utf-8'),
        stderr=decode('I am stdärr!', 'utf-8'),
        skipped_message=decode('Skipped äöü', 'utf-8'),
        skipped_output=decode('I skippäd!', 'utf-8'),
        error_message=decode('Skipped error äöü', 'utf-8'),
        error_output=decode('I skippäd with an error!', 'utf-8'))
def exporter_junit(self, test_result_ext, test_suite_properties=None):
    """Export test results in JUnit XML compliant format.

    @param test_result_ext Nested dict: target -> toolchain -> test -> list of
           {test_no: result-dict} mappings.
    @param test_suite_properties Optional dict keyed [target][toolchain]; may
           be None.
    @return String containing JUnit XML formatted test result output.
    """
    from junit_xml import TestSuite, TestCase
    test_suites = []
    test_cases = []
    targets = sorted(test_result_ext.keys())
    for target in targets:
        toolchains = sorted(test_result_ext[target].keys())
        for toolchain in toolchains:
            test_cases = []
            tests = sorted(test_result_ext[target][toolchain].keys())
            for test in tests:
                test_results = test_result_ext[target][toolchain][test]
                for test_res in test_results:
                    test_ids = sorted(test_res.keys())
                    for test_no in test_ids:
                        test_result = test_res[test_no]
                        name = test_result['description']
                        classname = '%s.%s.%s.%s'% (self.package, target, toolchain, test_result['id'])
                        elapsed_sec = test_result['elapsed_time']
                        _stdout = test_result['output']
                        # Prefer the unique target name when available
                        if 'target_name_unique' in test_result:
                            _stderr = test_result['target_name_unique']
                        else:
                            _stderr = test_result['target_name']
                        # Test case
                        tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                        # Test case extra failure / error info
                        message = test_result['result']
                        if test_result['result'] == 'FAIL':
                            tc.add_failure_info(message, _stdout)
                        elif test_result['result'] == 'SKIP' or test_result["result"] == 'NOT_SUPPORTED':
                            tc.add_skipped_info(message, _stdout)
                        elif test_result['result'] != 'OK':
                            tc.add_error_info(message, _stdout)
                        test_cases.append(tc)
            # Fix: guard against the default None, which previously raised
            # TypeError on `test_suite_properties[target]`.
            properties = None
            if test_suite_properties is not None:
                properties = test_suite_properties[target][toolchain]
            ts = TestSuite("test.suite.%s.%s"% (target, toolchain),
                           test_cases,
                           properties=properties)
            test_suites.append(ts)
    return TestSuite.to_xml_string(test_suites)
def test_init_unicode(self):
    """Round-trip a unicode-typed test case and verify all serialized fields."""
    case = TestCase(decode('Test äöü', 'utf-8'),
                    decode('some.class.name.äöü', 'utf-8'),
                    123.345,
                    decode('I am stdöüt!', 'utf-8'),
                    decode('I am stdärr!', 'utf-8'))
    case.add_skipped_info(message=decode('Skipped äöü', 'utf-8'),
                          output=decode('I skippäd!', 'utf-8'))
    case.add_error_info(message=decode('Skipped error äöü', 'utf-8'),
                        output=decode('I skippäd with an error!', 'utf-8'))
    (ts, tcs) = serialize_and_read(TestSuite('Test Unicode', [case]))[0]
    expected_attrs = {
        'name': decode('Test äöü', 'utf-8'),
        'classname': decode('some.class.name.äöü', 'utf-8'),
        'time': ("%f" % 123.345),
    }
    verify_test_case(
        self, tcs[0], expected_attrs,
        stdout=decode('I am stdöüt!', 'utf-8'),
        stderr=decode('I am stdärr!', 'utf-8'),
        skipped_message=decode('Skipped äöü', 'utf-8'),
        skipped_output=decode('I skippäd!', 'utf-8'),
        error_message=decode('Skipped error äöü', 'utf-8'),
        error_output=decode('I skippäd with an error!', 'utf-8'))
def _build_test_case(self, task_data, host_data):
    """ build a TestCase from the given TaskData and HostData """

    name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
    duration = host_data.finish - task_data.start

    if self._task_class == 'true':
        # Fix: use a raw string for the regex — '\.' in a plain string is an
        # invalid escape sequence (DeprecationWarning, SyntaxError in future
        # Python versions). Strips the trailing ".yml:<line>" from the path.
        junit_classname = re.sub(r'\.yml:[0-9]+$', '', task_data.path)
    else:
        junit_classname = task_data.path

    # Included tasks carry no result payload worth dumping.
    if host_data.status == 'included':
        return TestCase(name, junit_classname, duration, host_data.result)

    res = host_data.result._result
    rc = res.get('rc', 0)
    dump = self._dump_results(res, indent=0)
    dump = self._cleanse_string(dump)

    if host_data.status == 'ok':
        return TestCase(name, junit_classname, duration, dump)

    test_case = TestCase(name, junit_classname, duration)

    if host_data.status == 'failed':
        if 'exception' in res:
            # Use the last line of the traceback as the short message.
            message = res['exception'].strip().split('\n')[-1]
            output = res['exception']
            test_case.add_error_info(message, output)
        elif 'msg' in res:
            message = res['msg']
            test_case.add_failure_info(message, dump)
        else:
            test_case.add_failure_info('rc=%s' % rc, dump)
    elif host_data.status == 'skipped':
        if 'skip_reason' in res:
            message = res['skip_reason']
        else:
            message = 'skipped'
        test_case.add_skipped_info(message)

    return test_case
def report(self, report_dir=None):
    """Aggregate pickled per-test results into a JUnit results.xml.

    Scans report_dir (defaults to self.config.report_dir) for *.pkl files,
    converts each recorded test entry into a TestCase, and writes the
    combined suite to results.xml in the same directory.
    """
    test_cases = []
    if report_dir is None:
        report_dir = self.config.report_dir
    for report_file in os.listdir(report_dir):
        if not report_file.endswith(".pkl"):
            continue
        # Fix: use context managers so handles are closed even if
        # unpickling or XML generation raises.
        with open(os.path.join(report_dir, report_file), "r") as f:
            result_dict = cPickle.load(f)
        # sorted() instead of keys()+sort(): deterministic order, no
        # in-place mutation of a temporary list.
        for test in sorted(result_dict.keys()):
            in_entry = result_dict[test]
            report_entry = TestCase(test, in_entry["CLASS_NAME"], in_entry["TIME"])
            if in_entry["RESULT"] == "FAIL":
                report_entry.add_failure_info(in_entry["MESSAGE"], in_entry["TRACE"])
            elif in_entry["RESULT"] == "ERROR":
                report_entry.add_error_info(in_entry["MESSAGE"], in_entry["TRACE"])
            test_cases.append(report_entry)
    ts = TestSuite("my test suite", test_cases)
    with open(os.path.join(report_dir, "results.xml"), "w") as f_xml:
        f_xml.write(TestSuite.to_xml_string([ts]))
def exporter_junit(self, test_result_ext, test_suite_properties=None): """ Export test results in JUnit XML compliant format """ from junit_xml import TestSuite, TestCase test_suites = [] test_cases = [] toolchains = sorted(test_result_ext.keys()) for toolchain in toolchains: targets = sorted(test_result_ext[toolchain].keys()) for target in targets: test_cases = [] tests = sorted(test_result_ext[toolchain][target].keys()) for test in tests: test_results = test_result_ext[toolchain][target][test] test_ids = sorted(test_results.keys()) for test_no in test_ids: test_result = test_results[test_no] name = test_result['test_description'] classname = 'test.%s.%s.%s'% (target, toolchain, test_result['test_id']) elapsed_sec = test_result['elapsed_time'] _stdout = test_result['single_test_output'] _stderr = '' # Test case tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr) # Test case extra failure / error info if test_result['single_test_result'] == 'FAIL': message = test_result['single_test_result'] tc.add_failure_info(message, _stdout) elif test_result['single_test_result'] != 'OK': message = test_result['single_test_result'] tc.add_error_info(message, _stdout) test_cases.append(tc) ts = TestSuite("test.suite.%s.%s"% (target, toolchain), test_cases, properties=test_suite_properties[target][toolchain]) test_suites.append(ts) return TestSuite.to_xml_string(test_suites)
def test_deploy_openstack_run_tempest(self, underlay, config, ccpcluster,
                                      k8s_actions, rally):
    """Deploy base environment

    Scenario:
    1. Revert snapshot
    2. Install ccp
    3. Deploy environment
    4. Run tempest

    Duration 35 min
    """
    remote = underlay.remote(host=config.k8s.kube_host)
    # Local registry is only created when running against the default
    # in-cluster registry address.
    if settings.REGISTRY == "127.0.0.1:31500":
        k8s_actions.create_registry()
    ccpcluster.build()
    ccpcluster.deploy()
    post_os_deploy_checks.check_jobs_status(k8s_actions.api, timeout=4500)
    post_os_deploy_checks.check_pods_status(k8s_actions.api, timeout=4500)
    # prepare rally
    rally.prepare()
    rally.pull_image()
    rally.run()
    # run tempest
    rally.run_tempest()

    LOG.info('Storing tests results...')
    res_file_name = 'result.json'
    file_prefix = 'results_' + datetime.datetime.now().strftime(
        '%Y%m%d_%H%M%S') + '_'
    file_dst = '{0}/logs/{1}{2}'.format(
        settings.LOGS_DIR, file_prefix, res_file_name)
    # Copy the raw tempest JSON report off the remote node for archiving.
    remote.download(
        '/home/{0}/rally/{1}'.format(settings.SSH_LOGIN, res_file_name),
        file_dst)
    res = json.load(remote.open('/home/{}/rally/result.json'.format(
        settings.SSH_LOGIN)))

    # Convert each tempest case into a junit_xml TestCase, bucketed by status.
    # NOTE(review): `status in 'fail'` is a substring test, not equality —
    # it also matches '', 'f', 'ai', etc.; presumably equality was intended.
    formatted_tc = []
    failed_cases = [res['test_cases'][case] for case in res['test_cases']
                    if res['test_cases'][case]['status'] in 'fail']
    for case in failed_cases:
        if case:
            tc = TestCase(case['name'])
            tc.add_failure_info(case['traceback'])
            formatted_tc.append(tc)

    skipped_cases = [res['test_cases'][case] for case in res['test_cases']
                     if res['test_cases'][case]['status'] in 'skip']
    for case in skipped_cases:
        if case:
            tc = TestCase(case['name'])
            tc.add_skipped_info(case['reason'])
            formatted_tc.append(tc)

    error_cases = [res['test_cases'][case] for case in res['test_cases']
                   if res['test_cases'][case]['status'] in 'error']
    for case in error_cases:
        if case:
            tc = TestCase(case['name'])
            tc.add_error_info(case['traceback'])
            formatted_tc.append(tc)

    success = [res['test_cases'][case] for case in res['test_cases']
               if res['test_cases'][case]['status'] in 'success']
    for case in success:
        if case:
            tc = TestCase(case['name'])
            formatted_tc.append(tc)

    ts = TestSuite("tempest", formatted_tc)
    with open('tempest.xml', 'w') as f:
        # NOTE(review): to_file is invoked on the instance yet is also passed
        # [ts]; this only works if junit_xml exposes to_file as a static
        # method (other call sites in this codebase use TestSuite.to_file) —
        # confirm against the installed junit_xml version.
        ts.to_file(f, [ts], prettyprint=False)
    fail_msg = 'Tempest verification fails {}'.format(res)
    assert res['failures'] == 0, fail_msg
def exporter_testcase_junit(test_result_ext, test_suite_properties=None): """! Export test results in JUnit XML compliant format @param test_result_ext Extended report from Greentea @param test_suite_properties Data from yotta module.json file @details This function will import junit_xml library to perform report conversion @return String containing Junit XML formatted test result output """ from junit_xml import TestSuite, TestCase ym_name = test_suite_properties.get('name', 'unknown') test_suites = [] for target_name in test_result_ext: test_results = test_result_ext[target_name] for test_suite_name in test_results: test = test_results[test_suite_name] # tc_elapsed_sec = test['elapsed_time'] tc_stdout = '' #test['single_test_output'] try: tc_stderr = test['single_test_output'].decode('unicode_escape').encode('ascii','ignore') except UnicodeDecodeError as e: print "exporter_testcase_junit:", str(e) # testcase_result stores info about test case results testcase_result = test['testcase_result'] # "testcase_result": { # "STRINGS004": { # "duration": 0.009999990463256836, # "time_start": 1453073018.275, # "time_end": 1453073018.285, # "result": 1 # }, test_cases = [] for tc_name in sorted(testcase_result.keys()): duration = testcase_result[tc_name].get('duration', 0.0) # result = testcase_result[tc_name].get('result', 0) # passed = testcase_result[tc_name].get('passed', 0) # failed = testcase_result[tc_name].get('failed', 0) utest_log = testcase_result[tc_name].get('utest_log', '') result_text = testcase_result[tc_name].get('result_text', "UNDEF") try: tc_stdout = '\n'.join(utest_log).decode('unicode_escape').encode('ascii','ignore') except UnicodeDecodeError as e: print "exporter_testcase_junit:", str(e) tc_class = ym_name + '.' + target_name + '.' 
+ test_suite_name tc = TestCase(tc_name, tc_class, duration, tc_stdout, tc_stderr) message = '' if result_text == 'FAIL': tc.add_failure_info(message, tc_stdout) elif result_text != 'OK': tc.add_error_info(message, tc_stdout) test_cases.append(tc) ts_name = ym_name + '.' + target_name ts = TestSuite(ts_name, test_cases) test_suites.append(ts) return TestSuite.to_xml_string(test_suites)
def do_build(self, args):
    """Generate machine images for every builder declared in a template file.

    Parses CLI args, locates the target appliance (by dbId or by
    name/version from the template), then drives image generation for each
    builder entry, showing a progress bar and optionally recording a JUnit
    report (--junit).  Returns 0 on success, 2 on usage/validation errors.
    """
    try:
        # add arguments
        doParser = self.arg_build()
        doArgs = doParser.parse_args(shlex.split(args))

        # if the help command is called, parse_args returns None object
        if not doArgs:
            return 2
        # --
        template = validate(doArgs.file)
        if template is None:
            return 2

        if doArgs.id:
            # Explicit appliance id given: look it up directly.
            myAppliance = self.api.Users(self.login).Appliances().Getall(Query="dbId==" + doArgs.id)
            myAppliance = myAppliance.appliances.appliance
        else:
            # Get template which correpond to the template file
            myAppliance = (
                self.api.Users(self.login)
                .Appliances()
                .Getall(
                    Query="name=='"
                    + template["stack"]["name"]
                    + "';version=='"
                    + template["stack"]["version"]
                    + "'"
                )
            )
            myAppliance = myAppliance.appliances.appliance
        if myAppliance is None or len(myAppliance) != 1:
            printer.out("No template found on the plateform")
            return 0
        myAppliance = myAppliance[0]

        rInstallProfile = self.api.Users(self.login).Appliances(myAppliance.dbId).Installprofile("").Getdeprecated()
        if rInstallProfile is None:
            printer.out("No installation found on the template '" + template["stack"]["name"] + "'", printer.ERROR)
            return 0

        try:
            i = 1
            if doArgs.junit is not None:
                # One TestCase per builder is accumulated here for the report.
                test_results = []
            for builder in template["builders"]:
                try:
                    printer.out(
                        "Generating '"
                        + builder["type"]
                        + "' image ("
                        + str(i)
                        + "/"
                        + str(len(template["builders"]))
                        + ")"
                    )
                    if doArgs.junit is not None:
                        test = TestCase("Generation " + builder["type"])
                        test_results.append(test)
                        start_time = time.time()

                    format_type = builder["type"]
                    targetFormat = generate_utils.get_target_format_object(self.api, self.login, format_type)

                    if targetFormat is None:
                        printer.out("Builder type unknown: " + format_type, printer.ERROR)
                        return 2

                    myimage = image()
                    myinstallProfile = installProfile()
                    if rInstallProfile.partitionAuto:
                        # Auto-partitioning: take swap/disk sizes from the
                        # builder overrides if present, else from the profile.
                        if "installation" in builder:
                            if "swapSize" in builder["installation"]:
                                myinstallProfile.swapSize = builder["installation"]["swapSize"]
                            if "diskSize" in builder["installation"]:
                                myinstallProfile.diskSize = builder["installation"]["diskSize"]
                        else:
                            myinstallProfile.swapSize = rInstallProfile.swapSize
                            myinstallProfile.diskSize = rInstallProfile.partitionTable.disks.disk[0].size

                    # Dispatch to the per-format generator, e.g. generate_vbox.
                    func = getattr(
                        generate_utils,
                        "generate_" + generics_utils.remove_special_chars(targetFormat.format.name),
                        None,
                    )
                    if func:
                        myimage, myinstallProfile = func(myimage, builder, myinstallProfile, self.api, self.login)
                    else:
                        printer.out("Builder type unknown: " + format_type, printer.ERROR)
                        return 2

                    if myimage is None:
                        return 2

                    myimage.targetFormat = targetFormat
                    myimage.installProfile = myinstallProfile
                    if doArgs.simulated is not None and doArgs.simulated:
                        myimage.simulated = True
                    if doArgs.forced is not None and doArgs.forced:
                        myimage.forceCheckingDeps = True

                    rImage = self.api.Users(self.login).Appliances(myAppliance.dbId).Images().Generate(myimage)
                    status = rImage.status
                    statusWidget = progressbar_widget.Status()
                    statusWidget.status = status
                    widgets = [Bar(">"), " ", statusWidget, " ", ReverseBar("<")]
                    progress = ProgressBar(widgets=widgets, maxval=100).start()
                    # Poll server-side generation status every 2s until done.
                    while not (status.complete or status.error or status.cancelled):
                        statusWidget.status = status
                        progress.update(status.percentage)
                        status = (
                            self.api.Users(self.login).Appliances(myAppliance.dbId).Images(rImage.dbId).Status.Get()
                        )
                        time.sleep(2)
                    statusWidget.status = status
                    progress.finish()
                    if status.error:
                        printer.out(
                            "Generation '"
                            + builder["type"]
                            + "' error: "
                            + status.message
                            + "\n"
                            + status.errorMessage,
                            printer.ERROR,
                        )
                        if status.detailedError:
                            printer.out(status.detailedErrorMsg)
                        if doArgs.junit is not None:
                            test.elapsed_sec = time.time() - start_time
                            test.add_error_info("Error", status.message + "\n" + status.errorMessage)
                    elif status.cancelled:
                        printer.out(
                            "Generation '" + builder["type"] + "' canceled: " + status.message, printer.WARNING
                        )
                        if doArgs.junit is not None:
                            test.elapsed_sec = time.time() - start_time
                            test.add_failure_info("Canceled", status.message)
                    else:
                        printer.out("Generation '" + builder["type"] + "' ok", printer.OK)
                        printer.out("Image URI: " + rImage.uri)
                        printer.out("Image Id : " + generics_utils.extract_id(rImage.uri))
                        if doArgs.junit is not None:
                            test.elapsed_sec = time.time() - start_time
                            # the downloadUri already contains downloadKey at the end
                            if rImage.downloadUri is not None:
                                test.stdout = self.api._url + "/" + rImage.downloadUri
                    i += 1
                except Exception as e:
                    # Known platform errors are reported (and recorded in the
                    # JUnit report); anything else propagates.
                    if is_uforge_exception(e):
                        print_uforge_exception(e)
                        if doArgs.junit is not None and "test_results" in locals() and len(test_results) > 0:
                            test = test_results[len(test_results) - 1]
                            test.elapsed_sec = time.time() - start_time
                            test.add_error_info("Error", get_uforge_exception(e))
                    else:
                        raise
            if doArgs.junit is not None:
                testName = myAppliance.distributionName + " " + myAppliance.archName
                ts = TestSuite("Generation " + testName, test_results)
                with open(doArgs.junit, "w") as f:
                    TestSuite.to_file(f, [ts], prettyprint=False)
            return 0
        except KeyError as e:
            printer.out("unknown error in template file", printer.ERROR)
    except ArgumentParserError as e:
        printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
        self.help_build()
    except KeyboardInterrupt:
        # Ctrl-C: offer to cancel the in-flight server-side generation.
        printer.out("\n")
        if generics_utils.query_yes_no("Do you want to cancel the job ?"):
            if (
                "myAppliance" in locals()
                and "rImage" in locals()
                and hasattr(myAppliance, "dbId")
                and hasattr(rImage, "dbId")
            ):
                self.api.Users(self.login).Appliances(myAppliance.dbId).Images(rImage.dbId).Status.Cancel()
            else:
                printer.out("Impossible to cancel", printer.WARNING)
        else:
            printer.out("Exiting command")
    except Exception as e:
        print_uforge_exception(e)
        if doArgs.junit is not None and "test_results" in locals() and len(test_results) > 0:
            test = test_results[len(test_results) - 1]
            if "start_time" in locals():
                elapse = time.time() - start_time
            else:
                elapse = 0
            test.elapsed_sec = elapse
            test.add_error_info("Error", get_uforge_exception(e))
        else:
            return 2
    finally:
        # Always flush the JUnit report, even after errors/interrupts, so
        # partial results are not lost.
        if (
            "doArgs" in locals()
            and doArgs.junit is not None
            and "test_results" in locals()
            and len(test_results) > 0
        ):
            if "myAppliance" in locals():
                testName = myAppliance.distributionName + " " + myAppliance.archName
            else:
                testName = ""
            ts = TestSuite("Generation " + testName, test_results)
            with open(doArgs.junit, "w") as f:
                TestSuite.to_file(f, [ts], prettyprint=False)
else: navit.zoom_to_route() os.system("/usr/bin/import -window root "+gpx_directory+"/"+filename+export_suffix + ".png") else: print "No route found, last status : " + str(status) + ", duration : "+str(time.time() - start_time) test_cases = TestCase(filename, '', time.time() - start_time, '', '') if dataMap['success']['source'] == 'gpx' : doc = lxml.etree.parse(gpx_directory+"/"+filename+export_suffix + ".gpx") rtept_count = doc.xpath('count(//rtept)') if not(eval(str(rtept_count) + dataMap['success']['operator'] + str(dataMap['success']['value']))): test_cases.add_failure_info('navigation items count mismatch [ got ' + \ str(rtept_count) + ", expected " + dataMap['success']['operator'] + str(dataMap['success']['value']) ) elif dataMap['success']['source'] == 'dbus' : if not(eval(dataMap['success']['item'] + dataMap['success']['operator'] + str(dataMap['success']['value']))): test_cases.add_failure_info('dbus result mismatch [ got ' + \ str(eval(str(dataMap['success']['item']))) + dataMap['success']['operator'] + str(dataMap['success']['value']) ) except: # We had a failure, like navit crash, dbus timeout, ... print "This test failed. Maybe a missing map?" test_cases = TestCase(filename, '', time.time() - start_time, '', '') test_cases.add_error_info('test failed') tests.append(test_cases) ts = [TestSuite("Navit routing tests", tests)] with open(junit_directory+'output.xml', 'w+') as f: TestSuite.to_file(f, ts, prettyprint=False)
def exporter_testcase_junit(test_result_ext, test_suite_properties=None): """! Export test results in JUnit XML compliant format @param test_result_ext Extended report from Greentea @param test_spec Dictionary of test build names to test suite properties @details This function will import junit_xml library to perform report conversion @return String containing Junit XML formatted test result output """ from junit_xml import TestSuite, TestCase test_suites = [] for target_name in test_result_ext: test_results = test_result_ext[target_name] for test_suite_name in test_results: test = test_results[test_suite_name] # tc_elapsed_sec = test['elapsed_time'] tc_stdout = str() #test['single_test_output'] try: tc_stdout = test['single_test_output'].decode('unicode_escape').encode('ascii','ignore') except UnicodeDecodeError as e: err_mgs = "(UnicodeDecodeError) exporter_testcase_junit:", str(e) tc_stdout = err_mgs print err_mgs # testcase_result stores info about test case results testcase_result = test['testcase_result'] # "testcase_result": { # "STRINGS004": { # "duration": 0.009999990463256836, # "time_start": 1453073018.275, # "time_end": 1453073018.285, # "result": 1 # }, test_cases = [] for tc_name in sorted(testcase_result.keys()): duration = testcase_result[tc_name].get('duration', 0.0) utest_log = testcase_result[tc_name].get('utest_log', '') result_text = testcase_result[tc_name].get('result_text', "UNDEF") try: tc_stderr = '\n'.join(utest_log).decode('unicode_escape').encode('ascii','ignore') except UnicodeDecodeError as e: err_mgs = "(UnicodeDecodeError) exporter_testcase_junit:" + str(e) tc_stderr = err_mgs print err_mgs tc_class = target_name + '.' 
+ test_suite_name if result_text == 'SKIPPED': # Skipped test cases do not have logs and we do not want to put # whole log inside JUNIT for skipped test case tc_stderr = str() tc = TestCase(tc_name, tc_class, duration, tc_stdout, tc_stderr) if result_text == 'FAIL': tc.add_failure_info(result_text) elif result_text == 'SKIPPED': tc.add_skipped_info(result_text) elif result_text != 'OK': tc.add_error_info(result_text) test_cases.append(tc) ts_name = target_name test_build_properties = test_suite_properties[target_name] if target_name in test_suite_properties else None ts = TestSuite(ts_name, test_cases, properties=test_build_properties) test_suites.append(ts) return TestSuite.to_xml_string(test_suites)
def readTest(f, expectedFailures):
    """Load one JSON test-result file and build a pair of TestCases.

    Returns (tc1, tc2, failed_class): tc1 applies expected-failure
    semantics, tc2 records the raw outcome, and failed_class is the
    class name when the test did not succeed (else None).
    """
    parts = f.split(".")
    cl = ".".join(parts[:-1])
    name = parts[-2]
    with open(f) as fin:
        try:
            res = simplejson.load(fin)
        except simplejson.errors.JSONDecodeError:
            print("Error loading file %s" % f)
            raise
    expectFail = cl in expectedFailures

    # A killed/crashed run has no timing or message payload.
    if "killed" in res:
        tc1 = TestCase(name, cl, 0, '', '')
        tc2 = TestCase(name, cl, 0, '', '')
        tc2.add_error_info('Killed or crashed')
        if expectFail:
            tc1.add_skipped_info('Killed or crashed; expected failure')
        else:
            tc1.add_error_info('Killed or crashed')
        return (tc1, tc2, cl)

    tc1 = TestCase(name, cl, res["time"], res["messages"], '')
    tc2 = TestCase(name, cl, res["time"], res["messages"], '')
    success = res["shouldPass"] if False else res["success"]
    success = res["success"]
    shouldPass = res["shouldPass"]

    if expectFail:
        if success:
            tc1.add_error_info('This testcase started working (failure was expected)')
        else:
            tc1.add_skipped_info('This testcase still fails (as expected)')
    elif not success:
        tc1.add_error_info('failed' if shouldPass else 'expected failure, but passed')

    # tc2 ignores the expected-failure list entirely.
    if not success:
        tc2.add_error_info('failed' if shouldPass else 'expected failure, but passed')

    return (tc1, tc2, None if success else cl)
t1_test = time.time() delta_test = t1_test - t0_test test_name = "flavour_%i" % flavour results.append((test_name, result, delta_test)) failures = filter(lambda x: not x[1], results) t1 = time.time() delta = t1 - t0 if is_true(xml): from junit_xml import TestSuite, TestCase test_cases = [] for (name,result, delta) in results: test_case = TestCase(name, 'TestCompat', elapsed_sec = delta) if not result: test_case.add_error_info(message = "failed") test_cases.append(test_case) ts = [TestSuite("compatibility", test_cases)] with open('./testresults.xml', 'w') as f: TestSuite.to_file(f,ts) else: print results @task def run_test_arakoon_changes (): def _inner(): alba.demo_kill () alba.demo_setup(acf = arakoon_config_file_2) # 2 node cluster, and arakoon_0 will be master. def stop_node(node_name):