def test(test_str, exception=None):
    """ Pass/fail test."""
    # NOTE(review): the `exception` parameter is accepted but never used here;
    # exception-expecting checks appear to live in exception_test() instead.
    suite.test_cases.append(TestCase('libspud for python.%s' % test_str))
    try:
        # eval() runs the harness-supplied expression in this function's scope;
        # test_str comes from the test driver itself, not untrusted input.
        result = eval(test_str)
        if not result:
            # Expression evaluated to a falsy value -> record a failure.
            suite.test_cases[-1].add_failure_info('Failure')
    except Exception as e:
        # Any exception during evaluation is recorded against the case.
        suite.test_cases[-1].add_failure_info('Exception', str(e))
def test_single_suite_single_test_case(self):
    """A bare TestCase (not wrapped in a list) must be rejected by TestSuite."""
    try:
        (ts, tcs) = serialize_and_read(TestSuite('test', TestCase('Test1')), to_file=True)[0]
        # Fix: corrected the typo "exeception" in the failure message.
        self.fail("This should've raised an exception")  # pragma: nocover
    except Exception as exc:
        self.assertEqual(str(exc), 'test_cases must be a list of test cases')
def test_init_skipped_err_output(self):
    """Skipped output and error output on the same case both round-trip."""
    case = TestCase('Skipped-Output')
    case.add_skipped_info(output="I skipped!")
    case.add_error_info(output="I skipped with an error!")
    suite = TestSuite('test', [case])
    (ts, tcs) = serialize_and_read(suite)[0]
    verify_test_case(
        self, tcs[0], {'name': 'Skipped-Output'},
        skipped_output="I skipped!",
        error_output="I skipped with an error!")
def test_init_illegal_unicode_char(self):
    """A control character illegal in XML is stripped during serialization."""
    tc = TestCase('Failure-Message')
    # \x02 (STX) is not a legal XML 1.0 character.
    tc.add_failure_info(
        u("failure message with illegal unicode char: [\x02]"))
    (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
    # After round-tripping, the illegal character must be gone.
    verify_test_case(
        self, tcs[0], {'name': 'Failure-Message'},
        failure_message=u("failure message with illegal unicode char: []"))
def add_junit_test(test_cases, testname, succ, message='default message', elapsed_sec=0):
    """Append a TestCase named *testname* to *test_cases*.

    The case is marked as failed (with *message*) when *succ* is falsy.
    """
    case = TestCase(testname, elapsed_sec=elapsed_sec)
    if not succ:
        case.add_failure_info(message)
    test_cases.append(case)
def test_init_error(self):
    """Error message and output supplied together are both serialized."""
    case = TestCase('Error-Message-and-Output')
    case.add_error_info("error message", "I errored!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0],
                     {'name': 'Error-Message-and-Output'},
                     error_message="error message",
                     error_output="I errored!",
                     error_type="error")
def handle_message(self, msg):
    """Manage message of different type and in the context of path."""
    # NOTE(review): linecache.getline() returns str on Python 3, which has no
    # .decode(); this line only works on Python 2 — confirm target interpreter.
    source_line = getline(msg.path, msg.line).strip().decode('utf-8')
    # "path:line:col:source" — recorded as the test case's stdout.
    stdout_line = u"{0}:{1}:{2}:{3}".format(msg.path, msg.line, msg.column, source_line)
    # "msg_id:msg\n<stdout_line>" — recorded as stderr and as failure output.
    stderr_line = u"{0}:{1}\n{2}".format(msg.msg_id, msg.msg, stdout_line)
    # One test case per lint message, keyed by module:line:column.
    testcase_name = u"{0}:{1}:{2}".format(msg.module, msg.line, msg.column)
    testcase = TestCase(testcase_name, stdout=stdout_line, stderr=stderr_line,
                        file=msg.path, line=msg.line, category=msg.category)
    testcase.add_failure_info(message=msg.symbol, output=stderr_line)
    self.items[self.current_module].test_cases.append(testcase)
def test_init_classname(self):
    """A classname given at construction time is serialized."""
    suite = TestSuite('test', [TestCase(name='Test1', classname='some.class.name')])
    (ts, tcs) = serialize_and_read(suite)[0]
    expected = {'name': 'Test1', 'classname': 'some.class.name'}
    verify_test_case(self, tcs[0], expected)
def test_init_failure(self):
    """Failure message and output supplied together are both serialized."""
    case = TestCase('Failure-Message-and-Output')
    case.add_failure_info("failure message", "I failed!")
    ts, tcs = serialize_and_read(TestSuite('test', [case]))[0]
    verify_test_case(self, tcs[0],
                     {'name': 'Failure-Message-and-Output'},
                     failure_message="failure message",
                     failure_output="I failed!",
                     failure_type='failure')
def test_to_xml_string(self):
    """to_xml_string over two one-case suites yields the exact expected XML."""
    test_suites = [
        TestSuite('suite1', [TestCase('Test1')]),
        TestSuite('suite2', [TestCase('Test2')])
    ]
    xml_string = TestSuite.to_xml_string(test_suites)
    # The serializer indents with tab characters; the literal mirrors that.
    expected_xml_string = textwrap.dedent("""
<?xml version="1.0" ?>
<testsuites>
\t<testsuite errors="0" failures="0" name="suite1" skipped="0" tests="1" time="0">
\t\t<testcase name="Test1"/>
\t</testsuite>
\t<testsuite errors="0" failures="0" name="suite2" skipped="0" tests="1" time="0">
\t\t<testcase name="Test2"/>
\t</testsuite>
</testsuites>
""".strip("\n"))
    self.assertEqual(xml_string, expected_xml_string)
def save(self, filename, conf_files):
    """Build a TestSuite named *filename* from scenario results and return it.

    One TestCase is created per scenario; scenarios that did not succeed get
    their accumulated messages attached as error info.

    Fix: the original built the suite and then discarded it (no return, no
    write). Returning it is backward compatible — callers that ignored the
    previous None return are unaffected.
    """
    suite = TestSuite(filename)
    for conf_file in conf_files:
        for scenario in conf_file.scenarios:
            case = TestCase(scenario.name)
            suite.test_cases.append(case)
            if not scenario.succeeded():
                case.add_error_info("\n".join(scenario.messages))
    return suite
def run(test, backends):
    """Run *test* once per backend resource, building a junit TestCase each time."""
    import subprocess
    import time
    import difflib
    source = get_source(test)
    allargs = get_testargs(source)
    testcases = []
    my_env = os.environ.copy()
    # Make libCEED errors terminate the process so failures are detectable.
    my_env["CEED_ERROR_HANDLER"] = 'exit';
    for args, name in allargs:
        for ceed_resource in backends:
            # Substitute the concrete backend resource into the arg template.
            rargs = [os.path.join('build', test)] + args.copy()
            rargs[rargs.index('{ceed_resource}')] = ceed_resource
            if skip_rule(test, ceed_resource):
                # Known-bad combination: record a skipped case without running.
                case = TestCase('{} {}'.format(test, ceed_resource),
                                elapsed_sec=0,
                                timestamp=time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime()),
                                stdout='', stderr='')
                case.add_skipped_info('Pre-run skip rule')
            else:
                start = time.time()
                proc = subprocess.run(rargs, stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE, env=my_env)
                proc.stdout = proc.stdout.decode('utf-8')
                proc.stderr = proc.stderr.decode('utf-8')
                case = TestCase('{} {} {}'.format(test, *name, ceed_resource),
                                classname=os.path.dirname(source),
                                elapsed_sec=time.time()-start,
                                timestamp=time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(start)),
                                stdout=proc.stdout,
                                stderr=proc.stderr)
            # NOTE(review): ref_stdout is computed but unused in the visible
            # portion of this function — the tail may be truncated here.
            ref_stdout = os.path.join('tests/output', test + '.out')
            if not case.is_skipped() and proc.stderr:
                # Backend-reported "unsupported" conditions count as skips.
                if 'OCCA backend failed to use' in proc.stderr:
                    case.add_skipped_info('occa mode not supported {} {}'.format(test, ceed_resource))
                elif 'Backend does not implement' in proc.stderr:
                    case.add_skipped_info('not implemented {} {}'.format(test, ceed_resource))
def handle(line, inpipe):
    """Process one console line; return False once the test run has finished."""
    global logged_in
    global test_cases
    print("> {}".format(line), end='', flush=True)
    if not logged_in:
        # Wait for the login banner, log in, then launch the test runner.
        if line.startswith('Welcome to Buildroot'):
            time.sleep(1)
            print("we got the prompt!")
            send("root\n", inpipe)
            logged_in = True
            time.sleep(5)
            send("cd /usr/lib/uclibc-ng-test/test\n", inpipe)
            send("sh uclibcng-testrunner.sh\n", inpipe)
    else:
        # NOTE(review): time.time() is passed as TestCase's third positional
        # argument (elapsed_sec in junit_xml) — that is a wall-clock timestamp,
        # not a duration; confirm intent.
        if 'PASS ' in line:
            r = re.match("PASS (.*)", line)
            if r:
                test_name = r.group(1)
                test = TestCase(test_name, '', time.time())
                test_cases.append(test)
        if 'FAIL ' in line:
            r = re.match("FAIL (.*)", line)
            if r:
                test_name = r.group(1)
                test = TestCase(test_name, '', time.time())
                test.add_failure_info(message="FAIL")
                test_cases.append(test)
        if 'SKIP' in line:
            r = re.match("SKIP (.*)", line)
            if r:
                test_name = r.group(1)
                test = TestCase(test_name, '', time.time())
                test.add_skipped_info(message="SKIP")
                test_cases.append(test)
        if 'Total passed:' in line:
            # Summary line marks the end of the run.
            print(
                "uClibc-ng testsuite run is over, writing test results and exiting."
            )
            return False
    return True
def execute(self, log, keyvals, testDef): testDef.logger.verbose_print("JunitXML Reporter") # pickup the options cmds = {} testDef.parseOptions(log, self.options, keyvals, cmds) if cmds['filename'] is not None: self.fh = open(cmds['filename'] if os.path.isabs(cmds['filename']) \ else os.path.join(testDef.options['scratchdir'],cmds['filename']), 'w') if testDef.options['description'] is not None: print(testDef.options['description'], file=self.fh) print(file=self.fh) # Use the Junit classname field to store the list of inifiles try: classname = testDef.log['inifiles'] except KeyError: classname = None # get the entire log of results fullLog = testDef.logger.getLog(None) testCases = [] # TODO: ain't nobody got time for that. 8-). time = 0 for lg in fullLog: if 'stdout' in lg and lg['stdout'] is not None: stdout = "\n".join(lg['stdout']) else: stdout = None if 'stderr' in lg and lg['stderr'] is not None: stderr = "\n".join(lg['stderr']) else: stderr = None if 'time' in lg and lg['time'] is not None: time = lg['time'] else: time = 0 tc = TestCase(lg['section'], classname, time, stdout, stderr) try: if 0 != lg['status']: # Find sections prefixed with 'TestRun' if re.match("TestRun", lg['section']): tc.add_failure_info("Test reported failure") else: tc.add_error_info("Test error") except KeyError: sys.exit(lg['section'] + " is missing status!") testCases.append(tc) # TODO: Pull in the resource manager jobid. jobid = "job1" ts = TestSuite(jobid, testCases) print(TestSuite.to_xml_string([ts]), file=self.fh) if cmds['filename'] is not None: self.fh.close() log['status'] = 0 return
def run_notebook(input_notebook, add_nunit_attachment, parameters=None, kernel_name="ai-architecture-template", root="."):
    """
    Used to run a notebook in the correct directory.

    Parameters
    ----------
    :param input_notebook: Name of Notebook to Test
    :param add_nunit_attachment: callback forwarded to export_md for attachments
    :param parameters: parameters passed through to papermill
    :param kernel_name: Jupyter Kernel
    :param root: directory containing the notebook
    """
    output_notebook = input_notebook.replace(".ipynb", NOTEBOOK_OUTPUT_EXT)
    try:
        results = pm.execute_notebook(
            os.path.join(root, input_notebook),
            os.path.join(root, output_notebook),
            parameters=parameters,
            kernel_name=kernel_name)

        for cell in results.cells:
            # Fix: the original used `cell.cell_type is "code"`, an identity
            # comparison on a string literal that only works by accident of
            # interning; equality is the correct check.
            if cell.cell_type == "code":
                assert not cell.metadata.papermill.exception, "Error in Python Notebook"
    finally:
        # Always export the (possibly partial) output notebook as markdown.
        with open(os.path.join(root, output_notebook)) as json_file:
            data = json.load(json_file)
        jupyter_output = nbformat.reads(json.dumps(data), as_version=nbformat.NO_CONVERT)
        export_md(jupyter_output, output_notebook, add_nunit_attachment,
                  file_ext=".txt", root=root)

    # Scrape "Deployed X with name Y. Took Z seconds." timings into TestCases.
    regex = r'Deployed (.*) with name (.*). Took (.*) seconds.'
    with open(os.path.join(root, output_notebook), 'r') as file:
        data = file.read()
    test_cases = []
    for group in re.findall(regex, data):
        test_cases.append(
            TestCase(
                name=group[0] + " creation",
                classname=input_notebook,
                elapsed_sec=float(group[2]),
                status="Success"))
    ts = TestSuite("my test suite", test_cases)
    with open('test-timing-output.xml', 'w') as f:
        TestSuite.to_file(f, [ts], prettyprint=False)
def readTest(f, expectedFailures):
    """Parse result file *f* and build two TestCases.

    tc1 honours the expected-failures list (an expected failure is reported
    as skipped); tc2 reports the raw outcome. Returns (tc1, tc2, third) where
    *third* is the class name when the test failed, else None.
    """
    # Class name = filename minus last extension; name = second-to-last part.
    cl = ".".join(f.split(".")[:-1])
    name = f.split(".")[-2]
    with open(f) as fin:
        try:
            res = simplejson.load(fin)
        except simplejson.errors.JSONDecodeError:
            print("Error loading file %s" % f)
            raise
    expectFail = cl in expectedFailures
    if "killed" in res:
        # No timing or messages are available for killed/crashed runs.
        tc1 = TestCase(name, cl, 0, '', '')
        tc2 = TestCase(name, cl, 0, '', '')
        if expectFail:
            tc1.add_skipped_info('Killed or crashed; expected failure')
        else:
            tc1.add_error_info('Killed or crashed')
        tc2.add_error_info('Killed or crashed')
        return (tc1, tc2, cl)
    tc1 = TestCase(name, cl, res["time"], res["messages"], '')
    tc2 = TestCase(name, cl, res["time"], res["messages"], '')
    success = res["success"]
    shouldPass = res["shouldPass"]
    if expectFail:
        if success:
            # An expected failure that now passes is itself an error for tc1.
            tc1.add_error_info(
                'This testcase started working (failure was expected)')
        else:
            tc1.add_skipped_info('This testcase still fails (as expected)')
    elif not success:
        if shouldPass:
            tc1.add_error_info('failed')
        else:
            tc1.add_error_info('expected failure, but passed')
    if not success:
        if shouldPass:
            tc2.add_error_info('failed')
        else:
            # NOTE(review): this branch runs when success is False, yet the
            # message says "passed" — confirm the intended wording.
            tc2.add_error_info('expected failure, but passed')
    return (tc1, tc2, None if success else cl)
def test_init_classname_time(self):
    """Classname and time given positionally are both serialized."""
    suite = TestSuite('test', [TestCase('Test1', 'some.class.name', 123.345)])
    ts, tcs = serialize_and_read(suite)[0]
    expected = {
        'name': 'Test1',
        'classname': 'some.class.name',
        'time': ("%f" % 123.345),
    }
    verify_test_case(self, tcs[0], expected)
def run_test_set_result(self):
    """Exercise set_result() and return a TestCase describing the outcome.

    The case's stdout list is mutated in place to record the raw result.
    """
    mstdout = ['']
    test_cases = TestCase('run_test_set_result', '', '', mstdout, '')
    result = self.set_result(
        1561, 79, 1,
        'set result by auto test.\r\n ' + "put details here")
    # Fix: `result == None` replaced with the identity check `is None`
    # (PEP 8; `==` can be overridden and is the wrong idiom for None).
    if result is None:
        test_cases.add_failure_info('run_test_set_result failure')
    # Record the raw result just before the trailing sentinel entry.
    mstdout.insert(-1, str(result))
    return test_cases
def exception_test(test_str, exception):
    """Test should throw exception."""
    try:
        suite.test_cases.append(TestCase("%s fails" % test_str))
        # The expression is expected to raise *exception*.
        eval(test_str)
        # No exception raised -> the test failed.
        suite.test_cases[-1].add_failure_info('No exception')
    except exception as e:
        # Expected exception raised -> success.
        return
    # reach here on test failure
    # NOTE(review): on Python 3 the name `e` is unbound outside the except
    # block, so this line raises NameError if reached — confirm the intended
    # structure (it may belong inside a different handler).
    suite.test_cases[-1].add_failure_info('Exception', str(e))
def run_tests(self):
    """Run every registered test once and return a TestSuite of the results."""
    test_cases = []
    should_update_env = True
    for test in self.tests:
        desc = test['desc']
        name = test['name']
        index = test['id']
        test_case = TestCase(name, self.name)
        if '[.]' in desc:
            # '[.]' in the description marks a disabled test.
            print('skipping test "{}"'.format(name))
            test_case.add_skipped_info(
                message="Skipped test marked with [.]")
        else:
            # Capture the device/serial log for this test only.
            test_output = StringIO()
            self.sp.logfile = test_output
            print('running test "{}"'.format(name))
            if should_update_env:
                res = self.update_env(self.env_vars)
                if res != BSTestRunner.SUCCESS:
                    print('failed to set environment variables')
                    break
                should_update_env = False
            if name in self.mocks:
                debug_print('setting up mocks')
                self.mocks[name]['request_env'] = self.request_env
                self.mocks[name]['setup']()
                extra_env = mock_decorators.get_all_envs(name)
                if extra_env is not None:
                    self.update_env(extra_env)
            t_start = time.time()
            result = self.run_test(index)
            if name in self.mocks:
                debug_print('tearing down mocks')
                try:
                    self.mocks[name]['teardown']()
                except AssertionError:
                    # A teardown assertion converts a pass into a failure.
                    debug_print('teardown assert failure')
                    result = BSTestRunner.FAIL
            t_stop = time.time()
            self.sp.logfile = None
            test_case.elapsed_sec = t_stop - t_start
            debug_print('test output was:')
            debug_print(test_output.getvalue())
            if result == BSTestRunner.SUCCESS:
                # NOTE(review): on Python 3 filter() returns a lazy iterator,
                # so stdout would hold a filter object rather than a string —
                # confirm this targets Python 2 or wrap in ''.join().
                test_case.stdout = filter(lambda c: ord(c) < 128, test_output.getvalue())
                print('test "{}" passed'.format(name))
            else:
                print('test "{}" failed'.format(name))
                test_case.add_failure_info('Test failed', output=test_output.getvalue())
                # Re-apply the environment after a failed test.
                should_update_env = True
            test_output.close()
        test_cases += [test_case]
    return TestSuite(self.name, test_cases)
def main():
    """Read URL.properties, probe each URL with curl, emit a JUnit XML report."""
    propFile= open( r"URL.properties", "rU" )
    propDict= dict()
    for propLine in propFile:
        propDef= propLine.strip()
        if len(propDef) == 0:
            continue
        if propDef[0] in ( '!', '#' ):
            # '!' and '#' begin comment lines in .properties files.
            continue
        # Key ends at the first ':', '=' or space; value is the remainder.
        punctuation= [ propDef.find(c) for c in ':= ' ] + [ len(propDef) ]
        found= min( [ pos for pos in punctuation if pos != -1 ] )
        name= propDef[:found].rstrip()
        value= propDef[found:].lstrip(":= ").rstrip()
        propDict[name]= value
    propFile.close()
    print(propDict)
    output= dict()
    for key, value in propDict.items():
        # HEAD-only request, keeping just the HTTP status line.
        # NOTE(review): shell=True with an interpolated URL is shell-injection
        # prone if URL.properties is not trusted (shell is needed here for the
        # pipe to head).
        cmd="curl -Is "+value+"| head -n 1"
        print(cmd)
        process=subprocess.Popen([cmd], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout = process.communicate()[0]
        if stdout.decode('utf-8').rstrip()=='':
            # Empty output means curl produced no status line at all.
            output[key]="Unable to Connect"
        else:
            output[key]=stdout.decode("utf-8").rstrip()
    print(output)
    test_cases=[]
    for i, (key, value) in enumerate(output.items()):
        testname="Test" + str(i)
        if value != "HTTP/1.1 200":
            # Anything other than a plain 200 status line counts as a failure.
            test_cases.append(TestCase(testname, str(key), 1, str(value), 'failure'))
            test_cases[i].add_failure_info('Unable to Connect')
        else:
            test_cases.append(TestCase(testname, str(key), 1, str(value),''))
    ts = TestSuite("my test suite", test_cases)
    print(TestSuite.to_xml_string([ts]))
    with open('output.xml', 'w') as f:
        TestSuite.to_file(f, [ts], prettyprint=True)
def _build_test_case(self, task_data, host_data):
    """ build a TestCase from the given TaskData and HostData """
    name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
    duration = host_data.finish - task_data.start

    if self._task_class == 'true':
        # Strip the trailing ".yml:<line>" so the classname is just the path.
        # Fix: raw string — '\.' in a plain literal is an invalid escape
        # sequence (DeprecationWarning today, SyntaxError in future Pythons).
        junit_classname = re.sub(r'\.yml:[0-9]+$', '', task_data.path)
    else:
        junit_classname = task_data.path

    if host_data.status == 'included':
        # Included files carry their result string directly as stdout.
        return TestCase(name, junit_classname, duration, host_data.result)

    res = host_data.result._result
    rc = res.get('rc', 0)
    dump = self._dump_results(res, indent=0)
    dump = self._cleanse_string(dump)

    if host_data.status == 'ok':
        return TestCase(name, junit_classname, duration, dump)

    test_case = TestCase(name, junit_classname, duration)

    if host_data.status == 'failed':
        if 'exception' in res:
            # Use the final traceback line as the short message.
            message = res['exception'].strip().split('\n')[-1]
            output = res['exception']
            test_case.add_error_info(message, output)
        elif 'msg' in res:
            message = res['msg']
            test_case.add_failure_info(message, dump)
        else:
            test_case.add_failure_info('rc=%s' % rc, dump)
    elif host_data.status == 'skipped':
        if 'skip_reason' in res:
            message = res['skip_reason']
        else:
            message = 'skipped'
        test_case.add_skipped_info(message)

    return test_case
def test_to_xml_string(self):
    """Two one-case suites serialize to the exact expected XML document.

    This variant also checks aggregate attributes (disabled/tests/time) and,
    on Python 2, that a unicode string is returned.
    """
    test_suites = [
        TestSuite(name='suite1', test_cases=[TestCase(name='Test1')]),
        TestSuite(name='suite2', test_cases=[TestCase(name='Test2')])
    ]
    xml_string = TestSuite.to_xml_string(test_suites)
    if PY2:
        # On Python 2 the serializer must return unicode, not bytes.
        self.assertTrue(isinstance(xml_string, unicode))
    # The serializer indents with tab characters; the literal mirrors that.
    expected_xml_string = textwrap.dedent("""
<?xml version="1.0" ?>
<testsuites disabled="0" errors="0" failures="0" tests="2" time="0.0">
\t<testsuite disabled="0" errors="0" failures="0" name="suite1" skipped="0" tests="1" time="0">
\t\t<testcase name="Test1"/>
\t</testsuite>
\t<testsuite disabled="0" errors="0" failures="0" name="suite2" skipped="0" tests="1" time="0">
\t\t<testcase name="Test2"/>
\t</testsuite>
</testsuites>
""".strip("\n"))  # NOQA
    self.assertEqual(xml_string, expected_xml_string)
def build_test_cases(output):
    """Convert (name, failure_message, exc_info) triples into TestCases.

    A non-empty failure_message wins over exc_info; a triple with neither
    yields a plain passing case.
    """
    cases = []
    for name, failure_message, exc_info in output:
        case = TestCase(name)
        if failure_message:
            case.add_failure_info(output=failure_message)
        elif exc_info:
            formatted = "".join(traceback.format_exception(*exc_info))
            case.add_error_info(output=formatted)
        cases.append(case)
    return cases
def junit(self) -> TestCase:
    """
    Return JUnit XML test case.

    :returns: test case.
    """
    return TestCase(self.id,
                    stdout=self.msg,
                    file=self.filename,
                    line=self.row)
def on_set_current_module(self, module, filepath):
    """Close out the previous module with an all-clear case, then switch state.

    A fresh TestSuite is created for modules seen for the first time.
    """
    previous = self.current_module
    if previous is not None and self.items[previous].test_cases is not None:
        message = "All checks passed for: {0}".format(self.current_filepath)
        case_name = "{0}:0:0".format(previous)
        case = TestCase(case_name, stdout=message,
                        file=self.current_filepath, line=0)
        self.items[previous].test_cases.append(case)
    self.current_module = module
    self.current_filepath = filepath
    if module not in self.items:
        self.items[module] = TestSuite(module)
def generate_junit(self):
    """Write a JUnit XML results file with one TestCase per pass/fail slot."""
    # Get the test count
    test_count = self.passes + self.fails
    # Add the first test case
    # Each case is charged an equal share of the total duration.
    test_cases = [
        TestCase(self.script_name + str(0), '',
                 self.duration / test_count, '', '')
    ]
    # Add the remaining test cases
    for x in range(1, test_count):
        test_cases.append(
            TestCase(self.script_name + str(x), '',
                     self.duration / test_count, '', ''))
    # Add any failure info
    # NOTE(review): every failure description is attached to test_cases[0],
    # not to test_cases[x] — confirm that is intentional.
    for x in range(0, self.fails):
        test_cases[0].add_failure_info(self.test_description[x])
    ts = TestSuite("test suite", test_cases)
    with open(join(self.log_dir, self.script_name + '_results.xml'), 'w') as f:
        TestSuite.to_file(f, [ts], prettyprint=False)
def create_xml_report(failures, dockerfile_path):
    """Make a full XML report file."""
    case = TestCase(
        "Lint " + dockerfile_path,
        classname="dockerlint.main",
    )
    for failure in failures:
        case.add_failure_info(message=failure.__str__())
    suite = TestSuite("dockerlint", test_cases=[case])
    return to_xml_report_string([suite])
def dump_junit_xml():
    """Write a single hard-coded stress-test result to ./testresults.xml."""
    from junit_xml import TestSuite, TestCase
    case = TestCase('testname', 'package.test', 123.345,
                    'I am stdout!', 'I am stderr!')
    suites = [TestSuite("stress test suite", [case])]
    with open('./testresults.xml', mode='w') as f:
        TestSuite.to_file(f, suites)
def to_junit_xml(self) -> TestCase:
    """Convert this finding into a junit_xml TestCase carrying failure info.

    Fix: the original annotation claimed ``Dict[str, Any]`` but the function
    returns a ``TestCase``; the annotation now matches the actual return.
    """
    test_case = TestCase(
        self.id,
        file=str(self.path),
        line=self.start["line"],
        classname=str(self.path),
    )
    # Severity is mapped onto junit's failure_type; the offending source
    # lines become the failure output.
    test_case.add_failure_info(message=self.message,
                               output=self.lines,
                               failure_type=self.severity)
    return test_case