Example #1
    def print_matches(self, matches, rules=None, filenames=None):
        """Output all the matches"""

        if not rules:
            return None

        test_cases = []
        for rule in rules.all_rules:
            if not rules.is_rule_enabled(rule):
                if not rule.id:
                    continue
                test_case = TestCase(
                    name='{0} {1}'.format(rule.id, rule.shortdesc))

                if rule.experimental:
                    test_case.add_skipped_info(
                        message='Experimental rule - not enabled')
                else:
                    test_case.add_skipped_info(message='Ignored rule')
                test_cases.append(test_case)
            else:
                test_case = TestCase(name='{0} {1}'.format(
                    rule.id, rule.shortdesc),
                                     allow_multiple_subelements=True,
                                     url=rule.source_url)
                for match in matches:
                    if match.rule.id == rule.id:
                        test_case.add_failure_info(
                            message=self._failure_format(match),
                            failure_type=match.message)
                test_cases.append(test_case)

        test_suite = TestSuite('CloudFormation Lint', test_cases)

        return to_xml_report_string([test_suite], prettyprint=True)
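
Note: this example uses the module-level to_xml_report_string API added in junit-xml 2.0; most of the older examples below call the deprecated TestSuite.to_xml_string / TestSuite.to_file static methods instead. A minimal self-contained sketch of the same skip/failure pattern, with hypothetical rule names standing in for cfn-lint's rule objects:

from junit_xml import TestCase, TestSuite, to_xml_report_string

# Hypothetical stand-ins for cfn-lint rule objects.
skipped = TestCase(name='W9001 Hypothetical disabled rule')
skipped.add_skipped_info(message='Ignored rule')

failed = TestCase(name='E9001 Hypothetical enabled rule',
                  allow_multiple_subelements=True)
failed.add_failure_info(message='template.yaml:12:3',
                        failure_type='Missing property')

print(to_xml_report_string([TestSuite('CloudFormation Lint',
                                      [skipped, failed])],
                           prettyprint=True))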
Example #2
 def test_init_error(self):
     tc = TestCase('Error-Message-and-Output')
     tc.add_error_info("error message", "I errored!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(
         self, tcs[0], {'name': 'Error-Message-and-Output'},
         error_message="error message", error_output="I errored!")
Example #3
def test_attribute_disable():
    tc = Case("Disabled-Test")
    tc.is_enabled = False
    tss = [Suite("suite1", [tc])]
    suites = serialize_and_read(tss)

    assert suites[0][0].attributes["disabled"].value == "1"
Example #4
 def test_init_failure_message(self):
     tc = TestCase('Failure-Message')
     tc.add_failure_info("failure message")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(self,
                      tcs[0], {'name': 'Failure-Message'},
                      failure_message="failure message")
Example #5
def test_init_unicode():
    tc = Case(
        name=decode("Test äöü", "utf-8"),
        classname=decode("some.class.name.äöü", "utf-8"),
        elapsed_sec=123.345,
        stdout=decode("I am stdöüt!", "utf-8"),
        stderr=decode("I am stdärr!", "utf-8"),
    )
    tc.add_skipped_info(message=decode("Skipped äöü", "utf-8"),
                        output=decode("I skippäd!", "utf-8"))
    tc.add_error_info(message=decode("Skipped error äöü", "utf-8"),
                      output=decode("I skippäd with an error!", "utf-8"))

    ts, tcs = serialize_and_read(Suite("Test Unicode", [tc]))[0]
    verify_test_case(
        tcs[0],
        {
            "name": decode("Test äöü", "utf-8"),
            "classname": decode("some.class.name.äöü", "utf-8"),
            "time": ("%f" % 123.345),
        },
        stdout=decode("I am stdöüt!", "utf-8"),
        stderr=decode("I am stdärr!", "utf-8"),
        skipped_message=decode("Skipped äöü", "utf-8"),
        skipped_output=decode("I skippäd!", "utf-8"),
        error_message=decode("Skipped error äöü", "utf-8"),
        error_output=decode("I skippäd with an error!", "utf-8"),
    )
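
Note: the decode helper used by the unicode tests is defined in junit-xml's test module and is not shown; a plausible sketch (it only needs to turn byte literals into unicode on Python 2):

import sys

def decode(string, encoding):
    # Python 2 literals are byte strings and need decoding; Python 3
    # literals are already str (sketch of the assumed helper).
    if sys.version_info[0] < 3:
        return string.decode(encoding)
    return string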
Example #6
    def run_test_configs(self, project_name):
        mstdout = ['']
        test_cases = TestCase('run_test_configs', '', '', mstdout, '')
        mproj = self.get_project_by_name(project_name)
        configs = self.get_all_configs(mproj['id'])
        for config in configs:
            inconfig = config
            logging.info("-----------------------------")
            logging.info(inconfig)
            logging.info(inconfig['name'])
            mstdout.insert(-1, str(inconfig['name']))
            logging.info(inconfig['id'])
            mstdout.insert(-1, str(inconfig['id']))
            for item in inconfig['configs']:
                mitems = self.toutf8(item)
                logging.info(".." + mitems['id'])
                mstdout.insert(-1, mitems['id'])
                logging.info(".." + mitems['name'])
                mstdout.insert(-1, mitems['name'])
            frdmk64f_id = self.get_config_item_id_by_name(
                mproj['id'], inconfig['name'], "frdm_k64f")
            frdmk64_name = self.get_config_item_name_by_id(
                mproj['id'], inconfig['id'], frdmk64f_id)
            logging.info("frdmk64f id is %s" % (frdmk64f_id))
            mstdout.insert(-1, "frdmk64f id is %s" % (frdmk64f_id))
            if frdmk64f_id is None or frdmk64_name is None:
                test_cases.add_failure_info(
                    'get_config_item_id_by_name failure')

        return test_cases
Example #7
def test_init_skipped():
    tc = Case("Skipped-Message-and-Output")
    tc.add_skipped_info("skipped message", "I skipped!")
    ts, tcs = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(tcs[0], {"name": "Skipped-Message-and-Output"},
                     skipped_message="skipped message",
                     skipped_output="I skipped!")
Example #8
def generate_junit_report(test_name, total_thresholds, report_name):
    test_cases = []
    file_name = f"junit_report_{report_name}.xml"
    logger.info(f"Generate report {file_name}")

    for item in total_thresholds["details"]:
        message = item['message']
        test_case = TestCase(
            item['name'],
            classname=f"{item['scope']}",
            status="PASSED",
            stdout=f"{item['scope']} {item['name'].lower()} {item['aggregation']} {item['actual']} "
                   f"{item['rule']} {item['expected']}")
        if message:
            test_case.status = "FAILED"
            test_case.add_failure_info(message)
        test_cases.append(test_case)

    ts = TestSuite(test_name, test_cases)
    os.makedirs(f"{REPORT_PATH}/junit", exist_ok=True)
    with open(f"{REPORT_PATH}/junit/{file_name}", 'w') as f:
        TestSuite.to_file(f, [ts], prettyprint=True)

    return file_name
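
A hypothetical invocation, with total_thresholds shaped the way the loop above reads it; an empty message keeps a case PASSED, and REPORT_PATH and logger are assumed to be defined elsewhere in the module:

total_thresholds = {
    "details": [
        {
            "name": "Response Time", "scope": "all", "aggregation": "pct95",
            "actual": 870, "rule": "<", "expected": 1000,
            "message": "",  # empty: stays PASSED
        },
        {
            "name": "Error Rate", "scope": "all", "aggregation": "avg",
            "actual": 2.5, "rule": "<", "expected": 1.0,
            "message": "Error rate 2.5 exceeded 1.0",  # marks the case FAILED
        },
    ]
}
generate_junit_report("perf_baseline", total_thresholds, "baseline")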
Example #9
 def test_init_error_message(self):
     tc = TestCase('Error-Message')
     tc.add_error_info("error message")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(self,
                      tcs[0], {'name': 'Error-Message'},
                      error_message="error message")
Example #10
    def test_attribute_disable(self):
        tc = TestCase('Disabled-Test')
        tc.is_enabled = False
        tss = [TestSuite('suite1', [tc])]
        suites = serialize_and_read(tss)

        self.assertEqual('1', suites[0][0].attributes['disabled'].value)
Example #11
 def test_init_error_output(self):
     tc = TestCase('Error-Output')
     tc.add_error_info(output="I errored!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(self,
                      tcs[0], {'name': 'Error-Output'},
                      error_output="I errored!")
Example #12
    def exporter_junit_ioper(self, test_result_ext, test_suite_properties=None):
        from junit_xml import TestSuite, TestCase

        test_suites = []
        test_cases = []

        for platform in sorted(test_result_ext.keys()):
            # {platform : ['Platform', 'Result', 'Scope', 'Description']}
            test_cases = []
            for tr_result in test_result_ext[platform]:
                result, name, scope, description = tr_result

                classname = "test.ioper.%s.%s.%s" % (platform, name, scope)
                elapsed_sec = 0
                _stdout = description
                _stderr = ""
                # Test case
                tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                # Test case extra failure / error info
                if result == "FAIL":
                    tc.add_failure_info(description, _stdout)
                elif result == "ERROR":
                    tc.add_error_info(description, _stdout)

                test_cases.append(tc)
            ts = TestSuite("test.suite.ioper.%s" % (platform), test_cases)
            test_suites.append(ts)
        return TestSuite.to_xml_string(test_suites)
Example #13
 def Reg_Create_Snow_order(self):
     test_case_result = TestCase(name="Reg_Create_Snow_order", classname=self.__class__.__name__)
     global serviceInstanceName
     provider = "snow"
     passed = False
     try:
         for i in range(1, 2):
             print "Placing order: " + str(i + 1)
             epoch_time = str(int(time.time()))
             serviceInstanceName = "testAPISnow" + epoch_time
             print "serviceInstanceName : " + str(serviceInstanceName)
             # To avoid 429 HTTP server error, we have to add a sleep between http requests
             time.sleep(4)
             print 111
             orderNumber, passed = self.api_client.createOrder(orderURL, serviceInstanceName, provider, i + 1)
             print 2222
             print orderNumber
             print 3333
             print passed
             time.sleep(4)
             if passed:
                 print "Approving order: " + str(orderNumber)
                 print 444
                 passed = self.api_client.approveOrder(orderNumber)
                 print 555
             if not passed:
                 responseBody = "Failure to create order"
                 print "Softlayer order creation failed. Approve will be skipped"
                 test_case_result.add_failure_info("Input " + str(i + 1) + " failed", responseBody)
                 break
     except:
         print "An Error Occured"
         passed=False
     status['APITest'] = passed
     return passed, test_case_result
Example #14
 def log_failed(test_name, actual, expected):
     if xml_out:
         tc = TestCase(test_name, executable, 0, actual, "")
         tc.add_failure_info(message="Test Failed\nExpected:\n%s\nActual:\n%s" % (expected, actual))
         test_cases.append(tc)
     else:
         print "Test %s failed:\nexpected:\n%s\nactual:\n%s\n---------\n" % (test_name, expected, actual)
Example #15
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('swift', help='path to swift executable')
  parser.add_argument('output', help='where to write xUnit output')
  args = parser.parse_args()

  test_cases = [
      benchmark(
          TestCase('debug build'),
          [args.swift, 'build', '--product', 'TensorFlow']
      ),
      benchmark(
          TestCase('release build'),
          [args.swift, 'build', '-c', 'release', '--product', 'TensorFlow']
      ),

      # The point of "release build -Onone" is to compile TensorFlow in
      # "-whole-module-optimization" mode without "-O".
      benchmark(
          TestCase('release build -Onone'),
          [args.swift, 'build', '-c', 'release', '--product', 'TensorFlow',
           '-Xswiftc', '-Onone']
      ),
  ]

  test_suite = TestSuite('swift-apis compile time', test_cases)

  with open(args.output, 'w') as f:
    TestSuite.to_file(f, [test_suite])
Example #16
 def log_errord(test_name, stderr, stdout):
     if xml_out:
         tc = TestCase(test_name, executable, 0, stdout, stderr)
         tc.add_error_info(message="Test Error\n", output=stdout)
         test_cases.append(tc)
     else:
         print "Test %s failed:\nstderr:\n %s\nstdout:\n%s\n----------\n" % (test_name, stderr, stdout)
Example #17
def write_junitxml(output_junitxml, results):
    """Write output file as JUnitXML format"""
    if not JUNIT_XML_FOUND:
        log = logging.getLogger(__name__ + ".write_junitxml")
        log.warning('junitxml output disabled: the `junit_xml` python module '
                    'is missing.')
        return
    test_cases = []
    duration_re = re.compile('([0-9]+):([0-9]+):([0-9]+).([0-9]+)')
    for vitem in results:
        if vitem.get('Validations'):
            parsed_duration = 0
            test_duration = vitem.get('Duration', '')
            matched_duration = duration_re.match(test_duration)
            if matched_duration:
                parsed_duration = (int(matched_duration[1])*3600
                                   + int(matched_duration[2])*60
                                   + int(matched_duration[3])
                                   + float('0.{}'.format(matched_duration[4])))

            test_stdout = vitem.get('Status_by_Host', '')

            test_case = TestCase('validations', vitem['Validations'],
                                 parsed_duration, test_stdout)
            if vitem['Status'] == 'FAILED':
                test_case.add_failure_info('FAILED')
            test_cases.append(test_case)

    ts = TestSuite("Validations", test_cases)
    with open(output_junitxml, 'w') as output:
        output.write(to_xml_report_string([ts]))
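
For reference, a hypothetical results entry matching the keys this function reads; the Duration string must match the H:MM:SS.fff pattern for the elapsed time to be parsed:

results = [{
    "Validations": "512e",
    "Duration": "0:00:03.339",
    "Status_by_Host": "undercloud: PASSED",
    "Status": "PASSED",
}]
write_junitxml("validations.xml", results)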
Example #18
 def write_testcase(self, build, machine, test, path, passed, start_ts,
                    end_ts):
     delta = end_ts - start_ts
     tc = {
         'name': test.name,
         'class': machine.getName(),
         'time_elapsed': delta.total_seconds(),
         'stdout': '\n'.join(self._harness.process_output(test, path)),
         'stderr': "",
         'passed': passed
     }
     if have_junit_xml:
         ju_tc = TestCase(
             tc['name'],
             tc['class'],
             tc['time_elapsed'],
             tc['stdout'],
         )
         if not passed:
             errors = self._harness.extract_errors(test, path)
             errorstr = 'Failed'
             if errors is not None and len(errors) > 0:
                 errorstr += ': ' + ''.join(
                     [unicode(l, errors='replace') for l in errors])
             ju_tc.add_failure_info(message=errorstr)
         return ju_tc
     else:
         return tc
Example #19
def test_multiple_failures():
    """Tests multiple failures in one test case"""
    tc = Case("Multiple failures", allow_multiple_subelements=True)
    tc.add_failure_info("First failure", "First failure message")
    (_, tcs) = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Multiple failures"},
        failures=[{
            "message": "First failure",
            "output": "First failure message",
            "type": "failure"
        }],
    )
    tc.add_failure_info("Second failure", "Second failure message")
    (_, tcs) = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Multiple failures"},
        failures=[
            {
                "message": "First failure",
                "output": "First failure message",
                "type": "failure"
            },
            {
                "message": "Second failure",
                "output": "Second failure message",
                "type": "failure"
            },
        ],
    )
Example #20
 def generateJUnitReport(self, lstRunResult, runResultDir):
     # Create a JUnit XML report file using junit-xml 1.4 (pip install junit-xml)
     resultFileName = runResultDir + os.path.sep + 'RunResult.xml'
     previousCaseModuleName = ''
     rowIndex = 0
     lstTestSuites = []
     testSuite = []
     for runResult in lstRunResult:
         #runResult (sheetName, moduleName, testCaseID, runResult, timeElapsedSec, failureMessage)
         #test
         testCaseName = runResult[2]
         className = runResult[1] + '.' + runResult[2]
         timeElapsedSec = runResult[4]
         failureMessage = runResult[5]
         testCase = TestCase(testCaseName, className, timeElapsedSec)
         if failureMessage:
             testCase.add_failure_info(None, failureMessage)
         currTestCaseModuleName = runResult[1]
         if currTestCaseModuleName != previousCaseModuleName:
             testSuite = TestSuite(currTestCaseModuleName)
             lstTestSuites.append(testSuite)
             previousCaseModuleName = currTestCaseModuleName
         testSuite.test_cases.append(testCase)
     #print TestSuite.to_xml_string(lstTestSuites)
     #Write the xml content to result file
     with open(resultFileName, 'w') as f:
         TestSuite.to_file(f, lstTestSuites)
Example #21
    def process_report(requests, thresholds):
        functional_test_cases, threshold_test_cases = [], []
        test_suites = []
        for req in requests:
            if requests[req]['KO'] != 0:
                functional_test_cases.append(TestCase(name=requests[req]['request_name'],
                                                      stdout="PASSED: {}. FAILED: {}".format(str(requests[req]['OK']),
                                                                                             str(requests[req]['KO'])),
                                                      stderr="FAILED: {}".format(str(requests[req]['KO']))))
                functional_test_cases[-1].add_failure_info("Request failed {} times".format(str(requests[req]['KO'])))
            else:
                functional_test_cases.append(
                    TestCase(name=requests[req]['request_name'], stdout="PASSED: {}".format(str(requests[req]['OK'])),
                             stderr="FAILED: {}".format(str(requests[req]['KO']))))

        test_suites.append(TestSuite("Functional errors ", functional_test_cases))

        for th in thresholds:
            threshold_test_cases.append(TestCase(name="Threshold for {}, target - {}".format(th['scope'], th['target']),
                                                 stdout="Value: {} {}. Threshold value: {} {}".format(str(th['value']),
                                                                                                th['metric'],
                                                                                                str(th['threshold']),
                                                                                                th['metric'])))
            if th['status'] == 'FAILED':
                threshold_test_cases[-1].add_failure_info("{} for {} exceeded threshold of {} {}. Test result - {} {}"
                                                          .format(th['target'], th['scope'], str(th['threshold']),
                                                                  th['metric'], str(th['value']), th['metric']))

        test_suites.append(TestSuite("Thresholds ", threshold_test_cases))
        with open("/tmp/reports/jmeter.xml", 'w') as f:
            TestSuite.to_file(f, test_suites, prettyprint=True)
Example #22
def exporter_junit(test_result_ext, test_suite_properties=None):
    """! Export test results in JUnit XML compliant format
    @details This function will import junit_xml library to perform report conversion
    @return String containing Junit XML formatted test result output
    """
    from junit_xml import TestSuite, TestCase

    test_suites = []
    test_cases = []

    targets = sorted(test_result_ext.keys())
    for target in targets:
        test_cases = []
        tests = sorted(test_result_ext[target].keys())
        for test in tests:
            test_results = test_result_ext[target][test]
            classname = 'test.%s.%s' % (target, test)
            elapsed_sec = test_results['elapsed_time']
            _stdout = test_results['single_test_output']
            _stderr = ''
            # Test case
            tc = TestCase(test, classname, elapsed_sec, _stdout, _stderr)
            # Test case extra failure / error info
            if test_results['single_test_result'] == 'FAIL':
                message = test_results['single_test_result']
                tc.add_failure_info(message, _stdout)
            elif test_results['single_test_result'] != 'OK':
                message = test_results['single_test_result']
                tc.add_error_info(message, _stdout)

            test_cases.append(tc)
        ts = TestSuite("test.suite.%s" % target, test_cases)
        test_suites.append(ts)
    return TestSuite.to_xml_string(test_suites)
Example #23
class JunitReporter:

    REPORTS_DIRECTORY = "reports"

    def __init__(self):
        self._dummy_test_case = None

    def activate(self):
        subscribe_event_handlers(self)

    def on_suite_erred(self, suite_name, exception=None, **kwargs):
        self._dummy_test_case = TestCase(name=suite_name, status="error")
        if exception:
            self._dummy_test_case.add_error_info(
                message=exception_str(exception),
                output=format_exception(exception))

    def on_suite_results_compiled(self, suite_results, **kwargs):
        suite_name = suite_results.suite_name or "NamelessSuite"
        test_cases = convert_tests(suite_results.tests)
        if self._dummy_test_case:
            test_cases.append(self._dummy_test_case)
        suite = dependency(TestSuite)(name=infer_package_name() + suite_name,
                                      timestamp=current_time().isoformat(),
                                      test_cases=test_cases)
        xml_report = ElementTree.tostring(
            suite.build_xml_doc(), encoding="utf-8").decode(encoding="utf-8")
        EventBroker.publish(
            event=TestEvent.report_created,
            suite=suite,
            cases=test_cases,
            report_filename=suite_name + ".xml",
            report_content=xml_report,
        )
Example #24
 def test_init_failure(self):
     tc = TestCase('Failure-Message-and-Output')
     tc.add_failure_info("failure message", "I failed!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(
         self, tcs[0], {'name': 'Failure-Message-and-Output'},
         failure_message="failure message", failure_output="I failed!")
Example #25
def _gen_cases(n_passes, n_fails, n_skips, n_errors):
    result = []
    for i in range(n_passes):
        case = TestCase(name='TestPassed%s' % i,
                        classname='generated.xml.test.case.passes',
                        elapsed_sec=rand_duration())
        result.append(case)

    for i in range(n_skips):
        case = TestCase(name='TestSkipped%s' % i,
                        classname='generated.xml.test.case.skips',
                        elapsed_sec=rand_duration())
        case.add_skipped_info(message=rand_string('skipped!'))
        result.append(case)

    for i in range(n_fails):
        case = TestCase(name='TestFailed%s' % i,
                        classname='generated.xml.test.case.fails',
                        elapsed_sec=rand_duration())
        case.add_failure_info(message=rand_string('failure!'))
        result.append(case)

    for i in range(n_errors):
        case = TestCase(name='TestErrored%s' % i,
                        classname='generated.xml.test.case.errors',
                        elapsed_sec=rand_duration())
        case.add_error_info(message=rand_string('error!'))
        result.append(case)

    return result
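
The rand_duration and rand_string helpers are not shown; minimal sketches consistent with how they are used above (names and exact behavior assumed):

import random
import string

def rand_duration(max_sec=10.0):
    # Random elapsed time in seconds (assumed helper).
    return round(random.uniform(0.0, max_sec), 3)

def rand_string(suffix=''):
    # Random alphanumeric text with an optional trailing marker (assumed helper).
    prefix = ''.join(random.choice(string.ascii_letters) for _ in range(8))
    return '{} {}'.format(prefix, suffix).strip()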
Example #26
    def get_log_entry_case(cls, entry: LogEntry,
                           fail_cases: Dict[str,
                                            List[TestCase]], suite_name: str,
                           failure_message: str) -> List[TestCase]:
        fail_case: List[TestCase] = list()

        if cls._is_duplicate_entry(entry, failure_message, fail_cases):
            return []

        test_case = TestCase(name=entry.func,
                             classname=suite_name,
                             category=suite_name,
                             timestamp=entry.time)
        test_case.failures.append(
            CaseFailure(message=failure_message,
                        output=failure_message,
                        type=entry.level))
        fail_case.append(test_case)

        if entry.level != "fatal":
            # Add test case with the same name so it will be marked in PROW as flaky
            flaky_test_case = TestCase(name=entry.func,
                                       classname=suite_name,
                                       category=suite_name)
            fail_case.append(flaky_test_case)

        return fail_case
Example #27
def generate_junit_xml(inputfile):
    target = None
    suite = None
    infos = []
    errors = []
    testcases = []

    for line in inputfile:
        tag = line[0:3]
        props = line[3:].split(':')
        if tag == "[!]":
            if len(props) == 2:
                if props[0].strip().lower() == "target":
                    target = os.path.basename(props[1].strip())
                elif props[0].strip().lower() == "group":
                    suite = props[1].strip()
                else:
                    infos.append(line)
            else:
                infos.append(line)
        if tag == "[x]":
            errors.append(line)
        if tag == "[+]":
            testcases.append(TestCase(name=props[0].strip(), classname=target, stdout=line))
        if tag == "[-]":
            tc = TestCase(name=props[0].strip(), classname=target)
            tc.add_failure_info(message=props[1].strip(), output=line, failure_type="failed")
            testcases.append(tc)

    ts = TestSuite(name=suite, test_cases=testcases, stdout="\n".join(infos), stderr="\n".join(errors))
    return TestSuite.to_xml_string([ts])
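
A hypothetical input illustrating the tagged line format this parser expects ([!] metadata, [+] pass, [-] fail, [x] error):

sample_lines = [
    "[!] target: firmware.bin",
    "[!] group: smoke",
    "[+] boot_ok: device booted",
    "[-] flash_write: timeout waiting for ack",
    "[x] fatal: lost serial connection",
]
print(generate_junit_xml(sample_lines))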
Example #28
def test_init_utf8():
    tc = Case(
        name="Test äöü",
        classname="some.class.name.äöü",
        elapsed_sec=123.345,
        stdout="I am stdöüt!",
        stderr="I am stdärr!",
    )
    tc.add_skipped_info(message="Skipped äöü", output="I skippäd!")
    tc.add_error_info(message="Skipped error äöü",
                      output="I skippäd with an error!")
    test_suite = Suite("Test UTF-8", [tc])
    #pylint: disable=unused-variable
    ts, tcs = serialize_and_read(test_suite, encoding="utf-8")[0]
    verify_test_case(
        tcs[0],
        {
            "name": decode("Test äöü", "utf-8"),
            "classname": decode("some.class.name.äöü", "utf-8"),
            "time": ("%f" % 123.345),
        },
        stdout=decode("I am stdöüt!", "utf-8"),
        stderr=decode("I am stdärr!", "utf-8"),
        skipped_message=decode("Skipped äöü", "utf-8"),
        skipped_output=decode("I skippäd!", "utf-8"),
        error_message=decode("Skipped error äöü", "utf-8"),
        error_output=decode("I skippäd with an error!", "utf-8"),
    )
Example #29
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):

    # print('runtest makereport')
    outcome = yield
    rep = outcome.get_result()
    testScript = rep.fspath
    scriptName = os.path.basename(testScript)

    testname = ""
    try:
        testname = rep.item['name']
    except:
        testname = rep.nodeid

    if rep.when == 'call':
        scenario = rep.scenario
        steps = scenario["steps"]
        durInSeconds = rep.duration
        # for step in steps:
        #     print("********" + step["name"] + ": " + str(step["failed"]))

        stepsSummary = generateTestStepsSummary(steps)
        # print("********name " + testname + " : " + rep.outcome )
        # logging.getLogger().info('new Test CAse ' + testname)
        testCase = TestCase(testname, '', durInSeconds, stepsSummary)
        # logging.getLogger().info('Testname is ' + testname)

        if rep.failed:
            testCase.add_failure_info(rep.longreprtext)

        testCaseDictList[item.fspath.strpath].append(testCase)
Example #30
 def test_init_skipped(self):
     tc = TestCase('Skipped-Message-and-Output')
     tc.add_skipped_info("skipped message", "I skipped!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(
         self, tcs[0], {'name': 'Skipped-Message-and-Output'},
         skipped_message="skipped message", skipped_output="I skipped!")
Example #31
 def test_init_skipped_output(self):
     tc = TestCase('Skipped-Output')
     tc.add_skipped_info(output="I skipped!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(self,
                      tcs[0], {'name': 'Skipped-Output'},
                      skipped_output="I skipped!")
Example #32
def test_init_illegal_unicode_char():
    tc = Case("Failure-Message")
    tc.add_failure_info(u("failure message with illegal unicode char: [\x02]"))
    ts, tcs = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(
        tcs[0], {"name": "Failure-Message"},
        failure_message=u("failure message with illegal unicode char: []"))
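
The u helper mirrors six.u, returning a unicode string on both Python 2 and 3; a rough sketch of the assumed helper:

import sys

def u(string):
    # Interpret escape sequences as unicode on Python 2; Python 3 str
    # is already unicode (assumed six.u-style helper).
    if sys.version_info[0] < 3:
        return string.decode('unicode_escape')
    return string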
Example #33
 def test_init_skipped_message(self):
     tc = TestCase('Skipped-Message')
     tc.add_skipped_info("skipped message")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(self,
                      tcs[0], {'name': 'Skipped-Message'},
                      skipped_message="skipped message")
Example #34
def test_multiple_errors():
    """Tests multiple errors in one test case"""
    tc = Case("Multiple error", allow_multiple_subelements=True)
    tc.add_error_info("First error", "First error message")
    (_, tcs) = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Multiple error"},
        errors=[{
            "message": "First error",
            "output": "First error message",
            "type": "error"
        }],
    )
    tc.add_error_info("Second error", "Second error message")
    (_, tcs) = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Multiple error"},
        errors=[
            {
                "message": "First error",
                "output": "First error message",
                "type": "error"
            },
            {
                "message": "Second error",
                "output": "Second error message",
                "type": "error"
            },
        ],
    )
Example #35
    def _collect_test_suite(self, scenario_result):
        if isinstance(scenario_result, GeneralError):
            test_case = TestCase("", "")
            test_case.add_error_info(scenario_result.message)
            test_suite = TestSuite("", "")
            test_suite.test_cases.append(test_case)
            return test_suite

        test_suite = TestSuite(scenario_result.name)
        for test_result in scenario_result.test_results:
            test_case = TestCase(test_result.name, test_result.name)
            for result in test_result.results:
                if isinstance(result, Failed):
                    test_case.add_failure_info("ASSERTION {} failed".format(result.pretty_name),
                                               "EXPECTED {}\nGOT {}".format(result.expected,
                                                                            result.actual))
                elif isinstance(result, (Error, ConnectionError)):
                    test_case.add_error_info("ASSERTION {} failed".format(result.pretty_name),
                                             "ERROR {}".format(result.error))
                elif isinstance(result, Passed):
                    pass
                # TODO: What to do below?
                else:
                    raise Exception("Unknown state")
            test_suite.test_cases.append(test_case)
        return test_suite
Example #36
def test_multiple_skipped():
    """Tests multiple skipped messages in one test case"""
    tc = Case("Multiple skipped", allow_multiple_subelements=True)
    tc.add_skipped_info("First skipped", "First skipped message")
    (_, tcs) = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(tcs[0], {"name": "Multiple skipped"},
                     skipped=[{
                         "message": "First skipped",
                         "output": "First skipped message"
                     }])
    tc.add_skipped_info("Second skipped", "Second skipped message")
    (_, tcs) = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(
        tcs[0],
        {"name": "Multiple skipped"},
        skipped=[
            {
                "message": "First skipped",
                "output": "First skipped message"
            },
            {
                "message": "Second skipped",
                "output": "Second skipped message"
            },
        ],
    )
Example #37
 def test_init_failure_output(self):
     tc = TestCase('Failure-Output')
     tc.add_failure_info(output="I failed!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(self,
                      tcs[0], {'name': 'Failure-Output'},
                      failure_output="I failed!")
Example #38
def run_everything_else(xml = False):
    mega_suite = []
    tests = [
        run_test_arakoon_changes,
        run_tests_cli,
        run_test_big_object
    ]
    for x in tests:
        r = x()
        mega_suite.append(r)

    if is_true(xml):
        from junit_xml import TestSuite, TestCase
        test_cases = []
        for (suite, results) in mega_suite:
            for (name,result, delta) in results:
                test_case = TestCase(name, suite, elapsed_sec = delta)
                if not result:
                    test_case.add_error_info(message = "failed")
                test_cases.append(test_case)

        ts = [TestSuite("run_everything_else", test_cases)]
        with open('./testresults.xml', 'w') as f:
            TestSuite.to_file(f,ts)
    else:
        print mega_suite
Example #39
def generate_junit_report_from_cfn_nag(report):

    total_failures = 0
    """Generate Test Case from cfn_nag report"""

    test_cases = []

    for file_findings in report:
        for violation in file_findings["file_results"]['violations']:
            total_failures += 1
            for i, resource_id in enumerate(violation['logical_resource_ids']):

                test_case = TestCase("%s - %s" %
                                     (violation['id'], violation['message']),
                                     classname=resource_id)

                test_case.add_failure_info(
                    output="%s#L%s" %
                    (file_findings['filename'], violation['line_numbers'][i]))

                test_cases.append(test_case)

    test_suite = TestSuite("cfn-nag test suite", test_cases)

    if total_failures > 0:
        f = open("CFN_NAG_FAILURE", "a")
        f.close()

    return TestSuite.to_xml_string([test_suite], prettyprint=False)
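
A hypothetical cfn_nag report fragment matching the structure walked above:

report = [{
    "filename": "templates/app.yaml",
    "file_results": {
        "violations": [{
            "id": "F3",
            "message": "IAM role should not allow * action on its permissions policy",
            "logical_resource_ids": ["AppRole"],
            "line_numbers": [42],
        }]
    },
}]
xml_report = generate_junit_report_from_cfn_nag(report)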
Example #40
def test_init_skipped_output():
    tc = Case("Skipped-Output")
    tc.add_skipped_info(output="I skipped!")
    #pylint: disable=unused-variable
    ts, tcs = serialize_and_read(Suite("test", [tc]))[0]
    verify_test_case(tcs[0], {"name": "Skipped-Output"},
                     skipped_output="I skipped!")
Example #41
    def test(self):
        def Trim(string):
            if len(string) > 4096:
                return string[:4096] + " ..."
            else:
                return string
    
        varsdict = {}
        self.log("Assigning variables:")
        for var in self.variables:
            tmpdict  = {}
            try:
              var.run(tmpdict)
            except:
              self.log("failure.")
              self.pass_status.append('F')
              return self.pass_status

            varsdict[var.name] = tmpdict[var.name]
            self.log("Assigning %s = %s" % (str(var.name), Trim(str(varsdict[var.name]))))

        if len(self.pass_tests) != 0:
            self.log("Running failure tests: ")
            for test in self.pass_tests:
                self.log("Running %s:" % test.name)
                status = test.run(varsdict)
                tc = TestCase(test.name,
                              '%s.%s' % (self.length,
                                         self.filename[:-4]))
                if status == True:
                    self.log("success.")
                    self.pass_status.append('P')
                elif status == False:
                    self.log("failure.")
                    self.pass_status.append('F')
                    tc.add_failure_info(  "Failure" )
                else:
                    self.log("failure (info == %s)." % status)
                    self.pass_status.append('F')
                    tc.add_failure_info(  "Failure", status )
                self.xml_reports.append(tc)

        if len(self.warn_tests) != 0:
            self.log("Running warning tests: ")
            for test in self.warn_tests:
                self.log("Running %s:" % test.name)
                status = test.run(varsdict)
                if status == True:
                    self.log("success.")
                    self.warn_status.append('P')
                elif status == False:
                    self.log("warning.")
                    self.warn_status.append('W')
                else:
                    self.log("warning (info == %s)." % status)
                    self.warn_status.append('W')

        self.log(''.join(self.pass_status + self.warn_status))
        return self.pass_status + self.warn_status
Example #42
 def test_init_illegal_unicode_char(self):
     tc = TestCase('Failure-Message')
     tc.add_failure_info(
         u("failure message with illegal unicode char: [\x02]"))
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(
         self, tcs[0], {'name': 'Failure-Message'}, failure_message=u(
             "failure message with illegal unicode char: []"))
Example #43
    def execute(self, log, keyvals, testDef):
        testDef.logger.verbose_print("JunitXML Reporter")
        # pickup the options
        cmds = {}
        testDef.parseOptions(log, self.options, keyvals, cmds)
        if cmds['filename'] is not None:
            self.fh = open(cmds['filename'] if os.path.isabs(cmds['filename']) \
                           else os.path.join(cmds['scratch'],cmds['filename']), 'w')
        if testDef.options['description'] is not None:
            print(testDef.options['description'], file=self.fh)
            print(file=self.fh)
       
        # Use the Junit classname field to store the list of inifiles
        try:
            classname = testDef.log['inifiles']
        except KeyError:
            classname = None
        # get the entire log of results
        fullLog = testDef.logger.getLog(None)
        testCases = []
        # TODO: ain't nobody got time for that.  8-).
        time = 0
        for lg in fullLog:
            if 'stdout' in lg and lg['stdout'] is not None:
                stdout = "\n".join(lg['stdout'])
            else:
                stdout = None
            if 'stderr' in lg and lg['stderr'] is not None:
                stderr = "\n".join(lg['stderr'])
            else:
                stderr = None
            if 'time' in lg and lg['time'] is not None:
                time = lg['time']
            else:
                time = 0
            tc = TestCase(lg['section'], classname, time, stdout, stderr)
            try:
                if 0 != lg['status']:
                    # Find sections prefixed with 'TestRun'
                    if re.match("TestRun", lg['section']):
                        tc.add_failure_info("Test reported failure")
                    else:
                        tc.add_error_info("Test error")
            except KeyError:
                sys.exit(lg['section'] + " is missing status!")
            testCases.append(tc)

        # TODO:  Pull in the resource manager jobid.
        jobid = "job1"
        ts = TestSuite(jobid, testCases)
        print(TestSuite.to_xml_string([ts]), file=self.fh)

        if cmds['filename'] is not None:
            self.fh.close()
        log['status'] = 0
        return
Example #44
 def test_init_skipped_err_output(self):
     tc = TestCase('Skipped-Output')
     tc.add_skipped_info(output="I skipped!")
     tc.add_error_info(output="I skipped with an error!")
     (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
     verify_test_case(
         self, tcs[0],
         {'name': 'Skipped-Output'},
         skipped_output="I skipped!",
         error_output="I skipped with an error!")
Example #45
    def simple_report(self):
        """empty test report"""
        test_case_1 = TestCase("testcase1", elapsed_sec=1.5)

        test_case_2 = TestCase("testcase2", elapsed_sec=0.5)
        test_case_2.add_skipped_info("was skipped")

        test_case_3 = TestCase("testcase3", elapsed_sec=1.0)
        test_case_3.add_failure_info("failed")

        test_case_4 = TestCase("testcase4", elapsed_sec=0.25)
        test_case_4.add_error_info("errored")

        test_case_5 = TestCase("testcase5", elapsed_sec=0.1)

        test_cases = [
            test_case_1,
            test_case_2,
            test_case_3,
            test_case_4,
            test_case_5
        ]
        test_suites = [
            TestSuite('testsuite1', test_cases, timestamp=datetime.datetime.utcnow())
            ]
        return TestReport(NAME, {"module": test_suites}, BUILD_NUMBER, True)
Example #46
def generate_generic_test_case(name, classname, time, message, result):
    default_pass_message = 'Test passed, check build log for additional details'
    default_skip_message = 'Test skipped, check build log for additional details'
    tc = TestCase(name, classname, time,
                  default_pass_message if result == 'pass' else '')
    if result == 'fail':
        tc.add_failure_info(message=message)
    if result == 'skip':
        tc.add_skipped_info(message=default_skip_message)

    return tc
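
Hypothetical calls covering the three result values the function distinguishes:

passed = generate_generic_test_case('build', 'ci.pipeline', 12.5, '', 'pass')
failed = generate_generic_test_case('unit tests', 'ci.pipeline', 40.2,
                                    '3 assertions failed', 'fail')
skipped = generate_generic_test_case('deploy', 'ci.pipeline', 0, '', 'skip')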
Example #47
    def take_action(self, args):
        test_cases = []
        if args.playbook is not None:
            playbooks = args.playbook
            results = (models.TaskResult().query
                       .join(models.Task)
                       .filter(models.TaskResult.task_id == models.Task.id)
                       .filter(models.Task.playbook_id.in_(playbooks)))
        else:
            results = models.TaskResult().query.all()

        for result in results:
            task_name = result.task.name
            if not task_name:
                task_name = result.task.action
            additional_results = {
                'host': result.host.name,
                'playbook_path': result.task.playbook.path
            }
            result_str = jsonutils.dumps(additional_results)
            test_path = \
                u'{playbook_file}.{play_name}'.format(
                    playbook_file=os.path.basename(result.task.playbook.path),
                    play_name=result.task.play.name)
            test_case = TestCase(
                name=task_name,
                classname=test_path,
                elapsed_sec=result.duration.seconds,
                stdout=result_str)
            if result.status == 'skipped':
                test_case.add_skipped_info(message=result.result)
            elif ((result.status in ('failed', 'unreachable') and
                    result.ignore_errors is False and
                    'EXPECTED FAILURE' not in task_name and
                    'TOGGLE RESULT' not in task_name) or
                    (result.status == 'ok' and 'TOGGLE RESULT' in task_name)):
                test_case.add_failure_info(message=result.result)
            test_cases.append(test_case)
        test_suite = TestSuite('Ansible Tasks', test_cases)

        # TODO: junit_xml doesn't order the TestCase parameters.
        # This makes it so the order of the parameters for the same exact
        # TestCase is not guaranteed to be the same and thus results in a
        # different stdout (or file). This is easily reproducible on Py3.
        xml_string = six.text_type(test_suite.to_xml_string([test_suite]))
        if args.output_file == '-':
            if six.PY2:
                sys.stdout.write(encodeutils.safe_encode(xml_string))
            else:
                sys.stdout.buffer.write(encodeutils.safe_encode(xml_string))
        else:
            with open(args.output_file, 'wb') as f:
                f.write(encodeutils.safe_encode(xml_string))
Example #48
    def test_init_failure_type(self):
        tc = TestCase('Failure-Type')
        tc.add_failure_info(failure_type='com.example.Error')
        (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
        verify_test_case(self, tcs[0], {'name': 'Failure-Type'})

        tc.add_failure_info("failure message")
        (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
        verify_test_case(
            self, tcs[0], {'name': 'Failure-Type'},
            failure_message="failure message",
            failure_type='com.example.Error')
Example #49
    def test_init_error_type(self):
        tc = TestCase('Error-Type')
        tc.add_error_info(error_type='com.example.Error')
        (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
        verify_test_case(self, tcs[0], {'name': 'Error-Type'})

        tc.add_error_info("error message")
        (ts, tcs) = serialize_and_read(TestSuite('test', [tc]))[0]
        verify_test_case(
            self, tcs[0], {'name': 'Error-Type'},
            error_message="error message",
            error_type='com.example.Error')
Example #50
 def test_init_utf8(self):
     tc = TestCase('Test äöü', 'some.class.name.äöü', 123.345, 'I am stdöüt!', 'I am stdärr!')
     tc.add_skipped_info(message='Skipped äöü', output="I skippäd!")
     tc.add_error_info(message='Skipped error äöü', output="I skippäd with an error!")
     test_suite = TestSuite('Test UTF-8', [tc])
     (ts, tcs) = serialize_and_read(test_suite, encoding='utf-8')[0]
     verify_test_case(self, tcs[0], {'name': decode('Test äöü', 'utf-8'),
                                     'classname': decode('some.class.name.äöü', 'utf-8'),
                                     'time': ("%f" % 123.345)},
                     stdout=decode('I am stdöüt!', 'utf-8'), stderr=decode('I am stdärr!', 'utf-8'),
                     skipped_message=decode('Skipped äöü', 'utf-8'),
                     skipped_output=decode('I skippäd!', 'utf-8'),
                     error_message=decode('Skipped error äöü', 'utf-8'),
                     error_output=decode('I skippäd with an error!', 'utf-8'))
Example #51
def process(config):

    for mnf in config.keys():
        log("Processing manifest: %s" % mnf, 'info')

        if config[mnf].has_key('submodule'):
            sub = config[mnf].pop('submodule')
            sub_manifest = guess_manifest(str(mnf), sub['source'])
            sub_app = qubell.organization.application(name=sub['name'])
            sub_app.upload(sub_manifest)

        if config[mnf].has_key('source'):
            manifest = guess_manifest(str(mnf), config[mnf].pop('source'))
        else:
            error("Missing manifest source directive")

        if not 'launch' == config[mnf].keys()[0]:
            error('Launch action missing or not first')

        if config[mnf]['launch'].has_key('parameters'):
            parameters = config[mnf]['launch']['parameters']
        else:
            parameters = {}

        if config[mnf]['launch'].has_key('settings'):
            settings = config[mnf]['launch']['settings']
        else:
            settings = {}

        if qubell.appid:
            app = qubell.organization.application(id=qubell.appid, manifest=manifest)
        elif qubell.appname:
            app = qubell.organization.application(name=qubell.appname, manifest=manifest)
        else:
            app = qubell.organization.application(name=manifest.name, manifest=manifest)
        assert app.upload(manifest)
        instance = qubell.organization.create_instance(application=app, parameters=parameters, **settings)
        assert instance

        for action in config[mnf].keys():
            if config[mnf][action].has_key('parameters'):
                parameters = config[mnf][action]['parameters']
            else:
                parameters = {}
            if not run_test(instance, manifest, Action(action, parameters, config[mnf][action]['expected'])):
                test = TestCase(action, manifest.name, 0, 'Manifest: ' + manifest.source, 'failed to start')
                test.add_failure_info(str(instance))
                test_cases.append(test)
                return False
Example #52
 def _generate_junit_xml(self, config_name):
     testcases = []
     tests = self.data_source.get_test_results(config_name)
     for test in tests:
         test_time = 0
         if test.func_end_time is not None and test.func_start_time is not None:
             test_time = test.func_end_time - test.func_start_time
         tc = TestCase(test.name, config_name, test_time, test.description, test.message)
         if 'failed' in test.result:
             tc.add_failure_info(test.result)
         elif 'skipped' in test.result:
             tc.add_skipped_info(test.result)
         testcases.append(tc)
     testsuite = TestSuite(config_name + '_' + self.name, testcases)
     return testsuite
Example #53
    def _output_normal(self, test_result):
        # Need refactor
        if test_result == {}:
            print '[what?!] there are not any test result, what is the test case id?'
        else:
            print
            xml_test_suites = []
            summary_dict = self._get_summary_dict(test_result)
            self.report_create_time = str(time.strftime('%Y%m%d_%H%M%S', time.localtime()))
            for case_classify in test_result.keys():
                xml_test_cases = []
                if 'result' in test_result[case_classify].keys():
                    # Generate HTML report
                    self._generate_html_file(
                        case_classify, test_result[case_classify]['result'],
                        test_result[case_classify]['summary'])

                    # Save the result into the CSV
                    self._output_result_to_csv(test_result)

                    # Show in Console
                    print '{0} {1} {2}'.format('='*16, case_classify, '='*16)
                    test_case_result = test_result[case_classify]['result']
                    for case_id in test_case_result.keys():
                        print '[{0}][{1}] {2}, {3}, {4}'.format(case_classify, case_id,
                                                                test_case_result[case_id][0],
                                                                test_case_result[case_id][1],
                                                                str(test_case_result[case_id][2]))

                        # Produce xml file
                        test_case = TestCase(case_id, case_classify, int(test_case_result[case_id][2]))
                        if test_case_result[case_id][0] == 'Fail' or test_case_result[case_id][0] == 'Error':
                            try:
                                test_case.add_failure_info('msg' + test_case_result[case_id][1])
                            except:
                                test_case.add_failure_info('msg' + str(test_case_result[case_id]))

                        xml_test_cases.append(test_case)

                    xml_test_suites.append(TestSuite(case_classify, xml_test_cases))
                    with open(os.path.join(self.latest_reports_dir, case_classify + '.xml'), 'w') as f:
                        TestSuite.to_file(f, xml_test_suites, prettyprint=True)

            self._generate_summary_html_file(summary_dict)
            print '{0} {1} {2}'.format('='*16, 'Summary', '='*16)
            pprint.pprint(summary_dict)
Example #54
 def run_tests(self):
     test_cases = []
     for test in self.tests:
         desc = test['desc']
         name = test['name']
         index = test['id']
         test_case = TestCase(name, self.name)
         if '[.]' in desc:
             print('skipping test "{}"'.format(name))
             test_case.add_skipped_info(message="Skipped test marked with [.]")
         else:
             test_output = StringIO()
             self.sp.logfile = test_output
             t_start = time.time()
             result = self.run_test(index)
             t_stop = time.time()
             self.sp.logfile = None
             test_case.elapsed_sec = t_stop - t_start
             debug_print('test output was:')
             debug_print(test_output.getvalue())
             if result == BSTestRunner.SUCCESS:
                 test_case.stdout = test_output.getvalue()
                 print('test "{}" passed'.format(name))
             else:
                 print('test "{}" failed'.format(name))
                 test_case.add_failure_info('Test failed', output=test_output.getvalue())
             test_output.close()
         test_cases.append(test_case)
     return TestSuite(self.name, test_cases)
Example #55
    def test_init_unicode(self):
        tc = TestCase(decode('Test äöü', 'utf-8'), decode('some.class.name.äöü', 'utf-8'), 123.345,
                      decode('I am stdöüt!', 'utf-8'), decode('I am stdärr!', 'utf-8'))
        tc.add_skipped_info(message=decode('Skipped äöü', 'utf-8'),
                            output=decode('I skippäd!', 'utf-8'))
        tc.add_error_info(message=decode('Skipped error äöü', 'utf-8'),
                          output=decode('I skippäd with an error!', 'utf-8'))

        (ts, tcs) = serialize_and_read(TestSuite('Test Unicode',
                                                 [tc]))[0]
        verify_test_case(self, tcs[0], {'name': decode('Test äöü', 'utf-8'),
                                        'classname': decode('some.class.name.äöü', 'utf-8'),
                                        'time': ("%f" % 123.345)},
                        stdout=decode('I am stdöüt!', 'utf-8'), stderr=decode('I am stdärr!', 'utf-8'),
                        skipped_message=decode('Skipped äöü', 'utf-8'),
                        skipped_output=decode('I skippäd!', 'utf-8'),
                        error_message=decode('Skipped error äöü', 'utf-8'),
                        error_output=decode('I skippäd with an error!', 'utf-8'))
Example #56
def run_tests(TestClass):
    setup_logging()
    # Verify that we have needed input
    assert base_url, 'Please provide a value for base_url in runner.py'
    assert chromedriver_location, 'Please provide a value for chromedriver_location in runner.py'
    assert credentials['email'], 'Please provide credentials in runner.py'
    assert credentials['password'], 'Please provide credentials in runner.py'

    #WebDriver setup
    driver = webdriver.Chrome(chromedriver_location)

    # Init the provided class
    myTestClass = TestClass(driver, base_url, credentials)
    logging.info("Running: %s", myTestClass.__class__.__name__)

    # Run all the methods containing 'test' in name
    test_cases = []
    for object_name in dir(myTestClass):
        if 'test' in object_name:

            test_method = getattr(myTestClass, object_name)
            if not hasattr(test_method, '__call__'):
                continue  # Skip attributes that are not callable

            this_test_case = TestCase(object_name, myTestClass.__class__.__name__)
            logging.info('#######################################################')
            logging.info("Running %s", object_name)

            try:
                # Run the actual method
                test_method()
            except Exception as e:
                logging.info('TEST FAILED')
                logging.exception(e)
                this_test_case.add_failure_info(output=traceback.format_exc())
            else:
                logging.info("TEST PASSED")

            test_cases.append(this_test_case)
            logging.info('#######################################################')

    # Write the results to junit xml
    write_xml(test_cases, myTestClass.__class__.__name__)
Example #57
    def exporter_junit(self, test_result_ext, test_suite_properties=None):
        """ Export test results in JUnit XML compliant format
        """
        from junit_xml import TestSuite, TestCase

        test_suites = []
        test_cases = []

        toolchains = sorted(test_result_ext.keys())
        for toolchain in toolchains:
            targets = sorted(test_result_ext[toolchain].keys())
            for target in targets:
                test_cases = []
                tests = sorted(test_result_ext[toolchain][target].keys())
                for test in tests:
                    test_results = test_result_ext[toolchain][target][test]
                    for test_res in test_results:
                        test_ids = sorted(test_res.keys())
                        for test_no in test_ids:
                            test_result = test_res[test_no]
                            name = test_result["test_description"]
                            classname = "test.%s.%s.%s" % (target, toolchain, test_result["test_id"])
                            elapsed_sec = test_result["elapsed_time"]
                            _stdout = test_result["single_test_output"]
                            _stderr = test_result["target_name_unique"]
                            # Test case
                            tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                            # Test case extra failure / error info
                            if test_result["single_test_result"] == "FAIL":
                                message = test_result["single_test_result"]
                                tc.add_failure_info(message, _stdout)
                            elif test_result["single_test_result"] != "OK":
                                message = test_result["single_test_result"]
                                tc.add_error_info(message, _stdout)

                            test_cases.append(tc)
                ts = TestSuite(
                    "test.suite.%s.%s" % (target, toolchain),
                    test_cases,
                    properties=test_suite_properties[target][toolchain],
                )
                test_suites.append(ts)
        return TestSuite.to_xml_string(test_suites)
Example #58
def xmlwrite(target, everyline):
	xmltargetpath = os.getcwd() + os.sep + target + '_testresults.xml'
	xmltargetfile = open(xmltargetpath, 'w+')
	test_cases = []
	for line in everyline.split('\n'):
		words = line.split(' ')
		if line.find('success') != -1:
			time_taken = get_time(line.split('-')[-1][7:])
			name = words[0] + ' - ' + line.split('-')[1].split('name:')[1].strip()
			print name
			test_cases.append(TestCase(name, target, time_taken, None))
		elif line.find('failed') != -1:
			time_taken = get_time(line.split('-')[-1][7:])
			name = words[0] + ' - ' + line.split('-')[1].split('name:')[1].strip()
			message = ("-".join(line.split(' - ')[2:-1])).strip()
			tc = TestCase(name, target, time_taken, None)
			tc.add_failure_info(None, message)
			test_cases.append(tc)
	ts = TestSuite("testing this suite", test_cases)
	xmltargetfile.write(TestSuite.to_xml_string([ts]))
	xmltargetfile.close()