def junitxml_to_subunit(xml_input=sys.stdin, output=sys.stdout):
    """Convert a JUnit XML report into a subunit v2 byte stream.

    Each ``<testcase>`` element becomes one subunit test event. The test id
    is ``classname.name`` when both attributes are present, otherwise
    whichever one exists. A nested ``<skipped>`` element maps to status
    ``skip``, ``<failure>`` to ``fail``, anything else to ``success``; the
    element's ``message`` attribute (if any) is forwarded as an attachment.

    :param xml_input: file-like object containing the JUnit XML document.
    :param output: stream to which the subunit v2 bytes are written.
    """
    output = v2.StreamResultToBytes(output)
    tree = ET.parse(xml_input)
    # JUnit XML records durations, not timestamps, so synthesize start
    # times beginning at "now" and advancing by each test's duration.
    start_time = datetime.datetime.now(iso8601.UTC)
    for testcase in tree.findall('.//testcase'):
        test_id = testcase.attrib.get('name', None)
        test_class = testcase.attrib.get('classname', None)
        if test_class:
            if test_id:
                test_id = '.'.join([test_class, test_id])
            else:
                test_id = test_class
        # No metadata is carried over from JUnit XML.
        test_metadata = None
        test_time = to_timedelta(testcase.attrib.get('time'))
        skipped = testcase.find('.//skipped')
        failure = testcase.find('.//failure')
        attachment = None
        if skipped is not None:
            test_status = "skip"
            attachment = skipped.attrib.get('message', None)
        elif failure is not None:
            test_status = "fail"
            attachment = failure.attrib.get('message', None)
        else:
            test_status = "success"
        write_test(output, test_id, test_status, test_metadata,
                   test_time, start_time, attachment)
        start_time = start_time + test_time
def save_to_subunit(sla_records, subunit_filename):
    """Serialize SLA records to a file as a subunit v2 stream.

    Each record becomes its own start/stop test-run pair. Records whose
    state is not ``sla.STATE_TRUE`` additionally get their raw record
    attached as a YAML document named ``results``. IOErrors are logged
    and swallowed (best-effort write).

    :param sla_records: iterable of SLA record objects with ``record``,
        ``expression`` and ``state`` attributes.
    :param subunit_filename: path of the file to write.
    """
    LOG.debug('Writing subunit stream to: %s', subunit_filename)
    state2subunit = {sla.STATE_TRUE: 'success', sla.STATE_FALSE: 'fail'}
    try:
        # 'wb' + with-block: the subunit v2 writer emits bytes (text mode
        # fails on Python 3), and the context manager guarantees close.
        with open(subunit_filename, 'wb') as fd:
            output = subunit_v2.StreamResultToBytes(fd)
            for item in sla_records:
                output.startTestRun()
                test_id = _get_location(item.record) + ':' + item.expression
                if item.state != sla.STATE_TRUE:
                    # Attach the offending record so failures are debuggable.
                    # file_bytes must be bytes, hence the explicit encode.
                    output.status(
                        test_id=test_id, file_name='results',
                        mime_type='text/plain; charset="utf8"', eof=True,
                        file_bytes=yaml.safe_dump(
                            item.record,
                            default_flow_style=False).encode('utf-8'))
                output.status(
                    test_id=test_id,
                    test_status=state2subunit.get(item.state, 'skip'))
                output.stopTestRun()
        LOG.info('Subunit stream saved to: %s', subunit_filename)
    except IOError as e:
        LOG.error('Error writing subunit stream: %s', e)
def main():
    """Filter the subunit stream on stdin to stdout.

    Wraps stdout in a subunit v2 byte writer, layers the should-fail
    post-processing result on top, and pumps the parsed stdin stream
    through it; any non-subunit noise is labelled ``process-stderr``.
    """
    opts, _unused_args = make_options().parse_args()
    byte_sink = subunit_v2.StreamResultToBytes(sys.stdout)
    processed = ProcessedStreamResult(byte_sink, read_shouldfail_file(opts))
    source = ByteStreamToStreamResult(source=sys.stdin,
                                      non_subunit_name='process-stderr')
    processed.startTestRun()
    source.run(processed)
    processed.stopTestRun()
def main(argv=None, stdout=None):
    """Collect MySQL and queue counters and emit them as JSON.

    Spawns one worker thread per collector (``get_mysql``/``get_queues``,
    which populate the module-level ``mysql_data``/``queues_data``), then
    writes the combined JSON either raw or wrapped in a subunit v2 stream,
    and optionally to a file as well.

    :param argv: full argument vector (defaults to ``sys.argv``).
    :param stdout: output stream (defaults to ``sys.stdout``); may be a
        text or binary stream — bytes are routed through ``.buffer``
        when present.
    """
    if stdout is None:
        stdout = sys.stdout
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(argv[0])
    parser.add_argument('--loglevel', default=logging.INFO)
    parser.add_argument('--delta', help="Path to json file to read previous "
                                        "values from")
    parser.add_argument('--subunit', nargs='?', default=None,
                        const='counters.json',
                        help="Wrap the json output in a subunit stream. If an "
                             "argument is passed used that as the filename, "
                             "otherwise 'counters.json' will be used")
    parser.add_argument('--output', help="Write JSON here. Does not disable "
                                         "stdout.")
    parser.add_argument('--meta-prefix', help="Set a prefix in __meta__")
    args = parser.parse_args(argv[1:])

    logging.basicConfig(
        format='%(asctime)-15s %(levelname)s %(threadName)s: %(message)s')
    log = logging.getLogger()
    log.setLevel(args.loglevel)

    # Run both collectors concurrently; they fill module-level globals.
    getmysql = threading.Thread(name='mysql', target=get_mysql)
    getqueues = threading.Thread(name='queues', target=get_queues)
    getmysql.start()
    getqueues.start()
    log.debug('waiting for threads')
    getmysql.join()
    getqueues.join()
    log.debug('threads all returned')

    meta = {'unixtime': time.time()}
    if args.meta_prefix:
        meta['prefix'] = args.meta_prefix
    collected = {
        '__meta__': meta,
        'mysql': mysql_data,
        'queues': queues_data,
    }
    if args.delta:
        collected = _delta.delta_with_file(args.delta, collected)
    content = json.dumps(collected, indent=1, sort_keys=True).encode('utf-8')

    # content is bytes; a text stream such as sys.stdout cannot accept it
    # on Python 3, so write through its underlying binary buffer instead.
    binary_out = getattr(stdout, 'buffer', stdout)
    if args.subunit is not None:
        # Guard against an explicit empty string argument.
        file_name = args.subunit or 'counters.json'
        stream = subunit_v2.StreamResultToBytes(binary_out)
        stream.startTestRun()
        stream.status(file_name=file_name, file_bytes=content,
                      mime_type='application/json')
        stream.stopTestRun()
    else:
        binary_out.write(content)
        binary_out.write(b"\n")

    if args.output:
        with open(args.output, 'wb') as output:
            output.write(content)
            output.write(b"\n")