def json_result(logger, args, result):
    """Publish one test result: per-test result.json plus the global accumulators."""
    if not args.publish_results:
        return
    # Render the result through the JSON printer ...
    builder = printer.JsonBuilder()
    printer.build_result(logger, result, None, args, results_to_print, builder)
    entry = builder.json()
    entry["directory"] = result.test.name
    # ... and, when a per-test output directory exists, also write a copy there.
    outdir = _mkdir_test_output(logger, args, result)
    if outdir:
        # needs to be a relative path
        entry["output_directory"] = os.path.join(
            os.path.basename(os.path.dirname(outdir)),
            os.path.basename(outdir))
        result_path = os.path.join(outdir, "result.json")
        logger.info("writing result to '%s'", result_path)
        with open(result_path, "w") as output:
            jsonutil.dump(entry, output)
            output.write("\n")
    # accumulate results.json
    JSON_RESULTS.append(entry)
    # accumulate summary.json
    _add(JSON_SUMMARY, "totals", result.test.kind, result.test.status,
         str(result))
    for issue in result.issues:
        for domain in result.issues[issue]:
            # count once per domain on which the issue occurred
            _add(JSON_SUMMARY, "totals", result.test.kind, result.test.status,
                 "errors", issue)
    # extend the overall start/stop window
    _update_time(min, "start_time", entry)
    _update_time(max, "stop_time", entry)
def json_result(logger, args, result):
    """Publish a single test result as JSON and fold it into the run totals."""
    if not args.publish_results:
        return
    # Convert the result into json ...
    jb = printer.JsonBuilder()
    printer.build_result(logger, result, None, args, results_to_print, jb)
    record = jb.json()
    record["directory"] = result.test.name
    # ... and, if there is an output directory, write that also.
    outdir = _mkdir_test_output(logger, args, result)
    if outdir:
        # needs to be a relative path
        record["output_directory"] = os.path.join(
            os.path.basename(os.path.dirname(outdir)),
            os.path.basename(outdir))
        target = os.path.join(outdir, "result.json")
        logger.info("writing result to '%s'", target)
        with open(target, "w") as output:
            jsonutil.dump(record, output)
            output.write("\n")
    # accumulate the results.
    JSON_RESULTS.append(record)
    # accumulate the summary
    _add(JSON_SUMMARY, "totals", result.test.kind, result.test.status,
         str(result.resolution))
    for issue in result.issues:
        for domain in result.issues[issue]:
            # count the number of times the issue occurred (once per domain)
            _add(JSON_SUMMARY, "errors", issue)
    # extend the overall start/stop times
    _update_time(min, "start_time", record)
    _update_time(max, "stop_time", record)
def json_results(logger, args):
    """Write every accumulated result to <publish_results>/results.json."""
    if not args.publish_results:
        return
    results_path = os.path.join(args.publish_results, "results.json")
    logger.info("writing results to '%s'", results_path)
    with open(results_path, "w") as output:
        jsonutil.dump(JSON_RESULTS, output)
        output.write("\n")
def json_status(logger, args, details):
    """Write the current status (timestamp + DETAILS) to the publish_status file."""
    if not args.publish_status:
        return
    status = {"date": datetime.now(), "details": details}
    if args.publish_results:
        # The directory is taken relative to the status (summarydir)
        # directory.
        status["directory"] = os.path.relpath(
            args.publish_results, os.path.dirname(args.publish_status))
    with open(args.publish_status, "w") as output:
        jsonutil.dump(status, output)
        output.write("\n")
def json_status(logger, args, details):
    """Dump a small status blob (date, details, results directory) as JSON."""
    if not args.publish_status:
        return
    blob = {
        "date": datetime.now(),
        "details": details,
    }
    if args.publish_results:
        # The directory is taken relative to the status (summarydir)
        # directory.
        status_dir = os.path.dirname(args.publish_status)
        blob["directory"] = os.path.relpath(args.publish_results, status_dir)
    with open(args.publish_status, "w") as output:
        jsonutil.dump(blob, output)
        output.write("\n")
def json_summary(logger, args):
    """Write the accumulated run summary to <publish_results>/summary.json."""
    if not args.publish_results:
        return
    # times: json_result() records these under "start_time"/"stop_time",
    # so read "stop_time" here.  The original read "end_time", which is
    # never written, leaving "runtime" permanently unset.
    start_time = JSON_SUMMARY.get("start_time")
    stop_time = JSON_SUMMARY.get("stop_time")
    if start_time and stop_time:
        runtime = (stop_time - start_time).total_seconds()
        # Format HH:MM directly; the old utcfromtimestamp()/strftime()
        # trick silently wrapped at 24 hours (and utcfromtimestamp() is
        # deprecated since Python 3.12).
        JSON_SUMMARY["runtime"] = "%02d:%02d" % (runtime // 3600,
                                                 (runtime % 3600) // 60)
    # other stuff
    JSON_SUMMARY["total"] = len(JSON_RESULTS)
    path = os.path.join(args.publish_results, "summary.json")
    logger.info("writing summary to '%s'", path)
    with open(path, "w") as output:
        jsonutil.dump(JSON_SUMMARY, output)
        output.write("\n")
def json_summary(logger, args):
    """Finish and emit the summary JSON (runtime, total, optional hash)."""
    if not args.publish_results:
        return
    # times
    start_time = JSON_SUMMARY.get("start_time")
    stop_time = JSON_SUMMARY.get("stop_time")
    if start_time and stop_time:
        # elapsed time rounded to whole minutes, rendered as H:MM
        minutes = round((stop_time - start_time).total_seconds() / 60.0)
        JSON_SUMMARY["runtime"] = "%d:%02d" % (minutes / 60, minutes % 60)
    # other stuff
    JSON_SUMMARY["total"] = len(JSON_RESULTS)
    if args.publish_hash:
        JSON_SUMMARY["hash"] = args.publish_hash
    # emit: an explicit summary path wins over the default location
    if args.publish_summary:
        summary_path = args.publish_summary
    else:
        summary_path = os.path.join(args.publish_results, "summary.json")
    logger.info("writing summary to '%s'", summary_path)
    with open(summary_path, "w") as output:
        jsonutil.dump(JSON_SUMMARY, output)
        output.write("\n")
def flush(self): jsonutil.dump(self.table, sys.stdout) sys.stdout.write("\n") sys.stdout.flush()
def flush(self): if self.stream: jsonutil.dump(self.table, self.stream) self.stream.write("\n") self.stream.flush()
def main():
    """Scan test OUTPUT directories and write 'table.json' to stdout.

    Returns 0 on success, 1 when an argument is not a usable directory.
    """
    parser = argparse.ArgumentParser(
        description="write 'table.json' to standard output")
    parser.add_argument("--verbose", "-v", action="store_true")
    parser.add_argument("directories", metavar="OUTPUT-DIRECTORY", nargs="+",
                        help="output directories containing RESULT files")
    args = parser.parse_args()

    # Force the order.
    # NOTE(review): host_names/columns are built but never referenced below;
    # presumably consumed by another revision of this script - confirm.
    host_names = ["east", "west", "road", "north", "nic"]
    columns = ["Test", "Expected", "Result", "Run time"]
    for host in host_names:
        columns.append(host)

    rows = []
    first_time = last_time = None
    total = passed = failed = incomplete = good = 0

    for directory in args.directories:
        # was "args.verbose and sys.stderr.write(...)" - expression
        # abused as a statement
        if args.verbose:
            sys.stderr.write("%s\n" % (directory))
        d = directory
        if not path.isdir(d):
            sys.stderr.write("%s (%s) is not a directory\n" % (directory, d))
            return 1
        # work around python's basename - remove any trailing "/"
        if not path.basename(d):
            d = path.dirname(d)
        if path.basename(d) != "OUTPUT":
            # try <d>/OUTPUT
            t = path.join(d, "OUTPUT")
            if not path.isdir(t):
                sys.stderr.write("%s (%s) is not an OUTPUT directory\n"
                                 % (directory, d))
                return 1
            d = t

        result_file = path.join(d, "RESULT")
        debug_log = path.join(d, "debug.log")
        if not path.isfile(result_file) and not path.isfile(debug_log):
            sys.stderr.write("%s (%s) contains no results\n" % (directory, d))
            continue

        total += 1
        # NOTE(review): runtime is never assigned from RESULT, so the
        # "end_time - timedelta(seconds=runtime)" fallback below can never
        # fire; left as-is pending confirmation of the intended field.
        runtime = ""
        RESULT = {}

        # If the RESULT file exists, use that.  The RESULT file contains
        # lines of JSON; the line holding a "result" key is the one wanted.
        if path.isfile(result_file):
            with open(result_file) as f:
                for line in f:
                    try:
                        j = jsonutil.loads(line)
                    except Exception:
                        # was a bare "except:", which also swallowed
                        # KeyboardInterrupt/SystemExit
                        sys.stderr.write("%s: invalid json: <<%s>>\n"
                                         % (result_file, line))
                        break
                    if not j:
                        break
                    if "result" in j:
                        RESULT = j
                        break

        # The debug.log should contain start/end lines, even when the test
        # didn't finish properly.
        debug_start_time = ""
        debug_end_time = ""
        debug_runtime = ""
        if path.isfile(debug_log):
            with open(debug_log) as f:
                debug = f.read()
            debug_start_time = debug_time(r"starting debug log at (.*)$", debug)
            debug_end_time = debug_time(r"ending debug log at (.*)$", debug)
            if debug_start_time and debug_end_time:
                debug_runtime = round(
                    (debug_end_time - debug_start_time).total_seconds(), 2)

        # fill in anything that is missing

        # Relative path to this directory so that html can construct a link.
        RESULT[jsonutil.result.directory] = d
        # Testname from .../<testname>/OUTPUT.
        if not jsonutil.result.testname in RESULT:
            # Python dirname is really basename(dirname).
            RESULT[jsonutil.result.testname] = path.dirname(d)
        if not jsonutil.result.result in RESULT:
            RESULT[jsonutil.result.result] = "incomplete"
        if RESULT[jsonutil.result.result] == "passed":
            passed += 1
        elif RESULT[jsonutil.result.result] == "failed":
            failed += 1
        else:
            incomplete += 1
        if not jsonutil.result.expect in RESULT:
            RESULT[jsonutil.result.expect] = "good"
        if RESULT[jsonutil.result.expect] == "good":
            good += 1
        # this is the end-time
        if not jsonutil.result.time in RESULT and debug_end_time:
            RESULT[jsonutil.result.time] = jsonutil.ftime(debug_end_time)
        # having separate boottime and testtime would be nice
        if not jsonutil.result.runtime in RESULT and debug_runtime:
            RESULT[jsonutil.result.runtime] = debug_runtime

        # Update the total times.
        end_time = ""
        if debug_end_time:
            end_time = debug_end_time
        elif jsonutil.result.time in RESULT:
            end_time = jsonutil.ptime(RESULT[jsonutil.result.time])
        start_time = ""
        if debug_start_time:
            start_time = debug_start_time
        elif end_time and runtime:
            start_time = end_time - timedelta(seconds=runtime)
        if start_time:
            if not first_time or start_time < first_time:
                first_time = start_time
        if end_time:
            if not last_time or end_time > last_time:
                last_time = end_time

        rows.append(RESULT)

    runtime = "00:00:00"
    if first_time and last_time:
        delta = last_time - first_time
        runtime = str(timedelta(days=delta.days, seconds=delta.seconds))
    date = jsonutil.ftime(datetime.fromordinal(1))
    if first_time:
        date = jsonutil.ftime(first_time)
    summary = {
        jsonutil.summary.total: total,
        jsonutil.summary.passed: passed,
        jsonutil.summary.failed: failed,
        jsonutil.summary.incomplete: incomplete,
        jsonutil.summary.date: date,
        jsonutil.summary.runtime: runtime,
        jsonutil.summary.good: good,
    }
    table = {
        jsonutil.results.summary: summary,
        jsonutil.results.table: rows,
    }
    jsonutil.dump(table, sys.stdout)
    sys.stdout.write("\n")
    return 0
def main():
    """Build 'table.json' from test OUTPUT directories and print it.

    Returns 0 on success, 1 when an argument is not a usable directory.
    """
    parser = argparse.ArgumentParser(
        description="write 'table.json' to standard output")
    parser.add_argument("--verbose", "-v", action="store_true")
    parser.add_argument("directories", metavar="OUTPUT-DIRECTORY", nargs="+",
                        help="output directories containing RESULT files")
    args = parser.parse_args()

    # Force the order.
    # NOTE(review): host_names/columns are built but never referenced below;
    # presumably consumed by another revision of this script - confirm.
    host_names = ["east", "west", "road", "north", "nic"]
    columns = ["Test", "Expected", "Result", "Run time"]
    for host in host_names:
        columns.append(host)

    rows = []
    first_time = last_time = None
    total = passed = failed = incomplete = good = 0

    for directory in args.directories:
        # was "args.verbose and sys.stderr.write(...)" - expression
        # abused as a statement
        if args.verbose:
            sys.stderr.write("%s\n" % (directory))
        d = directory
        if not path.isdir(d):
            sys.stderr.write("%s (%s) is not a directory\n" % (directory, d))
            return 1
        # work around python's basename - remove any trailing "/"
        if not path.basename(d):
            d = path.dirname(d)
        if path.basename(d) != "OUTPUT":
            # try <d>/OUTPUT
            t = path.join(d, "OUTPUT")
            if not path.isdir(t):
                sys.stderr.write("%s (%s) is not an OUTPUT directory\n"
                                 % (directory, d))
                return 1
            d = t

        result_file = path.join(d, "RESULT")
        debug_log = path.join(d, "debug.log")
        if not path.isfile(result_file) and not path.isfile(debug_log):
            sys.stderr.write("%s (%s) contains no results\n" % (directory, d))
            continue

        total += 1
        # NOTE(review): runtime is never assigned from RESULT, so the
        # "end_time - timedelta(seconds=runtime)" fallback below can never
        # fire; left as-is pending confirmation of the intended field.
        runtime = ""
        RESULT = {}

        # If the RESULT file exists, use that.  The RESULT file contains
        # lines of JSON; the line holding a "result" key is the one wanted.
        if path.isfile(result_file):
            with open(result_file) as f:
                for line in f:
                    try:
                        j = jsonutil.loads(line)
                    except Exception:
                        # was a bare "except:", which also swallowed
                        # KeyboardInterrupt/SystemExit
                        sys.stderr.write("%s: invalid json: <<%s>>\n"
                                         % (result_file, line))
                        break
                    if not j:
                        break
                    if "result" in j:
                        RESULT = j
                        break

        # The debug.log should contain start/end lines, even when the test
        # didn't finish properly.
        debug_start_time = ""
        debug_end_time = ""
        debug_runtime = ""
        if path.isfile(debug_log):
            with open(debug_log) as f:
                debug = f.read()
            debug_start_time = debug_time(r"starting debug log at (.*)$", debug)
            debug_end_time = debug_time(r"ending debug log at (.*)$", debug)
            if debug_start_time and debug_end_time:
                debug_runtime = round(
                    (debug_end_time - debug_start_time).total_seconds(), 2)

        # fill in anything that is missing

        # Relative path to this directory so that html can construct a link.
        RESULT[jsonutil.result.directory] = d
        # Testname from .../<testname>/OUTPUT.
        if not jsonutil.result.testname in RESULT:
            # Python dirname is really basename(dirname).
            RESULT[jsonutil.result.testname] = path.dirname(d)
        if not jsonutil.result.result in RESULT:
            RESULT[jsonutil.result.result] = "incomplete"
        if RESULT[jsonutil.result.result] == "passed":
            passed += 1
        elif RESULT[jsonutil.result.result] == "failed":
            failed += 1
        else:
            incomplete += 1
        if not jsonutil.result.expect in RESULT:
            RESULT[jsonutil.result.expect] = "good"
        if RESULT[jsonutil.result.expect] == "good":
            good += 1
        # this is the end-time
        if not jsonutil.result.time in RESULT and debug_end_time:
            RESULT[jsonutil.result.time] = jsonutil.ftime(debug_end_time)
        # having separate boottime and testtime would be nice
        if not jsonutil.result.runtime in RESULT and debug_runtime:
            RESULT[jsonutil.result.runtime] = debug_runtime

        # Update the total times.
        end_time = ""
        if debug_end_time:
            end_time = debug_end_time
        elif jsonutil.result.time in RESULT:
            end_time = jsonutil.ptime(RESULT[jsonutil.result.time])
        start_time = ""
        if debug_start_time:
            start_time = debug_start_time
        elif end_time and runtime:
            start_time = end_time - timedelta(seconds=runtime)
        if start_time:
            if not first_time or start_time < first_time:
                first_time = start_time
        if end_time:
            if not last_time or end_time > last_time:
                last_time = end_time

        rows.append(RESULT)

    runtime = "00:00:00"
    if first_time and last_time:
        delta = last_time - first_time
        runtime = str(timedelta(days=delta.days, seconds=delta.seconds))
    date = jsonutil.ftime(datetime.fromordinal(1))
    if first_time:
        date = jsonutil.ftime(first_time)
    summary = {
        jsonutil.summary.total: total,
        jsonutil.summary.passed: passed,
        jsonutil.summary.failed: failed,
        jsonutil.summary.incomplete: incomplete,
        jsonutil.summary.date: date,
        jsonutil.summary.runtime: runtime,
        jsonutil.summary.good: good,
    }
    table = {
        jsonutil.results.summary: summary,
        jsonutil.results.table: rows,
    }
    jsonutil.dump(table, sys.stdout)
    sys.stdout.write("\n")
    return 0