def write_reports(self, path):
    """Write the exceptions report covering jobs first_job..last_job."""
    lines = [
        "{}\n".format(self.jobname),
        "{}/{}\n".format(self.url, self.jobname),
        "jobs {} to {}\n".format(self.first_job, self.last_job),
        "{}\n".format("=" * 90)
    ]
    for suitename, suite in self.reports.items():
        lines.append("{}\n".format("-" * 70))
        lines.append("{:70}\n".format(suitename))
        lines.append("{}\n".format("-" * 70))
        for testname, test in suite.items():
            lines.append("{:70}\n".format(testname))
            for jobno, job in test.items():
                matches = job.get("matches")
                new = job.get("new")
                if matches or new:
                    new = new or 0
                    lines.append(" *** job {}: new exceptions: {}\n"
                                 .format(jobno, new))
                if matches:
                    for match in matches:
                        lines.append(" - {}\n".format(match))
    report_path = "{}/{}.excepts.txt".format(path, self.jobname)
    logger.info("writing {}".format(report_path))
    files.writelines(report_path, lines)

def write_reports(self, path):
    """Write the console report: stack timings per job and failed tests per suite."""
    jobline = "{} {:4} {:7} {:7}\n"
    lines = [
        "{}\n".format(self.jobname),
        "{}/{}\n".format(self.url, self.jobname),
        "{}\n".format("=" * 90),
        "failed jobs: {}\n".format(self.failed_jobs),
        "number failed to stack: {}\n".format(self.failed_stack),
        "number failed to devstack: {}\n".format(self.failed_devstack),
        jobline.format("f", "job", "total", "stack"),
        jobline.format("-", "-" * 4, "-" * 7, "-" * 7),
    ]
    for jobno, job in self.jobs.items():
        lines.append(
            jobline.format(job.get("stack failed", " "), jobno,
                           job.get("total", "0:00:00"),
                           job.get("stack", "0:00:00")))
    for suitename, suite in self.reports.items():
        if suitename == "jobname":
            continue
        lines.append("{} {}\n".format("-" * 70, "-" * 20))
        lines.append("{:70} Failed in Job\n".format(suitename))
        lines.append("{} {}\n".format("-" * 70, "-" * 20))
        for testname, test in suite.items():
            lines.append("{:70} {}\n".format(testname, test.get("fail")))
    report_path = "{}/{}.console.txt".format(path, self.jobname)
    logger.info("writing {}".format(report_path))
    files.writelines(report_path, lines)

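Both report writers, and the tests further below, delegate file output to files.writelines(path, lines), sometimes with a third positional flag. That helper is not shown in this code; the sketch below is only an assumption inferred from the call sites, and both the directory creation and the meaning of the third flag are guesses made for illustration.

import errno
import os


def writelines(path, lines, add_newlines=False):
    # Hypothetical sketch of the files.writelines helper used above.
    # The real meaning of the third flag is not visible in this code;
    # treating it as "append a newline to each line" is an assumption.
    dirname = os.path.dirname(path)
    if dirname and not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise
    with open(path, "w") as fp:
        for line in lines:
            fp.write(line + "\n" if add_newlines else line)
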
def write_debug_pdata(self):
    """Write a formatted dump-flows file for every node that captured one."""
    for testid, test in self.pdata.items():
        tdir = self.outdir + "/" + testid + "_" + test["name"].replace(" ", "_")
        for nodeid, node in test["nodes"].items():
            ndir = tdir + "/" + nodeid
            if RobotFiles.DUMP_FLOWS not in node:
                continue
            filename = ndir + "/" + self.fix_command_names(RobotFiles.DUMP_FLOWS)
            logger.debug("Processing: %s", filename)
            filename = filename + ".f.txt"
            flow_table = OvsFlowTable(node.get(RobotFiles.DUMP_FLOWS),
                                      "ovs", "dpid", "name")
            files.writelines(filename, flow_table.fdata)

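write_debug_pdata only relies on pdata having a particular nested shape: test id mapping to a display name and a nodes dict, with each node optionally carrying dump-flows output under the RobotFiles.DUMP_FLOWS key. An illustrative, entirely made-up instance of that shape:

# Illustrative only: the test id, node id, flow line and the value of
# RobotFiles.DUMP_FLOWS (assumed here to be the dump-flows command string)
# are invented to show the structure write_debug_pdata walks.
DUMP_FLOWS = "sudo ovs-ofctl dump-flows br-int"

example_pdata = {
    "s1-t1": {                  # test id, becomes part of the output directory
        "name": "Create VM",    # appended to the directory name, spaces -> _
        "nodes": {
            "ODL_1": {          # node id, becomes a per-node subdirectory
                DUMP_FLOWS: "cookie=0x0, table=0, priority=0 actions=goto_table:17\n",
            },
        },
    },
}
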
def run(args):
    if args.infile:
        logger.info("Parsing {} into {}".format(args.infile, args.outfile))
        data = files.readlines(args.infile)
    else:
        logger.info(
            "Executing ssh {}@{}:{} -c sudo ovs-ofctl dump-flows br-int "
            "and parsing into {}".format(args.user, args.ip, args.port,
                                         args.outfile))
        data = ssh.execute(args.ip, args.port, args.user, args.pw,
                           "sudo ovs-ofctl dump-flows br-int")
    if data:
        flow_table = OvsFlowTable(data, "ovs", "dpid", "name")
        if flow_table and flow_table.fdata:
            files.writelines(args.outfile, flow_table.fdata)

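run() only needs an object exposing infile, outfile, ip, port, user and pw. A minimal argparse wiring that would satisfy it could look like the sketch below; the flag names and defaults are assumptions for illustration, not the tool's actual command line.

import argparse


def add_parser_args(parser):
    # Hypothetical flags matching the attributes run() reads; names and
    # defaults are assumptions, not the real CLI.
    parser.add_argument("-i", "--infile",
                        help="parse flows from this file instead of ssh")
    parser.add_argument("-o", "--outfile", default="/tmp/flow_dumps.out.txt")
    parser.add_argument("--ip", default="127.0.0.1")
    parser.add_argument("--port", type=int, default=22)
    parser.add_argument("--user", default="vagrant")
    parser.add_argument("--pw", default="")


if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(
        description="format ovs-ofctl dump-flows output")
    add_parser_args(arg_parser)
    run(arg_parser.parse_args())
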
def test_writelines(self):
    data = files.readlines(self.filename)
    self.assertEqual(len(data), 76)
    files.writelines(self.outpath, data, True)
    self.assertTrue(os.path.exists(self.outpath))

def test_format(self):
    files.writelines("/tmp/flow_dumps.1.out.txt", self.flow_table.fdata)
    self.assertTrue(os.path.exists(self.filename))
    self.assertEqual(76, len(files.readlines(self.filename)))