def summary(dd, engagement_id, test_ids, max_critical=0, max_high=0, max_medium=0):
    findings = dd.list_findings(engagement_id_in=engagement_id, duplicate="false",
                                active="true", verified="true")
    print "=============================================="
    print "Total Number of Vulnerabilities: " + str(findings.data["meta"]["total_count"])
    print "=============================================="
    print_findings(sum_severity(findings))
    print

    findings = dd.list_findings(test_id_in=test_ids, duplicate="true")
    print "=============================================="
    print "Total Number of Duplicate Findings: " + str(findings.data["meta"]["total_count"])
    print "=============================================="
    print_findings(sum_severity(findings))
    print

    # Delay while the de-dupe celery task runs
    sys.stdout.write("Sleeping for 30 seconds for de-dupe celery process:")
    sys.stdout.flush()
    for i in range(15):
        time.sleep(2)
        sys.stdout.write(".")
        sys.stdout.flush()

    findings = dd.list_findings(test_id_in=test_ids, duplicate="false", limit=500)

    if findings.count() > 0:
        for finding in findings.data["objects"]:
            test_cases.append(junit_xml_output.TestCase(
                finding["title"] + " Severity: " + finding["severity"],
                finding["description"], "failure"))
        if not os.path.exists("reports"):
            os.mkdir("reports")
        junit("DefectDojo", "reports/junit_dojo.xml")

    print "\n=============================================="
    print "Total Number of New Findings: " + str(findings.data["meta"]["total_count"])
    print "=============================================="
    sum_new_findings = sum_severity(findings)
    print_findings(sum_new_findings)
    print
    print "=============================================="

    # sum_severity() counts are indexed by severity: [2]=Medium, [3]=High, [4]=Critical
    strFail = ""
    if max_critical is not None:
        if sum_new_findings[4] > max_critical:
            strFail = " Max Critical"
    if max_high is not None:
        if sum_new_findings[3] > max_high:
            strFail = strFail + " Max High"
    if max_medium is not None:
        if sum_new_findings[2] > max_medium:
            strFail = strFail + " Max Medium"

    if strFail == "":
        print "Build Passed!"
    else:
        print "Build Failed:" + strFail
    print "=============================================="
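# --- Hedged usage sketch (not part of the original script) -------------------
# How summary() might be driven from a CI wrapper.  The client construction
# assumes the defectdojo_api Python wrapper; the host, credentials, engagement
# id and test ids below are placeholders, not values from this project.
#
# from defectdojo_api import defectdojo
#
# dd = defectdojo.DefectDojoAPI("https://dojo.example.com", api_key, "admin")
# summary(dd, engagement_id=1, test_ids="1,2",
#         max_critical=0, max_high=2, max_medium=5)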
val.wait()

# Run the -ldso variant of the test binary with LD_LIBRARY_PATH pointed at the
# local build, capturing its output for comparison against the reference.
cmd = ['./' + arg + '-ldso']
test_env = os.environ.copy()
test_env['LD_LIBRARY_PATH'] = '.:../'
val = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=test_env)
stdout, stderr = get_subprocess_output(val)

f = open('output/' + arg, 'w')
f.write(stdout)
f.close()

if val.returncode != 0:
    test_cases.append(
        junit_xml_output.TestCase(arg, '(crash)\n ' + stderr, "failure"))
    print('CRASH ' + arg + ' -- LD_LIBRARY_PATH=.:../ ./' + arg + '-ldso')
else:
    # Compare the captured output against the stored reference output.
    cmd = ['diff', '-q', 'output/' + arg, 'output/' + arg + '.ref']
    val = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = get_subprocess_output(val)
    if val.returncode != 0:
        test_cases.append(junit_xml_output.TestCase(arg, stderr, "failure"))
        print('FAIL ' + arg + ' -- LD_LIBRARY_PATH=.:../ ./' + arg + '-ldso')
    else:
        test_cases.append(junit_xml_output.TestCase(arg, stdout, "success"))
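# The snippet above relies on a get_subprocess_output() helper defined elsewhere
# in the harness.  A minimal sketch, assuming it only waits for the process and
# returns its decoded streams (the real helper may differ):
def get_subprocess_output(proc):
    # communicate() waits for the process to exit and sets proc.returncode
    stdout, stderr = proc.communicate()
    return stdout.decode("utf-8", "replace"), stderr.decode("utf-8", "replace")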
args = parser.parse_args()
test_cases = []

# Column indexes in the generic CSV report
TITLE = 1
DESCRIPTION = 5

base = os.path.basename(args.file)
fileName = os.path.join(os.path.dirname(args.file),
                        "generic_" + os.path.splitext(base)[0] + ".csv")
csvToParse = fileName

# Test for file
if os.path.isfile(csvToParse):
    with open(csvToParse, 'rb') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        first = True
        for row in reader:
            if first:
                # Skip the header row
                first = False
            else:
                # Output a junit test file; should lows/mediums be considered a failure?
                test_cases.append(
                    junit_xml_output.TestCase(row[TITLE], row[DESCRIPTION], "failure"))
    junit(args.tool,
          os.path.join(os.path.dirname(args.file), "junit",
                       "junit_" + os.path.splitext(base)[0] + ".xml"))
else:
    print "File passed in doesn't exist."
import junit_xml_output

test_cases = []
test_cases.append(junit_xml_output.TestCase("first", "eg_contents", "failure"))
junit_xml = junit_xml_output.JunitXml("example_usage", test_cases)
print(junit_xml.dump())

# Code snippet for the usage:
# """ a short example of how to use this module """
# test_cases = []
# for i in range(0, 5):
#     type_c = ""
#     if i % 2 == 0:
#         type_c = "failure"
#     test_cases.append(TestCase(i, str(i) + "contents", type_c))
#
# junit_xml = JunitXml("demo test example", test_cases)
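# A further minimal sketch: persisting the generated report so a CI server can
# collect it.  The "example_junit.xml" path is purely illustrative.
# with open("example_junit.xml", "w") as report:
#     report.write(junit_xml.dump())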
import junit_xml_output
import sys

counter = 0
fname = sys.argv[1]  # filename to grep
err = sys.argv[2]    # error message to grep for
test_cases = []

f = open(fname, "r")
for x in f:
    if (x.find(err) != -1):
        test_cases.append(junit_xml_output.TestCase("line: " + str(counter), x, "failure"))
        #print(x)
    counter += 1
f.close()

junit_xml = junit_xml_output.JunitXml(fname, test_cases)
print(junit_xml.dump())
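# Example invocation (script and file names are hypothetical):
#   python grep_to_junit.py build.log "ERROR" > junit_errors.xml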
def summary_slack(dd, masterYaml, notify, profile, product, engagement_id, test_ids,
                  build_id, repo_url, tags, max_critical, max_high, max_medium):
    config = Config(masterYaml)
    max_critical, max_high, max_medium = config.getMasterToolFailValues()

    summary = {}
    summary["critical"] = 0
    summary["high"] = 0
    summary["medium"] = 0
    summary["low"] = 0
    summary["info"] = 0
    summary["total"] = 0

    # Ensure tests were found for this scan
    if test_ids != "":
        findings = dd.list_findings(test_id_in=test_ids, duplicate="false", limit=1000)
        if findings.success:
            if findings.count() > 0:
                for finding in findings.data["objects"]:
                    test_cases.append(
                        junit_xml_output.TestCase(
                            finding["title"] + " Severity: " + finding["severity"],
                            finding["description"], "failure"))
                #if not os.path.exists("reports"):
                #    os.mkdir("reports")
                #junit("DefectDojo", "reports/junit_dojo.xml")

                print "\n=============================================="
                print "Total Number of New Findings: " + str(
                    findings.data["meta"]["total_count"])
                print "=============================================="

                # Tally findings by severity
                for finding in findings.data["objects"]:
                    if finding["severity"] == "Critical":
                        summary["critical"] = summary["critical"] + 1
                    if finding["severity"] == "High":
                        summary["high"] = summary["high"] + 1
                    if finding["severity"] == "Medium":
                        summary["medium"] = summary["medium"] + 1
                    if finding["severity"] == "Low":
                        summary["low"] = summary["low"] + 1
                    if finding["severity"] == "Info":
                        summary["info"] = summary["info"] + 1
                    summary["total"] = summary["total"] + 1

            comments = ""
            if max_critical is not None:
                if summary["critical"] >= max_critical:
                    comments = "Max Critical "
            if max_high is not None:
                if summary["high"] >= max_high:
                    comments = comments + " Max High "
            if max_medium is not None:
                if summary["medium"] >= max_medium:
                    comments = comments + " Max Medium "

            if comments == "":
                print "Build Passed!"
                strFail = "pass"
            else:
                print "Build Failed: " + comments
                strFail = "fail"
        else:
            print "An error occurred: " + findings.message
            # Treat API errors as a failed gate so strFail is always defined
            strFail = "fail"
    else:
        strFail = "pass"

    comments = "*Profile:* %s\n*Build Pass/Fail Criteria:* Max Critical: %s, Max High: %s, Max Medium: %s" % (
        profile, max_critical, max_high, max_medium)

    defectdojo_url = urlparse(dd.host)
    build_report_link = "%s://%s/engagement/%s" % (
        defectdojo_url.scheme, defectdojo_url.netloc, engagement_id)
    open_report_link = "%s://%s/finding/open?test__engagement__product=%s" % (
        defectdojo_url.scheme, defectdojo_url.netloc, product)

    product_name = None
    product = dd.get_product(product)
    if product.success:
        product = product.data['name']

    notify.scanSummary(build_report_link, open_report_link, product, strFail, comments,
                       build_id, repo_url, tags, summary)
                        type=lambda p: Path(p).absolute(),
                        default=Path(__file__).absolute().parent / "target",
                        help="Path to the target directory")
    parser.add_argument("--file_pattern",
                        type=str,
                        default="*.xml",
                        help="file pattern")
    p = parser.parse_args()
    return p


if __name__ == "__main__":
    params = getPathArg()
    for entry in params.target_dir.glob(params.file_pattern):
        test_cases = []
        fileName = entry.name
        with entry.open() as f:
            for line in f:
                header = ""
                try:
                    # Use the parenthesised rule id, e.g. "(AB123)", as the test case name
                    regResult = re.search(r"(\([A-Z]+[0-9]+\))", line)
                    header = regResult.group(1)
                except AttributeError:
                    # No rule id found; fall back to the whole line
                    header = line
                test_cases.append(
                    junit_xml_output.TestCase(header, line, "failure"))
        junit_xml = junit_xml_output.JunitXml(fileName, test_cases)  # suite
        output = params.target_dir / fileName
        with open(str(output) + ".xml", "w") as f:
            f.write(junit_xml.dump())
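# Example invocation (script name is illustrative; --target_dir and
# --file_pattern are the arguments defined above):
#   python reports_to_junit.py --target_dir ./target --file_pattern "*.txt"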