def xml_parser(data, project_id, scan_id):
    """Parse an OWASP Dependency-Check XML report and persist findings.

    Handles the 2.0, 2.2 and 2.3 report schemas (selected by the XML
    namespace URI) plus a legacy fallback that carries raw CVSS v2
    fields.  Each vulnerability is saved to
    ``dependencycheck_scan_results_db``; scan totals are then rolled up
    into ``dependencycheck_scan_db`` and a notification email is sent.

    :param data: parsed XML tree of the report (lxml; ``xpath`` is used).
    :param project_id: owning project identifier.
    :param scan_id: identifier of this scan.
    :return: ``HttpResponse(status=201)`` when done.
    """
    # Field defaults; 'Na' marks a value missing from the report.
    fileName = 'Na'
    filePath = 'Na'
    evidenceCollected = 'Na'
    name = 'Na'
    cvssScore = 'Na'
    cvssAccessVector = 'Na'
    cvssAccessComplexity = 'Na'
    cvssAuthenticationr = 'Na'
    cvssConfidentialImpact = 'Na'
    cvssIntegrityImpact = 'Na'
    cvssAvailabilityImpact = 'Na'
    severity = 'Na'
    cwe = 'Na'
    description = 'Na'
    references = 'Na'
    vulnerableSoftware = 'Na'
    vul_col = 'Na'
    # Report namespace URI; doubles as the schema-version discriminator below.
    pt = data.xpath('namespace-uri(.)')
    # root = data.getroot()
    inst = []
    for scan in data:
        for dependencies in scan:
            for dependency in dependencies:
                if dependency.tag == '{%s}fileName' % pt:
                    fileName = dependency.text
                if dependency.tag == '{%s}filePath' % pt:
                    filePath = dependency.text
                if dependency.tag == '{%s}evidenceCollected' % pt:
                    evidenceCollected = dependency.text
                for vuln in dependency:
                    if vuln.tag == '{%s}vulnerability' % pt:
                        if pt == 'https://jeremylong.github.io/DependencyCheck/dependency-check.2.0.xsd':
                            # Schema 2.0: flat child elements under <vulnerability>.
                            for vulner in vuln:
                                if vulner.tag == '{%s}name' % pt:
                                    name = vulner.text
                                if vulner.tag == '{%s}description' % pt:
                                    description = vulner.text
                                if vulner.tag == '{%s}references' % pt:
                                    references = vulner.text
                                if vulner.tag == '{%s}vulnerableSoftware' % pt:
                                    vulnerableSoftware = vulner.text
                                for vuln_dat in vulner:
                                    if vuln_dat.tag == '{%s}cwe' % pt:
                                        cwe = vuln_dat.text
                                    if vuln_dat.tag == '{%s}severity' % pt:
                                        severity_dat = vuln_dat.text
                                        # Normalise report casing to UI casing.
                                        if severity_dat == 'HIGH':
                                            severity = 'High'
                                        elif severity_dat == 'MEDIUM':
                                            severity = 'Medium'
                                        elif severity_dat == 'LOW':
                                            severity = 'Low'
                        elif pt == 'https://jeremylong.github.io/DependencyCheck/dependency-check.2.2.xsd':
                            # Schema 2.2: reference URLs live in nested <url> nodes.
                            for dc22 in vuln:
                                if dc22.tag == '{%s}name' % pt:
                                    name = dc22.text
                                if dc22.tag == '{%s}description' % pt:
                                    description = dc22.text
                                if dc22.tag == '{%s}vulnerableSoftware' % pt:
                                    vulnerableSoftware = dc22.text
                                for vuln_dat in dc22:
                                    for d in vuln_dat:
                                        if d.tag == '{%s}url' % pt:
                                            references = d.text
                                    if vuln_dat.tag == '{%s}cwe' % pt:
                                        cwe = vuln_dat.text
                                    if vuln_dat.tag == '{%s}severity' % pt:
                                        severity_dat = vuln_dat.text
                                        if severity_dat == 'HIGH':
                                            severity = 'High'
                                        elif severity_dat == 'MEDIUM':
                                            severity = 'Medium'
                                        elif severity_dat == 'LOW':
                                            severity = 'Low'
                        elif pt == 'https://jeremylong.github.io/DependencyCheck/dependency-check.2.3.xsd':
                            # Schema 2.3: same layout as 2.2.
                            for dc22 in vuln:
                                if dc22.tag == '{%s}name' % pt:
                                    name = dc22.text
                                if dc22.tag == '{%s}description' % pt:
                                    description = dc22.text
                                if dc22.tag == '{%s}vulnerableSoftware' % pt:
                                    vulnerableSoftware = dc22.text
                                for vuln_dat in dc22:
                                    for d in vuln_dat:
                                        if d.tag == '{%s}url' % pt:
                                            references = d.text
                                    if vuln_dat.tag == '{%s}cwe' % pt:
                                        cwe = vuln_dat.text
                                    if vuln_dat.tag == '{%s}severity' % pt:
                                        severity_dat = vuln_dat.text
                                        if severity_dat == 'HIGH':
                                            severity = 'High'
                                        elif severity_dat == 'MEDIUM':
                                            severity = 'Medium'
                                        elif severity_dat == 'LOW':
                                            severity = 'Low'
                        else:
                            # Legacy schema fallback: raw CVSS v2 vector fields.
                            for vulner in vuln:
                                if vulner.tag == '{%s}name' % pt:
                                    name = vulner.text
                                if vulner.tag == '{%s}cvssScore' % pt:
                                    cvssScore = vulner.text
                                if vulner.tag == '{%s}cvssAccessVector' % pt:
                                    cvssAccessVector = vulner.text
                                if vulner.tag == '{%s}cvssAccessComplexity' % pt:
                                    cvssAccessComplexity = vulner.text
                                if vulner.tag == '{%s}cvssAuthenticationr' % pt:
                                    cvssAuthenticationr = vulner.text
                                if vulner.tag == '{%s}cvssConfidentialImpact' % pt:
                                    cvssConfidentialImpact = vulner.text
                                if vulner.tag == '{%s}cvssIntegrityImpact' % pt:
                                    cvssIntegrityImpact = vulner.text
                                if vulner.tag == '{%s}cvssAvailabilityImpact' % pt:
                                    cvssAvailabilityImpact = vulner.text
                                if vulner.tag == '{%s}severity' % pt:
                                    severity = vulner.text
                                if vulner.tag == '{%s}cwe' % pt:
                                    cwe = vulner.text
                                if vulner.tag == '{%s}description' % pt:
                                    description = vulner.text
                                if vulner.tag == '{%s}references' % pt:
                                    references = vulner.text
                                if vulner.tag == '{%s}vulnerableSoftware' % pt:
                                    vulnerableSoftware = vulner.text
                        date_time = datetime.now()
                        vul_id = uuid.uuid4()
                        # Bootstrap badge colour for the severity; stays 'Na' for
                        # unmapped severities -- NOTE(review): confirm the UI
                        # tolerates a 'Na' colour class.
                        if severity == "High":
                            vul_col = "danger"
                        elif severity == 'Medium':
                            vul_col = "warning"
                        elif severity == 'Low':
                            vul_col = "info"
                        # Duplicate detection: sha256 over name+file+severity.
                        dup_data = name + fileName + severity
                        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
                        match_dup = dependencycheck_scan_results_db.objects.filter(
                            dup_hash=duplicate_hash).values('dup_hash')
                        lenth_match = len(match_dup)
                        if lenth_match == 1:
                            duplicate_vuln = 'Yes'
                        elif lenth_match == 0:
                            duplicate_vuln = 'No'
                        else:
                            duplicate_vuln = 'None'
                        # False positive if this hash was previously marked so.
                        false_p = dependencycheck_scan_results_db.objects.filter(
                            false_positive_hash=duplicate_hash)
                        fp_lenth_match = len(false_p)
                        if fp_lenth_match == 1:
                            false_positive = 'Yes'
                        else:
                            false_positive = 'No'
                        # No CWE in the report: fall back to the vuln name.
                        if cwe == 'Na':
                            cwe = name
                        save_all = dependencycheck_scan_results_db(
                            # date_time=date_time,
                            vuln_id=vul_id,
                            scan_id=scan_id,
                            project_id=project_id,
                            fileName=fileName,
                            filePath=filePath,
                            evidenceCollected=evidenceCollected,
                            name=name,
                            cvssScore=cvssScore,
                            cvssAccessVector=cvssAccessVector,
                            cvssAccessComplexity=cvssAccessComplexity,
                            cvssAuthenticationr=cvssAuthenticationr,
                            cvssConfidentialImpact=cvssConfidentialImpact,
                            cvssIntegrityImpact=cvssIntegrityImpact,
                            cvssAvailabilityImpact=cvssAvailabilityImpact,
                            severity=severity,
                            cwe=cwe,
                            description=description,
                            references=references,
                            vulnerableSoftware=vulnerableSoftware,
                            vul_col=vul_col,
                            vuln_status='Open',
                            dup_hash=duplicate_hash,
                            vuln_duplicate=duplicate_vuln,
                            false_positive=false_positive
                        )
                        save_all.save()
    # Roll totals (excluding false positives) up into the scan record.
    all_dependency_data = dependencycheck_scan_results_db.objects.filter(
        scan_id=scan_id, false_positive='No')
    total_vul = len(all_dependency_data)
    total_high = len(all_dependency_data.filter(severity="High"))
    total_medium = len(all_dependency_data.filter(severity="Medium"))
    total_low = len(all_dependency_data.filter(severity="Low"))
    total_duplicate = len(all_dependency_data.filter(vuln_duplicate='Yes'))
    dependencycheck_scan_db.objects.filter(scan_id=scan_id).update(
        total_vuln=total_vul,
        SEVERITY_HIGH=total_high,
        SEVERITY_MEDIUM=total_medium,
        SEVERITY_LOW=total_low,
        total_dup=total_duplicate
    )
    subject = 'Archery Tool Scan Status - DependencyCheck Report Uploaded'
    message = 'DependencyCheck Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (name, total_vul, total_high, total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
    return HttpResponse(status=201)
def xml_parser(username, root, project_id, scan_id):
    """ZAP Proxy scanner XML report parser.

    Iterates every ``alertitem`` in the report, maps ZAP risk codes to
    severity labels and Bootstrap badge colours, de-duplicates findings
    by a sha256 hash of name+risk+site, stores rows in
    ``zap_scan_results_db``, flattens the collected evidence dicts into
    the ``param`` column, rolls totals into ``zap_scans_db`` and sends
    a notification email.

    :param username: owner of the scan records.
    :param root: parsed XML root element of the ZAP report.
    :param project_id: owning project identifier.
    :param scan_id: identifier of this scan.
    :return: None.
    """
    # NOTE(review): this function leans on module-level globals.  Several
    # are read here before this function assigns them (``url`` in the
    # first save, ``false_positive`` on the first alert, ``target_url``
    # in the email body) -- confirm they are initialised elsewhere in the
    # module or carried over from a previous call.
    global vul_col, \
        confidence, \
        wascid, risk, \
        reference, \
        url, \
        name, \
        solution, \
        instance, \
        sourceid, \
        pluginid, \
        alert, \
        desc, \
        riskcode, vuln_id, false_positive, duplicate_hash, duplicate_vuln, scan_url
    # Scanned site URL: taken from the 'name' attribute of the report's
    # top-level element(s); the last one wins.
    for child in root:
        d = child.attrib
        scan_url = d['name']
    for alert in root.iter('alertitem'):
        inst = []
        for vuln in alert:
            vuln_id = uuid.uuid4()
            if vuln.tag == "pluginid":
                pluginid = vuln.text
            if vuln.tag == "alert":
                alert = vuln.text
            if vuln.tag == "name":
                name = vuln.text
            if vuln.tag == "riskcode":
                riskcode = vuln.text
            if vuln.tag == "confidence":
                confidence = vuln.text
            if vuln.tag == "desc":
                desc = vuln.text
            if vuln.tag == "solution":
                solution = vuln.text
            if vuln.tag == "reference":
                reference = vuln.text
            if vuln.tag == "wascid":
                wascid = vuln.text
            if vuln.tag == "sourceid":
                sourceid = vuln.text
            # Collect per-instance evidence as one {tag: text} dict per node.
            for instances in vuln:
                for ii in instances:
                    instance = {}
                    instance[ii.tag] = ii.text
                    inst.append(instance)
        # ZAP risk codes: 3=High, 2=Medium, 1=Low, anything else -> Low.
        if riskcode == "3":
            vul_col = "danger"
            risk = "High"
        elif riskcode == '2':
            vul_col = "warning"
            risk = "Medium"
        elif riskcode == '1':
            vul_col = "info"
            risk = "Low"
        else:
            vul_col = "info"
            risk = "Low"
        if name == "None":
            print(name)
        else:
            # Duplicate detection: sha256 over name+risk+site URL.
            dup_data = name + risk + scan_url
            print(dup_data)
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()
            match_dup = zap_scan_results_db.objects.filter(
                dup_hash=duplicate_hash).values('dup_hash').distinct()
            lenth_match = len(match_dup)
            if lenth_match == 0:
                duplicate_vuln = 'No'
                dump_data = zap_scan_results_db(vuln_id=vuln_id,
                                                vuln_color=vul_col,
                                                scan_id=scan_id,
                                                project_id=project_id,
                                                confidence=confidence,
                                                wascid=wascid,
                                                risk=risk,
                                                reference=reference,
                                                url=url,
                                                name=name,
                                                solution=solution,
                                                param=instance,
                                                sourceid=sourceid,
                                                pluginId=pluginid,
                                                alert=alert,
                                                description=desc,
                                                false_positive=false_positive,
                                                rescan='No',
                                                vuln_status='Open',
                                                dup_hash=duplicate_hash,
                                                vuln_duplicate=duplicate_vuln,
                                                evidence=inst,
                                                username=username)
                dump_data.save()
            else:
                duplicate_vuln = 'Yes'
                dump_data = zap_scan_results_db(vuln_id=vuln_id,
                                                vuln_color=vul_col,
                                                scan_id=scan_id,
                                                project_id=project_id,
                                                confidence=confidence,
                                                wascid=wascid,
                                                risk=risk,
                                                reference=reference,
                                                url=url,
                                                name=name,
                                                solution=solution,
                                                param=instance,
                                                sourceid=sourceid,
                                                pluginId=pluginid,
                                                alert=alert,
                                                description=desc,
                                                false_positive='Duplicate',
                                                rescan='No',
                                                vuln_status='Duplicate',
                                                dup_hash=duplicate_hash,
                                                vuln_duplicate=duplicate_vuln,
                                                evidence=inst,
                                                username=username)
                dump_data.save()
            # NOTE(review): false_positive is computed AFTER the save above,
            # so it only affects the NEXT alert's row -- confirm intended.
            false_p = zap_scan_results_db.objects.filter(
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            # Flatten the stored evidence (stringified list of dicts) into a
            # readable comma-joined 'Key: value' string in the param column.
            vul_dat = zap_scan_results_db.objects.filter(username=username, vuln_id=vuln_id)
            full_data = []
            for data in vul_dat:
                evi = data.evidence
                evi_data = ast.literal_eval(evi)
                for evidence in evi_data:
                    for key, value in evidence.items():
                        if key == 'evidence':
                            key = 'Evidence'
                        if key == 'attack':
                            key = 'Attack'
                        if key == 'uri':
                            key = 'URI'
                        if key == 'method':
                            key = 'Method'
                        if key == 'param':
                            key = 'Parameter'
                        instance = key + ': ' + value
                        full_data.append(instance)
            removed_list_data = ','.join(full_data)
            zap_scan_results_db.objects.filter(
                username=username, vuln_id=vuln_id).update(param=removed_list_data)
    # Roll totals (excluding false positives) up into the scan record.
    zap_all_vul = zap_scan_results_db.objects.filter(username=username,
                                                     scan_id=scan_id,
                                                     false_positive='No')
    duplicate_count = zap_scan_results_db.objects.filter(username=username,
                                                         scan_id=scan_id,
                                                         vuln_duplicate='Yes')
    total_high = len(zap_all_vul.filter(risk="High"))
    total_medium = len(zap_all_vul.filter(risk="Medium"))
    total_low = len(zap_all_vul.filter(risk="Low"))
    total_info = len(zap_all_vul.filter(risk="Informational"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    total_vul = total_high + total_medium + total_low + total_info
    zap_scans_db.objects.filter(username=username, scan_scanid=scan_id) \
        .update(total_vul=total_vul,
                high_vul=total_high,
                medium_vul=total_medium,
                low_vul=total_low,
                info_vul=total_info,
                total_dup=total_duplicate,
                scan_url=scan_url
                )
    if total_vul == total_duplicate:
        zap_scans_db.objects.filter(username=username, scan_scanid=scan_id) \
            .update(total_vul=total_vul,
                    high_vul=total_high,
                    medium_vul=total_medium,
                    low_vul=total_low,
                    total_dup=total_duplicate
                    )
    subject = 'Archery Tool Scan Status - ZAP Report Uploaded'
    # NOTE(review): target_url is not defined in this function -- it must be
    # a module-level global set elsewhere, otherwise this raises NameError.
    message = 'ZAP Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target_url, total_vul, total_high, total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def npmaudit_report_json(data, project_id, scan_id, username):
    """Parse an ``npm audit`` JSON report and persist findings.

    Each advisory is normalised (falsy fields become "not found"), npm
    severities are mapped to High/Medium/Low plus a Bootstrap badge
    colour, findings are de-duplicated via a sha256 hash of
    title+severity+module, stored in ``npmaudit_scan_results_db``, and
    scan totals are rolled up into ``npmaudit_scan_db`` before a
    notification email is sent.

    :param data: decoded JSON dict from ``npm audit --json``.
    :param project_id: owning project identifier.
    :param scan_id: identifier of this scan.
    :param username: owner of the scan records.
    :return: None.
    """
    date_time = datetime.now()
    # vul_col is module-level state shared with other parsers; kept global
    # for compatibility, but always assigned below before use.
    global vul_col
    for vuln in data['advisories']:
        # Hoist the advisory dict instead of re-indexing it per field.
        advisory = data['advisories'][vuln]
        title = advisory['title']
        found_by = advisory['found_by']
        reported_by = advisory['reported_by']
        module_name = advisory['module_name']
        cves = advisory['cves']
        vulnerable_versions = advisory['vulnerable_versions']
        patched_versions = advisory['patched_versions']
        overview = advisory['overview']
        recommendation = advisory['recommendation']
        references = advisory['references']
        access = advisory['access']
        severity = advisory['severity']
        cwe = advisory['cwe']
        url = advisory['url']
        findings = advisory['findings']
        # Map each vulnerable installed version to its dependency paths.
        vuln_versions = {}
        for find in findings:
            vuln_versions[find['version']] = [find['paths']]
        # Normalise empty/None fields so the stored row is complete.
        if not title:
            title = "not found"
        if not found_by:
            found_by = "not found"
        if not reported_by:
            reported_by = "not found"
        if not module_name:
            module_name = "not found"
        if not cves:
            cves = "not found"
        if not vulnerable_versions:
            vulnerable_versions = "not found"
        if not patched_versions:
            patched_versions = "not found"
        if not recommendation:
            recommendation = "not found"
        if not overview:
            overview = "not found"
        if not references:
            references = "not found"
        if not access:
            access = "not found"
        if not severity:
            severity = "not found"
        if not cwe:
            cwe = "not found"
        if not url:
            url = "not found"
        # npm severities -> UI severities + badge colour.
        if severity == "critical":
            severity = 'High'
            vul_col = "danger"
        elif severity == "high":
            severity = 'High'
            vul_col = "danger"
        elif severity == 'moderate':
            severity = 'Medium'
            vul_col = "warning"
        elif severity == 'low':
            severity = 'Low'
            vul_col = "info"
        elif severity == 'info':
            severity = 'Low'
            vul_col = "info"
        else:
            # Fix: an unmapped severity previously reused whatever the
            # global vul_col held from the last advisory; default to info.
            vul_col = "info"
        vul_id = uuid.uuid4()
        # Duplicate detection: sha256 over title+severity+module.
        dup_data = str(title) + str(severity) + str(module_name)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = npmaudit_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 0:
            duplicate_vuln = 'No'
            # False positive if this hash was previously marked so.
            false_p = npmaudit_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = npmaudit_scan_results_db(
                vuln_id=vul_id,
                date_time=date_time,
                scan_id=scan_id,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                version=vuln_versions,
                title=title,
                found_by=found_by,
                reported_by=reported_by,
                module_name=module_name,
                cves=cves,
                vulnerable_versions=vulnerable_versions,
                patched_versions=patched_versions,
                overview=overview,
                recommendation=recommendation,
                references=references,
                access=access,
                severity=severity,
                cwe=cwe,
                url=url,
                username=username,
            )
            save_all.save()
        else:
            duplicate_vuln = 'Yes'
            save_all = npmaudit_scan_results_db(
                vuln_id=vul_id,
                date_time=date_time,
                scan_id=scan_id,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                version=vuln_versions,
                title=title,
                found_by=found_by,
                reported_by=reported_by,
                module_name=module_name,
                cves=cves,
                vulnerable_versions=vulnerable_versions,
                patched_versions=patched_versions,
                overview=overview,
                recommendation=recommendation,
                references=references,
                access=access,
                severity=severity,
                cwe=cwe,
                url=url,
                username=username,
            )
            save_all.save()
    # Roll totals (excluding false positives) up into the scan record.
    all_findbugs_data = npmaudit_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = npmaudit_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    npmaudit_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    # Fix: the notification previously said "Trivy" (copy-paste from the
    # Trivy parser); this is the npm-audit report.
    subject = 'Archery Tool Scan Status - NpmAudit Report Uploaded'
    message = 'NpmAudit Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("npm-audit", total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def trivy_report_json(data, project_id, scan_id, username):
    """Parse a Trivy JSON report and persist findings.

    Each target entry's ``Vulnerabilities`` list is walked; missing
    optional fields default to "Not Found", Trivy severities are mapped
    to High/Medium/Low plus a Bootstrap badge colour, findings are
    de-duplicated via a sha256 hash of id+severity+package, stored in
    ``trivy_scan_results_db``, and scan totals are rolled up into
    ``trivy_scan_db`` before a notification email is sent.

    :param data: decoded JSON list from a Trivy report.
    :param project_id: owning project identifier.
    :param scan_id: identifier of this scan.
    :param username: owner of the scan records.
    :return: None.
    """
    date_time = datetime.now()
    vul_col = ''
    for vuln_data in data:
        vuln = vuln_data['Vulnerabilities']
        if vuln is None:
            # Target with no findings.
            continue
        for issue in vuln:
            # Trivy omits optional fields; default rather than crash.
            VulnerabilityID = issue.get('VulnerabilityID', "Not Found")
            PkgName = issue.get('PkgName', "Not Found")
            InstalledVersion = issue.get('InstalledVersion', "Not Found")
            FixedVersion = issue.get('FixedVersion', "Not Found")
            Title = issue.get('Title', "Not Found")
            Description = issue.get('Description', "Not Found")
            Severity = issue.get('Severity', "Not Found")
            References = issue.get('References', "Not Found")
            # Trivy severities -> UI severities + badge colour.
            if Severity == "CRITICAL":
                Severity = 'High'
                vul_col = "danger"
            elif Severity == "HIGH":
                Severity = 'High'
                vul_col = "danger"
            elif Severity == 'MEDIUM':
                Severity = 'Medium'
                vul_col = "warning"
            elif Severity == 'LOW':
                Severity = 'Low'
                vul_col = "info"
            elif Severity == 'UNKNOWN':
                Severity = 'Low'
                vul_col = "info"
            vul_id = uuid.uuid4()
            # Duplicate detection: sha256 over id+severity+package.
            dup_data = str(VulnerabilityID) + str(Severity) + str(PkgName)
            duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
            match_dup = trivy_scan_results_db.objects.filter(
                username=username,
                dup_hash=duplicate_hash).values('dup_hash')
            lenth_match = len(match_dup)
            if lenth_match == 0:
                duplicate_vuln = 'No'
                # False positive if this hash was previously marked so.
                false_p = trivy_scan_results_db.objects.filter(
                    username=username,
                    false_positive_hash=duplicate_hash)
                fp_lenth_match = len(false_p)
                if fp_lenth_match == 1:
                    false_positive = 'Yes'
                else:
                    false_positive = 'No'
                save_all = trivy_scan_results_db(
                    vuln_id=vul_id,
                    scan_id=scan_id,
                    date_time=date_time,
                    project_id=project_id,
                    VulnerabilityID=VulnerabilityID,
                    PkgName=PkgName,
                    InstalledVersion=InstalledVersion,
                    FixedVersion=FixedVersion,
                    Title=Title,
                    Description=Description,
                    Severity=Severity,
                    References=References,
                    vul_col=vul_col,
                    vuln_status='Open',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    false_positive=false_positive,
                    username=username,
                )
                save_all.save()
            else:
                duplicate_vuln = 'Yes'
                save_all = trivy_scan_results_db(
                    vuln_id=vul_id,
                    scan_id=scan_id,
                    date_time=date_time,
                    project_id=project_id,
                    VulnerabilityID=VulnerabilityID,
                    PkgName=PkgName,
                    InstalledVersion=InstalledVersion,
                    FixedVersion=FixedVersion,
                    Title=Title,
                    Description=Description,
                    Severity=Severity,
                    References=References,
                    vul_col=vul_col,
                    vuln_status='Duplicate',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    false_positive='Duplicate',
                    username=username,
                )
                save_all.save()
    # Roll totals (excluding false positives) up into the scan record.
    all_findbugs_data = trivy_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = trivy_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(Severity="High"))
    total_medium = len(all_findbugs_data.filter(Severity="Medium"))
    total_low = len(all_findbugs_data.filter(Severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    trivy_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate
    )
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Trivy Report Uploaded'
    # Fix: the message previously interpolated an undefined name ``Target``,
    # raising NameError before the notification was sent; use the scan id.
    message = 'Trivy Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def updated_xml_parser(root, project_id, scan_id, username):
    """Parse an OpenVAS XML report and persist findings.

    For every ``<result>`` element the interesting child and grandchild
    tags are copied into module-level globals (so a value persists across
    results when the tag is absent -- existing behaviour kept as-is),
    one row is stored per result, and the per-host totals are rolled up
    into ``openvas_scan_db``.  Note: the host address is deliberately
    used as the stored ``scan_id`` throughout, matching the later
    ``scan_id=host`` filters.

    :param root: parsed XML root element of the OpenVAS report.
    :param project_id: owning project identifier.
    :param scan_id: scan identifier (only used in the email body).
    :param username: owner of the scan records.
    :return: None.
    """
    for openvas in root.findall(".//result"):
        for r in openvas:
            if r.tag == "name":
                global name
                if r.text is None:
                    name = "NA"
                else:
                    name = r.text
            if r.tag == "creation_time":
                global creation_time
                if r.text is None:
                    creation_time = "NA"
                else:
                    creation_time = r.text
            if r.tag == "modification_time":
                global modification_time
                if r.text is None:
                    modification_time = "NA"
                else:
                    modification_time = r.text
            if r.tag == "host":
                global host
                if r.text is None:
                    host = "NA"
                else:
                    host = r.text
            if r.tag == "port":
                global port
                if r.text is None:
                    port = "NA"
                else:
                    port = r.text
            if r.tag == "threat":
                global threat
                if r.text is None:
                    threat = "NA"
                else:
                    threat = r.text
            if r.tag == "severity":
                global severity
                if r.text is None:
                    severity = "NA"
                else:
                    severity = r.text
            if r.tag == "description":
                global description
                if r.text is None:
                    description = "NA"
                else:
                    description = r.text
            # Fix: Element.getchildren() was removed in Python 3.9;
            # iterating the element directly is the supported equivalent.
            for rr in r:
                if rr.tag == "family":
                    global family
                    if rr.text is None:
                        family = "NA"
                    else:
                        family = rr.text
                if rr.tag == "cvss_base":
                    global cvss_base
                    if rr.text is None:
                        cvss_base = "NA"
                    else:
                        cvss_base = rr.text
                if rr.tag == "cve":
                    global cve
                    if rr.text is None:
                        cve = "NA"
                    else:
                        cve = rr.text
                if rr.tag == "bid":
                    global bid
                    if rr.text is None:
                        bid = "NA"
                    else:
                        bid = rr.text
                if rr.tag == "xref":
                    global xref
                    if rr.text is None:
                        xref = "NA"
                    else:
                        xref = rr.text
                if rr.tag == "tags":
                    global tags
                    if rr.text is None:
                        tags = "NA"
                    else:
                        tags = rr.text
                if rr.tag == "type":
                    global banner
                    if rr.text is None:
                        banner = "NA"
                    else:
                        banner = rr.text
        date_time = datetime.now()
        vul_id = uuid.uuid4()
        # Duplicate detection: sha256 over name+host+severity+port.
        dup_data = name + host + severity + port
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        # Fix: the duplicate lookup previously matched the hash against the
        # vuln_duplicate (Yes/No) column, so it could never find a match;
        # compare against dup_hash as every other parser in this module does.
        match_dup = ov_scan_result_db.objects.filter(
            username=username,
            dup_hash=duplicate_hash).values('dup_hash').distinct()
        lenth_match = len(match_dup)
        # OpenVAS threat levels -> Bootstrap badge colour.
        vuln_color = ''
        if threat == 'High':
            vuln_color = 'danger'
        elif threat == 'Medium':
            vuln_color = 'warning'
        elif threat == 'Low':
            vuln_color = 'info'
        elif threat == 'Log':
            vuln_color = 'info'
        if lenth_match == 1:
            duplicate_vuln = 'Yes'
        elif lenth_match == 0:
            duplicate_vuln = 'No'
        else:
            duplicate_vuln = 'None'
        # False positive if this hash was previously marked so.
        false_p = ov_scan_result_db.objects.filter(
            username=username, false_positive_hash=duplicate_hash)
        fp_lenth_match = len(false_p)
        if fp_lenth_match == 1:
            false_positive = 'Yes'
        else:
            false_positive = 'No'
        save_all = ov_scan_result_db(scan_id=host,
                                     vul_id=vul_id,
                                     name=name,
                                     creation_time=creation_time,
                                     modification_time=modification_time,
                                     host=host,
                                     port=port,
                                     threat=threat,
                                     severity=severity,
                                     description=description,
                                     family=family,
                                     cvss_base=cvss_base,
                                     cve=cve,
                                     bid=bid,
                                     xref=xref,
                                     tags=tags,
                                     banner=banner,
                                     date_time=date_time,
                                     false_positive=false_positive,
                                     vuln_status='Open',
                                     dup_hash=duplicate_hash,
                                     vuln_duplicate=duplicate_vuln,
                                     project_id=project_id,
                                     vuln_color=vuln_color,
                                     username=username,
                                     )
        save_all.save()
        # Per-host rollup (scan_id column holds the host address).
        openvas_vul = ov_scan_result_db.objects.filter(username=username,
                                                       scan_id=host)
        total_high = len(openvas_vul.filter(threat="High"))
        total_medium = len(openvas_vul.filter(threat="Medium"))
        total_low = len(openvas_vul.filter(threat="Low"))
        total_duplicate = len(openvas_vul.filter(vuln_duplicate='Yes'))
        total_vul = total_high + total_medium + total_low
        openvas_scan_db.objects.filter(username=username, scan_id=host). \
            update(total_vul=total_vul,
                   high_vul=total_high,
                   medium_vul=total_medium,
                   low_vul=total_low,
                   total_dup=total_duplicate,
                   scan_ip=host,
                   )
    subject = 'Archery Tool Scan Status - OpenVAS Report Uploaded'
    message = 'OpenVAS Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id, username):
    """Netsparker XML report parser.

    Walks each element under the report root: the ``<target>`` element
    yields the scanned URL, every other element is treated as one
    vulnerability whose children populate the fields below.  Findings
    are de-duplicated by a sha256 hash of type+url+severity, stored in
    ``netsparker_scan_result_db``, totals are rolled up into
    ``netsparker_scan_db`` and a notification email is sent.

    :param root: parsed XML root element of the Netsparker report.
    :param project_id: owning project identifier.
    :param scan_id: identifier of this scan.
    :param username: owner of the scan records.
    :return: None.
    """
    # NOTE(review): field values are module-level globals, so a field
    # missing from one vulnerability silently keeps the previous
    # element's value -- confirm this carry-over is intended.
    global vuln_url, vuln_type, vuln_severity, vuln_certainty, vuln_rawrequest, \
        vuln_rawresponse, vuln_extrainformation, vuln_classification, vuln_id, \
        vul_col, description, impact, actionsToTake, remedy, requiredSkillsForExploitation, \
        externalReferences, remedyReferences, proofOfConcept, proofs
    date_time = datetime.now()
    for data in root:
        if data.tag == "target":
            # Scanned site URL; used later in the rollup and email.
            # NOTE(review): ``target`` stays unbound if the report has no
            # <target><url> element -- the final update would then raise.
            for url in data:
                if url.tag == 'url':
                    target = url.text
        for vuln in data:
            if vuln.tag == 'url':
                vuln_url = vuln.text
            if vuln.tag == 'type':
                vuln_type = vuln.text
            if vuln.tag == 'severity':
                # Netsparker 'Important' maps to UI 'High'.
                if vuln.text == 'Important':
                    vuln_severity = 'High'
                else:
                    vuln_severity = vuln.text
            if vuln.tag == 'certainty':
                vuln_certainty = vuln.text
            if vuln.tag == 'rawrequest':
                vuln_rawrequest = vuln.text
            if vuln.tag == 'rawresponse':
                vuln_rawresponse = vuln.text
            if vuln.tag == 'extrainformation':
                vuln_extrainformation = vuln.text
            if vuln.tag == 'classification':
                vuln_classification = vuln.text
            if vuln.tag == 'description':
                description = vuln.text
            if vuln.tag == 'impact':
                impact = vuln.text
            if vuln.tag == 'actionsToTake':
                actionsToTake = vuln.text
            if vuln.tag == 'remedy':
                remedy = vuln.text
            if vuln.tag == 'requiredSkillsForExploitation':
                requiredSkillsForExploitation = vuln.text
            if vuln.tag == 'externalReferences':
                externalReferences = vuln.text
            if vuln.tag == 'remedyReferences':
                remedyReferences = vuln.text
            if vuln.tag == 'proofOfConcept':
                proofOfConcept = vuln.text
            if vuln.tag == 'proofs':
                proofs = vuln.text
        vuln_id = uuid.uuid4()
        # Severity -> Bootstrap badge colour; Critical collapses to High,
        # anything unrecognised becomes Low/info.
        if vuln_severity == "Critical":
            vuln_severity = "High"
            vul_col = "danger"
        elif vuln_severity == "High":
            vul_col = 'danger'
        elif vuln_severity == 'Medium':
            vul_col = "warning"
        elif vuln_severity == 'Low':
            vul_col = "info"
        else:
            vuln_severity = "Low"
            vul_col = "info"
        # Duplicate detection: sha256 over type+url+severity.
        dup_data = str(vuln_type) + str(vuln_url) + str(vuln_severity)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = netsparker_scan_result_db.objects.filter(
            username=username,
            dup_hash=duplicate_hash).values('dup_hash').distinct()
        lenth_match = len(match_dup)
        if lenth_match == 0:
            duplicate_vuln = 'No'
            # False positive if this hash was previously marked so.
            false_p = netsparker_scan_result_db.objects.filter(
                username=username,
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            global false_positive
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            elif lenth_match == 0:
                false_positive = 'No'
            else:
                false_positive = 'No'
            dump_data = netsparker_scan_result_db(scan_id=scan_id,
                                                  project_id=project_id,
                                                  date_time=date_time,
                                                  vuln_id=vuln_id,
                                                  vuln_url=vuln_url,
                                                  type=vuln_type,
                                                  severity=vuln_severity,
                                                  certainty=vuln_certainty,
                                                  rawrequest=vuln_rawrequest,
                                                  rawresponse=vuln_rawresponse,
                                                  extrainformation=vuln_extrainformation,
                                                  classification=vuln_classification,
                                                  false_positive=false_positive,
                                                  vuln_color=vul_col,
                                                  description=description,
                                                  impact=impact,
                                                  actionsToTake=actionsToTake,
                                                  remedy=remedy,
                                                  requiredSkillsForExploitation=requiredSkillsForExploitation,
                                                  externalReferences=externalReferences,
                                                  remedyReferences=remedyReferences,
                                                  proofOfConcept=proofOfConcept,
                                                  proofs=proofs,
                                                  vuln_status='Open',
                                                  dup_hash=duplicate_hash,
                                                  vuln_duplicate=duplicate_vuln,
                                                  username=username
                                                  )
            dump_data.save()
        else:
            duplicate_vuln = 'Yes'
            dump_data = netsparker_scan_result_db(scan_id=scan_id,
                                                  project_id=project_id,
                                                  vuln_id=vuln_id,
                                                  date_time=date_time,
                                                  vuln_url=vuln_url,
                                                  type=vuln_type,
                                                  severity=vuln_severity,
                                                  certainty=vuln_certainty,
                                                  rawrequest=vuln_rawrequest,
                                                  rawresponse=vuln_rawresponse,
                                                  extrainformation=vuln_extrainformation,
                                                  classification=vuln_classification,
                                                  false_positive='Duplicate',
                                                  vuln_color=vul_col,
                                                  description=description,
                                                  impact=impact,
                                                  actionsToTake=actionsToTake,
                                                  remedy=remedy,
                                                  requiredSkillsForExploitation=requiredSkillsForExploitation,
                                                  externalReferences=externalReferences,
                                                  remedyReferences=remedyReferences,
                                                  proofOfConcept=proofOfConcept,
                                                  proofs=proofs,
                                                  vuln_status='Duplicate',
                                                  dup_hash=duplicate_hash,
                                                  vuln_duplicate=duplicate_vuln,
                                                  username=username
                                                  )
            dump_data.save()
    # Roll totals (excluding false positives) up into the scan record.
    netsparker_all_vul = netsparker_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = netsparker_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_critical = len(netsparker_all_vul.filter(severity='Critical'))
    total_high = len(netsparker_all_vul.filter(severity="High"))
    total_medium = len(netsparker_all_vul.filter(severity="Medium"))
    total_low = len(netsparker_all_vul.filter(severity="Low"))
    total_info = len(netsparker_all_vul.filter(severity="Information"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    total_vul = total_critical + total_high + total_medium + total_low + total_info
    netsparker_scan_db.objects.filter(username=username,
                                      scan_id=scan_id).update(total_vul=total_vul,
                                                              date_time=date_time,
                                                              high_vul=total_high,
                                                              medium_vul=total_medium,
                                                              low_vul=total_low,
                                                              critical_vul=total_critical,
                                                              info_vul=total_info,
                                                              total_dup=total_duplicate,
                                                              url=target
                                                              )
    if total_vul == total_duplicate:
        netsparker_scan_db.objects.filter(username=username,
                                          scan_id=scan_id).update(total_vul=total_vul,
                                                                  high_vul=total_high,
                                                                  date_time=date_time,
                                                                  medium_vul=total_medium,
                                                                  low_vul=total_low,
                                                                  critical_vul=total_critical,
                                                                  info_vul=total_info,
                                                                  total_dup=total_duplicate,
                                                                  url=target
                                                                  )
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Netsparker Report Uploaded'
    message = 'Netsparker Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def clair_report_json(data, project_id, scan_id, username):
    """Parse a Clair JSON report and persist its findings.

    Two report layouts are supported:
      * legacy: ``data['Vulnerabilities']`` keyed by 'High'/'Medium'/'Low'
      * newer:  ``data['vulnerabilities']`` as a flat list with lowercase keys

    Fixes over the previous revision:
      * a missing 'FeatureVersion'/'featureversion' key clobbered
        ``FeatureName`` instead of defaulting ``FeatureVersion``
      * the 'Low' duplicate lookup ignored ``username`` (deduped across users)
      * a report without a 'vulnerabilities' key raised KeyError and skipped
        the totals update and notification
      * ``vul_col`` / ``Name`` could be unbound (NameError) on unexpected input

    :param data: parsed Clair JSON report (dict)
    :param project_id: project the findings belong to
    :param scan_id: scan the findings belong to
    :param username: owner; scopes duplicate / false-positive lookups
    :return: None; rows go to clair_scan_results_db, the summary to
             clair_scan_db, then a notification e-mail is sent
    """
    date_time = datetime.now()
    Name = "NA"  # referenced by the notification even when nothing parses
    severity_colors = {'High': 'danger', 'Medium': 'warning', 'Low': 'info'}

    def field(vuln, key):
        # Tolerant lookup: report keys vary between Clair versions.
        try:
            return vuln[key]
        except Exception:
            return "Not Found"

    def save_result(Name, NamespaceName, Description, Link, Severity,
                    Metadata, FeatureName, FeatureVersion, vul_col):
        # Dedup per user: same Name+Severity+NamespaceName seen before -> duplicate.
        dup_data = Name + Severity + NamespaceName
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = clair_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        if len(match_dup) == 0:
            duplicate_vuln = 'No'
            false_p = clair_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
            vuln_status = 'Open'
        else:
            duplicate_vuln = 'Yes'
            false_positive = 'Duplicate'
            vuln_status = 'Duplicate'
        clair_scan_results_db(
            vuln_id=uuid.uuid4(),
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            Name=Name,
            NamespaceName=NamespaceName,
            Description=Description,
            Link=Link,
            Severity=Severity,
            Metadata=Metadata,
            FeatureName=FeatureName,
            FeatureVersion=FeatureVersion,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            vul_col=vul_col,
            username=username,
        ).save()

    # Messages preserved verbatim from the original per-section handlers.
    missing_msg = {'High': "High Vulnerability Not Found",
                   'Medium': "Medium Vulnerability not found.",
                   'Low': "Low Vulnerability Not found"}

    # Legacy layout: one list per severity under data['Vulnerabilities'].
    for level in ('High', 'Medium', 'Low'):
        try:
            for vuln in data['Vulnerabilities'][level]:
                Name = field(vuln, 'Name')
                NamespaceName = field(vuln, 'NamespaceName')
                Description = field(vuln, 'Description')
                Link = field(vuln, 'Link')
                Severity = field(vuln, 'Severity')
                Metadata = field(vuln, 'Metadata')
                FeatureName = field(vuln, 'FeatureName')
                # BUG FIX: used to assign FeatureName on a missing key.
                FeatureVersion = field(vuln, 'FeatureVersion')
                # 'info' fallback keeps vul_col bound for odd severities.
                vul_col = severity_colors.get(Severity, 'info')
                save_result(Name, NamespaceName, Description, Link, Severity,
                            Metadata, FeatureName, FeatureVersion, vul_col)
        except Exception:
            # Same as before: a missing/broken section is skipped with a note.
            print(missing_msg[level])

    # Newer layout: flat list with lowercase keys.
    try:
        flat_vulns = data['vulnerabilities']
    except Exception:
        flat_vulns = []  # previously an unguarded KeyError for legacy reports
    for vuln in flat_vulns:
        Name = field(vuln, 'vulnerability')
        NamespaceName = field(vuln, 'namespace')
        Description = field(vuln, 'description')
        Link = field(vuln, 'link')
        Severity = field(vuln, 'severity')
        Metadata = field(vuln, 'Metadata')
        FeatureName = field(vuln, 'featurename')
        # BUG FIX: used to assign FeatureName on a missing key.
        FeatureVersion = field(vuln, 'featureversion')
        if Severity == "Critical":
            # Critical is folded into High for reporting purposes.
            Severity = "High"
        vul_col = severity_colors.get(Severity, 'info')
        save_result(Name, NamespaceName, Description, Link, Severity,
                    Metadata, FeatureName, FeatureVersion, vul_col)

    # Refresh per-scan totals (false positives excluded) and notify.
    all_clair_data = clair_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = clair_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_clair_data)
    total_high = len(all_clair_data.filter(Severity='High'))
    total_medium = len(all_clair_data.filter(Severity='Medium'))
    total_low = len(all_clair_data.filter(Severity='Low'))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    clair_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        high_vul=total_high,
        date_time=date_time,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
    )
    subject = 'Archery Tool Scan Status - Clair Report Uploaded'
    message = 'Clair Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (Name, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def tfsec_report_json(data, project_id, scan_id, username):
    """Parse a tfsec JSON report and persist its findings.

    Fix over the previous revision: ``vul_col`` was a module global that was
    only assigned for ERROR/WARNING/INFO severities — the first finding with
    any other severity raised NameError (or reused another parser's colour).
    Unknown severities now keep their raw value and get the 'info' colour.

    :param data: parsed tfsec JSON (expects a 'results' list with
                 rule_id/link/location/description/severity per entry)
    :param project_id: project the findings belong to
    :param scan_id: scan the findings belong to
    :param username: owner; scopes duplicate / false-positive lookups
    :return: None; rows go to tfsec_scan_results_db, the summary to
             tfsec_scan_db, then a notification e-mail is sent
    """
    date_time = datetime.now()
    # tfsec severity -> (display severity, bootstrap colour class)
    severity_map = {
        'ERROR': ('High', 'danger'),
        'WARNING': ('Medium', 'warning'),
        'INFO': ('Info', 'info'),
    }
    for vuln in data['results']:
        rule_id = vuln['rule_id']
        link = vuln['link']
        location = vuln['location']
        filename = location['filename']
        start_line = location['start_line']
        end_line = location['end_line']
        description = vuln['description']
        severity, vul_col = severity_map.get(vuln['severity'],
                                             (vuln['severity'], 'info'))
        vul_id = uuid.uuid4()
        # Dedup per user on rule + mapped severity + file.
        dup_data = str(rule_id) + str(severity) + str(filename)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = tfsec_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        if len(match_dup) == 0:
            duplicate_vuln = 'No'
            false_p = tfsec_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
            vuln_status = 'Open'
        else:
            duplicate_vuln = 'Yes'
            false_positive = 'Duplicate'
            vuln_status = 'Duplicate'
        tfsec_scan_results_db(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            vul_col=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            rule_id=rule_id,
            filename=filename,
            severity=severity,
            description=description,
            link=link,
            start_line=start_line,
            end_line=end_line,
            username=username,
        ).save()
    # Refresh per-scan totals (false positives excluded) and notify.
    open_findings = tfsec_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicates = tfsec_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(open_findings)
    total_high = len(open_findings.filter(severity="High"))
    total_medium = len(open_findings.filter(severity="Medium"))
    total_low = len(open_findings.filter(severity="Low"))
    total_duplicate = len(duplicates.filter(vuln_duplicate='Yes'))
    tfsec_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - tfsec Report Uploaded'
    message = 'tfsec Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("tfsec", total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def checkmarx_report_xml(data, project_id, scan_id, username):
    """Parse a Checkmarx XML report and store one row per query element.

    After saving, per-scan summary counts are refreshed on checkmarx_scan_db
    and a notification e-mail is sent.

    :param data: parsed XML root; reads 'ProjectName' plus per-query
                 'name'/'Severity' and per-result 'FileName' attributes
                 (schema assumed from the attribute reads below — confirm
                 against a real Checkmarx report)
    :param project_id: project the findings belong to
    :param scan_id: scan the findings belong to
    :param username: owner; scopes duplicate / false-positive lookups
    :return: None
    """
    print(data)
    # NOTE(review): module-level globals, presumably shared with other
    # parsers in this file — confirm before localizing.
    global vul_col, project, result, result_data, file_name, inst, code_data
    project = data.attrib['ProjectName']
    scan_details = data.attrib
    for dat in data:
        # One iteration per Checkmarx query; attributes carry name/severity.
        query = dat.attrib
        name = dat.attrib['name']
        severity = dat.attrib['Severity']
        code_data = []
        result_data_all = []
        for dd in dat:
            # Collect each result element's attributes and its <Code> snippets.
            result_data = dd.attrib
            file_name = dd.attrib['FileName']
            result_data_all.append(dd.attrib)
            for d in dd.findall(".//Code"):
                result = d.text
                instance = {}
                instance[file_name] = d.text
                code_data.append(instance)
        # Map severity to a bootstrap colour class.
        # NOTE(review): an unexpected severity leaves vul_col at whatever the
        # global held previously — or NameError on the very first finding.
        if severity == "High":
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        vul_id = uuid.uuid4()
        # Duplicate detection: hash of name+severity+file_name.
        # NOTE(review): file_name is whichever result element came last in
        # the inner loop, so the hash covers only one file per query.
        dup_data = str(name) + str(severity) + str(file_name)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = checkmarx_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 1:
            duplicate_vuln = 'Yes'
        elif lenth_match == 0:
            duplicate_vuln = 'No'
        else:
            # More than one prior match is recorded as the string 'None' (sic).
            duplicate_vuln = 'None'
        false_p = checkmarx_scan_results_db.objects.filter(
            username=username, false_positive_hash=duplicate_hash)
        fp_lenth_match = len(false_p)
        if fp_lenth_match == 1:
            false_positive = 'Yes'
        else:
            false_positive = 'No'
        # NOTE(review): unlike the other parsers in this file, duplicates are
        # still saved with vuln_status='Open' rather than 'Duplicate'.
        save_all = checkmarx_scan_results_db(
            vuln_id=vul_id,
            scan_id=scan_id,
            project_id=project_id,
            vul_col=vul_col,
            vuln_status='Open',
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            name=name,
            severity=severity,
            query=query,
            result=code_data,
            scan_details=scan_details,
            result_data=result_data_all,
            file_name=file_name,
            username=username,
        )
        save_all.save()
    # Refresh summary counts (false positives excluded) and notify.
    all_findbugs_data = checkmarx_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(all_findbugs_data.filter(vuln_duplicate='Yes'))
    checkmarx_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        project_name=project,
        total_vuln=total_vul,
        SEVERITY_HIGH=total_high,
        SEVERITY_MEDIUM=total_medium,
        SEVERITY_LOW=total_low,
        total_dup=total_duplicate)
    subject = 'Archery Tool Scan Status - checkmarx Report Uploaded'
    message = 'checkmarx Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("checkmarx", total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id, username, target_url):
    """Parse an Arachni XML report and persist its findings.

    Fixes over the previous revision:
      * ``if values in None:`` raised TypeError ('NoneType' is not iterable)
        the moment a vector source attribute was examined — now ``is None``
      * ``getchildren()`` (removed in Python 3.9) replaced by direct iteration

    The per-field module globals are kept on purpose: fields set on one
    element iteration must persist until the row is saved.

    :param root: parsed Arachni XML root element
    :param project_id: project the findings belong to
    :param scan_id: scan the findings belong to
    :param username: owner; scopes duplicate / false-positive lookups
    :param target_url: scanned URL, written back to arachni_scan_db
    :return: None
    """
    global name, description, remedy_guidance, remedy_code, severity, check, digest, references, \
        vector, remarks, page, signature, \
        proof, trusted, platform_type, platform_name, url, action, \
        body, vuln_id, vul_col, ref_key, ref_values, vector_input_key, vector_input_values, \
        vector_source_key, vector_source_values, page_body_data, request_url, request_method, \
        request_raw, response_ip, response_raw_headers
    for issue in root:
        for data in issue:
            if data.tag == "issue":
                for vuln in data:
                    vuln_id = uuid.uuid4()
                    # Simple text fields default to "NA" when empty.
                    if vuln.tag == "name":
                        name = "NA" if vuln.text is None else vuln.text
                    if vuln.tag == "description":
                        description = "NA" if vuln.text is None else vuln.text
                    if vuln.tag == "remedy_guidance":
                        remedy_guidance = "NA" if vuln.text is None else vuln.text
                    if vuln.tag == "severity":
                        severity = "NA" if vuln.text is None else vuln.text
                    if vuln.tag == "references":
                        # Last reference attribute pair wins (original behaviour).
                        for ref_vuln in vuln:
                            for key, values in ref_vuln.attrib.items():
                                ref_key = "NA" if key is None else key
                                ref_values = "NA" if values is None else values
                    if vuln.tag == "vector":
                        for vec_vuln in vuln:
                            if vec_vuln.tag == 'inputs':
                                for vec_input in vec_vuln:
                                    for key, values in vec_input.attrib.items():
                                        vector_input_key = "NA" if key is None else key
                                        vector_input_values = "NA" if values is None else values
                            if vec_vuln.tag == 'source':
                                for vec_source in vec_vuln:
                                    for key, values in vec_source.attrib.items():
                                        vector_source_key = "NA" if key is None else key
                                        # BUG FIX: was `values in None` -> TypeError.
                                        vector_source_values = "NA" if values is None else values
                    if vuln.tag == "page":
                        for page_body in vuln:
                            if page_body.tag == "body":
                                page_body_data = "NA" if page_body.text is None else page_body.text
                        # request/response live under the page element.
                        for req in vuln:
                            if req.tag == 'request':
                                for req_dat in req:
                                    if req_dat.tag == 'url':
                                        request_url = "NA" if req_dat.text is None else req_dat.text
                                    if req_dat.tag == 'method':
                                        request_method = "NA" if req_dat.text is None else req_dat.text
                                    if req_dat.tag == 'raw':
                                        request_raw = "NA" if req_dat.text is None else req_dat.text
                            if req.tag == 'response':
                                for res_dat in req:
                                    if res_dat.tag == 'ip_address':
                                        response_ip = "NA" if res_dat.text is None else res_dat.text
                                    if res_dat.tag == 'raw_headers':
                                        response_raw_headers = "NA" if res_dat.text is None else res_dat.text
                    if vuln.tag == "proof":
                        proof = "NA" if vuln.text is None else vuln.text
                    # Normalise severity + colour; anything unrecognised is Low.
                    if severity == "high":
                        vul_col = "danger"
                        severity = "High"
                    elif severity == 'medium':
                        vul_col = "warning"
                        severity = "Medium"
                    elif severity == 'low':
                        severity = "Low"
                        vul_col = "info"
                    else:
                        severity = "Low"
                        vul_col = "info"
                    for extra_data in vuln:
                        for extra_vuln in extra_data:
                            if extra_vuln.tag == "url":
                                url = "NA" if extra_vuln.text is None else extra_vuln.text
                            if extra_vuln.tag == "action":
                                action = "NA" if extra_vuln.text is None else extra_vuln.text
                            if extra_vuln.tag == "body":
                                body = "NA" if extra_vuln.text is None else extra_vuln.text
                    # Dedup per user on name+url+severity.
                    dup_data = name + url + severity
                    duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
                    match_dup = arachni_scan_result_db.objects.filter(
                        username=username,
                        dup_hash=duplicate_hash).values('dup_hash').distinct()
                    global false_positive
                    if len(match_dup) == 0:
                        duplicate_vuln = 'No'
                        false_p = arachni_scan_result_db.objects.filter(
                            username=username, false_positive_hash=duplicate_hash)
                        false_positive = 'Yes' if len(false_p) == 1 else 'No'
                        vuln_status = 'Open'
                    else:
                        duplicate_vuln = 'Yes'
                        false_positive = 'Duplicate'
                        vuln_status = 'Duplicate'
                    arachni_scan_result_db(
                        vuln_id=vuln_id,
                        scan_id=scan_id,
                        vuln_color=vul_col,
                        project_id=project_id,
                        name=name,
                        description=description,
                        remedy_guidance=remedy_guidance,
                        severity=severity,
                        proof=proof,
                        url=url,
                        action=action,
                        body=body,
                        ref_key=ref_key,
                        ref_value=ref_values,
                        vector_input_values=vector_input_values,
                        vector_source_key=vector_source_key,
                        vector_source_values=vector_source_values,
                        page_body_data=page_body_data,
                        request_url=request_url,
                        request_method=request_method,
                        request_raw=request_raw,
                        response_ip=response_ip,
                        response_raw_headers=response_raw_headers,
                        vector_input_key=vector_input_key,
                        false_positive=false_positive,
                        vuln_status=vuln_status,
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        username=username,
                    ).save()
    # Refresh per-scan totals (false positives excluded) and notify.
    arachni_all_vul = arachni_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = arachni_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_high = len(arachni_all_vul.filter(severity="High"))
    total_medium = len(arachni_all_vul.filter(severity="Medium"))
    total_low = len(arachni_all_vul.filter(severity="Low"))
    total_info = len(arachni_all_vul.filter(severity="Informational"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    total_vul = total_high + total_medium + total_low + total_info
    arachni_scan_db.objects.filter(scan_id=scan_id, username=username).update(
        url=target_url,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate,
    )
    subject = 'Archery Tool Scan Status - Arachni Report Uploaded'
    message = 'Arachni Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (url, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def updated_nessus_parser(root, project_id, scan_id, username):
    """Parse a Nessus XML report and store one row per ReportItem.

    For every ReportHost a nessus_scan_db row is created, then each of its
    ReportItems is saved to nessus_report_db with duplicate/false-positive
    bookkeeping. Afterwards 'NA' placeholder rows are purged, per-host totals
    are updated, and a notification e-mail is sent.

    :param root: parsed Nessus XML root element
    :param project_id: project the findings belong to
    :param scan_id: only used in the e-mail body; DB rows use the host IP
                    as their scan_id (see note below)
    :param username: owner; scopes duplicate / false-positive lookups
    :return: None
    """
    # NOTE(review): module-level globals, presumably shared with other
    # parsers — confirm before localizing any of them.
    global agent, description, fname, \
        plugin_modification_date, plugin_name, \
        plugin_publication_date, plugin_type, \
        risk_factor, script_version, solution, \
        synopsis, plugin_output, see_also, scan_ip, \
        pluginName, pluginID, protocol, severity, \
        svc_name, pluginFamily, port, vuln_color
    for data in root:
        for reportHost in data.iter('ReportHost'):
            print("reportHost = " + str(reportHost.attrib))
            try:
                # Last attribute value wins; for ReportHost this is taken to
                # be the host address — TODO confirm against a real report.
                for key, value in reportHost.items():
                    scan_ip = value
                    print("IP = " + str(scan_ip))
            except:
                # NOTE(review): bare except — any failure silently skips host.
                continue
            scan_status = "100"
            date_time = datetime.datetime.now()
            # NOTE(review): scan_id is stored as the host IP, and every later
            # filter uses scan_id=scan_ip — deliberate(?) aliasing; confirm
            # before changing.
            scan_dump = nessus_scan_db(scan_ip=scan_ip,
                                       scan_id=scan_ip,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
            scan_dump.save()
            for ReportItem in reportHost.iter('ReportItem'):
                # Pull the interesting ReportItem attributes.
                for key, value in ReportItem.attrib.items():
                    if key == 'pluginName':
                        pluginName = value
                    if key == 'pluginID':
                        pluginID = value
                    if key == 'protocol':
                        protocol = value
                    if key == 'severity':
                        severity = value
                    if key == 'svc_name':
                        svc_name = value
                    if key == 'pluginFamily':
                        pluginFamily = value
                    if key == 'port':
                        port = value
                # Child elements default to "NA" when absent
                # (find() returns None, so .text raises AttributeError).
                try:
                    agent = ReportItem.find('agent').text
                except:
                    agent = "NA"
                try:
                    description = ReportItem.find('description').text
                except:
                    description = "NA"
                try:
                    fname = ReportItem.find('fname').text
                except:
                    fname = "NA"
                try:
                    plugin_modification_date = ReportItem.find(
                        'plugin_modification_date').text
                except:
                    plugin_modification_date = "NA"
                try:
                    plugin_name = ReportItem.find('plugin_name').text
                except:
                    plugin_name = "NA"
                try:
                    plugin_publication_date = ReportItem.find(
                        'plugin_publication_date').text
                except:
                    plugin_publication_date = "NA"
                try:
                    plugin_type = ReportItem.find('plugin_type').text
                except:
                    plugin_type = "NA"
                try:
                    risk_factor = ReportItem.find('risk_factor').text
                except:
                    risk_factor = "NA"
                try:
                    script_version = ReportItem.find('script_version').text
                except:
                    script_version = "NA"
                try:
                    see_also = ReportItem.find('see_also').text
                except:
                    see_also = "NA"
                try:
                    solution = ReportItem.find('solution').text
                except:
                    solution = "NA"
                try:
                    synopsis = ReportItem.find('synopsis').text
                except:
                    synopsis = "NA"
                try:
                    plugin_output = ReportItem.find('plugin_output').text
                except:
                    plugin_output = "NA"
                vul_id = uuid.uuid4()
                # Duplicate detection across scans for this user.
                dup_data = scan_ip + plugin_name + severity + port
                duplicate_hash = hashlib.sha256(
                    dup_data.encode('utf-8')).hexdigest()
                match_dup = nessus_report_db.objects.filter(
                    username=username,
                    dup_hash=duplicate_hash).values('dup_hash').distinct()
                lenth_match = len(match_dup)
                # Nessus severity codes 0-4 -> bootstrap colour classes.
                if severity == '0':
                    vuln_color = 'info'
                if severity == '1':
                    vuln_color = 'info'
                if severity == '2':
                    vuln_color = 'warning'
                if severity == '3':
                    vuln_color = 'danger'
                if severity == '4':
                    vuln_color = 'danger'
                if lenth_match == 1:
                    duplicate_vuln = 'Yes'
                elif lenth_match == 0:
                    duplicate_vuln = 'No'
                else:
                    # NOTE(review): >1 prior match stored as the string 'None'.
                    duplicate_vuln = 'None'
                global false_positive
                false_p = nessus_report_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                fp_lenth_match = len(false_p)
                if fp_lenth_match == 1:
                    false_positive = 'Yes'
                else:
                    false_positive = 'No'
                # A risk factor of 'None' (the string) means informational.
                if risk_factor == 'None':
                    risk_factor = 'Informational'
                all_data_save = nessus_report_db(
                    project_id=project_id,
                    scan_id=scan_ip,
                    scan_ip=scan_ip,
                    vul_id=vul_id,
                    agent=agent,
                    description=description,
                    fname=fname,
                    plugin_modification_date=plugin_modification_date,
                    plugin_name=plugin_name,
                    plugin_publication_date=plugin_publication_date,
                    plugin_type=plugin_type,
                    risk_factor=risk_factor,
                    script_version=script_version,
                    see_also=see_also,
                    solution=solution,
                    synopsis=synopsis,
                    plugin_output=plugin_output,
                    pluginName=pluginName,
                    pluginID=pluginID,
                    protocol=protocol,
                    severity=severity,
                    svc_name=svc_name,
                    pluginFamily=pluginFamily,
                    port=port,
                    false_positive=false_positive,
                    vuln_status='Open',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    severity_color=vuln_color,
                    username=username,
                )
                all_data_save.save()
                print("RESULTS = " + str(all_data_save.scan_id))
    # Rows whose plugin_name never resolved are placeholder noise — drop them.
    del_na = nessus_report_db.objects.filter(username=username,
                                             plugin_name='NA')
    del_na.delete()
    # NOTE(review): scan_ip still holds the LAST host seen, so these totals
    # and the update below cover only that host — confirm intent.
    ov_all_vul = nessus_report_db.objects.filter(
        username=username, scan_id=scan_ip).order_by('scan_ip')
    total_vul = len(ov_all_vul)
    total_critical = len(ov_all_vul.filter(risk_factor="Critical"))
    total_high = len(ov_all_vul.filter(risk_factor="High"))
    total_medium = len(ov_all_vul.filter(risk_factor="Medium"))
    total_low = len(ov_all_vul.filter(risk_factor="Low"))
    total_info = len(
        ov_all_vul.filter(risk_factor="Informational"))
    total_duplicate = len(ov_all_vul.filter(vuln_duplicate='Yes'))
    nessus_scan_db.objects.filter(username=username, scan_id=scan_ip) \
        .update(total_vul=total_vul,
                critical_total=total_critical,
                high_total=total_high,
                medium_total=total_medium,
                low_total=total_low,
                info_total=total_info,
                total_dup=total_duplicate,
                scan_ip=scan_ip, )
    subject = 'Archery Tool Scan Status - Nessus Report Uploaded'
    message = 'Nessus Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def nodejsscan_report_json(data, project_id, scan_id, username):
    """Parse a NodeJsScan JSON report and persist its findings.

    Fixes over the previous revision:
      * the rule-name -> severity mapping file was re-opened and re-parsed
        for every finding; it is now loaded once
      * the notification subject/message wrongly said "Trivy"
      * an unknown rule title left ``severity``/``vul_col`` unbound
        (NameError) or leaking the previous finding's values; unknown rules
        now default to Low / 'info'
      * dropped the per-finding debug prints of the dedup hash

    :param data: parsed NodeJsScan JSON (expects a 'sec_issues' mapping)
    :param project_id: project the findings belong to
    :param scan_id: scan the findings belong to
    :param username: owner; scopes duplicate / false-positive lookups
    :return: None; rows go to nodejsscan_scan_results_db, the summary to
             nodejsscan_scan_db, then a notification e-mail is sent
    """
    date_time = datetime.now()
    # Load the static rule-name -> severity mapping once per call.
    with open(
            'scanners/scanner_parser/staticscanner_parser/nodejsscan_vuln.json'
    ) as f:
        severity_lookup = {v['name']: v['severity']
                           for v in json.load(f)['vuln']}
    for issue_type in data['sec_issues']:
        for vuln_dat in data['sec_issues'][issue_type]:
            title = vuln_dat['title']
            # Unknown rules default to Low rather than reusing stale state.
            severity = severity_lookup.get(title, 'Low')
            filename = vuln_dat['filename']
            path = vuln_dat['path']
            sha2 = vuln_dat['sha2']
            tag = vuln_dat['tag']
            description = vuln_dat['description']
            line = vuln_dat['line']
            lines = vuln_dat['lines']
            if severity == "High":
                vul_col = "danger"
            elif severity == 'Medium':
                vul_col = "warning"
            else:
                vul_col = "info"
            vul_id = uuid.uuid4()
            # Dedup per user on title + severity + file + line.
            dup_data = str(title) + str(severity) + str(filename) + str(line)
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()
            match_dup = nodejsscan_scan_results_db.objects.filter(
                username=username, dup_hash=duplicate_hash).values('dup_hash')
            if len(match_dup) == 0:
                duplicate_vuln = 'No'
                false_p = nodejsscan_scan_results_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                false_positive = 'Yes' if len(false_p) == 1 else 'No'
                vuln_status = 'Open'
            else:
                duplicate_vuln = 'Yes'
                false_positive = 'Duplicate'
                vuln_status = 'Duplicate'
            nodejsscan_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status=vuln_status,
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                title=title,
                filename=filename,
                severity=severity,
                path=path,
                sha2=sha2,
                tag=tag,
                description=description,
                line=line,
                lines=lines,
                username=username,
            ).save()
    # Refresh per-scan totals (false positives excluded) and notify.
    open_findings = nodejsscan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicates = nodejsscan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(open_findings)
    total_high = len(open_findings.filter(severity="High"))
    total_medium = len(open_findings.filter(severity="Medium"))
    total_low = len(open_findings.filter(severity="Low"))
    total_duplicate = len(duplicates.filter(vuln_duplicate='Yes'))
    nodejsscan_scan_db.objects.filter(username=username,
                                      scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    # BUG FIX: the notification previously claimed the scanner was "Trivy".
    subject = 'Archery Tool Scan Status - Nodejsscan Report Uploaded'
    message = 'Nodejsscan Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("Nodejsscan", total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id, username):
    """Parse an Acunetix XML report tree and persist its findings.

    Walks the report element tree, collecting scan-level metadata
    (Name, StartURL, timings, server fingerprint, ...) and per-item
    vulnerability fields, then stores one row per finding with
    duplicate / false-positive bookkeeping, updates the scan summary
    row and sends a notification e-mail.

    :param root: parsed Acunetix XML ElementTree root
    :param project_id: project identifier
    :param scan_id: scan identifier
    :param username: owner of the scan results
    :return: None
    """
    date_time = datetime.now()
    # NOTE(review): all parse state is kept in module-level globals, so
    # fields missing from one ReportItem silently inherit the previous
    # item's values (and ScanResponsive is listed twice).
    global ScanName, \
        ScanShortName, ScanStartURL, \
        ScanStartTime, \
        ScanFinishTime, \
        ScanScanTime, \
        ScanAborted, \
        ScanResponsive, \
        ScanResponsive, \
        ScanBanner, \
        ScanOs, \
        ScanWebServer, \
        ScanTechnologies, \
        ScanCrawler, \
        ScanReportItems, \
        VulnName, \
        VulnModuleName, \
        VulnDetails, \
        VulnAffects, \
        VulnParameter, \
        VulnAOP_SourceFile, \
        VulnAOP_SourceLine, \
        VulnAOP_Additional, \
        VulnIsFalsePositive, \
        VulnSeverity, \
        VulnType, \
        VulnImpact, \
        VulnDescription, \
        VulnDetailedInformation, \
        VulnRecommendation, \
        VulnTechnicalDetails, \
        VulnCWEList, \
        VulnCVEList, \
        VulnCVSS, \
        VulnCVSS3, \
        VulnReferences, \
        vul_col, \
        risk, \
        UriName, \
        VulnUrl, \
        FullURL
    for scan in root:
        for reports in scan:
            # Scan-level metadata elements.
            if reports.tag == 'Name':
                ScanName = reports.text
            if reports.tag == 'ShortName':
                ScanShortName = reports.text
            if reports.tag == 'StartURL':
                ScanStartURL = reports.text
            if reports.tag == 'StartTime':
                ScanStartTime = reports.text
            if reports.tag == 'FinishTime':
                ScanFinishTime = reports.text
            if reports.tag == 'ScanTime':
                ScanScanTime = reports.text
            if reports.tag == 'Aborted':
                ScanAborted = reports.text
            if reports.tag == 'Responsive':
                ScanResponsive = reports.text
            if reports.tag == 'Banner':
                ScanBanner = reports.text
            if reports.tag == 'Os':
                ScanOs = reports.text
            if reports.tag == 'WebServer':
                ScanWebServer = reports.text
            if reports.tag == 'Technologies':
                ScanTechnologies = reports.text
            if reports.tag == 'Crawler':
                ScanCrawler = reports.text
            if reports.tag == 'ReportItems':
                ScanReportItems = reports.text
            # NOTE(review): duplicated check — same condition as the line
            # above; harmless but redundant.
            if reports.tag == 'ReportItems':
                ScanReportItems = reports.text
            for report_item in reports:
                for ReportItem in report_item:
                    # print ReportItem
                    # print(ReportItem.tag)
                    # Per-vulnerability fields of one ReportItem.
                    if ReportItem.tag == 'Name':
                        VulnName = ReportItem.text
                    if ReportItem.tag == 'ModuleName':
                        VulnModuleName = ReportItem.text
                    if ReportItem.tag == 'Details':
                        VulnDetails = ReportItem.text
                    if ReportItem.tag == 'Affects':
                        # Affects is relative; prefix the scan start URL.
                        VulnAffects = ScanStartURL + ReportItem.text
                    if ReportItem.tag == 'Parameter':
                        VulnParameter = ReportItem.text
                    if ReportItem.tag == 'AOP_SourceFile':
                        VulnAOP_SourceFile = ReportItem.text
                    if ReportItem.tag == 'AOP_SourceLine':
                        VulnAOP_SourceLine = ReportItem.text
                    if ReportItem.tag == 'AOP_Additional':
                        VulnAOP_Additional = ReportItem.text
                    if ReportItem.tag == 'IsFalsePositive':
                        VulnIsFalsePositive = ReportItem.text
                    if ReportItem.tag == 'Severity':
                        VulnSeverity = ReportItem.text
                    if ReportItem.tag == 'Type':
                        VulnType = ReportItem.text
                    if ReportItem.tag == 'Impact':
                        VulnImpact = ReportItem.text
                    if ReportItem.tag == 'Description':
                        VulnDescription = ReportItem.text
                    if ReportItem.tag == 'DetailedInformation':
                        VulnDetailedInformation = ReportItem.text
                    if ReportItem.tag == 'Recommendation':
                        VulnRecommendation = ReportItem.text
                    if ReportItem.tag == 'TechnicalDetails':
                        VulnTechnicalDetails = ReportItem.text
                    if ReportItem.tag == 'CWEList':
                        VulnCWEList = ReportItem.text
                    if ReportItem.tag == 'CVEList':
                        VulnCVEList = ReportItem.text
                    if ReportItem.tag == 'CVSS':
                        VulnCVSS = ReportItem.text
                    if ReportItem.tag == 'CVSS3':
                        VulnCVSS3 = ReportItem.text
                    if ReportItem.tag == 'References':
                        VulnReferences = ReportItem.text
                    # Map Acunetix lowercase severities onto dashboard
                    # risk labels and bootstrap colour classes.
                    if VulnSeverity == "high":
                        vul_col = "danger"
                        risk = "High"
                    elif VulnSeverity == 'medium':
                        vul_col = "warning"
                        risk = "Medium"
                    elif VulnSeverity == 'low':
                        vul_col = "info"
                        risk = "Low"
                    else:
                        vul_col = "info"
                        risk = "Low"
                    if VulnName is None:
                        # Item carries no vulnerability name yet; skip
                        # persisting (debug print retained from original).
                        print(VulnName)
                    else:
                        # Resolve the affected URL from the SiteFile
                        # entries of the whole report tree.
                        for c_url in root.findall('.//SiteFile'):
                            for vuln_url in c_url:
                                if vuln_url.tag == 'Name':
                                    UriName = vuln_url.text
                                if vuln_url.tag == 'URL':
                                    VulnUrl = vuln_url.text
                                if vuln_url.tag == 'FullURL':
                                    FullURL = vuln_url.text
                        vuln_id = uuid.uuid4()
                        # Duplicate key: name + full URL + risk level.
                        dup_data = VulnName + FullURL + risk
                        duplicate_hash = hashlib.sha256(
                            dup_data.encode('utf-8')).hexdigest()
                        match_dup = acunetix_scan_result_db.objects.filter(
                            username=username,
                            dup_hash=duplicate_hash).values('dup_hash').distinct()
                        lenth_match = len(match_dup)
                        if lenth_match == 0:
                            duplicate_vuln = 'No'
                            # Previously-marked false positives carry over
                            # by hash.
                            false_p = acunetix_scan_result_db.objects.filter(
                                username=username,
                                false_positive_hash=duplicate_hash)
                            fp_lenth_match = len(false_p)
                            if fp_lenth_match == 1:
                                false_positive = 'Yes'
                            else:
                                false_positive = 'No'
                            dump_data = acunetix_scan_result_db(
                                username=username,
                                scan_id=scan_id,
                                project_id=project_id,
                                vuln_id=vuln_id,
                                date_time=date_time,
                                false_positive=false_positive,
                                vuln_color=vul_col,
                                vuln_status='Open',
                                dup_hash=duplicate_hash,
                                vuln_duplicate=duplicate_vuln,
                                ScanName=ScanName,
                                ScanShortName=ScanShortName,
                                ScanStartURL=ScanStartURL,
                                ScanStartTime=ScanStartTime,
                                ScanFinishTime=ScanFinishTime,
                                ScanScanTime=ScanScanTime,
                                ScanAborted=ScanAborted,
                                ScanResponsive=ScanResponsive,
                                ScanBanner=ScanBanner,
                                ScanOs=ScanOs,
                                ScanWebServer=ScanWebServer,
                                ScanTechnologies=ScanTechnologies,
                                ScanCrawler=ScanCrawler,
                                ScanReportItems=ScanReportItems,
                                VulnName=VulnName,
                                VulnModuleName=VulnModuleName,
                                VulnDetails=VulnDetails,
                                VulnAffects=VulnAffects,
                                VulnParameter=VulnParameter,
                                VulnAOP_SourceFile=VulnAOP_SourceFile,
                                VulnAOP_SourceLine=VulnAOP_SourceLine,
                                VulnAOP_Additional=VulnAOP_Additional,
                                VulnIsFalsePositive=VulnIsFalsePositive,
                                VulnSeverity=risk,
                                VulnType=VulnType,
                                VulnImpact=VulnImpact,
                                VulnDescription=VulnDescription,
                                VulnDetailedInformation=VulnDetailedInformation,
                                VulnRecommendation=VulnRecommendation,
                                VulnTechnicalDetails=VulnTechnicalDetails,
                                VulnCWEList=VulnCWEList,
                                VulnCVEList=VulnCVEList,
                                VulnCVSS=VulnCVSS,
                                VulnCVSS3=VulnCVSS3,
                                VulnReferences=VulnReferences,
                                UriName=UriName,
                                VulnUrl=VulnUrl,
                                VulnFullUrl=FullURL)
                            dump_data.save()
                        else:
                            duplicate_vuln = 'Yes'
                            dump_data = acunetix_scan_result_db(
                                username=username,
                                scan_id=scan_id,
                                project_id=project_id,
                                vuln_id=vuln_id,
                                date_time=date_time,
                                false_positive='Duplicate',
                                vuln_color=vul_col,
                                vuln_status='Duplicate',
                                dup_hash=duplicate_hash,
                                vuln_duplicate=duplicate_vuln,
                                ScanName=ScanName,
                                ScanShortName=ScanShortName,
                                ScanStartURL=ScanStartURL,
                                ScanStartTime=ScanStartTime,
                                ScanFinishTime=ScanFinishTime,
                                ScanScanTime=ScanScanTime,
                                ScanAborted=ScanAborted,
                                ScanResponsive=ScanResponsive,
                                ScanBanner=ScanBanner,
                                ScanOs=ScanOs,
                                ScanWebServer=ScanWebServer,
                                ScanTechnologies=ScanTechnologies,
                                ScanCrawler=ScanCrawler,
                                ScanReportItems=ScanReportItems,
                                VulnName=VulnName,
                                VulnModuleName=VulnModuleName,
                                VulnDetails=VulnDetails,
                                VulnAffects=VulnAffects,
                                VulnParameter=VulnParameter,
                                VulnAOP_SourceFile=VulnAOP_SourceFile,
                                VulnAOP_SourceLine=VulnAOP_SourceLine,
                                VulnAOP_Additional=VulnAOP_Additional,
                                VulnIsFalsePositive=VulnIsFalsePositive,
                                VulnSeverity=risk,
                                VulnType=VulnType,
                                VulnImpact=VulnImpact,
                                VulnDescription=VulnDescription,
                                VulnDetailedInformation=VulnDetailedInformation,
                                VulnRecommendation=VulnRecommendation,
                                VulnTechnicalDetails=VulnTechnicalDetails,
                                VulnCWEList=VulnCWEList,
                                VulnCVEList=VulnCVEList,
                                VulnCVSS=VulnCVSS,
                                VulnCVSS3=VulnCVSS3,
                                VulnReferences=VulnReferences,
                                UriName=UriName,
                                VulnUrl=VulnUrl,
                                VulnFullUrl=FullURL)
                            dump_data.save()
    # Summary counters: non-false-positive findings by risk level, plus
    # a separate duplicate count.
    acunetix_all_vul = acunetix_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = acunetix_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_high = len(acunetix_all_vul.filter(VulnSeverity="High"))
    total_medium = len(acunetix_all_vul.filter(VulnSeverity="Medium"))
    total_low = len(acunetix_all_vul.filter(VulnSeverity="Low"))
    total_info = len(acunetix_all_vul.filter(VulnSeverity="Informational"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    total_vul = total_high + total_medium + total_low + total_info
    # cal_total_vuln = total_high + total_medium + total_low + total_info
    acunetix_scan_db.objects.filter(username=username, scan_id=scan_id) \
        .update(total_vul=total_vul,
                date_time=date_time,
                high_vul=total_high,
                medium_vul=total_medium,
                low_vul=total_low,
                info_vul=total_info,
                total_dup=total_duplicate,
                url=ScanStartURL
                )
    subject = 'Archery Tool Scan Status - Acunetix Report Uploaded'
    message = 'Acunetix Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (ScanStartURL, total_vul,
                                         total_high, total_medium,
                                         total_low)
    email_sch_notify(subject=subject, message=message)
def brakeman_report_json(data, project_id, scan_id, username):
    """Parse a Brakeman JSON report and persist its findings.

    Expects standard Brakeman output: a dict whose 'warnings' list holds
    entries with warning_type, warning_code, fingerprint, check_name,
    message, file, line, link, code, render_path and confidence keys.

    :param data: parsed Brakeman JSON report
    :param project_id: project identifier
    :param scan_id: scan identifier
    :param username: owner of the scan results
    :return: None
    """
    date_time = datetime.now()
    vul_col = ''
    for vuln_data in data['warnings']:
        # Missing keys map to "Not Found" instead of aborting the import
        # (dict.get replaces the previous try/except-per-key blocks).
        name = vuln_data.get('warning_type', "Not Found")
        warning_code = vuln_data.get('warning_code', "Not Found")
        fingerprint = vuln_data.get('fingerprint', "Not Found")
        description = vuln_data.get('message', "Not Found")
        check_name = vuln_data.get('check_name', "Not Found")
        severity = vuln_data.get('confidence', "Not Found")
        if severity == 'Weak':
            # Brakeman's "Weak" confidence is surfaced as Low severity.
            severity = 'Low'
        file = vuln_data.get('file', "Not Found")
        line = vuln_data.get('line', "Not Found")
        link = vuln_data.get('link', "Not Found")
        code = vuln_data.get('code', "Not Found")
        render_path = vuln_data.get('render_path', "Not Found")
        # Normalise severity labels and pick the dashboard colour class.
        if severity == "Critical":
            severity = 'High'
            vul_col = "danger"
        if severity == "High":
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        elif severity == 'Unknown':
            severity = "Low"
            vul_col = "info"
        elif severity == 'Everything else':
            severity = "Low"
            vul_col = "info"
        vul_id = uuid.uuid4()
        # Duplicate key: warning type + severity + file.
        dup_data = str(name) + str(severity) + str(file)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = brakeman_scan_results_db.objects.filter(
            username=username,
            dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 0:
            duplicate_vuln = 'No'
            # A stored false-positive hash marks re-imported findings
            # as false positives automatically.
            false_p = brakeman_scan_results_db.objects.filter(
                username=username,
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = brakeman_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                username=username,
                name=name,
                warning_code=warning_code,
                description=description,
                severity=severity,
                file=file,
                check_name=check_name,
                fingerprint=fingerprint,
                line=line,
                code=code,
                render_path=render_path,
                link=link,
            )
            save_all.save()
        else:
            duplicate_vuln = 'Yes'
            save_all = brakeman_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                username=username,
                name=name,
                warning_code=warning_code,
                description=description,
                severity=severity,
                file=file,
                check_name=check_name,
                fingerprint=fingerprint,
                line=line,
                code=code,
                render_path=render_path,
                link=link,
            )
            save_all.save()

    # Scan-level totals exclude false positives and duplicates.
    all_findbugs_data = brakeman_scan_results_db.objects.filter(
        username=username, scan_id=scan_id,
        false_positive='No', vuln_duplicate='No')
    duplicate_count = brakeman_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    brakeman_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - brakeman Report Uploaded'
    # Fixed: the old message formatted an undefined name `Target`
    # (copied from the trivy parser), raising NameError after every scan.
    message = 'brakeman Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("Brakeman", total_vul,
                                         total_high, total_medium,
                                         total_low)
    email_sch_notify(subject=subject, message=message)
def inspec_report_json(data, project_id, scan_id, username):
    """Parse a Chef InSpec JSON report and persist its control results.

    :param data: parsed InSpec JSON report (expects a 'profiles' key)
    :param project_id: project identifier
    :param scan_id: scan identifier
    :param username: owner of the scan results
    :return: None
    """
    vul_col = 'info'
    status = ''
    for key, value in data.items():
        if key != 'profiles':
            continue
        for profile in value:
            for con in profile['controls']:
                controls_id = con['id']
                controls_title = con['title']
                controls_desc = con['desc']
                try:
                    controls_descriptions = con['descriptions'][0]['data']
                except Exception:
                    # Fall back to the plain description when the
                    # structured form is missing.
                    controls_descriptions = controls_desc
                controls_impact = con['impact']
                controls_refs = con['refs']
                tags = con.get('tags', {})
                try:
                    controls_tags_severity = tags['severity']
                except Exception:
                    controls_tags_severity = "INFO"
                # BUG FIX: the original re-read tags['severity'] here;
                # the cis_id field must come from the cis_id tag.
                try:
                    controls_tags_cis_id = tags['cis_id']
                except Exception:
                    controls_tags_cis_id = "None"
                try:
                    controls_tags_cis_control = tags['cis_control']
                except Exception:
                    controls_tags_cis_control = "None"
                try:
                    controls_tags_cis_level = tags['cis_level']
                except Exception:
                    controls_tags_cis_level = "None"
                try:
                    controls_tags_audit = tags['audit text']
                except Exception:
                    controls_tags_audit = "None"
                try:
                    controls_tags_fix = tags['fix']
                except Exception:
                    controls_tags_fix = "None"
                controls_code = con['code']
                controls_source_location = con['source_location']['line']
                for res in con['results']:
                    controls_results_status = res['status']
                    controls_results_code_desc = res['code_desc']
                    controls_results_run_time = res['run_time']
                    controls_results_start_time = res['start_time']
                    # Reset per result: the old global leaked the previous
                    # result's message into entries that had none.
                    controls_results_message = res.get('message', '')
                    if controls_results_status == "failed":
                        vul_col = "danger"
                        status = "Failed"
                    elif controls_results_status == 'passed':
                        vul_col = "warning"
                        status = "Passed"
                    elif controls_results_status == 'skipped':
                        vul_col = "info"
                        status = "Skipped"
                    vul_id = uuid.uuid4()
                    save_all = inspec_scan_results_db(
                        scan_id=scan_id,
                        project_id=project_id,
                        vul_col=vul_col,
                        vuln_id=vul_id,
                        controls_id=controls_id,
                        controls_title=controls_title,
                        controls_desc=controls_desc,
                        controls_descriptions=controls_descriptions,
                        controls_impact=controls_impact,
                        controls_refs=controls_refs,
                        controls_tags_severity=controls_tags_severity,
                        controls_tags_cis_id=controls_tags_cis_id,
                        controls_tags_cis_control=controls_tags_cis_control,
                        controls_tags_cis_level=controls_tags_cis_level,
                        controls_tags_audit=controls_tags_audit,
                        controls_tags_fix=controls_tags_fix,
                        controls_code=controls_code,
                        controls_source_location=controls_source_location,
                        controls_results_status=status,
                        controls_results_code_desc=controls_results_code_desc,
                        controls_results_run_time=controls_results_run_time,
                        controls_results_start_time=controls_results_start_time,
                        controls_results_message=controls_results_message,
                        username=username,
                    )
                    save_all.save()

    # Scan summary: pass/fail/skip counts for the dashboard.
    all_inspec_data = inspec_scan_results_db.objects.filter(
        username=username, scan_id=scan_id)
    total_vul = len(all_inspec_data)
    inspec_failed = len(all_inspec_data.filter(controls_results_status="Failed"))
    inspec_passed = len(all_inspec_data.filter(controls_results_status="Passed"))
    inspec_skipped = len(all_inspec_data.filter(controls_results_status="Skipped"))
    total_duplicate = len(all_inspec_data.filter(vuln_duplicate='Yes'))
    inspec_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vuln=total_vul,
        inspec_failed=inspec_failed,
        inspec_passed=inspec_passed,
        inspec_skipped=inspec_skipped,
        total_dup=total_duplicate
    )
    subject = 'Archery Tool Scan Status - Inspec Report Uploaded'
    # Fixed notification: the original labelled the Passed slot "failed"
    # and passed inspec_failed twice, never reporting the pass count.
    message = 'Inspec Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>Failed: %s <br>' \
              'Passed: %s <br>Skipped %s' % (scan_id, total_vul,
                                             inspec_failed,
                                             inspec_passed,
                                             inspec_skipped)
    email_sch_notify(subject=subject, message=message)
def _nessus_item_text(report_item, tag):
    """Text of a ReportItem child element, or 'NA' when missing/empty."""
    try:
        return report_item.find(tag).text
    except Exception:
        return "NA"


def updated_nessus_parser(root, project_id, scan_id, username):
    """Parse a Nessus (.nessus v2) XML report tree and persist findings.

    Creates one result row per ReportItem, per-target summary rows, and
    updates the overall scan record before sending a notification.

    :param root: parsed Nessus XML ElementTree root
    :param project_id: project identifier
    :param scan_id: scan identifier
    :param username: owner of the scan results
    :return: None
    """
    # Fixed: this function called datetime.datetime.now(); every sibling
    # parser in this file uses datetime.now() (from datetime import
    # datetime), so the qualified form raised AttributeError.
    date_time = datetime.now()
    # Defaults so the Report-level save cannot reference unset names on
    # the first iteration (the old code used module globals).
    target = 'NA'
    report_name = 'NA'
    for data in root:
        if data.tag == 'Report':
            report_name = data.attrib['name']
            scan_status = "100"
            scan_dump = nessus_scan_db(report_name=report_name,
                                       target=target,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
            scan_dump.save()
        for reportHost in data.iter('ReportHost'):
            try:
                # The host's name attribute is the scan target.
                for key, value in reportHost.items():
                    target = value
            except Exception:
                continue
            for ReportItem in reportHost.iter('ReportItem'):
                # Attribute fields of the finding.
                for key, value in ReportItem.attrib.items():
                    if key == 'pluginName':
                        pluginName = value
                    if key == 'pluginID':
                        pluginID = value
                    if key == 'protocol':
                        protocol = value
                    if key == 'severity':
                        severity = value
                    if key == 'svc_name':
                        svc_name = value
                    if key == 'pluginFamily':
                        pluginFamily = value
                    if key == 'port':
                        port = value
                # Child-element fields; missing ones become 'NA'.
                agent = _nessus_item_text(ReportItem, 'agent')
                description = _nessus_item_text(ReportItem, 'description')
                fname = _nessus_item_text(ReportItem, 'fname')
                plugin_modification_date = _nessus_item_text(
                    ReportItem, 'plugin_modification_date')
                plugin_name = _nessus_item_text(ReportItem, 'plugin_name')
                plugin_publication_date = _nessus_item_text(
                    ReportItem, 'plugin_publication_date')
                plugin_type = _nessus_item_text(ReportItem, 'plugin_type')
                risk_factor = _nessus_item_text(ReportItem, 'risk_factor')
                script_version = _nessus_item_text(ReportItem, 'script_version')
                see_also = _nessus_item_text(ReportItem, 'see_also')
                solution = _nessus_item_text(ReportItem, 'solution')
                synopsis = _nessus_item_text(ReportItem, 'synopsis')
                plugin_output = _nessus_item_text(ReportItem, 'plugin_output')
                vuln_id = uuid.uuid4()
                # Collapse Critical into High and pick a colour class.
                if risk_factor == 'Critical':
                    vuln_color = 'danger'
                    risk_factor = 'High'
                elif risk_factor == 'High':
                    vuln_color = 'danger'
                    risk_factor = 'High'
                elif risk_factor == 'Medium':
                    vuln_color = 'warning'
                    risk_factor = 'Medium'
                elif risk_factor == 'Low':
                    vuln_color = 'info'
                    risk_factor = 'Low'
                else:
                    risk_factor = 'Low'
                    vuln_color = 'info'
                # Duplicate key: target + plugin + severity + port.
                dup_data = target + plugin_name + severity + port
                duplicate_hash = hashlib.sha256(
                    dup_data.encode('utf-8')).hexdigest()
                match_dup = nessus_scan_results_db.objects.filter(
                    username=username,
                    dup_hash=duplicate_hash).values('dup_hash').distinct()
                lenth_match = len(match_dup)
                if lenth_match == 0:
                    duplicate_vuln = 'No'
                    false_p = nessus_scan_results_db.objects.filter(
                        username=username,
                        false_positive_hash=duplicate_hash)
                    fp_lenth_match = len(false_p)
                    if fp_lenth_match == 1:
                        false_positive = 'Yes'
                    else:
                        false_positive = 'No'
                    if risk_factor == 'None':
                        risk_factor = 'Low'
                    all_data_save = nessus_scan_results_db(
                        project_id=project_id,
                        report_name=report_name,
                        scan_id=scan_id,
                        date_time=date_time,
                        target=target,
                        vuln_id=vuln_id,
                        agent=agent,
                        description=description,
                        fname=fname,
                        plugin_modification_date=plugin_modification_date,
                        plugin_name=plugin_name,
                        plugin_publication_date=plugin_publication_date,
                        plugin_type=plugin_type,
                        risk_factor=risk_factor,
                        script_version=script_version,
                        see_also=see_also,
                        solution=solution,
                        synopsis=synopsis,
                        plugin_output=plugin_output,
                        pluginName=pluginName,
                        pluginID=pluginID,
                        protocol=protocol,
                        severity=severity,
                        svc_name=svc_name,
                        pluginFamily=pluginFamily,
                        port=port,
                        false_positive=false_positive,
                        vuln_status='Open',
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        severity_color=vuln_color,
                        username=username,
                    )
                    all_data_save.save()
                    # Drop placeholder rows that carried no plugin name.
                    del_na = nessus_scan_results_db.objects.filter(
                        username=username, plugin_name='NA')
                    del_na.delete()
                else:
                    duplicate_vuln = 'Yes'
                    all_data_save = nessus_scan_results_db(
                        project_id=project_id,
                        scan_id=scan_id,
                        target=target,
                        vuln_id=vuln_id,
                        date_time=date_time,
                        agent=agent,
                        description=description,
                        fname=fname,
                        plugin_modification_date=plugin_modification_date,
                        plugin_name=plugin_name,
                        plugin_publication_date=plugin_publication_date,
                        plugin_type=plugin_type,
                        risk_factor=risk_factor,
                        script_version=script_version,
                        see_also=see_also,
                        solution=solution,
                        synopsis=synopsis,
                        plugin_output=plugin_output,
                        pluginName=pluginName,
                        pluginID=pluginID,
                        protocol=protocol,
                        severity=severity,
                        svc_name=svc_name,
                        pluginFamily=pluginFamily,
                        port=port,
                        false_positive='Duplicate',
                        vuln_status='Duplicate',
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        severity_color=vuln_color,
                        username=username,
                    )
                    all_data_save.save()
                    del_na = nessus_scan_results_db.objects.filter(
                        username=username, plugin_name='NA')
                    del_na.delete()
            # Running duplicate total for this scan (final totals are
            # recomputed once more after all hosts are processed).
            ov_all_vul = nessus_scan_results_db.objects.filter(
                username=username, scan_id=scan_id)
            total_duplicate = len(ov_all_vul.filter(vuln_duplicate='Yes'))
            nessus_scan_db.objects.filter(username=username,
                                          scan_id=scan_id) \
                .update(
                total_dup=total_duplicate,
                target=target,
            )
            # Per-target summary row.
            target_filter = nessus_scan_results_db.objects.filter(
                username=username, scan_id=scan_id, target=target,
                vuln_status='Open', vuln_duplicate='No')
            duplicate_count = nessus_scan_results_db.objects.filter(
                username=username, scan_id=scan_id, target=target,
                vuln_duplicate='Yes')
            target_total_vuln = len(target_filter)
            target_total_high = len(target_filter.filter(risk_factor="High"))
            target_total_medium = len(
                target_filter.filter(risk_factor="Medium"))
            target_total_low = len(target_filter.filter(risk_factor="Low"))
            target_total_duplicate = len(
                duplicate_count.filter(vuln_duplicate='Yes'))
            target_scan_dump = nessus_targets_db(
                report_name=report_name,
                target=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                username=username,
                total_vuln=target_total_vuln,
                total_high=target_total_high,
                total_medium=target_total_medium,
                total_low=target_total_low,
                total_dup=target_total_duplicate,
            )
            target_scan_dump.save()

    # Final scan-level totals across all targets.
    ov_all_vul = nessus_scan_results_db.objects.filter(
        username=username, scan_id=scan_id,
        vuln_status='Open', vuln_duplicate='No')
    duplicate_count_report = nessus_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vuln = len(ov_all_vul)
    total_high = len(ov_all_vul.filter(risk_factor="High"))
    total_medium = len(ov_all_vul.filter(risk_factor="Medium"))
    total_low = len(ov_all_vul.filter(risk_factor="Low"))
    total_duplicate = len(
        duplicate_count_report.filter(vuln_duplicate='Yes'))
    nessus_scan_db.objects.filter(username=username, scan_id=scan_id) \
        .update(total_vuln=total_vuln,
                total_high=total_high,
                total_medium=total_medium,
                total_low=total_low,
                total_dup=total_duplicate,
                target=target,
                )
    subject = 'Archery Tool Scan Status - Nessus Report Uploaded'
    # Fixed: the old message referenced `total_vul`, which was never
    # assigned in this function (the computed name is total_vuln).
    message = 'Nessus Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vuln,
                                         total_high, total_medium,
                                         total_low)
    email_sch_notify(subject=subject, message=message)
def whitesource_report_json(data, project_id, scan_id, username):
    """Parse a WhiteSource JSON report and persist its findings.

    :param data: parsed WhiteSource JSON report (expects 'vulnerabilities')
    :param project_id: project identifier
    :param scan_id: scan identifier
    :param username: owner of the scan results
    :return: None
    """
    date_time = datetime.now()
    # Local defaults replace the old `global vul_col, project`: an
    # unmapped severity previously reused a stale colour (or raised
    # NameError), and an empty report crashed on `project` below.
    vul_col = 'info'
    project = 'Not Found'
    vuln = data['vulnerabilities']
    for issues in vuln:
        name = issues['name']
        severity = issues['severity']
        score = issues['score']
        cvss3_severity = issues['cvss3_severity']
        cvss3_score = issues['cvss3_score']
        publishDate = issues['publishDate']
        lastUpdatedDate = issues['lastUpdatedDate']
        scoreMetadataVector = issues['scoreMetadataVector']
        url = issues['url']
        description = issues['description']
        project = issues['project']
        product = issues['product']
        cvss3Attributes = issues['cvss3Attributes']
        library = issues['library']
        topFix = issues['topFix']
        # allFixes = issues['allFixes']
        filename = issues['library']['filename']
        sha1 = issues['library']['sha1']
        version = issues['library']['version']
        groupId = issues['library']['groupId']
        # Normalise lowercase severities onto dashboard labels/colours;
        # anything unrecognised falls back to the info colour unchanged.
        if severity == "high":
            severity = 'High'
            vul_col = "danger"
        elif severity == 'medium':
            severity = 'Medium'
            vul_col = "warning"
        elif severity == 'low':
            severity = 'Low'
            vul_col = "info"
        else:
            vul_col = "info"
        vul_id = uuid.uuid4()
        # Duplicate key: CVE name + severity + project.
        dup_data = str(name) + str(severity) + str(project)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = whitesource_scan_results_db.objects.filter(
            username=username,
            dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 0:
            duplicate_vuln = 'No'
            false_p = whitesource_scan_results_db.objects.filter(
                username=username,
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = whitesource_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                name=name,
                severity=severity,
                score=score,
                cvss3_severity=cvss3_severity,
                cvss3_score=cvss3_score,
                publishDate=publishDate,
                lastUpdatedDate=lastUpdatedDate,
                scoreMetadataVector=scoreMetadataVector,
                url=url,
                description=description,
                project=project,
                product=product,
                cvss3Attributes=cvss3Attributes,
                library=library,
                topFix=topFix,
                # allFixes=allFixes,
                filename=filename,
                sha1=sha1,
                version=version,
                groupId=groupId,
                username=username,
            )
            save_all.save()
        else:
            duplicate_vuln = 'Yes'
            save_all = whitesource_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                name=name,
                severity=severity,
                score=score,
                cvss3_severity=cvss3_severity,
                cvss3_score=cvss3_score,
                publishDate=publishDate,
                lastUpdatedDate=lastUpdatedDate,
                scoreMetadataVector=scoreMetadataVector,
                url=url,
                description=description,
                project=project,
                product=product,
                cvss3Attributes=cvss3Attributes,
                library=library,
                topFix=topFix,
                # allFixes=allFixes,
                filename=filename,
                sha1=sha1,
                version=version,
                groupId=groupId,
                username=username,
            )
            save_all.save()

    # Scan-level totals exclude false positives; duplicates counted apart.
    all_findbugs_data = whitesource_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = whitesource_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    whitesource_scan_db.objects.filter(
        username=username,
        scan_id=scan_id).update(project_name=project,
                                date_time=date_time,
                                total_vul=total_vul,
                                high_vul=total_high,
                                medium_vul=total_medium,
                                low_vul=total_low,
                                total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - whitesource Report Uploaded'
    message = 'whitesource Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("whitesource", total_vul,
                                         total_high, total_medium,
                                         total_low)
    email_sch_notify(subject=subject, message=message)
def trivy_report_json(data, project_id, scan_id):
    """Parse a Trivy JSON report and persist findings.

    Stores each vulnerability in ``trivy_scan_results_db`` (duplicates are
    detected via a sha256 hash of VulnerabilityID+Severity), then refreshes
    the per-scan totals on ``trivy_scan_db`` and sends a notification email.

    :param data: parsed Trivy JSON report (list of per-target result dicts)
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :return: None
    """
    vul_col = ''
    # NOTE(review): this nested loop re-walks the ENTIRE report once per
    # target, so every vulnerability list is processed len(data) times and
    # `Target` reflects only the current OUTER iteration's target — the
    # duplicate-hash check below is what prevents repeated rows. Looks
    # unintentional; confirm before trusting the Target association.
    for vuln_data in data:
        Target = vuln_data['Target']
        for vuln in data:
            vulnerbilities = vuln['Vulnerabilities']
            try:
                for dat_vuln in vulnerbilities:
                    # Every field independently falls back to "Not Found"
                    # when missing from the report entry.
                    try:
                        VulnerabilityID = dat_vuln['VulnerabilityID']
                    except Exception as e:
                        VulnerabilityID = "Not Found"
                        print(e)
                    try:
                        PkgName = dat_vuln['PkgName']
                    except Exception as e:
                        PkgName = "Not Found"
                        print(e)
                    try:
                        InstalledVersion = dat_vuln['InstalledVersion']
                    except Exception as e:
                        InstalledVersion = "Not Found"
                        print(e)
                    try:
                        FixedVersion = dat_vuln['FixedVersion']
                    except Exception as e:
                        FixedVersion = "Not Found"
                        print(e)
                    try:
                        Title = dat_vuln['Title']
                    except Exception as e:
                        Title = "Not Found"
                        print(e)
                    try:
                        Description = dat_vuln['Description']
                    except Exception as e:
                        Description = "Not Found"
                        print(e)
                    try:
                        Severity = dat_vuln['Severity']
                    except Exception as e:
                        Severity = "Not Found"
                        print(e)
                    try:
                        References = dat_vuln['References']
                    except Exception as e:
                        References = "Not Found"
                        print(e)
                    # Normalize Trivy's upper-case severities to Archery's
                    # conventions plus a Bootstrap colour class.
                    # NOTE(review): unmatched severities (e.g. CRITICAL,
                    # UNKNOWN) keep the vul_col from the previous iteration.
                    if Severity == "HIGH":
                        Severity = 'High'
                        vul_col = "danger"
                    elif Severity == 'MEDIUM':
                        Severity = 'Medium'
                        vul_col = "warning"
                    elif Severity == 'LOW':
                        Severity = 'Low'
                        vul_col = "info"
                    vul_id = uuid.uuid4()
                    dup_data = VulnerabilityID + Severity
                    duplicate_hash = hashlib.sha256(
                        dup_data.encode('utf-8')).hexdigest()
                    match_dup = trivy_scan_results_db.objects.filter(
                        dup_hash=duplicate_hash).values('dup_hash')
                    lenth_match = len(match_dup)
                    if lenth_match == 1:
                        duplicate_vuln = 'Yes'
                    elif lenth_match == 0:
                        duplicate_vuln = 'No'
                    else:
                        duplicate_vuln = 'None'
                    # A finding whose hash was previously marked as a false
                    # positive is re-flagged as such on re-import.
                    false_p = trivy_scan_results_db.objects.filter(
                        false_positive_hash=duplicate_hash)
                    fp_lenth_match = len(false_p)
                    if fp_lenth_match == 1:
                        false_positive = 'Yes'
                    else:
                        false_positive = 'No'
                    save_all = trivy_scan_results_db(
                        vuln_id=vul_id,
                        scan_id=scan_id,
                        project_id=project_id,
                        Target=Target,
                        VulnerabilityID=VulnerabilityID,
                        PkgName=PkgName,
                        InstalledVersion=InstalledVersion,
                        FixedVersion=FixedVersion,
                        Title=Title,
                        Description=Description,
                        Severity=Severity,
                        References=References,
                        vul_col=vul_col,
                        vuln_status='Open',
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        false_positive=false_positive,
                    )
                    save_all.save()
            except Exception as e:
                # Best-effort: a malformed target entry (e.g. null
                # Vulnerabilities) skips that entry instead of aborting.
                print(e)
    # Recompute per-scan severity totals from what was stored.
    all_findbugs_data = trivy_scan_results_db.objects.filter(
        scan_id=scan_id, false_positive='No')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(Severity="High"))
    total_medium = len(all_findbugs_data.filter(Severity="Medium"))
    total_low = len(all_findbugs_data.filter(Severity="Low"))
    total_duplicate = len(all_findbugs_data.filter(vuln_duplicate='Yes'))
    trivy_scan_db.objects.filter(scan_id=scan_id).update(
        total_vuln=total_vul,
        SEVERITY_HIGH=total_high,
        SEVERITY_MEDIUM=total_medium,
        SEVERITY_LOW=total_low,
        total_dup=total_duplicate)
    subject = 'Archery Tool Scan Status - Trivy Report Uploaded'
    message = 'Trivy Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (Target, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def semgrep_report_json(data, project_id, scan_id, username):
    """Parse a semgrep JSON report and persist findings.

    Stores each result in ``semgrepscan_scan_results_db`` — new findings as
    'Open', previously-seen hashes as 'Duplicate' — then refreshes the
    per-scan totals on ``semgrepscan_scan_db`` and sends a notification email.

    :param data: parsed semgrep JSON output (dict with a 'results' list)
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :param username: owner of the scan records
    :return: None
    """
    vul_col = ''
    vuln = data['results']
    for vuln_data in vuln:
        # Each field independently falls back to 'Not Found' when absent.
        try:
            check_id = vuln_data['check_id']
        except Exception as e:
            check_id = 'Not Found'
        try:
            path = vuln_data['path']
        except Exception as e:
            path = 'Not Found'
        try:
            start = vuln_data['start']
        except Exception as e:
            start = 'Not Found'
        try:
            end = vuln_data['end']
        except Exception as e:
            end = 'Not Found'
        try:
            message = vuln_data['extra']['message']
        except Exception as e:
            message = 'Not Found'
        try:
            metavars = vuln_data['extra']['metavars']
        except Exception as e:
            metavars = 'Not Found'
        try:
            metadata = vuln_data['extra']['metadata']
        except Exception as e:
            metadata = 'Not Found'
        try:
            severity = vuln_data['extra']['severity']
        except Exception as e:
            severity = 'Not Found'
        try:
            lines = vuln_data['extra']['lines']
        except Exception as e:
            lines = 'Not Found'
        # Map semgrep levels to Archery severities + Bootstrap colours.
        # NOTE(review): an unmatched severity keeps the vul_col from the
        # previous iteration ('' on the first one).
        if severity == "ERROR":
            severity = "High"
            vul_col = "danger"
        elif severity == 'WARNING':
            severity = 'Medium'
            vul_col = "warning"
        elif severity == 'INFORMATION':
            severity = 'Low'
            vul_col = "info"
        vul_id = uuid.uuid4()
        # Duplicate detection: hash of rule id + severity + file path.
        dup_data = str(check_id) + str(severity) + str(path)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = semgrepscan_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 0:
            # First time this hash is seen: store as an open finding,
            # honouring any prior false-positive marking.
            duplicate_vuln = 'No'
            false_p = semgrepscan_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = semgrepscan_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                check_id=check_id,
                path=path,
                severity=severity,
                message=message,
                end=end,
                metavars=metavars,
                metadata=metadata,
                lines=lines,
                username=username,
            )
            save_all.save()
        else:
            # Hash already present: store the row but mark it Duplicate.
            duplicate_vuln = 'Yes'
            save_all = semgrepscan_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                check_id=check_id,
                path=path,
                severity=severity,
                message=message,
                end=end,
                metavars=metavars,
                metadata=metadata,
                lines=lines,
                username=username,
            )
            save_all.save()
    # Recompute per-scan severity totals from what was stored.
    all_findbugs_data = semgrepscan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = semgrepscan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    semgrepscan_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vuln=total_vul,
        SEVERITY_HIGH=total_high,
        SEVERITY_MEDIUM=total_medium,
        SEVERITY_LOW=total_low,
        total_dup=total_duplicate)
    subject = 'Archery Tool Scan Status - semgrep Report Uploaded'
    message = 'semgrep Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("semgrep", total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id, username):
    """Parse a WebInspect XML report and persist findings.

    Walks the session/issue tree, stores each named issue in
    ``webinspect_scan_result_db``, then refreshes the per-scan totals on
    ``webinspect_scan_db`` and sends a notification email.

    :param root: ElementTree root of the WebInspect XML export
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :param username: owner of the scan records
    :return: None

    NOTE(review): issue fields are carried in module-level globals, so a
    field missing from one issue silently inherits the previous issue's
    value — restructuring this needs care.
    """
    global url, \
        Scheme, \
        Host, \
        Port, \
        AttackMethod, \
        VulnerableSession, \
        TriggerSession, \
        VulnerabilityID, \
        Severity, \
        Name, \
        ReportSection, \
        HighlightSelections, \
        RawResponse, \
        SectionText, \
        vuln_id, severity_name, vul_col
    for data in root:
        for issues in data:
            # Collect the scalar fields of one issue element.
            for issue in issues:
                if issue.tag == 'URL':
                    url = issue.text
                if issue.tag == 'Host':
                    Host = issue.text
                if issue.tag == 'Port':
                    Port = issue.text
                if issue.tag == 'AttackMethod':
                    AttackMethod = issue.text
                if issue.tag == 'VulnerableSession':
                    VulnerableSession = issue.text
                if issue.tag == 'TriggerSession':
                    TriggerSession = issue.text
                if issue.tag == 'VulnerabilityID':
                    VulnerabilityID = issue.text
                if issue.tag == 'Severity':
                    Severity = issue.text
                if issue.tag == 'Name':
                    Name = issue.text
                if issue.tag == 'ReportSection':
                    ReportSection = issue.text
                if issue.tag == 'HighlightSelections':
                    HighlightSelections = issue.text
                if issue.tag == 'RawResponse':
                    RawResponse = issue.text
                for d_issue in issue:
                    if d_issue.tag == 'SectionText':
                        # NOTE(review): reads issue.text, not d_issue.text —
                        # looks like a bug (stores the parent's text); confirm
                        # against a real WebInspect export before changing.
                        SectionText = issue.text
            vuln_id = uuid.uuid4()
            # WebInspect severities are numeric strings: 4..0 map to
            # Critical..Information plus a Bootstrap colour class.
            if Severity == "4":
                Severity = 'Critical'
                vul_col = "danger"
            elif Severity == "3":
                Severity = 'High'
                vul_col = 'danger'
            elif Severity == "2":
                Severity = 'Medium'
                vul_col = "warning"
            elif Severity == '1':
                Severity = 'Low'
                vul_col = "info"
            elif Severity == '0':
                Severity = 'Information'
                vul_col = "info"
            # Duplicate detection: hash of name + URL + severity.
            dup_data = Name + url + Severity
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()
            match_dup = webinspect_scan_result_db.objects.filter(
                username=username,
                dup_hash=duplicate_hash).values('dup_hash').distinct()
            lenth_match = len(match_dup)
            if lenth_match == 1:
                duplicate_vuln = 'Yes'
            elif lenth_match == 0:
                duplicate_vuln = 'No'
            else:
                duplicate_vuln = 'None'
            false_p = webinspect_scan_result_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            global false_positive
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            elif lenth_match == 0:
                false_positive = 'No'
            else:
                false_positive = 'No'
            if Name is None:
                # Unnamed issues are skipped (just logged).
                print(Name)
            else:
                dump_data = webinspect_scan_result_db(
                    scan_id=scan_id,
                    vuln_id=vuln_id,
                    vuln_url=url,
                    host=Host,
                    port=Port,
                    attackmethod=AttackMethod,
                    vulnerablesession=VulnerableSession,
                    triggerSession=TriggerSession,
                    vulnerabilityID=VulnerabilityID,
                    severity=Severity,
                    name=Name,
                    reportSection=ReportSection,
                    highlightSelections=HighlightSelections,
                    rawResponse=RawResponse,
                    SectionText=SectionText,
                    severity_name=severity_name,
                    vuln_color=vul_col,
                    false_positive=false_positive,
                    vuln_status='Open',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    project_id=project_id,
                    username=username)
                dump_data.save()
    # Recompute per-scan severity totals from what was stored.
    webinspect_all_vul = webinspect_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    total_critical = len(webinspect_all_vul.filter(severity='Critical'))
    total_high = len(webinspect_all_vul.filter(severity="High"))
    total_medium = len(webinspect_all_vul.filter(severity="Medium"))
    total_low = len(webinspect_all_vul.filter(severity="Low"))
    total_info = len(webinspect_all_vul.filter(severity="Information"))
    # NOTE(review): filters severity='Yes' — other parsers filter
    # vuln_duplicate='Yes'; this always yields 0 and looks like a
    # copy-paste slip. Left as-is pending confirmation.
    total_duplicate = len(webinspect_all_vul.filter(severity='Yes'))
    total_vul = total_critical + total_high + total_medium + total_low + total_info
    webinspect_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        critical_vul=total_critical,
        info_vul=total_info,
        total_dup=total_duplicate)
    subject = 'Archery Tool Scan Status - Webinspect Report Uploaded'
    message = 'Webinspect Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (Host, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def debcvescan_report_json(data, project_id, scan_id, username):
    """Parse a debcvescan JSON report and persist findings.

    Stores every non-disputed vulnerability in ``debcvescan_scan_results_db``
    — new hashes as 'Open', previously-seen ones as 'Duplicate' — then
    refreshes the per-scan totals on ``debcvescan_scan_db`` and sends a
    notification email.

    :param data: parsed debcvescan JSON report (dict with 'vulnerabilities')
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :param username: owner of the scan records
    :return: None
    """
    date_time = datetime.now()
    vul_col = ''
    vuln = data['vulnerabilities']
    for vuln_data in vuln:
        # Missing fields fall back to "Not Found" (dict.get replaces the
        # original try/except-per-field boilerplate).
        description = vuln_data.get('description', "Not Found")
        # Disputed CVEs are intentionally skipped.
        if "DISPUTED" in description:
            continue
        cve = vuln_data.get('cve', "Not Found")
        severity = vuln_data.get('severity', "Not Found")
        package = vuln_data.get('package', "Not Found")
        package_ver = vuln_data.get('installed_version', "Not Found")
        fix_ver = vuln_data.get('fixed_version', "Not Found")
        # debcvescan encodes severity numerically: 3=High, 2=Medium, 1=Low;
        # anything else is not stored.
        if severity == 3:
            severity = "High"
            vul_col = "danger"
        elif severity == 2:
            vul_col = "warning"
            severity = "Medium"
        elif severity == 1:
            vul_col = "info"
            severity = "Low"
        else:
            continue
        vul_id = uuid.uuid4()
        # Duplicate detection: hash of CVE + severity + package name.
        dup_data = str(cve) + str(severity) + str(package)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = debcvescan_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 0:
            # First occurrence: store as Open, honouring any prior
            # false-positive marking for the same hash.
            duplicate_vuln = 'No'
            false_p = debcvescan_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = debcvescan_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                description=description,
                cve=cve,
                package=package,
                package_ver=package_ver,
                fix_ver=fix_ver,
                Severity=severity,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                username=username,
            )
            save_all.save()
        else:
            # Hash already present: store the row but mark it Duplicate.
            duplicate_vuln = 'Yes'
            save_all = debcvescan_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                description=description,
                cve=cve,
                package=package,
                package_ver=package_ver,
                fix_ver=fix_ver,
                Severity=severity,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                username=username,
            )
            save_all.save()
    # Recompute per-scan severity totals from what was stored.
    all_findbugs_data = debcvescan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    duplicate_count = debcvescan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(Severity="High"))
    total_medium = len(all_findbugs_data.filter(Severity="Medium"))
    total_low = len(all_findbugs_data.filter(Severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    debcvescan_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Debian CVE Scan Report Uploaded'
    # BUG FIX: the original interpolated the undefined name `Target` here,
    # raising NameError after every import; report scan_id instead, matching
    # the bandit/findbugs parsers.
    message = 'Debian CVE Scan has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def bandit_report_json(data, project_id, scan_id, username):
    """Parse a Bandit JSON report and persist findings.

    Stores each entry of the report's 'results' list in
    ``bandit_scan_results_db``, then refreshes the per-scan totals on
    ``bandit_scan_db`` and sends a notification email.

    :param data: parsed Bandit JSON output (dict with a 'results' list)
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :param username: owner of the scan records
    :return: None

    NOTE(review): per-finding fields live in module-level globals; a key
    missing from one result silently inherits the previous result's value.
    """
    for key, items in data.items():
        if key == 'results':
            for res in items:
                # Copy each result field into its global, normalising
                # None to "NA".
                for key, value in res.items():
                    if key == 'line_number':
                        global line_number
                        if value is None:
                            line_number = "NA"
                        else:
                            line_number = value
                    if key == 'code':
                        global code
                        if value is None:
                            code = "NA"
                        else:
                            code = value
                    if key == 'issue_confidence':
                        global issue_confidence
                        if value is None:
                            issue_confidence = "NA"
                        else:
                            issue_confidence = value
                    if key == 'line_range':
                        global line_range
                        if value is None:
                            line_range = "NA"
                        else:
                            line_range = value
                    if key == 'test_id':
                        global test_id
                        if value is None:
                            test_id = "NA"
                        else:
                            test_id = value
                    if key == 'issue_severity':
                        global issue_severity
                        if value is None:
                            issue_severity = "NA"
                        else:
                            issue_severity = value
                    if key == 'issue_text':
                        global issue_text
                        if value is None:
                            issue_text = "NA"
                        else:
                            issue_text = value
                    if key == 'test_name':
                        global test_name
                        if value is None:
                            test_name = "NA"
                        else:
                            test_name = value
                    if key == 'filename':
                        global filename
                        if value is None:
                            filename = "NA"
                        else:
                            filename = value
                    if key == 'more_info':
                        global more_info
                        if value is None:
                            more_info = "NA"
                        else:
                            more_info = value
                date_time = datetime.now()
                vul_id = uuid.uuid4()
                # Map Bandit severity levels to Bootstrap colour classes.
                global vul_col
                if issue_severity == "HIGH":
                    vul_col = "danger"
                elif issue_severity == "MEDIUM":
                    vul_col = 'warning'
                elif issue_severity == "LOW":
                    vul_col = "info"
                # Duplicate detection: hash of test name + file + severity.
                dup_data = test_name + filename + issue_severity
                duplicate_hash = hashlib.sha256(
                    dup_data.encode('utf-8')).hexdigest()
                match_dup = bandit_scan_results_db.objects.filter(
                    username=username,
                    dup_hash=duplicate_hash).values('dup_hash').distinct()
                lenth_match = len(match_dup)
                if lenth_match == 1:
                    duplicate_vuln = 'Yes'
                elif lenth_match == 0:
                    duplicate_vuln = 'No'
                else:
                    duplicate_vuln = 'None'
                false_p = bandit_scan_results_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                fp_lenth_match = len(false_p)
                if fp_lenth_match == 1:
                    false_positive = 'Yes'
                else:
                    false_positive = 'No'
                save_all = bandit_scan_results_db(
                    scan_id=scan_id,
                    # rescan_id = rescan_id,
                    scan_date=date_time,
                    project_id=project_id,
                    vuln_id=vul_id,
                    # source_line=source_line,
                    line_number=line_number,
                    code=code,
                    issue_confidence=issue_confidence,
                    line_range=line_range,
                    test_id=test_id,
                    issue_severity=issue_severity,
                    issue_text=issue_text,
                    test_name=test_name,
                    filename=filename,
                    more_info=more_info,
                    vul_col=vul_col,
                    false_positive=false_positive,
                    vuln_status='Open',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    username=username,
                )
                save_all.save()
    # Recompute per-scan severity totals from what was stored.
    all_bandit_data = bandit_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')
    total_vul = len(all_bandit_data)
    total_high = len(all_bandit_data.filter(issue_severity="HIGH"))
    total_medium = len(all_bandit_data.filter(issue_severity="MEDIUM"))
    total_low = len(all_bandit_data.filter(issue_severity="LOW"))
    total_duplicate = len(all_bandit_data.filter(vuln_duplicate='Yes'))
    bandit_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vuln=total_vul,
        SEVERITY_HIGH=total_high,
        SEVERITY_MEDIUM=total_medium,
        SEVERITY_LOW=total_low,
        total_dup=total_duplicate)
    subject = 'Archery Tool Scan Status - Bandit Report Uploaded'
    message = 'Bandit Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def burp_scan_data(root, project_id, scan_id):
    """Parse a Burp Suite XML export and persist findings.

    Decodes the base64 request/response pairs, stores each issue in
    ``burp_scan_result_db``, refreshes per-scan totals on ``burp_scan_db``
    and sends notification emails.

    :param root: ElementTree root of the Burp XML export
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :return: None

    NOTE(review): issue fields live in module-level globals, so a field
    absent from one issue inherits the previous issue's value.
    """
    global vuln_id, burp_status, vul_col, \
        issue_description, \
        issue_remediation, \
        issue_reference, \
        issue_vulnerability_classifications
    for issue in root:
        # Collect one issue's child elements, normalising None text to "NA".
        for data in issue.getchildren():
            vuln_id = uuid.uuid4()
            if data.tag == "serialNumber":
                global serialNumber
                if data.text is None:
                    serialNumber = "NA"
                else:
                    serialNumber = data.text
            if data.tag == "type":
                global types
                if data.text is None:
                    types = "NA"
                else:
                    types = data.text
            if data.tag == "name":
                global name
                if data.text is None:
                    name = "NA"
                else:
                    name = data.text
            if data.tag == "host":
                global host
                if data.text is None:
                    host = "NA"
                else:
                    host = data.text
            if data.tag == "path":
                global path
                if data.text is None:
                    path = "NA"
                else:
                    path = data.text
            if data.tag == "location":
                global location
                if data.text is None:
                    location = "NA"
                else:
                    location = data.text
            if data.tag == "severity":
                global severity
                if data.text is None:
                    severity = "NA"
                else:
                    severity = data.text
            if data.tag == "confidence":
                global confidence
                if data.text is None:
                    confidence = "NA"
                else:
                    confidence = data.text
            if data.tag == "requestresponse":
                global requestresponse
                if data.text is None:
                    requestresponse = "NA"
                else:
                    requestresponse = data.text
                # Burp stores the HTTP exchange base64-encoded; decode the
                # raw request/response bytes and pick up the method attr.
                for d in data:
                    req = d.tag
                    met = d.attrib
                    if req == "request":
                        global request_datas
                        reqst = d.text
                        request_datas = base64.b64decode(reqst)  # reqst
                    if req == "response":
                        global response_datas
                        res_dat = d.text
                        response_datas = base64.b64decode(res_dat)  # res_dat
                    for key, items in met.items():
                        global methods
                        if key == "method":
                            methods = items
            if data.tag == "issueBackground":
                global issue_description
                if data.text is None:
                    issue_description = "NA"
                else:
                    issue_description = data.text
            if data.tag == "remediationBackground":
                global issue_remediation
                if data.text is None:
                    issue_remediation = "NA"
                else:
                    issue_remediation = data.text
            if data.tag == "references":
                global issue_reference
                if data.text is None:
                    issue_reference = "NA"
                else:
                    issue_reference = data.text
            if data.tag == "vulnerabilityClassifications":
                global issue_vulnerability_classifications
                if data.text is None:
                    issue_vulnerability_classifications = "NA"
                else:
                    issue_vulnerability_classifications = data.text
        # Map Burp severities to Bootstrap colour classes.
        global vul_col
        if severity == 'High':
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        else:
            vul_col = "info"
        vuln_id = uuid.uuid4()
        # Duplicate detection: hash of name + path + severity.
        dup_data = name + path + severity
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = burp_scan_result_db.objects.filter(
            dup_hash=duplicate_hash).values('dup_hash').distinct()
        lenth_match = len(match_dup)
        if lenth_match == 1:
            duplicate_vuln = 'Yes'
        elif lenth_match == 0:
            duplicate_vuln = 'No'
        else:
            duplicate_vuln = 'None'
        false_p = burp_scan_result_db.objects.filter(
            false_positive_hash=duplicate_hash)
        fp_lenth_match = len(false_p)
        global false_positive
        if fp_lenth_match == 1:
            false_positive = 'Yes'
        elif lenth_match == 0:
            false_positive = 'No'
        else:
            false_positive = 'No'
        url = host + location
        # all_issue_definitions = burp_issue_definitions.objects.filter(issue_type_id=types)
        # for def_data in all_issue_definitions:
        #     issue_description = def_data.description
        #     issue_remediation = def_data.remediation
        #     issue_vulnerability_classifications = def_data.vulnerability_classifications
        #     issue_reference = def_data.reference
        try:
            data_dump = burp_scan_result_db(
                scan_id=scan_id,
                project_id=project_id,
                vuln_id=vuln_id,
                name=name,
                path=path,
                severity=severity,
                severity_color=vul_col,
                confidence=confidence,
                false_positive=false_positive,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                type_index=types,
                serial_number=serialNumber,
                origin=host,
                request_response_url=url,
                request_response_request_data=request_datas,
                request_response_response_data=response_datas,
                description=issue_description,
                remediation=issue_remediation,
                reference=issue_reference,
                vulnerability_classifications=issue_vulnerability_classifications
            )
            data_dump.save()
        except Exception as e:
            # Best-effort: a malformed issue is logged and skipped.
            print(e)
    # Recompute per-scan severity totals from what was stored.
    burp_all_vul = burp_scan_result_db.objects.filter(scan_id=scan_id,
                                                      false_positive='No')
    total_vul = len(burp_all_vul)
    total_high = len(burp_all_vul.filter(severity="High"))
    total_medium = len(burp_all_vul.filter(severity="Medium"))
    total_low = len(burp_all_vul.filter(severity="Low"))
    total_info = len(burp_all_vul.filter(severity="Information"))
    total_duplicate = len(burp_all_vul.filter(vuln_duplicate='Yes'))
    burp_scan_db.objects.filter(scan_id=scan_id).update(
        url=host,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate
    )
    subject = 'Archery Tool Scan Status - Burp Report Uploaded'
    message = 'Burp Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (host, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
    try:
        email_notification.email_notify()
    except Exception as e:
        print(e)
    # NOTE(review): this HttpResponse is constructed but never returned, so
    # it has no effect — either return it or drop the line.
    HttpResponse(status=201)
def gitlabsast_report_json(data, project_id, scan_id, username):
    """Parse a GitLab SAST JSON report and persist findings.

    Stores each vulnerability in ``gitlabsast_scan_results_db`` — new hashes
    as 'Open', previously-seen ones as 'Duplicate' — then refreshes the
    per-scan totals on ``gitlabsast_scan_db`` and sends a notification email.

    :param data: parsed GitLab SAST JSON report (dict with 'vulnerabilities')
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :param username: owner of the scan records
    :return: None
    """
    date_time = datetime.now()
    vul_col = ''
    vuln = data['vulnerabilities']
    for vuln_data in vuln:
        # Each field independently falls back to "Not Found" when absent.
        try:
            name = vuln_data['name']
        except Exception:
            name = "Not Found"
        try:
            message = vuln_data['message']
        except Exception:
            message = "Not Found"
        try:
            description = vuln_data['description']
        except Exception:
            description = "Not Found"
        try:
            cve = vuln_data['cve']
        except Exception:
            cve = "Not Found"
        try:
            scanner = vuln_data['scanner']
        except Exception:
            scanner = "Not Found"
        try:
            location = vuln_data['location']
        except Exception:
            location = "Not Found"
        try:
            identifiers = vuln_data['identifiers']
        except Exception:
            identifiers = "Not Found"
        try:
            severity = vuln_data['severity']
        except Exception:
            severity = "Not Found"
        try:
            file = vuln_data['location']['file']
        except Exception:
            file = "Not Found"
        # Critical is folded into High; Unknown/other map to Low.
        if severity == "Critical":
            severity = 'High'
            vul_col = "danger"
        if severity == "High":
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        elif severity == 'Unknown':
            severity = "Low"
            vul_col = "info"
        elif severity == 'Everything else':
            severity = "Low"
            vul_col = "info"
        vul_id = uuid.uuid4()
        # Duplicate detection: hash of message + severity + file path.
        dup_data = str(message) + str(severity) + str(file)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = gitlabsast_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        lenth_match = len(match_dup)
        if lenth_match == 0:
            # First occurrence: store as Open, honouring any prior
            # false-positive marking for the same hash.
            duplicate_vuln = 'No'
            false_p = gitlabsast_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = gitlabsast_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                name=name,
                message=message,
                description=description,
                cve=cve,
                gl_scanner=scanner,
                location=location,
                file=file,
                Severity=severity,
                identifiers=identifiers,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                username=username,
            )
            save_all.save()
        else:
            # Hash already present: store the row but mark it Duplicate.
            duplicate_vuln = 'Yes'
            save_all = gitlabsast_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                project_id=project_id,
                date_time=date_time,
                name=name,
                message=message,
                description=description,
                cve=cve,
                gl_scanner=scanner,
                location=location,
                file=file,
                Severity=severity,
                identifiers=identifiers,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                username=username,
            )
            save_all.save()
    # Recompute per-scan severity totals from what was stored.
    all_findbugs_data = gitlabsast_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No',
        vuln_duplicate='No')
    duplicate_count = gitlabsast_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(Severity="High"))
    total_medium = len(all_findbugs_data.filter(Severity="Medium"))
    total_low = len(all_findbugs_data.filter(Severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    gitlabsast_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - GitLab SAST Report Uploaded'
    # BUG FIX: the original interpolated the undefined name `Target` here,
    # raising NameError after every import; report scan_id instead, matching
    # the bandit/findbugs parsers.
    message = 'GitLab SAST Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def dockle_report_json(data, project_id, scan_id, username): """ :param data: :param project_id: :param scan_id: :return: """ global vul_col for vuln in data['details']: code = vuln['code'] title = vuln['title'] level = vuln['level'] alerts = vuln['alerts'][0] if level == "FATAL": vul_col = "danger" elif level == 'PASS': vul_col = "warning" elif level == 'WARN': vul_col = "warning" elif level == 'INFO': vul_col = "info" vul_id = uuid.uuid4() save_all = dockle_scan_results_db( scan_id=scan_id, project_id=project_id, vul_col=vul_col, vuln_id=vul_id, code=code, title=title, alerts=alerts, level=level, username=username, ) save_all.save() all_dockle_data = dockle_scan_results_db.objects.filter(username=username, scan_id=scan_id) total_vul = len(all_dockle_data) dockle_failed = len(all_dockle_data.filter(level="FATAL")) dockle_passed = len(all_dockle_data.filter(level="PASS")) dockle_warn = len(all_dockle_data.filter(level="WARN")) dockle_info = len(all_dockle_data.filter(level="INFO")) total_duplicate = len(all_dockle_data.filter(level='Yes')) dockle_scan_db.objects.filter(username=username, scan_id=scan_id).update( total_vuln=total_vul, dockle_fatal=dockle_failed, dockle_warn=dockle_warn, dockle_info=dockle_info, dockle_pass=dockle_passed, total_dup=total_duplicate) subject = 'Archery Tool Scan Status - dockle Report Uploaded' message = 'dockle Scanner has completed the scan ' \ ' %s <br> Total: %s <br>Failed: %s <br>' \ 'failed: %s <br>Skipped %s' % (scan_id, total_vul, dockle_failed, dockle_warn, dockle_passed) email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id):
    """Parse a FindBugs XML report and persist findings.

    First pass stores each ``BugInstance`` in ``findbugs_scan_results_db``;
    ``BugPattern`` elements then back-fill the stored rows with their
    short description and details. Finally the per-scan totals on
    ``findbugs_scan_db`` are refreshed and a notification email is sent.

    :param root: ElementTree root of the FindBugs XML report
    :param project_id: Archery project identifier
    :param scan_id: Archery scan identifier
    :return: None

    NOTE(review): several fields (ShortMessage, classname, risk, vul_col,
    ShortDescription, …) are only bound inside conditionals — a report
    missing those elements raises NameError, or leaks the previous bug's
    values via the globals.
    """
    global name
    # print root
    for bug in root:
        if bug.tag == 'BugInstance':
            name = bug.attrib['type']
            priority = bug.attrib['priority']
            # Collect the child elements of one BugInstance.
            for BugInstance in bug:
                if BugInstance.tag == 'ShortMessage':
                    global ShortMessage
                    ShortMessage = BugInstance.text
                if BugInstance.tag == 'LongMessage':
                    global LongMessage
                    LongMessage = BugInstance.text
                if BugInstance.tag == 'Class':
                    global classname
                    classname = BugInstance.attrib['classname']
                if BugInstance.tag == 'SourceLine':
                    global sourcepath, sourcefile
                    sourcepath = BugInstance.attrib['sourcepath']
                    sourcefile = BugInstance.attrib['sourcefile']
            # FindBugs priority 1/2/3 → severity label + Bootstrap colour.
            # NOTE(review): priority '3' is labelled 'Medium' (not 'Low'),
            # although the scan summary below counts priority 3 as Low.
            if priority == "1":
                risk = 'High'
                vul_col = "danger"
            elif priority == '2':
                risk = 'Medium'
                vul_col = "warning"
            elif priority == '3':
                risk = 'Medium'
                vul_col = "info"
            vul_id = uuid.uuid4()
            # Duplicate detection: hash of bug type + class + risk.
            dup_data = name + classname + risk
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()
            match_dup = findbugs_scan_results_db.objects.filter(
                dup_hash=duplicate_hash).values('dup_hash')
            lenth_match = len(match_dup)
            if lenth_match == 1:
                duplicate_vuln = 'Yes'
            elif lenth_match == 0:
                duplicate_vuln = 'No'
            else:
                duplicate_vuln = 'None'
            false_p = findbugs_scan_results_db.objects.filter(
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = findbugs_scan_results_db(vuln_id=vul_id,
                                                scan_id=scan_id,
                                                project_id=project_id,
                                                name=name,
                                                priority=priority,
                                                ShortMessage=ShortMessage,
                                                LongMessage=LongMessage,
                                                classname=classname,
                                                sourcepath=sourcepath,
                                                vul_col=vul_col,
                                                vuln_status='Open',
                                                dup_hash=duplicate_hash,
                                                vuln_duplicate=duplicate_vuln,
                                                false_positive=false_positive,
                                                risk=risk)
            save_all.save()
        if bug.tag == 'BugPattern':
            # Second pass: attach the pattern's description/details to all
            # stored rows of the same bug type.
            for BugPattern in bug:
                name = bug.attrib['type']
                if BugPattern.tag == 'ShortDescription':
                    ShortDescription = BugPattern.text
                if BugPattern.tag == 'Details':
                    global Details
                    Details = BugPattern.text
                findbugs_scan_results_db.objects.filter(
                    scan_id=scan_id, name=name).update(
                    ShortDescription=ShortDescription,
                    Details=Details,
                )
    # Recompute per-scan priority totals from what was stored.
    all_findbugs_data = findbugs_scan_results_db.objects.filter(
        scan_id=scan_id, false_positive='No')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(priority="1"))
    total_medium = len(all_findbugs_data.filter(priority="2"))
    total_low = len(all_findbugs_data.filter(priority="3"))
    total_duplicate = len(all_findbugs_data.filter(vuln_duplicate='Yes'))
    findbugs_scan_db.objects.filter(scan_id=scan_id).update(
        total_vuln=total_vul,
        SEVERITY_HIGH=total_high,
        SEVERITY_MEDIUM=total_medium,
        SEVERITY_LOW=total_low,
        total_dup=total_duplicate)
    subject = 'Archery Tool Scan Status - Findbugs Report Uploaded'
    message = 'Findbugs Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)
def twistlock_report_json(data, project_id, scan_id, username):
    """
    Parse a Twistlock JSON report and persist its findings.

    Vulnerabilities are read from ``data['results'][0]['vulnerabilities']``;
    each entry looks like::

        {"id": "CVE-2013-7459", "cvss": 9.8,
         "vector": "CVSS:3.0/...", "description": "...",
         "severity": "critical", "packageName": "pycrypto",
         "packageVersion": "2.6.1", "link": "https://..."}

    Each entry becomes a ``twistlock_scan_results_db`` row (duplicates are
    flagged via a content hash), then per-severity totals are written to
    ``twistlock_scan_db``, trends are refreshed and a notification e-mail
    is sent.

    :param data: parsed Twistlock JSON report (dict)
    :param project_id: project identifier the findings belong to
    :param scan_id: scan identifier the findings belong to
    :param username: owner of the scan results
    :return: None (side effects: DB writes, trend update, notification e-mail)
    """
    date_time = datetime.now()
    vuln = data['results'][0]['vulnerabilities']
    for vuln_data in vuln:
        # dict.get replaces the original per-field try/except blocks; the
        # only failure mode there was a missing key.
        name = vuln_data.get('id', 'Not Found')
        cvss = vuln_data.get('cvss', 'Not Found')
        vector = vuln_data.get('vector', 'Not Found')  # parsed, not persisted
        description = vuln_data.get('description', 'Not Found')
        severity = vuln_data.get('severity', 'Not Found')
        packageName = vuln_data.get('packageName', 'Not Found')
        packageVersion = vuln_data.get('packageVersion', 'Not Found')
        link = vuln_data.get('link', 'Not Found')
        # BUG FIX: Twistlock emits lower-case severities ('critical',
        # 'high', ...) but the original compared against capitalized
        # strings, so vul_col stayed '' and the High/Medium/Low totals
        # below were always zero.  Normalise case first.
        severity = str(severity).capitalize()
        if severity == 'Critical':
            severity = 'High'
        if severity == 'High':
            vul_col = 'danger'
        elif severity == 'Medium':
            vul_col = 'warning'
        elif severity == 'Low':
            vul_col = 'info'
        else:
            # Unknown / unrecognised severities are treated as Low, as the
            # original's 'Unknown' branch intended.
            severity = 'Low'
            vul_col = 'info'
        vul_id = uuid.uuid4()
        dup_data = str(name) + str(severity) + str(packageName)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = twistlock_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        if len(match_dup) == 0:
            # First occurrence: open finding; honour any stored
            # false-positive marking for the same hash.
            duplicate_vuln = 'No'
            vuln_status = 'Open'
            false_p = twistlock_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
        else:
            # Seen before: record it, but flag as a duplicate.
            duplicate_vuln = 'Yes'
            vuln_status = 'Duplicate'
            false_positive = 'Duplicate'
        save_all = twistlock_scan_results_db(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            vul_col=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            username=username,
            name=name,
            cvss=cvss,
            description=description,
            Severity=severity,
            packageName=packageName,
            packageVersion=packageVersion,
            link=link,
        )
        save_all.save()
    # Aggregate counts: unique, non-false-positive findings vs duplicates.
    all_findbugs_data = twistlock_scan_results_db.objects.filter(
        username=username, scan_id=scan_id,
        false_positive='No', vuln_duplicate='No')
    duplicate_count = twistlock_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')
    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(Severity="High"))
    total_medium = len(all_findbugs_data.filter(Severity="Medium"))
    total_low = len(all_findbugs_data.filter(Severity="Low"))
    total_duplicate = len(duplicate_count)
    twistlock_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - twistlock Report Uploaded'
    # BUG FIX: original interpolated the undefined name `Target`
    # (NameError on every run); use scan_id like the other parsers.
    message = 'twistlock Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high,
                                         total_medium, total_low)
    email_sch_notify(subject=subject, message=message)