def zap_result_save(self, all_vuln, project_id, un_scanid):
    """
    Persist a ZAP alert report into the Archery database.

    The report is round-tripped through ``tostring``/``fromstring`` so any
    non-ASCII bytes are dropped before parsing, then handed to the ZAP XML
    parser. Once stored, all alerts are cleared from the running ZAP
    instance so the next scan starts clean.

    :param all_vuln: raw XML report text produced by ZAP.
    :param project_id: project the scan belongs to.
    :param un_scanid: unique scan identifier for this report.
    :return: None
    """
    parsed_report = ET.fromstring(all_vuln)
    # Drop any non-ASCII characters the report may contain.
    ascii_report = ET.tostring(parsed_report, encoding='utf8').decode('ascii', 'ignore')
    sanitized_root = ET.fromstring(ascii_report)
    zap_xml_parser.xml_parser(project_id=project_id,
                              scan_id=un_scanid,
                              root=sanitized_root)
    # Reset ZAP's alert store now that the results are saved.
    self.zap.core.delete_all_alerts()
def xml_upload(request):
    """
    Handle scanner report uploads (XML/HTML) for the logged-in user.

    Dispatches the uploaded file to the parser matching the selected
    scanner, records a scan row in that scanner's table, and redirects to
    the scanner's result list. On any parse/save failure the upload form
    is re-rendered with an error message.

    :param request: Django request; POST carries ``project_id``,
        ``scanner``, ``scan_url`` and the uploaded file in ``xmlfile``.
    :return: HttpResponseRedirect on success, rendered upload form otherwise.
    """
    username = request.user.username
    all_project = project_db.objects.filter(username=username)
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"
        if scanner == "zap_scan":
            try:
                tree = ET.parse(xml_file)
                date_time = datetime.now()
                root_xml = tree.getroot()
                # Round-trip through ASCII so the parser never trips over
                # non-ASCII bytes in the report.
                en_root_xml = ET.tostring(root_xml,
                                          encoding='utf8').decode(
                                              'ascii', 'ignore')
                root_xml_en = ET.fromstring(en_root_xml)
                scan_dump = zap_scans_db(username=username,
                                         scan_url=scan_url,
                                         scan_scanid=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         vul_status=scan_status,
                                         rescan='No')
                scan_dump.save()
                zap_xml_parser.xml_parser(username=username,
                                          project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('zapscanner:zap_scan_list'))
            # Narrowed from a bare ``except:`` so SystemExit /
            # KeyboardInterrupt are no longer swallowed.
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == "burp_scan":
            try:
                date_time = datetime.now()
                # Burp scan XML parser
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                en_root_xml = ET.tostring(root_xml,
                                          encoding='utf8').decode(
                                              'ascii', 'ignore')
                root_xml_en = ET.fromstring(en_root_xml)
                scan_dump = burp_scan_db(username=username,
                                         url=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
                scan_dump.save()
                burp_xml_parser.burp_scan_data(root_xml_en,
                                               project_id,
                                               scan_id,
                                               username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('burpscanner:burp_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == "arachni":
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = arachni_scan_db(username=username,
                                            url=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
                scan_dump.save()
                arachni_xml_parser.xml_parser(username=username,
                                              project_id=project_id,
                                              scan_id=scan_id,
                                              root=root_xml,
                                              target_url=scan_url)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('arachniscanner:arachni_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'netsparker':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = netsparker_scan_db(username=username,
                                               url=scan_url,
                                               scan_id=scan_id,
                                               date_time=date_time,
                                               project_id=project_id,
                                               scan_status=scan_status)
                scan_dump.save()
                netsparker_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('netsparkerscanner:netsparker_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'webinspect':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = webinspect_scan_db(username=username,
                                               url=scan_url,
                                               scan_id=scan_id,
                                               date_time=date_time,
                                               project_id=project_id,
                                               scan_status=scan_status)
                scan_dump.save()
                webinspect_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('webinspectscanner:webinspect_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'acunetix':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = acunetix_scan_db(username=username,
                                             url=scan_url,
                                             scan_id=scan_id,
                                             date_time=date_time,
                                             project_id=project_id,
                                             scan_status=scan_status)
                scan_dump.save()
                acunetix_xml_parser.xml_parser(username=username,
                                               project_id=project_id,
                                               scan_id=scan_id,
                                               root=root_xml)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('acunetixscanner:acunetix_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'dependencycheck':
            try:
                date_time = datetime.now()
                data = etree.parse(xml_file)
                root = data.getroot()
                scan_dump = dependencycheck_scan_db(
                    project_name=scan_url,
                    scan_id=scan_id,
                    date_time=date_time,
                    project_id=project_id,
                    scan_status=scan_status,
                    username=username)
                scan_dump.save()
                dependencycheck_report_parser.xml_parser(
                    project_id=project_id,
                    scan_id=scan_id,
                    data=root,
                    username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('dependencycheck:dependencycheck_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'checkmarx':
            try:
                date_time = datetime.now()
                data = etree.parse(xml_file)
                root = data.getroot()
                scan_dump = checkmarx_scan_db(project_name=scan_url,
                                              scan_id=scan_id,
                                              date_time=date_time,
                                              project_id=project_id,
                                              scan_status=scan_status,
                                              username=username)
                scan_dump.save()
                checkmarx_xml_report_parser.checkmarx_report_xml(
                    project_id=project_id,
                    scan_id=scan_id,
                    data=root,
                    username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('checkmarx:checkmarx_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'findbugs':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root = tree.getroot()
                scan_dump = findbugs_scan_db(project_name=scan_url,
                                             scan_id=scan_id,
                                             date_time=date_time,
                                             project_id=project_id,
                                             scan_status=scan_status,
                                             username=username)
                scan_dump.save()
                findbugs_report_parser.xml_parser(project_id=project_id,
                                                  scan_id=scan_id,
                                                  root=root,
                                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('findbugs:findbugs_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'nikto':
            try:
                date_time = datetime.now()
                # ``username`` added so the record is user-scoped like every
                # other scanner branch (matches the REST upload view).
                scan_dump = nikto_result_db(
                    date_time=date_time,
                    scan_url=scan_url,
                    scan_id=scan_id,
                    project_id=project_id,
                    username=username,
                )
                scan_dump.save()
                nikto_html_parser(xml_file, project_id, scan_id,
                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('tools:nikto'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
    return render(request, 'upload_xml.html', {'all_project': all_project})
def xml_upload(request):
    """
    Handle scanner report uploads (legacy, non-user-scoped version).

    Saves a scan row for the selected scanner, runs the matching parser
    over the uploaded file, and redirects to that scanner's result list.

    :param request: Django request; POST carries ``project_id``,
        ``scanner``, ``scan_url`` and the uploaded file in ``xmlfile``.
    :return: HttpResponseRedirect on success, rendered upload form otherwise.
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"
        if scanner == "zap_scan":
            date_time = datetime.now()
            scan_dump = zap_scans_db(scan_url=scan_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     vul_status=scan_status,
                                     rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/zapscanner/zap_scan_list/")
        elif scanner == "burp_scan":
            date_time = datetime.now()
            scan_dump = burp_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            # Bug fix: the original passed the undefined name ``target_url``
            # (NameError); the uploaded form field is ``scan_url``.
            do_xml_data = burp_plugin.burp_scans(project_id,
                                                 scan_url,
                                                 scan_id)
            do_xml_data.burp_scan_data(root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/burpscanner/burp_scan_list")
        elif scanner == "arachni":
            date_time = datetime.now()
            scan_dump = arachni_scan_db(url=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/arachniscanner/arachni_scan_list")
        elif scanner == 'netsparker':
            date_time = datetime.now()
            scan_dump = netsparker_scan_db(url=scan_url,
                                           scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            netsparker_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id,
                                             root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/netsparkerscanner/netsparker_scan_list/")
        elif scanner == 'webinspect':
            date_time = datetime.now()
            scan_dump = webinspect_scan_db(url=scan_url,
                                           scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            webinspect_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id,
                                             root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/webinspectscanner/webinspect_scan_list/")
        elif scanner == 'acunetix':
            date_time = datetime.now()
            scan_dump = acunetix_scan_db(url=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            acunetix_xml_parser.xml_parser(project_id=project_id,
                                           scan_id=scan_id,
                                           root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect("/acunetixscanner/acunetix_scan_list/")
        elif scanner == 'dependencycheck':
            date_time = datetime.now()
            scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                                scan_id=scan_id,
                                                date_time=date_time,
                                                project_id=project_id,
                                                scan_status=scan_status)
            scan_dump.save()
            data = etree.parse(xml_file)
            dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                     scan_id=scan_id,
                                                     data=data)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/dependencycheck/dependencycheck_list")
        elif scanner == 'findbugs':
            date_time = datetime.now()
            scan_dump = findbugs_scan_db(project_name=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root = tree.getroot()
            findbugs_report_parser.xml_parser(project_id=project_id,
                                              scan_id=scan_id,
                                              root=root)
            print("Saved scan data")
            return HttpResponseRedirect("/findbugs/findbugs_list")
        elif scanner == 'nikto':
            date_time = datetime.now()
            scan_dump = nikto_result_db(
                date_time=date_time,
                scan_url=scan_url,
                scan_id=scan_id,
                project_id=project_id,
            )
            scan_dump.save()
            nikto_html_parser(xml_file, project_id, scan_id)
            print("Saved scan data")
            return HttpResponseRedirect("/tools/nikto/")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def post(self, request, format=None):
    """
    REST endpoint: accept a scanner report and store the parsed results.

    :param request: DRF request; data carries ``project_id``, ``scanner``,
        ``filename`` (path/content of the report) and ``scan_url``.
    :param format: DRF content-negotiation suffix (unused).
    :return: DRF Response describing the uploaded scan.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    # Fixed Python-2 ``print x`` statements (SyntaxError on Python 3).
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        data = etree.parse(xml_file)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        nikto_html_parser(xml_file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """
    REST endpoint (early version): accept a scanner report and store it.

    :param request: DRF request; data carries ``project_id``, ``scanner``,
        ``filename`` and ``scan_url``.
    :param format: DRF content-negotiation suffix (unused).
    :return: DRF Response acknowledging the upload.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    # Fixed Python-2 ``print x`` statements (SyntaxError on Python 3).
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id)
        return Response({"message": "Scan Data Uploaded"})
    return Response({"message": "Scan Data Uploaded"})
def xml_upload(request):
    """
    Handle XML upload files (oldest version: ZAP, Burp and Arachni only).

    :param request: Django request; POST carries ``project_id``,
        ``scanner``, ``scan_url`` and the uploaded file in ``xmlfile``.
    :return: HttpResponseRedirect on success, rendered upload form otherwise.
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"
        if scanner == "zap_scan":
            date_time = timezone.now()
            scan_dump = zap_scans_db(scan_url=scan_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     vul_status=scan_status,
                                     rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/webscanners/scans_list/")
        elif scanner == "burp_scan":
            date_time = timezone.now()
            scan_dump = burp_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            # Bug fix: the original passed the undefined name ``target_url``
            # (NameError); the uploaded form field is ``scan_url``.
            do_xml_data = burp_plugin.burp_scans(project_id,
                                                 scan_url,
                                                 scan_id)
            do_xml_data.burp_scan_data(root_xml)
            # Python-2 print statements converted to print() calls.
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/burp_scan_list")
        elif scanner == "arachni":
            print(scanner)
            print(xml_file)
            print(scan_url)
            date_time = timezone.now()
            scan_dump = arachni_scan_db(url=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/arachni_scan_list")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def post(self, request, format=None):
    """
    REST endpoint (multi-user): accept a scanner report as raw content in
    ``filename`` and dispatch it to the matching parser.

    XML-based scanners round-trip the report through ASCII to strip
    non-ASCII bytes; JSON-based scanners use ``json.loads`` directly.

    :param request: DRF request; data carries ``project_id``, ``scanner``,
        ``filename`` (report content) and ``scan_url``.
    :param format: DRF content-negotiation suffix (unused).
    :return: DRF Response describing the uploaded scan.
    """
    username = request.user.username
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No',
                                 username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en,
                                  username=username)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status,
                                 username=username)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en,
                                       project_id,
                                       scan_id,
                                       username=username)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status,
                                    username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml,
                                      username=username,
                                      target_url=scan_url)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=root_xml_en,
                                       username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml,
                                         username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml,
                                         username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id,
                           username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status,
                                            username=username)
        scan_dump.save()
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data,
                                                 username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml,
                                          username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'checkmarx':
        date_time = datetime.datetime.now()
        scan_dump = checkmarx_scan_db(project_name=scan_url,
                                      scan_id=scan_id,
                                      date_time=date_time,
                                      project_id=project_id,
                                      scan_status=scan_status,
                                      username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=root_xml,
            project_id=project_id,
            scan_id=scan_id,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'trivy':
        date_time = datetime.datetime.now()
        scan_dump = trivy_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        trivy_json_report_parser.trivy_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'npmaudit':
        date_time = datetime.datetime.now()
        scan_dump = npmaudit_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        data = json.loads(file)
        npm_audit_report_json.npmaudit_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nodejsscan':
        date_time = datetime.datetime.now()
        scan_dump = nodejsscan_scan_db(project_name=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        data = json.loads(file)
        nodejsscan_report_json.nodejsscan_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'tfsec':
        date_time = datetime.datetime.now()
        scan_dump = tfsec_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        tfsec_report_parser.tfsec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'whitesource':
        date_time = datetime.datetime.now()
        scan_dump = whitesource_scan_db(project_name=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status,
                                        username=username)
        scan_dump.save()
        data = json.loads(file)
        whitesource_json_report_parser.whitesource_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dockle':
        date_time = datetime.datetime.now()
        scan_dump = dockle_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        dockle_json_parser.dockle_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.updated_nessus_parser(root=root_xml_en,
                                            scan_id=scan_id,
                                            project_id=project_id,
                                            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            # Bug fix: the original saved ``scan_id=host``, so the per-host
            # records never matched the uuid the parser stores findings
            # under; use the generated ``scan_id`` instead.
            scan_dump = scan_save_db(scan_ip=host,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
            scan_dump.save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en,
                                          username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(date_time=date_time,
                                    scan_url=scan_url,
                                    scan_id=scan_id,
                                    project_id=project_id,
                                    username=username)
        scan_dump.save()
        nikto_html_parser(file, project_id, scan_id, username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """
    Store an uploaded scanner report against a project and parse its findings.

    The project is referenced by its public UUID (``project_id`` in the
    request body) and resolved to the internal primary key. The report body
    arrives in ``filename`` either as an uploaded file or as a raw string.
    Dispatch is on the ``scanner`` field:

    * web scanners (ZAP, Burp, Arachni, Acunetix, Netsparker, Webinspect)
      parse XML into ``WebScansDb`` rows,
    * SAST / dependency / container scanners parse (mostly JSON) into
      ``StaticScansDb`` rows,
    * Nessus / OpenVAS feed the network-scan tables,
    * Inspec, Dockle and Nikto use their dedicated result tables.

    :param request: DRF request with ``project_id``, ``scanner``,
                    ``filename`` and ``scan_url`` in ``request.data``.
    :param format: unused DRF format suffix.
    :return: a DRF ``Response`` describing the stored scan, or
             ``{"message": "Scanner Not Found"}`` for unknown scanners.
    """
    date_time = datetime.datetime.now()
    project_uu_id = request.data.get("project_id")
    # Resolve the public project UUID to the internal primary key;
    # .get() raises if the UUID does not exist (unchanged behavior).
    project_id = (ProjectDb.objects.filter(
        uu_id=project_uu_id).values("id").get()["id"])
    scanner = request.data.get("scanner")
    # The report may arrive as an uploaded file or already as a string.
    uploaded = request.data.get("filename")
    if isinstance(uploaded, UploadedFile):
        file = uploaded.read().decode("utf-8")
    else:
        file = uploaded
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"

    def _xml_root(strip_non_ascii=True):
        # Parse the report body. When strip_non_ascii is set, round-trip the
        # tree through an ASCII-only serialisation to drop characters some
        # downstream parsers choke on (same cleaning the original applied).
        root = ET.fromstring(file)
        if not strip_non_ascii:
            return root
        cleaned = ET.tostring(root, encoding="utf8").decode("ascii", "ignore")
        return ET.fromstring(cleaned)

    # SAST-style reports that share one identical flow:
    # save a StaticScansDb row, json-decode the body, hand it to the parser.
    sast_json_parsers = {
        "clair": ("Clair", clair_json_report_parser.clair_report_json),
        "trivy": ("Trivy", trivy_json_report_parser.trivy_report_json),
        "gitlabsca": ("Gitlabsca",
                      gitlab_sca_json_report_parser.gitlabsca_report_json),
        "gitlabsast": ("Gitlabsast",
                       gitlab_sast_json_report_parser.gitlabsast_report_json),
        "gitlabcontainerscan": (
            "Gitlabcontainerscan",
            gitlab_container_json_report_parser.gitlabcontainerscan_report_json),
        "npmaudit": ("Npmaudit", npm_audit_report_json.npmaudit_report_json),
        "nodejsscan": ("Nodejsscan",
                       nodejsscan_report_json.nodejsscan_report_json),
        "semgrepscan": ("Semgrepscan",
                        semgrep_json_report_parser.semgrep_report_json),
        "tfsec": ("Tfsec", tfsec_report_parser.tfsec_report_json),
        "whitesource": ("Whitesource",
                        whitesource_json_report_parser.whitesource_report_json),
        "twistlock": ("Twistlock",
                      twistlock_json_report_parser.twistlock_report_json),
        "brakeman": ("Brakeman",
                     brakeman_json_report_parser.brakeman_report_json),
    }

    if scanner == "zap_scan":
        WebScansDb(scan_url=scan_url, scan_id=scan_id, project_id=project_id,
                   scan_status=scan_status, scanner="Zap").save()
        zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=_xml_root())
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "burp_scan":
        WebScansDb(scan_url=scan_url, scan_id=scan_id, project_id=project_id,
                   scan_status=scan_status, scanner="Burp").save()
        burp_xml_parser.burp_scan_data(_xml_root(), project_id, scan_id)
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "arachni":
        WebScansDb(scan_url=scan_url, scan_id=scan_id, project_id=project_id,
                   scan_status=scan_status, scanner="Arachni").save()
        # Arachni's parser takes the raw tree and needs the target URL.
        arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=_xml_root(strip_non_ascii=False),
                                      target_url=scan_url)
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "acunetix":
        WebScansDb(scan_url=scan_url, scan_id=scan_id, project_id=project_id,
                   scan_status=scan_status, scanner="Acunetix").save()
        acunetix_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                       root=_xml_root())
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "netsparker":
        WebScansDb(scan_url=scan_url, scan_id=scan_id, project_id=project_id,
                   scan_status=scan_status, scanner="Netsparker").save()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=_xml_root(strip_non_ascii=False))
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "webinspect":
        WebScansDb(scan_url=scan_url, scan_id=scan_id, project_id=project_id,
                   scan_status=scan_status, scanner="Webinspect").save()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=_xml_root(strip_non_ascii=False))
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "banditscan":
        # NOTE(review): only the Bandit row stores date_time explicitly; the
        # other StaticScansDb rows rely on the model default — confirm intended.
        StaticScansDb(project_name=scan_url, scan_id=scan_id,
                      project_id=project_id, scan_status=scan_status,
                      scanner="Bandit", date_time=date_time).save()
        bandit_report_json(data=json.loads(file), project_id=project_id,
                           scan_id=scan_id)
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "dependencycheck":
        StaticScansDb(project_name=scan_url, scan_id=scan_id,
                      project_id=project_id, scan_status=scan_status,
                      scanner="Dependencycheck").save()
        # Dependency-Check reports are parsed with lxml, not ElementTree.
        data = etree.XML(bytes(bytearray(file, encoding="utf-8")))
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, data=data)
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "findbugs":
        StaticScansDb(project_name=scan_url, scan_id=scan_id,
                      project_id=project_id, scan_status=scan_status,
                      scanner="Findbugs").save()
        FindsecbugsParser(project_id=project_id, scan_id=scan_id,
                          root=ET.fromstring(file)).xml_parser()
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "checkmarx":
        StaticScansDb(project_name=scan_url, scan_id=scan_id,
                      project_id=project_id, scan_status=scan_status,
                      scanner="Checkmarx").save()
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=ET.fromstring(file), project_id=project_id, scan_id=scan_id)
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner in sast_json_parsers:
        label, parse = sast_json_parsers[scanner]
        StaticScansDb(project_name=scan_url, scan_id=scan_id,
                      project_id=project_id, scan_status=scan_status,
                      scanner=label).save()
        parse(project_id=project_id, scan_id=scan_id, data=json.loads(file))
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "inspec":
        InspecScanDb(project_name=scan_url, scan_id=scan_id,
                     project_id=project_id, scan_status=scan_status).save()
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=json.loads(file))
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "dockle":
        DockleScanDb(project_name=scan_url, scan_id=scan_id,
                     project_id=project_id, scan_status=scan_status).save()
        dockle_json_parser.dockle_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=json.loads(file))
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "nessus":
        Nessus_Parser.updated_nessus_parser(root=_xml_root(), scan_id=scan_id,
                                            project_id=project_id)
        return self.network_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "openvas":
        root = _xml_root()
        # One NetworkScanDb row per host found in the report.
        for host in OpenVas_Parser.get_hosts(root):
            NetworkScanDb(ip=host, scan_id=scan_id, project_id=project_id,
                          scan_status=scan_status, scanner="Openvas").save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id, root=root)
        return self.network_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "nikto":
        NiktoResultDb(scan_url=scan_url, scan_id=scan_id,
                      project_id=project_id).save()
        nikto_html_parser(file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    else:
        return Response({"message": "Scanner Not Found"})
def post(self, request, format=None):
    """
    Legacy report-upload endpoint: store an uploaded scanner report.

    Unlike the UUID-based endpoint, ``project_id`` here is used directly and
    each scanner writes to its own dedicated scan table (``zap_scans_db``,
    ``burp_scan_db``, ...). The report body is expected as a string in the
    ``filename`` field.

    :param request: DRF request with ``project_id``, ``scanner``,
                    ``filename`` and ``scan_url`` in ``request.data``.
    :param format: unused DRF format suffix.
    :return: a DRF ``Response``; ``{"message": "Scan Data Not Uploaded"}``
             when the scanner name is not recognised.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    date_time = datetime.datetime.now()

    def _clean_xml_root():
        # Parse the report and round-trip it through an ASCII-only
        # serialisation to drop characters downstream parsers choke on.
        root = ET.fromstring(file)
        cleaned = ET.tostring(root, encoding="utf8").decode("ascii", "ignore")
        return ET.fromstring(cleaned)

    def _uploaded(message="Scan Data Uploaded"):
        # Common success payload for every recognised scanner.
        return Response({
            "message": message,
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner,
        })

    if scanner == "zap_scan":
        zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                     date_time=date_time, project_id=project_id,
                     vul_status=scan_status, rescan='No').save()
        zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=_clean_xml_root())
        return _uploaded("ZAP Scan Data Uploaded")
    elif scanner == "burp_scan":
        burp_scan_db(url=scan_url, scan_id=scan_id, date_time=date_time,
                     project_id=project_id, scan_status=scan_status).save()
        burp_xml_parser.burp_scan_data(_clean_xml_root(), project_id, scan_id)
        return _uploaded("Burp Scan Data Uploaded")
    elif scanner == "arachni":
        arachni_scan_db(url=scan_url, scan_id=scan_id, date_time=date_time,
                        project_id=project_id, scan_status=scan_status).save()
        # Arachni's parser takes the raw (uncleaned) tree.
        arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=ET.fromstring(file))
        return _uploaded()
    elif scanner == "acunetix":
        acunetix_scan_db(url=scan_url, scan_id=scan_id, date_time=date_time,
                         project_id=project_id, scan_status=scan_status).save()
        acunetix_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                       root=_clean_xml_root())
        return _uploaded()
    elif scanner == 'netsparker':
        netsparker_scan_db(url=scan_url, scan_id=scan_id, date_time=date_time,
                           project_id=project_id,
                           scan_status=scan_status).save()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=ET.fromstring(file))
        return _uploaded()
    elif scanner == 'webinspect':
        webinspect_scan_db(url=scan_url, scan_id=scan_id, date_time=date_time,
                           project_id=project_id,
                           scan_status=scan_status).save()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=ET.fromstring(file))
        return _uploaded()
    elif scanner == 'banditscan':
        bandit_scan_db(project_name=scan_url, scan_id=scan_id,
                       date_time=date_time, project_id=project_id,
                       scan_status=scan_status).save()
        bandit_report_json(data=json.loads(file), project_id=project_id,
                           scan_id=scan_id)
        return _uploaded()
    elif scanner == 'dependencycheck':
        dependencycheck_scan_db(project_name=scan_url, scan_id=scan_id,
                                date_time=date_time, project_id=project_id,
                                scan_status=scan_status).save()
        # Dependency-Check reports are parsed with lxml, not ElementTree.
        data = etree.XML(bytes(bytearray(file, encoding='utf-8')))
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, data=data)
        return _uploaded()
    elif scanner == 'findbugs':
        findbugs_scan_db(project_name=scan_url, scan_id=scan_id,
                         date_time=date_time, project_id=project_id,
                         scan_status=scan_status).save()
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=ET.fromstring(file))
        return _uploaded()
    elif scanner == 'clair':
        clair_scan_db(project_name=scan_url, scan_id=scan_id,
                      date_time=date_time, project_id=project_id,
                      scan_status=scan_status).save()
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=json.loads(file))
        return _uploaded()
    elif scanner == 'inspec':
        inspec_scan_db(project_name=scan_url, scan_id=scan_id,
                       date_time=date_time, project_id=project_id,
                       scan_status=scan_status).save()
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=json.loads(file))
        return _uploaded()
    elif scanner == 'nessus':
        nessus_scan_db(scan_ip=scan_url, scan_id=scan_id,
                       date_time=date_time, project_id=project_id,
                       scan_status=scan_status).save()
        Nessus_Parser.nessus_parser(root=_clean_xml_root(), scan_id=scan_id,
                                    project_id=project_id)
        return _uploaded()
    elif scanner == 'openvas':
        scan_save_db(scan_ip=scan_url, scan_id=scan_id, date_time=date_time,
                     project_id=project_id, scan_status=scan_status).save()
        OpenVas_Parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=_clean_xml_root())
        return _uploaded()
    elif scanner == 'nikto':
        nikto_result_db(date_time=date_time, scan_url=scan_url,
                        scan_id=scan_id, project_id=project_id).save()
        nikto_html_parser(file, project_id, scan_id)
        return _uploaded()
    return Response({"message": "Scan Data Not Uploaded"})
def post(self, request): all_project = ProjectDb.objects.filter() project_uu_id = request.POST.get("project_id") project_id = (ProjectDb.objects.filter( uu_id=project_uu_id).values("id").get()["id"]) scanner = request.POST.get("scanner") file = request.FILES["file"] target = request.POST.get("target") scan_id = uuid.uuid4() scan_status = "100" if scanner == "zap_scan": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "ZAP Scanner Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) tree = ET.parse(file) date_time = datetime.now() root_xml = tree.getroot() en_root_xml = ET.tostring(root_xml, encoding="utf8").decode( "ascii", "ignore") root_xml_en = ET.fromstring(en_root_xml) scan_dump = WebScansDb( scan_url=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, rescan="No", scanner="Zap", ) scan_dump.save() zap_xml_parser.xml_parser( project_id=project_id, scan_id=scan_id, root=root_xml_en, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("webscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "burp_scan": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Burp Scan Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() # Burp scan XML parser tree = ET.parse(file) root_xml = tree.getroot() en_root_xml = ET.tostring(root_xml, encoding="utf8").decode( "ascii", "ignore") root_xml_en = ET.fromstring(en_root_xml) scan_dump = WebScansDb( scan_url=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, scanner="Burp", ) scan_dump.save() burp_xml_parser.burp_scan_data(root_xml_en, project_id, scan_id) messages.success(request, "File Uploaded") return 
HttpResponseRedirect(reverse("webscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "arachni": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Arachni Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root_xml = tree.getroot() scan_dump = WebScansDb( scan_url=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, scanner="Arachni", ) scan_dump.save() arachni_xml_parser.xml_parser( project_id=project_id, scan_id=scan_id, root=root_xml, target_url=target, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("webscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "netsparker": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Netsparker Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root_xml = tree.getroot() scan_dump = WebScansDb( scan_url=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, scanner="Netsparker", ) scan_dump.save() netsparker_xml_parser.xml_parser( project_id=project_id, scan_id=scan_id, root=root_xml, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("webscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "webinspect": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Webinspect Only XML file Support") return HttpResponseRedirect( 
reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root_xml = tree.getroot() scan_dump = WebScansDb( scan_url=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, scanner="Webinspect", ) scan_dump.save() webinspect_xml_parser.xml_parser( project_id=project_id, scan_id=scan_id, root=root_xml, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("webscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "acunetix": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Acunetix Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root_xml = tree.getroot() scan_dump = WebScansDb( scan_url=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scanner="Acunetix", scan_status=scan_status, ) scan_dump.save() acunetix_xml_parser.xml_parser( project_id=project_id, scan_id=scan_id, root=root_xml, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("webscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "dependencycheck": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Dependencycheck Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() data = etree.parse(file) root = data.getroot() scan_dump = StaticScansDb( project_name=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, scanner="Dependencycheck", ) scan_dump.save() dependencycheck_report_parser.xml_parser(project_id=project_id, scan_id=scan_id, data=root) 
messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "checkmarx": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Checkmarx Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() data = etree.parse(file) root = data.getroot() scan_dump = StaticScansDb( project_name=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, ) scan_dump.save() checkmarx_xml_report_parser.checkmarx_report_xml( project_id=project_id, scan_id=scan_id, data=root) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "findbugs": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Findbugs Only XML file Support") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root = tree.getroot() scan_dump = StaticScansDb( project_name=target, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, ) scan_dump.save() findbugs_report_parser.xml_parser(project_id=project_id, scan_id=scan_id, root=root) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) elif scanner == "nikto": try: if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Nikto Only XML file Support") return 
HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() scan_dump = NiktoResultDb( date_time=date_time, scan_url=target, scan_id=scan_id, project_id=project_id, ) scan_dump.save() nikto_html_parser(file, project_id, scan_id) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("tools:nikto")) except: messages.error(request, "File Not Supported") return render(request, "report_upload/upload.html", {"all_project": all_project}) if scanner == "bandit_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Bandit Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Bandit" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "retirejs_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Retirejs Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Retirejs" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "clair_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Clair Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() 
data = json.loads(j) scanner = "Clair" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "trivy_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Trivy Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Trivy" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "npmaudit_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "NPM Audit Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Npmaudit" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "nodejsscan_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Nodejs scan Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Nodejsscan" upload( target, scan_id, date_time, 
project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "semgrepscan_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Semgrep scan Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Semgrep" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "tfsec_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Tfsec Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Tfsec" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "whitesource_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Whitesource Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Whitesource" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, 
"File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "inspec_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Inspec Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scan_dump = InspecScanDb( scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, ) scan_dump.save() inspec_report_json( data=data, project_id=project_id, scan_id=scan_id, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("inspec:inspec_list")) except: messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "dockle_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Dockle Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scan_dump = DockleScanDb( scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, ) scan_dump.save() dockle_report_json( data=data, project_id=project_id, scan_id=scan_id, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("dockle:dockle_list")) except: messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "gitlabsast_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Gitlabsast Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Gitlabsast" upload( target, scan_id, date_time, 
project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "gitlabcontainerscan_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error( request, "Gitlabcontainerscan Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Gitlabcontainerscan" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "gitlabsca_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Gitlabsca Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Gitlabsca" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "twistlock_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Twistlock Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Twistlock" upload( target, scan_id, date_time, project_id, scan_status, scanner, 
data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "brakeman_scan": try: if self.check_file_ext(str(file)) != ".json": messages.error(request, "Brakeman Only JSON file Supported") return HttpResponseRedirect( reverse("report_upload:upload")) date_time = datetime.now() j = file.read() data = json.loads(j) scanner = "Brakeman_scan" upload( target, scan_id, date_time, project_id, scan_status, scanner, data, ) messages.success(request, "File Uploaded") return HttpResponseRedirect( reverse("staticscanners:list_scans")) except Exception as e: print(e) messages.error(request, "File Not Supported") return render( request, "report_upload/upload.html", {"all_project": all_project}, ) if scanner == "openvas": if self.check_file_ext(str(file)) != ".xml": messages.error(request, "Openvas Only XML file Supported") return HttpResponseRedirect(reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root_xml = tree.getroot() hosts = OpenVas_Parser.get_hosts(root_xml) for host in hosts: scan_dump = NetworkScanDb( ip=host, scan_id=scan_id, date_time=date_time, project_id=project_id, scan_status=scan_status, scanner="Openvas", ) scan_dump.save() OpenVas_Parser.updated_xml_parser( project_id=project_id, scan_id=scan_id, root=root_xml, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("networkscanners:list_scans")) elif scanner == "nessus": if self.check_file_ext(str(file)) != ".nessus": messages.error(request, "Nessus Only .nessus file Supported") return HttpResponseRedirect(reverse("report_upload:upload")) date_time = datetime.now() tree = ET.parse(file) root_xml = tree.getroot() Nessus_Parser.updated_nessus_parser( root=root_xml, scan_id=scan_id, project_id=project_id, ) 
messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("networkscanners:list_scans")) elif scanner == "nmap": tree = ET.parse(file) root_xml = tree.getroot() nmap_parser.xml_parser( root=root_xml, scan_id=scan_id, project_id=project_id, ) messages.success(request, "File Uploaded") return HttpResponseRedirect(reverse("tools:nmap_scan"))
def zap_result_save(self, all_vuln, project_id, un_scanid, target_url):
    """
    Persist ZAP scan results into the Archery database.

    Two input modes, selected by the ZAP settings table:

    * ZAP integration disabled: ``all_vuln`` is an XML report string that is
      re-encoded to ASCII and handed to ``zap_xml_parser``.
    * ZAP integration enabled: ``all_vuln`` is a list of alert dicts from the
      ZAP API; each alert is de-duplicated and saved to ``WebScanResultsDb``,
      then the summary counters on ``WebScansDb`` are refreshed.

    :param all_vuln: XML report string OR list of ZAP alert dicts
    :param project_id: project the scan belongs to
    :param un_scanid: scan identifier stamped on every stored row
    :param target_url: scanned URL; part of the duplicate hash and summary
    :return: None
    """
    date_time = datetime.now()

    # The last ZapSettingsDb row wins; default is "not enabled".
    zap_enabled = False
    all_zap = ZapSettingsDb.objects.filter()
    for zap in all_zap:
        zap_enabled = zap.enabled

    if zap_enabled is False:
        # XML path: round-trip through tostring/fromstring to drop
        # non-ASCII bytes that would break downstream parsing.
        root_xml = ET.fromstring(all_vuln)
        en_root_xml = ET.tostring(root_xml, encoding="utf8").decode(
            "ascii", "ignore"
        )
        root_xml_en = ET.fromstring(en_root_xml)
        try:
            zap_xml_parser.xml_parser(
                project_id=project_id,
                scan_id=un_scanid,
                root=root_xml_en,
            )
            # Best-effort cleanup of the running ZAP instance.
            self.zap.core.delete_all_alerts()
        except Exception as e:
            print(e)
    else:
        # NOTE(review): module-level globals kept for backward compatibility.
        # An alert missing a key silently reuses the value left over from a
        # previous alert (or a previous call) -- confirm before tightening.
        global name, attack, wascid, description, reference, sourceid, \
            solution, param, method, url, messageId, alert, pluginId, \
            other, evidence, cweid, risk, vul_col, false_positive
        for data in all_vuln:
            # Copy the fields of interest out of the alert dict.
            for key, value in data.items():
                if key == "name":
                    name = value
                if key == "attack":
                    attack = value
                if key == "wascid":
                    wascid = value
                if key == "description":
                    description = value
                if key == "reference":
                    reference = value
                if key == "sourceid":
                    sourceid = value
                if key == "solution":
                    solution = value
                if key == "param":
                    param = value
                if key == "method":
                    method = value
                if key == "url":
                    url = value
                if key == "pluginId":
                    pluginId = value
                if key == "other":
                    other = value
                if key == "alert":
                    alert = value
                if key == "messageId":
                    messageId = value
                if key == "evidence":
                    evidence = value
                if key == "cweid":
                    cweid = value
                if key == "risk":
                    risk = value

            # Map ZAP risk to severity + bootstrap color class; anything
            # that is not High/Medium collapses to "Low".
            if risk == "High":
                vul_col = "danger"
                risk = "High"
            elif risk == "Medium":
                vul_col = "warning"
                risk = "Medium"
            elif risk == "info":
                vul_col = "info"
                risk = "Low"
            else:
                vul_col = "info"
                risk = "Low"

            # Duplicate detection key: title + severity + target URL.
            dup_data = name + risk + target_url
            duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()
            match_dup = (
                WebScanResultsDb.objects.filter(dup_hash=duplicate_hash)
                .values("dup_hash")
                .distinct()
            )
            lenth_match = len(match_dup)
            vuln_id = uuid.uuid4()
            if lenth_match == 0:
                # First sighting: store as an open finding.
                duplicate_vuln = "No"
                dump_data = WebScanResultsDb(
                    vuln_id=vuln_id,
                    severity_color=vul_col,
                    scan_id=un_scanid,
                    project_id=project_id,
                    severity=risk,
                    reference=reference,
                    url=target_url,
                    title=name,
                    solution=solution,
                    instance=evidence,
                    description=description,
                    false_positive="No",
                    jira_ticket="NA",
                    vuln_status="Open",
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    scanner="Zap",
                )
                dump_data.save()
            else:
                # Already known: store a row flagged as duplicate.
                duplicate_vuln = "Yes"
                dump_data = WebScanResultsDb(
                    vuln_id=vuln_id,
                    severity_color=vul_col,
                    scan_id=un_scanid,
                    project_id=project_id,
                    severity=risk,
                    reference=reference,
                    url=target_url,
                    title=name,
                    solution=solution,
                    instance="na",
                    description=description,
                    false_positive="Duplicate",
                    jira_ticket="NA",
                    vuln_status="Duplicate",
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    scanner="Zap",
                )
                dump_data.save()

            # NOTE(review): this flag is computed but never applied to the
            # saved row -- preserved as-is; confirm intended use.
            false_p = WebScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash
            )
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = "Yes"
            else:
                false_positive = "No"

            # Rewrite the stored evidence with HTML tags stripped.
            vul_dat = WebScanResultsDb.objects.filter(
                vuln_id=vuln_id, scanner="Zap"
            )
            full_data = []
            for row in vul_dat:
                stripped = re.sub(r"<[^>]*>", " ", row.instance)
                full_data.append("Evidence" + ": " + stripped)
            removed_list_data = ",".join(full_data)
            # BUG FIX: the original passed the Python list itself
            # (update(instance=full_data)), storing its repr in the DB;
            # the joined string prepared above is what belongs here.
            WebScanResultsDb.objects.filter(vuln_id=vuln_id).update(
                instance=removed_list_data
            )

        # Refresh the summary counters on the scan record.
        zap_all_vul = WebScanResultsDb.objects.filter(
            scan_id=un_scanid, false_positive="No", scanner="Zap"
        )
        duplicate_count = WebScanResultsDb.objects.filter(
            scan_id=un_scanid, vuln_duplicate="Yes"
        )
        total_high = len(zap_all_vul.filter(severity="High"))
        total_medium = len(zap_all_vul.filter(severity="Medium"))
        total_low = len(zap_all_vul.filter(severity="Low"))
        # NOTE(review): the risk mapping above never yields "Informational",
        # so this count is always 0 in practice -- confirm intent.
        total_info = len(zap_all_vul.filter(severity="Informational"))
        total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))
        total_vul = total_high + total_medium + total_low + total_info
        WebScansDb.objects.filter(scan_id=un_scanid).update(
            total_vul=total_vul,
            date_time=date_time,
            high_vul=total_high,
            medium_vul=total_medium,
            low_vul=total_low,
            info_vul=total_info,
            total_dup=total_duplicate,
            scan_url=target_url,
        )
        if total_vul == total_duplicate:
            # Everything was a duplicate: re-write counters, also stamping
            # the project_id (mirrors the original's second update).
            WebScansDb.objects.filter(scan_id=un_scanid).update(
                total_vul=total_vul,
                date_time=date_time,
                project_id=project_id,
                high_vul=total_high,
                medium_vul=total_medium,
                low_vul=total_low,
                total_dup=total_duplicate,
            )
def zap_result_save(self, all_vuln, project_id, un_scanid, username, target_url):
    """
    The function save all data in Archery Database
    :param all_vuln:
    :param project_id:
    :param un_scanid:
    :return:
    """
    # Per-user variant: all DB queries below are scoped by ``username``.
    date_time = datetime.now()
    # The last zap_settings_db row for this user wins; default "not enabled".
    zap_enabled = False
    all_zap = zap_settings_db.objects.filter(username=username)
    for zap in all_zap:
        zap_enabled = zap.enabled
    if zap_enabled is False:
        # XML path: re-encode to ASCII (dropping non-ASCII bytes) before
        # handing the report to the shared ZAP XML parser.
        root_xml = ET.fromstring(all_vuln)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        try:
            zap_xml_parser.xml_parser(username=username,
                                      project_id=project_id,
                                      scan_id=un_scanid,
                                      root=root_xml_en)
            # Best-effort cleanup of alerts held by the running ZAP instance.
            self.zap.core.delete_all_alerts()
        except Exception as e:
            print(e)
    else:
        # API path: ``all_vuln`` is iterated as a list of alert dicts.
        # NOTE(review): module-level globals mean an alert missing a key
        # silently reuses the value from a previous alert/call; ``reference``
        # is listed twice in the declaration.
        global name, attack, wascid, description, reference, \
            reference, sourceid, \
            solution, \
            param, \
            method, url, messageId, alert, pluginId, other, evidence, cweid, risk, vul_col, false_positive
        for data in all_vuln:
            # Copy the fields of interest out of the alert dict.
            for key, value in data.items():
                if key == 'name':
                    name = value
                if key == 'attack':
                    attack = value
                if key == 'wascid':
                    wascid = value
                if key == 'description':
                    description = value
                if key == 'reference':
                    reference = value
                if key == 'sourceid':
                    sourceid = value
                if key == 'solution':
                    solution = value
                if key == 'param':
                    param = value
                if key == 'method':
                    method = value
                if key == 'url':
                    url = value
                if key == 'pluginId':
                    pluginId = value
                if key == 'other':
                    other = value
                if key == 'alert':
                    alert = value
                # NOTE(review): duplicate of the 'attack' check above.
                if key == 'attack':
                    attack = value
                if key == 'messageId':
                    messageId = value
                if key == 'evidence':
                    evidence = value
                if key == 'cweid':
                    cweid = value
                if key == 'risk':
                    risk = value
            # Map ZAP risk to a severity label + bootstrap color class;
            # anything not High/Medium collapses to "Low".
            if risk == "High":
                vul_col = "danger"
                risk = "High"
            elif risk == 'Medium':
                vul_col = "warning"
                risk = "Medium"
            elif risk == 'info':
                vul_col = "info"
                risk = "Low"
            else:
                vul_col = "info"
                risk = "Low"
            # Duplicate detection key: title + severity + target URL.
            dup_data = name + risk + target_url
            duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
            match_dup = zap_scan_results_db.objects.filter(
                dup_hash=duplicate_hash).values('dup_hash').distinct()
            lenth_match = len(match_dup)
            vuln_id = uuid.uuid4()
            if lenth_match == 0:
                # First sighting: store as an open finding.
                duplicate_vuln = 'No'
                # NOTE(review): ``confidence`` is never assigned in this
                # function or its global list -- this raises NameError unless
                # some other code set it at module level. Confirm and fix.
                dump_data = zap_scan_results_db(vuln_id=vuln_id,
                                                vuln_color=vul_col,
                                                scan_id=un_scanid,
                                                project_id=project_id,
                                                confidence=confidence,
                                                wascid=wascid,
                                                risk=risk,
                                                reference=reference,
                                                url=url,
                                                name=name,
                                                solution=solution,
                                                param=url,
                                                sourceid=sourceid,
                                                pluginId=pluginId,
                                                alert=alert,
                                                description=description,
                                                false_positive='No',
                                                rescan='No',
                                                vuln_status='Open',
                                                dup_hash=duplicate_hash,
                                                vuln_duplicate=duplicate_vuln,
                                                evidence=evidence,
                                                username=username
                                                )
                dump_data.save()
            else:
                # Already known: store a row flagged as duplicate.
                duplicate_vuln = 'Yes'
                dump_data = zap_scan_results_db(vuln_id=vuln_id,
                                                vuln_color=vul_col,
                                                scan_id=un_scanid,
                                                project_id=project_id,
                                                confidence=confidence,
                                                wascid=wascid,
                                                risk=risk,
                                                reference=reference,
                                                url=url,
                                                name=name,
                                                solution=solution,
                                                param=url,
                                                sourceid=sourceid,
                                                pluginId=pluginId,
                                                alert=alert,
                                                description=description,
                                                false_positive='Duplicate',
                                                rescan='No',
                                                vuln_status='Duplicate',
                                                dup_hash=duplicate_hash,
                                                vuln_duplicate=duplicate_vuln,
                                                evidence=evidence,
                                                username=username
                                                )
                dump_data.save()
            # NOTE(review): this flag is computed but never applied to the
            # saved row.
            false_p = zap_scan_results_db.objects.filter(
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            # Re-read the just-saved row and rebuild an "Evidence: ..." string.
            vul_dat = zap_scan_results_db.objects.filter(username=username, vuln_id=vuln_id)
            full_data = []
            for data in vul_dat:
                key = 'Evidence'
                value = data.evidence
                instance = key + ': ' + value
                full_data.append(instance)
            # NOTE(review): ``removed_list_data`` (the joined string) is
            # unused; the update below stores the list itself in ``param``.
            removed_list_data = ','.join(full_data)
            zap_scan_results_db.objects.filter(username=username, vuln_id=vuln_id).update(param=full_data)
        # Refresh the summary counters on the per-user scan record.
        zap_all_vul = zap_scan_results_db.objects.filter(username=username, scan_id=un_scanid, false_positive='No')
        duplicate_count = zap_scan_results_db.objects.filter(username=username, scan_id=un_scanid, vuln_duplicate='Yes')
        total_high = len(zap_all_vul.filter(risk="High"))
        total_medium = len(zap_all_vul.filter(risk="Medium"))
        total_low = len(zap_all_vul.filter(risk="Low"))
        # NOTE(review): the risk mapping above never yields "Informational",
        # so this count is always 0 in practice.
        total_info = len(zap_all_vul.filter(risk="Informational"))
        total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
        total_vul = total_high + total_medium + total_low + total_info
        zap_scans_db.objects.filter(username=username, scan_scanid=un_scanid) \
            .update(total_vul=total_vul,
                    date_time=date_time,
                    high_vul=total_high,
                    medium_vul=total_medium,
                    low_vul=total_low,
                    info_vul=total_info,
                    total_dup=total_duplicate,
                    scan_url=target_url
                    )
        if total_vul == total_duplicate:
            # Everything was a duplicate: second update (without info_vul /
            # scan_url) mirrors the original control flow.
            zap_scans_db.objects.filter(username=username, scan_scanid=un_scanid) \
                .update(total_vul=total_vul,
                        date_time=date_time,
                        high_vul=total_high,
                        medium_vul=total_medium,
                        low_vul=total_low,
                        total_dup=total_duplicate
                        )
def zap_result_save(self, all_vuln, project_id, un_scanid, username):
    """
    Save a ZAP XML scan report into the Archery database.

    The XML string in ``all_vuln`` is re-encoded to ASCII (non-ASCII bytes
    dropped) and handed to ``zap_xml_parser``; afterwards the alerts held
    by the running ZAP instance are cleared. Parser/ZAP errors are printed
    and swallowed (best-effort behavior preserved from the original).

    :param all_vuln: ZAP XML report as a string
    :param project_id: project the scan belongs to
    :param un_scanid: scan identifier stamped on stored rows
    :param username: user the results are scoped to
    :return: None
    """
    # NOTE: ~100 lines of commented-out legacy per-alert processing were
    # removed here; the XML-parser path below was the only live code.
    root_xml = ET.fromstring(all_vuln)
    # Round-trip through tostring/fromstring to drop non-ASCII bytes that
    # would break downstream parsing.
    en_root_xml = ET.tostring(root_xml, encoding='utf8').decode('ascii', 'ignore')
    root_xml_en = ET.fromstring(en_root_xml)
    try:
        zap_xml_parser.xml_parser(username=username,
                                  project_id=project_id,
                                  scan_id=un_scanid,
                                  root=root_xml_en)
        # Best-effort cleanup of the running ZAP instance.
        self.zap.core.delete_all_alerts()
    except Exception as e:
        print(e)