def upload(target, scan_id, date_time, project_id, scan_status, scanner, data):
    """
    Persist a static-scan record and forward the raw report payload to the
    parser matching *scanner*.

    :param target: project/target name stored on the scan record
    :param scan_id: unique id of this scan
    :param date_time: timestamp of the scan
    :param project_id: id of the owning project
    :param scan_status: scan progress/status value (stored as-is)
    :param scanner: scanner label, e.g. "Bandit", "Trivy", "Gitlabsca"
    :param data: already-parsed report payload handed to the scanner parser
    :return: None
    """
    scan_dump = StaticScansDb(
        project_name=target,
        scan_id=scan_id,
        date_time=date_time,
        project_id=project_id,
        scan_status=scan_status,
        scanner=scanner,
    )
    scan_dump.save()

    # Dispatch table replaces the long if/elif chain.  This also fixes the
    # original's duplicated "Gitlabsca" branch: the first (reachable) copy
    # wrongly sent Gitlab SCA reports to gitlabsast_report_json, and the
    # correct gitlabsca_report_json call was dead code.
    report_parsers = {
        "Bandit": bandit_report_json,
        "Retirejs": retirejs_report_json,
        "Clair": clair_report_json,
        "Trivy": trivy_report_json,
        "Npmaudit": npmaudit_report_json,
        "Nodejsscan": nodejsscan_report_json,
        "Semgrep": semgrep_report_json,
        "Tfsec": tfsec_report_json,
        "Whitesource": whitesource_report_json,
        "Gitlabsast": gitlabsast_report_json,
        "Gitlabcontainerscan": gitlabcontainerscan_report_json,
        "Gitlabsca": gitlabsca_report_json,
        "Twistlock": twistlock_report_json,
        "Brakeman_scan": brakeman_report_json,
    }
    parser = report_parsers.get(scanner)
    if parser is not None:
        parser(data=data, project_id=project_id, scan_id=scan_id)
    # Unknown scanner labels are silently ignored, matching the original
    # if/elif chain which had no else branch.
def report_import(request):
    """
    Import an uploaded scanner JSON report into the database.

    On POST, reads the 'jsonfile' upload, stores a per-scanner scan record
    and feeds the decoded JSON to the matching report parser, then
    redirects to that scanner's listing page.  Any failure during decode,
    save or parse is reported as "File Not Supported".

    :param request: Django HttpRequest; POST fields: project_id, scanner,
        project_name; FILES: jsonfile.
    :return: redirect on success, otherwise the import form.
    """
    username = request.user.username
    all_project = project_db.objects.filter(username=username)

    # scanner key -> (scan model, JSON parser, redirect url name).
    # Every model is constructed with the same keyword set and every parser
    # is called with the same keyword set, so one code path serves all.
    importers = {
        "bandit_scan": (bandit_scan_db, bandit_report_json,
                        'banditscanner:banditscans_list'),
        "retirejs_scan": (retirejs_scan_db, retirejs_report_json,
                          'retirejsscanner:retirejsscans_list'),
        "clair_scan": (clair_scan_db, clair_report_json,
                       'clair:clair_list'),
        "trivy_scan": (trivy_scan_db,
                       trivy_json_report_parser.trivy_report_json,
                       'trivy:trivy_list'),
        "npmaudit_scan": (npmaudit_scan_db,
                          npm_audit_report_json.npmaudit_report_json,
                          'npmaudit:npmaudit_list'),
        "nodejsscan_scan": (nodejsscan_scan_db,
                            nodejsscan_report_json.nodejsscan_report_json,
                            'nodejsscan:nodejsscan_list'),
        "semgrepscan_scan": (semgrepscan_scan_db,
                             semgrep_json_report_parser.semgrep_report_json,
                             'semgrepscan:semgrepscan_list'),
        "tfsec_scan": (tfsec_scan_db,
                       tfsec_report_parser.tfsec_report_json,
                       'tfsec:tfsec_list'),
        "whitesource_scan": (whitesource_scan_db,
                             whitesource_json_report_parser.whitesource_report_json,
                             'whitesource:whitesource_list'),
        "inspec_scan": (inspec_scan_db, inspec_report_json,
                        'inspec:inspec_list'),
        "dockle_scan": (dockle_scan_db, dockle_report_json,
                        'dockle:dockle_list'),
        "gitlabsast_scan": (gitlabsast_scan_db,
                            gitlab_sast_json_report_parser.gitlabsast_report_json,
                            'gitlabsast:gitlabsast_list'),
        "gitlabcontainerscan_scan": (gitlabcontainerscan_scan_db,
                                     gitlab_container_json_report_parser.gitlabcontainerscan_report_json,
                                     'gitlabcontainerscan:gitlabcontainerscan_list'),
        "gitlabsca_scan": (gitlabsca_scan_db,
                           gitlab_sca_json_report_parser.gitlabsca_report_json,
                           'gitlabsca:gitlabsca_list'),
    }

    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        json_file = request.FILES['jsonfile']
        project_name = request.POST.get("project_name")
        scan_id = uuid.uuid4()
        scan_status = '100'

        entry = importers.get(scanner)
        if entry is not None:
            model, parser, redirect_name = entry
            try:
                # Same order as the original branches: decode first, then
                # persist the scan record, then parse the findings.
                date_time = datetime.now()
                data = json.loads(json_file.read())
                scan_dump = model(project_name=project_name,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
                scan_dump.save()
                parser(data=data,
                       project_id=project_id,
                       scan_id=scan_id,
                       username=username)
                return HttpResponseRedirect(reverse(redirect_name))
            except Exception:
                # Narrowed from the original bare ``except:`` which also
                # trapped SystemExit/KeyboardInterrupt.  Any decode/save/
                # parse failure is surfaced as an unsupported file.
                messages.error(request, "File Not Supported")
                return render(request, 'report_import.html',
                              {'all_project': all_project})

    return render(request, 'report_import.html', {'all_project': all_project})
def post(self, request, format=None):
    """
    Upload a scan report and store its findings under a fresh scan id.

    The report format depends on *scanner*: XML for the web/network
    scanners (ZAP, Burp, Arachni, Acunetix, Netsparker, WebInspect,
    Nessus, OpenVAS, FindBugs, Checkmarx, Dependency-Check), JSON for the
    SAST/container scanners, HTML for Nikto.

    :param request: DRF request; ``request.data`` carries project_id,
        scanner, filename (the raw report content) and scan_url.
    :param format: DRF format suffix (unused).
    :return: Response naming the scanner, project_id and new scan_id.
    """
    username = request.user.username
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"

    def _uploaded(message="Scan Data Uploaded"):
        # Success payload shared by every branch.
        return Response({
            "message": message,
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner,
        })

    def _ascii_xml_root(raw):
        # Round-trip the XML dropping non-ASCII bytes; some scanner
        # exports contain characters the downstream parsers choke on.
        root = ET.fromstring(raw)
        cleaned = ET.tostring(root, encoding='utf8').decode('ascii', 'ignore')
        return ET.fromstring(cleaned)

    # JSON-report scanners sharing the same model kwargs and parser kwargs:
    # scanner key -> (scan model, parser callable).
    json_importers = {
        "banditscan": (bandit_scan_db, bandit_report_json),
        "clair": (clair_scan_db, clair_json_report_parser.clair_report_json),
        "trivy": (trivy_scan_db, trivy_json_report_parser.trivy_report_json),
        "npmaudit": (npmaudit_scan_db,
                     npm_audit_report_json.npmaudit_report_json),
        "nodejsscan": (nodejsscan_scan_db,
                       nodejsscan_report_json.nodejsscan_report_json),
        "tfsec": (tfsec_scan_db, tfsec_report_parser.tfsec_report_json),
        "whitesource": (whitesource_scan_db,
                        whitesource_json_report_parser.whitesource_report_json),
        "inspec": (inspec_scan_db, inspec_json_parser.inspec_report_json),
        "dockle": (dockle_scan_db, dockle_json_parser.dockle_report_json),
    }

    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        zap_scans_db(scan_url=scan_url,
                     scan_scanid=scan_id,
                     date_time=date_time,
                     project_id=project_id,
                     vul_status=scan_status,
                     rescan='No',
                     username=username).save()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=_ascii_xml_root(file),
                                  username=username)
        return _uploaded("ZAP Scan Data Uploaded")
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        burp_scan_db(url=scan_url,
                     scan_id=scan_id,
                     date_time=date_time,
                     project_id=project_id,
                     scan_status=scan_status,
                     username=username).save()
        burp_xml_parser.burp_scan_data(_ascii_xml_root(file),
                                       project_id,
                                       scan_id,
                                       username=username)
        return _uploaded("Burp Scan Data Uploaded")
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        arachni_scan_db(url=scan_url,
                        scan_id=scan_id,
                        date_time=date_time,
                        project_id=project_id,
                        scan_status=scan_status,
                        username=username).save()
        # Arachni reports are parsed without the ASCII round-trip,
        # matching the original behavior.
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=ET.fromstring(file),
                                      username=username,
                                      target_url=scan_url)
        return _uploaded()
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        acunetix_scan_db(url=scan_url,
                         scan_id=scan_id,
                         date_time=date_time,
                         project_id=project_id,
                         scan_status=scan_status,
                         username=username).save()
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=_ascii_xml_root(file),
                                       username=username)
        return _uploaded()
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        netsparker_scan_db(url=scan_url,
                           scan_id=scan_id,
                           date_time=date_time,
                           project_id=project_id,
                           scan_status=scan_status,
                           username=username).save()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=ET.fromstring(file),
                                         username=username)
        return _uploaded()
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        webinspect_scan_db(url=scan_url,
                           scan_id=scan_id,
                           date_time=date_time,
                           project_id=project_id,
                           scan_status=scan_status,
                           username=username).save()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=ET.fromstring(file),
                                         username=username)
        return _uploaded()
    elif scanner in json_importers:
        model, parser = json_importers[scanner]
        date_time = datetime.datetime.now()
        model(project_name=scan_url,
              scan_id=scan_id,
              date_time=date_time,
              project_id=project_id,
              scan_status=scan_status,
              username=username).save()
        parser(project_id=project_id,
               scan_id=scan_id,
               data=json.loads(file),
               username=username)
        return _uploaded()
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        dependencycheck_scan_db(project_name=scan_url,
                                scan_id=scan_id,
                                date_time=date_time,
                                project_id=project_id,
                                scan_status=scan_status,
                                username=username).save()
        # lxml is used here (etree.XML), unlike the other XML branches.
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=etree.XML(xml_dat),
                                                 username=username)
        return _uploaded()
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        findbugs_scan_db(project_name=scan_url,
                         scan_id=scan_id,
                         date_time=date_time,
                         project_id=project_id,
                         scan_status=scan_status,
                         username=username).save()
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=ET.fromstring(file),
                                          username=username)
        return _uploaded()
    elif scanner == 'checkmarx':
        date_time = datetime.datetime.now()
        checkmarx_scan_db(project_name=scan_url,
                          scan_id=scan_id,
                          date_time=date_time,
                          project_id=project_id,
                          scan_status=scan_status,
                          username=username).save()
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=ET.fromstring(file),
            project_id=project_id,
            scan_id=scan_id,
            username=username)
        return _uploaded()
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        nessus_scan_db(scan_ip=scan_url,
                       scan_id=scan_id,
                       date_time=date_time,
                       project_id=project_id,
                       scan_status=scan_status,
                       username=username).save()
        Nessus_Parser.updated_nessus_parser(root=_ascii_xml_root(file),
                                            scan_id=scan_id,
                                            project_id=project_id,
                                            username=username)
        return _uploaded()
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        root_xml_en = _ascii_xml_root(file)
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            # BUGFIX: the original stored scan_id=host, so the per-host
            # scan records could never be joined with the findings the
            # parser stores under the real scan_id.
            scan_save_db(scan_ip=host,
                         scan_id=scan_id,
                         date_time=date_time,
                         project_id=project_id,
                         scan_status=scan_status,
                         username=username).save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en,
                                          username=username)
        return _uploaded()
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        nikto_result_db(date_time=date_time,
                        scan_url=scan_url,
                        scan_id=scan_id,
                        project_id=project_id,
                        username=username).save()
        nikto_html_parser(file, project_id, scan_id, username=username)
        return _uploaded()

    # Unknown scanner: original fell through to a bare success message.
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """
    Upload a scan report for a project identified by its UUID and store
    the findings under a fresh scan id.

    Resolves the project's internal id from the supplied UUID, decodes the
    uploaded report (raw string or UploadedFile), dispatches to the parser
    matching *scanner* and returns the scanner-family result payload
    (web/sast/network result helpers, or an escaped Response).

    :param request: DRF request; data carries project_id (UUID), scanner,
        filename (report content or UploadedFile) and scan_url.
    :param format: DRF format suffix (unused).
    :return: Response describing the stored scan, or "Scanner Not Found".
    """
    date_time = datetime.datetime.now()
    project_uu_id = request.data.get("project_id")
    # Map the external project UUID to the internal primary key;
    # ProjectDb.DoesNotExist propagates if the UUID is unknown.
    project_id = (ProjectDb.objects.filter(
        uu_id=project_uu_id).values("id").get()["id"])
    scanner = request.data.get("scanner")
    if isinstance(request.data.get("filename"), UploadedFile):
        file = request.data.get("filename").read().decode("utf-8")
    else:
        file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"

    def _ascii_xml_root(raw):
        # Strip non-ASCII bytes via an XML round-trip before parsing;
        # some scanner exports contain characters the parsers reject.
        root = ET.fromstring(raw)
        cleaned = ET.tostring(root, encoding="utf8").decode("ascii", "ignore")
        return ET.fromstring(cleaned)

    # JSON-report SAST scanners sharing one StaticScansDb/parser shape:
    # scanner key -> (scanner label, parser, extra model kwargs).
    # Only the bandit record carries date_time, matching the original.
    json_sast = {
        "banditscan": ("Bandit", bandit_report_json,
                       {"date_time": date_time}),
        "clair": ("Clair",
                  clair_json_report_parser.clair_report_json, {}),
        "trivy": ("Trivy",
                  trivy_json_report_parser.trivy_report_json, {}),
        "gitlabsca": ("Gitlabsca",
                      gitlab_sca_json_report_parser.gitlabsca_report_json, {}),
        "gitlabsast": ("Gitlabsast",
                       gitlab_sast_json_report_parser.gitlabsast_report_json, {}),
        "gitlabcontainerscan": (
            "Gitlabcontainerscan",
            gitlab_container_json_report_parser.gitlabcontainerscan_report_json,
            {}),
        "npmaudit": ("Npmaudit",
                     npm_audit_report_json.npmaudit_report_json, {}),
        "nodejsscan": ("Nodejsscan",
                       nodejsscan_report_json.nodejsscan_report_json, {}),
        "semgrepscan": ("Semgrepscan",
                        semgrep_json_report_parser.semgrep_report_json, {}),
        "tfsec": ("Tfsec", tfsec_report_parser.tfsec_report_json, {}),
        "whitesource": ("Whitesource",
                        whitesource_json_report_parser.whitesource_report_json,
                        {}),
        "twistlock": ("Twistlock",
                      twistlock_json_report_parser.twistlock_report_json, {}),
        "brakeman": ("Brakeman",
                     brakeman_json_report_parser.brakeman_report_json, {}),
    }

    if scanner == "zap_scan":
        WebScansDb(scan_url=scan_url,
                   scan_id=scan_id,
                   project_id=project_id,
                   scan_status=scan_status,
                   scanner="Zap").save()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=_ascii_xml_root(file))
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "burp_scan":
        WebScansDb(scan_url=scan_url,
                   scan_id=scan_id,
                   project_id=project_id,
                   scan_status=scan_status,
                   scanner="Burp").save()
        burp_xml_parser.burp_scan_data(_ascii_xml_root(file),
                                       project_id,
                                       scan_id)
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "arachni":
        WebScansDb(scan_url=scan_url,
                   scan_id=scan_id,
                   project_id=project_id,
                   scan_status=scan_status,
                   scanner="Arachni").save()
        # Arachni output is parsed without the ASCII round-trip.
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=ET.fromstring(file),
                                      target_url=scan_url)
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "acunetix":
        WebScansDb(scan_url=scan_url,
                   scan_id=scan_id,
                   project_id=project_id,
                   scan_status=scan_status,
                   scanner="Acunetix").save()
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=_ascii_xml_root(file))
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "netsparker":
        WebScansDb(scan_url=scan_url,
                   scan_id=scan_id,
                   project_id=project_id,
                   scan_status=scan_status,
                   scanner="Netsparker").save()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=ET.fromstring(file))
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "webinspect":
        WebScansDb(scan_url=scan_url,
                   scan_id=scan_id,
                   project_id=project_id,
                   scan_status=scan_status,
                   scanner="Webinspect").save()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=ET.fromstring(file))
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner in json_sast:
        label, parser, extra = json_sast[scanner]
        StaticScansDb(project_name=scan_url,
                      scan_id=scan_id,
                      project_id=project_id,
                      scan_status=scan_status,
                      scanner=label,
                      **extra).save()
        parser(project_id=project_id,
               scan_id=scan_id,
               data=json.loads(file))
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "dependencycheck":
        StaticScansDb(project_name=scan_url,
                      scan_id=scan_id,
                      project_id=project_id,
                      scan_status=scan_status,
                      scanner="Dependencycheck").save()
        # lxml (etree.XML) is used here, unlike the other XML branches.
        xml_dat = bytes(bytearray(file, encoding="utf-8"))
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=etree.XML(xml_dat))
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "findbugs":
        StaticScansDb(project_name=scan_url,
                      scan_id=scan_id,
                      project_id=project_id,
                      scan_status=scan_status,
                      scanner="Findbugs").save()
        findbugs_report_parser = FindsecbugsParser(project_id=project_id,
                                                   scan_id=scan_id,
                                                   root=ET.fromstring(file))
        findbugs_report_parser.xml_parser()
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "checkmarx":
        StaticScansDb(project_name=scan_url,
                      scan_id=scan_id,
                      project_id=project_id,
                      scan_status=scan_status,
                      scanner="Checkmarx").save()
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=ET.fromstring(file),
            project_id=project_id,
            scan_id=scan_id)
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "inspec":
        InspecScanDb(project_name=scan_url,
                     scan_id=scan_id,
                     project_id=project_id,
                     scan_status=scan_status).save()
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=json.loads(file))
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "dockle":
        DockleScanDb(project_name=scan_url,
                     scan_id=scan_id,
                     project_id=project_id,
                     scan_status=scan_status).save()
        dockle_json_parser.dockle_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=json.loads(file))
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "nessus":
        # NOTE(review): no scan record is saved for nessus here, unlike
        # the other branches — matches the original; confirm intended.
        Nessus_Parser.updated_nessus_parser(root=_ascii_xml_root(file),
                                            scan_id=scan_id,
                                            project_id=project_id)
        return self.network_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "openvas":
        root_xml_en = _ascii_xml_root(file)
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            NetworkScanDb(ip=host,
                          scan_id=scan_id,
                          project_id=project_id,
                          scan_status=scan_status,
                          scanner="Openvas").save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en)
        return self.network_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "nikto":
        NiktoResultDb(scan_url=scan_url,
                      scan_id=scan_id,
                      project_id=project_id).save()
        nikto_html_parser(file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    else:
        return Response({"message": "Scanner Not Found"})
def report_import(request):
    """
    Import an externally generated scanner JSON report.

    On GET, render the import form. On POST, parse the uploaded JSON
    file with the parser matching the selected scanner, record a scan
    entry for the current user, and redirect to that scanner's result
    list. An unrecognized scanner or a non-POST request falls through
    to the import form.

    :param request: Django HttpRequest; POST carries ``project_id``,
        ``scanner``, ``project_name`` and the uploaded ``jsonfile``.
    :return: HttpResponseRedirect to the scanner's list view on
        success, otherwise the rendered ``report_import.html`` form.
    """
    username = request.user.username
    all_project = project_db.objects.filter(username=username)

    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        json_file = request.FILES['jsonfile']
        project_name = request.POST.get("project_name")
        scan_id = uuid.uuid4()
        scan_status = '100'

        # Dispatch table: scanner key -> (scan-record model, report
        # parser, redirect URL name). Every parser shares the same
        # keyword signature, which lets the nine duplicated branches
        # collapse into one code path.
        handlers = {
            "bandit_scan": (bandit_scan_db,
                            bandit_report_json,
                            'banditscanner:banditscans_list'),
            "retirejs_scan": (retirejs_scan_db,
                              retirejs_report_json,
                              'retirejsscanner:retirejsscans_list'),
            "clair_scan": (clair_scan_db,
                           clair_report_json,
                           'clair:clair_list'),
            "trivy_scan": (trivy_scan_db,
                           trivy_json_report_parser.trivy_report_json,
                           'trivy:trivy_list'),
            "npmaudit_scan": (npmaudit_scan_db,
                              npm_audit_report_json.npmaudit_report_json,
                              'npmaudit:npmaudit_list'),
            "nodejsscan_scan": (nodejsscan_scan_db,
                                nodejsscan_report_json.nodejsscan_report_json,
                                'nodejsscan:nodejsscan_list'),
            "tfsec_scan": (tfsec_scan_db,
                           tfsec_report_parser.tfsec_report_json,
                           'tfsec:tfsec_list'),
            "inspec_scan": (inspec_scan_db,
                            inspec_report_json,
                            'inspec:inspec_list'),
        }

        handler = handlers.get(scanner)
        if handler is not None:
            scan_model, parse_report, redirect_name = handler

            # Parse BEFORE saving the scan record so a malformed file
            # neither leaves an orphan scan row nor raises a 500.
            # json.loads raises ValueError (JSONDecodeError) on bad
            # JSON; a binary upload can also fail to decode as text.
            try:
                data = json.loads(json_file.read())
            except (ValueError, UnicodeDecodeError):
                messages.error(request, "File Not Supported")
                return render(request, 'report_import.html',
                              {'all_project': all_project})

            scan_dump = scan_model(project_name=project_name,
                                   scan_id=scan_id,
                                   date_time=datetime.now(),
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
            scan_dump.save()
            parse_report(data=data,
                         project_id=project_id,
                         scan_id=scan_id,
                         username=username)
            return HttpResponseRedirect(reverse(redirect_name))

    return render(request, 'report_import.html',
                  {'all_project': all_project})