def xml_upload(request):
    """Handle report-file uploads for the supported scanners.

    Reads the uploaded report from the POST request, stores a scan record
    for the selected scanner, and feeds the parsed report to that scanner's
    parser. On success the user is flashed "File Uploaded" and redirected to
    the scanner's scan list; on any parse/database error the user is flashed
    "File Not Supported" and the upload form is re-rendered.

    :param request: Django HTTP request. POST carries ``project_id``,
        ``scanner``, ``scan_url`` and the ``xmlfile`` upload.
    :return: ``HttpResponseRedirect`` on success, rendered
        ``upload_xml.html`` otherwise.
    """
    username = request.user.username
    all_project = project_db.objects.filter(username=username)

    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"  # uploaded reports are complete, so 100%

        def _ascii_root(f):
            # ZAP/Burp reports may contain non-ASCII bytes that break the
            # downstream parsers: re-serialise and drop them.
            raw_root = ET.parse(f).getroot()
            cleaned = ET.tostring(raw_root, encoding='utf8').decode('ascii',
                                                                    'ignore')
            return ET.fromstring(cleaned)

        # Every branch previously wrapped its body in an identical bare
        # ``except:`` — hoist one narrowed handler around the dispatch.
        try:
            if scanner == "zap_scan":
                root_xml_en = _ascii_root(xml_file)
                date_time = datetime.now()
                zap_scans_db(username=username,
                             scan_url=scan_url,
                             scan_scanid=scan_id,
                             date_time=date_time,
                             project_id=project_id,
                             vul_status=scan_status,
                             rescan='No').save()
                zap_xml_parser.xml_parser(username=username,
                                          project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('zapscanner:zap_scan_list'))
            elif scanner == "burp_scan":
                date_time = datetime.now()
                root_xml_en = _ascii_root(xml_file)
                burp_scan_db(username=username,
                             url=scan_url,
                             scan_id=scan_id,
                             date_time=date_time,
                             project_id=project_id,
                             scan_status=scan_status).save()
                burp_xml_parser.burp_scan_data(root_xml_en,
                                               project_id,
                                               scan_id,
                                               username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('burpscanner:burp_scan_list'))
            elif scanner == "arachni":
                date_time = datetime.now()
                root_xml = ET.parse(xml_file).getroot()
                arachni_scan_db(username=username,
                                url=scan_url,
                                scan_id=scan_id,
                                date_time=date_time,
                                project_id=project_id,
                                scan_status=scan_status).save()
                arachni_xml_parser.xml_parser(username=username,
                                              project_id=project_id,
                                              scan_id=scan_id,
                                              root=root_xml,
                                              target_url=scan_url)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('arachniscanner:arachni_scan_list'))
            elif scanner == 'netsparker':
                date_time = datetime.now()
                root_xml = ET.parse(xml_file).getroot()
                netsparker_scan_db(username=username,
                                   url=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status).save()
                netsparker_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('netsparkerscanner:netsparker_scan_list'))
            elif scanner == 'webinspect':
                date_time = datetime.now()
                root_xml = ET.parse(xml_file).getroot()
                webinspect_scan_db(username=username,
                                   url=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status).save()
                webinspect_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('webinspectscanner:webinspect_scan_list'))
            elif scanner == 'acunetix':
                date_time = datetime.now()
                root_xml = ET.parse(xml_file).getroot()
                acunetix_scan_db(username=username,
                                 url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status).save()
                acunetix_xml_parser.xml_parser(username=username,
                                               project_id=project_id,
                                               scan_id=scan_id,
                                               root=root_xml)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('acunetixscanner:acunetix_scan_list'))
            elif scanner == 'dependencycheck':
                date_time = datetime.now()
                # Dependency-Check reports go through the lxml parser.
                root = etree.parse(xml_file).getroot()
                dependencycheck_scan_db(project_name=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status,
                                        username=username).save()
                dependencycheck_report_parser.xml_parser(
                    project_id=project_id,
                    scan_id=scan_id,
                    data=root,
                    username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('dependencycheck:dependencycheck_list'))
            elif scanner == 'checkmarx':
                date_time = datetime.now()
                root = etree.parse(xml_file).getroot()
                checkmarx_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username).save()
                checkmarx_xml_report_parser.checkmarx_report_xml(
                    project_id=project_id,
                    scan_id=scan_id,
                    data=root,
                    username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('checkmarx:checkmarx_list'))
            elif scanner == 'findbugs':
                date_time = datetime.now()
                root = ET.parse(xml_file).getroot()
                findbugs_scan_db(project_name=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status,
                                 username=username).save()
                findbugs_report_parser.xml_parser(project_id=project_id,
                                                  scan_id=scan_id,
                                                  root=root,
                                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('findbugs:findbugs_list'))
            elif scanner == 'nikto':
                date_time = datetime.now()
                # Nikto uploads are HTML, parsed directly from the file.
                nikto_result_db(date_time=date_time,
                                scan_url=scan_url,
                                scan_id=scan_id,
                                project_id=project_id).save()
                nikto_html_parser(xml_file, project_id, scan_id,
                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('tools:nikto'))
        except Exception:
            # Was a bare ``except:`` per branch; narrowed so Ctrl-C and
            # SystemExit are no longer swallowed. Same user-visible outcome.
            messages.error(request, "File Not Supported")
            return render(request, 'upload_xml.html',
                          {'all_project': all_project})

    return render(request, 'upload_xml.html', {'all_project': all_project})
def launch_arachni_scan(target, project_id, rescan_id, rescan, scan_id):
    """Launch an Arachni scan against *target* and poll until completion.

    Starts a scan via the Arachni REST API, records a scan row, updates the
    progress percentage while the scan runs, and on completion feeds the XML
    report to ``arachni_xml_parser``.

    :param target: URL to scan.
    :param project_id: owning project identifier.
    :param rescan_id: identifier of the scan being re-run (or None-like).
    :param rescan: rescan flag stored on the scan row.
    :param scan_id: identifier assigned to this scan run.
    :return: None.
    """
    arachni_hosts = None
    arachni_ports = None
    # Last settings row wins — mirrors the original loop over every row.
    for arachni in arachni_settings_db.objects.all():
        arachni_hosts = arachni.arachni_url
        arachni_ports = arachni.arachni_port
    arachni = PyArachniapi.arachniAPI(arachni_hosts, arachni_ports)
    check = [
        "xss_event",
        "xss",
        "xss_script_context",
        "xss_tag",
        "xss_path",
        "xss_dom_script_context",
        "xss_dom",
        "sql_injection",
        "sql_injection_differential",
        "sql_injection_timing",
        "csrf",
        "common_files",
        "directory_listing",
    ]
    data = {"url": target, "checks": check, "audit": {}}
    scan_launch = arachni.scan_launch(json.dumps(data))
    time.sleep(3)
    # Python-3 print functions replace the former Python-2 print statements.
    print("Scan Launched !!!!!")
    date_time = datetime.now()
    try:
        arachni_scan_db(
            project_id=project_id,
            url=target,
            scan_id=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
        ).save()
    except Exception as e:
        print(e)
    # ``dict.viewitems()`` was Python-2 only; ``items()`` is the py3 form.
    scan_data = scan_launch.data
    for key, value in scan_data.items():
        if key == 'id':
            scan_run_id = value
    scan_sum = arachni.scan_summary(id=scan_run_id).data
    for key, value in scan_sum.items():
        if key == 'status':
            scan_status = value
    while scan_status != 'done':
        status = '0'
        stats = scan_sum['statistics']['browser_cluster']
        if stats['queued_job_count'] and stats['total_job_time']:
            # NOTE(review): under Python 2 this was integer (floor) division;
            # the newer Python-3 variant of this function also uses true
            # division here, so that behaviour is kept.
            status = 100 - stats['queued_job_count'] * 100 / \
                stats['total_job_time']
        arachni_scan_db.objects.filter(scan_id=scan_id).update(
            scan_status=status)
        scan_sum = arachni.scan_summary(id=scan_run_id).data
        for key, value in scan_sum.items():
            if key == 'status':
                scan_status = value
        time.sleep(3)
        print("scan_di", scan_run_id)
    if scan_status == 'done':
        xml_report = arachni.scan_xml_report(id=scan_run_id).data
        root_xml = ET.fromstring(xml_report)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        arachni_scan_db.objects.filter(scan_id=scan_id).update(
            scan_status='100')
        print("Data uploaded !!!!")
    print(scan_run_id)
def xml_upload(request):
    """Handle XML/HTML report uploads for the supported scanners.

    Stores a scan record for the selected scanner, parses the uploaded
    report and redirects to that scanner's scan list.

    :param request: Django HTTP request; POST carries ``project_id``,
        ``scanner``, ``scan_url`` and the ``xmlfile`` upload.
    :return: ``HttpResponseRedirect`` on success, rendered upload form
        otherwise.
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"  # uploaded reports are complete, so 100%
        if scanner == "zap_scan":
            date_time = datetime.now()
            scan_dump = zap_scans_db(scan_url=scan_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     vul_status=scan_status,
                                     rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/zapscanner/zap_scan_list/")
        elif scanner == "burp_scan":
            date_time = datetime.now()
            scan_dump = burp_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            # BUG FIX: the original passed an undefined ``target_url`` here
            # (NameError); the uploaded form field is ``scan_url``.
            do_xml_data = burp_plugin.burp_scans(project_id,
                                                 scan_url,
                                                 scan_id)
            do_xml_data.burp_scan_data(root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/burpscanner/burp_scan_list")
        elif scanner == "arachni":
            date_time = datetime.now()
            scan_dump = arachni_scan_db(url=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/arachniscanner/arachni_scan_list")
        elif scanner == 'netsparker':
            date_time = datetime.now()
            scan_dump = netsparker_scan_db(url=scan_url,
                                           scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            netsparker_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id,
                                             root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/netsparkerscanner/netsparker_scan_list/")
        elif scanner == 'webinspect':
            date_time = datetime.now()
            scan_dump = webinspect_scan_db(url=scan_url,
                                           scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            webinspect_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id,
                                             root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/webinspectscanner/webinspect_scan_list/")
        elif scanner == 'acunetix':
            date_time = datetime.now()
            scan_dump = acunetix_scan_db(url=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            acunetix_xml_parser.xml_parser(project_id=project_id,
                                           scan_id=scan_id,
                                           root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect("/acunetixscanner/acunetix_scan_list/")
        elif scanner == 'dependencycheck':
            date_time = datetime.now()
            scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                                scan_id=scan_id,
                                                date_time=date_time,
                                                project_id=project_id,
                                                scan_status=scan_status)
            scan_dump.save()
            # Dependency-Check reports go through the lxml parser; this
            # version hands the whole ElementTree to the parser.
            data = etree.parse(xml_file)
            dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                     scan_id=scan_id,
                                                     data=data)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/dependencycheck/dependencycheck_list")
        elif scanner == 'findbugs':
            date_time = datetime.now()
            scan_dump = findbugs_scan_db(project_name=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root = tree.getroot()
            findbugs_report_parser.xml_parser(project_id=project_id,
                                              scan_id=scan_id,
                                              root=root)
            print("Saved scan data")
            return HttpResponseRedirect("/findbugs/findbugs_list")
        elif scanner == 'nikto':
            date_time = datetime.now()
            # Nikto uploads are HTML, parsed directly from the file.
            scan_dump = nikto_result_db(
                date_time=date_time,
                scan_url=scan_url,
                scan_id=scan_id,
                project_id=project_id,
            )
            scan_dump.save()
            nikto_html_parser(xml_file, project_id, scan_id)
            print("Saved scan data")
            return HttpResponseRedirect("/tools/nikto/")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def launch_arachni_scan(target, project_id, rescan_id, rescan, scan_id, user):
    """Launch an Arachni scan against *target* and poll it to completion.

    Starts the scan through the authenticated Arachni REST API, notifies the
    user, records a ``WebScansDb`` row, updates the stored progress
    percentage while the scan runs, and on completion parses the XML report
    via ``arachni_xml_parser``.

    :param target: URL to scan.
    :param project_id: owning project identifier.
    :param rescan_id: identifier of the scan being re-run.
        NOTE(review): accepted but never used in this version — confirm
        whether it should be stored on the scan row.
    :param rescan: rescan flag. NOTE(review): also unused here.
    :param scan_id: identifier assigned to this scan run.
    :param user: recipient for django-notifications messages.
    :return: None.
    """
    # Module-level globals keep the last run's id/status visible elsewhere;
    # they are (re)assigned below before first use on the success path.
    global scan_run_id, scan_status
    arachni_hosts = None
    arachni_ports = None
    arachni_user = ""
    arachni_pass = ""
    # Last settings row wins — the loop overwrites on each iteration.
    all_arachni = ArachniSettingsDb.objects.filter()
    for arachni in all_arachni:
        arachni_hosts = arachni.arachni_url
        arachni_ports = arachni.arachni_port
        arachni_user = arachni.arachni_user
        arachni_pass = arachni.arachni_pass
    arachni = PyArachniapi.arachniAPI(arachni_hosts, arachni_ports,
                                      arachni_user, arachni_pass)
    # Checks requested from Arachni. NOTE(review): "common_files" and
    # "directory_listing" appear twice — presumably harmless duplicates.
    check = [
        "xss_event",
        "xss",
        "xss_script_context",
        "xss_tag",
        "xss_path",
        "xss_dom_script_context",
        "xss_dom",
        "sql_injection",
        "sql_injection_differential",
        "sql_injection_timing",
        "no_sql_injection",
        "no_sql_injection_differential",
        "code_injection",
        "code_injection_timing",
        "ldap_injection",
        "path_traversal",
        "file_inclusion",
        "response_splitting",
        "os_cmd_injection",
        "os_cmd_injection_timing",
        "rfi",
        "unvalidated_redirect",
        "unvalidated_redirect_dom",
        "xpath_injection",
        "xxe",
        "source_code_disclosure",
        "allowed_methods",
        "backup_files",
        "backup_directories",
        "common_admin_interfaces",
        "common_directories",
        "common_files",
        "http_put",
        "webdav",
        "xst",
        "credit_card",
        "cvs_svn_users",
        "private_ip",
        "backdoors",
        "htaccess_limit",
        "interesting_responses",
        "html_objects",
        "emails",
        "ssn",
        "directory_listing",
        "mixed_resource",
        "insecure_cookies",
        "http_only_cookies",
        "password_autocomplete",
        "origin_spoof_access_restriction_bypass",
        "form_upload",
        "localstart_asp",
        "cookie_set_for_parent_domain",
        "hsts",
        "x_frame_options",
        "insecure_cors_policy",
        "insecure_cross_domain_policy_access",
        "insecure_cross_domain_policy_headers",
        "insecure_client_access_policy",
        "csrf",
        "common_files",
        "directory_listing",
    ]
    data = {"url": target, "checks": check, "audit": {}}
    d = json.dumps(data)
    scan_launch = arachni.scan_launch(d)
    # Give the scanner a moment to register the job before reading its id.
    time.sleep(3)
    try:
        # ``scan_launch.data`` raises if the API was unreachable; treat any
        # failure here as "no Arachni connection" and bail out.
        scan_data = scan_launch.data
        for key, value in scan_data.items():
            if key == "id":
                scan_run_id = value
        notify.send(user, recipient=user,
                    verb="Arachni Scan Started on URL %s" % target)
    except Exception:
        notify.send(user, recipient=user,
                    verb="Arachni Connection Not found")
        print("Arachni Connection Not found")
        return
    date_time = datetime.now()
    try:
        save_all_scan = WebScansDb(
            project_id=project_id,
            scan_url=target,
            scan_id=scan_id,
            date_time=date_time,
            scanner="Arachni",
        )
        save_all_scan.save()
    except Exception as e:
        # Best-effort save: a DB failure is logged but does not stop polling.
        print(e)
    # Re-read the run id from the launch payload (same loop as above).
    scan_data = scan_launch.data
    for key, value in scan_data.items():
        if key == "id":
            scan_run_id = value
    scan_sum = arachni.scan_summary(id=scan_run_id).data
    for key, value in scan_sum.items():
        if key == "status":
            scan_status = value
    # Poll every 3s, pushing a rough completion percentage derived from the
    # browser-cluster queue statistics into the scan row.
    while scan_status != "done":
        status = "0"
        if (scan_sum["statistics"]["browser_cluster"]["queued_job_count"]
                and scan_sum["statistics"]["browser_cluster"]
                ["total_job_time"]):
            status = (
                100 -
                scan_sum["statistics"]["browser_cluster"]["queued_job_count"]
                * 100 /
                scan_sum["statistics"]["browser_cluster"]["total_job_time"])
        WebScansDb.objects.filter(
            scan_id=scan_id,
            scanner="Arachni").update(scan_status=int(status))
        scan_sum = arachni.scan_summary(id=scan_run_id).data
        for key, value in scan_sum.items():
            if key == "status":
                scan_status = value
        time.sleep(3)
    if scan_status == "done":
        # Fetch the finished report, parse it, and mark the scan 100% done.
        xml_report = arachni.scan_xml_report(id=scan_run_id).data
        root_xml = ET.fromstring(xml_report)
        arachni_xml_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml,
            target_url=target,
        )
        WebScansDb.objects.filter(scan_id=scan_id,
                                  scanner="Arachni").update(
                                      scan_status="100")
        print("Data uploaded !!!!")
        notify.send(user, recipient=user,
                    verb="Arachni Scan Completed on URL %s" % target)
def post(self, request, format=None):
    """API endpoint: ingest an uploaded scanner report.

    Dispatches on the ``scanner`` field, stores a scan record, feeds the
    report (XML path or JSON string, depending on the scanner) to that
    scanner's parser and returns a JSON acknowledgement.

    :param request: DRF request; data carries ``project_id``, ``scanner``,
        ``filename`` (report path/content) and ``scan_url``.
    :param format: DRF format suffix (unused).
    :return: DRF ``Response`` describing the stored scan.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"  # uploaded reports are complete, so 100%
    # BUG FIX: ``print xml_file`` / ``print scanner`` were Python-2 print
    # statements — a SyntaxError under Python 3.
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # Bandit reports are JSON, not XML.
        data = json.loads(xml_file)
        bandit_report_json(data=data, project_id=project_id, scan_id=scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        # Dependency-Check reports go through the lxml parser.
        data = etree.parse(xml_file)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        # Clair reports are JSON.
        data = json.loads(xml_file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # InSpec reports are JSON.
        data = json.loads(xml_file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        # Nikto uploads are HTML, parsed directly from the file.
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        nikto_html_parser(xml_file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    return Response({"message": "Scan Data Uploaded"})
def xml_upload(request):
    """Handle XML report uploads for ZAP, Burp and Arachni.

    Stores a scan record for the selected scanner, parses the uploaded XML
    and redirects to the corresponding scan list.

    :param request: Django HTTP request; POST carries ``project_id``,
        ``scanner``, ``scan_url`` and the ``xmlfile`` upload.
    :return: ``HttpResponseRedirect`` on success, rendered upload form
        otherwise.
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"  # uploaded reports are complete, so 100%
        if scanner == "zap_scan":
            date_time = timezone.now()
            scan_dump = zap_scans_db(scan_url=scan_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     vul_status=scan_status,
                                     rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/webscanners/scans_list/")
        elif scanner == "burp_scan":
            date_time = timezone.now()
            scan_dump = burp_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            # BUG FIX: the original passed an undefined ``target_url`` here
            # (NameError); the uploaded form field is ``scan_url``.
            do_xml_data = burp_plugin.burp_scans(project_id,
                                                 scan_url,
                                                 scan_id)
            do_xml_data.burp_scan_data(root_xml)
            # BUG FIX: Python-2 print statements converted to functions.
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/burp_scan_list")
        elif scanner == "arachni":
            print(scanner)
            print(xml_file)
            print(scan_url)
            date_time = timezone.now()
            scan_dump = arachni_scan_db(url=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/arachni_scan_list")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def post(self, request, format=None):
    """API endpoint: ingest an uploaded scanner report.

    Dispatches on the ``scanner`` field, stores a scan record, feeds the
    report (XML path or JSON string for Bandit) to that scanner's parser
    and returns a JSON acknowledgement.

    :param request: DRF request; data carries ``project_id``, ``scanner``,
        ``filename`` (report path/content) and ``scan_url``.
    :param format: DRF format suffix (unused).
    :return: DRF ``Response`` acknowledging the upload.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"  # uploaded reports are complete, so 100%
    # BUG FIX: ``print xml_file`` / ``print scanner`` were Python-2 print
    # statements — a SyntaxError under Python 3.
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # Bandit reports are JSON, not XML.
        data = json.loads(xml_file)
        bandit_report_json(data=data, project_id=project_id, scan_id=scan_id)
        return Response({"message": "Scan Data Uploaded"})
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """
    Import an uploaded scan report under the requesting user's account.

    Reads the scanner name, project id, target URL and the raw report body
    from the request, saves a per-scanner scan record tagged with the
    requesting username, then hands the report to that scanner's parser.

    :param request: DRF request with ``project_id``, ``scanner``,
        ``filename`` (raw report text) and ``scan_url``.
    :param format: DRF format suffix (unused).
    :return: ``Response`` describing the upload outcome.
    """
    username = request.user.username
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    # Fresh id per upload; "100" marks the scan as complete.
    scan_id = uuid.uuid4()
    scan_status = "100"
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No',
                                 username=username)
        scan_dump.save()
        # Round-trip through ASCII to drop characters the parser
        # cannot handle.
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en,
                                  username=username)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status,
                                 username=username)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en,
                                       project_id,
                                       scan_id,
                                       username=username)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status,
                                    username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml,
                                      username=username,
                                      target_url=scan_url)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=root_xml_en,
                                       username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml,
                                         username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml,
                                         username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id,
                           username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status,
                                            username=username)
        scan_dump.save()
        # Dependency-Check reports go through lxml (etree), not ET.
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data,
                                                 username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml,
                                          username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'checkmarx':
        date_time = datetime.datetime.now()
        scan_dump = checkmarx_scan_db(project_name=scan_url,
                                      scan_id=scan_id,
                                      date_time=date_time,
                                      project_id=project_id,
                                      scan_status=scan_status,
                                      username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=root_xml,
            project_id=project_id,
            scan_id=scan_id,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'trivy':
        date_time = datetime.datetime.now()
        scan_dump = trivy_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        trivy_json_report_parser.trivy_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'npmaudit':
        date_time = datetime.datetime.now()
        scan_dump = npmaudit_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        data = json.loads(file)
        npm_audit_report_json.npmaudit_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nodejsscan':
        date_time = datetime.datetime.now()
        scan_dump = nodejsscan_scan_db(project_name=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        data = json.loads(file)
        nodejsscan_report_json.nodejsscan_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'tfsec':
        date_time = datetime.datetime.now()
        scan_dump = tfsec_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        tfsec_report_parser.tfsec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'whitesource':
        date_time = datetime.datetime.now()
        scan_dump = whitesource_scan_db(project_name=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status,
                                        username=username)
        scan_dump.save()
        data = json.loads(file)
        whitesource_json_report_parser.whitesource_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dockle':
        date_time = datetime.datetime.now()
        scan_dump = dockle_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        dockle_json_parser.dockle_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.updated_nessus_parser(root=root_xml_en,
                                            scan_id=scan_id,
                                            project_id=project_id,
                                            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        # One scan record per host found in the report.
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            # Fix: this was scan_id=host, which stored the host address
            # as the scan id while the parser below records findings
            # under the real scan_id, orphaning every host row (the
            # parallel NetworkScanDb branch elsewhere in this file uses
            # scan_id=scan_id).
            scan_dump = scan_save_db(scan_ip=host,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
            scan_dump.save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en,
                                          username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(date_time=date_time,
                                    scan_url=scan_url,
                                    scan_id=scan_id,
                                    project_id=project_id,
                                    username=username)
        scan_dump.save()
        # Nikto uploads are HTML reports, parsed directly from the text.
        nikto_html_parser(file, project_id, scan_id, username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    # Unknown scanners fall through with the same success message
    # (preserved behavior; presumably intentional — TODO confirm).
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """
    Import an uploaded scan report into the unified scan tables.

    Resolves the project's internal id from the public ``uu_id``, reads
    the report either from an uploaded file or from the raw request body,
    stores a scan record in the table matching the scanner family
    (WebScansDb / StaticScansDb / NetworkScanDb / tool-specific tables),
    runs the scanner's parser, and returns the per-family result payload.

    :param request: DRF request with ``project_id`` (project ``uu_id``),
        ``scanner``, ``filename`` (``UploadedFile`` or raw text) and
        ``scan_url``.
    :param format: DRF format suffix (unused).
    :return: ``Response`` (directly or via the ``*_result_data`` helpers);
        ``{"message": "Scanner Not Found"}`` for an unknown scanner.
    """
    date_time = datetime.datetime.now()
    project_uu_id = request.data.get("project_id")
    # Translate the public uu_id into the internal primary key; .get()
    # raises if the project does not exist.
    project_id = (ProjectDb.objects.filter(
        uu_id=project_uu_id).values("id").get()["id"])
    # Fix: removed leftover debug `print(project_id)` that logged internal
    # DB ids to stdout on every upload.
    scanner = request.data.get("scanner")
    # Accept either a multipart file upload or the report text inline.
    if isinstance(request.data.get("filename"), UploadedFile):
        file = request.data.get("filename").read().decode("utf-8")
    else:
        file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    if scanner == "zap_scan":
        scan_dump = WebScansDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Zap",
        )
        scan_dump.save()
        # Round-trip through ASCII to drop characters the parser
        # cannot handle.
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding="utf8").decode(
            "ascii", "ignore")
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml_en,
        )
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "burp_scan":
        scan_dump = WebScansDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Burp",
        )
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding="utf8").decode(
            "ascii", "ignore")
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(
            root_xml_en,
            project_id,
            scan_id,
        )
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "arachni":
        scan_dump = WebScansDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Arachni",
        )
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml,
            target_url=scan_url,
        )
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "acunetix":
        scan_dump = WebScansDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Acunetix",
        )
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding="utf8").decode(
            "ascii", "ignore")
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml_en,
        )
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "netsparker":
        scan_dump = WebScansDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Netsparker",
        )
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml,
        )
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "webinspect":
        scan_dump = WebScansDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Webinspect",
        )
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml,
        )
        return self.web_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "banditscan":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Bandit",
            date_time=date_time,
        )
        scan_dump.save()
        data = json.loads(file)
        bandit_report_json(
            data=data,
            project_id=project_id,
            scan_id=scan_id,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "dependencycheck":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Dependencycheck",
        )
        scan_dump.save()
        # Dependency-Check reports go through lxml (etree), not ET.
        xml_dat = bytes(bytearray(file, encoding="utf-8"))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "findbugs":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Findbugs",
        )
        scan_dump.save()
        root_xml = ET.fromstring(file)
        # Class-based parser here (unlike the module-level parsers above);
        # the local name shadows the imported module within this branch.
        findbugs_report_parser = FindsecbugsParser(project_id=project_id,
                                                   scan_id=scan_id,
                                                   root=root_xml)
        findbugs_report_parser.xml_parser()
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "checkmarx":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Checkmarx",
        )
        scan_dump.save()
        root_xml = ET.fromstring(file)
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=root_xml,
            project_id=project_id,
            scan_id=scan_id,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "clair":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Clair",
        )
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "trivy":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Trivy",
        )
        scan_dump.save()
        data = json.loads(file)
        trivy_json_report_parser.trivy_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "gitlabsca":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Gitlabsca",
        )
        scan_dump.save()
        data = json.loads(file)
        gitlab_sca_json_report_parser.gitlabsca_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "gitlabsast":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Gitlabsast",
        )
        scan_dump.save()
        data = json.loads(file)
        gitlab_sast_json_report_parser.gitlabsast_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "gitlabcontainerscan":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Gitlabcontainerscan",
        )
        scan_dump.save()
        data = json.loads(file)
        gitlab_container_json_report_parser.gitlabcontainerscan_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "npmaudit":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Npmaudit",
        )
        scan_dump.save()
        data = json.loads(file)
        npm_audit_report_json.npmaudit_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "nodejsscan":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Nodejsscan",
        )
        scan_dump.save()
        data = json.loads(file)
        nodejsscan_report_json.nodejsscan_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "semgrepscan":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Semgrepscan",
        )
        scan_dump.save()
        data = json.loads(file)
        semgrep_json_report_parser.semgrep_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "tfsec":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Tfsec",
        )
        scan_dump.save()
        data = json.loads(file)
        tfsec_report_parser.tfsec_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "whitesource":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Whitesource",
        )
        scan_dump.save()
        data = json.loads(file)
        whitesource_json_report_parser.whitesource_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "inspec":
        scan_dump = InspecScanDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
        )
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        # escape() guards against reflected markup in the echoed fields.
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "dockle":
        scan_dump = DockleScanDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
        )
        scan_dump.save()
        data = json.loads(file)
        dockle_json_parser.dockle_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "nessus":
        # NOTE(review): unlike sibling branches, no scan record is saved
        # here before parsing — presumably the parser creates the records;
        # confirm against Nessus_Parser.
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding="utf8").decode(
            "ascii", "ignore")
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.updated_nessus_parser(
            root=root_xml_en,
            scan_id=scan_id,
            project_id=project_id,
        )
        return self.network_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "openvas":
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding="utf8").decode(
            "ascii", "ignore")
        root_xml_en = ET.fromstring(en_root_xml)
        # One network-scan record per host found in the report.
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            scan_dump = NetworkScanDb(
                ip=host,
                scan_id=scan_id,
                project_id=project_id,
                scan_status=scan_status,
                scanner="Openvas",
            )
            scan_dump.save()
        OpenVas_Parser.updated_xml_parser(
            project_id=project_id,
            scan_id=scan_id,
            root=root_xml_en,
        )
        return self.network_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "nikto":
        scan_dump = NiktoResultDb(
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        # Nikto uploads are HTML reports, parsed directly from the text.
        nikto_html_parser(
            file,
            project_id,
            scan_id,
        )
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": escape(project_uu_id),
            "scan_id": escape(scan_id),
            "scanner": escape(scanner),
        })
    elif scanner == "twistlock":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Twistlock",
        )
        scan_dump.save()
        data = json.loads(file)
        twistlock_json_report_parser.twistlock_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    elif scanner == "brakeman":
        scan_dump = StaticScansDb(
            project_name=scan_url,
            scan_id=scan_id,
            project_id=project_id,
            scan_status=scan_status,
            scanner="Brakeman",
        )
        scan_dump.save()
        data = json.loads(file)
        brakeman_json_report_parser.brakeman_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
        )
        return self.sast_result_data(scan_id, project_uu_id, scanner)
    else:
        return Response({"message": "Scanner Not Found"})
def post(self, request, format=None):
    """
    Import an uploaded scan report and store its findings.

    Reads the scanner name, project id, target URL and the raw report body
    from the request, saves a scan record for the matching scanner, then
    hands the report to that scanner's parser.

    :param request: DRF request with ``project_id``, ``scanner``,
        ``filename`` (raw report text) and ``scan_url``.
    :param format: DRF format suffix (unused).
    :return: ``Response`` describing the upload outcome;
        ``"Scan Data Not Uploaded"`` for an unknown scanner.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    # Fix: removed leftover debug prints that dumped the entire uploaded
    # report body ("Results file content: ...") and a branch-trace
    # message ("Inside zap_scan") to stdout on every request.
    scan_url = request.data.get("scan_url")
    # Fresh id per upload; "100" marks the scan as complete.
    scan_id = uuid.uuid4()
    scan_status = "100"
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        # Round-trip through ASCII to drop characters the parser
        # cannot handle.
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en, project_id, scan_id)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=root_xml_en)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        # Dependency-Check reports go through lxml (etree), not ET.
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.nessus_parser(
            root=root_xml_en,
            scan_id=scan_id,
            project_id=project_id,
        )
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        scan_dump = scan_save_db(scan_ip=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        OpenVas_Parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        # Nikto uploads are HTML reports, parsed directly from the text.
        nikto_html_parser(file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    return Response({"message": "Scan Data Not Uploaded"})
def post(self, request):
    """
    Handle an uploaded scanner report and import its findings.

    The POSTed form carries:
      * ``project_id`` -- a project ``uu_id``; resolved to the numeric PK.
      * ``scanner``    -- which tool produced the report (selects the branch).
      * ``file``       -- the report file (XML, JSON or .nessus per scanner).
      * ``target``     -- scan URL / project name / IP recorded with the scan.

    On success the parsed findings are stored via the scanner-specific
    parser and the user is redirected to the matching scan list; on a bad
    extension the user is bounced back to the upload form; on a parse
    failure the upload page is re-rendered with an error message.
    """
    all_project = ProjectDb.objects.filter()
    project_uu_id = request.POST.get("project_id")
    # Resolve the public uu_id to the internal numeric id used as FK.
    project_id = (ProjectDb.objects.filter(
        uu_id=project_uu_id).values("id").get()["id"])
    scanner = request.POST.get("scanner")
    file = request.FILES["file"]
    target = request.POST.get("target")
    scan_id = uuid.uuid4()
    scan_status = "100"  # reports are complete scans, so 100% on import

    def bad_extension(msg):
        """Flash *msg* and bounce back to the upload form."""
        messages.error(request, msg)
        return HttpResponseRedirect(reverse("report_upload:upload"))

    def parse_failed(exc):
        """Log *exc*, flash a generic error and re-render the upload page."""
        print(exc)
        messages.error(request, "File Not Supported")
        return render(request, "report_upload/upload.html",
                      {"all_project": all_project})

    def ascii_root(tree_root):
        # Round-trip through bytes to strip non-ascii characters that the
        # ZAP/Burp parsers choke on.
        return ET.fromstring(
            ET.tostring(tree_root, encoding="utf8").decode("ascii", "ignore"))

    # --- Web XML scanners (stored in WebScansDb) --------------------------
    # scanner key -> (bad-extension message, WebScansDb.scanner label)
    web_xml_scanners = {
        "zap_scan": ("ZAP Scanner Only XML file Support", "Zap"),
        "burp_scan": ("Burp Scan Only XML file Support", "Burp"),
        "arachni": ("Arachni Only XML file Support", "Arachni"),
        "netsparker": ("Netsparker Only XML file Support", "Netsparker"),
        "webinspect": ("Webinspect Only XML file Support", "Webinspect"),
        "acunetix": ("Acunetix Only XML file Support", "Acunetix"),
    }
    if scanner in web_xml_scanners:
        ext_msg, label = web_xml_scanners[scanner]
        try:
            if self.check_file_ext(str(file)) != ".xml":
                return bad_extension(ext_msg)
            date_time = datetime.now()
            root_xml = ET.parse(file).getroot()
            dump_kwargs = dict(
                scan_url=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
                scanner=label,
            )
            if scanner == "zap_scan":
                # Only the ZAP record tracks the rescan flag.
                dump_kwargs["rescan"] = "No"
            WebScansDb(**dump_kwargs).save()
            # Each tool ships its own parser with its own calling convention.
            if scanner == "zap_scan":
                zap_xml_parser.xml_parser(
                    project_id=project_id,
                    scan_id=scan_id,
                    root=ascii_root(root_xml),
                )
            elif scanner == "burp_scan":
                burp_xml_parser.burp_scan_data(
                    ascii_root(root_xml), project_id, scan_id)
            elif scanner == "arachni":
                arachni_xml_parser.xml_parser(
                    project_id=project_id,
                    scan_id=scan_id,
                    root=root_xml,
                    target_url=target,
                )
            elif scanner == "netsparker":
                netsparker_xml_parser.xml_parser(
                    project_id=project_id, scan_id=scan_id, root=root_xml)
            elif scanner == "webinspect":
                webinspect_xml_parser.xml_parser(
                    project_id=project_id, scan_id=scan_id, root=root_xml)
            else:  # acunetix
                acunetix_xml_parser.xml_parser(
                    project_id=project_id, scan_id=scan_id, root=root_xml)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("webscanners:list_scans"))
        except Exception as e:
            return parse_failed(e)

    # --- Static XML scanners (stored in StaticScansDb) --------------------
    if scanner == "dependencycheck":
        try:
            if self.check_file_ext(str(file)) != ".xml":
                return bad_extension("Dependencycheck Only XML file Support")
            date_time = datetime.now()
            root = etree.parse(file).getroot()
            StaticScansDb(
                project_name=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
                scanner="Dependencycheck",
            ).save()
            dependencycheck_report_parser.xml_parser(
                project_id=project_id, scan_id=scan_id, data=root)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("staticscanners:list_scans"))
        except Exception as e:
            return parse_failed(e)

    if scanner == "checkmarx":
        try:
            if self.check_file_ext(str(file)) != ".xml":
                return bad_extension("Checkmarx Only XML file Support")
            date_time = datetime.now()
            root = etree.parse(file).getroot()
            # NOTE(review): unlike dependencycheck, no scanner= label is
            # recorded here (same as the original code) -- confirm intended.
            StaticScansDb(
                project_name=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
            ).save()
            checkmarx_xml_report_parser.checkmarx_report_xml(
                project_id=project_id, scan_id=scan_id, data=root)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("staticscanners:list_scans"))
        except Exception as e:
            return parse_failed(e)

    if scanner == "findbugs":
        try:
            if self.check_file_ext(str(file)) != ".xml":
                return bad_extension("Findbugs Only XML file Support")
            date_time = datetime.now()
            root = ET.parse(file).getroot()
            StaticScansDb(
                project_name=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
            ).save()
            findbugs_report_parser.xml_parser(
                project_id=project_id, scan_id=scan_id, root=root)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("staticscanners:list_scans"))
        except Exception as e:
            return parse_failed(e)

    if scanner == "nikto":
        try:
            if self.check_file_ext(str(file)) != ".xml":
                return bad_extension("Nikto Only XML file Support")
            date_time = datetime.now()
            NiktoResultDb(
                date_time=date_time,
                scan_url=target,
                scan_id=scan_id,
                project_id=project_id,
            ).save()
            nikto_html_parser(file, project_id, scan_id)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("tools:nikto"))
        except Exception as e:  # was a bare except; keep the cause visible
            return parse_failed(e)

    # --- JSON scanners imported through the shared upload() helper --------
    # scanner key -> (StaticScansDb scanner label, bad-extension message).
    # These branches were 14 identical copy-pastes in the original.
    json_upload_scanners = {
        "bandit_scan": ("Bandit", "Bandit Only JSON file Supported"),
        "retirejs_scan": ("Retirejs", "Retirejs Only JSON file Supported"),
        "clair_scan": ("Clair", "Clair Only JSON file Supported"),
        "trivy_scan": ("Trivy", "Trivy Only JSON file Supported"),
        "npmaudit_scan": ("Npmaudit", "NPM Audit Only JSON file Supported"),
        "nodejsscan_scan":
            ("Nodejsscan", "Nodejs scan Only JSON file Supported"),
        "semgrepscan_scan":
            ("Semgrep", "Semgrep scan Only JSON file Supported"),
        "tfsec_scan": ("Tfsec", "Tfsec Only JSON file Supported"),
        "whitesource_scan":
            ("Whitesource", "Whitesource Only JSON file Supported"),
        "gitlabsast_scan":
            ("Gitlabsast", "Gitlabsast Only JSON file Supported"),
        "gitlabcontainerscan_scan":
            ("Gitlabcontainerscan",
             "Gitlabcontainerscan Only JSON file Supported"),
        "gitlabsca_scan": ("Gitlabsca", "Gitlabsca Only JSON file Supported"),
        "twistlock_scan": ("Twistlock", "Twistlock Only JSON file Supported"),
        "brakeman_scan": ("Brakeman_scan", "Brakeman Only JSON file Supported"),
    }
    if scanner in json_upload_scanners:
        label, ext_msg = json_upload_scanners[scanner]
        try:
            if self.check_file_ext(str(file)) != ".json":
                return bad_extension(ext_msg)
            date_time = datetime.now()
            data = json.loads(file.read())
            upload(
                target,
                scan_id,
                date_time,
                project_id,
                scan_status,
                label,
                data,
            )
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("staticscanners:list_scans"))
        except Exception as e:
            return parse_failed(e)

    # --- JSON scanners with dedicated result tables ------------------------
    if scanner == "inspec_scan":
        try:
            if self.check_file_ext(str(file)) != ".json":
                return bad_extension("Inspec Only JSON file Supported")
            date_time = datetime.now()
            data = json.loads(file.read())
            InspecScanDb(
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
            ).save()
            inspec_report_json(
                data=data, project_id=project_id, scan_id=scan_id)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("inspec:inspec_list"))
        except Exception as e:  # was a bare except; keep the cause visible
            return parse_failed(e)

    if scanner == "dockle_scan":
        try:
            if self.check_file_ext(str(file)) != ".json":
                return bad_extension("Dockle Only JSON file Supported")
            date_time = datetime.now()
            data = json.loads(file.read())
            DockleScanDb(
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
            ).save()
            dockle_report_json(
                data=data, project_id=project_id, scan_id=scan_id)
            messages.success(request, "File Uploaded")
            return HttpResponseRedirect(reverse("dockle:dockle_list"))
        except Exception as e:  # was a bare except; keep the cause visible
            return parse_failed(e)

    # --- Network scanners (no try/except in the original either) -----------
    if scanner == "openvas":
        if self.check_file_ext(str(file)) != ".xml":
            return bad_extension("Openvas Only XML file Supported")
        date_time = datetime.now()
        root_xml = ET.parse(file).getroot()
        # One NetworkScanDb row per host found in the report.
        for host in OpenVas_Parser.get_hosts(root_xml):
            NetworkScanDb(
                ip=host,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
                scanner="Openvas",
            ).save()
        OpenVas_Parser.updated_xml_parser(
            project_id=project_id, scan_id=scan_id, root=root_xml)
        messages.success(request, "File Uploaded")
        return HttpResponseRedirect(reverse("networkscanners:list_scans"))

    if scanner == "nessus":
        if self.check_file_ext(str(file)) != ".nessus":
            return bad_extension("Nessus Only .nessus file Supported")
        root_xml = ET.parse(file).getroot()
        Nessus_Parser.updated_nessus_parser(
            root=root_xml, scan_id=scan_id, project_id=project_id)
        messages.success(request, "File Uploaded")
        return HttpResponseRedirect(reverse("networkscanners:list_scans"))

    if scanner == "nmap":
        # nmap accepts any extension (as in the original).
        root_xml = ET.parse(file).getroot()
        nmap_parser.xml_parser(
            root=root_xml, scan_id=scan_id, project_id=project_id)
        messages.success(request, "File Uploaded")
        return HttpResponseRedirect(reverse("tools:nmap_scan"))

    # Unknown scanner value: the original fell off the end and returned
    # None, which makes Django raise; surface a user-facing error instead.
    messages.error(request, "File Not Supported")
    return render(request, "report_upload/upload.html",
                  {"all_project": all_project})
def launch_arachni_scan(target, project_id, rescan_id, rescan, scan_id, user):
    """
    Launch an Arachni scan against ``target`` via the Arachni REST API,
    poll it to completion, then import the XML report.

    :param target: URL to scan.
    :param project_id: project the scan results belong to.
    :param rescan_id: id of the scan being re-run (stored on the record).
    :param rescan: rescan flag stored on the record.
    :param scan_id: Archery-side scan identifier for the DB rows.
    :param user: Django user; used for settings lookup and notifications.
    :return: None. Returns early if the Arachni API is unreachable.
    """
    arachni_hosts = None
    arachni_ports = None
    username = user.username
    # Last matching settings row wins; if the user has no settings row,
    # hosts/ports stay None and the API call below will fail.
    all_arachni = arachni_settings_db.objects.filter(username=username)
    for arachni in all_arachni:
        arachni_hosts = arachni.arachni_url
        arachni_ports = arachni.arachni_port
    arachni = PyArachniapi.arachniAPI(arachni_hosts, arachni_ports)
    # Checks requested from Arachni (XSS, SQLi, CSRF, info disclosure).
    check = [
        "xss_event",
        "xss",
        "xss_script_context",
        "xss_tag",
        "xss_path",
        "xss_dom_script_context",
        "xss_dom",
        "sql_injection",
        "sql_injection_differential",
        "sql_injection_timing",
        "csrf",
        "common_files",
        "directory_listing",
    ]
    data = {"url": target, "checks": check, "audit": {}}
    d = json.dumps(data)
    scan_launch = arachni.scan_launch(d)
    # Give the Arachni server a moment before reading launch data.
    time.sleep(3)
    try:
        scan_data = scan_launch.data
        # Pull the Arachni-side scan id out of the launch response.
        for key, value in scan_data.items():
            if key == 'id':
                scan_run_id = value
        notify.send(user,
                    recipient=user,
                    verb='Arachni Scan Started on URL %s' % target)
    except Exception:
        # API unreachable or malformed response: notify and bail out.
        notify.send(user,
                    recipient=user,
                    verb='Arachni Connection Not found')
        print("Arachni Connection Not found")
        return
    date_time = datetime.now()
    try:
        save_all_scan = arachni_scan_db(
            username=username,
            project_id=project_id,
            url=target,
            scan_id=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
        )
        save_all_scan.save()
    except Exception as e:
        # DB save failure is logged but the scan continues. TODO confirm
        # that continuing without a scan record is intended.
        print(e)
    # NOTE(review): re-reads the same launch response as the try above;
    # scan_run_id would be unbound if the response had no 'id' key.
    scan_data = scan_launch.data
    for key, value in scan_data.items():
        if key == 'id':
            scan_run_id = value
    scan_sum = arachni.scan_summary(id=scan_run_id).data
    for key, value in scan_sum.items():
        if key == 'status':
            scan_status = value
    # Poll every 3s, publishing a rough progress percentage to the DB.
    while scan_status != 'done':
        status = '0'
        # NOTE(review): progress is queued_job_count*100/total_job_time --
        # jobs over time looks dimensionally odd; presumably a rough
        # heuristic, verify against the Arachni API docs.
        if scan_sum['statistics']['browser_cluster'][
                'queued_job_count'] and scan_sum['statistics'][
                    'browser_cluster']['total_job_time']:
            status = 100 - scan_sum['statistics']['browser_cluster']['queued_job_count'] * 100 / \
                scan_sum['statistics']['browser_cluster']['total_job_time']
        arachni_scan_db.objects.filter(
            username=username,
            scan_id=scan_id).update(scan_status=int(status))
        scan_sum = arachni.scan_summary(id=scan_run_id).data
        for key, value in scan_sum.items():
            if key == 'status':
                scan_status = value
        time.sleep(3)
    if scan_status == 'done':
        # Fetch the XML report, import findings, mark the scan 100% done.
        xml_report = arachni.scan_xml_report(id=scan_run_id).data
        root_xml = ET.fromstring(xml_report)
        arachni_xml_parser.xml_parser(username=username,
                                      project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml,
                                      target_url=target)
        arachni_scan_db.objects.filter(
            username=username, scan_id=scan_id).update(scan_status='100')
        print("Data uploaded !!!!")
        notify.send(user,
                    recipient=user,
                    verb='Arachni Scan Completed on URL %s' % target)