def xml_upload(request):
    """
    Handle XML report file uploads.

    :param request:
    :return:
    """
    username = request.user.username
    all_project = project_db.objects.filter(username=username)
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"
        if scanner == "zap_scan":
            try:
                tree = ET.parse(xml_file)
                date_time = datetime.now()
                root_xml = tree.getroot()
                en_root_xml = ET.tostring(root_xml,
                                          encoding='utf8').decode('ascii', 'ignore')
                root_xml_en = ET.fromstring(en_root_xml)
                scan_dump = zap_scans_db(username=username, scan_url=scan_url,
                                         scan_scanid=scan_id, date_time=date_time,
                                         project_id=project_id,
                                         vul_status=scan_status, rescan='No')
                scan_dump.save()
                zap_xml_parser.xml_parser(username=username, project_id=project_id,
                                          scan_id=scan_id, root=root_xml_en)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('zapscanner:zap_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == "burp_scan":
            try:
                date_time = datetime.now()
                # Burp scan XML parser
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                en_root_xml = ET.tostring(root_xml,
                                          encoding='utf8').decode('ascii', 'ignore')
                root_xml_en = ET.fromstring(en_root_xml)
                scan_dump = burp_scan_db(username=username, url=scan_url,
                                         scan_id=scan_id, date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
                scan_dump.save()
                burp_xml_parser.burp_scan_data(root_xml_en, project_id, scan_id,
                                               username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('burpscanner:burp_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == "arachni":
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = arachni_scan_db(username=username, url=scan_url,
                                            scan_id=scan_id, date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
                scan_dump.save()
                arachni_xml_parser.xml_parser(username=username,
                                              project_id=project_id,
                                              scan_id=scan_id, root=root_xml,
                                              target_url=scan_url)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('arachniscanner:arachni_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'netsparker':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = netsparker_scan_db(username=username, url=scan_url,
                                               scan_id=scan_id, date_time=date_time,
                                               project_id=project_id,
                                               scan_status=scan_status)
                scan_dump.save()
                netsparker_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('netsparkerscanner:netsparker_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'webinspect':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = webinspect_scan_db(username=username, url=scan_url,
                                               scan_id=scan_id, date_time=date_time,
                                               project_id=project_id,
                                               scan_status=scan_status)
                scan_dump.save()
                webinspect_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('webinspectscanner:webinspect_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'acunetix':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = acunetix_scan_db(username=username, url=scan_url,
                                             scan_id=scan_id, date_time=date_time,
                                             project_id=project_id,
                                             scan_status=scan_status)
                scan_dump.save()
                acunetix_xml_parser.xml_parser(username=username,
                                               project_id=project_id,
                                               scan_id=scan_id, root=root_xml)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('acunetixscanner:acunetix_scan_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'dependencycheck':
            try:
                date_time = datetime.now()
                data = etree.parse(xml_file)
                root = data.getroot()
                scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                                    scan_id=scan_id,
                                                    date_time=date_time,
                                                    project_id=project_id,
                                                    scan_status=scan_status,
                                                    username=username)
                scan_dump.save()
                dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                         scan_id=scan_id,
                                                         data=root,
                                                         username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('dependencycheck:dependencycheck_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'checkmarx':
            try:
                date_time = datetime.now()
                data = etree.parse(xml_file)
                root = data.getroot()
                scan_dump = checkmarx_scan_db(project_name=scan_url,
                                              scan_id=scan_id,
                                              date_time=date_time,
                                              project_id=project_id,
                                              scan_status=scan_status,
                                              username=username)
                scan_dump.save()
                checkmarx_xml_report_parser.checkmarx_report_xml(
                    project_id=project_id, scan_id=scan_id,
                    data=root, username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('checkmarx:checkmarx_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'findbugs':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root = tree.getroot()
                scan_dump = findbugs_scan_db(project_name=scan_url,
                                             scan_id=scan_id,
                                             date_time=date_time,
                                             project_id=project_id,
                                             scan_status=scan_status,
                                             username=username)
                scan_dump.save()
                findbugs_report_parser.xml_parser(project_id=project_id,
                                                  scan_id=scan_id, root=root,
                                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('findbugs:findbugs_list'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'nikto':
            try:
                date_time = datetime.now()
                scan_dump = nikto_result_db(
                    date_time=date_time,
                    scan_url=scan_url,
                    scan_id=scan_id,
                    project_id=project_id,
                )
                scan_dump.save()
                nikto_html_parser(xml_file, project_id, scan_id,
                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('tools:nikto'))
            except Exception:
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
    return render(request, 'upload_xml.html', {'all_project': all_project})
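# Hedged hardening sketch (not part of the original code): the upload views in
# this file feed attacker-supplied XML straight into xml.etree / lxml, which is
# exposed to entity-expansion and XXE payloads. defusedxml mirrors the stdlib
# ElementTree API and refuses those constructs; assumes `pip install defusedxml`.
import defusedxml.ElementTree as SafeET
from defusedxml import EntitiesForbidden


def parse_uploaded_report(xml_file):
    """Parse an uploaded report, rejecting XML bombs and external entities."""
    try:
        tree = SafeET.parse(xml_file)  # same signature as ET.parse
        return tree.getroot()
    except EntitiesForbidden:
        # Malicious DTD/entity constructs: treat like any unsupported file.
        return None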
def launch_arachni_scan(target, project_id, rescan_id, rescan, scan_id):
    arachni_hosts = None
    arachni_ports = None
    all_arachni = arachni_settings_db.objects.all()
    for arachni in all_arachni:
        arachni_hosts = arachni.arachni_url
        arachni_ports = arachni.arachni_port
    arachni = PyArachniapi.arachniAPI(arachni_hosts, arachni_ports)
    check = [
        "xss_event", "xss", "xss_script_context", "xss_tag", "xss_path",
        "xss_dom_script_context", "xss_dom", "sql_injection",
        "sql_injection_differential", "sql_injection_timing", "csrf",
        "common_files", "directory_listing",
    ]
    # data = {"url": target, "checks": check}
    data = {"url": target, "checks": check, "audit": {}}
    d = json.dumps(data)
    scan_launch = arachni.scan_launch(d)
    time.sleep(3)
    print("Scan Launched !!!!!")
    date_time = datetime.now()
    try:
        save_all_scan = arachni_scan_db(
            project_id=project_id,
            url=target,
            scan_id=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
        )
        save_all_scan.save()
    except Exception as e:
        print(e)
    scan_data = scan_launch.data
    for key, value in scan_data.items():
        if key == 'id':
            scan_run_id = value
    scan_sum = arachni.scan_summary(id=scan_run_id).data
    for key, value in scan_sum.items():
        if key == 'status':
            scan_status = value
    while scan_status != 'done':
        status = '0'
        if scan_sum['statistics']['browser_cluster']['queued_job_count'] and \
                scan_sum['statistics']['browser_cluster']['total_job_time']:
            status = 100 - scan_sum['statistics']['browser_cluster']['queued_job_count'] * 100 / \
                scan_sum['statistics']['browser_cluster']['total_job_time']
        arachni_scan_db.objects.filter(scan_id=scan_id).update(scan_status=status)
        scan_sum = arachni.scan_summary(id=scan_run_id).data
        for key, value in scan_sum.items():
            if key == 'status':
                scan_status = value
        time.sleep(3)
        print("scan_di", scan_run_id)
    if scan_status == 'done':
        xml_report = arachni.scan_xml_report(id=scan_run_id).data
        root_xml = ET.fromstring(xml_report)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id, root=root_xml)
        arachni_scan_db.objects.filter(scan_id=scan_id).update(scan_status='100')
        print("Data uploaded !!!!")
    print(scan_run_id)
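# Hedged usage sketch (illustrative, not from the original source): the
# launcher above blocks in a polling loop for the whole scan, so callers
# typically run it off the request thread. The rescan_id/rescan values passed
# here are assumed placeholders.
import threading


def start_arachni_scan_async(target, project_id, scan_id):
    worker = threading.Thread(
        target=launch_arachni_scan,
        args=(target, project_id, '', 'No', scan_id),  # assumed rescan args
        daemon=True,  # do not keep the process alive for an abandoned scan
    )
    worker.start()
    return worker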
def xml_upload(request):
    """
    Handle XML report file uploads.

    :param request:
    :return:
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"
        if scanner == "zap_scan":
            date_time = datetime.now()
            scan_dump = zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                                     date_time=date_time, project_id=project_id,
                                     vul_status=scan_status, rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/zapscanner/zap_scan_list/")
        elif scanner == "burp_scan":
            date_time = datetime.now()
            scan_dump = burp_scan_db(url=scan_url, scan_id=scan_id,
                                     date_time=date_time, project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
            do_xml_data.burp_scan_data(root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/burpscanner/burp_scan_list")
        elif scanner == "arachni":
            date_time = datetime.now()
            scan_dump = arachni_scan_db(url=scan_url, scan_id=scan_id,
                                        date_time=date_time, project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                          root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/arachniscanner/arachni_scan_list")
        elif scanner == 'netsparker':
            date_time = datetime.now()
            scan_dump = netsparker_scan_db(url=scan_url, scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            netsparker_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id, root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect("/netsparkerscanner/netsparker_scan_list/")
        elif scanner == 'webinspect':
            date_time = datetime.now()
            scan_dump = webinspect_scan_db(url=scan_url, scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            webinspect_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id, root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect("/webinspectscanner/webinspect_scan_list/")
        elif scanner == 'acunetix':
            date_time = datetime.now()
            scan_dump = acunetix_scan_db(url=scan_url, scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            acunetix_xml_parser.xml_parser(project_id=project_id,
                                           scan_id=scan_id, root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect("/acunetixscanner/acunetix_scan_list/")
        elif scanner == 'dependencycheck':
            date_time = datetime.now()
            scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                                scan_id=scan_id,
                                                date_time=date_time,
                                                project_id=project_id,
                                                scan_status=scan_status)
            scan_dump.save()
            data = etree.parse(xml_file)
            dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                     scan_id=scan_id, data=data)
            print("Saved scan data")
            return HttpResponseRedirect("/dependencycheck/dependencycheck_list")
        elif scanner == 'findbugs':
            date_time = datetime.now()
            scan_dump = findbugs_scan_db(project_name=scan_url, scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root = tree.getroot()
            findbugs_report_parser.xml_parser(project_id=project_id,
                                              scan_id=scan_id, root=root)
            print("Saved scan data")
            return HttpResponseRedirect("/findbugs/findbugs_list")
        elif scanner == 'nikto':
            date_time = datetime.now()
            scan_dump = nikto_result_db(
                date_time=date_time,
                scan_url=scan_url,
                scan_id=scan_id,
                project_id=project_id,
            )
            scan_dump.save()
            nikto_html_parser(xml_file, project_id, scan_id)
            print("Saved scan data")
            return HttpResponseRedirect("/tools/nikto/")
    return render(request, 'upload_xml.html', {'all_project': all_project})
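# Hedged refactor sketch (hypothetical, not from the original source): every
# branch of the if/elif ladders above repeats the same save-record,
# parse-report, redirect shape, so a dispatch table can replace the ladder.
# handle_zap is a stand-in showing the shape of one entry.
def handle_zap(xml_file, project_id, scan_id, scan_url):
    tree = ET.parse(xml_file)
    zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                              root=tree.getroot())
    return "/zapscanner/zap_scan_list/"


SCANNER_HANDLERS = {
    "zap_scan": handle_zap,
    # "burp_scan": handle_burp, ... one entry per supported scanner
}


def dispatch_upload(scanner, xml_file, project_id, scan_id, scan_url):
    handler = SCANNER_HANDLERS.get(scanner)
    if handler is None:
        return None  # unknown scanner: fall through to re-rendering the form
    return handler(xml_file, project_id, scan_id, scan_url)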
def post(self, request, format=None):
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 vul_status=scan_status, rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url, scan_id=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url, scan_id=scan_id,
                                    date_time=date_time, project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        bandit_report_json(data=data, project_id=project_id, scan_id=scan_id)
        return Response({"message": "Scan Data Uploaded"})
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 vul_status=scan_status, rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=root_xml)
        return Response({"message": "ZAP Scan Data Uploaded",
                         "scanner": scanner, "project_id": project_id,
                         "scan_id": scan_id})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url, scan_id=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({"message": "Burp Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url, scan_id=scan_id,
                                    date_time=date_time, project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        bandit_report_json(data=data, project_id=project_id, scan_id=scan_id)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        data = etree.parse(xml_file)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, data=data)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url, scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url, scan_id=scan_id,
                                  date_time=date_time, project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id, data=data)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(xml_file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id, data=data)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        nikto_html_parser(xml_file, project_id, scan_id)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    return Response({"message": "Scan Data Uploaded"})
def xml_upload(request):
    """
    Handle XML report file uploads.

    :param request:
    :return:
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        scan_status = "100"
        if scanner == "zap_scan":
            date_time = timezone.now()
            scan_dump = zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                                     date_time=date_time, project_id=project_id,
                                     vul_status=scan_status, rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/webscanners/scans_list/")
        elif scanner == "burp_scan":
            date_time = timezone.now()
            scan_dump = burp_scan_db(url=scan_url, scan_id=scan_id,
                                     date_time=date_time, project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
            do_xml_data.burp_scan_data(root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/burp_scan_list")
        elif scanner == "arachni":
            print(scanner)
            print(xml_file)
            print(scan_url)
            date_time = timezone.now()
            scan_dump = arachni_scan_db(url=scan_url, scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id, root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/arachni_scan_list")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def launch_arachni_scan(target, project_id, rescan_id, rescan, scan_id, user):
    global scan_run_id, scan_status
    arachni_hosts = None
    arachni_ports = None
    username = user.username
    all_arachni = arachni_settings_db.objects.filter(username=username)
    for arachni in all_arachni:
        arachni_hosts = arachni.arachni_url
        arachni_ports = arachni.arachni_port
        arachni_user = arachni.arachni_user
        arachni_pass = arachni.arachni_pass
    arachni = PyArachniapi.arachniAPI(arachni_hosts, arachni_ports,
                                      arachni_user, arachni_pass)
    check = [
        "xss_event", "xss", "xss_script_context", "xss_tag", "xss_path",
        "xss_dom_script_context", "xss_dom", "sql_injection",
        "sql_injection_differential", "sql_injection_timing",
        "no_sql_injection", "no_sql_injection_differential",
        "code_injection", "code_injection_timing", "ldap_injection",
        "path_traversal", "file_inclusion", "response_splitting",
        "os_cmd_injection", "os_cmd_injection_timing", "rfi",
        "unvalidated_redirect", "unvalidated_redirect_dom",
        "xpath_injection", "xxe", "source_code_disclosure",
        "allowed_methods", "backup_files", "backup_directories",
        "common_admin_interfaces", "common_directories", "common_files",
        "http_put", "webdav", "xst", "credit_card", "cvs_svn_users",
        "private_ip", "backdoors", "htaccess_limit",
        "interesting_responses", "html_objects", "emails", "ssn",
        "directory_listing", "mixed_resource", "insecure_cookies",
        "http_only_cookies", "password_autocomplete",
        "origin_spoof_access_restriction_bypass", "form_upload",
        "localstart_asp", "cookie_set_for_parent_domain", "hsts",
        "x_frame_options", "insecure_cors_policy",
        "insecure_cross_domain_policy_access",
        "insecure_cross_domain_policy_headers",
        "insecure_client_access_policy", "csrf", "common_files",
        "directory_listing",
    ]
    data = {"url": target, "checks": check, "audit": {}}
    d = json.dumps(data)
    scan_launch = arachni.scan_launch(d)
    time.sleep(3)
    try:
        scan_data = scan_launch.data
        for key, value in scan_data.items():
            if key == 'id':
                scan_run_id = value
        notify.send(user, recipient=user,
                    verb='Arachni Scan Started on URL %s' % target)
    except Exception:
        notify.send(user, recipient=user, verb='Arachni Connection Not found')
        print("Arachni Connection Not found")
        return
    date_time = datetime.now()
    try:
        save_all_scan = arachni_scan_db(
            username=username,
            project_id=project_id,
            url=target,
            scan_id=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
        )
        save_all_scan.save()
    except Exception as e:
        print(e)
    scan_data = scan_launch.data
    for key, value in scan_data.items():
        if key == 'id':
            scan_run_id = value
    scan_sum = arachni.scan_summary(id=scan_run_id).data
    for key, value in scan_sum.items():
        if key == 'status':
            scan_status = value
    while scan_status != 'done':
        status = '0'
        if scan_sum['statistics']['browser_cluster']['queued_job_count'] and \
                scan_sum['statistics']['browser_cluster']['total_job_time']:
            status = 100 - scan_sum['statistics']['browser_cluster']['queued_job_count'] * 100 / \
                scan_sum['statistics']['browser_cluster']['total_job_time']
        arachni_scan_db.objects.filter(
            username=username, scan_id=scan_id).update(scan_status=int(status))
        scan_sum = arachni.scan_summary(id=scan_run_id).data
        for key, value in scan_sum.items():
            if key == 'status':
                scan_status = value
        time.sleep(3)
    if scan_status == 'done':
        xml_report = arachni.scan_xml_report(id=scan_run_id).data
        root_xml = ET.fromstring(xml_report)
        arachni_xml_parser.xml_parser(username=username, project_id=project_id,
                                      scan_id=scan_id, root=root_xml,
                                      target_url=target)
        arachni_scan_db.objects.filter(
            username=username, scan_id=scan_id).update(scan_status='100')
        print("Data uploaded !!!!")
        notify.send(user, recipient=user,
                    verb='Arachni Scan Completed on URL %s' % target)
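# Hedged sketch (illustrative only, not from the original source): the
# progress expression above, 100 - queued_job_count * 100 / total_job_time,
# can drift outside 0-100 when the queue outgrows the elapsed job time, so
# clamping before the DB update keeps scan_status a sane percentage.
def clamp_progress(queued_job_count, total_job_time):
    if not (queued_job_count and total_job_time):
        return 0
    raw = 100 - queued_job_count * 100 / total_job_time
    return max(0, min(100, int(raw)))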
def post(self, request, format=None):
    username = request.user.username
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 vul_status=scan_status, rescan='No',
                                 username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=root_xml_en, username=username)
        return Response({"message": "ZAP Scan Data Uploaded",
                         "scanner": scanner, "project_id": project_id,
                         "scan_id": scan_id})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url, scan_id=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 scan_status=scan_status, username=username)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en, project_id, scan_id,
                                       username=username)
        return Response({"message": "Burp Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url, scan_id=scan_id,
                                    date_time=date_time, project_id=project_id,
                                    scan_status=scan_status, username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=root_xml, username=username,
                                      target_url=scan_url)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url, scan_id=scan_id,
                                     date_time=date_time, project_id=project_id,
                                     scan_status=scan_status, username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                       root=root_xml_en, username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml,
                                         username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml,
                                         username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status, username=username)
        scan_dump.save()
        data = json.loads(file)
        bandit_report_json(data=data, project_id=project_id, scan_id=scan_id,
                           username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status,
                                            username=username)
        scan_dump.save()
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, data=data,
                                                 username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url, scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id, root=root_xml,
                                          username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'checkmarx':
        date_time = datetime.datetime.now()
        scan_dump = checkmarx_scan_db(project_name=scan_url, scan_id=scan_id,
                                      date_time=date_time,
                                      project_id=project_id,
                                      scan_status=scan_status,
                                      username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=root_xml, project_id=project_id,
            scan_id=scan_id, username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url, scan_id=scan_id,
                                  date_time=date_time, project_id=project_id,
                                  scan_status=scan_status, username=username)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id, data=data,
                                                   username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'trivy':
        date_time = datetime.datetime.now()
        scan_dump = trivy_scan_db(project_name=scan_url, scan_id=scan_id,
                                  date_time=date_time, project_id=project_id,
                                  scan_status=scan_status, username=username)
        scan_dump.save()
        data = json.loads(file)
        trivy_json_report_parser.trivy_report_json(project_id=project_id,
                                                   scan_id=scan_id, data=data,
                                                   username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'npmaudit':
        date_time = datetime.datetime.now()
        scan_dump = npmaudit_scan_db(project_name=scan_url, scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        data = json.loads(file)
        npm_audit_report_json.npmaudit_report_json(project_id=project_id,
                                                   scan_id=scan_id, data=data,
                                                   username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'nodejsscan':
        date_time = datetime.datetime.now()
        scan_dump = nodejsscan_scan_db(project_name=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        data = json.loads(file)
        nodejsscan_report_json.nodejsscan_report_json(
            project_id=project_id, scan_id=scan_id,
            data=data, username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'tfsec':
        date_time = datetime.datetime.now()
        scan_dump = tfsec_scan_db(project_name=scan_url, scan_id=scan_id,
                                  date_time=date_time, project_id=project_id,
                                  scan_status=scan_status, username=username)
        scan_dump.save()
        data = json.loads(file)
        tfsec_report_parser.tfsec_report_json(project_id=project_id,
                                              scan_id=scan_id, data=data,
                                              username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'whitesource':
        date_time = datetime.datetime.now()
        scan_dump = whitesource_scan_db(project_name=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status,
                                        username=username)
        scan_dump.save()
        data = json.loads(file)
        whitesource_json_report_parser.whitesource_report_json(
            project_id=project_id, scan_id=scan_id,
            data=data, username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status, username=username)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id, data=data,
                                              username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'dockle':
        date_time = datetime.datetime.now()
        scan_dump = dockle_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status, username=username)
        scan_dump.save()
        data = json.loads(file)
        dockle_json_parser.dockle_report_json(project_id=project_id,
                                              scan_id=scan_id, data=data,
                                              username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status, username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.updated_nessus_parser(root=root_xml_en,
                                            scan_id=scan_id,
                                            project_id=project_id,
                                            username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            scan_dump = scan_save_db(scan_ip=host, scan_id=host,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
            scan_dump.save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en,
                                          username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(date_time=date_time, scan_url=scan_url,
                                    scan_id=scan_id, project_id=project_id,
                                    username=username)
        scan_dump.save()
        nikto_html_parser(file, project_id, scan_id, username=username)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    return Response({"message": "Scan Data Uploaded"})
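# Hedged client sketch (the endpoint path and token scheme are assumptions;
# the field names project_id/scanner/filename/scan_url come from the view
# above, which parses the "filename" field as the report's raw content).
import requests


def upload_report(base_url, token, project_id, scanner, report_path, scan_url):
    with open(report_path) as report:
        payload = {
            "project_id": project_id,
            "scanner": scanner,         # e.g. "zap_scan"
            "filename": report.read(),  # raw report body, not a file path
            "scan_url": scan_url,
        }
    response = requests.post(
        "%s/api/uploadscan/" % base_url,                # assumed route
        json=payload,
        headers={"Authorization": "Token %s" % token},  # assumed auth scheme
    )
    response.raise_for_status()
    return response.json()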
def post(self, request, format=None):
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    print("Results file content: ", file)
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    scan_status = "100"
    if scanner == "zap_scan":
        print("Inside zap_scan")
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url, scan_scanid=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 vul_status=scan_status, rescan='No')
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=root_xml_en)
        return Response({"message": "ZAP Scan Data Uploaded",
                         "scanner": scanner, "project_id": project_id,
                         "scan_id": scan_id})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url, scan_id=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en, project_id, scan_id)
        return Response({"message": "Burp Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url, scan_id=scan_id,
                                    date_time=date_time, project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                      root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url, scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                       root=root_xml_en)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url, scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        bandit_report_json(data=data, project_id=project_id, scan_id=scan_id)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id, data=data)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url, scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id, root=root_xml)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url, scan_id=scan_id,
                                  date_time=date_time, project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id, data=data)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id, data=data)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url, scan_id=scan_id,
                                   date_time=date_time, project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.nessus_parser(
            root=root_xml_en,
            scan_id=scan_id,
            project_id=project_id,
        )
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        scan_dump = scan_save_db(scan_ip=scan_url, scan_id=scan_id,
                                 date_time=date_time, project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml,
                                  encoding='utf8').decode('ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        OpenVas_Parser.xml_parser(project_id=project_id, scan_id=scan_id,
                                  root=root_xml_en)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        nikto_html_parser(file, project_id, scan_id)
        return Response({"message": "Scan Data Uploaded",
                         "project_id": project_id, "scan_id": scan_id,
                         "scanner": scanner})
    return Response({"message": "Scan Data Not Uploaded"})
def launch_arachni_scan(target, project_id, rescan_id, rescan, scan_id, user):
    arachni_hosts = None
    arachni_ports = None
    username = user.username
    all_arachni = arachni_settings_db.objects.filter(username=username)
    for arachni in all_arachni:
        arachni_hosts = arachni.arachni_url
        arachni_ports = arachni.arachni_port
    arachni = PyArachniapi.arachniAPI(arachni_hosts, arachni_ports)
    check = [
        "xss_event", "xss", "xss_script_context", "xss_tag", "xss_path",
        "xss_dom_script_context", "xss_dom", "sql_injection",
        "sql_injection_differential", "sql_injection_timing", "csrf",
        "common_files", "directory_listing",
    ]
    data = {"url": target, "checks": check, "audit": {}}
    d = json.dumps(data)
    scan_launch = arachni.scan_launch(d)
    time.sleep(3)
    try:
        scan_data = scan_launch.data
        for key, value in scan_data.items():
            if key == 'id':
                scan_run_id = value
        notify.send(user, recipient=user,
                    verb='Arachni Scan Started on URL %s' % target)
    except Exception:
        notify.send(user, recipient=user, verb='Arachni Connection Not found')
        print("Arachni Connection Not found")
        return
    date_time = datetime.now()
    try:
        save_all_scan = arachni_scan_db(
            username=username,
            project_id=project_id,
            url=target,
            scan_id=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
        )
        save_all_scan.save()
    except Exception as e:
        print(e)
    scan_data = scan_launch.data
    for key, value in scan_data.items():
        if key == 'id':
            scan_run_id = value
    scan_sum = arachni.scan_summary(id=scan_run_id).data
    for key, value in scan_sum.items():
        if key == 'status':
            scan_status = value
    while scan_status != 'done':
        status = '0'
        if scan_sum['statistics']['browser_cluster']['queued_job_count'] and \
                scan_sum['statistics']['browser_cluster']['total_job_time']:
            status = 100 - scan_sum['statistics']['browser_cluster']['queued_job_count'] * 100 / \
                scan_sum['statistics']['browser_cluster']['total_job_time']
        arachni_scan_db.objects.filter(
            username=username, scan_id=scan_id).update(scan_status=int(status))
        scan_sum = arachni.scan_summary(id=scan_run_id).data
        for key, value in scan_sum.items():
            if key == 'status':
                scan_status = value
        time.sleep(3)
    if scan_status == 'done':
        xml_report = arachni.scan_xml_report(id=scan_run_id).data
        root_xml = ET.fromstring(xml_report)
        arachni_xml_parser.xml_parser(username=username, project_id=project_id,
                                      scan_id=scan_id, root=root_xml,
                                      target_url=target)
        arachni_scan_db.objects.filter(
            username=username, scan_id=scan_id).update(scan_status='100')
        print("Data uploaded !!!!")
        notify.send(user, recipient=user,
                    verb='Arachni Scan Completed on URL %s' % target)