def launch_zap_scan(target_url, project_id, rescan_id, rescan, scan_id):
    """
    Launch a ZAP spider + active scan against target_url and persist results.

    :param target_url: Target URL to scan.
    :param project_id: Project ID the scan belongs to.
    :param rescan_id: ID of the original scan when this run is a rescan.
    :param rescan: Rescan flag.
    :param scan_id: Unique scan ID recorded for this run.
    :return: None
    """
    # Load ZAP Plugin and prepare the session (URL exclusions + cookies).
    zap = zap_plugin.ZAPScanner(target_url, project_id, rescan_id, rescan)
    zap.exclude_url()
    time.sleep(3)
    zap.cookies()
    time.sleep(3)
    date_time = datetime.now()
    try:
        # Record the scan row up-front; vul_status '0' marks it in-progress.
        save_all_scan = zap_scans_db(
            project_id=project_id,
            scan_url=target_url,
            scan_scanid=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
            vul_status='0'
        )
        save_all_scan.save()
    except Exception as e:
        # Best-effort bookkeeping: a failed insert should not abort the scan.
        # NOTE: py2 `print e` statements converted to print() calls so this
        # block is valid on Python 3 like the rest of the file.
        print(e)
    # Spider phase: crawl the target before the active scan.
    zap.zap_spider_thread(thread_value=30)
    spider_id = zap.zap_spider()
    zap.spider_status(spider_id=spider_id)
    zap.spider_result(spider_id=spider_id)
    print("Spider Completed")
    time.sleep(5)
    print('Scanning Target %s' % target_url)
    """ ZAP Scan trigger on target_url """
    zap_scan_id = zap.zap_scan()
    zap.zap_scan_status(
        scan_id=zap_scan_id,
        un_scanid=scan_id
    )
    """ Save Vulnerability in database """
    time.sleep(5)
    all_vuln = zap.zap_scan_result()
    time.sleep(5)
    save_all_vuln = zap.zap_result_save(
        all_vuln=all_vuln,
        project_id=project_id,
        un_scanid=scan_id,
    )
    print(save_all_vuln)
def xml_upload(request):
    """
    Handle an uploaded ZAP or Burp XML report and import its findings.

    :param request: Django HttpRequest; POST carries project_id, scanner,
                    scan_url and the 'xmlfile' upload.
    :return: Redirect to the Burp scan list (burp uploads) or the upload page.
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        # Uploaded reports are stored as already completed (100%).
        scan_status = "100"
        if scanner == "zap_scan":
            date_time = datetime.datetime.now()
            scan_dump = zap_scans_db(scan_url=scan_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     vul_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
            # NOTE: zap branch intentionally falls through to the final
            # render() below (no redirect), as in the original.
        elif scanner == "burp_scan":
            print(scanner)
            print(xml_file)
            print(scan_url)
            date_time = datetime.datetime.now()
            scan_dump = burp_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            # BUG FIX: original passed undefined name `target_url` here,
            # raising NameError at runtime; the uploaded URL is `scan_url`.
            do_xml_data = burp_scans(project_id, scan_url, scan_id)
            do_xml_data.burp_scan_data(root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/webscanners/burp_scan_list")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def launch_zap_scan(target_url, project_id, rescan_id, rescan, scan_id, user):
    """
    Launch a ZAP spider + active scan for a user and email a summary.

    :param target_url: Target URL to scan.
    :param project_id: Project ID the scan belongs to.
    :param rescan_id: ID of the original scan when this run is a rescan.
    :param rescan: Rescan flag.
    :param scan_id: Unique scan ID recorded for this run.
    :param user: Django user; used for per-user ZAP settings, notifications
                 and the completion email.
    :return: None
    """
    username = user.username
    zap_enabled = False
    random_port = '8091'
    # Per-user ZAP settings decide whether to use an external ZAP instance;
    # the loop keeps the flag of the last matching row (original behavior).
    all_zap = zap_settings_db.objects.filter(username=username)
    for zap in all_zap:
        zap_enabled = zap.enabled
    if zap_enabled is False:
        # No external ZAP configured: spin up a local instance on a free port.
        print("started local instence")
        random_port = zap_plugin.zap_local()
    # Retry until ZAP answers; the spider kick-off doubles as the liveness
    # probe. BUG FIX: the original wrapped this in `for i in range(0, 100)`,
    # which re-ran the already-successful connection test / spider start up
    # to 100 times; a single retry loop that exits on first success is the
    # intended behavior.
    while True:
        try:
            # Connection Test
            zap_connect = zap_plugin.zap_connect(random_port,
                                                 username=username)
            zap_connect.spider.scan(url=target_url)
        except Exception as e:
            print("ZAP Connection Not Found, re-try after 5 sec")
            time.sleep(5)
            continue
        break
    # Tune spider/scanner concurrency before launching.
    zap_plugin.zap_spider_thread(count=20, random_port=random_port,
                                 username=username)
    zap_plugin.zap_spider_setOptionMaxDepth(count=5, random_port=random_port,
                                            username=username)
    zap_plugin.zap_scan_thread(count=30, random_port=random_port,
                               username=username)
    zap_plugin.zap_scan_setOptionHostPerScan(count=3, random_port=random_port,
                                             username=username)
    # Load ZAP Plugin
    zap = zap_plugin.ZAPScanner(target_url, project_id, rescan_id, rescan,
                                random_port=random_port, username=username)
    zap.exclude_url()
    time.sleep(3)
    zap.cookies()
    time.sleep(3)
    date_time = datetime.now()
    try:
        # Record the scan row up-front; vul_status '0' marks it in-progress.
        save_all_scan = zap_scans_db(
            username=username,
            project_id=project_id,
            scan_url=target_url,
            scan_scanid=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
            vul_status='0'
        )
        save_all_scan.save()
        notify.send(user, recipient=user,
                    verb='ZAP Scan URL %s Added' % target_url)
    except Exception as e:
        print(e)
    notify.send(user, recipient=user, verb='ZAP Scan Started')
    # Spider phase.
    zap.zap_spider_thread(thread_value=30)
    spider_id = zap.zap_spider()
    zap.spider_status(spider_id=spider_id)
    zap.spider_result(spider_id=spider_id)
    notify.send(user, recipient=user, verb='ZAP Scan Spider Completed')
    time.sleep(5)
    """ ZAP Scan trigger on target_url """
    zap_scan_id = zap.zap_scan()
    zap.zap_scan_status(
        scan_id=zap_scan_id,
        un_scanid=scan_id
    )
    """ Save Vulnerability in database """
    time.sleep(5)
    all_vuln = zap.zap_scan_result(target_url=target_url)
    time.sleep(5)
    save_all_vuln = zap.zap_result_save(
        all_vuln=all_vuln,
        project_id=project_id,
        un_scanid=scan_id,
        username=username,
    )
    print(save_all_vuln)
    # Summary totals for the email: the loop keeps the counts of the last
    # scan row for this user (original behavior, not filtered by scan_id).
    all_zap_scan = zap_scans_db.objects.filter(username=username)
    total_vuln = ''
    total_high = ''
    total_medium = ''
    total_low = ''
    for data in all_zap_scan:
        total_vuln = data.total_vul
        total_high = data.high_vul
        total_medium = data.medium_vul
        total_low = data.low_vul
    if zap_enabled is False:
        # We started a local ZAP instance above; shut it down again.
        zap.zap_shutdown()
    notify.send(user, recipient=user,
                verb='ZAP Scan URL %s Completed' % target_url)
    subject = 'Archery Tool Scan Status - ZAP Scan Completed'
    message = 'ZAP Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target_url, total_vuln,
                                         total_high, total_medium,
                                         total_low)
    email_notify(user=user, subject=subject, message=message)
def launch_schudle_zap_scan(target_url, project_id, rescan_id, rescan, scan_id):
    """
    Scheduled (unattended) variant of the ZAP scan launcher.

    Connects to a fixed local ZAP instance on port 8090, runs spider +
    active scan against *target_url*, stores results and emails a summary
    via email_sch_notify (no user/notification object involved).

    :param target_url: Target URL to scan.
    :param project_id: Project ID the scan belongs to.
    :param rescan_id: ID of the original scan when this run is a rescan.
    :param rescan: Rescan flag.
    :param scan_id: Unique scan ID recorded for this run.
    :return: None on success; an HttpResponseRedirect when ZAP is
             unreachable (NOTE(review): returning a redirect from a
             scheduled job looks copy-pasted from a view — confirm callers
             ignore the return value).
    """
    random_port = '8090'
    # Connection Test
    zap_connect = zap_plugin.zap_connect(random_port, username='')
    try:
        # The spider kick-off doubles as the liveness probe.
        zap_connect.spider.scan(url=target_url)
    except Exception:
        # ZAP unreachable: email the failure and bail out.
        subject = 'ZAP Connection Not Found'
        message = 'ZAP Scanner failed due to setting not found '
        email_sch_notify(subject=subject, message=message)
        print("ZAP Connection Not Found")
        return HttpResponseRedirect(reverse('webscanners:index'))
    # Load ZAP Plugin and prepare the session (URL exclusions + cookies).
    zap = zap_plugin.ZAPScanner(target_url, project_id, rescan_id, rescan,
                                random_port=random_port)
    zap.exclude_url()
    time.sleep(3)
    zap.cookies()
    time.sleep(3)
    date_time = datetime.now()
    try:
        # Record the scan row up-front; vul_status '0' marks it in-progress.
        save_all_scan = zap_scans_db(
            project_id=project_id,
            scan_url=target_url,
            scan_scanid=scan_id,
            date_time=date_time,
            rescan_id=rescan_id,
            rescan=rescan,
            vul_status='0'
        )
        save_all_scan.save()
    except Exception as e:
        # Best-effort bookkeeping: a failed insert does not abort the scan.
        print(e)
    # Spider phase.
    zap.zap_spider_thread(thread_value=30)
    spider_id = zap.zap_spider()
    zap.spider_status(spider_id=spider_id)
    zap.spider_result(spider_id=spider_id)
    time.sleep(5)
    """ ZAP Scan trigger on target_url """
    zap_scan_id = zap.zap_scan()
    zap.zap_scan_status(
        scan_id=zap_scan_id,
        un_scanid=scan_id
    )
    """ Save Vulnerability in database """
    time.sleep(5)
    all_vuln = zap.zap_scan_result(target_url=target_url)
    time.sleep(5)
    zap.zap_result_save(
        all_vuln=all_vuln,
        project_id=project_id,
        un_scanid=scan_id,
        username=''
    )
    # Summary totals for the email: the loop keeps the counts of the LAST
    # zap_scans_db row (not filtered by this scan_id) — original behavior.
    all_zap_scan = zap_scans_db.objects.all()
    total_vuln = ''
    total_high = ''
    total_medium = ''
    total_low = ''
    for data in all_zap_scan:
        total_vuln = data.total_vul
        total_high = data.high_vul
        total_medium = data.medium_vul
        total_low = data.low_vul
    subject = 'Archery Tool Scan Status - ZAP Scan Completed'
    message = 'ZAP Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target_url, total_vuln,
                                         total_high, total_medium,
                                         total_low)
    email_sch_notify(subject=subject, message=message)
def launch_web_scan(target_url, project_id):
    """
    Legacy (Python 2) ZAP web-scan launcher talking to ZAP directly via ZAPv2.

    Reads the ZAP API key/host/port from the JSON file at api_key_path,
    excludes configured URLs, injects stored cookies, runs spider + active
    scan, stores findings in zap_scan_results_db and attaches the raw
    request/response data per finding. Progress is published through
    module-level globals (spider_status, scans_status, spider_alert, ...)
    read elsewhere — keep the `global` statements and assignment order.

    :param target_url: Target URL to scan.
    :param project_id: Project ID the scan belongs to.
    :return: HttpResponse with status 201 when the run finishes.
    """
    # Load ZAP connection settings; the API key is stored signed on disk.
    try:
        with open(api_key_path, 'r+') as f:
            data = json.load(f)
            lod_apikey = data['zap_api_key']
            apikey = signing.loads(lod_apikey)
            zapath = data['zap_path']
            zap_port = data['zap_port']
    except Exception as e:
        print e
    # Define settings to ZAP Proxy
    zap = ZAPv2(apikey=apikey,
                proxies={'http': zapath + ':' + zap_port,
                         'https': zapath + ':' + zap_port})
    """ Zap scan start """
    # try:
    #     # ZAP launch function
    #     zapscanner.start_zap()
    #
    # except Exception as e:
    #     print e
    #     print "ZAP Failed.............."
    #     print "ZAP Restarting"
    #
    #     time.sleep(15)
    # Get Excluded URL from excluded_db models
    try:
        all_excluded = excluded_db.objects.filter(
            Q(exclude_url__icontains=target_url))
        # The loop keeps the LAST matching exclude pattern in the global.
        for data in all_excluded:
            global excluded_url
            excluded_url = data.exclude_url
            print "excluded url ", excluded_url
        print "Excluded url ", excluded_url
        # Excluding URL from scans in zap API
        url_exclude = zap.spider.exclude_from_scan(regex=excluded_url)
        print "URL Excluded:", url_exclude
    except Exception as e:
        print "ZAP Failed.............."
        print "ZAP Restarting"
    # Pull stored cookies for the target; keeps the LAST match in the global.
    all_cookie = cookie_db.objects.filter(Q(url__icontains=target_url))
    for da in all_cookie:
        global cookies
        cookies = da.cookie
        print da.url
    print "Cookies from database:", cookies
    # Replace any existing cookie rule before adding a fresh one.
    try:
        remove_cookie = zap.replacer.remove_rule(target_url)
    except Exception as e:
        print e
    print "Remove Cookie :", remove_cookie
    # Adding cookies value
    try:
        cookie_add = zap.replacer.add_rule(apikey=apikey,
                                           description=target_url,
                                           enabled="true",
                                           matchtype='REQ_HEADER',
                                           matchregex="false",
                                           replacement=cookies,
                                           matchstring="Cookie",
                                           initiators="")
        print "Cookies Added :", cookie_add
    except Exception as e:
        print e
    # Ajax spider runs alongside the classic spider started below.
    zap.ajaxSpider.scan(target_url)
    try:
        scanid = zap.spider.scan(target_url)
        save_all = zap_spider_db(spider_url=target_url,
                                 spider_scanid=scanid)
        save_all.save()
    except Exception as e:
        print e
    try:
        zap.spider.set_option_thread_count(apikey=apikey, integer='30')
    except Exception as e:
        print e
    # Poll spider progress until ZAP reports 100%.
    try:
        while (int(zap.spider.status(scanid)) < 100):
            global spider_status
            spider_status = zap.spider.status(scanid)
            print "Spider progress", spider_status
            time.sleep(5)
    except Exception as e:
        print e
    spider_status = "100"
    spider_res_out = zap.spider.results(scanid)
    data_out = ("\n".join(map(str, spider_res_out)))
    print data_out
    print 'Spider Completed------'
    print 'Target :', target_url
    global spider_alert
    spider_alert = "Spider Completed"
    time.sleep(5)
    print 'Scanning Target %s' % target_url
    """ ZAP Scan trigger on target_url """
    try:
        scan_scanid = zap.ascan.scan(target_url)
    except Exception as e:
        print e
    un_scanid = uuid.uuid4()
    date_time = datetime.datetime.now()
    try:
        save_all_scan = zap_scans_db(project_id=project_id,
                                     scan_url=target_url,
                                     scan_scanid=un_scanid,
                                     date_time=date_time)
        save_all_scan.save()
    except Exception as e:
        print e
    # Poll active-scan progress, mirroring it into the DB row each tick.
    try:
        while (int(zap.ascan.status(scan_scanid)) < 100):
            print 'ZAP Scan Status %: ' + zap.ascan.status(scan_scanid)
            global scans_status
            scans_status = zap.ascan.status(scan_scanid)
            zap_scans_db.objects.filter(
                scan_scanid=un_scanid).update(vul_status=scans_status)
            time.sleep(5)
    except Exception as e:
        print e
    # Save Vulnerability in database
    scans_status = "100"
    zap_scans_db.objects.filter(
        scan_scanid=un_scanid).update(vul_status=scans_status)
    print target_url
    time.sleep(5)
    # Persist one zap_scan_results_db row per ZAP alert.
    all_vuln = zap.core.alerts(target_url)
    for vuln in all_vuln:
        vuln_id = uuid.uuid4()
        confidence = vuln['confidence']
        wascid = vuln['wascid']
        cweid = vuln['cweid']
        risk = vuln['risk']
        reference = vuln['reference']
        url = vuln['url']
        name = vuln['name']
        solution = vuln['solution']
        param = vuln['param']
        evidence = vuln['evidence']
        sourceid = vuln['sourceid']
        pluginId = vuln['pluginId']
        other = vuln['other']
        attack = vuln['attack']
        messageId = vuln['messageId']
        method = vuln['method']
        alert = vuln['alert']
        ids = vuln['id']
        description = vuln['description']
        false_positive = 'No'
        # Bootstrap CSS class used by the UI to color the finding.
        global vul_col
        if risk == 'High':
            vul_col = "important"
        elif risk == 'Medium':
            vul_col = "warning"
        elif risk == 'Low':
            vul_col = "info"
        else:
            vul_col = "info"
        # date_time = datetime.datetime.now()
        dump_all = zap_scan_results_db(vuln_id=vuln_id,
                                       vuln_color=vul_col,
                                       scan_id=un_scanid,
                                       project_id=project_id,
                                       confidence=confidence,
                                       wascid=wascid,
                                       cweid=cweid,
                                       risk=risk,
                                       reference=reference,
                                       url=url,
                                       name=name,
                                       solution=solution,
                                       param=param,
                                       evidence=evidence,
                                       sourceid=sourceid,
                                       pluginId=pluginId,
                                       other=other,
                                       attack=attack,
                                       messageId=messageId,
                                       method=method,
                                       alert=alert,
                                       ids=ids,
                                       description=description,
                                       false_positive=false_positive)
        dump_all.save()
    time.sleep(5)
    # Distinct (name, risk, color) triples drive the per-severity totals.
    zap_all_vul = zap_scan_results_db.objects.filter(
        scan_id=un_scanid).values('name', 'risk', 'vuln_color').distinct()
    total_vul = len(zap_all_vul)
    total_high = len(zap_all_vul.filter(risk="High"))
    total_medium = len(zap_all_vul.filter(risk="Medium"))
    total_low = len(zap_all_vul.filter(risk="Low"))
    zap_scans_db.objects.filter(scan_scanid=un_scanid).update(
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low)
    spider_alert = "Scan Completed"
    time.sleep(10)
    print un_scanid
    # Attach full request/response metadata to each stored finding.
    # NOTE: dict.viewitems() is Python 2 only.
    zap_web_all = zap_scan_results_db.objects.filter(scan_id=un_scanid)
    for m in zap_web_all:
        msg_id = m.messageId
        request_response = zap.core.message(id=msg_id)
        ja_son = json.dumps(request_response)
        ss = ast.literal_eval(ja_son)
        for key, value in ss.viewitems():
            global note
            if key == "note":
                note = value
            global rtt
            if key == "rtt":
                rtt = value
            global tags
            if key == "tags":
                tags = value
            global timestamp
            if key == "timestamp":
                timestamp = value
            global responseHeader
            if key == "responseHeader":
                responseHeader = value
            global requestBody
            if key == "requestBody":
                requestBody = value
            global responseBody
            if key == "responseBody":
                responseBody = value
            global requestHeader
            if key == "requestHeader":
                requestHeader = value
            global cookieParams
            if key == "cookieParams":
                cookieParams = value
            global res_type
            if key == "type":
                res_type = value
            global res_id
            if key == "id":
                res_id = value
        zap_scan_results_db.objects.filter(messageId=msg_id).update(
            note=note,
            rtt=rtt,
            tags=tags,
            timestamp=timestamp,
            responseHeader=responseHeader,
            requestBody=requestBody,
            responseBody=responseBody,
            requestHeader=requestHeader,
            cookieParams=cookieParams,
            res_type=res_type,
            res_id=res_id)
    #zapscanner.stop_zap()
    try:
        email_notification.email_notify()
    except Exception as e:
        print e
    return HttpResponse(status=201)
def launch_zap_scan(target_url, project_id, rescan_id, rescan, scan_id, user):
    """
    Legacy (Python 2) ZAP scan launcher with user notifications.

    Probes the default ZAP instance, runs spider + active scan against
    *target_url*, stores findings and emails a per-severity summary.

    :param target_url: Target URL to scan.
    :param project_id: Project ID the scan belongs to.
    :param rescan_id: ID of the original scan when this run is a rescan.
    :param rescan: Rescan flag.
    :param scan_id: Unique scan ID recorded for this run.
    :param user: Django user receiving notify/email messages.
    :return: None on success; an HttpResponseRedirect when ZAP is
             unreachable.
    """
    # Connection Test
    zap_connect = zap_plugin.zap_connect()
    try:
        # The spider kick-off doubles as the liveness probe.
        zap_connect.spider.scan(url=target_url)
        notify.send(user, recipient=user, verb='ZAP Scan Started')
    except Exception:
        # ZAP unreachable: notify + email the failure and bail out.
        notify.send(user, recipient=user, verb='ZAP Conection Not Found')
        subject = 'ZAP Conection Not Found'
        message = 'ZAP Scanner failed due to setting not found '
        email_notify(user=user, subject=subject, message=message)
        print "ZAP Conection Not Found"
        return HttpResponseRedirect('/webscanners/')
    # Load ZAP Plugin and prepare the session (URL exclusions + cookies).
    zap = zap_plugin.ZAPScanner(target_url, project_id, rescan_id, rescan)
    zap.exclude_url()
    time.sleep(3)
    zap.cookies()
    time.sleep(3)
    date_time = datetime.now()
    try:
        # Record the scan row up-front; vul_status '0' marks it in-progress.
        save_all_scan = zap_scans_db(project_id=project_id,
                                     scan_url=target_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     rescan_id=rescan_id,
                                     rescan=rescan,
                                     vul_status='0')
        save_all_scan.save()
        notify.send(user, recipient=user,
                    verb='ZAP Scan URL %s Added' % target_url)
    except Exception as e:
        print e
    # Spider phase.
    zap.zap_spider_thread(thread_value=30)
    spider_id = zap.zap_spider()
    zap.spider_status(spider_id=spider_id)
    zap.spider_result(spider_id=spider_id)
    notify.send(user, recipient=user, verb='ZAP Scan Spider Completed')
    time.sleep(5)
    print 'Scanning Target %s' % target_url
    """ ZAP Scan trigger on target_url """
    zap_scan_id = zap.zap_scan()
    # un_scanid = uuid.uuid4()
    zap.zap_scan_status(scan_id=zap_scan_id, un_scanid=scan_id)
    """ Save Vulnerability in database """
    time.sleep(5)
    all_vuln = zap.zap_scan_result()
    time.sleep(5)
    save_all_vuln = zap.zap_result_save(
        all_vuln=all_vuln,
        project_id=project_id,
        un_scanid=scan_id,
    )
    print save_all_vuln
    # Summary totals for the email: the loop keeps the counts of the LAST
    # zap_scans_db row (not filtered by this scan_id) — original behavior.
    all_zap_scan = zap_scans_db.objects.all()
    total_vuln = ''
    total_high = ''
    total_medium = ''
    total_low = ''
    for data in all_zap_scan:
        total_vuln = data.total_vul
        total_high = data.high_vul
        total_medium = data.medium_vul
        total_low = data.low_vul
    notify.send(user, recipient=user,
                verb='ZAP Scan URL %s Completed' % target_url)
    subject = 'Archery Tool Scan Status - ZAP Scan Completed'
    message = 'ZAP Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target_url, total_vuln,
                                         total_high, total_medium,
                                         total_low)
    email_notify(user=user, subject=subject, message=message)
def xml_upload(request):
    """
    Handle report uploads (XML/JSON/HTML) for all supported scanners and
    import their findings for the current user.

    :param request: Django HttpRequest; POST carries project_id, scanner,
                    scan_url and the 'xmlfile' upload.
    :return: Redirect to the matching scan-list page on success, or the
             upload page (with a message) on failure / GET.

    BUG FIX: every branch used a bare ``except:``, which also swallows
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception`` and the
    error is now printed for diagnosis. User-visible behavior is unchanged.
    """
    username = request.user.username
    all_project = project_db.objects.filter(username=username)
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        # Uploaded reports are stored as already completed (100%).
        scan_status = "100"
        if scanner == "zap_scan":
            try:
                tree = ET.parse(xml_file)
                date_time = datetime.now()
                root_xml = tree.getroot()
                # Round-trip through ASCII to strip characters the parser
                # chain cannot handle.
                en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
                    'ascii', 'ignore')
                root_xml_en = ET.fromstring(en_root_xml)
                scan_dump = zap_scans_db(username=username,
                                         scan_url=scan_url,
                                         scan_scanid=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         vul_status=scan_status,
                                         rescan='No')
                scan_dump.save()
                zap_xml_parser.xml_parser(username=username,
                                          project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('zapscanner:zap_scan_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == "burp_scan":
            try:
                date_time = datetime.now()
                # Burp scan XML parser
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
                    'ascii', 'ignore')
                root_xml_en = ET.fromstring(en_root_xml)
                scan_dump = burp_scan_db(username=username,
                                         url=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
                scan_dump.save()
                burp_xml_parser.burp_scan_data(root_xml_en,
                                               project_id,
                                               scan_id,
                                               username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('burpscanner:burp_scan_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == "arachni":
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = arachni_scan_db(username=username,
                                            url=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
                scan_dump.save()
                arachni_xml_parser.xml_parser(username=username,
                                              project_id=project_id,
                                              scan_id=scan_id,
                                              root=root_xml,
                                              target_url=scan_url)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('arachniscanner:arachni_scan_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'netsparker':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = netsparker_scan_db(username=username,
                                               url=scan_url,
                                               scan_id=scan_id,
                                               date_time=date_time,
                                               project_id=project_id,
                                               scan_status=scan_status)
                scan_dump.save()
                netsparker_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('netsparkerscanner:netsparker_scan_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'webinspect':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = webinspect_scan_db(username=username,
                                               url=scan_url,
                                               scan_id=scan_id,
                                               date_time=date_time,
                                               project_id=project_id,
                                               scan_status=scan_status)
                scan_dump.save()
                webinspect_xml_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 root=root_xml,
                                                 username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('webinspectscanner:webinspect_scan_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'acunetix':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root_xml = tree.getroot()
                scan_dump = acunetix_scan_db(username=username,
                                             url=scan_url,
                                             scan_id=scan_id,
                                             date_time=date_time,
                                             project_id=project_id,
                                             scan_status=scan_status)
                scan_dump.save()
                acunetix_xml_parser.xml_parser(username=username,
                                               project_id=project_id,
                                               scan_id=scan_id,
                                               root=root_xml)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('acunetixscanner:acunetix_scan_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'dependencycheck':
            try:
                date_time = datetime.now()
                data = etree.parse(xml_file)
                root = data.getroot()
                scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                                    scan_id=scan_id,
                                                    date_time=date_time,
                                                    project_id=project_id,
                                                    scan_status=scan_status,
                                                    username=username)
                scan_dump.save()
                dependencycheck_report_parser.xml_parser(
                    project_id=project_id,
                    scan_id=scan_id,
                    data=root,
                    username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('dependencycheck:dependencycheck_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'checkmarx':
            try:
                date_time = datetime.now()
                data = etree.parse(xml_file)
                root = data.getroot()
                scan_dump = checkmarx_scan_db(project_name=scan_url,
                                              scan_id=scan_id,
                                              date_time=date_time,
                                              project_id=project_id,
                                              scan_status=scan_status,
                                              username=username)
                scan_dump.save()
                checkmarx_xml_report_parser.checkmarx_report_xml(
                    project_id=project_id,
                    scan_id=scan_id,
                    data=root,
                    username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(
                    reverse('checkmarx:checkmarx_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'findbugs':
            try:
                date_time = datetime.now()
                tree = ET.parse(xml_file)
                root = tree.getroot()
                scan_dump = findbugs_scan_db(project_name=scan_url,
                                             scan_id=scan_id,
                                             date_time=date_time,
                                             project_id=project_id,
                                             scan_status=scan_status,
                                             username=username)
                scan_dump.save()
                findbugs_report_parser.xml_parser(project_id=project_id,
                                                  scan_id=scan_id,
                                                  root=root,
                                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('findbugs:findbugs_list'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
        elif scanner == 'nikto':
            try:
                date_time = datetime.now()
                scan_dump = nikto_result_db(
                    date_time=date_time,
                    scan_url=scan_url,
                    scan_id=scan_id,
                    project_id=project_id,
                )
                scan_dump.save()
                # Nikto uploads are HTML, parsed directly from the file.
                nikto_html_parser(xml_file, project_id, scan_id,
                                  username=username)
                messages.success(request, "File Uploaded")
                return HttpResponseRedirect(reverse('tools:nikto'))
            except Exception as e:
                print(e)
                messages.error(request, "File Not Supported")
                return render(request, 'upload_xml.html',
                              {'all_project': all_project})
    return render(request, 'upload_xml.html', {'all_project': all_project})
def xml_upload(request):
    """
    Handle report uploads for all supported scanners (no per-user filtering)
    and import their findings.

    :param request: Django HttpRequest; POST carries project_id, scanner,
                    scan_url and the 'xmlfile' upload.
    :return: Redirect to the matching scan-list page on success, or the
             upload page on GET / unknown scanner.
    """
    all_project = project_db.objects.all()
    if request.method == "POST":
        project_id = request.POST.get("project_id")
        scanner = request.POST.get("scanner")
        xml_file = request.FILES['xmlfile']
        scan_url = request.POST.get("scan_url")
        scan_id = uuid.uuid4()
        # Uploaded reports are stored as already completed (100%).
        scan_status = "100"
        if scanner == "zap_scan":
            date_time = datetime.now()
            scan_dump = zap_scans_db(scan_url=scan_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     vul_status=scan_status,
                                     rescan='No')
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            zap_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
            return HttpResponseRedirect("/zapscanner/zap_scan_list/")
        elif scanner == "burp_scan":
            date_time = datetime.now()
            scan_dump = burp_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
            scan_dump.save()
            # Burp scan XML parser
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            # BUG FIX: original passed undefined name `target_url` here,
            # raising NameError at runtime; the uploaded URL is `scan_url`.
            do_xml_data = burp_plugin.burp_scans(project_id,
                                                 scan_url,
                                                 scan_id)
            do_xml_data.burp_scan_data(root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/burpscanner/burp_scan_list")
        elif scanner == "arachni":
            date_time = datetime.now()
            scan_dump = arachni_scan_db(url=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            arachni_xml_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
            print("Save scan Data")
            return HttpResponseRedirect("/arachniscanner/arachni_scan_list")
        elif scanner == 'netsparker':
            date_time = datetime.now()
            scan_dump = netsparker_scan_db(url=scan_url,
                                           scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            netsparker_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id,
                                             root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/netsparkerscanner/netsparker_scan_list/")
        elif scanner == 'webinspect':
            date_time = datetime.now()
            scan_dump = webinspect_scan_db(url=scan_url,
                                           scan_id=scan_id,
                                           date_time=date_time,
                                           project_id=project_id,
                                           scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            webinspect_xml_parser.xml_parser(project_id=project_id,
                                             scan_id=scan_id,
                                             root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/webinspectscanner/webinspect_scan_list/")
        elif scanner == 'acunetix':
            date_time = datetime.now()
            scan_dump = acunetix_scan_db(url=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root_xml = tree.getroot()
            acunetix_xml_parser.xml_parser(project_id=project_id,
                                           scan_id=scan_id,
                                           root=root_xml)
            print("Saved scan data")
            return HttpResponseRedirect("/acunetixscanner/acunetix_scan_list/")
        elif scanner == 'dependencycheck':
            date_time = datetime.now()
            scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                                scan_id=scan_id,
                                                date_time=date_time,
                                                project_id=project_id,
                                                scan_status=scan_status)
            scan_dump.save()
            # dependency-check reports are parsed with lxml's etree.
            data = etree.parse(xml_file)
            dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                     scan_id=scan_id,
                                                     data=data)
            print("Saved scan data")
            return HttpResponseRedirect(
                "/dependencycheck/dependencycheck_list")
        elif scanner == 'findbugs':
            date_time = datetime.now()
            scan_dump = findbugs_scan_db(project_name=scan_url,
                                         scan_id=scan_id,
                                         date_time=date_time,
                                         project_id=project_id,
                                         scan_status=scan_status)
            scan_dump.save()
            tree = ET.parse(xml_file)
            root = tree.getroot()
            findbugs_report_parser.xml_parser(project_id=project_id,
                                              scan_id=scan_id,
                                              root=root)
            print("Saved scan data")
            return HttpResponseRedirect("/findbugs/findbugs_list")
        elif scanner == 'nikto':
            date_time = datetime.now()
            scan_dump = nikto_result_db(
                date_time=date_time,
                scan_url=scan_url,
                scan_id=scan_id,
                project_id=project_id,
            )
            scan_dump.save()
            # Nikto uploads are HTML, parsed directly from the file.
            nikto_html_parser(xml_file, project_id, scan_id)
            print("Saved scan data")
            return HttpResponseRedirect("/tools/nikto/")
    return render(request, 'upload_xml.html', {'all_project': all_project})
def post(self, request, format=None):
    """
    API endpoint: accept a scanner report upload and import its findings.

    :param request: DRF request; data carries project_id, scanner, filename
                    (report file), scan_url.
    :param format: DRF format suffix (unused).
    :return: DRF Response describing the imported scan.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    scan_id = uuid.uuid4()
    # Uploaded reports are stored as already completed (100%).
    scan_status = "100"
    # NOTE: py2 `print xml_file` statements converted to print() calls so
    # this method is valid on Python 3 like the rest of the file.
    print(xml_file)
    print(scanner)
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # Bandit reports are JSON, not XML.
        data = json.loads(xml_file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        # dependency-check reports are parsed with lxml's etree.
        data = etree.parse(xml_file)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        # Clair reports are JSON.
        data = json.loads(xml_file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # InSpec reports are JSON.
        data = json.loads(xml_file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        # Nikto uploads are HTML, parsed directly from the file.
        nikto_html_parser(xml_file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """Handle a REST upload of an external scanner report.

    Reads the scanner name and report location from the request payload,
    records a scan entry in the matching scanner's DB table, then parses
    the report and stores its findings.

    :param request: DRF request; expects ``project_id``, ``scanner``,
        ``filename`` (report file; passed to ``ET.parse``/``json.loads`` —
        presumably a path or file-like object; TODO confirm against caller),
        and ``scan_url``.
    :param format: DRF format suffix (unused here).
    :return: ``Response`` with an upload confirmation message.
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    xml_file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    # A fresh UUID identifies this upload across the scan and result tables.
    scan_id = uuid.uuid4()
    # '100' marks the scan as already complete (imported, not live-scanned).
    scan_status = "100"
    print xml_file
    print scanner
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        do_xml_data = burp_plugin.burp_scans(project_id, scan_url, scan_id)
        do_xml_data.burp_scan_data(root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        tree = ET.parse(xml_file)
        root_xml = tree.getroot()
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({"message": "Scan Data Uploaded"})
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # Bandit reports are JSON, not XML, despite the variable name.
        data = json.loads(xml_file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id)
        return Response({"message": "Scan Data Uploaded"})
    # NOTE(review): an unknown scanner value falls through to a success
    # message even though nothing was stored — confirm this is intended.
    return Response({"message": "Scan Data Uploaded"})
def url_api_scan(request):
    """Launch a ZAP spider + active scan against a user-supplied URL.

    Flow (when ``auth_val == 'No'``): read ZAP connection settings from
    ``api_key_path``, start the local ZAP instance, spider the target,
    run an active scan while mirroring progress into ``zap_scans_db``,
    then persist every alert plus its request/response details.

    Progress is shared through module-level globals (``spider_status``,
    ``scans_status``, ``spider_alert``, ...) — presumably polled by
    another view; this makes the function non-reentrant. TODO confirm.

    :param request: Django request; POST carries ``scan_url``,
        ``project_id``, auth/header fields, etc.
    :return: redirect to ``/scanapi/`` on completion, or renders the
        scan-list template when no ``auth_val`` was posted.
    """
    if request.POST.get("auth_val"):
        auth_val = request.POST.get("auth_val")
        print auth_val
        if auth_val == 'No':
            target_url = request.POST.get("scan_url")
            # Header string is expected to be a Python-literal dict.
            req_header = ast.literal_eval(request.POST.get("req_header"))
            req_body = request.POST.get("req_body")
            method = request.POST.get("method")
            project_id = request.POST.get("project_id")
            scan_id = request.POST.get("scan_id")
            auth_token_key = request.POST.get("auth_token_key")
            # Load ZAP API key / port from the on-disk settings file.
            # NOTE(review): if this read fails, apikey/zap_port are never
            # bound and the ZAPv2(...) call below raises NameError.
            try:
                with open(api_key_path, 'r+') as f:
                    data = json.load(f)
                    lod_apikey = data['zap_api_key']
                    apikey = signing.loads(lod_apikey)
                    zapath = data['zap_path']
                    zap_port = data['zap_port']
            except Exception as e:
                print e
            zap = ZAPv2(apikey=apikey, proxies={
                'http': 'http://127.0.0.1' + ':' + zap_port,
                'https': 'http://127.0.0.1' + ':' + zap_port
            })
            print target_url
            """
            ***Starting ZAP Scanner***
            """
            try:
                zap_scanner = zapscanner.start_zap()
                print "Status of zap scanner:", zap_scanner
            except Exception as e:
                print e
                return HttpResponseRedirect("/webscanners/scans_list/")
            """
            *****End zap scanner****
            """
            # Give the freshly started ZAP daemon time to come up.
            time.sleep(10)
            """
            Excluding URL from scanner
            """
            # --- Spider phase ---
            scanid = zap.spider.scan(target_url)
            save_all = zap_spider_db(spider_url=target_url,
                                     spider_scanid=scanid)
            save_all.save()
            try:
                while (int(zap.spider.status(scanid)) < 100):
                    # print 'Spider progress %:' + zap.spider.status(scanid)
                    global spider_status
                    spider_status = zap.spider.status(scanid)
                    print "Spider progress", spider_status
                    time.sleep(5)
            except Exception as e:
                print e
            spider_status = "100"
            spider_res_out = zap.spider.results(scanid)
            data_out = ("\n".join(map(str, spider_res_out)))
            print data_out
            total_spider = len(spider_res_out)
            print 'Spider Completed------'
            print 'Target :', target_url
            global spider_alert
            spider_alert = "Spider Completed"
            time.sleep(5)
            # --- Active scan phase ---
            print 'Scanning Target %s' % target_url
            scan_scanid = zap.ascan.scan(target_url)
            # Archery's own scan id, distinct from ZAP's scan_scanid.
            un_scanid = uuid.uuid4()
            print "updated scanid :", un_scanid
            try:
                save_all_scan = zap_scans_db(project_id=project_id,
                                             scan_url=target_url,
                                             scan_scanid=un_scanid)
                save_all_scan.save()
            except Exception as e:
                print e
            # zap_scans_db.objects.filter(pk=some_value).update(field1='some value')
            try:
                while (int(zap.ascan.status(scan_scanid)) < 100):
                    print 'Scan progress from zap_scan_lauch function %: ' + zap.ascan.status(scan_scanid)
                    global scans_status
                    scans_status = zap.ascan.status(scan_scanid)
                    # Mirror live progress into the DB so the UI can poll it.
                    zap_scans_db.objects.filter(scan_scanid=un_scanid).update(vul_status=scans_status)
                    time.sleep(5)
            except Exception as e:
                print e
            # Save Vulnerability in database
            scans_status = "100"
            zap_scans_db.objects.filter(scan_scanid=un_scanid).update(vul_status=scans_status)
            print target_url
            time.sleep(5)
            # Persist every ZAP alert for this target.
            all_vuln = zap.core.alerts(target_url)
            # print all_vuln
            for vuln in all_vuln:
                vuln_id = uuid.uuid4()
                confidence = vuln['confidence']
                wascid = vuln['wascid']
                cweid = vuln['cweid']
                risk = vuln['risk']
                reference = vuln['reference']
                url = vuln['url']
                name = vuln['name']
                solution = vuln['solution']
                param = vuln['param']
                evidence = vuln['evidence']
                sourceid = vuln['sourceid']
                pluginId = vuln['pluginId']
                other = vuln['other']
                attack = vuln['attack']
                messageId = vuln['messageId']
                method = vuln['method']
                alert = vuln['alert']
                ids = vuln['id']
                description = vuln['description']
                # Map risk level to a UI badge color.
                # NOTE(review): vul_col keeps its previous (global) value for
                # risks outside High/Medium/Low, and is undefined on the very
                # first such alert — confirm intended.
                global vul_col
                if risk == 'High':
                    vul_col = "important"
                elif risk == 'Medium':
                    vul_col = "warning"
                elif risk == 'Low':
                    vul_col = "info"
                dump_all = zap_scan_results_db(vuln_id=vuln_id,
                                               vuln_color=vul_col,
                                               scan_id=un_scanid,
                                               project_id=project_id,
                                               confidence=confidence,
                                               wascid=wascid,
                                               cweid=cweid,
                                               risk=risk,
                                               reference=reference,
                                               url=url,
                                               name=name,
                                               solution=solution,
                                               param=param,
                                               evidence=evidence,
                                               sourceid=sourceid,
                                               pluginId=pluginId,
                                               other=other,
                                               attack=attack,
                                               messageId=messageId,
                                               method=method,
                                               alert=alert,
                                               id=ids,
                                               description=description)
                dump_all.save()
            time.sleep(5)
            # Aggregate totals per severity and store on the scan record.
            zap_all_vul = zap_scan_results_db.objects.filter(scan_id=un_scanid).order_by('scan_id')
            total_vul = len(zap_all_vul)
            total_high = len(zap_all_vul.filter(risk="High"))
            total_medium = len(zap_all_vul.filter(risk="Medium"))
            total_low = len(zap_all_vul.filter(risk="Low"))
            zap_scans_db.objects.filter(scan_scanid=un_scanid).update(total_vul=total_vul,
                                                                     high_vul=total_high,
                                                                     medium_vul=total_medium,
                                                                     low_vul=total_low)
            spider_alert = "Scan Completed"
            time.sleep(5)
            # Fetch the raw request/response for each stored alert and attach
            # it to the result row (keyed by ZAP's messageId).
            for msg in zap_all_vul:
                msg_id = msg.messageId
                request_response = zap.core.message(id=msg_id)
                ja_son = json.dumps(request_response)
                ss = ast.literal_eval(ja_son)
                for key, value in ss.viewitems():
                    global note
                    if key == "note":
                        note = value
                    global rtt
                    if key == "rtt":
                        rtt = value
                    global tags
                    if key == "tags":
                        tags = value
                    global timestamp
                    if key == "timestamp":
                        timestamp = value
                    global responseHeader
                    if key == "responseHeader":
                        responseHeader = value
                    global requestBody
                    if key == "requestBody":
                        requestBody = value
                    global responseBody
                    if key == "responseBody":
                        responseBody = value
                    global requestHeader
                    if key == "requestHeader":
                        requestHeader = value
                    global cookieParams
                    if key == "cookieParams":
                        cookieParams = value
                    global res_type
                    if key == "type":
                        res_type = value
                    global res_id
                    if key == "id":
                        res_id = value
                zap_scan_results_db.objects.filter(messageId=msg_id).update(note=note,
                                                                            rtt=rtt,
                                                                            tags=tags,
                                                                            timestamp=timestamp,
                                                                            responseHeader=responseHeader,
                                                                            requestBody=requestBody,
                                                                            responseBody=responseBody,
                                                                            requestHeader=requestHeader,
                                                                            cookieParams=cookieParams,
                                                                            res_type=res_type,
                                                                            res_id=res_id)
                print msg_id
                print res_id
            # zap_scanner = zapscanner.stop_zap()
            print "Status of zap scanner:", zap_scanner
            return HttpResponseRedirect('/scanapi/')
    return render(request, 'api_scan_list.html')
def post(self, request, format=None):
    """Handle an authenticated REST upload of a scanner report.

    Like the other upload handlers, but tags every DB record and parser
    call with the requesting ``username``. For each supported scanner a
    scan entry is stored, the report content (``filename`` field — raw
    report text, fed to ``ET.fromstring``/``json.loads``) is parsed, and
    the findings are persisted.

    :param request: DRF request; expects ``project_id``, ``scanner``,
        ``filename`` (report content), ``scan_url``.
    :param format: DRF format suffix (unused here).
    :return: ``Response`` describing the upload result.
    """
    username = request.user.username
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    scan_url = request.data.get("scan_url")
    # Fresh UUID shared by the scan record and its parsed findings.
    scan_id = uuid.uuid4()
    # '100' = imported report, already complete.
    scan_status = "100"
    if scanner == "zap_scan":
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No',
                                 username=username)
        scan_dump.save()
        # Round-trip through tostring/fromstring to strip non-ASCII bytes
        # that would otherwise break downstream parsing.
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en,
                                  username=username)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status,
                                 username=username)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en,
                                       project_id,
                                       scan_id,
                                       username=username)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status,
                                    username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml,
                                      username=username,
                                      target_url=scan_url)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=root_xml_en,
                                       username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml,
                                         username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml,
                                         username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        # Bandit emits JSON, not XML.
        data = json.loads(file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id,
                           username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status,
                                            username=username)
        scan_dump.save()
        # lxml expects bytes input here.
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data,
                                                 username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml,
                                          username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'checkmarx':
        date_time = datetime.datetime.now()
        scan_dump = checkmarx_scan_db(project_name=scan_url,
                                      scan_id=scan_id,
                                      date_time=date_time,
                                      project_id=project_id,
                                      scan_status=scan_status,
                                      username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        checkmarx_xml_report_parser.checkmarx_report_xml(
            data=root_xml,
            project_id=project_id,
            scan_id=scan_id,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'trivy':
        date_time = datetime.datetime.now()
        scan_dump = trivy_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        trivy_json_report_parser.trivy_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'npmaudit':
        date_time = datetime.datetime.now()
        scan_dump = npmaudit_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
        scan_dump.save()
        data = json.loads(file)
        npm_audit_report_json.npmaudit_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data,
                                                   username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nodejsscan':
        date_time = datetime.datetime.now()
        scan_dump = nodejsscan_scan_db(project_name=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status,
                                       username=username)
        scan_dump.save()
        data = json.loads(file)
        nodejsscan_report_json.nodejsscan_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'tfsec':
        date_time = datetime.datetime.now()
        scan_dump = tfsec_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status,
                                  username=username)
        scan_dump.save()
        data = json.loads(file)
        tfsec_report_parser.tfsec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'whitesource':
        date_time = datetime.datetime.now()
        scan_dump = whitesource_scan_db(project_name=scan_url,
                                        scan_id=scan_id,
                                        date_time=date_time,
                                        project_id=project_id,
                                        scan_status=scan_status,
                                        username=username)
        scan_dump.save()
        data = json.loads(file)
        whitesource_json_report_parser.whitesource_report_json(
            project_id=project_id,
            scan_id=scan_id,
            data=data,
            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dockle':
        date_time = datetime.datetime.now()
        scan_dump = dockle_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        data = json.loads(file)
        dockle_json_parser.dockle_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data,
                                              username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status,
                                   username=username)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.updated_nessus_parser(root=root_xml_en,
                                            scan_id=scan_id,
                                            project_id=project_id,
                                            username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        # One scan record per host found in the report.
        hosts = OpenVas_Parser.get_hosts(root_xml_en)
        for host in hosts:
            # NOTE(review): scan_id=host stores the host string as the scan
            # id, while the parser below is called with the UUID scan_id —
            # the records may not link up; confirm intended.
            scan_dump = scan_save_db(scan_ip=host,
                                     scan_id=host,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status,
                                     username=username)
            scan_dump.save()
        OpenVas_Parser.updated_xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml_en,
                                          username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(date_time=date_time,
                                    scan_url=scan_url,
                                    scan_id=scan_id,
                                    project_id=project_id,
                                    username=username)
        scan_dump.save()
        # Nikto reports are HTML; parsed by a dedicated HTML parser.
        nikto_html_parser(file, project_id, scan_id, username=username)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    # NOTE(review): unknown scanner still reports success with no data saved.
    return Response({"message": "Scan Data Uploaded"})
def post(self, request, format=None):
    """Handle a REST upload of a scanner report (no username tagging).

    For each supported scanner: save a scan entry in that scanner's DB
    table, parse the report content from the ``filename`` field (raw
    report text — fed to ``ET.fromstring``/``json.loads``), and persist
    the parsed findings.

    :param request: DRF request; expects ``project_id``, ``scanner``,
        ``filename`` (report content), ``scan_url``.
    :param format: DRF format suffix (unused here).
    :return: ``Response`` describing the upload result; unknown scanner
        values fall through to "Scan Data Not Uploaded".
    """
    project_id = request.data.get("project_id")
    scanner = request.data.get("scanner")
    file = request.data.get("filename")
    print("Results file content: ", file)
    scan_url = request.data.get("scan_url")
    # Fresh UUID shared by the scan record and its parsed findings.
    scan_id = uuid.uuid4()
    # '100' = imported report, already complete.
    scan_status = "100"
    if scanner == "zap_scan":
        print("Inside zap_scan")
        date_time = datetime.datetime.now()
        scan_dump = zap_scans_db(scan_url=scan_url,
                                 scan_scanid=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 vul_status=scan_status,
                                 rescan='No')
        scan_dump.save()
        # Round-trip through tostring/fromstring to drop non-ASCII bytes.
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        zap_xml_parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en)
        return Response({
            "message": "ZAP Scan Data Uploaded",
            "scanner": scanner,
            "project_id": project_id,
            "scan_id": scan_id
        })
    elif scanner == "burp_scan":
        date_time = datetime.datetime.now()
        scan_dump = burp_scan_db(url=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        # Burp scan XML parser
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        burp_xml_parser.burp_scan_data(root_xml_en, project_id, scan_id)
        return Response({
            "message": "Burp Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "arachni":
        date_time = datetime.datetime.now()
        scan_dump = arachni_scan_db(url=scan_url,
                                    scan_id=scan_id,
                                    date_time=date_time,
                                    project_id=project_id,
                                    scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        arachni_xml_parser.xml_parser(project_id=project_id,
                                      scan_id=scan_id,
                                      root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == "acunetix":
        date_time = datetime.datetime.now()
        scan_dump = acunetix_scan_db(url=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        acunetix_xml_parser.xml_parser(project_id=project_id,
                                       scan_id=scan_id,
                                       root=root_xml_en)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'netsparker':
        date_time = datetime.datetime.now()
        scan_dump = netsparker_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        netsparker_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'webinspect':
        date_time = datetime.datetime.now()
        scan_dump = webinspect_scan_db(url=scan_url,
                                       scan_id=scan_id,
                                       date_time=date_time,
                                       project_id=project_id,
                                       scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        webinspect_xml_parser.xml_parser(project_id=project_id,
                                         scan_id=scan_id,
                                         root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'banditscan':
        date_time = datetime.datetime.now()
        scan_dump = bandit_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        # Bandit emits JSON, not XML.
        data = json.loads(file)
        bandit_report_json(data=data,
                           project_id=project_id,
                           scan_id=scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'dependencycheck':
        date_time = datetime.datetime.now()
        scan_dump = dependencycheck_scan_db(project_name=scan_url,
                                            scan_id=scan_id,
                                            date_time=date_time,
                                            project_id=project_id,
                                            scan_status=scan_status)
        scan_dump.save()
        # lxml expects bytes input here.
        xml_dat = bytes(bytearray(file, encoding='utf-8'))
        data = etree.XML(xml_dat)
        dependencycheck_report_parser.xml_parser(project_id=project_id,
                                                 scan_id=scan_id,
                                                 data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'findbugs':
        date_time = datetime.datetime.now()
        scan_dump = findbugs_scan_db(project_name=scan_url,
                                     scan_id=scan_id,
                                     date_time=date_time,
                                     project_id=project_id,
                                     scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        findbugs_report_parser.xml_parser(project_id=project_id,
                                          scan_id=scan_id,
                                          root=root_xml)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'clair':
        date_time = datetime.datetime.now()
        scan_dump = clair_scan_db(project_name=scan_url,
                                  scan_id=scan_id,
                                  date_time=date_time,
                                  project_id=project_id,
                                  scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        clair_json_report_parser.clair_report_json(project_id=project_id,
                                                   scan_id=scan_id,
                                                   data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'inspec':
        date_time = datetime.datetime.now()
        scan_dump = inspec_scan_db(project_name=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        data = json.loads(file)
        inspec_json_parser.inspec_report_json(project_id=project_id,
                                              scan_id=scan_id,
                                              data=data)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nessus':
        date_time = datetime.datetime.now()
        scan_dump = nessus_scan_db(scan_ip=scan_url,
                                   scan_id=scan_id,
                                   date_time=date_time,
                                   project_id=project_id,
                                   scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        Nessus_Parser.nessus_parser(
            root=root_xml_en,
            scan_id=scan_id,
            project_id=project_id,
        )
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'openvas':
        date_time = datetime.datetime.now()
        scan_dump = scan_save_db(scan_ip=scan_url,
                                 scan_id=scan_id,
                                 date_time=date_time,
                                 project_id=project_id,
                                 scan_status=scan_status)
        scan_dump.save()
        root_xml = ET.fromstring(file)
        en_root_xml = ET.tostring(root_xml, encoding='utf8').decode(
            'ascii', 'ignore')
        root_xml_en = ET.fromstring(en_root_xml)
        OpenVas_Parser.xml_parser(project_id=project_id,
                                  scan_id=scan_id,
                                  root=root_xml_en)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    elif scanner == 'nikto':
        date_time = datetime.datetime.now()
        scan_dump = nikto_result_db(
            date_time=date_time,
            scan_url=scan_url,
            scan_id=scan_id,
            project_id=project_id,
        )
        scan_dump.save()
        # Nikto reports are HTML; parsed by a dedicated HTML parser.
        nikto_html_parser(file, project_id, scan_id)
        return Response({
            "message": "Scan Data Uploaded",
            "project_id": project_id,
            "scan_id": scan_id,
            "scanner": scanner
        })
    return Response({"message": "Scan Data Not Uploaded"})
def launch_zap_scan(target_url, project_id, rescan_id, rescan, scan_id, user):
    """Launch a full ZAP scan (spider + active scan) against a target.

    Starts a local ZAP instance when no external one is configured,
    verifies connectivity, spiders and scans the target, persists the
    results, and notifies/emails the user on start, failure and
    completion.

    :param target_url: Target URL to scan.
    :param project_id: Project ID the scan belongs to.
    :param rescan_id: Id of the scan being re-run (rescan bookkeeping).
    :param rescan: Rescan flag value stored on the scan record.
    :param scan_id: Archery scan UUID used to key DB records.
    :param user: Django user receiving notifications/emails.
    :return: None on success; an ``HttpResponseRedirect`` when the ZAP
        connection test fails (kept for backward compatibility even
        though callers may ignore it).
    """
    # Use the configured external ZAP if any settings row enables it;
    # otherwise spin up a local instance.
    zap_enabled = False
    all_zap = zap_settings_db.objects.all()
    for zap_setting in all_zap:  # renamed from 'zap' to avoid shadowing the scanner below
        zap_enabled = zap_setting.enabled
    if zap_enabled is False:
        print("started local instence")
        zap_plugin.zap_local()
        time.sleep(20)  # give the local daemon time to come up
    # Connection Test
    zap_connect = zap_plugin.zap_connect()
    try:
        zap_connect.spider.scan(url=target_url)
        notify.send(user, recipient=user, verb='ZAP Scan Started')
    except Exception:
        notify.send(user, recipient=user, verb='ZAP Connection Not Found')
        subject = 'ZAP Connection Not Found'
        message = 'ZAP Scanner failed due to setting not found '
        email_notify(user=user, subject=subject, message=message)
        print("ZAP Connection Not Found")
        return HttpResponseRedirect(reverse('zapscanner:zap_scan_list'))
    # Tune spider/scan concurrency and depth before launching.
    zap_plugin.zap_spider_thread(count=20)
    zap_plugin.zap_spider_setOptionMaxDepth(count=5)
    zap_plugin.zap_scan_thread(count=30)
    zap_plugin.zap_scan_setOptionHostPerScan(count=3)
    # Load ZAP Plugin
    zap = zap_plugin.ZAPScanner(target_url, project_id, rescan_id, rescan)
    zap.exclude_url()
    time.sleep(3)
    zap.cookies()
    time.sleep(3)
    date_time = datetime.now()
    try:
        save_all_scan = zap_scans_db(project_id=project_id,
                                     scan_url=target_url,
                                     scan_scanid=scan_id,
                                     date_time=date_time,
                                     rescan_id=rescan_id,
                                     rescan=rescan,
                                     vul_status='0')
        save_all_scan.save()
        notify.send(user, recipient=user,
                    verb='ZAP Scan URL %s Added' % target_url)
    except Exception as e:
        print(e)
    # --- Spider phase ---
    zap.zap_spider_thread(thread_value=30)
    spider_id = zap.zap_spider()
    zap.spider_status(spider_id=spider_id)
    zap.spider_result(spider_id=spider_id)
    notify.send(user, recipient=user, verb='ZAP Scan Spider Completed')
    time.sleep(5)
    """ ZAP Scan trigger on target_url """
    zap_scan_id = zap.zap_scan()
    zap.zap_scan_status(scan_id=zap_scan_id, un_scanid=scan_id)
    """ Save Vulnerability in database """
    time.sleep(5)
    all_vuln = zap.zap_scan_result()
    time.sleep(5)
    save_all_vuln = zap.zap_result_save(
        all_vuln=all_vuln,
        project_id=project_id,
        un_scanid=scan_id,
    )
    print(save_all_vuln)
    # BUGFIX: read the severity totals from THIS scan's record
    # (scan_scanid=scan_id) instead of iterating every zap_scans_db row
    # and keeping whichever record happened to come last.
    scan_records = zap_scans_db.objects.filter(scan_scanid=scan_id)
    total_vuln = ''
    total_high = ''
    total_medium = ''
    total_low = ''
    for data in scan_records:
        total_vuln = data.total_vul
        total_high = data.high_vul
        total_medium = data.medium_vul
        total_low = data.low_vul
    # Only shut down ZAP when we started the local instance ourselves.
    if zap_enabled is False:
        zap.zap_shutdown()
    notify.send(user, recipient=user,
                verb='ZAP Scan URL %s Completed' % target_url)
    subject = 'Archery Tool Scan Status - ZAP Scan Completed'
    message = 'ZAP Scanner has completed the scan ' \
              ' %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target_url, total_vuln, total_high,
                                         total_medium, total_low)
    email_notify(user=user, subject=subject, message=message)
def url_api_scan(request): if request.POST.get("auth_val"): auth_val = request.POST.get("auth_val") print auth_val if auth_val == 'No': target_url = request.POST.get("scan_url") req_header = ast.literal_eval(request.POST.get("req_header")) req_body = request.POST.get("req_body") method = request.POST.get("method") project_id = request.POST.get("project_id") scan_id = request.POST.get("scan_id") auth_token_key = request.POST.get("auth_token_key") try: with open(api_key_path, 'r+') as f: data = json.load(f) lod_apikey = data['zap_api_key'] apikey = signing.loads(lod_apikey) zapath = data['zap_path'] zap_port = data['zap_port'] except Exception as e: print e zap = ZAPv2(apikey=apikey, proxies={'http': 'http://127.0.0.1' + ':' + zap_port, 'https': 'http://127.0.0.1' + ':' + zap_port}) print target_url """ ***Starting ZAP Scanner*** """ try: zap_scanner = zapscanner.start_zap() print "Status of zap scanner:", zap_scanner except Exception as e: print e return HttpResponseRedirect("/webscanners/scans_list/") """ *****End zap scanner**** """ time.sleep(10) """ Excluding URL from scanner """ scanid = zap.spider.scan(target_url) save_all = zap_spider_db(spider_url=target_url, spider_scanid=scanid) save_all.save() try: while (int(zap.spider.status(scanid)) < 100): # print 'Spider progress %:' + zap.spider.status(scanid) global spider_status spider_status = zap.spider.status(scanid) print "Spider progress", spider_status time.sleep(5) except Exception as e: print e spider_status = "100" spider_res_out = zap.spider.results(scanid) data_out = ("\n".join(map(str, spider_res_out))) print data_out total_spider = len(spider_res_out) print 'Spider Completed------' print 'Target :', target_url global spider_alert spider_alert = "Spider Completed" time.sleep(5) print 'Scanning Target %s' % target_url scan_scanid = zap.ascan.scan(target_url) un_scanid = uuid.uuid4() print "updated scanid :", un_scanid try: save_all_scan = zap_scans_db(project_id=project_id, scan_url=target_url, 
scan_scanid=un_scanid) save_all_scan.save() except Exception as e: print e # zap_scans_db.objects.filter(pk=some_value).update(field1='some value') try: while (int(zap.ascan.status(scan_scanid)) < 100): print 'Scan progress from zap_scan_lauch function %: ' + zap.ascan.status(scan_scanid) global scans_status scans_status = zap.ascan.status(scan_scanid) zap_scans_db.objects.filter(scan_scanid=un_scanid).update(vul_status=scans_status) time.sleep(5) except Exception as e: print e # Save Vulnerability in database scans_status = "100" zap_scans_db.objects.filter(scan_scanid=un_scanid).update(vul_status=scans_status) print target_url time.sleep(5) all_vuln = zap.core.alerts(target_url) # print all_vuln for vuln in all_vuln: vuln_id = uuid.uuid4() confidence = vuln['confidence'] wascid = vuln['wascid'] cweid = vuln['cweid'] risk = vuln['risk'] reference = vuln['reference'] url = vuln['url'] name = vuln['name'] solution = vuln['solution'] param = vuln['param'] evidence = vuln['evidence'] sourceid = vuln['sourceid'] pluginId = vuln['pluginId'] other = vuln['other'] attack = vuln['attack'] messageId = vuln['messageId'] method = vuln['method'] alert = vuln['alert'] ids = vuln['id'] description = vuln['description'] global vul_col if risk == 'High': vul_col = "important" elif risk == 'Medium': vul_col = "warning" elif risk == 'Low': vul_col = "info" dump_all = zap_scan_results_db(vuln_id=vuln_id, vuln_color=vul_col, scan_id=un_scanid, project_id=project_id, confidence=confidence, wascid=wascid, cweid=cweid, risk=risk, reference=reference, url=url, name=name, solution=solution, param=param, evidence=evidence, sourceid=sourceid, pluginId=pluginId, other=other, attack=attack, messageId=messageId, method=method, alert=alert, id=ids, description=description) dump_all.save() time.sleep(5) zap_all_vul = zap_scan_results_db.objects.filter(scan_id=un_scanid).order_by('scan_id') total_vul = len(zap_all_vul) total_high = len(zap_all_vul.filter(risk="High")) total_medium = 
len(zap_all_vul.filter(risk="Medium")) total_low = len(zap_all_vul.filter(risk="Low")) zap_scans_db.objects.filter(scan_scanid=un_scanid).update(total_vul=total_vul, high_vul=total_high, medium_vul=total_medium, low_vul=total_low) spider_alert = "Scan Completed" time.sleep(5) for msg in zap_all_vul: msg_id = msg.messageId request_response = zap.core.message(id=msg_id) ja_son = json.dumps(request_response) ss = ast.literal_eval(ja_son) for key, value in ss.viewitems(): global note if key == "note": note = value global rtt if key == "rtt": rtt = value global tags if key == "tags": tags = value global timestamp if key == "timestamp": timestamp = value global responseHeader if key == "responseHeader": responseHeader = value global requestBody if key == "requestBody": requestBody = value global responseBody if key == "responseBody": responseBody = value global requestHeader if key == "requestHeader": requestHeader = value global cookieParams if key == "cookieParams": cookieParams = value global res_type if key == "type": res_type = value global res_id if key == "id": res_id = value zap_scan_results_db.objects.filter(messageId=msg_id).update(note=note, rtt=rtt, tags=tags, timestamp=timestamp, responseHeader=responseHeader, requestBody=requestBody, responseBody=responseBody, requestHeader=requestHeader, cookieParams=cookieParams, res_type=res_type, res_id=res_id) print msg_id print res_id zap_scanner = zapscanner.stop_zap() print "Status of zap scanner:", zap_scanner return HttpResponseRedirect('/scanapi/') return render(request, 'api_scan_list.html')