def launch_web_scan(target_url, project_id):
    """Run an OWASP ZAP spider and active scan against target_url and persist the results."""
    # Load the ZAP connection settings (API key, host and port) from the settings file.
    try:
        with open(api_key_path, 'r+') as f:
            data = json.load(f)
            lod_apikey = data['zap_api_key']
            apikey = signing.loads(lod_apikey)
            zapath = data['zap_path']
            zap_port = data['zap_port']
    except Exception as e:
        print e

    # Define settings for the ZAP proxy client
    zap = ZAPv2(apikey=apikey,
                proxies={'http': zapath + ':' + zap_port,
                         'https': zapath + ':' + zap_port})

    """ Zap scan start """
    # try:
    #     # ZAP launch function
    #     zapscanner.start_zap()
    # except Exception as e:
    #     print e
    #     print "ZAP Failed.............."
    #     print "ZAP Restarting"
    #     time.sleep(15)

    # Get excluded URLs from the excluded_db model and tell the ZAP spider to skip them.
    try:
        all_excluded = excluded_db.objects.filter(Q(exclude_url__icontains=target_url))
        for excluded in all_excluded:
            global excluded_url
            excluded_url = excluded.exclude_url
            print "Excluded URL:", excluded_url

        # Excluding the URL from scans via the ZAP API
        url_exclude = zap.spider.exclude_from_scan(regex=excluded_url)
        print "URL Excluded:", url_exclude
    except Exception as e:
        print "ZAP Failed.............."
        print "ZAP Restarting"

    # Pull stored cookies for this target so authenticated pages stay reachable during the scan.
    all_cookie = cookie_db.objects.filter(Q(url__icontains=target_url))
    for da in all_cookie:
        global cookies
        cookies = da.cookie
        print da.url
        print "Cookies from database:", cookies

    try:
        remove_cookie = zap.replacer.remove_rule(target_url)
        print "Remove Cookie :", remove_cookie
    except Exception as e:
        print e

    # Adding the cookie value as a replacer rule on the Cookie request header
    try:
        cookie_add = zap.replacer.add_rule(apikey=apikey,
                                           description=target_url,
                                           enabled="true",
                                           matchtype='REQ_HEADER',
                                           matchregex="false",
                                           replacement=cookies,
                                           matchstring="Cookie",
                                           initiators="")
        print "Cookies Added :", cookie_add
    except Exception as e:
        print e

    # Kick off the AJAX spider and the traditional spider, and record the spider scan id.
    zap.ajaxSpider.scan(target_url)
    try:
        scanid = zap.spider.scan(target_url)
        save_all = zap_spider_db(spider_url=target_url, spider_scanid=scanid)
        save_all.save()
    except Exception as e:
        print e

    try:
        zap.spider.set_option_thread_count(apikey=apikey, integer='30')
    except Exception as e:
        print e

    # Poll the spider until it reports 100% completion.
    try:
        while int(zap.spider.status(scanid)) < 100:
            global spider_status
            spider_status = zap.spider.status(scanid)
            print "Spider progress", spider_status
            time.sleep(5)
    except Exception as e:
        print e

    spider_status = "100"
    spider_res_out = zap.spider.results(scanid)
    data_out = "\n".join(map(str, spider_res_out))
    print data_out
    print 'Spider Completed------'
    print 'Target :', target_url
    global spider_alert
    spider_alert = "Spider Completed"
    time.sleep(5)

    print 'Scanning Target %s' % target_url

    """ ZAP active scan trigger on target_url """
    try:
        scan_scanid = zap.ascan.scan(target_url)
    except Exception as e:
        print e

    un_scanid = uuid.uuid4()
    date_time = datetime.datetime.now()
    try:
        save_all_scan = zap_scans_db(project_id=project_id,
                                     scan_url=target_url,
                                     scan_scanid=un_scanid,
                                     date_time=date_time)
        save_all_scan.save()
    except Exception as e:
        print e

    # Poll the active scan and keep the progress column in zap_scans_db up to date.
    try:
        while int(zap.ascan.status(scan_scanid)) < 100:
            print 'ZAP Scan Status %: ' + zap.ascan.status(scan_scanid)
            global scans_status
            scans_status = zap.ascan.status(scan_scanid)
            zap_scans_db.objects.filter(scan_scanid=un_scanid).update(vul_status=scans_status)
            time.sleep(5)
    except Exception as e:
        print e

    # Save vulnerabilities in the database
    scans_status = "100"
    zap_scans_db.objects.filter(scan_scanid=un_scanid).update(vul_status=scans_status)
    print target_url
    time.sleep(5)

    all_vuln = zap.core.alerts(target_url)
    for vuln in all_vuln:
        vuln_id = uuid.uuid4()
        confidence = vuln['confidence']
        wascid = vuln['wascid']
        cweid = vuln['cweid']
        risk = vuln['risk']
        reference = vuln['reference']
        url = vuln['url']
        name = vuln['name']
        solution = vuln['solution']
        param = vuln['param']
        evidence = vuln['evidence']
        sourceid = vuln['sourceid']
        pluginId = vuln['pluginId']
        other = vuln['other']
        attack = vuln['attack']
        messageId = vuln['messageId']
        method = vuln['method']
        alert = vuln['alert']
        ids = vuln['id']
        description = vuln['description']
        false_positive = 'No'

        # Map the ZAP risk rating to the label colour used in the UI.
        global vul_col
        if risk == 'High':
            vul_col = "important"
        elif risk == 'Medium':
            vul_col = "warning"
        else:
            vul_col = "info"

        dump_all = zap_scan_results_db(vuln_id=vuln_id,
                                       vuln_color=vul_col,
                                       scan_id=un_scanid,
                                       project_id=project_id,
                                       confidence=confidence,
                                       wascid=wascid,
                                       cweid=cweid,
                                       risk=risk,
                                       reference=reference,
                                       url=url,
                                       name=name,
                                       solution=solution,
                                       param=param,
                                       evidence=evidence,
                                       sourceid=sourceid,
                                       pluginId=pluginId,
                                       other=other,
                                       attack=attack,
                                       messageId=messageId,
                                       method=method,
                                       alert=alert,
                                       ids=ids,
                                       description=description,
                                       false_positive=false_positive)
        dump_all.save()

    time.sleep(5)

    # Update the scan record with the distinct vulnerability totals per risk level.
    zap_all_vul = zap_scan_results_db.objects.filter(scan_id=un_scanid).values('name', 'risk', 'vuln_color').distinct()
    total_vul = len(zap_all_vul)
    total_high = len(zap_all_vul.filter(risk="High"))
    total_medium = len(zap_all_vul.filter(risk="Medium"))
    total_low = len(zap_all_vul.filter(risk="Low"))
    zap_scans_db.objects.filter(scan_scanid=un_scanid).update(total_vul=total_vul,
                                                              high_vul=total_high,
                                                              medium_vul=total_medium,
                                                              low_vul=total_low)
    spider_alert = "Scan Completed"
    time.sleep(10)
    print un_scanid

    # Fetch the full request/response message for every stored alert and attach it to the result row.
    zap_web_all = zap_scan_results_db.objects.filter(scan_id=un_scanid)
    for m in zap_web_all:
        msg_id = m.messageId
        request_response = zap.core.message(id=msg_id)
        ja_son = json.dumps(request_response)
        ss = ast.literal_eval(ja_son)
        global note, rtt, tags, timestamp, responseHeader, requestBody, responseBody, \
            requestHeader, cookieParams, res_type, res_id
        for key, value in ss.viewitems():
            if key == "note":
                note = value
            if key == "rtt":
                rtt = value
            if key == "tags":
                tags = value
            if key == "timestamp":
                timestamp = value
            if key == "responseHeader":
                responseHeader = value
            if key == "requestBody":
                requestBody = value
            if key == "responseBody":
                responseBody = value
            if key == "requestHeader":
                requestHeader = value
            if key == "cookieParams":
                cookieParams = value
            if key == "type":
                res_type = value
            if key == "id":
                res_id = value
        zap_scan_results_db.objects.filter(messageId=msg_id).update(note=note,
                                                                    rtt=rtt,
                                                                    tags=tags,
                                                                    timestamp=timestamp,
                                                                    responseHeader=responseHeader,
                                                                    requestBody=requestBody,
                                                                    responseBody=responseBody,
                                                                    requestHeader=requestHeader,
                                                                    cookieParams=cookieParams,
                                                                    res_type=res_type,
                                                                    res_id=res_id)

    # zapscanner.stop_zap()
    try:
        email_notification.email_notify()
    except Exception as e:
        print e

    return HttpResponse(status=201)
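
# Usage sketch (an illustrative assumption, not part of the original module):
# launch_web_scan() blocks until the ZAP spider and active scan both reach 100%,
# so a caller would normally hand it off to a background worker. The helper name
# below is hypothetical.
def launch_web_scan_async(target_url, project_id):
    import threading
    worker = threading.Thread(target=launch_web_scan, args=(target_url, project_id))
    worker.daemon = True  # do not block interpreter shutdown on a long-running scan
    worker.start()
    return worker
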
def burp_scan_data(self, xml_data):
    """Parse a Burp Suite XML export and store every issue in burp_scan_result_db."""
    global vuln_id, burp_status, vul_col

    for issue in xml_data:
        # One row is saved per <issue>; each child element carries one field of that issue.
        vuln_id = uuid.uuid4()
        for data in issue.getchildren():
            if data.tag == "serialNumber":
                global serialNumber
                serialNumber = "NA" if data.text is None else data.text
            if data.tag == "type":
                global types
                types = "NA" if data.text is None else data.text
            if data.tag == "name":
                global name
                name = "NA" if data.text is None else data.text
            if data.tag == "host":
                global host
                host = "NA" if data.text is None else data.text
            if data.tag == "path":
                global path
                path = "NA" if data.text is None else data.text
            if data.tag == "location":
                global location
                location = "NA" if data.text is None else data.text
            if data.tag == "severity":
                global severity
                severity = "NA" if data.text is None else data.text
            if data.tag == "confidence":
                global confidence
                confidence = "NA" if data.text is None else data.text
            if data.tag == "issueBackground":
                global issueBackground
                issueBackground = "NA" if data.text is None else data.text
            if data.tag == "remediationBackground":
                global remediationBackground
                remediationBackground = "NA" if data.text is None else data.text
            if data.tag == "references":
                global references
                references = "NA" if data.text is None else data.text
            if data.tag == "vulnerabilityClassifications":
                global vulnerabilityClassifications
                vulnerabilityClassifications = "NA" if data.text is None else data.text
            if data.tag == "issueDetail":
                global issueDetail
                issueDetail = "NA" if data.text is None else data.text
            if data.tag == "requestresponse":
                global requestresponse
                requestresponse = "NA" if data.text is None else data.text
                # The raw request and response bodies are base64 encoded in the export.
                for d in data:
                    req = d.tag
                    met = d.attrib
                    if req == "request":
                        global dec_req
                        dec_req = base64.b64decode(d.text)
                    if req == "response":
                        global dec_res
                        dec_res = base64.b64decode(d.text)
                    for key, items in met.iteritems():
                        global methods
                        if key == "method":
                            methods = items

        # Map the Burp severity rating to the label colour used in the UI.
        if severity == 'High':
            vul_col = "important"
        elif severity == 'Medium':
            vul_col = "warning"
        else:
            vul_col = "info"

        try:
            data_dump = burp_scan_result_db(scan_id=self.scan_id,
                                            types=types,
                                            method=methods,
                                            scan_request=dec_req,
                                            scan_response=dec_res,
                                            project_id=self.project_id,
                                            vuln_id=vuln_id,
                                            serialNumber=serialNumber,
                                            name=name,
                                            host=host,
                                            path=path,
                                            location=location,
                                            severity=severity,
                                            severity_color=vul_col,
                                            confidence=confidence,
                                            issueBackground=issueBackground,
                                            remediationBackground=remediationBackground,
                                            references=references,
                                            vulnerabilityClassifications=vulnerabilityClassifications,
                                            issueDetail=issueDetail,
                                            requestresponse=requestresponse,
                                            false_positive='No')
            data_dump.save()
        except Exception as e:
            print e

    # Update the parent scan record with vulnerability totals per severity.
    burp_all_vul = burp_scan_result_db.objects.filter(scan_id=self.scan_id)
    total_vul = len(burp_all_vul)
    total_high = len(burp_all_vul.filter(severity="High"))
    total_medium = len(burp_all_vul.filter(severity="Medium"))
    total_low = len(burp_all_vul.filter(severity="Low"))
    total_info = len(burp_all_vul.filter(severity="Information"))
    burp_scan_db.objects.filter(scan_id=self.scan_id).update(total_vul=total_vul,
                                                             high_vul=total_high,
                                                             medium_vul=total_medium,
                                                             low_vul=total_low)
    try:
        email_notification.email_notify()
    except Exception as e:
        print e

    return HttpResponse(status=201)
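
# Usage sketch (an illustrative assumption, not part of the original module):
# burp_scan_data() expects the root element of a Burp Suite XML export, whose
# <issue> children carry the fields handled above. The helper name and file-path
# argument are hypothetical.
def load_burp_export(xml_file_path):
    from xml.etree import ElementTree
    tree = ElementTree.parse(xml_file_path)
    return tree.getroot()  # pass this root as xml_data to burp_scan_data()
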