def parse_npm_audit(json_file, user_name, init_es):
    """Parse an `npm audit --json` report and persist each advisory.

    Args:
        json_file: Path to the npm audit JSON output.
        user_name: Kept for parser-signature parity with the other parsers;
            unused here.
        init_es: Shared base result dict; a 'vulnerability' entry is set on
            it for each advisory before calling write_results.

    Fixes vs. original: the severity table keyed low severity as " "
    (typo for "low"); a report without an 'advisories' key no longer
    raises on `.values()`; dead locals (`cve`, `dep_dict`) removed.
    """
    try:
        with open(json_file, 'r', encoding='utf-8') as fp:
            results = json.load(fp)
        # npm audit severity labels -> internal 0-3 scale.
        severity_dict = {
            "critical": 3,
            "high": 2,
            "moderate": 1,
            "low": 0,
            "info": 0
        }
        advisories = results.get('advisories') or {}
        for vul in advisories.values():
            name = vul.get('title', '')
            desc = vul.get('overview', '')
            recommendation = vul.get('recommendation', '')
            module_name = vul.get('module_name', '')
            severity = int(severity_dict.get(vul.get('severity'), 0))
            # 'cwe' looks like "CWE-123"; an empty value falls back to 0.
            cwe_string = vul.get('cwe', "").replace('CWE-', '') or 0
            cwe = int(cwe_string)
            # One evidence entry per affected version finding.
            evid_list = [
                {'url': module_name, 'name': f.get('version', '')}
                for f in vul.get('findings', [])
            ]
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': name,
                'is_false_positive': False,
                'is_remediated': False,
                'tool': 'NpmAudit',
                'confidence': 2,
                'severity': severity,
                'description': desc,
                'remediation': recommendation,
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'evidences': evid_list
            }
            if cwe:
                vul_dict['vulnerability']['cwe'] = {
                    'cwe_id': cwe,
                    'cwe_link': 'https://cwe.mitre.org/data/definitions/%s.html' % cwe
                }
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('NPM Audit parsing completed')
def parse_appspider(xml_file,user_name,init_es):
    """ parses the xml file obtained by app spider and pushes the result to results to DB """
    # xml_file: path to the AppSpider XML export.
    # user_name: included in MalFormedXMLException when the XML is invalid.
    # init_es: shared base result dict; a 'vulnerability' entry is attached
    #          per distinct vulnerability type before write_results().
    try:
        print('appspider XML parsing initiated')
        try:
            nreport = xml.parse(xml_file)
        except (xml.XMLSyntaxError,xml.ParserError):
            raise MalFormedXMLException(user_name)
        root_elem = nreport.getroot()
        # First collect the distinct vulnerability type names...
        path = r'VulnList/Vuln/VulnType'
        uniq_objs = root_elem.xpath(path)
        vuls = set([i.text for i in uniq_objs])
        # ...then re-query per name to group all matching <Vuln> entries.
        p = 'VulnList/Vuln/VulnType[text() = $name]'
        for v in vuls:
            obj = root_elem.xpath(p,name=v)
            url_param_list = []
            for u in obj:
                parent_obj = u.getparent()
                url_param_list.append({
                    'url':parent_obj.findtext('Url',default=''),
                    'name':parent_obj.findtext('VulnParamType',default=''),
                    'request':parent_obj.findtext('CrawlTraffic',default=''),
                    'response':parent_obj.findtext('CrawlTrafficResponse',default=''),
                })
            # NOTE(review): the metadata below reads parent_obj, i.e. the
            # *last* matching <Vuln> of the group -- this assumes name,
            # description, recommendation, CWE and score are identical across
            # the group. Confirm against real AppSpider output.
            vul_name = re.sub('<[^<]+?>', '',parent_obj.findtext('VulnType',default=''))
            desc = re.sub('<[^<]+?>', '',parent_obj.findtext('Description',default=''))
            recommendation = re.sub('<[^<]+?>', '',parent_obj.findtext('Recommendation',default=''))
            cwe = parent_obj.findtext('CweId',default='0')
            severity = sev_dict.get(parent_obj.findtext('AttackScore',default=''),0)
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name':vul_name,
                'is_false_positive':False,
                'is_remediated':False,
                'is_deleted':False,
                'tool':'AppSpider',
                'confidence':2,
                'severity':severity,
                'description':desc,
                'remediation':recommendation,
                'created_on':timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'cwe':{
                    'cwe_id':cwe,
                    'cwe_link':'https://cwe.mitre.org/top25/index.html#CWE-%s'%cwe
                },
                'evidences':url_param_list
            }
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('XML Parsing Completed')
def parse_bandit(json_file, user_name, init_es):
    """Read a Bandit JSON report and store one result per (test, severity).

    Findings are grouped by their (test_name, issue_severity) pair; each
    finding contributes one evidence entry naming the file and line(s).
    """
    try:
        print("Bandit json parsing initiated")
        with open(json_file, 'r', encoding='utf-8') as fp:
            report = json.load(fp)
        # Fall back to "now" when the report carries no generation stamp.
        fallback_date = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
        created_on = parser.parse(
            report.get('generated_at', fallback_date)).strftime("%Y-%m-%d %H:%M:%S")
        grouped = defaultdict(list)
        for issue in report.get('results', []):
            src_file = issue.get('filename', '')
            snippet = issue.get('code', '')
            line_no = issue.get('line_number', '')
            span = issue.get('line_range') or ''
            if span:
                # Keep only the last line of the reported range.
                span = span[-1]
            key = (issue.get('test_name', 'Unknown'),
                   issue.get('issue_severity', 'LOW'))
            grouped[key].append({
                'url': src_file.split('/')[-1:][0],
                'param': str(line_no) + " " + str(span),
                'log': snippet
            })
        for (test_name, issue_severity), evidence in grouped.items():
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': test_name,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'Bandit',
                'confidence': 2,
                'severity': sev_dict.get(issue_severity, 0),
                'description': '',
                'remediation': '',
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'evidences': evidence
            }
            write_results(vul_dict)
    except Exception as e:
        log_exception(e)
    else:
        print('Bandit JSON Parsing Completed')
def parse_nodejsscan(json_file, user_name, init_es):
    """Parse a NodeJsScan JSON report and persist one result per issue title.

    Args:
        json_file: Path to the NodeJsScan JSON output.
        user_name: Kept for parser-signature parity; unused here.
        init_es: Shared base result dict; a 'vulnerability' entry is set per
            distinct issue title before calling write_results.

    Fix vs. original: evidences are grouped under each finding's *own*
    title. Previously the inner loop re-added the evidence of every
    finding in the category to every title (cross-contaminated and
    duplicated evidence lists).
    """
    try:
        print('NodeJSScan parsing initiated')
        with open(json_file, 'r', encoding='utf-8') as fp:
            results = json.load(fp)
        grouped = {}
        for issues in (results.get('sec_issues') or {}).values():
            for issue in issues:
                entry = grouped.setdefault(issue.get('title'),
                                           {'description': '', 'evidences': []})
                entry['evidences'].append({
                    'url': issue.get('path') + " line number " + str(issue.get('line')),
                    'param': issue.get('filename'),
                    'log': issue.get('lines')
                })
                # Last finding with this title wins, as before.
                entry['description'] = issue.get('description')
        for title, info in grouped.items():
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': title,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'NodeJsScan',
                'confidence': 2,
                'severity': 2,
                'description': info.get('description', ''),
                'remediation': '',
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'evidences': info.get('evidences', [])
            }
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('NodeJSScan parsing completed')
def sync_jira_users(org_id):
    """Mirror active JIRA group members into JiraUsers for one org.

    Looks up the org's JIRA config, connects, and for every group creates
    (or finds) a JiraUsers row per active member. Per-group failures are
    logged and do not stop the remaining groups.
    """
    jira_config = JiraIssueTypes.objects.get(org__id=org_id)
    jira_con = get_jira_con(jira_config)
    if not jira_con:
        return
    try:
        for group in jira_con.groups():
            try:
                for member, details in jira_con.group_members(group).items():
                    if not details.get('active'):
                        continue
                    JiraUsers.objects.get_or_create(
                        name=member,
                        group=group,
                        jira_config=jira_config,
                    )
            except BaseException as e:
                log_exception(e)
    except BaseException as e:
        log_exception(e)
def process_files(user, application, complete_path, init_es, tool, scan_name, user_host, to_name,hook_log=None):
    """ calls the parsers to parse the xml file according to the tool selected """
    # complete_path: uploaded report file; always deleted in the finally block.
    # hook_log: optional webhook audit record, updated on success and failure.
    try:
        application = Application.objects.get(id=application)
        scan = Scan.objects.get(name=scan_name)
        # Move the scan log through Initiated -> In Progress before parsing.
        scan.scanlog.status = 'Initiated'
        scan.scanlog.save()
        scan_log = scan.scanlog
        scan_log.status = 'In Progress'
        scan_log.save()
        try:
            # Dispatch to the tool-specific parser. Burp and ZAP reports may
            # arrive as JSON or XML, decided by the file extension.
            if tool == 'Burp':
                ext = complete_path.split('.')[-1]
                if ext == 'json':
                    parse_burp_json(complete_path,user,init_es)
                elif ext == 'xml':
                    parse_burp(complete_path,user,init_es)
            elif tool == 'ZAP':
                ext = complete_path.split('.')[-1]
                if ext == 'json':
                    parse_zap_json(complete_path,user,init_es)
                elif ext == 'xml':
                    parse_zap(complete_path,user,init_es)
            elif tool == 'OWASP Dependency Checker':
                parse_owasp_dep_checker(complete_path,user,init_es)
            elif tool == "FindSecBugs":
                parser_findsecbug(complete_path,user,init_es)
            elif tool == "NpmAudit":
                parse_npm_audit(complete_path,user,init_es)
            elif tool == 'NodeJsScan':
                parse_nodejsscan(complete_path,user,init_es)
            elif tool == 'Bandit':
                parse_bandit(complete_path,user,init_es)
            info_debug_log(ip=user_host,user=user,event='XML Parsing',status='success')
            if hook_log:
                # Record a fully successful scan + vulnerability processing run.
                hook_log.scan_process_event = True
                hook_log.scan_process_exception = ''
                hook_log.scan_process_datetime = timezone.now()
                hook_log.scan_id = scan.name
                hook_log.vul_process_event = True
                hook_log.vul_process_exception = ''
                hook_log.vul_process_datetime = timezone.now()
                hook_log.save()
            scan_log.status = 'Completed'
            scan_log.save()
        except BaseException as e:
            # Parsing failed: mark the log Killed and drop the half-written scan.
            scan_log.status = 'Killed'
            scan_log.save()
            scan.delete()
            log_exception(e)
            if hook_log:
                hook_log.vul_process_event = False
                hook_log.vul_process_exception = e
                hook_log.vul_process_datetime = timezone.now()
                hook_log.scan_process_event = False
                hook_log.scan_process_exception = e
                hook_log.scan_process_datetime = timezone.now()
                hook_log.scan_id = ''
                hook_log.save()
            critical_debug_log(ip=user_host,user=user,event=e,status='failure')
    except BaseException as e:
        # Setup failure (e.g. missing Application/Scan row).
        # NOTE(review): if Scan.objects.get itself failed, scan_log is
        # unbound here and this handler will raise NameError -- confirm.
        log_exception(e)
        scan_log.status = 'Killed'
        scan_log.save()
        critical_debug_log(ip=user_host,user=user,event=e,status='failure')
        if hook_log:
            hook_log.scan_process_event = False
            hook_log.scan_process_exception = e
            hook_log.scan_process_datetime = timezone.now()
            hook_log.scan_id = ''
            hook_log.save()
    finally:
        # The uploaded report is always removed, success or failure.
        info_debug_log(ip=user_host,user=user,event='Remove file after XML parsing',status='success')
        remove_file(complete_path)
def process_files(user, application, complete_path, init_es, tool, scan_name, user_host, to_name, hook_log=None):
    """ calls the parsers to parse the xml file according to the tool selected """
    # Second variant of process_files: dispatches to a different parser set
    # (AppSpider/Arachni/Checkmarx/AppScan/w3af/Fortify/Xanitizer) and reads
    # the scan log via ScanLog.objects.get rather than scan.scanlog.
    try:
        application = Application.objects.get(id=application)
        scan = Scan.objects.get(name=scan_name)
        scan_log = ScanLog.objects.get(scan=scan)
        scan_log.status = 'In Progress'
        scan_log.save()
        try:
            # Tool-specific dispatch; only ZAP distinguishes JSON vs XML here.
            if tool == 'Burp':
                parse_burp(complete_path, user, init_es)
            elif tool == 'ZAP':
                ext = complete_path.split('.')[-1]
                if ext == 'json':
                    parse_zap_json(complete_path, user, init_es)
                elif ext == 'xml':
                    parse_zap(complete_path, user, init_es)
            elif tool == 'AppSpider':
                parse_appspider(complete_path, user, init_es)
            elif tool == 'Arachni':
                parse_arachni(complete_path, user, init_es)
            elif tool == 'Bandit':
                parse_bandit(complete_path, user, init_es)
            elif tool == 'Checkmarx':
                parse_checkmarx(complete_path, user, init_es)
            elif tool == 'AppScan - DAST':
                parse_appscan_dast(complete_path, user, init_es)
            elif tool == 'AppScan - SAST':
                parse_appscan_sast(complete_path, user, init_es)
            elif tool == 'OWASP Dependency Checker':
                parse_owasp_dep_checker(complete_path, user, init_es)
            elif tool == 'w3af':
                # w3af uses a class-based parser rather than a function.
                w = W3afParser(complete_path, user, init_es, tool)
                w.parse_xml()
            elif tool == "HP Fortify":
                parse_hp_fortify(complete_path, user, init_es)
            elif tool == "Xanitizer":
                parse_xanitizer(complete_path, user, init_es)
            info_debug_log(ip=user_host, user=user, event='XML Parsing', status='success')
            if hook_log:
                # Record a fully successful scan + vulnerability processing run.
                hook_log.scan_process_event = True
                hook_log.scan_process_exception = ''
                hook_log.scan_process_datetime = timezone.now()
                hook_log.scan_id = scan.name
                hook_log.vul_process_event = True
                hook_log.vul_process_exception = ''
                hook_log.vul_process_datetime = timezone.now()
                hook_log.save()
            scan_log.status = 'Completed'
            scan_log.save()
        except BaseException as e:
            # Parsing failed: mark the log Killed and drop the half-written scan.
            scan_log.status = 'Killed'
            scan_log.save()
            scan.delete()
            log_exception(e)
            if hook_log:
                hook_log.vul_process_event = False
                hook_log.vul_process_exception = e
                hook_log.vul_process_datetime = timezone.now()
                hook_log.scan_process_event = False
                hook_log.scan_process_exception = e
                hook_log.scan_process_datetime = timezone.now()
                hook_log.scan_id = ''
                hook_log.save()
            critical_debug_log(ip=user_host, user=user, event=e, status='failure')
    except BaseException as e:
        # Setup failure (e.g. missing Application/Scan/ScanLog row).
        # NOTE(review): if the lookups above failed, scan_log is unbound here
        # and this handler will raise NameError -- confirm.
        log_exception(e)
        scan_log.status = 'Killed'
        scan_log.save()
        critical_debug_log(ip=user_host, user=user, event=e, status='failure')
        if hook_log:
            hook_log.scan_process_event = False
            hook_log.scan_process_exception = e
            hook_log.scan_process_datetime = timezone.now()
            hook_log.scan_id = ''
            hook_log.save()
    finally:
        # The uploaded report is always removed, success or failure.
        info_debug_log(ip=user_host, user=user, event='Remove file after XML parsing', status='success')
        remove_file(complete_path)
def parse_owasp_dep_checker(xml_file, user_name, init_es):
    """Parse an OWASP Dependency-Check XML report (via BeautifulSoup) and
    persist each vulnerable dependency's findings.

    Args:
        xml_file: Path to the Dependency-Check XML report.
        user_name: Kept for parser-signature parity; unused here.
        init_es: Shared base result dict; a 'vulnerability' entry is set per
            finding before calling write_results.

    Fixes vs. original: the report file handle is now closed (with-block);
    an unrecognized severity label falls back to the numeric value 1 (Low)
    instead of the *string* 'Low'; dead `dependencies` local removed.
    """
    try:
        with open(xml_file) as fp:
            data = BeautifulSoup(fp.read(), 'lxml')
        severity_dict = {
            'Low': 1,
            'Medium': 2,
            'High': 3,
            'Information': 0,
            'Info': 0,
        }
        for dependency in data.findAll('dependency'):
            vulnerabilities = dependency.find('vulnerabilities')
            url_param_list = []
            if vulnerabilities:
                filename = dependency.find('filename').get_text()
                evid_desc = dependency.find('filepath').get_text()
                for vuln in dependency.findAll('vulnerability'):
                    vul_name = vuln.find('name').get_text()
                    description = vuln.find('description').get_text()
                    # Default to 1 (Low) for unknown labels; original
                    # returned the string 'Low' here, not a number.
                    severity = severity_dict.get(
                        vuln.find('severity').get_text(), 1)
                    url_param_list.append({
                        'url': filename,
                        'name': evid_desc,
                        'log': ''
                    })
                    cwe = 0
                    if vuln.find('cwe'):
                        # e.g. "CWE-79 Improper Neutralization..." -> "79"
                        cwe = vuln.find('cwe').get_text().split(' ')[0].split(
                            'CWE-')[1]
                    vul_dict = init_es
                    vul_dict['vulnerability'] = {
                        'name': vul_name,
                        'is_false_positive': False,
                        'is_remediated': False,
                        'is_deleted': False,
                        'tool': 'Owasp Dependency Checker',
                        'confidence': 2,
                        'severity': severity,
                        'description': description,
                        'remediation': '',
                        'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                        'cwe': {
                            'cwe_id': cwe,
                            'cwe_link': 'https://cwe.mitre.org/top25/index.html#CWE-{0}'.
                            format(cwe)
                        },
                        'evidences': url_param_list
                    }
                    write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
def parse_zap_json(file_path, user_name, init_es):
    """Parse a ZAP JSON export and persist one result per alert.

    The report shape varies: 'Sites', 'AlertItem' and 'Item' may each be a
    single object or a list, so every level branches on isinstance.
    """
    try:
        with open(file_path) as fp:
            data = json.loads(fp.read())
        hosts = data.get('Report', {}).get('Sites', [])

        def post_results(hosts):
            # Process one site entry (dict) from the report.
            host_url = hosts.get('Host', '')
            alerts = hosts.get('Alerts', {})
            alert_items = alerts.get('AlertItem', [])

            def process_alerts(alert):
                # Build evidence entries from the alert's Item(s), then
                # write a single result for the alert.
                severity = alert.get('RiskCode', 0)
                desc = alert.get('Desc', '')
                vul_name = alert.get('Alert', '')
                solution = alert.get('Solution', '')
                cwe = alert.get('CWEID', 0)
                url_param_list = []
                items = alert.get('Item', [])
                if isinstance(items, list):
                    for item in items:
                        request = response = ''
                        # NOTE(review): str.format over .encode() results
                        # embeds "b'...'" reprs in the transcript -- confirm
                        # this is the intended stored form.
                        response_header = item.get('ResponseHeader', '')
                        response_body = item.get('ResponseBody', '')
                        if response_header or response_body:
                            response = '{0}\n\n{1}'.format(
                                response_header.encode('utf-8'),
                                response_body.encode('utf-8'))
                        request_header = item.get('RequestHeader', '')
                        request_body = item.get('RequestBody', '')
                        if request_header or request_body:
                            request = '{0}\n\n{1}'.format(
                                request_header.encode('utf-8'),
                                request_body.encode('utf-8'))
                        attack = item.get('Attack', '')
                        uri = item.get('URI', '')
                        param = item.get('Param', '')
                        # Strip the scheme prefix (text before first ':').
                        url = uri.split(':')[-1]
                        evid_dict = {
                            'url': url,
                            'name': param,
                            'attack': attack,
                            'request': b64encode(bytes(request.encode('utf-8'))),
                            'response': b64encode(bytes(response.encode('utf-8'))),
                        }
                        url_param_list.append(evid_dict)
                elif isinstance(items, dict):
                    # Single-item alert: same extraction, Attack preferred
                    # over Param for the evidence name.
                    request = response = ''
                    response_header = items.get('ResponseHeader', '')
                    response_body = items.get('ResponseBody', '')
                    if response_header or response_body:
                        response = '{0}\n\n{1}'.format(
                            response_header.encode('utf-8'),
                            response_body.encode('utf-8'))
                    request_header = items.get('RequestHeader', '')
                    request_body = items.get('RequestBody', '')
                    if request_header or request_body:
                        request = '{0}\n\n{1}'.format(
                            request_header.encode('utf-8'),
                            request_body.encode('utf-8'))
                    param = items.get('Attack', '')
                    uri = items.get('URI', '')
                    if not param:
                        param = items.get('Param', '')
                    url = uri.split(':')[-1]
                    evid_dict = {
                        'url': url,
                        'name': param,
                        'request': b64encode(bytes(request.encode('utf-8'))),
                        'response': b64encode(bytes(response.encode('utf-8'))),
                    }
                    url_param_list.append(evid_dict)
                vul_dict = init_es
                vul_dict['vulnerability'] = {
                    'name': re.sub('<[^<]+?>', '', vul_name),
                    'is_false_positive': False,
                    'is_remediated': False,
                    'is_deleted': False,
                    'tool': 'ZAP',
                    'confidence': 2,
                    'severity': severity,
                    'description': re.sub('<[^<]+?>', '', desc),
                    'vul_type': 'Insecure Coding',
                    'remediation': re.sub('<[^<]+?>', '', solution),
                    'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S")
                }
                vul_dict['vulnerability']['evidences'] = url_param_list
                vul_dict['vulnerability']['cwe'] = {
                    'cwe_id': cwe,
                    'cwe_link': 'https://cwe.mitre.org/data/definitions/%s.html' % cwe
                }
                write_results(vul_dict)

            if isinstance(alert_items, list):
                for alert in alert_items:
                    process_alerts(alert)
            elif isinstance(alert_items, dict):
                process_alerts(alert_items)

        if isinstance(hosts, dict):
            post_results(hosts)
        elif isinstance(hosts, list):
            for host in hosts:
                post_results(host)
    except BaseException as e:
        log_exception(e)
    else:
        print('XML Parsing Completed')
    # parse_zap_json(file_path,init_es)
def parse_burp_json(json_file,user_name,init_es):
    """ parses the json file obtained by burp scanner and pushes the result to results to DB """
    # Deduplicates by issue name: the first time a name is seen, the whole
    # file is rescanned (O(n^2)) to gather every matching issue's evidence
    # into one result.
    try:
        print("Burp json parsing initiated")
        with open(json_file) as fp:
            datafile = json.load(fp)
        vul_dict = {}
        unique_name_mutiple_evid = []
        for root_key in datafile:
            results = root_key.get('issue',[])
            set_names = set(unique_name_mutiple_evid)
            check_name_exists = results.get('name') in set_names
            evids = []
            cwe_present = cwe_dict.get(str(results.get('type_index')),'8389632')
            cwe = 0
            if cwe_present:
                # NOTE(review): when type_index is unknown the default
                # '8389632' is a *string*, so cwe becomes int('8') == 8 --
                # confirm this fallback is intended.
                cwe = int(cwe_present[0])
            if not check_name_exists:
                unique_name_mutiple_evid.append(results.get('name'))
                request_data = ''
                response_data = ''
                url = ''
                # Second pass: collect evidence from every issue sharing
                # this name.
                for vul_name_all_json in datafile:
                    results_name = vul_name_all_json.get('issue',[])
                    name = results.get('name', '')
                    if name == results_name.get('name', ''):
                        for request_response in results_name.get('evidence', ''):
                            if request_response.get('request_response'):
                                for key, value in request_response.get('request_response').items():
                                    if key == 'url':
                                        url = value
                                    if key == 'request':
                                        # Request segments arrive base64-encoded in chunks.
                                        for request_info in value:
                                            data_request = request_info.get('data')
                                            if data_request is not None:
                                                decoded_data = str(b64decode(data_request),'utf-8')
                                                request_data += decoded_data
                                    if key == 'response':
                                        for response_info in value:
                                            data_response = response_info.get('data')
                                            if data_response is not None :
                                                decoded_data = str(b64decode(data_response),'utf-8')
                                                response_data += decoded_data
                        # NOTE(review): request_data/response_data are not
                        # reset between matches, so each appended evidence
                        # carries the cumulative transcript -- confirm.
                        evids.append({
                            'url':url,
                            'name':results.get('path', ''),
                            'request':str(b64encode(request_data.encode()), 'utf-8'),
                            'response':str(b64encode(response_data.encode()), 'utf-8'),
                        })
                vul_dict = init_es
                vul_dict['vulnerability'] = {
                    'name':results.get('name', ''),
                    'is_false_positive':False,
                    'is_remediated':False,
                    'is_deleted':False,
                    'tool':'Burp',
                    'confidence':burp_confidence_dict.get(results.get('confidence'), 3),
                    'severity':sev_dict.get(results.get('severity'), 3),
                    'description':results.get('description', ''),
                    'vul_type':'Insecure Coding',
                    'remediation':results.get('remediation', ''),
                    'observations':'',
                    'created_on':timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                }
                vul_dict['vulnerability']['evidences'] = evids
                vul_dict['vulnerability']['cwe'] = {
                    'cwe_id': cwe
                }
                write_results(vul_dict)
    except Exception as e:
        log_exception(e)
    else:
        print('BURP JSON Parsing Completed')
def parse_burp(xml_file, user_name, init_es):
    """ Parsing a given xml file using burp. This function returns a dictionary of scan parameters and update the ES. """
    # Groups <issue> elements by their <name> text; each group becomes one
    # result whose evidences are the individual issues.
    try:
        print('Burp XML parsing initiated')
        try:
            nreport = xml.parse(xml_file)
        except (xml.XMLSyntaxError, xml.ParserError):
            raise MalFormedXMLException(user_name)
        root_elem = nreport.getroot()
        current_date = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
        scan_date = root_elem.attrib.get('exportTime', current_date)
        created_on = parser.parse(scan_date).strftime("%Y-%m-%d %H:%M:%S")
        reg_path = r'issue/name'
        uniq_objs = root_elem.xpath(reg_path)
        vuls = set([i.text for i in uniq_objs])
        # Re-query per distinct name to collect all matching issues.
        p = '{0}[text() = $name]'.format(reg_path)
        severity_dict = {'Information': 0, 'Low': 1, 'Medium': 2, 'High': 3}
        burp_confidence_dict = {
            "Certain": 3,
            "Firm": 2,
            "Tentative": 1,
        }
        for v in vuls:
            obj = root_elem.xpath(p, name=v)
            url_param_list = []
            for u in obj:
                parent_obj = u.getparent()
                req = parent_obj.find('requestresponse/request')
                res = parent_obj.find('requestresponse/response')
                # Stored values are base64; already-encoded payloads are
                # passed through, plain text is encoded here.
                request = response = b64encode(b'')
                if req is not None:
                    is_base64_encoded = True if req.get(
                        'base64') == 'true' else False
                    if is_base64_encoded:
                        request = bytes(req.text.encode('utf-8'))
                    else:
                        request = b64encode(bytes(req.text.encode('utf-8')))
                if res is not None:
                    is_base64_encoded = True if res.get(
                        'base64') == 'true' else False
                    if is_base64_encoded:
                        response = bytes(res.text.encode('utf-8'))
                    else:
                        response = b64encode(bytes(res.text.encode('utf-8')))
                url = 'http:/%s' % (parent_obj.findtext('path', default=''))
                url_param_list.append({
                    'url': parent_obj.findtext('location', default=''),
                    'name': parent_obj.findtext('path', default=''),
                    'request': request,
                    'response': response,
                })
            # NOTE(review): metadata below reads the *last* issue of the
            # group (loop variable after the loop); assumes it is identical
            # across same-named issues -- confirm.
            vul_name = parent_obj.findtext('name', default='')
            severity = parent_obj.findtext('severity', '')
            issue_type = parent_obj.findtext('type', '8389632')
            if severity:
                severity = severity_dict.get(severity)
            cwe_present = cwe_dict.get(issue_type, [])
            cwe = 0
            if cwe_present:
                cwe = cwe_present[0]
            desc = parent_obj.findtext('issueBackground', default='')
            solution = parent_obj.findtext('remediationBackground', default='')
            observation = parent_obj.find('issueDetail')
            confidence = parent_obj.findtext('confidence', default='')
            if confidence:
                confidence = burp_confidence_dict.get(confidence)
            if observation is not None:
                # Strip Burp's boilerplate advice from the observation text.
                s = '''You should manually examine the application behavior and attempt to identify any unusual input validation or other obstacles that may be in place.'''
                obs = observation.text.replace(s, '')
            else:
                obs = ''
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': re.sub('<[^<]+?>', '', vul_name),
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'Burp',
                'confidence': confidence,
                'severity': severity,
                'description': re.sub('<[^<]+?>', '', desc),
                'vul_type': 'Insecure Coding',
                'remediation': re.sub('<[^<]+?>', '', solution),
                'observations': obs,
                'created_on': created_on,
            }
            vul_dict['vulnerability']['evidences'] = url_param_list
            vul_dict['vulnerability']['cwe'] = {'cwe_id': cwe}
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('Burp XML parsing completed')
# End-Of-File
def parser_findsecbug(xml_file, user_name=None, init_es=None):
    """Parse a FindSecBugs/SpotBugs XML report and persist SECURITY findings.

    Args:
        xml_file: Path to the SpotBugs XML report.
        user_name: Kept for parser-signature parity; unused here.
        init_es: Base result dict; defaults to a fresh dict per call.

    Fixes vs. original: repeated bug names now actually extend the evidence
    list (the old code used `+` and discarded the result); the mutable
    default argument `init_es={}` (shared across calls) is replaced with
    None; dead `all_list` local removed.
    """
    try:
        print('FindSecBugs XML parsing initiated')
        if init_es is None:
            init_es = {}
        vul_dict = {}
        data = xml.parse(xml_file)
        data = data.getroot()
        path = data.xpath(r'//BugInstance[@category="SECURITY"]')
        # Lower the priority, higher the confidence
        confidence_dict = {'5': 1, '3': 1, '2': 2, '1': 3}
        # Higher the rank, lower the severity.[Max rank = 20]
        severity_dict = {'4': 0, '3': 1, '2': 2, '1': 3, '0': 3}

        def evidence_return(file_path, start_line, end_line):
            # One evidence row per <SourceLine> element.
            return {
                'name': 'File: {0} LineNum: {1} - {2}'.format(
                    file_path.split('/')[-1], start_line, end_line),
                'url': file_path,
                'log': ''
            }

        for instance in path:
            evidence_list = []
            name = instance.findtext('ShortMessage')
            cwe = instance.get('cweid', 0)
            confidence = confidence_dict.get(instance.get('priority'))
            # rank 0-20 mapped onto the 0-4 bucket scale via integer /5.
            severity = severity_dict.get(
                str(int(round(int(instance.get('rank'))) / 5)))
            description = instance.findtext('LongMessage')
            remediation = ''
            # SourceLine evidence may hang off the instance itself or its
            # Class/Method children.
            for selector in ('SourceLine', 'Class/SourceLine',
                             'Method/SourceLine'):
                for sourceline in instance.findall(selector):
                    evidence_list.append(
                        evidence_return(file_path=sourceline.get('sourcepath'),
                                        start_line=sourceline.get('start'),
                                        end_line=sourceline.get('end')))
            if name in vul_dict:
                # BUG FIX: was `+`, which discarded the concatenation.
                vul_dict[name]['evidence_list'] += evidence_list
            else:
                vul_dict[name] = {
                    'cwe': cwe,
                    'description': description,
                    'evidence_list': evidence_list,
                    'confidence': 2,
                    'severity': 2,
                    'remediation': remediation
                }
            # Latest instance's computed values win, as before.
            vul_dict[name]['confidence'] = confidence
            vul_dict[name]['severity'] = severity
        # Attach remediation text from the matching BugPattern entries.
        # NOTE(review): instances are keyed by ShortMessage but patterns by
        # ShortDescription -- assumes the two match; confirm.
        des_path = data.xpath(r'//BugPattern[@category="SECURITY"]')
        for instance in des_path:
            name = instance.findtext('ShortDescription')
            if name in vul_dict:
                vul_dict[name]['remediation'] = instance.findtext('Details')
            else:
                print('Vul not found')
        result_dict = init_es
        for vul in vul_dict.keys():
            result_dict['vulnerability'] = {
                'name': vul,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'FindSecBugs',
                'confidence': vul_dict.get(vul).get('confidence'),
                'severity': vul_dict.get(vul).get('severity'),
                'description': vul_dict.get(vul).get('description', ''),
                'remediation': vul_dict.get(vul).get('remediation', ''),
                'created_on': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                'cwe': {
                    'cwe_id': vul_dict.get(vul).get('cwe', 0),
                    'cwe_link':
                    'https://cwe.mitre.org/top25/index.html#CWE-{0}'.format(
                        vul_dict.get(vul).get('cwe', 0))
                },
                'evidences': vul_dict.get(vul).get('evidence_list', [])
            }
            write_results(result_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('FindSecBugs XML parsing complete')
def create_vul(data,es_reference,confidence,severity,cwe,tool,evidences):
    """Create a Vulnerability row plus its evidence files and rows.

    data: parser result dict (the 'vulnerability' sub-dict is read).
    es_reference: Scan name used to look up the owning scan/org.
    evidences: list of dicts; 'log' evidence is written to one file,
    otherwise 'request'/'response' are written to separate files. All
    files are uploaded via MinioUtil.
    """
    try:
        # Ensure the local evidence directory exists before writing files.
        evid_dir_path = settings.EVIDENCE_ROOT
        if not os.path.isdir(evid_dir_path):
            os.mkdir(evid_dir_path)
        cvss_dict = {3:7.5,2:4.5,1:2.5,0:0}
        vul_name = data['vulnerability'].get('name','')
        created_on = data['vulnerability'].get('created_on',timezone.now())
        scan_obj = Scan.objects.select_related('application__org').get(name=es_reference)
        org_obj = scan_obj.application.org
        cwe = cwe or 0
        # Prefer the org's ORL service for the canonical name/DREAD score.
        if org_obj.orl_config_exists():
            common_name = get_open_vul_name_from_api(cwe,org_obj)
            vul_info = get_open_vul_info_from_api(cwe,org_obj)
            dread_score = vul_info.get('dread_score',0)
        else:
            common_name = vul_name[:255]
            dread_score = 0
        # Clamp out-of-range severity 4 down to the 0-3 scale maximum.
        if severity == 4 or severity == '4':
            severity = 3
        try:
            meta_dict = {
                'name':vul_name[:255],
                'description':data['vulnerability'].get('description','N/A'),
                'remediation':data['vulnerability'].get('remediation','N/A'),
                'tool':tool,
                'is_false_positive':data['vulnerability'].get('is_false_positive',False),
                'is_remediated':data['vulnerability'].get('is_remediated',False),
                'confidence':int(confidence),
                'severity':int(severity),
                'vul_type':data['vulnerability'].get('vul_type','Configuration'),
                'owasp':data['vulnerability'].get('owasp','Uncategorized'),
                'cvss':data['vulnerability'].get('cvss',cvss_dict.get(severity)),
                'cwe':int(cwe),
                'scan':scan_obj,
                'dread':dread_score,
                'common_name':common_name
            }
            vul = Vulnerability.objects.create(**meta_dict)
            # Overwrite auto timestamps with the parser-supplied date.
            vul.created_on = created_on
            vul.edited_on = created_on
            vul.save()
            evids = []
            for evid in evidences:
                log = evid.get('log','')
                request_file_name = ''
                response_file_name = ''
                log_file_name = ''
                if log:
                    # Log evidence: one file holds the raw log text.
                    log_file_name = '{0}log_{1}.txt'.format(settings.EVIDENCE_MEDIA_URL,str(uuid4()))
                    full_path = os.path.join(settings.MEDIA_ROOT,log_file_name)
                    with open(full_path,'wb') as fp:
                        if not isinstance(log,bytes):
                            log = bytes(log.encode('utf-8'))
                        fp.write(log)
                    MinioUtil().upload_file_from_path(log_file_name,full_path)
                else:
                    # HTTP evidence: request and response each get a file.
                    request = evid.get('request','')
                    response = evid.get('response','')
                    if request:
                        request_file_name = '{0}request_{1}.txt'.format(settings.EVIDENCE_MEDIA_URL,str(uuid4()))
                        full_path = os.path.join(settings.MEDIA_ROOT,request_file_name)
                        with open(full_path,'wb') as fp:
                            if not isinstance(request,bytes):
                                request = bytes(request.encode('utf-8'))
                            fp.write(request)
                        MinioUtil().upload_file_from_path(request_file_name,full_path)
                    if response:
                        response_file_name = '{0}response_{1}.txt'.format(settings.EVIDENCE_MEDIA_URL,str(uuid4()))
                        full_path = os.path.join(settings.MEDIA_ROOT,response_file_name)
                        with open(full_path,'wb') as fp:
                            if not isinstance(response,bytes):
                                response = bytes(response.encode('utf-8'))
                            fp.write(response)
                        MinioUtil().upload_file_from_path(response_file_name,full_path)
                data = {
                    'vul':vul,
                    'url':evid.get('url',''),
                    'param':evid.get('name',''),
                    'request':request_file_name,
                    'response':response_file_name,
                    'log':log_file_name
                }
                evids.append(VulnerabilityEvidence(**data))
            # Insert all evidence rows in one query.
            if evids:
                VulnerabilityEvidence.objects.bulk_create(evids)
                info_debug_log(event='Bulk-create vulnerability',status='success')
        except Scan.DoesNotExist:
            # NOTE(review): the Scan lookup happens *before* this inner try,
            # so this handler looks unreachable as written -- confirm intent.
            pass
    except BaseException as e:
        log_exception(e)
        critical_debug_log(event=e,status='failure')
def write_results_to_db(data):
    """ This function processes the vulnerabilities identified by the scanner to identify and reduce the false positives. """
    # Normalizes severity/CVSS/CWE fields on the parser result, applies
    # tool-specific false-positive heuristics, then delegates to create_vul.
    try:
        vul_name = str(data.get('vulnerability',{}).get('name',''))
        # Clamp severity 4 to 3; 0-3 map to themselves.
        severity_dict = {4:3,3:3,2:2,1:1,0:0}
        cvss_dict = {3:7.5,2:4.5,1:2.5,0:0}
        if vul_name:
            app_name = data.get('host',{}).get('name')
            app_obj = Application.objects.get(name=app_name)
            tool = data.get('vulnerability',{}).get('tool')
            severity = data.get('vulnerability',{}).get('severity')
            if severity:
                severity = severity_dict.get(int(severity))
            cwe_id = data.get('vulnerability',{}).get('cwe',{}).get('cwe_id',0)
            confidence = int(data.get('vulnerability',{}).get('confidence',2))
            burp_confidence_dict = {
                "Certain":3,
                "Firm":2,
                "Tentative":1,
            }
            zap_confidence_dict = {
                "Medium":2,
                "Low":1
            }
            # Fill in defaults the parsers may not have set.
            data['vulnerability']['vul_type'] = data['vulnerability'].get('vul_type','Configuration')
            data['vulnerability']['owasp'] = data['vulnerability'].get('owasp','Uncategorized')
            data['vulnerability']['cvss'] = data['vulnerability'].get('cvss',cvss_dict.get(severity))
            data['vulnerability']['cwe'] = {
                'cwe_id': cwe_id
            }
            # Tool heuristics: low confidence => mark as false positive.
            if tool == 'Burp':
                if confidence == 3:
                    data['vulnerability']['is_false_positive'] = False
                elif confidence == 2:
                    data['vulnerability']['is_false_positive'] = False
                elif confidence == 1:
                    data['vulnerability']['is_false_positive'] = True
            elif tool == 'ZAP':
                if confidence == 1:
                    data['vulnerability']['is_false_positive'] = True
                elif confidence == 2:
                    data['vulnerability']['is_false_positive'] = False
            # A finding previously marked false-positive for this app/tool
            # stays false-positive.
            vuls = Vulnerability.objects.select_related('vul','scan__application').filter(name=vul_name,tool=tool,is_false_positive=True,scan__application__name=app_name)
            if vuls.exists():
                data['vulnerability']['is_false_positive'] = True
            cwe = str(data['vulnerability'].get('cwe',{}).get('cwe_id',0))
            es_reference = data.get('scan_reference',{}).get('es_reference','')
            # NOTE(review): str(0) == '0' is truthy, so this branch only
            # runs when cwe_id was the empty string; scan_obj/org_obj are
            # also unused afterwards -- confirm intent.
            if not cwe:
                scan_obj = Scan.objects.select_related('application__org').get(name=es_reference)
                org_obj = scan_obj.application.org
            if cwe:
                data['vulnerability']['cwe']['cwe_id'] = cwe
                data['vulnerability']['severity'] = severity
            else:
                # No CWE: low-severity findings are reclassified as
                # configuration issues with a fixed CVSS.
                if severity == 0 or severity == 1:
                    if severity == 0:
                        cvss = 0
                    else:
                        cvss = 2
                    data['vulnerability']['vul_type'] = 'Configuration'
                    data['vulnerability']['owasp'] = 'Security Misconfiguration'
                    data['vulnerability']['cvss'] = cvss
                data['vulnerability']['cwe']['cwe_id'] = 0
            data['vulnerability']['name'] = vul_name
            evidences = data.get('vulnerability',{}).get('evidences',[])
            create_vul(data,es_reference,confidence,severity,cwe,tool,evidences)
            info_debug_log(event='Write false positive data to ES',status='success')
    except BaseException as e:
        log_exception(e)
        critical_debug_log(event=e,status='failure')
def parse_xml(self):
    """Parse the w3af XML report held by this parser and write results.

    Groups <vulnerability> elements by name, reconstructs request/response
    transcripts from the http-transactions, and passes each group to
    self.write_results.

    Fix vs. original: the request/response strings are UTF-8 encoded before
    base64-encoding -- b64encode requires bytes and raised TypeError on str
    input under Python 3 (matches the bytes handling used by the other
    parsers in this module). Dead duplicate `vuls = findall(...)` removed.
    """
    root_elem = self.get_root()
    if root_elem is not None:
        try:
            severity_dict = {
                'Information': 0,
                'Low': 1,
                'Medium': 2,
                'High': 3
            }
            uniq_objs = root_elem.findall('vulnerability')
            vuls = set(
                [i.attrib.get('name', 'Unknown') for i in uniq_objs])
            p = 'vulnerability[@name = $name]'
            for v in vuls:
                cves = []
                cwes = {}
                url_param_list = []
                obj = root_elem.xpath(p, name=v)
                for parent_obj in obj:
                    url = parent_obj.attrib.get('url', '')
                    param = parent_obj.attrib.get('var', '')
                    trans = parent_obj.find('http-transactions')
                    if trans is not None:
                        # Rebuild plain-text transcripts: status line plus
                        # one "field:content" line per header.
                        req_resps = trans.findall('http-transaction')
                        request = response = '\n'
                        for r in req_resps:
                            req = r.find('http-request')
                            request += req.findtext('status', default='')
                            headers = req.findall('headers/header')
                            for h in headers:
                                request += '\n%s:%s' % (h.attrib.get(
                                    'field'), h.attrib.get('content'))
                            res = r.find('http-response')
                            response += res.findtext('status', default='')
                            headers = res.findall('headers/header')
                            for h in headers:
                                response += '\n%s:%s' % (h.attrib.get(
                                    'field'), h.attrib.get('content'))
                        url_param_list.append({
                            'url': url,
                            'name': param,
                            # encode first: b64encode rejects str input
                            'request': b64encode(request.encode('utf-8')),
                            'response': b64encode(response.encode('utf-8'))
                        })
                # Metadata comes from the last element of the group.
                vul_name = parent_obj.attrib.get('name', 'Unknown')
                desc = parent_obj.findtext('long-description', default='')
                remedy = parent_obj.findtext('fix-guidance', default='')
                severity = severity_dict.get(
                    parent_obj.attrib.get('severity', 'Information'))
                self.write_results(vul_name, '', severity, desc, remedy,
                                   cves, cwes, url_param_list)
        except BaseException as e:
            log_exception(e)
        else:
            self.console_log()
def parse_zap(xml_file, user_name, init_es):
    """Parse an OWASP ZAP XML report and push one result per unique alert.

    Args:
        xml_file: path to the ZAP XML report file.
        user_name: user who triggered the scan (unused here; kept for
            signature parity with the other parsers).
        init_es: base result dict; its 'vulnerability' key is rewritten
            for every alert before calling write_results.
    """
    try:
        nreport = xml.parse(xml_file)
        root_elem = nreport.getroot()
        scan_date = root_elem.attrib.get('generated')
        created_on = get_created_on(scan_date)
        for site in root_elem.findall('site'):
            # Collect the set of distinct alert names, then fetch every
            # alertitem carrying each name.
            uniq_objs = site.xpath(r'alerts/alertitem/alert')
            vuls = set([i.text for i in uniq_objs])
            p = 'alerts/alertitem/alert[text() = $name]'
            for v in vuls:
                obj = site.xpath(p, name=v)
                url_param_list = []
                for u in obj:
                    parent_obj = u.getparent()
                    instances = parent_obj.find('instances')
                    if instances is not None:
                        for ins in instances.findall('instance'):
                            url_param_list.append({
                                'url': ins.findtext('uri', default=''),
                                'param': ins.findtext('param', default='')
                            })
                vul_name = parent_obj.findtext('alert', default='')
                desc = parent_obj.findtext('desc', default='')
                cwe = parent_obj.findtext('cweid', default='')
                solution = parent_obj.findtext('solution', default='')
                # BUG FIX: findtext returns a *string* when the node exists
                # (and the int default only when it doesn't). Cast so
                # severity/confidence are ints, consistent with the other
                # parsers in this module (e.g. the npm-audit parser).
                severity = int(parent_obj.findtext('riskcode', default=0))
                confidence = int(parent_obj.findtext('confidence', default=2))
                # NOTE(review): init_es is aliased, not copied — same
                # pattern as the sibling parsers; write_results must
                # consume the dict before the next iteration mutates it.
                vul_dict = init_es
                vul_dict['vulnerability'] = {
                    'name': re.sub('<[^<]+?>', '', vul_name),
                    'is_false_positive': False,
                    'is_remediated': False,
                    'tool': 'ZAP',
                    'confidence': confidence,
                    'severity': severity,
                    'description': re.sub('<[^<]+?>', '', desc),
                    'vul_type': 'Insecure Coding',
                    'remediation': re.sub('<[^<]+?>', '', solution),
                    'created_on': created_on
                }
                vul_dict['vulnerability']['evidences'] = url_param_list
                vul_dict['vulnerability']['cwe'] = {
                    'cwe_id': cwe,
                    'cwe_link':
                        'https://cwe.mitre.org/data/definitions/%s.html' % cwe
                }
                write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('ZAP XML Parsing Completed')
def parse_owasp_dep_checker(xml_file, user_name, init_es):
    """Parse an OWASP Dependency-Check XML report and push results to the DB.

    Vulnerabilities are de-duplicated by name across all dependencies;
    one result per unique vulnerability name is written via write_results.

    Args:
        xml_file: path to the Dependency-Check XML report.
        user_name: user who triggered the scan (unused here; kept for
            signature parity with the other parsers).
        init_es: base result dict; its 'vulnerability' key is rewritten
            for every unique vulnerability.
    """
    try:
        nreport = xml.parse(xml_file)
        root_elem = nreport.getroot()
        # The report uses a default XML namespace; prefix every tag lookup.
        NS = "{%s}" % root_elem.nsmap.get(None)
        # BUG FIX: created_on was unbound (NameError) when <projectInfo>
        # was missing; fall back to "now", like the other parsers.
        created_on = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
        projectInfo = root_elem.find('%sprojectInfo' % NS)
        if projectInfo is not None:
            scan_date = projectInfo.findtext('%sreportDate' % NS, default='')
            created_on = parser.parse(scan_date).strftime("%Y-%m-%d %H:%M:%S")
        dependencies = root_elem.findall('%sdependencies/%sdependency' %
                                         (NS, NS))
        severity_dict = {
            'Low': 1,
            'Medium': 2,
            'High': 3,
            'Information': 0,
            'Info': 0,
        }
        vul_dict = {}
        for dependency in dependencies:
            url_param_list = []
            vulnerabilities_parent = dependency.find('%svulnerabilities' % NS)
            related_dependencies_parent = dependency.find(
                '%srelatedDependencies' % NS)
            if vulnerabilities_parent is not None:
                module = dependency.findtext('%sfileName' % NS, default='')
                version = dependency.findtext(
                    '%sevidenceCollected/%sevidence[@type="version"]'
                    '[@confidence="HIGHEST"]/%svalue' % (NS, NS, NS),
                    default='')
                # NOTE(review): dep_dict/file_paths are collected but never
                # attached to the written result — confirm intent.
                dep_dict = {'file_paths': []}
                if related_dependencies_parent is not None:
                    related_dependencies = related_dependencies_parent.findall(
                        '%srelatedDependency' % NS)
                    for dep in related_dependencies:
                        filePath = dep.findtext('%sfilePath' % NS, default='')
                        dep_dict['file_paths'].append(filePath)
                vulnerabilities = vulnerabilities_parent.findall(
                    '%svulnerability' % NS)
                for vuln in vulnerabilities:
                    vul_name = vuln.findtext('%sname' % NS, default='')
                    if vul_name not in vul_dict:
                        vul_dict[vul_name] = {'evidences': []}
                    # BUG FIX: the fallback was the *string* 'Low'; use its
                    # numeric value so severity is always an int.
                    severity = severity_dict.get(
                        vuln.findtext('%sseverity' % NS, default=''), 1)
                    description = vuln.findtext('%sdescription' % NS,
                                                default='')
                    cwe = vuln.findtext('%scwe' % NS, default='')
                    if cwe:
                        # Keep only the numeric part of e.g. "CWE-79 XSS".
                        match = re.search(r'(CWE-)(\d{1,9})(.*?)', cwe)
                        if match:
                            cwe = match.group(2)
                    # NOTE(review): cve is extracted but never included in
                    # the result dict — confirm whether it should be.
                    cve = ''
                    match = re.search(r'(CVE)-(\d+)-(\d+)', vul_name)
                    if match:
                        cve = match.group()
                    url_param_list.append({
                        'url': module,
                        'name': version,
                        'log': ''
                    })
                    data_dict = {
                        'name': vul_name,
                        'is_false_positive': False,
                        'is_remediated': False,
                        'tool': 'OWASP Dependency Checker',
                        'confidence': 2,
                        'severity': severity,
                        'description': description,
                        'remediation': '',
                        'created_on': created_on,
                        'cwe': {
                            'cwe_id': cwe,
                            'cwe_link':
                                'https://cwe.mitre.org/data/definitions/'
                                '{0}.html'.format(cwe)
                        }
                    }
                    vul_dict[vul_name]['evidences'] = url_param_list
                    vul_dict[vul_name].update(data_dict)
        for v in vul_dict.values():
            vul_dict_final = init_es
            vul_dict_final['vulnerability'] = v
            write_results(vul_dict_final)
    except BaseException as e:
        log_exception(e)
    else:
        print('[ + ] OWASP Dependency Checker parsing completed')