def parse_npm_audit(json_file, user_name, init_es):
    """Parse an `npm audit --json` report and push one result per advisory.

    Args:
        json_file: Path to the npm audit JSON report.
        user_name: User who initiated the scan (kept for the uniform
            parser signature; unused here).
        init_es: Base result dict; a 'vulnerability' entry is set per
            advisory and the dict is handed to `write_results`.
    """
    try:
        with open(json_file, 'r', encoding='utf-8') as fp:
            results = json.load(fp)
        # npm severities are critical/high/moderate/low/info.
        # FIX: the original table had a stray " " key, so 'low' advisories
        # only matched via the .get() default; map 'low' explicitly (same
        # numeric value, so existing output is unchanged).
        severity_dict = {
            "critical": 3,
            "high": 2,
            "moderate": 1,
            "low": 0,
            "info": 0,
        }
        # FIX: guard a report without an 'advisories' key instead of
        # crashing on None.values().
        advisories = results.get('advisories') or {}
        for vul in advisories.values():
            name = vul.get('title', '')
            desc = vul.get('overview', '')
            recommendation = vul.get('recommendation', '')
            module_name = vul.get('module_name', '')
            severity = int(severity_dict.get(vul.get('severity'), 0))
            # 'CWE-79' -> 79; empty/missing falls back to 0.
            cwe = int(vul.get('cwe', "").replace('CWE-', '') or 0)
            # One evidence entry per affected (module, version) finding.
            # (The original also built an unused cve string and an unused
            # dep_dict of file paths; both removed as dead code.)
            evid_list = []
            for f in vul.get('findings', []):
                evid_list.append({
                    'url': module_name,
                    'name': f.get('version', ''),
                })
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': name,
                'is_false_positive': False,
                'is_remediated': False,
                'tool': 'NpmAudit',
                'confidence': 2,
                'severity': severity,
                'description': desc,
                'remediation': recommendation,
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'evidences': evid_list
            }
            if cwe:
                vul_dict['vulnerability']['cwe'] = {
                    'cwe_id': cwe,
                    'cwe_link': 'https://cwe.mitre.org/data/definitions/%s.html' % cwe
                }
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('NPM Audit parsing completed')
def parse_appspider(xml_file, user_name, init_es):
    """
    Parse an AppSpider XML report and push the results to the DB.

    Issues are grouped by their VulnType text; one result is written per
    unique vulnerability name with all matching URLs as evidence.

    Raises:
        MalFormedXMLException: when the XML cannot be parsed.
    """
    try:
        print('appspider XML parsing initiated')
        try:
            nreport = xml.parse(xml_file)
        except (xml.XMLSyntaxError, xml.ParserError):
            raise MalFormedXMLException(user_name)
        root_elem = nreport.getroot()
        uniq_objs = root_elem.xpath(r'VulnList/Vuln/VulnType')
        vuls = set(i.text for i in uniq_objs)
        p = 'VulnList/Vuln/VulnType[text() = $name]'
        tag_re = '<[^<]+?>'  # strip embedded HTML markup
        for v in vuls:
            obj = root_elem.xpath(p, name=v)
            url_param_list = []
            for u in obj:
                parent_obj = u.getparent()
                url_param_list.append({
                    'url': parent_obj.findtext('Url', default=''),
                    'name': parent_obj.findtext('VulnParamType', default=''),
                    'request': parent_obj.findtext('CrawlTraffic', default=''),
                    'response': parent_obj.findtext('CrawlTrafficResponse', default=''),
                })
            # FIX: read issue metadata from an explicit node (the last
            # match — the same node the original's leaked loop variable
            # pointed at) instead of relying on loop-variable leakage.
            meta = obj[-1].getparent()
            vul_name = re.sub(tag_re, '', meta.findtext('VulnType', default=''))
            desc = re.sub(tag_re, '', meta.findtext('Description', default=''))
            recommendation = re.sub(tag_re, '', meta.findtext('Recommendation', default=''))
            cwe = meta.findtext('CweId', default='0')
            severity = sev_dict.get(meta.findtext('AttackScore', default=''), 0)
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': vul_name,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'AppSpider',
                'confidence': 2,
                'severity': severity,
                'description': desc,
                'remediation': recommendation,
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'cwe': {
                    'cwe_id': cwe,
                    'cwe_link': 'https://cwe.mitre.org/top25/index.html#CWE-%s' % cwe
                },
                'evidences': url_param_list
            }
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('XML Parsing Completed')
def parse_bandit(json_file, user_name, init_es):
    """
    Parse a Bandit JSON report and push the results to the DB.

    Findings are grouped by (test_name, issue_severity); one result is
    written per group with one evidence entry per finding.
    """
    try:
        print("Bandit json parsing initiated")
        with open(json_file, 'r', encoding='utf-8') as fp:
            datafile = json.load(fp)
        current_date = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
        scan_date = datafile.get('generated_at', current_date)
        created_on = parser.parse(scan_date).strftime("%Y-%m-%d %H:%M:%S")
        results = datafile.get('results', [])
        new_vuls = defaultdict(list)
        for r in results:
            filename = r.get('filename', '')
            code = r.get('code', '')
            line_num = r.get('line_number', '')
            line_range = r.get('line_range') or ''
            if line_range:
                line_range = line_range[-1]  # keep only the last line of the range
            new_vuls[(r.get('test_name', 'Unknown'),
                      r.get('issue_severity', 'LOW'))].append({
                'url': filename.split('/')[-1],
                'param': str(line_num) + " " + str(line_range),
                'log': code
            })
        for vul_tup, url_param_list in new_vuls.items():
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': vul_tup[0],
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'Bandit',
                'confidence': 2,
                'severity': sev_dict.get(vul_tup[1], 0),
                'description': '',
                'remediation': '',
                # FIX: use the report's generated_at timestamp (already
                # parsed into created_on) instead of silently discarding it.
                'created_on': created_on,
                'evidences': url_param_list
            }
            write_results(vul_dict)
    except Exception as e:
        log_exception(e)
    else:
        print('Bandit JSON Parsing Completed')
def parse_hp_fortify(xml_file, user_name, init_es):
    """
    Parse an HP Fortify XML report and push the results to the DB.

    Each GroupingSection carries a shared explanation/recommendation; each
    Issue inside it becomes one result with Primary/Source locations as
    evidence.

    Raises:
        MalFormedXMLException: when the XML cannot be parsed.
    """
    try:
        nreport = xml.parse(xml_file)
    except (xml.XMLSyntaxError, xml.ParserError):
        raise MalFormedXMLException(user_name)
    rootelement = nreport.getroot()
    for path_value in rootelement.xpath(r'//GroupingSection'):
        # FIX: reset per section so a section without a full
        # MajorAttributeSummary cannot raise NameError (first iteration)
        # or inherit stale text from the previous section.
        explination = ''
        recommendations = ''
        meta_info = path_value.findall("MajorAttributeSummary/MetaInfo")
        if len(meta_info) > 2:
            explination = meta_info[1].findtext('Value')
            recommendations = meta_info[2].findtext('Value')
        for issue in path_value.findall("Issue"):
            name = issue.findtext('Kingdom')
            friority = issue.findtext('Friority')
            # Collect the Primary (sink) and Source locations, when present.
            evids = []
            for node in (issue.find("Primary"), issue.find("Source")):
                if node is not None:
                    evids.append({
                        'name': 'File : {0} LineNum : {1}'.format(
                            node.findtext('FileName', default='Unknown'),
                            node.findtext('LineStart', default='Unknown')),
                        'url': node.findtext('FilePath', default=''),
                        'log': node.findtext('Snippet', default='')
                    })
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'tool': 'HP Fortify',
                'name': name,
                'is_false_positive': False,
                'is_remediated': False,
                'description': explination,
                'remediation': recommendations,
                'severity': sev_dict.get(friority, 0),
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'evidences': evids
            }
            write_results(vul_dict)
def parse_nodejsscan(json_file, user_name, init_es):
    """
    Parse a NodeJsScan JSON report and push the results to the DB.

    Issues are grouped by their title; one result is written per title
    with one evidence entry per underlying issue.
    """
    try:
        print('NodeJSScan parsing initiated')
        with open(json_file, 'r', encoding='utf-8') as fp:
            results = json.load(fp)
        grouped = {}
        for k, v in results.get('sec_issues').items():
            # FIX: the original nested `for x in v: for z in v:` loops reset
            # each title's bucket len(v) times and attached EVERY issue in v
            # to EVERY title present in v. Group each issue under its own
            # title exactly once instead.
            for issue in v:
                title = issue.get('title')
                if title not in grouped:
                    grouped[title] = {'description': '', 'evidences': []}
                grouped[title]['evidences'].append({
                    'url': issue.get('path') + " line number " + str(issue.get('line')),
                    'param': issue.get('filename'),
                    'log': issue.get('lines')
                })
                # Last issue's description wins, matching the original.
                grouped[title]['description'] = issue.get('description')
        for k, v in grouped.items():
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': k,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'NodeJsScan',
                'confidence': 2,
                'severity': 2,
                'description': v.get('description', ''),
                'remediation': '',
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                'evidences': v.get('evidences', [])
            }
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('NodeJSScan parsing completed')
def parse_xanitizer(xml_file, user_name, init_es):
    """
    Parse a Xanitizer XML findings report and push the results to the DB.

    Raises:
        MalFormedXMLException: when the XML cannot be parsed.
    """
    try:
        data = xml.parse(xml_file)
    except (xml.XMLSyntaxError, xml.ParserError):
        raise MalFormedXMLException(user_name)
    data = data.getroot()
    # Xanitizer classification -> numeric severity used by the results store.
    severity_map = {
        'Must Fix': 3,
        'Warning': 2,
        'Information': 1,
        'Harmless': 0,
    }
    for main in data.xpath(r'//XanitizerFindingsList'):
        for finding in main.findall('finding'):
            problemType = finding.findtext("problemType")
            rating = finding.findtext("rating")
            result_rating = int(float(rating))
            try:
                # 'CWE-89' -> '89'
                cwehead, cweNumber = finding.findtext("cweNumber").split('-')
            except Exception:  # missing or malformed cweNumber
                cweNumber = 0
            description = finding.findtext("description")
            # FIX: unknown classifications now default to severity 0 instead
            # of raising NameError on an unassigned variable.
            severity = severity_map.get(finding.findtext("classification"), 0)
            # FIX: accumulate one evidence entry per start/intermediate node;
            # the original reset `evids = []` inside each loop iteration and
            # so kept only the last node's path.
            evids = []
            for node in finding.findall("startNode") + finding.findall("node"):
                absolute_path = node.get("absolutePath")
                if absolute_path is not None:
                    evids.append({"url": absolute_path})
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': problemType,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'xanitizer',
                'confidence': result_rating,
                'severity': severity,
                'description': description,
                'vul_type': 'Insecure Coding',
                'remediation': '',
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
            }
            vul_dict['vulnerability']['evidences'] = evids
            vul_dict['vulnerability']['cwe'] = {
                'cwe_id': cweNumber,
                'cwe_link': 'https://cwe.mitre.org/data/definitions/%s.html' % cweNumber
            }
            write_results(vul_dict)
def parse_zap(xml_file, user_name, init_es):
    """
    Parse a ZAP XML report and push the results to the DB.

    Alerts are grouped by their alert text per site; one result is written
    per unique alert with all instance URLs/params as evidence.
    """
    try:
        nreport = xml.parse(xml_file)
        root_elem = nreport.getroot()
        scan_date = root_elem.attrib.get('generated')
        created_on = get_created_on(scan_date)
        # (unused host/host_name locals from the original removed)
        for site in root_elem.findall('site'):
            uniq_objs = site.xpath(r'alerts/alertitem/alert')
            vuls = set(i.text for i in uniq_objs)
            p = 'alerts/alertitem/alert[text() = $name]'
            for v in vuls:
                obj = site.xpath(p, name=v)
                url_param_list = []
                for u in obj:
                    parent_obj = u.getparent()
                    instances = parent_obj.find('instances')
                    if instances is not None:
                        for ins in instances.findall('instance'):
                            url_param_list.append({
                                'url': ins.findtext('uri', default=''),
                                'param': ins.findtext('param', default='')
                            })
                # FIX: read alert metadata from an explicit node (the last
                # match, as the leaked loop variable effectively did).
                meta = obj[-1].getparent()
                vul_name = meta.findtext('alert', default='')
                desc = meta.findtext('desc', default='')
                cwe = meta.findtext('cweid', default='')
                solution = meta.findtext('solution', default='')
                # FIX: findtext returns strings; convert so severity and
                # confidence are ints like the other parsers emit (the
                # original mixed int defaults with str element text).
                severity = int(meta.findtext('riskcode', default='0') or 0)
                confidence = int(meta.findtext('confidence', default='2') or 2)
                vul_dict = init_es
                vul_dict['vulnerability'] = {
                    'name': re.sub('<[^<]+?>', '', vul_name),
                    'is_false_positive': False,
                    'is_remediated': False,
                    'tool': 'ZAP',
                    'confidence': confidence,
                    'severity': severity,
                    'description': re.sub('<[^<]+?>', '', desc),
                    'vul_type': 'Insecure Coding',
                    'remediation': re.sub('<[^<]+?>', '', solution),
                    'created_on': created_on
                }
                vul_dict['vulnerability']['evidences'] = url_param_list
                vul_dict['vulnerability']['cwe'] = {
                    'cwe_id': cwe,
                    'cwe_link': 'https://cwe.mitre.org/data/definitions/%s.html' % cwe
                }
                write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('ZAP XML Parsing Completed')
def parse_appscan_sast(full_path, user_name, init_es):
    """
    Parse an AppScan SAST HTML report and push the results to the DB.

    The report lists issues as links in a report tree; the N-th link
    corresponds to the detail section <div id="div_N">, from which the
    description, code snippets, file/line evidence and CWE are scraped.
    """
    with open(full_path) as fp:
        tree = html.fromstring(fp.read())
    names = tree.findall(
        'body/table[@class="report_tree_link"]/tbody/tr/td/a[@class="form_link"]'
    )
    for i, n in enumerate(names, 1):
        name = str(n.text or 'Unknown').split('-')[-1]
        desc = tree.findtext(
            'body/div[@id="div_{0}"]/table/tbody/tr/td/pre'.format(i),
            default='')
        bullets = tree.findall(
            'body/div[@id="div_{0}"]/table/tbody/tr/td[@class="f8-0"]'.format(i))
        codes = tree.findall(
            'body/div[@id="div_{0}"]/table/tbody/tr/td/div/table/tbody/td/div/table/tbody/'
            .format(i))
        evidences = []
        cwe = 0
        code_snippet = ''
        for c in codes:
            file_name = ''
            line_num = ''
            code_block = c.find('td[@colspan="8"]')
            if code_block is not None:
                snippet = code_block.find('div/table/tbody/tr/td[@class="code"]')
                if snippet is not None:
                    for s in snippet.findall('.//'):
                        code_snippet += '{0}'.format(s.text_content())
            td4 = c.find('td[4]')
            if td4 is not None:
                file_name = td4.text or ''
            td5 = c.find('td[5]')
            if td5 is not None:
                line_num = td5.text or ''
            td6 = c.find('td[6]/a')
            if td6 is not None:
                cwe = td6.text or 0
            if file_name and line_num:
                evidences.append({
                    'url': file_name,
                    'name': "Line : {0}".format(line_num),
                    'log': code_snippet
                })
                code_snippet = ''  # snippet belongs to this evidence row
        for b in bullets:
            desc += '\n{0}'.format(b.text or '')
        vul_dict = init_es
        vul_dict['vulnerability'] = {
            'name': name,
            'is_false_positive': False,
            'is_remediated': False,
            'is_deleted': False,
            'tool': 'AppScan - SAST',
            'confidence': 2,
            'severity': 2,
            'description': desc,
            'remediation': '',
            'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
            'evidences': evidences
        }
        # FIX: the extracted CWE was previously collected and then silently
        # discarded; report it like the other parsers do.
        # NOTE(review): assumes the td[6] link text is the bare CWE id —
        # confirm against a sample report.
        if cwe:
            vul_dict['vulnerability']['cwe'] = {'cwe_id': cwe}
        write_results(vul_dict)
def parse_owasp_dep_checker(xml_file, user_name, init_es):
    """
    Parse an OWASP Dependency Checker XML report (parsed leniently with
    BeautifulSoup) and push the results to the DB.
    """
    try:
        # FIX: close the report file; the original `open(...).read()`
        # leaked the handle and shadowed the `file` builtin name.
        with open(xml_file) as fp:
            data = BeautifulSoup(fp.read(), 'lxml')
        severity_dict = {
            'Low': 1,
            'Medium': 2,
            'High': 3,
            'Information': 0,
            'Info': 0,
        }
        for dependency in data.findAll('dependency'):
            vulnerabilities = dependency.find('vulnerabilities')
            url_param_list = []
            if vulnerabilities:
                filename = dependency.find('filename').get_text()
                evid_desc = dependency.find('filepath').get_text()
                for vuln in dependency.findAll('vulnerability'):
                    vul_name = vuln.find('name').get_text()
                    description = vuln.find('description').get_text()
                    # FIX: unknown severities previously defaulted to the
                    # string 'Low'; use the numeric value Low maps to.
                    severity = severity_dict.get(
                        vuln.find('severity').get_text(), 1)
                    url_param_list.append({
                        'url': filename,
                        'name': evid_desc,
                        'log': ''
                    })
                    # 'CWE-79 Cross-site Scripting' -> '79'
                    cwe = 0
                    if vuln.find('cwe'):
                        cwe = vuln.find('cwe').get_text().split(' ')[0].split(
                            'CWE-')[1]
                    vul_dict = init_es
                    vul_dict['vulnerability'] = {
                        'name': vul_name,
                        'is_false_positive': False,
                        'is_remediated': False,
                        'is_deleted': False,
                        'tool': 'Owasp Dependency Checker',
                        'confidence': 2,
                        'severity': severity,
                        'description': description,
                        'remediation': '',
                        'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
                        'cwe': {
                            'cwe_id': cwe,
                            'cwe_link':
                            'https://cwe.mitre.org/top25/index.html#CWE-{0}'.
                            format(cwe)
                        },
                        'evidences': url_param_list
                    }
                    write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
def process_alerts(alert):
    """
    Convert a single ZAP (JSON export) alert into a result document and
    push it to the DB.

    NOTE(review): relies on a module-level `init_es` being set before this
    runs — confirm against the caller.
    """
    severity = alert.get('RiskCode', 0)
    desc = alert.get('Desc', '')
    vul_name = alert.get('Alert', '')
    solution = alert.get('Solution', '')
    cwe = alert.get('CWEID', 0)
    url_param_list = []
    items = alert.get('Item', [])

    def _traffic(d):
        # Join header and body into one request/response blob; empty when
        # both parts are absent.
        # FIX: format the str values directly — the original called
        # .encode('utf-8') inside str.format, which embeds "b'...'" repr
        # text into the evidence under Python 3.
        req = res = ''
        if d.get('ResponseHeader', '') or d.get('ResponseBody', ''):
            res = '{0}\n\n{1}'.format(d.get('ResponseHeader', ''),
                                      d.get('ResponseBody', ''))
        if d.get('RequestHeader', '') or d.get('RequestBody', ''):
            req = '{0}\n\n{1}'.format(d.get('RequestHeader', ''),
                                      d.get('RequestBody', ''))
        return req, res

    if isinstance(items, list):
        for item in items:
            request, response = _traffic(item)
            attack = item.get('Attack', '')
            uri = item.get('URI', '')
            param = item.get('Param', '')
            url = uri.split(':')[-1]
            url_param_list.append({
                'url': url,
                'name': param,
                'attack': attack,
                'request': b64encode(request.encode('utf-8')),
                'response': b64encode(response.encode('utf-8')),
            })
    elif isinstance(items, dict):
        request, response = _traffic(items)
        # Single-item form prefers the Attack text, falling back to Param.
        param = items.get('Attack', '') or items.get('Param', '')
        url = items.get('URI', '').split(':')[-1]
        url_param_list.append({
            'url': url,
            'name': param,
            'request': b64encode(request.encode('utf-8')),
            'response': b64encode(response.encode('utf-8')),
        })
    vul_dict = init_es
    vul_dict['vulnerability'] = {
        'name': re.sub('<[^<]+?>', '', vul_name),
        'is_false_positive': False,
        'is_remediated': False,
        'is_deleted': False,
        'tool': 'ZAP',
        'confidence': 2,
        'severity': severity,
        'description': re.sub('<[^<]+?>', '', desc),
        'vul_type': 'Insecure Coding',
        'remediation': re.sub('<[^<]+?>', '', solution),
        'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S")
    }
    vul_dict['vulnerability']['evidences'] = url_param_list
    vul_dict['vulnerability']['cwe'] = {
        'cwe_id': cwe,
        'cwe_link': 'https://cwe.mitre.org/data/definitions/%s.html' % cwe
    }
    write_results(vul_dict)
def parse_burp_json(json_file, user_name, init_es):
    """
    Parse a Burp JSON report and push the results to the DB.

    Evidence from every issue sharing the same name is merged into a
    single result.
    """
    try:
        print("Burp json parsing initiated")
        with open(json_file) as fp:
            datafile = json.load(fp)
        seen_names = set()
        for root_key in datafile:
            results = root_key.get('issue', [])
            name = results.get('name', '')
            if name in seen_names:
                continue  # this name's evidence was already merged
            seen_names.add(name)
            # FIX: the original passed the sentinel '8389632' as the .get()
            # DEFAULT, so every unmapped type_index yielded
            # int('8389632'[0]) == CWE 8. Use the sentinel as a fallback
            # lookup key and report no CWE when it is unmapped.
            type_index = str(results.get('type_index') or '8389632')
            cwe_present = cwe_dict.get(type_index, [])
            cwe = int(cwe_present[0]) if cwe_present else 0
            # Aggregate request/response data from all same-named issues.
            # (Quadratic over the report, as in the original.)
            request_data = ''
            response_data = ''
            url = ''
            for other in datafile:
                other_issue = other.get('issue', [])
                if name != other_issue.get('name', ''):
                    continue
                for request_response in other_issue.get('evidence', ''):
                    rr = request_response.get('request_response')
                    if not rr:
                        continue
                    for key, value in rr.items():
                        if key == 'url':
                            url = value
                        elif key == 'request':
                            for request_info in value:
                                data_request = request_info.get('data')
                                if data_request is not None:
                                    request_data += str(
                                        b64decode(data_request), 'utf-8')
                        elif key == 'response':
                            for response_info in value:
                                data_response = response_info.get('data')
                                if data_response is not None:
                                    response_data += str(
                                        b64decode(data_response), 'utf-8')
            evids = [{
                'url': url,
                'name': results.get('path', ''),
                'request': str(b64encode(request_data.encode()), 'utf-8'),
                'response': str(b64encode(response_data.encode()), 'utf-8'),
            }]
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': results.get('name', ''),
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'Burp',
                'confidence': burp_confidence_dict.get(
                    results.get('confidence'), 3),
                'severity': sev_dict.get(results.get('severity'), 3),
                'description': results.get('description', ''),
                'vul_type': 'Insecure Coding',
                'remediation': results.get('remediation', ''),
                'observations': '',
                'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
            }
            vul_dict['vulnerability']['evidences'] = evids
            vul_dict['vulnerability']['cwe'] = {
                'cwe_id': cwe
            }
            write_results(vul_dict)
    except Exception as e:
        log_exception(e)
    else:
        print('BURP JSON Parsing Completed')
def parse_appscan_dast(xml_file, user_name, init_es):
    """
    Parse an AppScan DAST XML report and push the results to the DB.

    Walks issue-type-group items, resolves their fix recommendation and
    matching issues, and splits the recorded HTTP traffic blob into
    request/response evidence pairs.
    """
    report = etree.parse(xml_file)
    root_elem = report.getroot()
    issue_group = root_elem.find('issue-group')
    items = root_elem.findall('issue-type-group/item')
    for item in items:
        severity = item.attrib.get('maxIssueSeverity')
        ref_id = item.attrib.get('id')
        name = item.findtext('name', default='Unknown')
        cwe = item.findtext('cwe', default='0')
        recommendation_text = ''
        general = root_elem.find(
            "fix-recommendation-group/item[@id='{0}']/general/fixRecommendation[@type='General']"
            .format(ref_id))
        if general is not None:
            recommendation_text += '\n'.join(
                [g.text or '' for g in general.findall('./') if g is not None])
        req_resp = issue_group.xpath(
            "item/issue-type/ref[text()='{0}']".format(ref_id))
        evidences = []
        for r in req_resp:
            issue_item = r.getparent().getparent()
            url_ref = issue_item.findtext('url/ref', default='')
            url = root_elem.findtext(
                'url-group/item[@id="{0}"]/name'.format(url_ref), default='')
            # FIX: default the parameter name so an issue without any
            # differences items cannot raise NameError; as before, the last
            # difference item's name wins.
            param_name = ''
            for p in issue_item.findall('variant-group/item/differences/item'):
                param_name = p.attrib.get('name', '')
            new_req_resp = issue_item.findtext(
                'variant-group/item/test-http-traffic', default='')
            # Mark each HTTP method keyword as a split point to separate the
            # raw traffic blob into request/response pairs.
            rs = new_req_resp
            for verb in ('TRACE', 'PATCH', 'POST', 'BOGUS', 'GET', 'PUT',
                         'HEAD', 'OPTIONS', 'DELETE'):
                rs = rs.replace(verb, '\n\n$$Orc$$' + verb)
            for a in rs.split('\n\n$$Orc$$'):
                d = a.replace('\n\nHTTP/', '##Orc##HTTP/').split('##Orc##')
                if len(d) == 2 and 'HTTP/' in d[0] and 'HTTP/' in d[1]:
                    # FIX: b64encode requires bytes under Python 3; the
                    # original passed str and raised TypeError.
                    evidences.append({
                        'url': url,
                        'name': param_name,
                        'request': b64encode(d[0].encode('utf-8')),
                        'response': b64encode(d[1].encode('utf-8')),
                    })
        vul_dict = init_es
        vul_dict['vulnerability'] = {
            'name': name,
            'is_false_positive': False,
            'is_remediated': False,
            'is_deleted': False,
            'tool': 'AppScan - DAST',
            'confidence': 2,
            'severity': severity,
            'description': 'N/A',
            'vul_type': 'Insecure Coding',
            'remediation': recommendation_text,
            'observations': 'N/A',
            'created_on': timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
        }
        vul_dict['vulnerability']['evidences'] = evidences
        vul_dict['vulnerability']['cwe'] = {'cwe_id': cwe}
        write_results(vul_dict)
def parse_burp(xml_file, user_name, init_es):
    """
    Parse a Burp XML report and push the results to the DB.

    Issues are grouped by their name; one result is written per unique
    name with base64-encoded request/response pairs as evidence.

    Raises:
        MalFormedXMLException: when the XML cannot be parsed.
    """
    try:
        print('Burp XML parsing initiated')
        try:
            nreport = xml.parse(xml_file)
        except (xml.XMLSyntaxError, xml.ParserError):
            raise MalFormedXMLException(user_name)
        root_elem = nreport.getroot()
        current_date = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
        scan_date = root_elem.attrib.get('exportTime', current_date)
        created_on = parser.parse(scan_date).strftime("%Y-%m-%d %H:%M:%S")
        reg_path = r'issue/name'
        uniq_objs = root_elem.xpath(reg_path)
        vuls = set(i.text for i in uniq_objs)
        p = '{0}[text() = $name]'.format(reg_path)
        severity_dict = {'Information': 0, 'Low': 1, 'Medium': 2, 'High': 3}
        burp_confidence_dict = {
            "Certain": 3,
            "Firm": 2,
            "Tentative": 1,
        }
        for v in vuls:
            obj = root_elem.xpath(p, name=v)
            url_param_list = []
            for u in obj:
                parent_obj = u.getparent()
                req = parent_obj.find('requestresponse/request')
                res = parent_obj.find('requestresponse/response')
                request = response = b64encode(b'')
                if req is not None:
                    # FIX: tolerate empty elements (req.text may be None).
                    raw = (req.text or '').encode('utf-8')
                    # base64="true" means the payload is already encoded.
                    request = raw if req.get('base64') == 'true' else b64encode(raw)
                if res is not None:
                    raw = (res.text or '').encode('utf-8')
                    response = raw if res.get('base64') == 'true' else b64encode(raw)
                # (The original also built an unused `url` local with a
                # malformed 'http:/...' prefix; removed as dead code.)
                url_param_list.append({
                    'url': parent_obj.findtext('location', default=''),
                    'name': parent_obj.findtext('path', default=''),
                    'request': request,
                    'response': response,
                })
            # FIX: read issue metadata from an explicit node (the last
            # match, as the leaked loop variable effectively did).
            meta = obj[-1].getparent()
            vul_name = meta.findtext('name', default='')
            severity = meta.findtext('severity', '')
            issue_type = meta.findtext('type', '8389632')
            if severity:
                severity = severity_dict.get(severity)
            cwe_present = cwe_dict.get(issue_type, [])
            cwe = cwe_present[0] if cwe_present else 0
            desc = meta.findtext('issueBackground', default='')
            solution = meta.findtext('remediationBackground', default='')
            observation = meta.find('issueDetail')
            confidence = meta.findtext('confidence', default='')
            if confidence:
                confidence = burp_confidence_dict.get(confidence)
            if observation is not None:
                # Strip Burp's boilerplate advice from the observation.
                s = '''You should manually examine the application behavior and attempt to identify any unusual input validation or other obstacles that may be in place.'''
                obs = (observation.text or '').replace(s, '')
            else:
                obs = ''
            vul_dict = init_es
            vul_dict['vulnerability'] = {
                'name': re.sub('<[^<]+?>', '', vul_name),
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'Burp',
                'confidence': confidence,
                'severity': severity,
                'description': re.sub('<[^<]+?>', '', desc),
                'vul_type': 'Insecure Coding',
                'remediation': re.sub('<[^<]+?>', '', solution),
                'observations': obs,
                'created_on': created_on,
            }
            vul_dict['vulnerability']['evidences'] = url_param_list
            vul_dict['vulnerability']['cwe'] = {'cwe_id': cwe}
            write_results(vul_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('Burp XML parsing completed')
# End-Of-File
def parser_findsecbug(xml_file, user_name=None, init_es=None):
    """
    Parse a FindSecBugs (SpotBugs) XML report and push the results to DB.

    BugInstances in the SECURITY category are grouped by ShortMessage;
    BugPattern Details provide the remediation text.
    """
    # FIX: avoid the shared mutable default argument (init_es={}).
    if init_es is None:
        init_es = {}
    try:
        print('FindSecBugs XML parsing initiated')
        vul_dict = {}
        data = xml.parse(xml_file)
        data = data.getroot()
        path = data.xpath(r'//BugInstance[@category="SECURITY"]')
        # Lower the priority, higher the confidence
        confidence_dict = {'5': 1, '3': 1, '2': 2, '1': 3}
        # Higher the rank, lower the severity. [Max rank = 20]
        severity_dict = {'4': 0, '3': 1, '2': 2, '1': 3, '0': 3}

        def evidence_return(file_path, start_line, end_line):
            # One evidence entry per SourceLine element.
            return {
                'name': 'File: {0} LineNum: {1} - {2}'.format(
                    file_path.split('/')[-1], start_line, end_line),
                'url': file_path,
                'log': ''
            }

        for instance in path:
            evidence_list = []
            name = instance.findtext('ShortMessage')
            cwe = instance.get('cweid', 0)
            confidence = confidence_dict.get(instance.get('priority'))
            severity = severity_dict.get(
                str(int(round(int(instance.get('rank'))) / 5)))
            description = instance.findtext('LongMessage')
            for expr in ('SourceLine', 'Class/SourceLine', 'Method/SourceLine'):
                for sourceline in instance.findall(expr):
                    evidence_list.append(evidence_return(
                        file_path=sourceline.get('sourcepath'),
                        start_line=sourceline.get('start'),
                        end_line=sourceline.get('end')))
            if name in vul_dict:
                # FIX: actually merge the new evidence — the original used
                # bare `+`, which built a list and discarded it.
                vul_dict[name]['evidence_list'] += evidence_list
            else:
                vul_dict[name] = {
                    'cwe': cwe,
                    'description': description,
                    'evidence_list': evidence_list,
                    'remediation': ''
                }
            # Last instance's values win for confidence/severity.
            vul_dict[name]['confidence'] = confidence
            vul_dict[name]['severity'] = severity
        des_path = data.xpath(r'//BugPattern[@category="SECURITY"]')
        for instance in des_path:
            # NOTE(review): BugPattern is matched by ShortDescription while
            # BugInstance used ShortMessage — confirm the two texts agree
            # for the rules in use.
            name = instance.findtext('ShortDescription')
            if name in vul_dict:
                vul_dict[name]['remediation'] = instance.findtext('Details')
            else:
                print('Vul not found')
        result_dict = init_es
        for vul in vul_dict.keys():
            result_dict['vulnerability'] = {
                'name': vul,
                'is_false_positive': False,
                'is_remediated': False,
                'is_deleted': False,
                'tool': 'FindSecBugs',
                'confidence': vul_dict.get(vul).get('confidence'),
                'severity': vul_dict.get(vul).get('severity'),
                'description': vul_dict.get(vul).get('description', ''),
                'remediation': vul_dict.get(vul).get('remediation', ''),
                'created_on': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                'cwe': {
                    'cwe_id': vul_dict.get(vul).get('cwe', 0),
                    'cwe_link':
                    'https://cwe.mitre.org/top25/index.html#CWE-{0}'.format(
                        vul_dict.get(vul).get('cwe', 0))
                },
                'evidences': vul_dict.get(vul).get('evidence_list', [])
            }
            write_results(result_dict)
    except BaseException as e:
        log_exception(e)
    else:
        print('FindSecBugs XML parsing complete')
def parse_owasp_dep_checker(xml_file, user_name, init_es):
    """
    Parse an OWASP Dependency Checker XML report (namespaced schema) and
    push the results to the DB.

    Vulnerabilities are de-duplicated by name across dependencies; each
    affected (module, version) pair becomes one evidence entry.
    """
    try:
        nreport = xml.parse(xml_file)
        root_elem = nreport.getroot()
        NS = "{%s}" % root_elem.nsmap.get(None)
        # FIX: default created_on to 'now' so a report without
        # projectInfo/reportDate cannot raise NameError later.
        created_on = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
        projectInfo = root_elem.find('%sprojectInfo' % NS)
        if projectInfo is not None:
            scan_date = projectInfo.findtext('%sreportDate' % NS, default='')
            if scan_date:
                created_on = parser.parse(scan_date).strftime(
                    "%Y-%m-%d %H:%M:%S")
        dependencies = root_elem.findall('%sdependencies/%sdependency' %
                                         (NS, NS))
        severity_dict = {
            'Low': 1,
            'Medium': 2,
            'High': 3,
            'Information': 0,
            'Info': 0,
        }
        vul_dict = {}
        for dependency in dependencies:
            vulnerabilities_parent = dependency.find('%svulnerabilities' % NS)
            if vulnerabilities_parent is None:
                continue
            module = dependency.findtext('%sfileName' % NS, default='')
            version = dependency.findtext(
                '%sevidenceCollected/%sevidence[@type="version"][@confidence="HIGHEST"]/%svalue'
                % (NS, NS, NS), default='')
            for vuln in vulnerabilities_parent.findall('%svulnerability' % NS):
                vul_name = vuln.findtext('%sname' % NS, default='')
                if vul_name not in vul_dict:
                    vul_dict[vul_name] = {'evidences': []}
                # FIX: unknown severities previously defaulted to the
                # string 'Low'; use the numeric value Low maps to.
                severity = severity_dict.get(
                    vuln.findtext('%sseverity' % NS, default=''), 1)
                description = vuln.findtext('%sdescription' % NS, default='')
                # 'CWE-79 Cross-site Scripting' -> '79'
                cwe = vuln.findtext('%scwe' % NS, default='')
                if cwe:
                    match = re.search(r'(CWE-)(\d{1,9})(.*?)', cwe)
                    if match:
                        cwe = match.group(2)
                # FIX: append this dependency's evidence to the vuln instead
                # of overwriting the list with the per-dependency one (and
                # sharing it between all vulns of that dependency).
                vul_dict[vul_name]['evidences'].append({
                    'url': module,
                    'name': version,
                    'log': ''
                })
                vul_dict[vul_name].update({
                    'name': vul_name,
                    'is_false_positive': False,
                    'is_remediated': False,
                    'tool': 'OWASP Dependency Checker',
                    'confidence': 2,
                    'severity': severity,
                    'description': description,
                    'remediation': '',
                    'created_on': created_on,
                    'cwe': {
                        'cwe_id': cwe,
                        'cwe_link':
                        'https://cwe.mitre.org/data/definitions/{0}.html'.
                        format(cwe)
                    }
                })
        for v in vul_dict.values():
            vul_dict_final = init_es
            vul_dict_final['vulnerability'] = v
            write_results(vul_dict_final)
    except BaseException as e:
        log_exception(e)
    else:
        print('[ + ] OWASP Dependency Checker parsing completed')