def get_findings_json(self, filename, test, active=None, verified=None):
    """Parse a Generic JSON report into a list of unsaved Finding objects.

    Args:
        filename: file-like object containing the JSON report.
        test: the Test the findings belong to.
        active: if not None, override every finding's 'active' flag.
        verified: if not None, override every finding's 'verified' flag.

    Returns:
        list of Finding objects; endpoints are attached on
        finding.unsaved_endpoints.
    """
    data = json.load(filename)
    findings = []
    for item in data['findings']:
        # Pop endpoints out of the dict: they are not Finding kwargs and
        # are converted to Endpoint objects below.
        unsaved_endpoints = item.pop("endpoints", None)
        finding = Finding(**item)
        # manage active/verified override
        if active is not None:
            finding.active = active
        if verified is not None:
            finding.verified = verified
        # manage endpoints
        if unsaved_endpoints:
            finding.unsaved_endpoints = []
            # renamed from 'item': the original shadowed the outer loop variable
            for entry in unsaved_endpoints:
                if isinstance(entry, str):
                    if '://' in entry:  # is the host a full URI?
                        # can raise exception if the host is not a valid URL
                        endpoint = Endpoint.from_uri(entry)
                    else:
                        # can raise exception if there is no way to parse the host
                        endpoint = Endpoint.from_uri('//' + entry)
                else:
                    endpoint = Endpoint(**entry)
                finding.unsaved_endpoints.append(endpoint)
        findings.append(finding)
    return findings
def test_url_normalize(self):
    """Scheme and host are case-normalized; equivalent URLs compare equal."""
    upper = Endpoint.from_uri('HTTP://FOO.BAR/')
    mixed = Endpoint.from_uri('HtTp://foo.BAR/')
    # raw protocol attribute keeps its original case...
    self.assertEqual(upper.protocol, 'HTTP')
    # ...but host and string form are normalized to lowercase
    self.assertEqual(upper.host, 'foo.bar')
    self.assertEqual(str(upper), 'http://foo.bar')
    self.assertEqual(upper, mixed)
def test_ports(self):
    """Port defaults from the scheme when known, otherwise stays None."""
    # well-known scheme: default port is filled in
    known = Endpoint.from_uri('http://foo.bar/')
    self.assertEqual(known.port, 80)
    # unknown scheme: no port can be inferred
    unknown = Endpoint.from_uri('this-scheme-is-unknown://foo.bar/')
    self.assertIsNone(unknown.port)
def validate_endpoints_to_add(endpoints_to_add):
    """Validate a whitespace-separated string of endpoints.

    Returns a tuple (endpoint_list, errors): component lists for every
    endpoint that parsed and cleaned successfully, plus one ValidationError
    per invalid entry.
    """
    errors = []
    endpoint_list = []
    for candidate in endpoints_to_add.split():
        try:
            if '://' in candidate:
                # Full URI: from_uri validates the format and splits
                # it into its components.
                parsed = Endpoint.from_uri(candidate)
            else:
                # Bare-host form ('localhost', '127.0.0.1:80', 'foo.bar/path').
                # Prefixing '//' lets from_uri parse it; the format does not
                # follow RFC 3986 but users write it this way.
                parsed = Endpoint.from_uri('//' + candidate)
            parsed.clean()
            endpoint_list.append([
                parsed.protocol,
                parsed.userinfo,
                parsed.host,
                parsed.port,
                parsed.path,
                parsed.query,
                parsed.fragment,
            ])
        except ValidationError as ves:
            errors.extend(
                ValidationError("Invalid endpoint {}: {}".format(candidate, ve))
                for ve in ves
            )
    return endpoint_list, errors
def test_spacial_char(self):
    """Percent-encoded chars are decoded in .path and re-encoded by str()."""
    with_scheme = Endpoint.from_uri('http://foo.bar/beforeSpace%20afterSpace')
    self.assertEqual(with_scheme.path, 'beforeSpace afterSpace')
    self.assertEqual(str(with_scheme), 'http://foo.bar/beforeSpace%20afterSpace')
    # same behavior for the scheme-less '//host/path' form
    without_scheme = Endpoint.from_uri('//' + 'foo.bar/beforeSpace%20afterSpace')
    self.assertEqual(without_scheme.path, 'beforeSpace afterSpace')
    self.assertEqual(str(without_scheme), 'foo.bar/beforeSpace%20afterSpace')
def test_invalid(self):
    """Malformed URIs and out-of-range ports are rejected."""
    # non-numeric port: rejected already at parse time
    self.assertRaises(ValidationError, Endpoint.from_uri, 'http://127.0.0.1:portNo/')
    # out-of-range ports parse, but fail validation in clean()
    for uri in ('http://127.0.0.1:-1/', 'http://127.0.0.1:66666/'):
        endpoint = Endpoint.from_uri(uri)
        self.assertRaises(ValidationError, endpoint.clean)
    # same validation applies when the Endpoint is built directly
    for bad_port in (-1, 66666):
        endpoint = Endpoint(host='127.0.0.1', port=bad_port)
        self.assertRaises(ValidationError, endpoint.clean)
def get_findings(self, filename, test):
    """Parse a Nuclei JSON-lines report into deduplicated findings.

    Each input line is one JSON object. Findings sharing the same
    (template id + type) hash are merged: their endpoints are accumulated
    and nb_occurences is incremented.
    """
    data = [json.loads(line) for line in filename]
    if len(data) == 0:
        return []

    dupes = {}
    for item in data:
        # key names changed across nuclei versions: camelCase, then kebab-case
        template_id = item.get('templateID')
        if template_id is None:
            template_id = item.get('template-id')
        info = item.get('info')
        name = info.get('name')
        severity = info.get('severity').title()
        # renamed from 'type' so the builtin is no longer shadowed
        item_type = item.get('type')
        matched = item.get('matched')
        if matched is None:
            matched = item.get('matched-at')
        if '://' in matched:  # full URI?
            endpoint = Endpoint.from_uri(matched)
        else:
            endpoint = Endpoint.from_uri('//' + matched)

        finding = Finding(
            title=f"{name}",
            test=test,
            severity=severity,
            nb_occurences=1,
            vuln_id_from_tool=template_id,
        )
        if info.get('description'):
            finding.description = info.get('description')
        if info.get('tags'):
            finding.unsaved_tags = info.get('tags')
        if info.get('reference'):
            finding.references = info.get('reference')
        finding.unsaved_endpoints.append(endpoint)

        dupe_key = hashlib.sha256(
            (template_id + item_type).encode('utf-8')
        ).hexdigest()
        if dupe_key in dupes:
            # merge into the already-seen finding
            finding = dupes[dupe_key]
            if endpoint not in finding.unsaved_endpoints:
                finding.unsaved_endpoints.append(endpoint)
            finding.nb_occurences += 1
        else:
            dupes[dupe_key] = finding
    return list(dupes.values())
def test_less_standard_hosts(self):
    """Hostnames that bend RFC rules (underscores, underscore-prefixed
    labels) still pass validation."""
    parsed = Endpoint.from_uri('http://123_server/')
    parsed.clean()
    # direct construction takes the same validation path
    Endpoint(host='456_desktop').clean()
    Endpoint(host='_invalid._host.com').clean()
def attach_extras(endpoints, requests, responses, finding, date, qid, test):
    """Attach endpoints and request/response pairs to a finding, creating
    the finding first when None is passed."""
    if finding is None:
        # brand-new finding: initialise the unsaved collections
        finding = Finding()
        finding.unsaved_req_resp = list()
        finding.unsaved_endpoints = list()
        finding.test = test
        if date is not None:
            finding.date = date
        finding.vuln_id_from_tool = str(qid)
    elif date is not None and finding.date > date:
        # finding already exists: keep the earliest observation date
        finding.date = date

    for endpoint in endpoints:
        finding.unsaved_endpoints.append(Endpoint.from_uri(endpoint))

    for idx in range(len(requests)):
        # skip entries where both request and response are empty
        if requests[idx] != '' or responses[idx] != '':
            finding.unsaved_req_resp.append({
                "req": requests[idx],
                "resp": responses[idx],
            })
    return finding
def make_finding(self, test, vulnerability):
    """Build a Finding from one vulnerability dict of the report.

    Args:
        test: the Test the finding belongs to.
        vulnerability: dict with name/date_opened/cwes/cves/... keys.

    Returns:
        an unsaved Finding with one endpoint on unsaved_endpoints.
    """
    finding = Finding(test=test)
    finding.title = vulnerability["name"]
    finding.date = vulnerability["date_opened"][:10]  # keep only YYYY-MM-DD
    if vulnerability["cwes"]:
        # entries look like 'CWE-79' -> strip the 4-char 'CWE-' prefix
        finding.cwe = int(vulnerability["cwes"][0][4:])
    if vulnerability["cves"]:
        finding.cve = vulnerability["cves"][0]
    # merged the two nested ifs: only CVSSv3 vectors are stored
    if vulnerability["cvss_version"] == 3 and vulnerability["cvss_vector"]:
        finding.cvssv3 = vulnerability["cvss_vector"]
    finding.url = vulnerability["location"]
    finding.severity = ES_SEVERITIES[vulnerability["severity"]]
    finding.description = vulnerability["description"]
    finding.mitigation = vulnerability["remediation"]
    # idiom fix: 'True if cond else False' is just the condition
    finding.active = vulnerability["status"] == "open"
    if vulnerability["asset_tags"]:
        finding.tags = vulnerability["asset_tags"].split(",")
    finding.unique_id_from_tool = vulnerability["id"]
    finding.unsaved_endpoints = [
        Endpoint.from_uri(vulnerability["location"])
    ]
    return finding
def convert_endpoints(self, affected_targets):
    """Convert Cobalt affected_targets into DefectDojo endpoints.

    Args:
        affected_targets: iterable of URI strings.

    Returns:
        list of Endpoint objects, one per target.
    """
    # comprehension instead of the manual append loop (PERF401)
    return [Endpoint.from_uri(target) for target in affected_targets]
def get_findings(self, filename, test):
    """Parse a Netsparker JSON report into findings deduplicated by title.

    The payload may be bytes (optionally with a UTF-8 BOM) or already str.
    """
    tree = filename.read()
    try:
        # bytes payload, possibly prefixed with a UTF-8 BOM
        data = json.loads(str(tree, 'utf-8-sig'))
    except (TypeError, UnicodeDecodeError):
        # already a str (narrowed from the original bare 'except:')
        data = json.loads(tree)
    dupes = dict()
    for item in data["Vulnerabilities"]:
        # (removed a block of unused empty-string locals from the original)
        title = item["Name"]
        findingdetail = cleantags(item["Description"])
        # CWE id is optional inside the Classification block
        cwe = int(item["Classification"]["Cwe"]) if "Cwe" in item["Classification"] else None
        sev = item["Severity"]
        if sev not in ['Info', 'Low', 'Medium', 'High', 'Critical']:
            sev = 'Info'
        mitigation = cleantags(item["RemedialProcedure"])
        references = cleantags(item["RemedyReferences"])
        url = item["Url"]
        impact = cleantags(item["Impact"])
        dupe_key = title
        request = item["HttpRequest"]["Content"]
        response = item["HttpResponse"]["Content"]

        finding = Finding(title=title,
                          test=test,
                          description=findingdetail,
                          severity=sev.title(),
                          mitigation=mitigation,
                          impact=impact,
                          references=references,
                          cwe=cwe,
                          static_finding=True)

        if (item["Classification"] is not None) and (item["Classification"]["Cvss"] is not None) and (item["Classification"]["Cvss"]["Vector"] is not None):
            finding.cvssv3 = item["Classification"]["Cvss"]["Vector"]

        finding.unsaved_req_resp = [{"req": request, "resp": response}]
        finding.unsaved_endpoints = [Endpoint.from_uri(url)]

        if dupe_key in dupes:
            # same title already seen: merge endpoints and req/resp pairs
            find = dupes[dupe_key]
            find.unsaved_req_resp.extend(finding.unsaved_req_resp)
            find.unsaved_endpoints.extend(finding.unsaved_endpoints)
        else:
            dupes[dupe_key] = finding
    return list(dupes.values())
def test_truncates_large_attributes(self):
    """Oversized URI components are truncated to the model's field limits
    (path/fragment: 500 chars, query: 1000 chars)."""
    long_path = "foo" * 1000
    long_query = "bar" * 1000
    long_fragment = "baz" * 1000
    ep = Endpoint.from_uri(
        'http://[email protected]:8080/{}?{}#{}'.format(long_path, long_query, long_fragment))
    self.assertEqual(len(ep.path), 500)
    self.assertEqual(len(ep.query), 1000)
    self.assertEqual(len(ep.fragment), 500)
def test_url_full(self):
    """Every component of a fully-populated URI lands in its own attribute."""
    ep = Endpoint.from_uri('http://[email protected]:8080/path1/path2?key1=value&no_value_key#fragment1')
    self.assertEqual(ep.protocol, 'http')
    self.assertEqual(ep.userinfo, 'alice')
    self.assertEqual(ep.host, 'foo.bar')
    self.assertEqual(ep.port, 8080)
    # the URI path begins with '/' but Endpoint stores it root-less
    self.assertEqual(ep.path, 'path1/path2')
    self.assertEqual(ep.query, 'key1=value&no_value_key')
    self.assertEqual(ep.fragment, 'fragment1')
def __xml_dynamic_flaw_to_finding(cls, app_id, xml_node, mitigation_text, test):
    """Build a dynamic-analysis Finding from a Veracode XML flaw node."""
    # start from the common flaw conversion, then flip the analysis-kind flags
    finding = cls.__xml_flaw_to_finding(app_id, xml_node, mitigation_text, test)
    finding.static_finding = False
    finding.dynamic_finding = True
    # for dynamic flaws the affected URL lives in the 'url' attribute
    finding.unsaved_endpoints = [Endpoint.from_uri(xml_node.attrib.get('url'))]
    return finding
def test_noscheme(self):
    """A '//host:port' URI yields only host and port; everything else is None."""
    ep = Endpoint.from_uri('//' + 'localhost:22')
    self.assertEqual(ep.host, 'localhost')
    self.assertEqual(ep.port, 22)
    # all other components (and the product FK) stay unset
    for attr in ('protocol', 'userinfo', 'path', 'query', 'fragment', 'product'):
        self.assertIsNone(getattr(ep, attr))
def process_scandetail(self, scan, test, dupes):
    """Convert one Nikto XML scan element into findings, merging duplicates
    (same description hash) into a single finding in `dupes`."""
    for item in scan.findall('item'):
        # Title: first sentence of the description, capped at 900 chars
        description = item.findtext("description")
        sentences = re.split(
            r'(?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|\?)\s', description)
        if len(sentences) > 0:
            titleText = sentences[0][:900]
        else:
            titleText = description[:900]

        # Description, laid out the same way as the JSON variant
        description = "\n".join([
            f"**Host:** `{item.findtext('iplink')}`",
            f"**Description:** `{item.findtext('description')}`",
            f"**HTTP Method:** `{item.attrib.get('method')}`",
        ])

        # Nikto doesn't assign severity: default to Info, bump to Medium
        # when an OSVDB reference exists
        severity = "Info"
        if item.get('osvdbid') is not None and "0" != item.get('osvdbid'):
            severity = "Medium"

        finding = Finding(
            title=titleText,
            test=test,
            description=description,
            severity=severity,
            dynamic_finding=True,
            static_finding=False,
            vuln_id_from_tool=item.attrib.get('id'),
            nb_occurences=1,
        )

        # robustness fix: always initialise the list so the dedup merge
        # below can extend it even when endpoint parsing failed
        finding.unsaved_endpoints = []
        try:
            ip = item.findtext("iplink")
            endpoint = Endpoint.from_uri(ip)
            finding.unsaved_endpoints = [endpoint]
        except ValidationError:
            # logger.warn is deprecated in favour of logger.warning
            logger.warning("Invalid iplink in the report")

        dupe_key = hashlib.sha256(description.encode("utf-8")).hexdigest()
        if dupe_key in dupes:
            find = dupes[dupe_key]
            find.description += "\n-----\n" + finding.description
            find.unsaved_endpoints.extend(finding.unsaved_endpoints)
            find.nb_occurences += 1
        else:
            dupes[dupe_key] = finding
def get_findings(self, filename, test):
    """Parse an Invicti/Netsparker-style JSON report into findings
    deduplicated by title.

    NOTE(review): the 'Generated' timestamp is parsed with %H (24-hour)
    together with %p (AM/PM), which is an unusual combination — it appears
    to match the report format, but confirm against a real report before
    changing it.
    """
    data = json.load(filename)
    dupes = dict()
    # report generation timestamp; only the date part is kept
    scan_date = datetime.datetime.strptime(data["Generated"], "%d/%m/%Y %H:%M %p").date()
    for item in data["Vulnerabilities"]:
        title = item["Name"]
        findingdetail = html2text.html2text(item.get("Description", ""))
        # CWE id is optional inside the Classification block
        cwe = int(item["Classification"]
                  ["Cwe"]) if "Cwe" in item["Classification"] else None
        sev = item["Severity"]
        # anything outside the known scale is downgraded to Info
        if sev not in ['Info', 'Low', 'Medium', 'High', 'Critical']:
            sev = 'Info'
        mitigation = html2text.html2text(item.get("RemedialProcedure", ""))
        references = html2text.html2text(item.get("RemedyReferences", ""))
        url = item["Url"]
        impact = html2text.html2text(item.get("Impact", ""))
        dupe_key = title
        # raw HTTP request/response, with placeholders when empty
        request = item["HttpRequest"]["Content"]
        if request is None or len(request) <= 0:
            request = "Request Not Found"
        response = item["HttpResponse"]["Content"]
        if response is None or len(response) <= 0:
            response = "Response Not Found"
        finding = Finding(title=title,
                          test=test,
                          description=findingdetail,
                          severity=sev.title(),
                          mitigation=mitigation,
                          impact=impact,
                          date=scan_date,
                          references=references,
                          cwe=cwe,
                          static_finding=True)
        if (item["Classification"] is not None) and (
                item["Classification"]["Cvss"] is not None) and (item["Classification"]["Cvss"]["Vector"] is not None):
            finding.cvssv3 = item["Classification"]["Cvss"]["Vector"]
        finding.unsaved_req_resp = [{"req": request, "resp": response}]
        finding.unsaved_endpoints = [Endpoint.from_uri(url)]
        if dupe_key in dupes:
            # same title seen before: merge req/resp pairs and endpoints
            find = dupes[dupe_key]
            find.unsaved_req_resp.extend(finding.unsaved_req_resp)
            find.unsaved_endpoints.extend(finding.unsaved_endpoints)
        else:
            dupes[dupe_key] = finding
    return list(dupes.values())
def combine_findings(self, finding, issue):
    """Merge one issue into an aggregated finding dict, extending its
    description, evidence and endpoint list in place."""
    description = html2text.html2text(issue.get('description_html'))
    if description:
        # idiom fix: membership test instead of '.count(x) > 0'
        if description not in finding['Description']:
            finding['Description'] += description + "\n\n"
    if issue.get('evidence'):
        finding['Evidence'] = finding['Evidence'] + self.parse_evidence(
            issue.get('evidence'))
    finding['Endpoints'].append(
        Endpoint.from_uri(issue['origin'] + issue['path']))
def create_findings(self, items, test):
    """Build Finding objects from pre-aggregated item dicts.

    NOTE(review): aggregateKeys is never checked against `dupes` before
    assignment, so two items producing the same key silently overwrite
    each other — confirm whether upstream guarantees key uniqueness.
    """
    # Dictionary to hold the aggregated findings with:
    #  - key: the concatenated aggregate keys
    #  - value: the finding
    dupes = dict()
    for details in items:
        # skip entries without any description
        if details.get('Description') == '':
            continue
        aggregateKeys = "{}{}{}{}".format(details.get('Title'), details.get('Description'), details.get('CWE'), details.get('Endpoint'))
        find = Finding(title=details.get('Title'),
                       description=details.get('Description'),
                       test=test,
                       severity=details.get('Severity'),
                       mitigation=details.get('Mitigation'),
                       references=details.get('References'),
                       impact=details.get('Impact'),
                       cwe=int(details.get('CWE')),
                       false_p=False,
                       duplicate=False,
                       out_of_scope=False,
                       mitigated=None,
                       static_finding=False,
                       dynamic_finding=True,
                       nb_occurences=1)
        if len(details.get('Request')) > 0:
            # requests/responses are concatenated with a 'SPLITTER' marker;
            # [:-1] drops the trailing empty chunk after the last marker
            requests = details.get('Request').split('SPLITTER')[:-1]
            responses = details.get('Response').split('SPLITTER')[:-1]
            unsaved_req_resp = list()
            for index in range(0, len(requests)):
                unsaved_req_resp.append({
                    "req": requests[index],
                    "resp": responses[index]
                })
            find.unsaved_req_resp = unsaved_req_resp
        find.unsaved_endpoints = list()
        dupes[aggregateKeys] = find
        # one Endpoint per URL listed for this item
        for url in details.get('Endpoint'):
            find.unsaved_endpoints.append(Endpoint.from_uri(url))
    return list(dupes.values())
def _convert_attack_vectors_to_endpoints(
        self, attack_vectors: List[dict]) -> List['Endpoint']:
    """
    Takes a list of Attack Vectors dictionaries from the WhiteHat vuln API
    and converts them to Defect Dojo Endpoints

    Args:
        attack_vectors: The list of Attack Vector dictionaries

    Returns:
        A list of Defect Dojo Endpoints
    """
    # This should be in the Endpoint class should it not?
    # comprehension instead of the manual append loop (PERF401)
    return [
        Endpoint.from_uri(attack_vector['request']['url'])
        for attack_vector in attack_vectors
    ]
def get_findings(self, file, test):
    """Parse a ZAP XML report into a list of findings, one per alert item.

    NOTE(review): scanner_confidence is looked up in MAPPING_CONFIDENCE
    using the 'riskcode' element rather than a confidence element — this
    looks like it may be reading the wrong field; confirm against the ZAP
    XML schema.
    """
    tree = ET.parse(file)
    items = list()
    for node in tree.findall("site"):
        for item in node.findall("alerts/alertitem"):
            finding = Finding(
                test=test,
                title=item.findtext("alert"),
                description=html2text(item.findtext("desc")),
                severity=self.MAPPING_SEVERITY.get(
                    item.findtext("riskcode")),
                scanner_confidence=self.MAPPING_CONFIDENCE.get(
                    item.findtext("riskcode")),
                mitigation=html2text(item.findtext("solution")),
                references=html2text(item.findtext("reference")),
                dynamic_finding=True,
                static_finding=False,
                vuln_id_from_tool=item.findtext("pluginid"),
            )
            # CWE id is optional and must be numeric
            if item.findtext("cweid") is not None and item.findtext(
                    "cweid").isdigit():
                finding.cwe = int(item.findtext("cweid"))
            finding.unsaved_endpoints = []
            finding.unsaved_req_resp = []
            for instance in item.findall("instances/instance"):
                endpoint = Endpoint.from_uri(instance.findtext("uri"))
                # keep query/fragment in the request line for reference
                request = f"{instance.findtext('method')} {endpoint.query}#{endpoint.fragment}"
                # we remove query and fragment because with some configuration
                # the tool generate them on-the-go and it produces a lot of fake endpoints
                endpoint.query = None
                endpoint.fragment = None
                finding.unsaved_endpoints.append(endpoint)
                finding.unsaved_req_resp.append({
                    "req": request,
                    "resp": f"{instance.findtext('evidence')}"
                })
            items.append(finding)
    return items
def __extract_finding(self, raw_finding, metadata: StackHawkScanMetadata, test) -> Finding:
    """Convert one raw StackHawk finding into a DefectDojo Finding.

    Builds one endpoint per affected path and collapses the per-path
    triage statuses into finding-level false_p / risk_accepted flags.
    """
    steps_to_reproduce = "Use a specific message link and click 'Validate' to see the cURL!\n\n"

    host = raw_finding['host']
    endpoints = []

    paths = raw_finding['paths']
    for path in paths:
        # one reproduction line per affected path, linking back to StackHawk
        steps_to_reproduce += '**' + path['path'] + '**' +\
            self.__endpoint_status(path['status']) +\
            '\n' + self.__hyperlink(path['pathURL']) + '\n'
        endpoint = Endpoint.from_uri(host + path['path'])
        endpoints.append(endpoint)

    # the finding is only false-positive / risk-accepted when EVERY path is
    are_all_endpoints_risk_accepted = self.__are_all_endpoints_in_status(paths, 'RISK_ACCEPTED')
    are_all_endpoints_false_positive = self.__are_all_endpoints_in_status(paths, 'FALSE_POSITIVE')

    finding = Finding(
        test=test,
        title=raw_finding['pluginName'],
        date=parse_datetime(metadata.date),
        severity=raw_finding['severity'],
        description="View this finding in the StackHawk platform at:\n" +
        self.__hyperlink(raw_finding['findingURL']),
        steps_to_reproduce=steps_to_reproduce,
        component_name=metadata.component_name,
        component_version=metadata.component_version,
        static_finding=metadata.static_finding,
        dynamic_finding=metadata.dynamic_finding,
        vuln_id_from_tool=raw_finding['pluginId'],
        nb_occurences=raw_finding['totalCount'],
        service=metadata.service,
        false_p=are_all_endpoints_false_positive,
        risk_accepted=are_all_endpoints_risk_accepted
    )

    finding.unsaved_endpoints.extend(endpoints)
    return finding
def get_items(self, tree, test):
    """Convert a parsed report tree into findings, one per site item.

    @return items A list of Finding instances; each carries the site's
                  main host plus any per-item URIs on unsaved_endpoints.
    """
    items = list()
    for node in tree.findall('site'):
        site = Site(node)
        # the site itself is always the first endpoint of each finding
        main_host = Endpoint(host=site.host, port=site.port)
        for item in site.items:
            # riskdesc looks like 'High (Medium)': first word is the severity
            severity = item.riskdesc.split(' ', 1)[0]
            # idiom fix: build the references string with join, not '+=' loop
            references = ''.join(ref + "\n" for ref in item.ref)
            find = Finding(
                title=item.name,
                cwe=item.cwe,
                description=strip_tags(item.desc),
                test=test,
                severity=severity,
                mitigation=strip_tags(item.resolution),
                references=references,
                false_p=False,
                duplicate=False,
                out_of_scope=False,
                mitigated=None,
                impact="No impact provided",
            )
            find.unsaved_endpoints = [main_host]
            for i in item.items:
                find.unsaved_endpoints.append(Endpoint.from_uri(i['uri']))
            items.append(find)
    return items
def get_findings(self, file, test): tree = parse(file) # get root of tree. root = tree.getroot() namespace = self.get_namespace(root) # check if xml file hash correct root or not. if 'Benchmark' not in root.tag: raise ValueError( "This doesn't seem to be a valid Openscap vulnerability scan xml file." ) if 'http://checklists.nist.gov/xccdf/' not in namespace: raise ValueError( "This doesn't seem to be a valid Openscap vulnerability scan xml file." ) # read rules rules = {} for rule in root.findall('.//{0}Rule'.format(namespace)): rules[rule.attrib['id']] = { "title": rule.findtext('./{0}title'.format(namespace)) } # go to test result test_result = tree.find('./{0}TestResult'.format(namespace)) ips = [] # append all target in a list. for ip in test_result.findall('./{0}target'.format(namespace)): ips.append(ip.text) for ip in test_result.findall('./{0}target-address'.format(namespace)): ips.append(ip.text) dupes = dict() # run both rule, and rule-result in parallel so that we can get title for failed test from rule. for rule_result in test_result.findall( './{0}rule-result'.format(namespace)): result = rule_result.findtext('./{0}result'.format(namespace)) # find only failed report. if "fail" in result: # get rule corresponding to rule-result rule = rules[rule_result.attrib['idref']] title = rule['title'] description = "\n".join([ "**IdRef:** `" + rule_result.attrib['idref'] + "`", "**Title:** `" + title + "`", ]) cves = [] for cve in rule_result.findall( "./{0}ident[@system='http://cve.mitre.org']".format( namespace)): cves.append(cve.text) # if finding has only one cve then ok. otherwise insert it in description field. if len(cves) > 1: cve_desc = "" for cve in cves: cve_desc += '[{0}](https://cve.mitre.org/cgi-bin/cvename.cgi?name={0})'.format( cve) + ", " description += "**Related CVE's:** " + cve_desc[:-2] # get severity. 
severity = rule_result.attrib.get( 'severity', 'medium').lower().capitalize() # according to the spec 'unknown' is a possible value if severity == 'Unknown': severity = 'Info' references = "" # get references. for check_content in rule_result.findall( './{0}check/{0}check-content-ref'.format(namespace)): references += "**name:** : " + check_content.attrib[ 'name'] + "\n" references += "**href** : " + check_content.attrib[ 'href'] + "\n" finding = Finding( title=title, description=description, severity=severity, references=references, dynamic_finding=True, static_finding=False, unique_id_from_tool=rule_result.attrib['idref'], ) if len(cves) == 1: finding.cve = cves[0] finding.unsaved_endpoints = [] for ip in ips: try: validate_ipv46_address(ip) endpoint = Endpoint(host=ip) except ValidationError: if '://' in ip: endpoint = Endpoint.from_uri(ip) else: endpoint = Endpoint.from_uri('//' + ip) finding.unsaved_endpoints.append(endpoint) dupe_key = hashlib.sha256( references.encode('utf-8')).hexdigest() if dupe_key in dupes: find = dupes[dupe_key] if finding.references: find.references = finding.references find.unsaved_endpoints.extend(finding.unsaved_endpoints) else: dupes[dupe_key] = finding return list(dupes.values())
def test_ip(self):
    """A bare IPv4 address is accepted via URI parsing and directly."""
    for ep in (Endpoint.from_uri('http://127.0.0.1/'),
               Endpoint(host='127.0.0.1')):
        self.assertEqual(ep.host, '127.0.0.1')
def test_paths(self):
    """A missing path, or a bare '/', is normalized to None."""
    for uri in ('https://foo.bar', 'https://foo.bar/'):
        self.assertIsNone(Endpoint.from_uri(uri).path)
def get_item(self, item_node, report_date):
    """Convert one Arachni issue node into a Finding, with the endpoint and
    request/response pair attached as unsaved_* attributes."""
    # URL: prefer the attack vector's action, fall back to the response URL
    if 'vector' in item_node and 'action' in item_node['vector']:
        url = item_node['vector']['action']
    else:
        url = item_node['response']['url']

    # flatten the request dict into a readable header-style blob
    request = item_node['request']
    req = ''
    for key, value in request.items():
        req += str(key) + ": " + str(value) + "\n\n"

    # flatten the response the same way, appending the body last
    respz = item_node['response']
    resp = ''
    for key, value in respz.items():
        if key != 'body':
            resp += str(key) + ": " + str(value) + "\n\n"
    resp += "\n\n\n" + force_str(respz['body'])

    unsaved_req_resp = list()
    if request is not None and respz is not None:
        unsaved_req_resp.append({"req": req, "resp": resp})

    endpoint = Endpoint.from_uri(url)

    description = item_node.get('description', 'N/A')
    description = html2text.html2text(description)

    remediation = item_node['remedy_guidance'] if 'remedy_guidance' in item_node else 'n/a'
    if remediation:
        remediation = html2text.html2text(remediation)

    references = list(item_node['references'].values()) if 'references' in item_node else None
    # BUG FIX: the original joined over 'references' unconditionally, which
    # raised TypeError when the issue had no 'references' key (None).
    if references is not None:
        references = '<br/><br/>'.join(reference for reference in references)
        if references:
            references = html2text.html2text(references)

    severity = item_node.get('severity', 'Info').capitalize()
    if 'Informational' == severity:
        severity = 'Info'

    # Finding and Endpoint objects returned have not been saved to the database
    finding = Finding(title=item_node['name'],
                      date=report_date,
                      severity=severity,
                      description=description,
                      mitigation=remediation,
                      references=references,
                      impact="No impact provided",
                      cwe=item_node.get('cwe'),
                      vuln_id_from_tool=item_node.get('digest'),
                      )
    finding.unsaved_endpoints = [endpoint]
    finding.unsaved_req_resp = unsaved_req_resp
    finding.unsaved_tags = item_node.get('tags')
    return finding
def get_item(vuln, test):
    """Convert one GitLab DAST vulnerability dict into a Finding.

    Returns None for non-DAST categories.
    """
    if vuln["category"] != "dast":
        return None

    # scanner_confidence
    scanner_confidence = get_confidence_numeric(vuln["confidence"])

    # description: scanner name plus the message/description text
    description = f"Scanner: {vuln['scanner']['name']}\n"
    if "message" in vuln:
        description += f"{vuln['message']}\n"
    elif "description" in vuln:
        description += f"{vuln['description']}\n"

    finding = Finding(
        test=test,  # Test
        nb_occurences=1,  # int
        scanner_confidence=scanner_confidence,  # int
        description=description,  # str
        static_finding=False,
        dynamic_finding=True,
    )

    # date
    if "discovered_at" in vuln:
        finding.date = datetime.strptime(vuln["discovered_at"], "%Y-%m-%dT%H:%M:%S.%f")

    # id
    if "id" in vuln:
        finding.unique_id_from_tool = vuln["id"]

    # title (fallback to using id as a title)
    if "name" in vuln:
        finding.title = vuln["name"]
    else:
        finding.title = finding.unique_id_from_tool

    # cwe: first identifier of type 'cwe'
    for identifier in vuln["identifiers"]:
        if "cwe" == identifier["type"].lower():
            finding.cwe = int(identifier["value"])
            break

    # references: one URL per line.
    # robustness fix: .get() avoids a KeyError when 'links' is absent,
    # matching the '"x" in vuln' style used for every other optional key.
    if vuln.get("links"):
        ref = ""
        for link in vuln["links"]:
            ref += f"{link['url']}\n"
        ref = ref[:-1]  # drop the trailing newline
        finding.references = ref

    # severity
    if "severity" in vuln:
        finding.severity = vuln["severity"]

    # endpoint built from hostname + path
    location = vuln.get("location", {})
    if "hostname" in location and "path" in location:
        url_str = f"{location['hostname']}{location['path']}"
        finding.unsaved_endpoints = [Endpoint.from_uri(url_str)]

    # mitigation
    if "solution" in vuln:
        finding.mitigation = vuln["solution"]

    return finding
def get_item(vuln, test):
    """Convert one Edgescan vulnerability dict into a Finding."""
    finding = Finding(
        test=test,
        unique_id_from_tool=vuln["id"],
        nb_occurences=1,
    )

    # Defining variables
    location = vuln["location"]

    # Endpoint: prefer the full URL, then the domain, then the bare IP.
    if "url" in location and location["url"] and location["url"] != "None":
        endpoint = Endpoint.from_uri(location["url"])
    # fallback to using old way of creating endpoints
    elif "domain" in location and location["domain"] and location["domain"] != "None":
        endpoint = Endpoint(host=str(location["domain"]))
    else:  # no domain, use ip instead
        if "ip" in location and location["ip"] and location["ip"] != "None":
            endpoint = Endpoint(host=str(location["ip"]))
    # check for protocol
    if (
        "applicationProtocol" in location
        and location["applicationProtocol"]
        and location["applicationProtocol"] != "None"
    ):
        endpoint.protocol = location["applicationProtocol"]
    # check for port
    # BUG FIX: the original tested `location["port"] in location`, i.e.
    # whether the port VALUE was a key of the dict; test the value's
    # truthiness instead, like every other field here.
    if (
        "port" in location
        and location["port"]
        and location["port"] != "None"
    ):
        endpoint.port = location["port"]
    finding.unsaved_endpoints = [endpoint]  # assigning endpoint

    # Title
    finding.title = vuln["name"]

    # Description + CVEs
    description = vuln["classification"]
    cves = "no match"
    if "CVE-NO-MATCH" not in vuln["kb"]["cves"]:
        finding.cve = vuln["kb"]["cves"][0]
        # idiom: join instead of a '+=' loop with trailing-comma trim
        cves = ", ".join(vuln["kb"]["cves"])
    finding.description = description + "; CVEs: " + cves

    finding.severity = vuln["severity"].title()

    # Date: drop the ':' inside the timezone offset so %z can parse it
    date_str = vuln["createdOn"]
    date_str = date_str[: len(date_str) - 3] + date_str[-2:]
    finding.date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f%z")

    # Component Name and Version, extracted from the CPE when present
    if (
        "applicationCpe" in location
        and location["applicationCpe"]
        and location["applicationCpe"] != "None"
    ):
        cpe = CPE(location["applicationCpe"])
        component_name = cpe.get_vendor()[0] + ":" if len(
            cpe.get_vendor()) > 0 else ""
        component_name += cpe.get_product()[0] if len(
            cpe.get_product()) > 0 else ""
        finding.component_name = component_name if component_name else None
        finding.component_version = (
            cpe.get_version()[0] if len(cpe.get_version()) > 0 else None
        )

    return finding