def get_findings_json(self, filename, test, active=None, verified=None):
    """Parse a generic JSON findings file into a list of Finding objects.

    :param filename: file-like object containing the JSON report
    :param test: the Test the findings belong to (stored on each Finding by caller)
    :param active: optional override for every finding's ``active`` flag
    :param verified: optional override for every finding's ``verified`` flag
    :return: list of unsaved Finding objects
    """
    data = json.load(filename)
    findings = []
    for item in data['findings']:
        # Pop the endpoints first: Finding(**item) would fail on an
        # unexpected 'endpoints' keyword argument.
        unsaved_endpoints = item.pop("endpoints", None)
        finding = Finding(**item)
        # Caller-supplied overrides for active/verified.
        if active is not None:
            finding.active = active
        if verified is not None:
            finding.verified = verified
        if unsaved_endpoints:
            finding.unsaved_endpoints = []
            # Renamed loop variable: the original shadowed the outer 'item'.
            for raw_endpoint in unsaved_endpoints:
                if isinstance(raw_endpoint, str):
                    if '://' in raw_endpoint:  # is the host a full URI?
                        # can raise an exception if the host is not a valid URL
                        endpoint = Endpoint.from_uri(raw_endpoint)
                    else:
                        # can raise an exception if there is no way to parse the host
                        endpoint = Endpoint.from_uri('//' + raw_endpoint)
                else:
                    endpoint = Endpoint(**raw_endpoint)
                finding.unsaved_endpoints.append(endpoint)
        findings.append(finding)
    return findings
class TestViewEndpointMetaData(TestCase):
    """View-level tests for rendering of endpoint custom fields (metadata)."""

    def setUp(self):
        product = Product()
        product.Name = 'Test Product'
        product.Description = 'Product for Testing Endpoint functionality'
        product.save()

        endpoint = Endpoint()
        endpoint.product = product
        endpoint.host = '127.0.0.1'
        endpoint.save()

        self.p = product
        self.e = endpoint
        self.util = EndpointMetaDataTestUtil()
        self.util.save_custom_field(self.e, 'TestField', 'TestValue')

    def test_view_endpoint_without_metadata_has_no_additional_info(self):
        # Remove the only custom field so the metadata section must not render.
        self.util.delete_custom_field(self.e, 'TestField')
        request = self.util.create_get_request(self.util.create_user(True), 'endpoint/1')
        response = views.view_endpoint(request, 1)
        self.assertNotContains(response, 'Additional Information')

    def test_view_endpoint_with_metadata_has_additional_info(self):
        request = self.util.create_get_request(self.util.create_user(True), 'endpoint/1')
        response = views.view_endpoint(request, 1)
        self.assertContains(response, "Additional Information")
        self.assertContains(response, 'TestField')
        self.assertContains(response, 'TestValue')
def test_url_normalize(self):
    # RFC 3986: scheme and host compare case-insensitively.
    upper_case = Endpoint.from_uri('HTTP://FOO.BAR/')
    mixed_case = Endpoint.from_uri('HtTp://foo.BAR/')
    self.assertEqual(upper_case.protocol, 'HTTP')  # original casing is preserved
    self.assertEqual(upper_case.host, 'foo.bar')   # host is normalized to lowercase
    self.assertEqual(str(upper_case), 'http://foo.bar')
    self.assertEqual(upper_case, mixed_case)
def test_spacial_char(self):
    # Percent-encoded spaces are decoded in .path but re-encoded by str().
    for prefix, rendered in (
            ('http://', 'http://foo.bar/beforeSpace%20afterSpace'),
            ('//', 'foo.bar/beforeSpace%20afterSpace'),
    ):
        endpoint = Endpoint.from_uri(prefix + 'foo.bar/beforeSpace%20afterSpace')
        self.assertEqual(endpoint.path, 'beforeSpace afterSpace')
        self.assertEqual(str(endpoint), rendered)
def test_ports(self):
    # A well-known scheme fills in its default port.
    known = Endpoint.from_uri('http://foo.bar/')
    self.assertEqual(known.port, 80)
    # An unknown scheme cannot supply a port.
    unknown = Endpoint.from_uri('this-scheme-is-unknown://foo.bar/')
    self.assertIsNone(unknown.port)
def validate_endpoints_to_add(endpoints_to_add):
    """Parse a whitespace-separated string of endpoints.

    Returns a tuple ``(endpoint_list, errors)``: ``endpoint_list`` holds the
    component list of every endpoint that validated cleanly, ``errors``
    collects a ValidationError per message for the ones that did not.
    """
    errors = []
    endpoint_list = []
    for candidate in endpoints_to_add.split():
        try:
            if '://' in candidate:
                # Full URI: from_uri validates the format and splits it
                # into its components.
                endpoint_ins = Endpoint.from_uri(candidate)
            else:
                # Bare '//localhost', '//127.0.0.1:80', '//foo.bar/path'
                # style input doesn't follow RFC 3986 but users type it;
                # the '//' prefix lets from_uri parse the host correctly.
                endpoint_ins = Endpoint.from_uri('//' + candidate)
            endpoint_ins.clean()
            endpoint_list.append([
                endpoint_ins.protocol,
                endpoint_ins.userinfo,
                endpoint_ins.host,
                endpoint_ins.port,
                endpoint_ins.path,
                endpoint_ins.query,
                endpoint_ins.fragment,
            ])
        except ValidationError as ves:
            errors.extend(
                ValidationError("Invalid endpoint {}: {}".format(candidate, ve))
                for ve in ves
            )
    return endpoint_list, errors
def setUp(self):
    # One product with a single endpoint is enough for these tests.
    product = Product()
    product.Name = 'Test Product'
    product.Description = 'Product for Testing Endpoint functionality'
    product.save()

    endpoint = Endpoint()
    endpoint.product = product
    endpoint.host = '127.0.0.1'
    endpoint.save()
def test_invalid(self):
    # A non-numeric port fails already at parse time.
    self.assertRaises(ValidationError, Endpoint.from_uri, 'http://127.0.0.1:portNo/')
    # Out-of-range ports parse but must be rejected by clean(), whether
    # the endpoint came from a URI or straight from the constructor.
    for bad_port in (-1, 66666):
        parsed = Endpoint.from_uri('http://127.0.0.1:{}/'.format(bad_port))
        self.assertRaises(ValidationError, parsed.clean)
        constructed = Endpoint(host='127.0.0.1', port=bad_port)
        self.assertRaises(ValidationError, constructed.clean)
def test_less_standard_hosts(self):
    # Underscored hostnames are not strictly RFC-compliant but occur in
    # the wild; clean() has to accept them.
    Endpoint.from_uri('http://123_server/').clean()
    for raw_host in ('456_desktop', '_invalid._host.com'):
        Endpoint(host=raw_host).clean()
def setUp(self):
    product = Product()
    product.Name = 'Test Product'
    product.Description = 'Product for Testing Endpoint functionality'
    product.save()

    endpoint = Endpoint()
    endpoint.product = product
    endpoint.host = '127.0.0.1'
    endpoint.save()

    # The views under test need the system settings fixture loaded.
    call_command('loaddata', 'dojo/fixtures/system_settings', verbosity=0)
def setUp(self):
    product = Product()
    product.Name = 'Test Product'
    product.Description = 'Product for Testing Endpoint functionality'
    product.save()

    endpoint = Endpoint()
    endpoint.product = product
    endpoint.host = '127.0.0.1'
    endpoint.save()

    # Attach a custom field to both the endpoint and its product.
    EndpointMetaDataTestUtil.save_custom_field(endpoint, 'TestField', 'TestValue')
    EndpointMetaDataTestUtil.save_custom_field(product, 'TestProductField', 'TestProductValue')
def setUp(self):
    # Product/endpoint pair stored on self so individual tests can reach them.
    self.p = Product()
    self.p.Name = 'Test Product'
    self.p.Description = 'Product for Testing Endpoint functionality'
    self.p.save()
    self.e = Endpoint()
    self.e.product = self.p
    self.e.host = '127.0.0.1'
    self.e.save()
    # Helper for custom-field management, plus one pre-created field.
    self.util = EndpointMetaDataTestUtil()
    self.util.save_custom_field(self.e, 'TestField', 'TestValue')
def get_findings(self, filename, test):
    """Parse a Nuclei JSON-lines report into deduplicated Findings.

    Findings are aggregated on (template id, match type); repeated matches
    add the endpoint and bump nb_occurences.
    """
    data = [json.loads(line) for line in filename]
    if len(data) == 0:
        return []

    dupes = {}
    for item in data:
        # Field names differ between nuclei versions (camelCase vs kebab-case).
        template_id = item.get('templateID')
        if template_id is None:
            template_id = item.get('template-id')
        info = item.get('info')
        name = info.get('name')
        severity = info.get('severity').title()
        # Renamed from 'type': don't shadow the builtin.
        item_type = item.get('type')
        matched = item.get('matched')
        if matched is None:
            matched = item.get('matched-at')
        if '://' in matched:
            endpoint = Endpoint.from_uri(matched)
        else:
            endpoint = Endpoint.from_uri('//' + matched)

        finding = Finding(
            title=f"{name}",
            test=test,
            severity=severity,
            nb_occurences=1,
            vuln_id_from_tool=template_id,
        )
        if info.get('description'):
            finding.description = info.get('description')
        if info.get('tags'):
            finding.unsaved_tags = info.get('tags')
        if info.get('reference'):
            finding.references = info.get('reference')
        # Initialize explicitly instead of appending to an attribute that a
        # fresh Finding may not have set up yet.
        finding.unsaved_endpoints = [endpoint]

        dupe_key = hashlib.sha256(
            (template_id + item_type).encode('utf-8')
        ).hexdigest()
        if dupe_key in dupes:
            finding = dupes[dupe_key]
            if endpoint not in finding.unsaved_endpoints:
                finding.unsaved_endpoints.append(endpoint)
            finding.nb_occurences += 1
        else:
            dupes[dupe_key] = finding

    return list(dupes.values())
def setUp(self):
    product = Product()
    product.Name = 'Test Product'
    product.Description = 'Product for Testing Endpoint functionality'
    product.save()

    endpoint = Endpoint()
    endpoint.product = product
    endpoint.host = '127.0.0.1'
    endpoint.save()

    # Custom fields on both levels plus the system settings fixture.
    EndpointMetaDataTestUtil.save_custom_field(endpoint, 'TestField', 'TestValue')
    EndpointMetaDataTestUtil.save_custom_field(product, 'TestProductField', 'TestProductValue')
    call_command('loaddata', 'dojo/fixtures/system_settings', verbosity=0)
def make_finding(self, test, vulnerability):
    """Map one Edgescan vulnerability dict onto an unsaved Finding.

    :param test: the Test to attach the finding to
    :param vulnerability: one vulnerability entry from the Edgescan API
    :return: the populated Finding
    """
    finding = Finding(test=test)
    finding.title = vulnerability["name"]
    finding.date = vulnerability["date_opened"][:10]  # keep only YYYY-MM-DD
    if vulnerability["cwes"]:
        # Assumes entries look like 'CWE-123' — TODO confirm against the API.
        finding.cwe = int(vulnerability["cwes"][0][4:])
    if vulnerability["cves"]:
        finding.cve = vulnerability["cves"][0]
    if vulnerability["cvss_version"] == 3:
        if vulnerability["cvss_vector"]:
            finding.cvssv3 = vulnerability["cvss_vector"]
    finding.url = vulnerability["location"]
    finding.severity = ES_SEVERITIES[vulnerability["severity"]]
    finding.description = vulnerability["description"]
    finding.mitigation = vulnerability["remediation"]
    # Was: True if status == "open" else False — the comparison already
    # yields the bool.
    finding.active = vulnerability["status"] == "open"
    if vulnerability["asset_tags"]:
        finding.tags = vulnerability["asset_tags"].split(",")
    finding.unique_id_from_tool = vulnerability["id"]
    finding.unsaved_endpoints = [
        Endpoint.from_uri(vulnerability["location"])
    ]
    return finding
def convert_endpoints(self, affected_targets):
    """Convert Cobalt affected_targets into DefectDojo endpoints"""
    return [Endpoint.from_uri(target) for target in affected_targets]
def attach_extras(endpoints, requests, responses, finding, date, qid, test):
    """Attach endpoints and request/response pairs to a Finding, creating
    and initializing the Finding first when none exists yet."""
    if finding is None:
        finding = Finding()
        finding.unsaved_req_resp = []
        finding.unsaved_endpoints = []
        finding.test = test
        if date is not None:
            finding.date = date
        finding.vuln_id_from_tool = str(qid)
    else:
        # Existing finding: keep the earliest observation date.
        if date is not None and finding.date > date:
            finding.date = date

    finding.unsaved_endpoints.extend(
        Endpoint.from_uri(endpoint) for endpoint in endpoints)

    for idx in range(len(requests)):
        # Skip pairs where both sides are empty.
        if requests[idx] != '' or responses[idx] != '':
            finding.unsaved_req_resp.append({
                "req": requests[idx],
                "resp": responses[idx]
            })
    return finding
def setUpTestData(cls):
    # In-memory object graph shared by the authorization tests; nothing
    # here is saved to the database.
    cls.user = User()
    cls.product_type = Product_Type()
    cls.product_type_member = Product_Type_Member()
    cls.product = Product()
    cls.product_member = Product_Member()
    # Hierarchy: product type -> product -> engagement -> test -> finding,
    # plus an endpoint hanging off the product.
    cls.product.prod_type = cls.product_type
    cls.engagement = Engagement()
    cls.engagement.product = cls.product
    cls.test = Test()
    cls.test.engagement = cls.engagement
    cls.finding = Finding()
    cls.finding.test = cls.test
    cls.endpoint = Endpoint()
    cls.endpoint.product = cls.product
    # Reader/Owner memberships at product-type level...
    cls.product_type_member_reader = Product_Type_Member()
    cls.product_type_member_reader.user = cls.user
    cls.product_type_member_reader.product_type = cls.product_type
    cls.product_type_member_reader.role = Roles.Reader
    cls.product_type_member_owner = Product_Type_Member()
    cls.product_type_member_owner.user = cls.user
    cls.product_type_member_owner.product_type = cls.product_type
    cls.product_type_member_owner.role = Roles.Owner
    # ...and at product level.
    cls.product_member_reader = Product_Member()
    cls.product_member_reader.user = cls.user
    cls.product_member_reader.product = cls.product
    cls.product_member_reader.role = Roles.Reader
    cls.product_member_owner = Product_Member()
    cls.product_member_owner.user = cls.user
    cls.product_member_owner.product = cls.product
    cls.product_member_owner.role = Roles.Owner
def __init__(self, file, test):
    """Parse an Nmap XML report into Findings, one per open port number,
    with one Endpoint per host listening on that port."""
    parser = le.XMLParser(resolve_entities=False)
    nscan = le.parse(file, parser)
    root = nscan.getroot()
    if 'nmaprun' not in root.tag:
        raise NamespaceErr(
            "This doesn't seem to be a valid Nmap xml file.")
    dupes = {}
    for host in root.iter("host"):
        ip = host.find("address[@addrtype='ipv4']").attrib['addr']
        hostname_elem = host.find("hostnames/hostname[@type='PTR']")
        fqdn = hostname_elem.attrib['name'] if hostname_elem is not None else None
        for portelem in host.xpath("ports/port[state/@state='open']"):
            port = portelem.attrib['portid']
            protocol = portelem.attrib['protocol']
            title = "Open port: %s/%s" % (port, protocol)
            description = "%s:%s A service was found to be listening on this port." % (
                ip, port)
            service = portelem.find('service')
            if service is not None:
                # BUG FIX: XML attributes live in element.attrib; hasattr()
                # on an lxml element never sees them, so the product/version
                # details were silently dropped before.
                if 'product' in service.attrib:
                    serviceinfo = " (%s%s)" % (
                        service.attrib['product'],
                        " " + service.attrib['version']
                        if 'version' in service.attrib else "")
                else:
                    serviceinfo = ""
                description += " It was identified as '%s%s'." % (
                    service.attrib['name'], serviceinfo)
            description += '\n\n'
            severity = "Info"
            dupe_key = port
            if dupe_key in dupes:
                find = dupes[dupe_key]
                # description is always a non-empty string here; the old
                # 'is not None' guard was dead code.
                find.description += description
            else:
                find = Finding(
                    title=title,
                    test=test,
                    verified=False,
                    description=description,
                    severity=severity,
                    numerical_severity=Finding.get_numerical_severity(
                        severity))
                find.unsaved_endpoints = []
                dupes[dupe_key] = find
            find.unsaved_endpoints.append(
                Endpoint(host=ip, fqdn=fqdn, port=port, protocol=protocol))
    self.items = dupes.values()
def process_scandetail(self, scan, test, dupes):
    """Convert the items of one Nikto XML scan element into Findings,
    deduplicated on the rendered description."""
    for item in scan.findall('item'):
        description = item.findtext("description")
        # Title: the first sentence of the description, capped at 900 chars.
        sentences = re.split(r'(?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|\?)\s', description)
        if len(sentences) > 0:
            titleText = sentences[0][:900]
        else:
            titleText = description[:900]
        # Description
        description = "\n".join([
            f"**Host:** `{item.findtext('iplink')}`",
            f"**Description:** `{item.findtext('description')}`",
            f"**HTTP Method:** `{item.attrib.get('method')}`",
        ])
        # Nikto doesn't assign severity, default to Info; an OSVDB id bumps
        # the finding to Medium.
        severity = "Info"
        if item.get('osvdbid') is not None and "0" != item.get('osvdbid'):
            severity = "Medium"
        finding = Finding(
            title=titleText,
            test=test,
            description=description,
            severity=severity,
            dynamic_finding=True,
            static_finding=False,
            vuln_id_from_tool=item.attrib.get('id'),
            nb_occurences=1,
        )
        # BUG FIX: always give the finding an endpoint list; previously an
        # unparsable iplink could leave the attribute unset and the dedup
        # branch below would fail on .extend().
        finding.unsaved_endpoints = []
        try:
            ip = item.findtext("iplink")
            url = hyperlink.parse(ip)
            endpoint = Endpoint(
                protocol=url.scheme,
                host=url.host,
                port=url.port,
                path="/".join(url.path),
            )
            finding.unsaved_endpoints = [endpoint]
        except ValueError:
            # logger.warn() is deprecated in favor of warning().
            logger.warning("Invalid iplink in the report")
        dupe_key = hashlib.sha256(description.encode("utf-8")).hexdigest()
        if dupe_key in dupes:
            find = dupes[dupe_key]
            find.description += "\n-----\n" + finding.description
            find.unsaved_endpoints.extend(finding.unsaved_endpoints)
            find.nb_occurences += 1
        else:
            dupes[dupe_key] = finding
def __extract_finding(self, raw_finding, metadata: HydraScanMetadata, test) -> Finding:
    """Build one Finding from a hydra result entry.

    Raises ValueError when any of host/port/login/password is missing.
    """
    host = raw_finding.get('host')
    port = raw_finding.get('port')
    username = raw_finding.get('login')
    password = raw_finding.get('password')
    if host is None or port is None or username is None or password is None:
        raise ValueError(
            "Vital information is missing for this finding! Skipping this finding!"
        )
    finding = Finding(
        test=test,
        title=f"Weak username / password combination found for {host}",
        date=parse_datetime(metadata.date) if metadata.date else date.today(),
        severity="High",
        description=(
            f"{host} on port {port} is allowing logins with easy to guess "
            f"username {username} and password {password}"
        ),
        static_finding=False,
        dynamic_finding=True,
        service=metadata.service_type,
    )
    finding.unsaved_endpoints = [Endpoint(host=host, port=port)]
    return finding
def process_endpoints(self, finding, df, i):
    """Attach the 'Request URI' of DataFrame row i to the finding, reusing
    an existing Endpoint record when one matches."""
    protocol = "http"
    host = "0.0.0.0"
    query = ""
    fragment = ""
    # .ix was removed from pandas; .loc is the label-based replacement.
    path = df.loc[i, 'Request URI']
    if not pd.isnull(path):
        try:
            # BUG FIX: the lookup previously passed the literal string
            # "protocol" instead of the protocol variable, so it could never
            # match an existing endpoint.
            dupe_endpoint = Endpoint.objects.get(
                protocol=protocol,
                host=host,
                query=query,
                fragment=fragment,
                path=path,
                product=finding.test.engagement.product)
        except Endpoint.DoesNotExist:
            dupe_endpoint = None
        if not dupe_endpoint:
            endpoints = [Endpoint(protocol=protocol,
                                  host=host,
                                  query=query,
                                  fragment=fragment,
                                  path=path,
                                  product=finding.test.engagement.product)]
        else:
            # BUG FIX: the old code appended the duplicate endpoint twice.
            endpoints = [dupe_endpoint]
        finding.unsaved_endpoints = finding.unsaved_endpoints + endpoints
def attach_extras(endpoints, requests, responses, finding):
    """Attach parsed endpoints and request/response pairs to a Finding,
    creating and initializing the Finding when none exists yet."""
    if finding is None:
        finding = Finding()
        finding.unsaved_req_resp = list()
        finding.unsaved_endpoints = list()
    for endpoint in endpoints:
        parsedUrl = urlparse(endpoint)
        protocol = parsedUrl.scheme
        query = parsedUrl.query
        fragment = parsedUrl.fragment
        path = parsedUrl.path
        port = ""  # Set port to empty string by default
        # Split the returned network address into host and port.
        try:
            # If there is a port number attached to the host address
            host, port = parsedUrl.netloc.split(':')
        except ValueError:
            # BUG FIX: was a bare 'except:'; only the split can raise here
            # (no colon, or more than one), and only ValueError.
            host = parsedUrl.netloc
        finding.unsaved_endpoints.append(
            Endpoint(host=host, port=port, path=path, protocol=protocol,
                     query=query, fragment=fragment))
    for i in range(0, len(requests)):
        if requests[i] != '' or responses[i] != '':
            finding.unsaved_req_resp.append({
                "req": requests[i],
                "resp": responses[i]
            })
    return finding
def get_items(self, tree, test):
    """
    @return items A list of Host instances
    """
    items = list()
    for node in tree.findall('site'):
        site = Site(node)
        # BUG FIX: the conditional expression previously bound to the whole
        # concatenation — a site without a port produced an empty host
        # instead of the site name. The parentheses scope the conditional
        # to the ':port' suffix only.
        main_host = Endpoint(
            host=site.name + (":" + site.port if site.port is not None else ""))
        for item in site.items:
            # riskdesc looks like '<severity> (...)'; keep the first word.
            severity = item.riskdesc.split(' ', 1)[0]
            references = ''
            for ref in item.ref:
                references += ref + "\n"
            find = Finding(
                title=item.name,
                cwe=item.cwe,
                description=strip_tags(item.desc),
                test=test,
                severity=severity,
                mitigation=strip_tags(item.resolution),
                references=references,
                active=False,
                verified=False,
                false_p=False,
                duplicate=False,
                out_of_scope=False,
                mitigated=None,
                impact="No impact provided",
                numerical_severity=Finding.get_numerical_severity(severity))
            find.unsaved_endpoints = [main_host]
            for i in item.items:
                parts = urlparse(i['uri'])
                # Components are truncated to the model's field lengths.
                find.unsaved_endpoints.append(
                    Endpoint(protocol=parts.scheme,
                             host=parts.netloc[:500],
                             path=parts.path[:500],
                             query=parts.query[:1000],
                             fragment=parts.fragment[:500],
                             product=test.engagement.product))
            items.append(find)
    return items
def get_findings(self, filename, test):
    """Parse a Netsparker JSON report; findings are deduplicated by title,
    accumulating request/response pairs and endpoints."""
    tree = filename.read()
    # The report may arrive as bytes (possibly with a UTF-8 BOM) or as text;
    # decide explicitly instead of the old bare try/except.
    if isinstance(tree, bytes):
        data = json.loads(tree.decode('utf-8-sig'))
    else:
        data = json.loads(tree)
    dupes = dict()
    for item in data["Vulnerabilities"]:
        # (The old block of empty-string initializations was dead code.)
        title = item["Name"]
        findingdetail = cleantags(item["Description"])
        cwe = int(item["Classification"]["Cwe"]) if "Cwe" in item["Classification"] else None
        sev = item["Severity"]
        if sev not in ['Info', 'Low', 'Medium', 'High', 'Critical']:
            sev = 'Info'
        mitigation = cleantags(item["RemedialProcedure"])
        references = cleantags(item["RemedyReferences"])
        url = item["Url"]
        impact = cleantags(item["Impact"])
        dupe_key = title
        request = item["HttpRequest"]["Content"]
        response = item["HttpResponse"]["Content"]
        finding = Finding(title=title,
                          test=test,
                          description=findingdetail,
                          severity=sev.title(),
                          mitigation=mitigation,
                          impact=impact,
                          references=references,
                          cwe=cwe,
                          static_finding=True)
        if (item["Classification"] is not None) and (item["Classification"]["Cvss"] is not None) and (item["Classification"]["Cvss"]["Vector"] is not None):
            finding.cvssv3 = item["Classification"]["Cvss"]["Vector"]
        finding.unsaved_req_resp = [{"req": request, "resp": response}]
        finding.unsaved_endpoints = [Endpoint.from_uri(url)]
        if dupe_key in dupes:
            find = dupes[dupe_key]
            find.unsaved_req_resp.extend(finding.unsaved_req_resp)
            find.unsaved_endpoints.extend(finding.unsaved_endpoints)
        else:
            dupes[dupe_key] = finding
    return list(dupes.values())
def test_truncates_large_attributes(self):
    # Over-long components are truncated to the model's field lengths:
    # path and fragment to 500 characters, query to 1000.
    long_path = "foo" * 1000
    long_query = "bar" * 1000
    long_fragment = "baz" * 1000
    endpoint = Endpoint.from_uri(
        'http://[email protected]:8080/{}?{}#{}'.format(
            long_path, long_query, long_fragment))
    self.assertEqual(len(endpoint.path), 500)
    self.assertEqual(len(endpoint.query), 1000)
    self.assertEqual(len(endpoint.fragment), 500)
def process_scandetail(self, scan, test, dupes):
    """Convert the items of one Nikto XML scan element into Findings,
    deduplicated on the rendered description."""
    for item in scan.findall('item'):
        description = item.find("description").text
        # Title: the first sentence of the description, capped at 900 chars.
        sentences = re.split(r'(?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|\?)\s', description)
        if len(sentences) > 0:
            titleText = sentences[0][:900]
        else:
            titleText = description[:900]
        # Url
        ip = item.find("iplink").text
        # BUG FIX: str.replace() matches its argument literally, so the old
        # regex-like strings never stripped anything; use re.sub to remove
        # the default 80/443 port suffixes as the original comment intended.
        ip = re.sub(r':(80|443)/?$', '', ip)
        # Severity
        severity = "Info"  # Nikto doesn't assign severity, default to Info
        # Description
        description = "\n".join([
            f"**Host:** `{ip}`",
            f"**Description:** `{item.find('description').text}`",
            f"**HTTP Method:** `{item.attrib['method']}`",
        ])
        url = hyperlink.parse(ip)
        endpoint = Endpoint(
            protocol=url.scheme,
            host=url.host,
            port=url.port,
            path="/".join(url.path),
        )
        dupe_key = hashlib.sha256(description.encode("utf-8")).hexdigest()
        if dupe_key in dupes:
            finding = dupes[dupe_key]
            if finding.description:
                finding.description = finding.description + "\nHost:" + ip + "\n" + description
            finding.unsaved_endpoints.append(endpoint)
            finding.nb_occurences += 1
        else:
            finding = Finding(
                title=titleText,
                test=test,
                description=description,
                severity=severity,
                dynamic_finding=True,
                nb_occurences=1,
            )
            finding.unsaved_endpoints = [endpoint]
            dupes[dupe_key] = finding
def get_findings(self, filename, test):
    """Parse a testssl.sh CSV report into deduplicated Findings."""
    content = filename.read()
    if isinstance(content, bytes):
        content = content.decode('utf-8')
    reader = csv.DictReader(io.StringIO(content), delimiter=',', quotechar='"')
    dupes = dict()
    for row in reader:
        # filter 'OK'
        # possible values: LOW|MEDIUM|HIGH|CRITICAL + WARN|OK|INFO
        if row['severity'] in ['OK']:
            continue
        # convert severity
        severity = row['severity'].lower().capitalize()
        if severity == 'Warn':
            severity = 'Info'
        # Detect CVEs. str.split never returns an empty list, so the old
        # `len(cves) == 0` check was dead code; filter empty tokens instead
        # so a row with no CVE takes the [None] path.
        cves = [c for c in row['cve'].split(' ') if c]
        if not cves:
            cves = [None]
        for cve in cves:
            finding = Finding(
                title=row['id'],
                description=row['finding'],
                severity=severity,
                nb_occurences=1,
            )
            # manage CVE
            if cve:
                finding.cve = cve
                finding.references = '* [{0}](https://cve.mitre.org/cgi-bin/cvename.cgi?name={0})'.format(
                    cve)
            # manage CWE
            if '-' in row['cwe']:
                finding.cwe = int(row['cwe'].split('-')[1].strip())
            # manage endpoint
            finding.unsaved_endpoints = [
                Endpoint(host=row['fqdn/ip'].split("/")[0])
            ]
            if row.get('port') and row['port'].isdigit():
                finding.unsaved_endpoints[0].port = int(row['port'])
            # internal de-duplication
            dupe_key = hashlib.sha256("|".join(
                [finding.description, finding.title, str(finding.cve)]).encode('utf-8')).hexdigest()
            if dupe_key in dupes:
                dupes[dupe_key].unsaved_endpoints.extend(
                    finding.unsaved_endpoints)
                dupes[dupe_key].nb_occurences += finding.nb_occurences
            else:
                dupes[dupe_key] = finding
    return list(dupes.values())
def __xml_dynamic_flaw_to_finding(cls, app_id, xml_node, mitigation_text, test):
    """Build a Finding from a dynamic-analysis flaw node, reusing the
    shared flaw conversion and marking the result dynamic."""
    finding = cls.__xml_flaw_to_finding(app_id, xml_node, mitigation_text, test)
    finding.static_finding = False
    finding.dynamic_finding = True
    # Dynamic flaws carry the scanned URL as their endpoint.
    finding.unsaved_endpoints = [Endpoint.from_uri(xml_node.attrib.get('url'))]
    return finding
def test_url_full(self):
    endpoint = Endpoint.from_uri(
        'http://[email protected]:8080/path1/path2?key1=value&no_value_key#fragment1')
    expected = {
        'protocol': 'http',
        'userinfo': 'alice',
        'host': 'foo.bar',
        'port': 8080,
        # the URI path begins with '/' but Endpoint stores a "root-less" path
        'path': 'path1/path2',
        'query': 'key1=value&no_value_key',
        'fragment': 'fragment1',
    }
    for attribute, value in expected.items():
        self.assertEqual(getattr(endpoint, attribute), value)
def test_empty(self):
    # A bare Endpoint leaves every component unset.
    endpoint = Endpoint()
    for attribute in ('protocol', 'userinfo', 'host', 'port', 'path',
                      'query', 'fragment', 'product'):
        self.assertIsNone(getattr(endpoint, attribute))
def test_noscheme(self):
    endpoint = Endpoint.from_uri('//' + 'localhost:22')
    self.assertEqual(endpoint.host, 'localhost')
    self.assertEqual(endpoint.port, 22)
    # Only host and port can be derived from '//host:port'; everything
    # else stays unset.
    for attribute in ('protocol', 'userinfo', 'path', 'query',
                      'fragment', 'product'):
        self.assertIsNone(getattr(endpoint, attribute))
def attach_unique_extras(endpoints, requests, responses, finding, date, qid, param, payload, unique_id, active_text, test):
    """Attach endpoints, request/response pairs and active status to a
    Finding identified by a unique id."""
    # finding should always be none, since unique ID's are being used
    if finding is None:
        finding = Finding()
        finding.unsaved_req_resp = list()
        finding.unsaved_endpoints = list()
        if date is not None:
            finding.date = date
        finding.vuln_id_from_tool = str(qid)
        finding.unique_id_from_tool = unique_id
        finding.param = param
        finding.payload = payload
        finding.test = test
    else:
        # Finding already exists: keep the earliest observation date.
        if date is not None and finding.date > date:
            finding.date = date
    for endpoint in endpoints:
        parsedUrl = urlparse(endpoint)
        protocol = parsedUrl.scheme
        query = parsedUrl.query
        fragment = parsedUrl.fragment
        path = parsedUrl.path
        port = ""  # Set port to empty string by default
        # Split the returned network address into host and port.
        try:
            # If there is a port number attached to the host address
            host, port = parsedUrl.netloc.split(':')
        except ValueError:
            # BUG FIX: was a bare 'except:'; only the split's ValueError
            # (no colon in netloc) is expected here.
            host = parsedUrl.netloc
        finding.unsaved_endpoints.append(
            Endpoint(host=truncate_str(host, 500), port=port,
                     path=truncate_str(path, 500), protocol=protocol,
                     query=truncate_str(query, 1000),
                     fragment=truncate_str(fragment, 500)))
    for i in range(0, len(requests)):
        if requests[i] != '' or responses[i] != '':
            finding.unsaved_req_resp.append({
                "req": requests[i],
                "resp": responses[i]
            })
    if active_text is not None:
        # A 'fixed' status closes the finding.
        finding.active = 'fixed' not in active_text.lower()
        # TODO: may need to look up by finding ID and mark current finding as fixed
    return finding
def create_findings(self, items):
    """Convert parsed report items into Findings, aggregated in self.dupes
    on (title, description, CWE, endpoint)."""
    for details in items:
        if details.get('Description') == '':
            continue
        aggregateKeys = "{}{}{}{}".format(
            details.get('Title'), details.get('Description'),
            details.get('CWE'), details.get('Endpoint'))
        find = Finding(title=details.get('Title'),
                       description=details.get('Description'),
                       test=self.test,
                       severity=details.get('Severity'),
                       mitigation=details.get('Mitigation'),
                       references=details.get('References'),
                       impact=details.get('Impact'),
                       cwe=int(details.get('CWE')),
                       active=False,
                       verified=False,
                       false_p=False,
                       duplicate=False,
                       out_of_scope=False,
                       mitigated=None,
                       numerical_severity=Finding.get_numerical_severity(details.get('Severity')),
                       static_finding=False,
                       dynamic_finding=True,
                       nb_occurences=1)
        if len(details.get('Request')) > 0:
            # Raw pairs are joined with a 'SPLITTER' marker; the trailing
            # empty chunk after the last marker is dropped.
            requests = details.get('Request').split('SPLITTER')[:-1]
            responses = details.get('Response').split('SPLITTER')[:-1]
            find.unsaved_req_resp = [
                {"req": req, "resp": resp}
                for req, resp in zip(requests, responses)
            ]
        parsedUrl = urlparse(details.get('Endpoint'))
        protocol = parsedUrl.scheme
        query = parsedUrl.query
        fragment = parsedUrl.fragment
        path = parsedUrl.path
        port = ""  # Set port to empty string by default
        # Split the returned network address into host and port.
        try:
            # If there is a port number attached to the host address
            host, port = parsedUrl.netloc.split(':')
        except ValueError:
            # BUG FIX: was a bare 'except:'; only the split's ValueError
            # (no colon in netloc) is expected here.
            host = parsedUrl.netloc
        find.unsaved_endpoints = [Endpoint(host=host, port=port, path=path,
                                           protocol=protocol, query=query,
                                           fragment=fragment)]
        self.dupes[aggregateKeys] = find
    return list(self.dupes.values())
def setUp(self):
    # Product/endpoint pair stored on self so individual tests can reach them.
    self.p = Product()
    self.p.Name = 'Test Product'
    self.p.Description = 'Product for Testing Endpoint functionality'
    self.p.save()
    self.e = Endpoint()
    self.e.product = self.p
    self.e.host = '127.0.0.1'
    self.e.save()
    # The views under test need the system settings fixture loaded.
    call_command('loaddata', 'dojo/fixtures/system_settings', verbosity=0)
    # Helper for custom-field management, plus one pre-created field.
    self.util = EndpointMetaDataTestUtil()
    self.util.save_custom_field(self.e, 'TestField', 'TestValue')