class ShodanPlugin(TestingPlugin): """ This plugin tries to perform passive reconnaissance on a target using the Shodan web API. """ #-------------------------------------------------------------------------- def check_params(self): # Make sure we have an API key. self.get_api_key() #-------------------------------------------------------------------------- def get_accepted_types(self): return [IP] #-------------------------------------------------------------------------- def get_api_key(self): key = Config.plugin_args.get("apikey", None) if not key: key = Config.plugin_config.get("apikey", None) if not key: raise ValueError( "Missing API key! Get one at:" " http://www.shodanhq.com/api_doc") return key #-------------------------------------------------------------------------- def run(self, info): # This is where we'll collect the data we'll return. results = [] # Skip unsupported IP addresses. if info.version != 4: return ip = info.address parsed = netaddr.IPAddress(ip) if parsed.is_loopback() or \ parsed.is_private() or \ parsed.is_link_local(): return # Query Shodan for this host. try: key = self.get_api_key() api = WebAPI(key) shodan = api.host(ip) except Exception, e: tb = traceback.format_exc() Logger.log_error("Error querying Shodan for host %s: %s" % (ip, str(e))) Logger.log_error_more_verbose(tb) return # Make sure we got the same IP address we asked for. if ip != shodan.get("ip", ip): Logger.log_error( "Shodan gave us a different IP address... weird!") Logger.log_error_verbose( "Old IP: %s - New IP: %s" % (ip, shodan["ip"])) ip = to_utf8( shodan["ip"] ) info = IP(ip) results.append(info) # Extract all hostnames and link them to this IP address. # Note: sometimes Shodan sends IP addresses here! (?) 
seen_host = {} for hostname in shodan.get("hostnames", []): if hostname == ip: continue if hostname in seen_host: domain = seen_host[hostname] else: try: try: host = IP(hostname) except ValueError: host = Domain(hostname) except Exception: tb = traceback.format_exc() Logger.log_error_more_verbose(tb) seen_host[hostname] = host results.append(host) domain = host domain.add_resource(info) # Get the OS fingerprint, if available. os = to_utf8( shodan.get("os") ) if os: Logger.log("Host %s is running %s" % (ip, os)) pass # XXX TODO we'll need to reverse lookup the CPE # Get the GPS data, if available. # Complete any missing data using the default values. try: latitude = float( shodan["latitude"] ) longitude = float( shodan["longitude"] ) except Exception: latitude = None longitude = None if latitude is not None and longitude is not None: area_code = shodan.get("area_code") if not area_code: area_code = None else: area_code = str(area_code) country_code = shodan.get("country_code") if not country_code: country_code = shodan.get("country_code3") if not country_code: country_code = None else: country_code = str(country_code) else: country_code = str(country_code) country_name = shodan.get("country_name") if not country_name: country_name = None city = shodan.get("city") if not city: city = None dma_code = shodan.get("dma_code") if not dma_code: dma_code = None else: dma_code = str(dma_code) postal_code = shodan.get("postal_code") if not postal_code: postal_code = None else: postal_code = str(postal_code) region_name = shodan.get("region_name") if not region_name: region_name = None geoip = Geolocation( latitude, longitude, country_code = country_code, country_name = country_name, region_name = region_name, city = city, zipcode = postal_code, metro_code = dma_code, area_code = area_code, ) results.append(geoip) geoip.add_resource(info) # Go through every result and pick only the latest ones. 
latest = {} for data in shodan.get("data", []): if ( not "banner" in data or not "ip" in data or not "port" in data or not "timestamp" in data ): Logger.log_error("Malformed results from Shodan?") from pprint import pformat Logger.log_error_more_verbose(pformat(data)) continue key = ( data["ip"], data["port"], data["banner"], ) try: timestamp = reversed( # DD.MM.YYYY -> (YYYY, MM, DD) map(int, data["timestamp"].split(".", 2))) except Exception: continue if key not in latest or timestamp > latest[key][0]: latest[key] = (timestamp, data) # Process the latest results. seen_isp_or_org = set() seen_html = set() for _, data in latest.values(): # Extract all domains, but don't link them. for hostname in data.get("domains", []): if hostname not in seen_host: try: domain = Domain(hostname) except Exception: tb = traceback.format_exc() Logger.log_error_more_verbose(tb) continue seen_host[hostname] = domain results.append(domain) # We don't have any use for this information yet, # but log it so at least the user can see it. isp = to_utf8( data.get("isp") ) org = to_utf8( data.get("org") ) if org and org not in seen_isp_or_org: seen_isp_or_org.add(org) Logger.log_verbose( "Host %s belongs to: %s" % (ip, org) ) if isp and (not org or isp != org) and isp not in seen_isp_or_org: seen_isp_or_org.add(isp) Logger.log_verbose( "IP address %s is provided by ISP: %s" % (ip, isp) ) # Get the HTML content, if available. raw_html = to_utf8( data.get("html") ) if raw_html: hash_raw_html = hash(raw_html) if hash_raw_html not in seen_html: seen_html.add(hash_raw_html) try: html = HTML(raw_html) except Exception: html = None tb = traceback.format_exc() Logger.log_error_more_verbose(tb) if html: html.add_resource(info) results.append(html) # Get the banner, if available. 
raw_banner = to_utf8( data.get("banner") ) try: port = int( data.get("port", "0") ) except Exception: port = 0 if raw_banner and port: try: banner = Banner(info, raw_banner, port) except Exception: banner = None tb = traceback.format_exc() Logger.log_error_more_verbose(tb) if banner: results.append(banner) # Was this host located somewhere else in the past? for data in reversed(shodan.get("data", [])): try: timestamp = reversed( # DD.MM.YYYY -> (YYYY, MM, DD) map(int, data["timestamp"].split(".", 2))) old_location = data.get("location") if old_location: old_latitude = old_location.get("latitude", latitude) old_longitude = old_location.get("longitude", longitude) if ( old_latitude is not None and old_longitude is not None and (old_latitude != latitude or old_longitude != longitude) ): # Get the geoip information. area_code = old_location.get("area_code") if not area_code: area_code = None country_code = old_location.get("country_code") if not country_code: country_code = old_location.get("country_code3") if not country_code: country_code = None country_name = old_location.get("country_name") if not country_name: country_name = None city = old_location.get("city") if not city: city = None postal_code = old_location.get("postal_code") if not postal_code: postal_code = None region_name = old_location.get("region_name") if not region_name: region_name = None geoip = Geolocation( latitude, longitude, country_code = country_code, country_name = country_name, region_name = region_name, city = city, zipcode = postal_code, area_code = area_code, ) # If this is the first time we geolocate this IP, # use this information as it if were up to date. if latitude is None or longitude is None: latitude = old_latitude longitude = old_longitude results.append(geoip) geoip.add_resource(info) # Otherwise, just log the event. else: discard_data(geoip) where = str(geoip) when = datetime.date(*timestamp) msg = "Host %s used to be located at %s on %s." 
msg %= (ip, where, when.strftime("%B %d, %Y")) Logger.log_verbose(msg) except Exception: tb = traceback.format_exc() Logger.log_error_more_verbose(tb) # Return the results. return results
def recv_info(self, info): m_domain = info.root # Skips localhost if m_domain == "localhost": return m_return = None # Checks if the hostname has been already processed if not self.state.check(m_domain): Logger.log_more_verbose("Starting DNS zone transfer plugin") m_return = [] # # Make the zone transfer # m_ns_servers, m_zone_transfer = DNS.zone_transfer( m_domain, ns_allowed_zone_transfer=True) m_return_append = m_return.append if m_zone_transfer: Logger.log_more_verbose("DNS zone transfer successful") m_return.extend(m_zone_transfer) for l_ns in m_ns_servers: # Create the vuln l_v = DNSZoneTransfer(l_ns) l_resource = None # Is a IPaddress? try: ip = IPAddress(l_ns) except Exception: ip = None if ip is not None: # Create the IP resource l_resource = IP(l_ns) else: # Create the Domain resource l_resource = Domain(l_ns) # Associate the resource to the vuln l_v.add_resource(l_resource) # Append to the results: the resource and the vuln m_return_append(l_v) m_return_append(l_resource) else: Logger.log_more_verbose( "DNS zone transfer failed, server not vulnerable") m_return.extend(m_ns_servers) # Set the domain parsed self.state.set(m_domain, True) return m_return
def sf_DEFACED_AFFILIATE(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot DEFACED_AFFILIATE event.

    Only reported when external (out of scope) results are allowed.
    """
    if not self.allow_external:
        return
    affected = Domain(source)
    return affected, DefacedDomain(affected, tool_id = sf_module)
def sf_MALICIOUS_AFFILIATE(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot MALICIOUS_AFFILIATE event.

    Only reported when external (out of scope) results are allowed.
    """
    if not self.allow_external:
        return
    affected = Domain(source)
    return affected, MaliciousDomain(affected, tool_id = sf_module)
def sf_PROVIDER_MAIL(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot PROVIDER_MAIL event.

    The raw data may hold either an IP address or a hostname, so try
    the stricter parse first and fall back to a Domain on failure.
    """
    try:
        resource = IP(raw_data)
    except ValueError:
        resource = Domain(raw_data)
    return resource
def sf_SSL_CERTIFICATE_MISMATCH(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot SSL_CERTIFICATE_MISMATCH event.
    """
    # The source event carries a URL; the affected resource is its host.
    affected = Domain(parse_url(source).host)
    # XXX or is it InvalidCommonName?
    vulnerability = InvalidCertificate(affected, tool_id = sf_module)
    return affected, vulnerability
def recv_info(self, info):
    """
    Try a DNS zone transfer against the nameservers of the root domain
    and, on success, report the transferred records plus one
    DNSZoneTransfer vulnerability per nameserver.

    :param info: Domain whose root will be tested.
    :type info: Domain

    :returns: Discovered resources and vulnerabilities, or None if the
        domain was skipped or the transfer failed.
    :rtype: list | None
    """

    # Get the root domain only.
    root = info.root

    # Skip localhost.
    if root == "localhost":
        return

    # Skip if the root domain is out of scope.
    if root not in Config.audit_scope:
        return

    # Skip root domains we've already processed.
    if self.state.put(root, True):
        return

    # Attempt a DNS zone transfer.
    ns_servers, results = DNS.zone_transfer(root,
                                            ns_allowed_zone_transfer=True)

    # On failure, skip.
    if not results:
        Logger.log_verbose(
            "DNS zone transfer failed, server %r not vulnerable" % root)
        return

    # Create a Domain object for the root domain.
    domain = Domain(root)

    # Associate all the results with the root domain.
    # (Python 2 map() runs eagerly; used here only for the side effect.)
    map(domain.add_information, results)

    # Add the root domain to the results.
    results.append(domain)

    # We have a vulnerability on each of the nameservers involved.
    msg = "DNS zone transfer successful, "
    if len(ns_servers) > 1:
        msg += "%d nameservers for %r are vulnerable!"
        msg %= (len(ns_servers), root)
    else:
        msg += "nameserver for %r is vulnerable!" % root
    Logger.log(msg)

    # If we don't have the name servers...
    if not ns_servers:

        # Link the vulnerability to the root domain instead.
        vulnerability = DNSZoneTransfer(root)
        vulnerability.add_resource(domain)
        results.append(vulnerability)

    # If we have the name servers...
    else:

        # Create a vulnerability for each nameserver in scope.
        for ns in ns_servers:

            # Instance the vulnerability object.
            vulnerability = DNSZoneTransfer(ns)

            # Instance a Domain or IP object.
            try:
                resource = IP(ns)
            except ValueError:
                resource = Domain(ns)

            # Associate the resource to the root domain.
            domain.add_resource(resource)

            # Associate the nameserver to the vulnerability.
            vulnerability.add_resource(resource)

            # Add both to the results.
            results.append(resource)
            results.append(vulnerability)

    # Return the results.
    return results
def sf_AFFILIATE_DOMAIN(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot AFFILIATE_DOMAIN event.

    Only reported when external (out of scope) results are allowed.
    """
    if not self.allow_external:
        return
    return Domain(raw_data)
def parse_results(openvas_results, ip=None): """ Convert the OpenVAS scan results to the GoLismero data model. :param openvas_results: OpenVAS scan results. :type openvas_results: list(OpenVASResult) :param ip: (Optional) IP address to link the vulnerabilities to. :type ip: IP | None :returns: Scan results converted to the GoLismero data model. :rtype: list(Data) """ # This is where we'll store the results. results = [] # Remember the hosts we've seen so we don't create them twice. hosts_seen = {} LEVELS_CORRESPONDENCES = { 'debug': 'low', 'log': 'informational', 'low': "low", 'medium': 'middle', 'high': "high", } # For each OpenVAS result... for opv in openvas_results: try: # Get the host. host = opv.host # Get or create the vulnerable resource. target = ip if host in hosts_seen: target = hosts_seen[host] elif not ip or ip.address != host: try: target = IP(host) except ValueError: target = Domain(host) hosts_seen[host] = target results.append(target) # Get the threat level. try: level = opv.threat.lower() except Exception: level = "informational" # Get the metadata. nvt = opv.nvt ##references = nvt.xrefs ##cvss = nvt.cvss ##cve = nvt.cve ##vulnerability_type = nvt.category # Get the vulnerability description. description = opv.description if not description: description = nvt.description if not description: description = nvt.summary if not description: description = "A vulnerability has been found." if opv.notes: description += "\n" + "\n".join(" - " + note.text for note in opv.notes) # Create the vulnerability instance. vuln = Vulnerability( level=LEVELS_CORRESPONDENCES[level.lower()], description=description, ##cvss = cvss, ##cve = cve, ##references = references.split("\n"), ) ##vuln.vulnerability_type = vulnerability_type # Link the vulnerability to the resource. if target is not None: target.add_vulnerability(vuln) # Skip on error. 
except Exception, e: t = format_exc() Logger.log_error_verbose("Error parsing OpenVAS results: %s" % str(e)) Logger.log_error_more_verbose(t) continue # Add the vulnerability. results.append(vuln)
def run(self, info):
    """
    Bruteforce subdomains of the root domain using a wordlist,
    detecting wildcard (dynamic) DNS via a random-subdomain baseline,
    and report any subdomain not present in the whitelist as a
    possible information disclosure.

    :param info: Domain whose root will be bruteforced.
    :type info: Domain

    :returns: Discovered domains, DNS records and vulnerabilities,
        or None if the domain was skipped or a wordlist failed to load.
    :rtype: list | None
    """

    # Get the root domain only.
    root = info.root

    # Skip localhost.
    if root == "localhost":
        return

    # Skip root domains we've already processed.
    if self.state.put(root, True):
        return

    # Load the subdomains wordlist.
    try:
        wordlist = WordListLoader.get_wordlist_as_list(Config.plugin_args["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose("Wordlist '%s' not found.." % Config.plugin_args["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose("Wordlist '%s' is not a file." % Config.plugin_args["wordlist"])
        return

    # Load the subdomains whitelist.
    # NOTE(review): the whitelist comes from plugin_config["wordlist"],
    # the same key name as the bruteforce list in plugin_args —
    # presumably the config-level list of expected subdomains; confirm.
    try:
        whitelist = WordListLoader.get_wordlist_as_list(Config.plugin_config["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose("Wordlist '%s' not found.." % Config.plugin_config["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose("Wordlist '%s' is not a file." % Config.plugin_config["wordlist"])
        return

    #
    # Set a base line for dynamic sub-domains
    #
    # Resolve a few random (so, almost surely nonexistent) subdomains:
    # if they all CNAME to the same target, the DNS zone is a wildcard.
    m_virtual_domains = []
    for v in (generate_random_string(40) for x in xrange(3)):
        l_subdomain = ".".join((v, root))

        records = DNS.get_a(l_subdomain, also_CNAME=True)

        for rec in records:
            if rec.type == "CNAME":
                m_virtual_domains.append(rec.target)

    # If 3 subdomains are the same, set the base domain
    m_base_domain = None
    if len(set(m_virtual_domains)) == 1:
        m_base_domain = m_virtual_domains[0]

    # Configure the progress notifier.
    self.progress.set_total(len(wordlist))
    self.progress.min_delta = 1  # notify every 1%

    # For each subdomain in the wordlist...
    found   = 0
    results = []
    visited = set()
    for prefix in wordlist:

        # Mark as completed before actually trying.
        # We can't put this at the end of the loop where it belongs,
        # because the "continue" statements would skip over this too.
        self.progress.add_completed()

        # Build the domain name.
        name = ".".join((prefix, root))

        # Skip if out of scope.
        if name not in Config.audit_scope:
            continue

        # Resolve the subdomain.
        records = DNS.get_a(name, also_CNAME=True)
        records.extend( DNS.get_aaaa(name, also_CNAME=True) )

        # If no DNS records were found, skip.
        if not records:
            continue

        # If CNAME is the base domain, skip (wildcard match, not real).
        chk = [True for x in records if x.type == "CNAME" and x.target == m_base_domain]
        if len(chk) > 0 and all(chk):
            continue

        # We found a subdomain!
        found += 1
        Logger.log_more_verbose(
            "Subdomain found: %s" % name)

        # Create the Domain object for the subdomain.
        domain = Domain(name)
        results.append(domain)

        #
        # Check for Domain disclosure
        #
        if prefix not in whitelist:
            d = DomainDisclosure(domain,
                                 risk        = 0,
                                 level       = "low",
                                 title       = "Possible subdomain leak",
                                 description = "A subdomain was discovered which may be an unwanted information disclosure."
                                 )
            results.append(d)

        # For each DNs record, grab the address or name.
        # Skip duplicated records.
        for rec in records:
            if rec.type == "CNAME":
                location = rec.target
            elif rec.type in ("A", "AAAA"):
                location = rec.address
            else: # should not happen...
                results.append(rec)
                domain.add_information(rec)
                continue
            if location not in visited:
                visited.add(location)
                results.append(rec)
                domain.add_information(rec)

    # Log the results.
    if found:
        Logger.log(
            "Found %d subdomains for root domain: %s"
            % (found, root))
    else:
        Logger.log_verbose(
            "No subdomains found for root domain: %s" % root)

    # Return the results.
    return results
def parse_results(openvas_results, ip=None): """ Convert the OpenVAS scan results to the GoLismero data model. :param openvas_results: OpenVAS scan results. :type openvas_results: list(OpenVASResult) :param ip: (Optional) IP address to link the vulnerabilities to. :type ip: IP | None :returns: Scan results converted to the GoLismero data model. :rtype: list(Data) """ # This is where we'll store the results. results = [] # Remember the hosts we've seen so we don't create them twice. hosts_seen = {} # Map of OpenVAS levels to GoLismero levels. openvas_level_2_golismero = { 'debug': 'informational', 'log': 'informational', 'low': "low", 'medium': 'middle', 'high': "high", } RISKS = { 'none': 0, 'debug': 0, 'log': 0, 'low': 1, 'medium': 2, 'high': 3, 'critical': 4 } # Do we have the OpenVAS plugin database? if not os.path.exists(openvas_db): Logger.log_error( "OpenVAS plugin not initialized, please run setup.py") return # Load database use_openvas_db = Pickler.load(open(openvas_db, "rb")) # For each OpenVAS result... for opv in openvas_results: try: # Get the host. host = opv.host if host is None: continue # # Get or create the vulnerable resource. # target = ip if host in hosts_seen: target = hosts_seen[host] elif not ip or ip.address != host: try: target = IP(host) except ValueError: target = Domain(host) hosts_seen[host] = target results.append(target) # Get the vulnerability description. description = opv.description if not description: description = nvt.description if not description: description = nvt.summary if not description: description = None # # Common data # oid = int(opv.nvt.oid.split(".")[-1]) nvt = opv.nvt cve = nvt.cve.split(", ") if nvt.cve else [] risk = RISKS.get(nvt.risk_factor.lower(), 0) name = getattr(nvt, "name", "") level = getattr(opv, "threat", "informational").lower() cvss_base = getattr(nvt, "cvss_base", 0.0) references = extract_from_text( description) # Get the reference URLs. # Notes in vuln? 
if opv.notes: description += "\n" + "\n".join(" - " + note.text for note in opv.notes) # # Prepare the vulnerability properties. # kwargs = { "level": openvas_level_2_golismero[level.lower()], "description": description, "references": references, "cve": cve, "risk": risk, "severity": risk, "impact": risk, "cvss_base": cvss_base, "title": name, "tool_id": "openvas_plugin_%s" % str(oid) } # If we have the OpenVAS plugin database, look up the plugin ID # that reported this vulnerability and create the vulnerability # using a specific class. Otherwise use the vulnerability class # for uncategorized vulnerabilities. candidate_classes = ["UncategorizedVulnerability"] # Looking for plugin ID in database if oid in use_openvas_db: candidate_classes = use_openvas_db[oid][0] # Make vulnerabilities for c in candidate_classes: clazz = globals()[c] # Create the vuln vuln = clazz(target, **kwargs) # Add the vulnerability. results.append(vuln) # Skip on error. except Exception, e: t = format_exc() Logger.log_error_verbose("Error parsing OpenVAS results: %s" % str(e)) Logger.log_error_more_verbose(t)
def parse_sslscan_results(cls, output_filename): """ Convert the output of a SSLScan run to the GoLismero data model. :param output_filename: Path to the output filename. The format should always be XML. :type output_filename: :returns: Results from the SSLScan scan, and the vulnerability count. :rtype: list(Domain|Vulnerability), int """ Ciphers = cls.Ciphers results = [] count = 0 try: # Read the XML file contents. with open(output_filename, "rU") as f: m_info = f.read() # Force conversion to UTF-8, or Latin-1 on failure. # This prevents XML parsing errors. try: m_text = m_info.encode("utf-8") except UnicodeDecodeError: m_text = m_info.decode("latin-1").encode("utf-8") # Parse the XML file. tree = BeautifulStoneSoup(m_text) # For each scan result... try: tags = tree.findAll("ssltest") except Exception, e: tb = format_exc() Logger.log_error("Error parsing XML file: %s" % str(e)) Logger.log_error_more_verbose(tb) return results, count for t in tags: try: # Get the target hostname. info = Domain( t.get("host") ) results.append(info) # Self-signed? m_t_pk = t.find("pk") if m_t_pk is not None: m_self_signed = m_t_pk.get("error") if m_self_signed: results.append( InvalidCertificate(info) ) count += 1 # Valid CN? m_t_cn = t.find("subject") if m_t_cn is not None: m_cn = re.search( "(CN=)([0-9a-zA-Z\.\*]+)", m_t_cn.text).group(2) if m_cn != info.hostname: results.append( InvalidCommonName(info, m_cn) ) count += 1 # Outdated? 
m_t_before = t.find("not-valid-before") m_t_after = t.find("not-valid-after") if m_t_before is not None and m_t_after is not None: m_valid_before = re.search( "([a-zA-Z:0-9\s]+)( GMT)", m_t_before.text).group(1) m_valid_after = re.search( "([a-zA-Z:0-9\s]+)( GMT)", m_t_after.text).group(1) m_valid_before_date = datetime.strptime( m_valid_before, "%b %d %H:%M:%S %Y") m_valid_after_date = datetime.strptime( m_valid_after, "%b %d %H:%M:%S %Y") if m_valid_after_date < m_valid_before_date: results.append( OutdatedCertificate(info) ) count += 1 # Get the ciphers. m_ciphers = [ Ciphers(version = c.get("sslversion"), bits = c.get("bits"), cipher = c.get("cipher")) for c in t.findAll("cipher") if c.get("status") == "accepted" ] # Insecure algorithm? c = [y.cipher for y in m_ciphers if "CBC" in y.cipher] if c: results.append( InsecureAlgorithm(info, c) ) count += 1 # Weak keys? k = [int(y.bits) for i in m_ciphers if int(y.bits) <= 56] if k: results.append( WeakKey(info, k) ) count += 1 # Obsolete protocol? c = [y.version for y in m_ciphers if "SSLv1" in y.version] if c: results.append( ObsoleteProtocol(info, "SSLv1") ) count += 1 # On error, log the exception and continue. except Exception, e: tb = format_exc() Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(tb)
def parse_nmap_host(host, hostmap):
    """
    Convert the output of an Nmap scan to the GoLismero data model.

    :param host: XML node with the scanned host information.
    :type host: xml.etree.ElementTree.Element

    :param hostmap: Dictionary that maps IP addresses to IP data objects.
        This prevents the plugin from reporting duplicated addresses.
        Updated by this method.
    :type hostmap: dict( str -> IP )

    :returns: Results from the Nmap scan for this host.
    :rtype: list(Data)
    """

    # File format details can be found here:
    # https://svn.nmap.org/nmap/docs/nmap.dtd

    # Get the timestamp.
    timestamp = host.get("endtime")
    if timestamp:
        timestamp = long(timestamp)
    if not timestamp:
        timestamp = host.get("starttime")
        if timestamp:
            timestamp = long(timestamp)

    # Get all the IP addresses. Skip the MAC addresses.
    # FIX: the filter used to be 'not in ("ipv4, ipv6")' — a single
    # string, so this was accidental substring matching (and the empty
    # addrtype passed too). Use a proper tuple.
    ip_addresses = []
    for node in host.findall(".//address"):
        if node.get("addrtype", "") not in ("ipv4", "ipv6"):
            continue
        address = node.get("addr")
        if not address:
            continue
        if address not in hostmap:
            hostmap[address] = IP(address)
        ip_addresses.append(hostmap[address])

    # Link all the IP addresses to each other.
    ips_visited = set()
    for ip_1 in ip_addresses:
        if ip_1.address not in ips_visited:
            ips_visited.add(ip_1.address)
            for ip_2 in ip_addresses:
                if ip_2.address not in ips_visited:
                    ips_visited.add(ip_2.address)
                    ip_1.add_resource(ip_2)
    ips_visited.clear()

    # Get all the MAC addresses.
    mac_addresses = []
    seen_macs = set()
    for node in host.findall(".//address"):
        if node.get("addrtype", "") != "mac":
            continue
        address = node.get("addr")
        if not address:
            continue
        if address not in seen_macs:
            seen_macs.add(address)
            mac_addresses.append(MAC(address))

    # Get all the hostnames.
    domain_names = []
    for node in host.findall(".//hostname"):
        hostname = node.get("name")
        if not hostname:
            continue
        if hostname not in hostmap:
            hostmap[hostname] = Domain(hostname)
        domain_names.append(hostmap[hostname])

    # Link all domain names to all IP addresses.
    for name in domain_names:
        for ip in ip_addresses:
            name.add_resource(ip)

    # Link all MAC addresses to all IP addresses.
    for mac in mac_addresses:
        for ip in ip_addresses:
            mac.add_resource(ip)

    # Abort if no resources were found.
    if not ip_addresses and not domain_names and not mac_addresses:
        return []

    # Get the port scan results.
    ports = set()
    services = set()
    for node in host.findall(".//port"):
        try:
            portid   = node.get("portid")
            protocol = node.get("protocol")
            if protocol not in ("tcp", "udp"):
                continue
            try:
                port = int(portid)
            except Exception:
                port = getservbyname(portid)
            state = node.find("state").get("state")
            if state not in ("open", "closed", "filtered"):
                continue
            ports.add((state, protocol, port))
            if state == "open":
                serv_node = node.find("service")
                if serv_node is not None:
                    service = serv_node.get("name")
                    if service:
                        if service == "https":
                            service  = "http"
                            protocol = "SSL"
                        elif serv_node.get("tunnel") == "ssl":
                            protocol = "SSL"
                        else:
                            protocol = protocol.upper()
                        services.add((service, port, protocol))
        except Exception:
            warn("Error parsing port scan results: %s" % format_exc(),
                 RuntimeWarning)

    # Get the traceroute results.
    traces = []
    for node in host.findall(".//trace"):
        try:
            if node.get("port") is None or node.get("proto") is None:
                # This happens for trivial cases like empty traceroute
                # result tags. Example: trying to traceroute a host that's
                # only one hop away from you, like your home router.
                continue
            port  = int(node.get("port"))
            proto = node.get("proto")
            hops = {}
            broken = False
            # FIX: the inner loop used to shadow 'node'.
            for hop_node in node.findall(".//hop"):
                try:
                    ttl      = int(hop_node.get("ttl"))
                    address  = hop_node.get("ipaddr")
                    rtt      = float(hop_node.get("rtt"))
                    hostname = hop_node.get("host", None)
                    hops[ttl] = Hop(address, rtt, hostname)
                except Exception:
                    warn("Error parsing traceroute results: %s" %
                         format_exc(), RuntimeWarning)
                    broken = True
                    break
            if not broken:
                if hops:
                    ttl = hops.keys()
                    # FIX: min(*ttl)/max(*ttl) raised TypeError when
                    # there was exactly one hop (min(5) is invalid).
                    sane_hops = tuple(
                        hops.get(i, None)
                        for i in xrange(min(ttl), max(ttl) + 1)
                    )
                else:
                    sane_hops = ()
                traces.append((port, proto, sane_hops))
        except Exception:
            warn("Error parsing traceroute results: %s" % format_exc(),
                 RuntimeWarning)

    # Get the fingerprint results.
    fingerprints = set()
    for node in host.findall(".//osmatch"):
        try:
            name = node.get("name", None)
            # FIX: the inner loop used to shadow 'node'.
            for osclass in node.findall(".//osclass"):
                accuracy   = float(osclass.get("accuracy"))
                os_type    = osclass.get("type", None)
                vendor     = osclass.get("vendor", None)
                family     = osclass.get("osfamily", None)
                generation = osclass.get("osgen", None)
                cpe        = osclass.find("cpe").text
                fingerprints.add((cpe, accuracy, name,
                                  vendor, os_type, generation, family))
        except Exception:
            warn("Error parsing OS fingerprint results: %s" % format_exc(),
                 RuntimeWarning)

    # This is where we'll gather all the results.
    results = ip_addresses + domain_names + mac_addresses

    # Link the port scan results to the IP addresses.
    for ip in ip_addresses:
        try:
            portscan = Portscan(ip, ports, timestamp)
        except Exception:
            warn(format_exc(), RuntimeWarning)
            continue
        results.append(portscan)

    # Link the service identification results to the IP addresses.
    for service, port, protocol in services:
        try:
            sfp = ServiceFingerprint(service, port, protocol)
        except Exception:
            warn(format_exc(), RuntimeWarning)
            continue
        for ip in ip_addresses:
            ip.add_information(sfp)
        results.append(sfp)

    # Link the traceroute results to the IP addresses.
    # (Traceroute objects only support IPv4 here.)
    for ip in ip_addresses:
        if ip.version == 4:
            for trace in traces:
                try:
                    traceroute = Traceroute(ip, *trace)
                except Exception:
                    warn(format_exc(), RuntimeWarning)
                    continue
                results.append(traceroute)

    # Link the fingerprint results to the IP addresses.
    for ip in ip_addresses:
        for args in fingerprints:
            try:
                fingerprint = OSFingerprint(*args)
            except Exception:
                warn(format_exc(), RuntimeWarning)
                continue
            ip.add_information(fingerprint)
            results.append(fingerprint)

    # Return the results.
    return results
def sf_INITIAL_TARGET(self, sf_module, source, raw_data):
    """Handle a SpiderFoot INITIAL_TARGET event."""
    # TODO: use this to reconstruct the original scope
    target = Domain(raw_data)
    return target
continue all_names = set() all_names.add(name) all_names.add(real_name) all_names.update(aliaslist) for name in all_names: if name and name not in visited: visited.add(name) with warnings.catch_warnings(): warnings.filterwarnings("ignore") in_scope = name in Config.audit_scope if not in_scope: Logger.log_more_verbose("Hostname out of scope: %s" % name) continue data = Domain(name) data.add_resource(info) results.append(data) for ip in addresslist: with warnings.catch_warnings(): warnings.filterwarnings("ignore") in_scope = ip in Config.audit_scope if not in_scope: Logger.log_more_verbose( "IP address out of scope: %s" % ip) continue d = IP(ip) data.add_resource(d) results.append(d) text = "Found %d emails and %d hostnames for keyword %r"
def sf_SUBDOMAIN(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot SUBDOMAIN event.

    Only reported when subdomain processing is enabled.
    """
    if not self.allow_subdomains:
        return
    return Domain(raw_data)
def parse_nikto_results(info, output_filename): """ Convert the output of a Nikto scan to the GoLismero data model. :param info: Data object to link all results to (optional). :type info: BaseUrl :param output_filename: Path to the output filename. The format should always be CSV. :type output_filename: :returns: Results from the Nikto scan, and the vulnerability count. :rtype: list(Data), int """ # Parse the scan results. # On error log the exception and continue. results = [] vuln_count = 0 hosts_seen = set() urls_seen = {} try: with open(output_filename, "rU") as f: csv_reader = reader(f) for row in csv_reader: try: # Each row (except for the first) has always # the same 7 columns, but some may be empty. if len(row) < 7: continue host, ip, port, vuln_tag, method, path, text = row[:7] # Report domain names and IP addresses. if ((info is None or host != info.hostname) and host not in hosts_seen): hosts_seen.add(host) if host in Config.audit_scope: results.append(Domain(host)) if ip not in hosts_seen: hosts_seen.add(ip) if ip in Config.audit_scope: results.append(IP(ip)) # Skip rows not informing of vulnerabilities. if not vuln_tag: continue # Calculate the vulnerable URL. if info is not None: target = urljoin(info.url, path) else: if port == 443: target = urljoin("https://%s/" % host, path) else: target = urljoin("http://%s/" % host, path) # Skip if out of scope. if target not in Config.audit_scope: continue # Report the URLs. if (target, method) not in urls_seen: url = Url(target, method) urls_seen[(target, method)] = url results.append(url) else: url = urls_seen[(target, method)] # Get the reference URLs. refs = extract_from_text(text) refs.difference_update(urls_seen.itervalues()) # Report the vulnerabilities. 
if vuln_tag == "OSVDB-0": kwargs = {"level": "informational"} else: kwargs = extract_vuln_ids("%s: %s" % (vuln_tag, text)) kwargs["description"] = text if text else None kwargs["references"] = refs if "osvdb" in kwargs and "OSVDB-0" in kwargs["osvdb"]: tmp = list(kwargs["osvdb"]) tmp.remove("OSVDB-0") if tmp: kwargs["osvdb"] = tuple(tmp) else: del kwargs["osvdb"] if vuln_tag == "OSVDB-0": vuln = UncategorizedVulnerability(**kwargs) vuln.add_resource(url) else: vuln = VulnerableWebApp(url, **kwargs) results.append(vuln) vuln_count += 1 # On error, log the exception and continue. except Exception, e: Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(format_exc()) # On error, log the exception. except Exception, e: Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(format_exc())
def sf_CO_HOSTED_SITE(self, sf_module, source, raw_data):
    """
    Handle a SpiderFoot CO_HOSTED_SITE event.

    Only reported when external (out of scope) results are allowed.
    """
    if not self.allow_external:
        return
    return Domain(raw_data)
def recv_info(self, info):
    """
    Brute force subdomains of the target domain using a wordlist.

    :param info: Domain resource to expand.
    :type info: Domain

    :returns: Discovered subdomains and their DNS records, or None
        when the domain is skipped or the wordlist cannot be loaded.
    :rtype: list(Data) | None
    """

    # Get the root domain only.
    root = info.root

    # Skip localhost.
    if root == "localhost":
        return

    # Skip root domains we've already processed.
    if self.state.put(root, True):
        return

    # Load the subdomains wordlist.
    try:
        wordlist = WordListLoader.get_advanced_wordlist_as_list(Config.plugin_args["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose("Wordlist '%s' not found.." % Config.plugin_args["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose("Wordlist '%s' is not a file." % Config.plugin_args["wordlist"])
        return

    # Configure the progress notifier.
    self.progress.set_total(len(wordlist))
    self.progress.min_delta = 1  # notify every 1%

    # For each subdomain in the wordlist...
    found = 0
    results = []
    visited = set()
    for prefix in wordlist:

        # Mark as completed before actually trying.
        # We can't put this at the end of the loop where it belongs,
        # because the "continue" statements would skip over this too.
        self.progress.add_completed()

        # Build the domain name.
        name = ".".join((prefix, root))

        # Skip if out of scope.
        if name not in Config.audit_scope:
            continue

        # Resolve the subdomain (IPv4 and IPv6, following CNAMEs).
        records = DNS.get_a(name, also_CNAME=True)
        records.extend( DNS.get_aaaa(name, also_CNAME=True) )

        # If no DNS records were found, skip.
        if not records:
            continue

        # We found a subdomain!
        found += 1
        Logger.log_more_verbose(
            "Subdomain found: %s" % name)

        # Create the Domain object for the subdomain.
        domain = Domain(name)
        results.append(domain)

        # For each DNS record, grab the address or name.
        # Skip duplicated records (dedup is on the resolved
        # address/target, shared across all subdomains found).
        for rec in records:
            if rec.type == "CNAME":
                location = rec.target
            elif rec.type in ("A", "AAAA"):
                location = rec.address
            else:
                # Should not happen: keep the record anyway.
                results.append(rec)
                domain.add_information(rec)
                continue
            if location not in visited:
                visited.add(location)
                results.append(rec)
                domain.add_information(rec)

    # Log the results.
    if found:
        Logger.log(
            "Found %d subdomains for root domain: %s"
            % (found, root))
    else:
        Logger.log_verbose(
            "No subdomains found for root domain: %s" % root)

    # Return the results.
    return results
def sf_WEBSERVER_BANNER(self, sf_module, source, raw_data): parsed = parse_url(source) domain = Domain(parsed.host) banner = Banner(domain, raw_data, parsed.port) return domain, banner
def recv_info(self, info): # Get the root domain only. root = info.root # Skip localhost. if root == "localhost": return # Skip if the root domain is out of scope. if root not in Config.audit_scope: return # Skip root domains we've already processed. if self.state.put(root, True): return # Attempt a DNS zone transfer. ns_servers, results = DNS.zone_transfer( root, ns_allowed_zone_transfer = True) # On failure, skip. if not results: Logger.log_verbose( "DNS zone transfer failed, server %r not vulnerable" % root) return # Create a Domain object for the root domain. domain = Domain(root) # Associate all the results with the root domain. map(domain.add_information, results) # Add the root domain to the results. results.append(domain) # We have a vulnerability on each of the nameservers involved. msg = "DNS zone transfer successful, " if len(ns_servers) > 1: msg += "%d nameservers for %r are vulnerable!" msg %= (len(ns_servers), root) else: msg += "nameserver for %r is vulnerable!" % root Logger.log(msg) # If we don't have the name servers... if not ns_servers: # Link the vulnerability to the root domain instead. vulnerability = DNSZoneTransfer(root) vulnerability.add_resource(domain) results.append(vulnerability) # If we have the name servers... else: # Create a vulnerability for each nameserver in scope. for ns in ns_servers: # Instance the vulnerability object. vulnerability = DNSZoneTransfer(ns) # Instance a Domain or IP object. try: resource = IP(ns) except ValueError: resource = Domain(ns) # Associate the resource to the root domain. domain.add_resource(resource) # Associate the nameserver to the vulnerability. vulnerability.add_resource(resource) # Add both to the results. results.append(resource) results.append(vulnerability) # Return the results. return results
def sf_SSL_CERTIFICATE_EXPIRED(self, sf_module, source, raw_data): domain = Domain(parse_url(source).host) vulnerability = OutdatedCertificate( domain, tool_id = sf_module) return domain, vulnerability
def recv_info(self, info):
    """
    Brute force subdomains using a wordlist, with wildcard
    (dynamic) DNS detection and reporting of possible subdomain
    leaks not present in the whitelist.

    :param info: Domain resource to expand.
    :type info: Domain

    :returns: Discovered subdomains, their DNS records and any
        DomainDisclosure vulnerabilities, or None when skipped.
    :rtype: list(Data) | None
    """

    # Get the root domain only.
    root = info.root

    # Skip localhost.
    if root == "localhost":
        return

    # Skip root domains we've already processed.
    if self.state.put(root, True):
        return

    # Load the subdomains wordlist.
    try:
        wordlist = WordListLoader.get_advanced_wordlist_as_list(Config.plugin_args["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose("Wordlist '%s' not found.." % Config.plugin_args["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose("Wordlist '%s' is not a file." % Config.plugin_args["wordlist"])
        return

    # Load the subdomains whitelist.
    # NOTE(review): the whitelist comes from plugin_config while the
    # wordlist above comes from plugin_args - confirm both "wordlist"
    # keys are meant to be distinct settings.
    try:
        whitelist = WordListLoader.get_advanced_wordlist_as_list(Config.plugin_config["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose("Wordlist '%s' not found.." % Config.plugin_config["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose("Wordlist '%s' is not a file." % Config.plugin_config["wordlist"])
        return

    #
    # Set a base line for dynamic sub-domains
    #
    # Resolve a few random (presumably nonexistent) subdomains:
    # if they all CNAME to the same target, the zone answers with
    # a wildcard and that target is treated as the baseline.
    m_virtual_domains = []
    for v in (generate_random_string(40) for x in xrange(3)):
        l_subdomain = ".".join((v, root))

        records = DNS.get_a(l_subdomain, also_CNAME=True)

        for rec in records:
            if rec.type == "CNAME":
                m_virtual_domains.append(rec.target)

    # If 3 subdomains are the same, set the base domain
    m_base_domain = None
    if len(set(m_virtual_domains)) == 1:
        m_base_domain = m_virtual_domains[0]

    # Configure the progress notifier.
    self.progress.set_total(len(wordlist))
    self.progress.min_delta = 1  # notify every 1%

    # For each subdomain in the wordlist...
    found = 0
    results = []
    visited = set()
    for prefix in wordlist:

        # Mark as completed before actually trying.
        # We can't put this at the end of the loop where it belongs,
        # because the "continue" statements would skip over this too.
        self.progress.add_completed()

        # Build the domain name.
        name = ".".join((prefix, root))

        # Skip if out of scope.
        if name not in Config.audit_scope:
            continue

        # Resolve the subdomain.
        records = DNS.get_a(name, also_CNAME=True)
        records.extend( DNS.get_aaaa(name, also_CNAME=True) )

        # If no DNS records were found, skip.
        if not records:
            continue

        # If CNAME is the base domain, skip
        # (it's just the wildcard DNS answering, not a real host).
        chk = [True for x in records if x.type == "CNAME" and x.target == m_base_domain]
        if len(chk) > 0 and all(chk):
            continue

        # We found a subdomain!
        found += 1
        Logger.log_more_verbose(
            "Subdomain found: %s" % name)

        # Create the Domain object for the subdomain.
        domain = Domain(name)
        results.append(domain)

        #
        # Check for Domain disclosure
        # (any discovered name not in the whitelist is flagged).
        #
        if prefix not in whitelist:
            d = DomainDisclosure(name,
                                 risk = 0,
                                 level = "low",
                                 title = "Possible subdomain leak",
                                 description = "A subdomain was discovered which may be an unwanted information disclosure."
                                 )
            d.add_resource(domain)
            results.append(d)

        # For each DNS record, grab the address or name.
        # Skip duplicated records.
        for rec in records:
            if rec.type == "CNAME":
                location = rec.target
            elif rec.type in ("A", "AAAA"):
                location = rec.address
            else:
                # Should not happen: keep the record anyway.
                results.append(rec)
                domain.add_information(rec)
                continue
            if location not in visited:
                visited.add(location)
                results.append(rec)
                domain.add_information(rec)

    # Log the results.
    if found:
        Logger.log(
            "Found %d subdomains for root domain: %s"
            % (found, root))
    else:
        Logger.log_verbose(
            "No subdomains found for root domain: %s" % root)

    # Return the results.
    return results
def sf_MALICIOUS_SUBDOMAIN(self, sf_module, source, raw_data): domain = Domain(source) vulnerability = MaliciousDomain(domain, tool_id = sf_module) return domain, vulnerability
def parse_nikto_results(info, output_filename): """ Run Nikto and convert the output to the GoLismero data model. :param info: Base URL to scan. :type info: BaseUrl :param output_filename: Path to the output filename. The format should always be CSV. :type output_filename: :returns: Results from the Nikto scan, and the vulnerability count. :rtype: list(Data), int """ # Parse the scan results. # On error log the exception and continue. results = [] vuln_count = 0 hosts_seen = set() urls_seen = {} try: if output_filename.startswith("/cygdrive/"): output_filename = cygwin_to_win_path(output_filename) with open(output_filename, "rU") as f: csv_reader = reader(f) for row in csv_reader: try: # Each row (except for the first) has always # the same 7 columns, but some may be empty. if len(row) < 7: continue host, ip, port, vuln_tag, method, path, text = row[:7] # Report domain names and IP addresses. if (info is None or host != info.hostname) and host not in hosts_seen: hosts_seen.add(host) if host in Config.audit_scope: results.append( Domain(host) ) if ip not in hosts_seen: hosts_seen.add(ip) if ip in Config.audit_scope: results.append( IP(ip) ) # Skip rows not informing of vulnerabilities. if not vuln_tag: continue # Calculate the vulnerable URL. if info is not None: target = urljoin(info.url, path) else: if port == 443: target = urljoin("https://%s/" % host, path) else: target = urljoin("http://%s/" % host, path) # Skip if out of scope. if target not in Config.audit_scope: continue # Report the URLs. if (target, method) not in urls_seen: url = Url(target, method) urls_seen[ (target, method) ] = url results.append(url) else: url = urls_seen[ (target, method) ] # Report the vulnerabilities. vuln = UrlVulnerability( url = url, level = "informational", # TODO: use the OSVDB API description = "%s: %s" % (vuln_tag, text), ) results.append(vuln) vuln_count += 1 # On error, log the exception and continue. 
except Exception, e: Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(format_exc()) # On error, log the exception. except Exception, e: Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(format_exc())
except socket.error: continue all_names = set() all_names.add(name) all_names.add(real_name) all_names.update(aliaslist) for name in all_names: if name and name not in visited: visited.add(name) with warnings.catch_warnings(): warnings.filterwarnings("ignore") in_scope = name in Config.audit_scope if not in_scope: Logger.log_more_verbose("Hostname out of scope: %s" % name) continue data = Domain(name) data.add_resource(info) results.append(data) for ip in addresslist: with warnings.catch_warnings(): warnings.filterwarnings("ignore") in_scope = ip in Config.audit_scope if not in_scope: Logger.log_more_verbose("IP address out of scope: %s" % ip) continue d = IP(ip) data.add_resource(d) results.append(d) text = "Found %d emails and %d hostnames for keyword %r" text = text % (len(all_emails), len(all_hosts), word)
def run(self, info): # Get the root domain only. root = info.root # Skip localhost. if root == "localhost": return # Skip if the root domain is out of scope. if root not in Config.audit_scope: return # Skip root domains we've already processed. if self.state.put(root, True): return # Attempt a DNS zone transfer. ns_servers, resolv = DNS.zone_transfer(root, ns_allowed_zone_transfer=True) # On failure, skip. if not resolv: Logger.log_verbose( "DNS zone transfer failed, server %r not vulnerable" % root) return # Create a Domain object for the root domain. domain = Domain(root) # Associate all the results with the root domain. for r in resolv: map(domain.add_information, r) # Add the root domain to the results. results = [] results.append(domain) # We have a vulnerability on each of the nameservers involved. msg = "DNS zone transfer successful, " if len(ns_servers) > 1: msg += "%d nameservers for %r are vulnerable!" msg %= (len(ns_servers), root) else: msg += "nameserver for %r is vulnerable!" % root Logger.log(msg) # If we don't have the name servers... if not ns_servers: # Assume the root domain also points to the nameserver. vulnerability = DNSZoneTransfer(domain, root) results.append(vulnerability) # If we have the name servers... else: # Create a vulnerability for each nameserver in scope. for ns in ns_servers: vulnerability = DNSZoneTransfer(domain, ns) results.append(vulnerability) # Return the results. return results
def parse_results(openvas_results, ip=None): """ Convert the OpenVAS scan results to the GoLismero data model. :param openvas_results: OpenVAS scan results. :type openvas_results: list(OpenVASResult) :param ip: (Optional) IP address to link the vulnerabilities to. :type ip: IP | None :returns: Scan results converted to the GoLismero data model. :rtype: list(Data) """ # This is where we'll store the results. results = [] # Remember the hosts we've seen so we don't create them twice. hosts_seen = {} # Maps of OpenVAS levels to GoLismero levels. LEVELS = { 'debug': 'informational', 'log': 'informational', 'low': "low", 'medium': 'middle', 'high': "high", } RISKS = { 'none': 0, 'debug': 0, 'log': 0, 'low': 1, 'medium': 2, 'high': 3, 'critical': 4 } # Do we have the OpenVAS plugin database? if not os.path.exists(openvas_db): Logger.log_error( "OpenVAS plugin not initialized, please run setup.py") return # Load the database. with open(openvas_db, "rb") as f: use_openvas_db = Pickler.load(f) # Get the configuration. import_log = Config.audit_config.boolean( Config.plugin_args.get("import_log", "no")) import_debug = Config.audit_config.boolean( Config.plugin_args.get("import_debug", "no")) # For each OpenVAS result... for opv in openvas_results: try: # Get the host. host = opv.host # Skip if we don't have a target host. if host is None: continue # Get the threat level. threat = getattr(opv, "threat", "log").lower() # Discard log and debug entries, keep only the vulnerabilities. if threat == "log" and not import_log: continue if threat == "debug" and not import_debug: continue # Get or create the vulnerable resource. target = ip if host in hosts_seen: target = hosts_seen[host] elif not ip or ip.address != host: try: target = IP(host) except ValueError: target = Domain(host) hosts_seen[host] = target results.append(target) # Extract the relevant information from the results. 
nvt = opv.nvt vid = opv.id oid = int(nvt.oid.split(".")[-1]) name = getattr(nvt, "name", None) cvss_base = getattr(nvt, "cvss_base", None) level = LEVELS.get(threat, "informational") risk = RISKS.get( getattr(opv.nvt, "risk_factor", "none").lower(), 0) # Get the vulnerability description. description = opv.raw_description if not description: description = nvt.description if not description: description = nvt.summary if not description: description = None # Extract the CVEs and Bugtraq IDs. cve = nvt.cve.split(", ") if nvt.cve else [] if "NOCVE" in cve: cve.remove("NOCVE") bid = [] if nvt.bid: bid.extend("BID-" + x for x in nvt.bid.split(", ")) if nvt.bugtraq: bid.extend("BID-" + x for x in nvt.bugtraq.split(", ")) if "NOBID" in bid: cve.remove("NOBID") # Extract the notes and add them to the description text. if opv.notes and description is not None: description += "\n" + "\n".join(" - " + note.text for note in opv.notes) # Extract the reference URLs from the description text. references = [] if description is not None: p = description.find("URL:") while p >= 0: p += 4 q2 = description.find("\n", p) q1 = description.find(",", p, q2) if q1 > p: q = q1 else: q = q2 if q < p: q = len(description) url = description[p:q].strip() try: url = parse_url(url).url references.append(url) except Exception: Logger.log_error(format_exc()) pass p = description.find("URL:", q) # Prepare the vulnerability properties. kwargs = { "title": name, "description": description, "references": references, "level": level, "risk": risk, "severity": risk, "impact": risk, "cvss_base": cvss_base, "cve": cve, "bid": bid, "tool_id": "openvas_plugin_%s" % oid, "custom_id": vid, } # If we have the OpenVAS plugin database, look up the plugin ID # that reported this vulnerability and create the vulnerability # using a specific class. Otherwise use the vulnerability class # for uncategorized vulnerabilities. 
classname = "UncategorizedVulnerability" if oid in use_openvas_db: classname = use_openvas_db[oid][0][0] # Create the Vulnerability object. try: clazz = globals()[classname] vuln = clazz(target, **kwargs) except Exception, e: t = format_exc() Logger.log_error_more_verbose( "Could not load vulnerability of type: %s" % classname) Logger.log_error_more_verbose(t) vuln = UncategorizedVulnerability(target, **kwargs) results.append(vuln) # Skip this result on error. except Exception, e: t = format_exc() Logger.log_error_verbose("Error parsing OpenVAS results: %s" % str(e)) Logger.log_error_more_verbose(t)
def recv_info(self, info):
    """
    Brute force subdomains of the given domain using a wordlist.

    :param info: Domain resource to expand.
    :type info: Domain

    :returns: Discovered subdomains with their DNS records, or None
        when the domain is skipped or the wordlist cannot be loaded.
    :rtype: list(Data) | None
    """

    # Work on the root domain only.
    base = info.root

    # Never brute force localhost.
    if base == "localhost":
        return

    # Bail out if this root domain was handled before.
    if self.state.put(base, True):
        return

    # Fetch the wordlist of subdomain prefixes.
    try:
        prefixes = WordListLoader.get_advanced_wordlist_as_list(
            Config.plugin_args["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose(
            "Wordlist '%s' not found.." % Config.plugin_args["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose(
            "Wordlist '%s' is not a file." % Config.plugin_args["wordlist"])
        return

    # Set up progress notifications.
    self.progress.set_total(len(prefixes))
    self.progress.min_delta = 1  # notify every 1%

    hits = 0
    output = []
    seen_locations = set()

    # Try every candidate subdomain in the wordlist.
    for label in prefixes:

        # Count this attempt right away: the "continue" statements
        # below would otherwise skip the progress update.
        self.progress.add_completed()

        # Compose the candidate subdomain name.
        candidate = "%s.%s" % (label, base)

        # Only test names inside the audit scope.
        if candidate not in Config.audit_scope:
            continue

        # Query both A and AAAA records (plus CNAME aliases).
        answers = DNS.get_a(candidate, also_CNAME=True)
        answers.extend(DNS.get_aaaa(candidate, also_CNAME=True))

        # No DNS answer means no such subdomain.
        if not answers:
            continue

        # Got one!
        hits += 1
        Logger.log_more_verbose("Subdomain found: %s" % candidate)

        # Report the subdomain itself.
        subdomain = Domain(candidate)
        output.append(subdomain)

        # Report each record whose resolved address or alias target
        # hasn't been seen yet (dedup shared across subdomains).
        for answer in answers:
            if answer.type == "CNAME":
                where = answer.target
            elif answer.type in ("A", "AAAA"):
                where = answer.address
            else:
                # Unexpected record type: keep it anyway.
                output.append(answer)
                subdomain.add_information(answer)
                continue
            if where not in seen_locations:
                seen_locations.add(where)
                output.append(answer)
                subdomain.add_information(answer)

    # Report how it went.
    if hits:
        Logger.log(
            "Found %d subdomains for root domain: %s" % (hits, base))
    else:
        Logger.log_verbose(
            "No subdomains found for root domain: %s" % base)

    # Hand the findings back to the framework.
    return output