def run_xsser(self, hostname, url, args):
    """
    Run XSSer against the given target.

    :param hostname: Hostname used to acquire the connection slot.
    :type hostname: str

    :param url: The URL to be tested.
    :type url: str

    :param args: The arguments to pass to XSSer.
    :type args: list

    :return: True if successful, False otherwise.
    :rtype: bool
    """
    Logger.log("Launching XSSer against: %s" % url)
    Logger.log_more_verbose("XSSer arguments: %s" % " ".join(args))

    # XSSer is bundled with the framework as an external tool.
    xsser_script = join(get_tools_folder(), "xsser", "xsser.py")

    # Run XSSer while holding a connection slot, timing the scan.
    with ConnectionSlot(hostname):
        t1 = time()
        code = run_external_tool(xsser_script, args,
                                 callback=Logger.log_verbose)
        t2 = time()

    # A non-zero exit code means the tool failed.
    if code:
        Logger.log_error("XSSer execution failed, status code: %d" % code)
        return False
    Logger.log("XSSer scan finished in %s seconds for target: %s"
               % (t2 - t1, url))
    return True
def run(self, info):
    """
    Look up public exploits in the Exploit-DB database for the given
    vulnerability and record any new Exploit-DB IDs on it.

    :param info: Vulnerability to check.
    :return: The updated vulnerability if new exploit IDs were found,
        None otherwise.
    """

    # Nothing to query without CVE or OSVDB identifiers.
    if not (info.cve or info.osvdb):
        Logger.log_more_verbose("No CVE or OSVDB tags found, skipped.")
        return

    # Merge the Exploit-DB hits with the IDs already known.
    found = self.query_exploitdb(info.cve, info.osvdb)
    found.update(info.edb)

    # Only report and persist when something new turned up.
    fresh = found.difference(info.edb)
    if fresh:
        references = sorted(convert_vuln_ids_to_references(fresh))
        Logger.log("Public exploits found:\n" +
                   "\n".join(" " + ref for ref in references))
        info.edb = found
        return info
def run(self, info):
    """
    Launch an Nmap port scan against the given IP address and convert
    the results to the GoLismero data model.

    :param info: IP address to scan.
    :return: Results parsed from the Nmap XML output.
    """

    # Build the command line arguments for Nmap.
    args = shlex.split(Config.plugin_args["args"])
    if info.version == 6 and "-6" not in args:
        args.append("-6")
    args.append(info.address)

    # The Nmap output will be saved in XML format in a temporary file.
    with tempfile(suffix=".xml") as output:
        args.append("-oX")
        args.append(output)

        # Run Nmap and capture the text output.
        Logger.log("Launching Nmap against: %s" % info.address)
        Logger.log_more_verbose("Nmap arguments: %s" % " ".join(args))
        with ConnectionSlot(info.address):
            t1 = time()
            code = run_external_tool("nmap", args,
                                     callback=Logger.log_verbose)
            t2 = time()

        # Log the output in extra verbose mode.
        if code:
            Logger.log_error("Nmap execution failed, status code: %d" % code)
        else:
            Logger.log("Nmap scan finished in %s seconds for target: %s"
                       % (t2 - t1, info.address))

        # Parse and return the results.
        # NOTE: must stay inside the "with" block, the temporary file is
        # deleted on exit.
        return self.parse_nmap_results(info, output)
def make_injection(self, target, args):
    """
    Run SQLMap against the given target.

    :param target: URL to scan.
    :type target: Url

    :param args: Arguments to pass to SQLMap.
    :type args: list(str)

    :return: True on success, False on failure.
    :rtype: bool
    """
    Logger.log("Launching SQLMap against: %s" % target)
    Logger.log_more_verbose("SQLMap arguments: %s" % " ".join(args))

    # Locate the bundled SQLMap script.
    script = join(get_tools_folder(), "sqlmap", "sqlmap.py")

    # Run SQLMap while holding a connection slot, timing the scan.
    with ConnectionSlot(target):
        start = time()
        status = run_external_tool(script, args, callback=Logger.log_verbose)
        stop = time()

    # A non-zero status code means the tool failed.
    if status:
        Logger.log_error("SQLMap execution failed, status code: %d" % status)
        return False
    Logger.log(
        "SQLMap scan finished in %s seconds for target: %s"
        % (stop - start, target))
    return True
def run(self, info):
    """
    Compare the given HTTP response against known default error page
    signatures and report a DefaultErrorPage vulnerability on a match.

    :param info: HTTP response to analyze.
    :type info: HTTP_Response

    :return: A DefaultErrorPage vulnerability if a signature matched,
        None otherwise.
    """
    if not isinstance(info, HTTP_Response):
        return
    response = info.data
    url = list(info.associated_resources)[0]

    # Load signatures.
    # FIX: use a "with" block so the file handle is not leaked.
    try:
        with open(plugin_data, "rb") as fd:
            signatures = pickle.load(fd)
    except pickle.PickleError:
        signatures = {}

    # Starting the search
    total = float(len(signatures))
    for step, (server_name, server_page) in enumerate(signatures.iteritems()):

        # Update status
        progress = float(step) / total
        self.update_status(progress=progress)

        # Similarity ratio between the response and this signature.
        level = get_diff_ratio(response, server_page)
        Logger.log(level)

        if level > 0.55:  # magic number :)
            # Match found.
            vulnerability = DefaultErrorPage(
                url, server_name,
                title="Default error page for server '%s'" % server_name)
            return vulnerability
def run(self, info):
    """
    Compare the given HTTP response against known default error page
    signatures and report a DefaultErrorPage vulnerability on a match.

    :param info: HTTP response to analyze.
    :type info: HTTP_Response

    :return: A DefaultErrorPage vulnerability if a signature matched,
        None otherwise.
    """
    if not isinstance(info, HTTP_Response):
        return
    response = info.data
    url = list(info.associated_resources)[0]

    # Load signatures.
    # FIX: close the signature file instead of leaking the handle.
    try:
        with open(plugin_data, "rb") as fd:
            signatures = pickle.load(fd)
    except pickle.PickleError:
        signatures = {}

    # Starting the search
    total = float(len(signatures))
    for step, (server_name, server_page) in enumerate(signatures.iteritems()):

        # Update status
        progress = float(step) / total
        self.update_status(progress=progress)

        # Similarity ratio between the response and this signature.
        level = get_diff_ratio(response, server_page)
        Logger.log(level)

        if level > 0.55:  # magic number :)
            # Match found.
            vulnerability = DefaultErrorPage(url, server_name,
                title="Default error page for server '%s'" % server_name)
            return vulnerability
def make_injection(self, target, command, args): """ Run sqlmap over the target. :param target: Base URL to scan. :type target: BaseUrl :param command: Path to the Nikto script. :type command: str :param args: Arguments to pass to Nikto. :type args: list(str) :return: True if runs is ok. False otherwise. :rtype: bool """ # Run Nmap and capture the text output. Logger.log("Launching sqlmap against: %s" % target) Logger.log_more_verbose("sqlmap arguments: %s" % " ".join(args)) with ConnectionSlot(target): t1 = time() code = run_external_tool(command, args, callback=Logger.log_verbose) t2 = time() # Log in extra verbose mode. if code: Logger.log_error("sqlmap execution failed, status code: %d" % code) return False else: Logger.log("sqlmap scan finished in %s seconds for target: %s"% (t2 - t1, target)) return True
def run(self, info):
    """
    Report a URL disclosure for the given folder URL.

    :param info: Folder URL to report.
    :type info: FolderURL

    :return: URL disclosure vulnerability for the given folder URL.
    :rtype: UrlDisclosure

    :raises TypeError: The input is not a FolderURL.
    """
    if isinstance(info, FolderURL):
        # Intentionally cause a warning to be shown.
        Logger.log("This is a log message.")
        return UrlDisclosure(info)
    raise TypeError("Expected FolderURL, got %r instead" % type(info))
def run_xsser(self,url,command,args):
    """
    Run XSSer against the given target.

    :param url: The URL to be tested.
    :type url: str

    :param command: Path to the XSSer script.
    :type command: str

    :param args: The arguments to pass to XSSer.
    :type args: list

    :return: True if the run was successful, False otherwise.
    :rtype: bool
    """
    Logger.log("Launching xsser against: %s" % url)
    Logger.log_more_verbose("xsser arguments: %s" % " ".join(args))

    # Time the external tool run.
    t1 = time()
    code = run_external_tool(command, args, callback=Logger.log_verbose)
    t2 = time()

    # Log in extra verbose mode.
    if code:
        Logger.log_error("xsser execution failed, status code: %d" % code)
        return False
    else:
        Logger.log("xsser scan finished in %s seconds for target: %s"% (t2 - t1, url))
        return True
def make_injection(self, target, args):
    """
    Run SQLMap against the given target.

    :param target: URL to scan.
    :type target: URL

    :param args: Arguments to pass to SQLMap.
    :type args: list(str)

    :return: True on success, False on failure.
    :rtype: bool
    """
    Logger.log("Launching SQLMap against: %s" % target)
    Logger.log_more_verbose("SQLMap arguments: %s" % " ".join(args))

    # Path to the SQLMap script shipped with the framework.
    sqlmap_path = join(get_tools_folder(), "sqlmap", "sqlmap.py")

    # Time the external run while holding a connection slot.
    with ConnectionSlot(target):
        started = time()
        exit_code = run_external_tool(sqlmap_path, args,
                                      callback=Logger.log_verbose)
        finished = time()

    # Report failure on a non-zero exit code.
    if exit_code:
        Logger.log_error(
            "SQLMap execution failed, status code: %d" % exit_code)
        return False
    Logger.log(
        "SQLMap scan finished in %s seconds for target: %s"
        % (finished - started, target))
    return True
def import_results(self, input_file):
    """
    Import Nmap results in XML format from the given file into the
    audit database.

    :param input_file: Path to the Nmap XML output file.
    :type input_file: str
    """
    parsed = NmapScanPlugin.parse_nmap_results(None, input_file)
    if not parsed:
        Logger.log_verbose("No data found in file: %s" % input_file)
        return
    Database.async_add_many(parsed)
    Logger.log(
        "Loaded %d elements from file: %s" % (len(parsed), input_file))
def run(self, info):
    """
    Search Exploit-DB for public exploits matching the vulnerability's
    CVE and OSVDB tags, and record any new Exploit-DB IDs on it.

    :param info: Vulnerability to check.
    :return: The updated vulnerability when new IDs were found,
        None otherwise.
    """

    # Without CVE or OSVDB identifiers there is nothing to look up.
    if not (info.cve or info.osvdb):
        Logger.log_more_verbose("No CVE or OSVDB tags found, skipped.")
        return

    # Query the database and merge in the IDs we already had.
    edb_ids = self.query_exploitdb(info.cve, info.osvdb)
    edb_ids.update(info.edb)

    # Report and store only previously unseen IDs.
    unseen = edb_ids.difference(info.edb)
    if unseen:
        lines = [" " + ref
                 for ref in sorted(convert_vuln_ids_to_references(unseen))]
        Logger.log("Public exploits found:\n" + "\n".join(lines))
        info.edb = edb_ids
        return info
def import_results(self, input_file):
    """
    Import SSLScan results in XML format from the given file into the
    audit database.

    :param input_file: Path to the SSLScan XML output file.
    :type input_file: str
    """
    parsed, vuln_count = SSLScanPlugin.parse_sslscan_results(input_file)
    if not parsed:
        Logger.log_verbose("No data found in file: %s" % input_file)
        return
    Database.async_add_many(parsed)
    Logger.log(
        "Loaded %d hosts and %d vulnerabilities from file: %s" %
        (len(parsed) - vuln_count, vuln_count, input_file))
def recv_info(self, info):
    """
    Run SQLMap against the given URL's GET and POST parameters and
    report any SQL injection vulnerabilities found.

    :param info: URL to test.
    :return: SQL injection vulnerabilities found, if any.
    """
    if not info.has_url_params and not info.has_post_params:
        return

    # Result info
    results = []

    # Get user args
    user_args = shlex.split(Config.plugin_args["args"])

    with tempdir() as output_dir:

        # Basic command line
        args = [
            "-u", info.url,
            "--batch",
            "--output-dir", output_dir
        ]

        # Add the user args
        args.extend(user_args)

        #
        # GET Parameters injection
        #
        if info.has_url_params:
            # Skip parameters injected by the web server itself.
            args.extend([
                "-p",
                ",".join([x for x in info.url_params
                          if x not in WEB_SERVERS_VARS]),
            ])
            r = self.make_injection(info.url, args)
            if r:
                results.extend(self.parse_sqlmap_results(info, output_dir))

        #
        # POST Parameters injection
        #
        # NOTE(review): "args" still contains the "-p" option added for
        # the GET run above when both kinds of parameters are present —
        # confirm this carry-over is intentional.
        if info.has_post_params:
            args.extend([
                "--data",
                "&".join(["%s=%s" % (k, v)
                          for k, v in info.post_params.iteritems()
                          if k not in WEB_SERVERS_VARS])
            ])
            r = self.make_injection(info.url, args)
            if r:
                results.extend(self.parse_sqlmap_results(info, output_dir))

    if results:
        Logger.log("Found %s SQL injection vulnerabilities." % len(results))
    else:
        Logger.log("No SQL injection vulnerabilities found.")

    return results
def run(self, info):
    """
    Run SQLMap against the given URL's GET and POST parameters and
    report any SQL injection vulnerabilities found.

    :param info: URL to test.
    :return: SQL injection vulnerabilities found, if any.
    """
    if not info.has_url_params and not info.has_post_params:
        return

    # Result info
    results = []

    # Get user args
    user_args = shlex.split(Config.plugin_args["args"])

    with tempdir() as output_dir:

        # Basic command line
        args = [
            "-u", info.url,
            "--batch",
            "--output-dir", output_dir
        ]

        # Add the user args
        args.extend(user_args)

        #
        # GET Parameters injection
        #
        if info.has_url_params:
            # Skip parameters injected by the web server itself.
            args.extend([
                "-p",
                ",".join([x for x in info.url_params
                          if x not in WEB_SERVERS_VARS]),
            ])
            r = self.make_injection(info.url, args)
            if r:
                results.extend(self.parse_sqlmap_results(info, output_dir))

        #
        # POST Parameters injection
        #
        # NOTE(review): when both GET and POST parameters exist, "args"
        # still carries the "-p" option from the GET run above — confirm
        # this is intentional.
        if info.has_post_params:
            args.extend([
                "--data",
                "&".join(["%s=%s" % (k, v)
                          for k, v in info.post_params.iteritems()
                          if k not in WEB_SERVERS_VARS])
            ])
            r = self.make_injection(info.url, args)
            if r:
                results.extend(self.parse_sqlmap_results(info, output_dir))

    if results:
        Logger.log("Found %s SQL injection vulnerabilities." % len(results))
    else:
        Logger.log("No SQL injection vulnerabilities found.")

    return results
def run(self, info):
    """
    GXplorer-style scan: fingerprint the target web server and build
    the set of predictable-resource wordlists to probe it with.

    :param info: URL to test.
    """
    m_url = info.url

    Logger.log("GXHacking against: %s" % m_url)
    Logger.log_more_verbose("Start to process URL: %r" % m_url)

    # Server specified by param?
    webserver_finger = Config.plugin_args.get("server_banner", None)
    if webserver_finger:
        server_canonical_name = webserver_finger
        servers_related = []  # Set with related web servers
    else:
        # User fingerprint info
        webserver_finger = info.get_associated_informations_by_category(
            WebServerFingerprint.information_type)
        if webserver_finger:
            webserver_finger = webserver_finger.pop()
            server_canonical_name = webserver_finger.canonical_name
            servers_related = webserver_finger.related  # Set with related web servers

    # Find XML files
    #new_file = find_xml_files(m_url)
    new_file = find_xml_files(m_url) + find_htm_file(m_url)

    wordlist = set()

    # Common wordlists
    try:
        w = Config.plugin_extra_config["common"]
        wordlist.update([l_w for l_w in w.itervalues()])
    except KeyError:
        Logger.log_error("Can't load common wordlists")

    # There is fingerprinting information?
    if webserver_finger:

        #
        # Load wordlists
        #
        # Bind the bound method once; it is called repeatedly below.
        wordlist_update = wordlist.update

        # Wordlist of server name
        try:
            w = Config.plugin_extra_config[
                "%s_predictables" % server_canonical_name]
            wordlist_update([l_w for l_w in w.itervalues()])
        except KeyError:
            Logger.log_error(
                "Can't load predictables wordlists for server: '%s'."
                % server_canonical_name)

        # Wordlist of related with the server found
        try:
            for l_servers_related in servers_related:
                w = Config.plugin_extra_config[
                    "%s_predictables" % l_servers_related]
                wordlist_update([l_w for l_w in w.itervalues()])
        except KeyError, e:
            Logger.log_error(
                "Can't load wordlists predictables wordlists for related webserver: '%s'" % e)
def run(self, data):
    """
    Example plugin entry point: log the received URL or the
    username/password relationship.

    :param data: Incoming data object.
    """
    # PUT YOUR CODE HERE
    if data.is_instance(URL):
        Logger.log_verbose("Found an URL! %s" % data.url)
        return
    if data.is_instance(Relationship(Username, Password)):
        Logger.log("Found a valid password! User: %s, Pass: %s"
                   % (data[0].name, data[1].password))
        return
    Logger.log_error("This should never happen...")
def run(self, data):
    """
    Example plugin entry point: log the received URL or the
    username/password relationship.

    :param data: Incoming data object.
    """
    # PUT YOUR CODE HERE
    if data.is_instance(URL):
        Logger.log_verbose("Found an URL! %s" % data.url)
    else:
        if data.is_instance(Relationship(Username, Password)):
            Logger.log(
                "Found a valid password! User: %s, Pass: %s" % (
                    data[0].name, data[1].password))
        else:
            Logger.log_error("This should never happen...")
def find_xml_files(url):
    """
    Probe the given URL for known GXplorer XML files and collect the
    object links they reference.

    :param url: Base URL to probe.
    :type url: str

    :return: Object links extracted from any XML files found.
    :rtype: list(str)
    """
    new_file = []
    for file_name in ['execute.xml', 'DeveloperMenu.xml']:
        # NOTE(review): stripping a leading "/" from the URL before
        # joining looks suspicious — confirm the intent.
        url_check = url[1:] if url.startswith("/") else url
        tmp_u = urljoin(url_check, file_name)
        p = HTTP.get_url(tmp_u, use_cache=False, method="GET")
        if p.status == "200":
            # Download and parse the XML file.
            file_save = download(tmp_u)
            tree = ET.fromstring(file_save.raw_data)
            try:
                for links in tree.findall('Object'):
                    Logger.log(links.find('ObjLink').text)
                    new_file.append(links.find('ObjLink').text)
            except Exception:
                # Best-effort: malformed entries are skipped silently.
                ##raise # XXX DEBUG
                pass
    return new_file
def recv_info(self, info):
    """
    Run SSLScan against the given host and report the SSL
    vulnerabilities found.

    :param info: Domain to scan.
    :return: Results parsed from the SSLScan XML output.
    """

    # Get the hostname to test.
    m_host = info.hostname

    # Workaround for a bug in SSLScan: if the target port doesn't answer
    # back the SSL handshake (i.e. if port 443 is open but another protocol
    # is being used) then SSLScan just blocks indefinitely.
    try:
        with ConnectionSlot(m_host):
            s = socket(AF_INET, SOCK_STREAM)
            try:
                s.settimeout(4.0)
                s.connect( (m_host, 443) )
                s = wrap_socket(s)
                s.shutdown(2)
            finally:
                s.close()
    except Exception:
        Logger.log_error_more_verbose(
            "Host %r doesn't seem to support SSL, aborting." % m_host)
        return

    # Create a temporary output file.
    with tempfile(suffix = ".xml") as output:

        # Build the command line arguments.
        args = [
            "--no-failed",
            "--xml=" + output,   # non standard cmdline parsing :(
            m_host
        ]

        # Run SSLScan and capture the text output.
        Logger.log("Launching SSLScan against: %s" % m_host)
        Logger.log_more_verbose("SSLScan arguments: %s" % " ".join(args))
        with ConnectionSlot(m_host):
            t1 = time()
            code = run_external_tool("sslscan", args,
                                     callback=Logger.log_verbose)
            t2 = time()
        if code:
            Logger.log_error(
                "SSLScan execution failed, status code: %d" % code)
        else:
            Logger.log("SSLScan scan finished in %s seconds for target: %s"
                       % (t2 - t1, m_host))

        # Parse and return the results.
        r, v = self.parse_sslscan_results(output)
        if v:
            Logger.log("Found %s SSL vulnerabilities." % v)
        else:
            Logger.log("No SSL vulnerabilities found.")
        return r
def __detect_wordpress_installation(self, url, wordpress_urls): """ Try to detect a wordpress instalation in the current path. :param url: URL where try to find the WordPress installation. :type url: str :param wordpress_urls: string with wordlist name with WordPress URLs. :type wordpress_urls: str :return: True if wordpress installation found. False otherwise. :rtype: bool """ Logger.log("Detecting Wordpress instalation in URI: '%s'." % url) total_urls = 0 urls_found = 0 for u in WordListLoader.get_wordlist(wordpress_urls): total_urls += 1 tmp_url = urljoin(url, u) r = HTTP.get_url(tmp_url, use_cache=False) if r.status == "200": urls_found += 1 discard_data(r) # If Oks > 85% continue if (urls_found / float(total_urls)) < 0.85: # If all fails, make another last test url_wp_admin = urljoin(url, "wp-admin/") try: p = HTTP.get_url(url_wp_admin, use_cache=False, allow_redirects=False) if p: discard_data(p) except Exception, e: return False if p.status == "302" and "wp-login.php?redirect_to=" in p.headers.get("Location", ""): return True else: return False
def command_update(parser, P, cmdParams, auditParams):
    """
    Implementation of the "update" command: pull the latest GoLismero
    code with Git and refresh the TLD names list, then exit.
    """

    # The update command takes no targets.
    if P.targets:
        parser.error("too many arguments")

    # Build a dummy plugin environment so the API can be used.
    with PluginTester(autoinit=False) as tester:
        tester.orchestrator_config.ui_mode = "console"
        tester.orchestrator_config.verbose = cmdParams.verbose
        tester.orchestrator_config.color = cmdParams.color
        tester.init_environment(mock_audit=False)

        verbose = cmdParams.verbose
        log_cb = Logger.log if verbose else (lambda x: x)

        # Pull the newest code from the Git repository.
        if verbose:
            Logger.log("Updating GoLismero...")
        run_external_tool("git", ["pull"], cwd=here, callback=log_cb)

        # Refresh the list of TLD names.
        if verbose:
            Logger.log("Updating list of TLD names...")
        import tldextract
        tldextract.TLDExtract().update(True)

        # Done!
        if verbose:
            Logger.log("Update complete.")
        exit(0)
def recvmsg(s):
    """
    Read one TLS record from the socket.

    :param s: Connected socket.
    :return: Tuple (type, version, payload), or (None, None, None) if
        the server closed the connection.
    """
    # A TLS record header is 5 bytes: type (1), version (2), length (2).
    header = recvall(s, 5)
    if header is None:
        Logger.log('Unexpected EOF receiving record header - server closed connection')
        return None, None, None
    rec_type, rec_ver, rec_len = struct.unpack('>BHH', header)

    # Read the record payload.
    payload = recvall(s, rec_len, 10)
    if payload is None:
        Logger.log('Unexpected EOF receiving record payload - server closed connection')
        return None, None, None

    Logger.log(' ... received message: type = %d, ver = %04x, length = %d'
               % (rec_type, rec_ver, len(payload)))
    return rec_type, rec_ver, payload
def recv_info(self, info): m_host = info.hostname # Create a temporary output file. with tempfile(suffix = ".xml") as output: # Build the command line arguments. args = [ "--no-failed", "--xml=%s" % output, m_host ] # Run Nmap and capture the text output. Logger.log("Launching SSLscan against: %s" % m_host) Logger.log_more_verbose("SSLscan arguments: %s" % " ".join(args)) t1 = time() code = run_external_tool("sslscan", args, callback=Logger.log_verbose) t2 = time() # Log the output in extra verbose mode. if code: Logger.log_error( "SSLscan execution failed, status code: %d" % code) else: Logger.log("SSLscan scan finished in %s seconds for target: %s" % (t2 - t1, m_host)) # Parse and return the results. r = self.parse_sslscan_results(info, output) if r: Logger.log("Found %s SSL vulns." % len(r)) else: Logger.log("No SSL vulns found.") return r
def recvmsg(s):
    """
    Read a single TLS record from the socket.

    :param s: Connected socket.
    :return: (record type, protocol version, payload) tuple, or
        (None, None, None) on EOF.
    """
    raw_header = recvall(s, 5)
    if raw_header is None:
        Logger.log(
            'Unexpected EOF receiving record header - server closed connection'
        )
        return None, None, None

    # Record header layout: type (1 byte), version (2), length (2).
    typ, ver, ln = struct.unpack('>BHH', raw_header)

    body = recvall(s, ln, 10)
    if body is None:
        Logger.log(
            'Unexpected EOF receiving record payload - server closed connection'
        )
        return None, None, None

    Logger.log(' ... received message: type = %d, ver = %04x, length = %d' %
               (typ, ver, len(body)))
    return typ, ver, body
def recv_info(self, info):
    """
    Run XSSer against the given URL's GET and POST parameters and
    report any cross-site scripting vulnerabilities found.

    :param info: URL to test.
    :return: XSS vulnerabilities found, if any.
    """
    if not isinstance(info, Url):
        return
    if not info.has_url_params and not info.has_post_params:
        Logger.log("URL '%s' has not parameters" % info.url)
        return

    # Get xss script executable
    xsser_script = self.get_xsser()

    results = []

    args = [
        "-u", info.url,
    ]
    with tempfile(prefix="tmpxss", suffix=".xml") as filename:
        args.extend([
            "--xml=%s" % filename
        ])
        if info.has_url_params:
            if self.run_xsser(info.url,xsser_script, args):
                results.extend(self.parse_xsser_result(info,filename))
        if info.has_post_params:
            # NOTE(review): when the URL has both GET and POST
            # parameters, the POST run reuses the same output file and
            # argument list — confirm this is intentional.
            args.extend([
                "-p",
                "&".join([ "%s=%s" % (k, v)
                           for k, v in info.post_params.iteritems()])
            ])
            if self.run_xsser(info.url,xsser_script, args):
                results.extend(self.parse_xsser_result(info,filename))

    if results:
        Logger.log("Found %s xss vulns." % len(results))
    else:
        Logger.log("No xss vulns found.")

    return results
def run(self, info): # Query PunkSPIDER. host_id = info.hostname host_id = parse_url(host_id).hostname host_id = ".".join(reversed(host_id.split("."))) d = self.query_punkspider(host_id) # Stop if we have no results. if not d: Logger.log("No results found for host: %s" % info.hostname) return # This is where we'll collect the data we'll return. results = [] # For each vulnerability... for v in d["data"]: try: # Future-proof checks. if v["protocol"] not in ("http", "https"): Logger.log_more_verbose( "Skipped non-web vulnerability: %s" % to_utf8(v["id"])) continue if v["bugType"] not in ("xss", "sqli", "bsqli"): Logger.log_more_verbose( "Skipped unknown vulnerability type: %s" % to_utf8(v["bugType"])) continue # Get the vulnerable URL, parameter and payload. url = to_utf8(v["vulnerabilityUrl"]) param = to_utf8(v["parameter"]) parsed = parse_url(url) payload = parsed.query_params[param] # Get the level. level = to_utf8(v["level"]) # Create the URL object. url_o = URL(url) results.append(url_o) # Get the vulnerability class. if v["bugType"] == "xss": clazz = XSS else: clazz = SQLInjection # Create the Vulnerability object. vuln = clazz( url_o, vulnerable_params={param: payload}, injection_point=clazz.INJECTION_POINT_URL, injection_type=to_utf8(v["bugType"]), # FIXME level=level, tool_id=to_utf8(v["id"]), ) print '------------' print vuln print type(vuln) print '------------' results.append(vuln) # Log errors. except Exception, e: tb = traceback.format_exc() Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(tb)
#-------------------------------------------------------------------------- def import_results(self, input_file): try: openvas_results = report_parser(input_file) golismero_results = OpenVASPlugin.parse_results(openvas_results) if golismero_results: Database.async_add_many(golismero_results) except Exception, e: fmt = format_exc() Logger.log_error("Could not load OpenVAS results from file: %s" % input_file) Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(fmt) else: if golismero_results: data_count = len(golismero_results) vuln_count = sum(1 for x in golismero_results if x.is_instance(Vulnerability)) if vuln_count == 0: vuln_msg = "" elif vuln_count == 1: vuln_msg = " (1 vulnerability)" else: vuln_msg = " (%d vulnerabilities)" % vuln_count Logger.log( "Loaded %d %s%s from file: %s" % (data_count, "results" if data_count != 1 else "result", vuln_msg, input_file)) else: Logger.log_error("No results found in file: %s" % input_file)
results.append(data) for ip in addresslist: with warnings.catch_warnings(): warnings.filterwarnings("ignore") in_scope = ip in Config.audit_scope if not in_scope: Logger.log_more_verbose("IP address out of scope: %s" % ip) continue d = IP(ip) data.add_resource(d) results.append(d) text = "Found %d emails and %d hostnames for keyword %r" text = text % (len(all_emails), len(all_hosts), word) if len(all_emails) + len(all_hosts) > 0: Logger.log(text) else: Logger.log_verbose(text) # Return the data. return results #-------------------------------------------------------------------------- @staticmethod def search(engine, word, limit = 100): """ Run a theHarvester search on the given engine. :param engine: Search engine. :type engine: str
def run(self, info):
    """
    Run XSSer against the given URL's GET and POST parameters and
    report any cross-site scripting vulnerabilities found.

    :param info: URL to test.
    :type info: URL

    :return: XSS vulnerabilities found, if any.
    """
    if not isinstance(info, URL):
        return
    if not info.has_url_params and not info.has_post_params:
        return

    # Get user args.
    # FIX: shlex.split() requires a string; the previous default of []
    # crashed when the "args" plugin argument was missing.
    user_args = shlex.split(Config.plugin_args.get("args", ""))

    # Result info
    results = []

    with tempfile(prefix="tmpxss", suffix=".xml") as filename:
        args = ["--xml=%s" % filename, "--no-head", "--threads", "1"]

        # Add the user args
        args.extend(user_args)

        if info.has_url_params:

            # Get payload for config injection point
            args.extend([
                "-u",
                "%s://%s" % (info.parsed_url.scheme, info.parsed_url.host),
            ])

            # When we want to try GET parameters, we must pass to xsser
            # one by one.
            for param, value in info.parsed_url.query_params.iteritems():

                # Not evaluate web server params
                if param in WEB_SERVERS_VARS:
                    continue

                # Prepare and reorder params
                fixed_params = "&".join([
                    "%s=%s" % (x, y)
                    for x, y in info.parsed_url.query_params.iteritems()
                    if x != param
                ])

                # Add param to text + fixed params
                if fixed_params:  # -> empty fixed params
                    params = "%s?%s&%s=" % (info.parsed_url.path,
                                            fixed_params, param)
                else:
                    params = "%s?%s=" % (info.parsed_url.path, param)

                # Prepary args for xsser
                args.extend(["-g", params])

            # Run xsser
            if self.run_xsser(info.hostname, info.url, args):
                results.extend(self.parse_xsser_result(info, filename))

        if info.has_post_params:
            args.extend([
                "-u", info.url,
                "-p",
                "&".join([
                    "%s=%s" % (k, v)
                    for k, v in info.post_params.iteritems()
                    if k not in WEB_SERVERS_VARS
                ]),
            ])
            if self.run_xsser(info.hostname, info.url, args):
                results.extend(self.parse_xsser_result(info, filename))

    if results:
        Logger.log("Found %s XSS vulnerabilities." % len(results))
    else:
        Logger.log_verbose("No XSS vulnerabilities found.")

    return results
openvas_results = VulnscanManager.transform(xml_root) golismero_results = OpenVASPlugin.parse_results(openvas_results) if golismero_results: Database.async_add_many(golismero_results) except Exception, e: fmt = format_exc() Logger.log_error( "Could not load OpenVAS results from file: %s" % input_file) Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(fmt) else: if golismero_results: data_count = len(golismero_results) vuln_count = sum( 1 for x in golismero_results if x.is_instance(Vulnerability) ) if vuln_count == 0: vuln_msg = "" elif vuln_count == 1: vuln_msg = " (1 vulnerability)" else: vuln_msg = " (%d vulnerabilities)" % vuln_count Logger.log( "Loaded %d %s%s from file: %s" % (data_count, "results" if data_count != 1 else "result", vuln_msg, input_file) ) else: Logger.log_error("No results found in file: %s" % input_file)
#-------------------------------------------------------------------------- def is_supported(self, input_file): if input_file and input_file.lower().endswith(".csv"): with open(input_file, "rU") as fd: return "Nikto" in fd.readline() return False #-------------------------------------------------------------------------- def import_results(self, input_file): try: results, vuln_count = NiktoPlugin.parse_nikto_results( None, input_file) if results: Database.async_add_many(results) except Exception, e: fmt = format_exc() Logger.log_error( "Could not load Nikto results from file: %s" % input_file) Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(fmt) else: if results: Logger.log( "Loaded %d vulnerabilities and %d resources from file: %s" % (vuln_count, len(results) - vuln_count, input_file) ) else: Logger.log_verbose("No data found in file: %s" % input_file)
def run_nikto(self, info, output_filename, command, args): """ Run Nikto and convert the output to the GoLismero data model. :param info: Base URL to scan. :type info: BaseUrl :param output_filename: Path to the output filename. The format should always be CSV. :type output_filename: :param command: Path to the Nikto executable. May also be the Perl interpreter executable, with the Nikto script as its first argument. :type command: str :param args: Arguments to pass to the executable. :type args: list(str) :returns: Results from the Nikto scan. :rtype: list(Data) """ # Append the output file name to the arguments. args.append("-output") args.append(output_filename) # Turn off DOS path warnings for Cygwin. # Does nothing on other platforms. env = os.environ.copy() cygwin = env.get("CYGWIN", "") if "nodosfilewarning" not in cygwin: if cygwin: cygwin += " " cygwin += "nodosfilewarning" env["CYGWIN"] = cygwin # Run Nikto and capture the text output. Logger.log("Launching Nikto against: %s" % info.hostname) Logger.log_more_verbose("Nikto arguments: %s %s" % (command, " ".join(args))) ##output, code = run_external_tool("C:\\cygwin\\bin\\perl.exe", ["-V"], env) # DEBUG output, code = run_external_tool(command, args, env) # Log the output in extra verbose mode. if code: Logger.log_error("Nikto execution failed, status code: %d" % code) if output: Logger.log_error_more_verbose(output) elif output: Logger.log_more_verbose(output) # Parse the results. results, vuln_count = self.parse_nikto_results(info, output_filename) # Log how many results we found. msg = ( "Nikto found %d vulnerabilities for host: %s" % ( vuln_count, info.hostname, ) ) if vuln_count: Logger.log(msg) else: Logger.log_verbose(msg) # Return the results. return results
#-------------------------------------------------------------------------- def is_supported(self, input_file): if input_file and input_file.lower().endswith(".csv"): with open(input_file, "rU") as fd: return "Nikto" in fd.readline() return False #-------------------------------------------------------------------------- def import_results(self, input_file): try: results, vuln_count = NiktoPlugin.parse_nikto_results( None, input_file) if results: Database.async_add_many(results) except Exception, e: Logger.log_error( "Could not load Nikto results from file: %s" % input_file) Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(format_exc()) else: if results: Logger.log( "Loaded %d vulnerabilities and %d resources from file: %s" % (vuln_count, len(results) - vuln_count, input_file) ) else: Logger.log_verbose("No data found in file: %s" % input_file)
def hit_hb(s):
    """
    Wait for the response to a malformed heartbeat request and decide
    whether the server is vulnerable to Heartbleed.

    :param s: Connected socket.
    :return: True if the server looks vulnerable, False otherwise.
    """
    while True:
        msg_type, msg_ver, payload = recvmsg(s)

        # Connection closed without an answer.
        if msg_type is None:
            Logger.log(
                'No heartbeat response received, server likely not vulnerable')
            return False

        # Heartbeat response record.
        if msg_type == 24:
            Logger.log('Received heartbeat response:\n' + hexdump(payload))
            if len(payload) > 3:
                Logger.log(
                    'WARNING: server returned more data than it should - server is vulnerable!'
                )
            else:
                Logger.log(
                    'Server processed malformed heartbeat, but did not return any extra data.'
                )
            return True

        # Alert record: the server rejected the request.
        if msg_type == 21:
            Logger.log('Received alert:\n' + hexdump(payload))
            Logger.log('Server returned error, likely not vulnerable')
            return False
def run(self, info):
    """
    Geolocate the given data object: street addresses for existing
    geolocations, traceroute hops, IP addresses (Freegeoip) and
    BSSID/MAC addresses (Skyhook).

    :param info: Data object to geolocate.
    :return: Geolocation data and related objects, when applicable.
    """

    # This is where we'll collect the data we'll return.
    results = []

    # Augment geolocation data obtained through other means.
    # (For example: image metadata)
    if info.is_instance(Geolocation):
        if not info.street_addr:
            street_addr = self.query_google(info.latitude, info.longitude)
            if street_addr:
                info.street_addr = street_addr
                #
                # TODO: parse the street address
                #
                Logger.log("Location (%s, %s) is in %s" % \
                           (info.latitude, info.longitude, street_addr))
        return

    # Extract IPs from traceroute results and geolocate them.
    if info.is_instance(Traceroute):
        addr_to_ip = {}
        for hop in info.hops:
            if hop is not None:
                if hop.address and hop.address not in addr_to_ip:
                    addr_to_ip[hop.address] = IP(hop.address)
        results.extend(addr_to_ip.itervalues())
        # Recursively geolocate each hop, deduplicating by coordinates.
        coords_to_geoip = {}
        for res in addr_to_ip.itervalues():
            r = self.run(res)
            if r:
                for x in r:
                    if not x.is_instance(Geolocation):
                        results.append(x)
                    else:
                        key = (x.latitude, x.longitude)
                        if key not in coords_to_geoip:
                            coords_to_geoip[key] = x
                            results.append(x)
                        else:
                            coords_to_geoip[key].merge(x)
        return results

    # Geolocate IP addresses using Freegeoip.
    if info.is_instance(IP):

        # Skip unsupported targets.
        if info.version != 4:
            return
        ip = info.address
        parsed = netaddr.IPAddress(ip)
        if parsed.is_loopback() or \
           parsed.is_private() or \
           parsed.is_link_local():
            return

        # Query the freegeoip.net service.
        kwargs = self.query_freegeoip(ip)
        if not kwargs:
            return

        # Translate the arguments for Geolocation().
        kwargs.pop("ip")

    # Geolocate BSSIDs using Skyhook.
    elif info.is_instance(BSSID) or info.is_instance(MAC):
        skyhook = self.query_skyhook(info.address)
        if not skyhook:
            return

        # Translate the arguments for Geolocation().
        kwargs = {
            "latitude": skyhook["latitude"],
            "longitude": skyhook["longitude"],
            "accuracy": skyhook["hpe"],
            "country_name": skyhook["country"],
            "country_code": skyhook["country_code"],
            "region_code": skyhook["state_code"],
            "region_name": skyhook["state"],
        }

    # Fail for other data types.
    else:
        assert False, "Internal error! Unexpected type: %r" % type(info)

    # Query the Google Geocoder to get the street address.
    street_addr = self.query_google(kwargs["latitude"], kwargs["longitude"])
    if street_addr:
        kwargs["street_addr"] = street_addr

    # Create a Geolocation object.
    geoip = Geolocation(**kwargs)
    geoip.add_resource(info)
    results.append(geoip)

    # Log the location.
    try:
        Logger.log_verbose("%s %s is located in %s" %
                           (info.display_name, info.address, geoip))
    except Exception, e:
        fmt = traceback.format_exc()
        Logger.log_error("Error: %s" % str(e))
        Logger.log_error_more_verbose(fmt)
def main(host, port, starttls=False, version="1.0"): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: Logger.log('Connecting...') s.connect((host, port)) try: if starttls: Logger.log('Sending STARTTLS...') re = s.recv(4096) s.sendall('ehlo starttlstest\n') re = s.recv(1024) if not 'STARTTLS' in re: Logger.log('STARTTLS not supported.') return s.sendall('starttls\n') re = s.recv(1024) Logger.log('Sending Client Hello...') s.sendall(hello) Logger.log('Waiting for Server Hello...') while True: typ, ver, pay = recvmsg(s) if typ == None: Logger.log( 'Server closed connection without sending Server Hello.' ) return # Look for server hello done message. if typ == 22 and ord(pay[0]) == 0x0E: break Logger.log('Sending heartbeat request...') s.sendall({ "1.0": hbv10, "1.1": hbv11, "1.2": hbv12, }[version]) return hit_hb(s) finally: try: s.shutdown(2) except Exception: pass finally: s.close()
for ip in addresslist: with warnings.catch_warnings(): warnings.filterwarnings("ignore") in_scope = ip in Config.audit_scope if not in_scope: Logger.log_more_verbose( "IP address out of scope: %s" % ip) continue d = IP(ip) data.add_resource(d) results.append(d) text = "Found %d emails and %d hostnames for keyword %r" text = text % (len(all_emails), len(all_hosts), word) if len(all_emails) + len(all_hosts) > 0: Logger.log(text) else: Logger.log_verbose(text) # Return the data. return results #-------------------------------------------------------------------------- @staticmethod def search(engine, word, limit=100): """ Run a theHarvester search on the given engine. :param engine: Search engine. :type engine: str
def command_update(parser, P, cmdParams, auditParams):
    """
    Implement the console "update" command.

    Pulls the latest GoLismero code with Git, refreshes the TLD names
    list, then unloads every GoLismero module and reloads the framework
    so each plugin can run its own "update" hook against the new code.
    Always terminates the process with exit(0) when finished.

    :param parser: Command line parser, used only to report errors.
    :param P: Parsed command line arguments.
    :param cmdParams: Orchestrator (command line) configuration.
    :param auditParams: Audit configuration (not used by this command).
    """

    # Fail if we got any arguments.
    if P.targets:
        parser.error("too many arguments")

    # Setup a dummy environment so we can call the API.
    with PluginTester(autoinit=False) as t:
        t.orchestrator_config.ui_mode = "console"
        t.orchestrator_config.verbose = cmdParams.verbose
        t.orchestrator_config.color = cmdParams.color
        t.init_environment(mock_audit=False)

        # Flag to tell if we fetched new code.
        did_update = False

        # Run Git here to download the latest version.
        if cmdParams.verbose:
            Logger.log("Updating GoLismero...")
        if os.path.exists(os.path.join(here, ".git")):
            # _GitHelper captures the Git output and detects whether
            # anything was actually pulled.
            helper = _GitHelper(cmdParams.verbose)
            run_external_tool("git", ["pull"], cwd=here, callback=helper)
            did_update = helper.did_update
        elif cmdParams.verbose:
            Logger.log_error(
                "Cannot update GoLismero if installed from a zip file! You"
                " must install it from the Git repository to get updates.")

        # Update the TLD names.
        if cmdParams.verbose:
            Logger.log("Updating list of TLD names...")
        import tldextract
        tldextract.TLDExtract().update(True)

        # If no code was updated, just quit here.
        if not did_update:
            if cmdParams.verbose:
                Logger.log("Update complete.")
            exit(0)

        # Tell the user we're about to restart.
        if cmdParams.verbose:
            Logger.log("Reloading GoLismero...")

    # Unload GoLismero.
    # Undo the multiprocessing monkey patches first, then drop every
    # GoLismero (and plugin) module from sys.modules so the next import
    # re-reads the freshly pulled code from disk.
    import golismero.patches.mp
    golismero.patches.mp.undo()
    x = here
    if not x.endswith(os.path.sep):
        x += os.path.sep
    our_modules = {
        n: m for n, m in sys.modules.iteritems()
        if n.startswith("golismero.") or (
            hasattr(m, "__file__") and m.__file__.startswith(x))
    }
    for n in our_modules.iterkeys():
        if n.startswith("golismero.") or n.startswith("plugin_"):
            del sys.modules[n]

    # Restart GoLismero.
    # Note that after this point we need to explicitly import the classes we
    # use, and make sure they're the newer versions of them. That means:
    # ALWAYS USE FULLY QUALIFIED NAMES FROM HERE ON.
    import golismero.api.logger
    import golismero.main.testing
    with golismero.main.testing.PluginTester(autoinit=False) as t:
        t.orchestrator_config.ui_mode = "console"
        t.orchestrator_config.verbose = cmdParams.verbose
        t.orchestrator_config.color = cmdParams.color
        t.init_environment(mock_audit=False)

        # Call the plugin hooks.
        all_plugins = sorted(
            t.orchestrator.pluginManager.load_plugins().iteritems())
        for plugin_id, plugin in all_plugins:
            if hasattr(plugin, "update"):
                if cmdParams.verbose:
                    golismero.api.logger.Logger.log(
                        "Updating plugin %r..." % plugin_id)
                try:
                    t.run_plugin_method(plugin_id, "update")
                except Exception:
                    golismero.api.logger.Logger.log_error(format_exc())

    # Done!
    if cmdParams.verbose:
        golismero.api.logger.Logger.log("Update complete.")
    exit(0)
def __call__(self, msg): self.log.append(msg) if self.verbose: Logger.log(msg)
if P.command == "UPDATE": # Fail if we got any arguments. if P.targets: parser.error("too many arguments") # Setup a dummy environment so we can call the API. with PluginTester(autoinit=False) as t: t.orchestrator_config.ui_mode = "console" t.orchestrator_config.verbose = cmdParams.verbose t.orchestrator_config.color = cmdParams.color t.init_environment(mock_audit=False) # Run Git here to download the latest version. if cmdParams.verbose: Logger.log("Updating GoLismero...") run_external_tool("git", ["pull"], cwd = here, callback = Logger.log if cmdParams.verbose else lambda x: x) # Done! Logger.log("Update complete.") exit(0) #-------------------------------------------------------------------------- # Check if all options are correct. if P.command != "SCAN": auditParams.plugin_load_overrides.append( (False, "testing") ) guessed_urls = []
def run_nikto(self, info, output_filename, command, args): """ Run Nikto and convert the output to the GoLismero data model. :param info: Base URL to scan. :type info: BaseURL :param output_filename: Path to the output filename. The format should always be CSV. :type output_filename: str :param command: Path to the Nikto script. :type command: str :param args: Arguments to pass to Nikto. :type args: list(str) :returns: Results from the Nikto scan. :rtype: list(Data) """ # Get the Nikto directory. cwd = split(abspath(command))[0] # On Windows, we must run Perl explicitly. # Also it only works under Cygwin. if sep == "\\": perl = find_cygwin_binary_in_path("perl.exe") if not perl: Logger.log_error("Perl interpreter not found, cannot run Nikto!") args.insert(0, command) command = perl # Run Nikto and capture the text output. Logger.log("Launching Nikto against: %s" % info.hostname) Logger.log_more_verbose( "Nikto arguments: %s %s" % (command, " ".join(args))) with ConnectionSlot(info.hostname): code = run_external_tool(command, args, cwd = cwd, callback = Logger.log_verbose) # Log the output in extra verbose mode. if code: Logger.log_error("Nikto execution failed, status code: %d" % code) # Parse the results. results, vuln_count = self.parse_nikto_results(info, output_filename) # Log how many results we found. msg = ( "Nikto found %d vulnerabilities for host: %s" % ( vuln_count, info.hostname, ) ) if vuln_count: Logger.log(msg) else: Logger.log_verbose(msg) # Return the results. return results
if P.command == "UPDATE": # Fail if we got any arguments. if P.targets: parser.error("too many arguments") # Setup a dummy environment so we can call the API. with PluginTester(autoinit=False) as t: t.orchestrator_config.ui_mode = "console" t.orchestrator_config.verbose = cmdParams.verbose t.orchestrator_config.color = cmdParams.color t.init_environment(mock_audit=False) # Run Git here to download the latest version. if cmdParams.verbose: Logger.log("Updating GoLismero...") run_external_tool("git", ["pull"], cwd = here, callback = Logger.log if cmdParams.verbose else lambda x: x) # Update the NIST CPE database. if cmdParams.verbose: Logger.log("Updating NIST CPE database...") t.orchestrator.cpedb.update() t.orchestrator.cpedb.vacuum() # Done! Logger.log("Update complete.") exit(0) #--------------------------------------------------------------------------
def recv_info(self, info): # Get the root domain only. root = info.root # Skip localhost. if root == "localhost": return # Skip if the root domain is out of scope. if root not in Config.audit_scope: return # Skip root domains we've already processed. if self.state.put(root, True): return # Attempt a DNS zone transfer. ns_servers, results = DNS.zone_transfer(root, ns_allowed_zone_transfer=True) # On failure, skip. if not results: Logger.log_verbose( "DNS zone transfer failed, server %r not vulnerable" % root) return # Create a Domain object for the root domain. domain = Domain(root) # Associate all the results with the root domain. map(domain.add_information, results) # Add the root domain to the results. results.append(domain) # We have a vulnerability on each of the nameservers involved. msg = "DNS zone transfer successful, " if len(ns_servers) > 1: msg += "%d nameservers for %r are vulnerable!" msg %= (len(ns_servers), root) else: msg += "nameserver for %r is vulnerable!" % root Logger.log(msg) # If we don't have the name servers... if not ns_servers: # Link the vulnerability to the root domain instead. vulnerability = DNSZoneTransfer(root) vulnerability.add_resource(domain) results.append(vulnerability) # If we have the name servers... else: # Create a vulnerability for each nameserver in scope. for ns in ns_servers: # Instance the vulnerability object. vulnerability = DNSZoneTransfer(ns) # Instance a Domain or IP object. try: resource = IP(ns) except ValueError: resource = Domain(ns) # Associate the resource to the root domain. domain.add_resource(resource) # Associate the nameserver to the vulnerability. vulnerability.add_resource(resource) # Add both to the results. results.append(resource) results.append(vulnerability) # Return the results. return results
def run(self, info): # Get the root domain only. root = info.root # Skip localhost. if root == "localhost": return # Skip if the root domain is out of scope. if root not in Config.audit_scope: return # Skip root domains we've already processed. if self.state.put(root, True): return # Attempt a DNS zone transfer. ns_servers, resolv = DNS.zone_transfer( root, ns_allowed_zone_transfer = True) # On failure, skip. if not resolv: Logger.log_verbose( "DNS zone transfer failed, server %r not vulnerable" % root) return # Create a Domain object for the root domain. domain = Domain(root) # Associate all the results with the root domain. for r in resolv: map(domain.add_information, r) # Add the root domain to the results. results = [] results.append(domain) # We have a vulnerability on each of the nameservers involved. msg = "DNS zone transfer successful, " if len(ns_servers) > 1: msg += "%d nameservers for %r are vulnerable!" msg %= (len(ns_servers), root) else: msg += "nameserver for %r is vulnerable!" % root Logger.log(msg) # If we don't have the name servers... if not ns_servers: # Assume the root domain also points to the nameserver. vulnerability = DNSZoneTransfer(domain, root) results.append(vulnerability) # If we have the name servers... else: # Create a vulnerability for each nameserver in scope. for ns in ns_servers: vulnerability = DNSZoneTransfer(domain, ns) results.append(vulnerability) # Return the results. return results
def recv_info(self, info): # This is where we'll collect the data we'll return. results = [] # Augment geolocation data obtained through other means. # (For example: image metadata) if info.is_instance(Geolocation): if not info.street_addr: street_addr = self.query_google(info.latitude, info.longitude) if street_addr: info.street_addr = street_addr # # TODO: parse the street address # Logger.log("(%s, %s) is in %s" % \ (info.latitude, info.longitude, street_addr)) return # Extract IPs and domains from traceroute results and geolocate them. if info.is_instance(Traceroute): hops = [] for hop in info.hops: if hop is not None: if hop.address: hops.append( IP(hop.address) ) elif hop.hostname: hops.append( Domain(hop.hostname) ) results.extend(hops) for res in hops: r = self.recv_info(res) if r: results.extend(r) return results # Get the IP address or domain name. # Skip unsupported targets. if info.is_instance(IP): if info.version != 4: return target = info.address parsed = netaddr.IPAddress(target) if parsed.is_loopback() or \ parsed.is_private() or \ parsed.is_link_local(): return elif info.is_instance(Domain): target = info.hostname if "." not in target: return else: assert False, type(info) # Query the freegeoip.net service. kwargs = self.query_freegeoip(target) if not kwargs: return # Remove the IP address from the response. address = kwargs.pop("ip") # Query the Google Geocoder. street_addr = self.query_google( kwargs["latitude"], kwargs["longitude"]) if street_addr: kwargs["street_addr"] = street_addr # Create a Geolocation object. geoip = Geolocation(**kwargs) geoip.add_resource(info) results.append(geoip) # Log the location. try: Logger.log_verbose("%s is in %s" % (target, geoip)) except Exception, e: fmt = traceback.format_exc() Logger.log_error("Error: %s" % str(e)) Logger.log_error_more_verbose(fmt)
def run(self, info): # Get the root domain only. root = info.root # Skip localhost. if root == "localhost": return # Skip if the root domain is out of scope. if root not in Config.audit_scope: return # Skip root domains we've already processed. if self.state.put(root, True): return # Attempt a DNS zone transfer. ns_servers, resolv = DNS.zone_transfer(root, ns_allowed_zone_transfer=True) # On failure, skip. if not resolv: Logger.log_verbose( "DNS zone transfer failed, server %r not vulnerable" % root) return # Create a Domain object for the root domain. domain = Domain(root) # Associate all the results with the root domain. for r in resolv: map(domain.add_information, r) # Add the root domain to the results. results = [] results.append(domain) # We have a vulnerability on each of the nameservers involved. msg = "DNS zone transfer successful, " if len(ns_servers) > 1: msg += "%d nameservers for %r are vulnerable!" msg %= (len(ns_servers), root) else: msg += "nameserver for %r is vulnerable!" % root Logger.log(msg) # If we don't have the name servers... if not ns_servers: # Assume the root domain also points to the nameserver. vulnerability = DNSZoneTransfer(domain, root) results.append(vulnerability) # If we have the name servers... else: # Create a vulnerability for each nameserver in scope. for ns in ns_servers: vulnerability = DNSZoneTransfer(domain, ns) results.append(vulnerability) # Return the results. return results
class ShodanPlugin(TestingPlugin):
    """
    This plugin tries to perform passive reconnaissance on a target
    using the Shodan web API.
    """


    #--------------------------------------------------------------------------
    def check_params(self):
        # Make sure we have an API key.
        self.get_api_key()


    #--------------------------------------------------------------------------
    def get_accepted_types(self):
        return [IP]


    #--------------------------------------------------------------------------
    def get_api_key(self):
        # The API key may come from the plugin arguments
        # or from the plugin configuration file.
        key = Config.plugin_args.get("apikey", None)
        if not key:
            key = Config.plugin_config.get("apikey", None)
        if not key:
            raise ValueError(
                "Missing API key! Get one at:"
                " http://www.shodanhq.com/api_doc")
        return key


    #--------------------------------------------------------------------------
    def run(self, info):
        """
        Query Shodan for the given IPv4 address and convert the response
        into GoLismero data objects (hostnames, geolocation, HTML
        content, banners).
        """

        # This is where we'll collect the data we'll return.
        results = []

        # Skip unsupported IP addresses. (Public IPv4 only.)
        if info.version != 4:
            return
        ip = info.address
        parsed = netaddr.IPAddress(ip)
        if parsed.is_loopback() or \
           parsed.is_private() or \
           parsed.is_link_local():
            return

        # Query Shodan for this host.
        try:
            key = self.get_api_key()
            api = WebAPI(key)
            shodan = api.host(ip)
        except Exception, e:
            tb = traceback.format_exc()
            Logger.log_error(
                "Error querying Shodan for host %s: %s" % (ip, str(e)))
            Logger.log_error_more_verbose(tb)
            return

        # Make sure we got the same IP address we asked for.
        # If not, switch to the IP address Shodan reported.
        if ip != shodan.get("ip", ip):
            Logger.log_error(
                "Shodan gave us a different IP address... weird!")
            Logger.log_error_verbose(
                "Old IP: %s - New IP: %s" % (ip, shodan["ip"]))
            ip = to_utf8( shodan["ip"] )
            info = IP(ip)
            results.append(info)

        # Extract all hostnames and link them to this IP address.
        # Note: sometimes Shodan sends IP addresses here! (?)
        seen_host = {}
        for hostname in shodan.get("hostnames", []):
            if hostname == ip:
                continue
            if hostname in seen_host:
                domain = seen_host[hostname]
            else:
                try:
                    try:
                        host = IP(hostname)
                    except ValueError:
                        host = Domain(hostname)
                except Exception:
                    # NOTE(review): there is no "continue" here, so when
                    # both constructors fail, "host" below is stale (or a
                    # NameError on the first iteration) — confirm intended.
                    tb = traceback.format_exc()
                    Logger.log_error_more_verbose(tb)
                seen_host[hostname] = host
                results.append(host)
                domain = host
            domain.add_resource(info)

        # Get the OS fingerprint, if available.
        os = to_utf8( shodan.get("os") )
        if os:
            Logger.log("Host %s is running %s" % (ip, os))
            pass # XXX TODO we'll need to reverse lookup the CPE

        # Get the GPS data, if available.
        # Complete any missing data using the default values.
        try:
            latitude = float( shodan["latitude"] )
            longitude = float( shodan["longitude"] )
        except Exception:
            latitude = None
            longitude = None
        if latitude is not None and longitude is not None:
            # Normalize each optional field to None (missing) or str.
            area_code = shodan.get("area_code")
            if not area_code:
                area_code = None
            else:
                area_code = str(area_code)
            country_code = shodan.get("country_code")
            if not country_code:
                country_code = shodan.get("country_code3")
                if not country_code:
                    country_code = None
                else:
                    country_code = str(country_code)
            else:
                country_code = str(country_code)
            country_name = shodan.get("country_name")
            if not country_name:
                country_name = None
            city = shodan.get("city")
            if not city:
                city = None
            dma_code = shodan.get("dma_code")
            if not dma_code:
                dma_code = None
            else:
                dma_code = str(dma_code)
            postal_code = shodan.get("postal_code")
            if not postal_code:
                postal_code = None
            else:
                postal_code = str(postal_code)
            region_name = shodan.get("region_name")
            if not region_name:
                region_name = None
            geoip = Geolocation(
                latitude, longitude,
                country_code = country_code,
                country_name = country_name,
                region_name = region_name,
                city = city,
                zipcode = postal_code,
                metro_code = dma_code,
                area_code = area_code,
            )
            results.append(geoip)
            geoip.add_resource(info)

        # Go through every result and pick only the latest ones.
        latest = {}
        for data in shodan.get("data", []):
            if (
                not "banner" in data or
                not "ip" in data or
                not "port" in data or
                not "timestamp" in data
            ):
                Logger.log_error("Malformed results from Shodan?")
                from pprint import pformat
                Logger.log_error_more_verbose(pformat(data))
                continue
            key = (
                data["ip"],
                data["port"],
                data["banner"],
            )
            try:
                # NOTE(review): reversed() returns an iterator, so the
                # "timestamp > latest[key][0]" comparison below compares
                # iterator objects, not date tuples — confirm intended
                # (wrapping in tuple() would compare the dates).
                timestamp = reversed(   # DD.MM.YYYY -> (YYYY, MM, DD)
                    map(int, data["timestamp"].split(".", 2)))
            except Exception:
                continue
            if key not in latest or timestamp > latest[key][0]:
                latest[key] = (timestamp, data)

        # Process the latest results.
        seen_isp_or_org = set()
        seen_html = set()
        for _, data in latest.values():

            # Extract all domains, but don't link them.
            for hostname in data.get("domains", []):
                if hostname not in seen_host:
                    try:
                        domain = Domain(hostname)
                    except Exception:
                        tb = traceback.format_exc()
                        Logger.log_error_more_verbose(tb)
                        continue
                    seen_host[hostname] = domain
                    results.append(domain)

            # We don't have any use for this information yet,
            # but log it so at least the user can see it.
            isp = to_utf8( data.get("isp") )
            org = to_utf8( data.get("org") )
            if org and org not in seen_isp_or_org:
                seen_isp_or_org.add(org)
                Logger.log_verbose(
                    "Host %s belongs to: %s" % (ip, org)
                )
            if isp and (not org or isp != org) and isp not in seen_isp_or_org:
                seen_isp_or_org.add(isp)
                Logger.log_verbose(
                    "IP address %s is provided by ISP: %s" % (ip, isp)
                )

            # Get the HTML content, if available.
            # Deduplicated by hash of the raw content.
            raw_html = to_utf8( data.get("html") )
            if raw_html:
                hash_raw_html = hash(raw_html)
                if hash_raw_html not in seen_html:
                    seen_html.add(hash_raw_html)
                    try:
                        html = HTML(raw_html)
                    except Exception:
                        html = None
                        tb = traceback.format_exc()
                        Logger.log_error_more_verbose(tb)
                    if html:
                        html.add_resource(info)
                        results.append(html)

            # Get the banner, if available.
            raw_banner = to_utf8( data.get("banner") )
            try:
                port = int( data.get("port", "0") )
            except Exception:
                port = 0
            if raw_banner and port:
                try:
                    banner = Banner(info, raw_banner, port)
                except Exception:
                    banner = None
                    tb = traceback.format_exc()
                    Logger.log_error_more_verbose(tb)
                if banner:
                    results.append(banner)

        # Was this host located somewhere else in the past?
        for data in reversed(shodan.get("data", [])):
            try:
                timestamp = reversed(   # DD.MM.YYYY -> (YYYY, MM, DD)
                    map(int, data["timestamp"].split(".", 2)))
                old_location = data.get("location")
                if old_location:
                    old_latitude = old_location.get("latitude", latitude)
                    old_longitude = old_location.get("longitude", longitude)
                    if (
                        old_latitude is not None and
                        old_longitude is not None and
                        (old_latitude != latitude or
                         old_longitude != longitude)
                    ):

                        # Get the geoip information.
                        area_code = old_location.get("area_code")
                        if not area_code:
                            area_code = None
                        country_code = old_location.get("country_code")
                        if not country_code:
                            country_code = old_location.get("country_code3")
                            if not country_code:
                                country_code = None
                        country_name = old_location.get("country_name")
                        if not country_name:
                            country_name = None
                        city = old_location.get("city")
                        if not city:
                            city = None
                        postal_code = old_location.get("postal_code")
                        if not postal_code:
                            postal_code = None
                        region_name = old_location.get("region_name")
                        if not region_name:
                            region_name = None
                        # NOTE(review): this builds the object from the
                        # *current* latitude/longitude rather than
                        # old_latitude/old_longitude — looks like it
                        # should use the old coordinates; confirm.
                        geoip = Geolocation(
                            latitude, longitude,
                            country_code = country_code,
                            country_name = country_name,
                            region_name = region_name,
                            city = city,
                            zipcode = postal_code,
                            area_code = area_code,
                        )

                        # If this is the first time we geolocate this IP,
                        # use this information as it if were up to date.
                        if latitude is None or longitude is None:
                            latitude = old_latitude
                            longitude = old_longitude
                            results.append(geoip)
                            geoip.add_resource(info)

                        # Otherwise, just log the event.
                        else:
                            discard_data(geoip)
                            where = str(geoip)
                            when = datetime.date(*timestamp)
                            msg = "Host %s used to be located at %s on %s."
                            msg %= (ip, where, when.strftime("%B %d, %Y"))
                            Logger.log_verbose(msg)
            except Exception:
                tb = traceback.format_exc()
                Logger.log_error_more_verbose(tb)

        # Return the results.
        return results
def run(self, info): # Get the base URL to the SpiderFoot API. base_url = Config.plugin_args["url"] # Find out if we should delete the scan when we're done. must_delete = Config.audit_config.boolean( Config.plugin_args.get("delete", "y")) # We need to catch SystemExit in order to stop and delete the scan. scan_id = None try: # Create a new scan. resp = post(urljoin(base_url, "startscan"), { "scanname": Config.audit_name, "scantarget": info.hostname, "modulelist": self.get_list("modulelist", "module_"), "typelist": self.get_list("typelist", "type_"), }) if resp.status_code != 200: r = resp.content p = r.find("<div class=\"alert alert-error\">") if p >= 0: p = r.find("<h4>", p) + 4 q = r.find("</h4>", p) m = r[p:q].strip() raise RuntimeError("Could not start scan, reason: " + m) # Wait until the scan is finished. try: interval = float(Config.plugin_args.get("interval", "5.0")) except Exception: interval = 5.0 url_scanlist = urljoin(base_url, "scanlist") last_msg = "" is_created = False scan_id = None while True: resp = get(url_scanlist) if resp.status_code != 200: status = "ERROR-FAILED" break scanlist = resp.json() found = False for scan in scanlist: scan_id, scan_name = scan[:2] status, count = scan[-2:] if scan_name == Config.audit_name: found = True break if found: is_created = True is_finished = status in ("FINISHED", "ABORTED", "ERROR-FAILED") msg = "Status: %s (%s elements%s)" % ( status, count, " so far" if not is_finished else "" ) if msg != last_msg: last_msg = msg Logger.log_verbose(msg) if is_finished: break else: if not is_created: Logger.log_verbose("Status: CREATING") else: Logger.log_verbose("Status: DELETED") Logger.log_error( "Scan deleted from the SpiderFoot UI, aborting!") return sleep(interval) # Tell the user if the scan didn't finish correctly. results = None try: has_partial = is_created and int(count) > 0 except Exception: has_partial = is_created try: # Get the scan results. 
if has_partial: Logger.log_error("Scan didn't finish correctly!") Logger.log("Attempting to load partial results...") parser = SpiderFootParser() url = parse_url("scaneventresultexport", base_url) url.query_params = {"id": scan_id, "type": "ALL"} resp = get(url.url) if resp.status_code != 200: Logger.log_error( "Could not get scan results, error code: %s" % resp.status_code) else: results = parser.parse(StringIO(resp.content)) if results: if len(results) == 1: Logger.log("Loaded 1 result.") else: Logger.log("Loaded %d results." % len(results)) else: Logger.log("No results loaded.") else: Logger.log_error("Scan didn't finish correctly, aborting!") finally: # Delete the scan. try: if is_created and must_delete: url = parse_url("scandelete", base_url) url.query_params = {"id": scan_id, "confirm": "1"} get(url.url) ##if resp.status_code != 200: ## Logger.log_error_more_verbose( ## "Could not delete scan, error code: %s" ## % resp.status_code) except Exception, e: tb = format_exc() Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(tb) # Return the results. return results
def run(self, info):
    """
    Brute force subdomains of the target's root domain using a wordlist
    and report each one found, flagging subdomains that are not in the
    whitelist as possible information disclosures.

    :param info: Domain to test.
    :returns: Domain, DomainDisclosure and DNS register objects,
        or None if the target was skipped.
    """

    # Get the root domain only.
    root = info.root

    # Skip localhost.
    if root == "localhost":
        return

    # Skip root domains we've already processed.
    if self.state.put(root, True):
        return

    # Load the subdomains wordlist.
    try:
        wordlist = WordListLoader.get_wordlist_as_list(
            Config.plugin_args["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose(
            "Wordlist '%s' not found.." % Config.plugin_args["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose(
            "Wordlist '%s' is not a file." % Config.plugin_args["wordlist"])
        return

    # Load the subdomains whitelist.
    try:
        whitelist = WordListLoader.get_wordlist_as_list(
            Config.plugin_config["wordlist"])
    except WordlistNotFound:
        Logger.log_error_verbose(
            "Wordlist '%s' not found.." % Config.plugin_config["wordlist"])
        return
    except TypeError:
        Logger.log_error_verbose(
            "Wordlist '%s' is not a file." % Config.plugin_config["wordlist"])
        return

    #
    # Set a base line for dynamic sub-domains: resolve a few random,
    # almost certainly nonexistent subdomains. Wildcard DNS setups
    # answer these with a CNAME to a fixed host.
    #
    m_virtual_domains = []
    for v in (generate_random_string(40) for x in xrange(3)):
        l_subdomain = ".".join((v, root))
        records = DNS.get_a(l_subdomain, also_CNAME=True)
        for rec in records:
            if rec.type == "CNAME":
                m_virtual_domains.append(rec.target)

    # If 3 subdomains are the same, set the base domain
    m_base_domain = None
    if len(set(m_virtual_domains)) == 1:
        m_base_domain = m_virtual_domains[0]

    # Configure the progress notifier.
    self.progress.set_total(len(wordlist))
    self.progress.min_delta = 1  # notify every 1%

    # For each subdomain in the wordlist...
    found = 0
    results = []
    visited = set()
    for prefix in wordlist:

        # Mark as completed before actually trying.
        # We can't put this at the end of the loop where it belongs,
        # because the "continue" statements would skip over this too.
        self.progress.add_completed()

        # Build the domain name.
        name = ".".join((prefix, root))

        # Skip if out of scope.
        if name not in Config.audit_scope:
            continue

        # Resolve the subdomain.
        records = DNS.get_a(name, also_CNAME=True)
        records.extend( DNS.get_aaaa(name, also_CNAME=True) )

        # If no DNS records were found, skip.
        if not records:
            continue

        # If CNAME is the base domain, skip
        # (the wildcard answered, so the subdomain doesn't really exist).
        chk = [True for x in records
               if x.type == "CNAME" and x.target == m_base_domain]
        if len(chk) > 0 and all(chk):
            continue

        # We found a subdomain!
        found += 1
        Logger.log_more_verbose(
            "Subdomain found: %s" % name)

        # Create the Domain object for the subdomain.
        domain = Domain(name)
        results.append(domain)

        #
        # Check for Domain disclosure
        #
        if prefix not in whitelist:
            d = DomainDisclosure(domain,
                                 risk = 0,
                                 level = "low",
                                 title = "Possible subdomain leak",
                                 description = "A subdomain was discovered which may be an unwanted information disclosure."
                                 )
            results.append(d)

        # For each DNS record, grab the address or name.
        # Skip duplicated records.
        for rec in records:
            if rec.type == "CNAME":
                location = rec.target
            elif rec.type in ("A", "AAAA"):
                location = rec.address
            else: # should not happen...
                results.append(rec)
                domain.add_information(rec)
                continue
            if location not in visited:
                visited.add(location)
                results.append(rec)
                domain.add_information(rec)

    # Log the results.
    if found:
        Logger.log(
            "Found %d subdomains for root domain: %s" % (found, root))
    else:
        Logger.log_verbose(
            "No subdomains found for root domain: %s" % root)

    # Return the results.
    return results
def recv_info(self, info): Logger.log(info) if not isinstance(info, Url): return if not info.has_url_params and not info.has_post_params: Logger.log("URL '%s' has not parameters" % info.url) return # Get sqlmap script executable sqlmap_script = self.get_sqlmap() results = [] with tempdir() as output_dir: # Basic command line args = [ "-u", info.url, "-b", "--batch", "--output-dir", output_dir, "-u", info.url, ] # # GET Parameters injection # if info.has_url_params: args = [ "-p", ",".join(info.url_params), ] r = self.make_injection(info.url, sqlmap_script, args) # Parse and return the results. if r: results.extend(self.parse_sqlmap_results(info, output_dir)) # # POST Parameters injection # if info.has_post_params: args = [ "--data", "&".join([ "%s=%s" % (k, v) for k, v in info.post_params.iteritems()]) ] r = self.make_injection(info.url, sqlmap_script, args) # Parse and return the results. if r: results.extend(self.parse_sqlmap_results(info, output_dir)) if results: Logger.log("Found %s SQL injection vulns." % len(results)) else: Logger.log("No SQL injection vulns found.") return results
class PunkSPIDER(TestingPlugin): """ This plugin tries to perform passive reconnaissance on a target using the PunkSPIDER vulnerability lookup engine. """ #-------------------------------------------------------------------------- def get_accepted_types(self): return [Domain] #-------------------------------------------------------------------------- def run(self, info): # Query PunkSPIDER. host_id = info.hostname host_id = parse_url(host_id).hostname host_id = ".".join(reversed(host_id.split("."))) d = self.query_punkspider(host_id) # Stop if we have no results. if not d: Logger.log("No results found for host: %s" % info.hostname) return # This is where we'll collect the data we'll return. results = [] # For each vulnerability... for v in d["data"]: try: # Future-proof checks. if v["protocol"] not in ("http", "https"): Logger.log_more_verbose( "Skipped non-web vulnerability: %s" % to_utf8(v["id"])) continue if v["bugType"] not in ("xss", "sqli", "bsqli"): Logger.log_more_verbose( "Skipped unknown vulnerability type: %s" % to_utf8(v["bugType"])) continue # Get the vulnerable URL, parameter and payload. url = to_utf8(v["vulnerabilityUrl"]) param = to_utf8(v["parameter"]) parsed = parse_url(url) payload = parsed.query_params[param] # Get the level. level = to_utf8(v["level"]) # Create the URL object. url_o = URL(url) results.append(url_o) # Get the vulnerability class. if v["bugType"] == "xss": clazz = XSS else: clazz = SQLInjection # Create the Vulnerability object. vuln = clazz( url_o, vulnerable_params={param: payload}, injection_point=clazz.INJECTION_POINT_URL, injection_type=to_utf8(v["bugType"]), # FIXME level=level, tool_id=to_utf8(v["id"]), ) print '------------' print vuln print type(vuln) print '------------' results.append(vuln) # Log errors. except Exception, e: tb = traceback.format_exc() Logger.log_error_verbose(str(e)) Logger.log_error_more_verbose(tb) # Log how many vulnerabilities we found. 
count = int(len(results) / 2) if count == 0: Logger.log("No vulnerabilities found for host: " + info.hostname) elif count == 1: Logger.log("Found one vulnerability for host: " + info.hostname) else: Logger.log("Found %d vulnerabilities for host: %s" % (count, info.hostname)) # Return the results. return results
def run_nikto(self, info, output_filename, command, args): """ Run Nikto and convert the output to the GoLismero data model. :param info: Base URL to scan. :type info: BaseUrl :param output_filename: Path to the output filename. The format should always be CSV. :type output_filename: str :param command: Path to the Nikto script. :type command: str :param args: Arguments to pass to Nikto. :type args: list(str) :returns: Results from the Nikto scan. :rtype: list(Data) """ # Get the Nikto directory. cwd = split(abspath(command))[0] # On Windows, we must run Perl explicitly. # Also it only works under Cygwin. if sep == "\\": perl = find_cygwin_binary_in_path("perl.exe") if not perl: Logger.log_error( "Perl interpreter not found, cannot run Nikto!") args.insert(0, command) command = perl # Run Nikto and capture the text output. Logger.log("Launching Nikto against: %s" % info.hostname) Logger.log_more_verbose("Nikto arguments: %s %s" % (command, " ".join(args))) code = run_external_tool(command, args, cwd=cwd, callback=Logger.log_verbose) # Log the output in extra verbose mode. if code: Logger.log_error("Nikto execution failed, status code: %d" % code) # Parse the results. results, vuln_count = self.parse_nikto_results(info, output_filename) # Log how many results we found. msg = ("Nikto found %d vulnerabilities for host: %s" % ( vuln_count, info.hostname, )) if vuln_count: Logger.log(msg) else: Logger.log_verbose(msg) # Return the results. return results