def HTTP_response_headers_analyzer(response_header_1, response_header_2):
    """
    Compare two sets of HTTP response headers to estimate whether both
    responses carry the same content, without needing the response bodies.

    Volatile headers (Date, cache-related) are stripped before comparing.

    :param response_header_1: mapping of HTTP response header -> value.
    :type response_header_1: dict(str -> str)

    :param response_header_2: mapping of HTTP response header -> value.
    :type response_header_2: dict(str -> str)

    :return: similarity ratio between 0.0 and 1.0. Values near 1.0 mean
        the responses are very similar; values near 0.0 mean they differ.
    :rtype: float
    """
    # Headers that change between otherwise identical responses.
    m_invalid_headers = (
        "Date",
        "Expires",
        "Last-Modified",
    )

    # NOTE(review): the filter is case-sensitive although HTTP header names
    # are case-insensitive -- preserved from the original behavior.
    # .items() works on both Python 2 and Python 3 (iteritems() was Py2-only).
    m_res1 = ''.join(
        "%s:%s" % (k, v)
        for k, v in response_header_1.items()
        if k not in m_invalid_headers
    )
    m_res2 = ''.join(
        "%s:%s" % (k, v)
        for k, v in response_header_2.items()
        if k not in m_invalid_headers
    )

    return get_diff_ratio(m_res1, m_res2)
def HTTP_response_headers_analyzer(response_header_1, response_header_2):
    """
    Estimate, from the headers alone, whether two HTTP responses match the
    same content. Volatile headers (like Date or cache info) are removed
    before the comparison.

    :param response_header_1: mapping of HTTP response header -> value.
    :type response_header_1: dict(str -> str)

    :param response_header_2: mapping of HTTP response header -> value.
    :type response_header_2: dict(str -> str)

    :return: level of similarity between 0.0 and 1.0: near 1.0 means the
        two responses are very similar, near 0.0 means they differ.
    :rtype: float
    """
    # Headers whose values vary between otherwise identical responses.
    VOLATILE_HEADERS = frozenset(("Date", "Expires", "Last-Modified"))

    def _flatten(headers):
        # Serialize the headers as "name:value" pairs, skipping volatile
        # ones. Uses .items() for Python 2/3 compatibility.
        return ''.join(
            "%s:%s" % (k, v)
            for k, v in headers.items()
            if k not in VOLATILE_HEADERS
        )

    return get_diff_ratio(_flatten(response_header_1),
                          _flatten(response_header_2))
def matches_by_value_with_level(self, word):
    """
    Search a word in the values of the wordlist and return the matches
    together with their level of correspondence.

    The matching level is a value between 0.0 and 1.0.

    :param word: word to search.
    :type word: str

    :return: set of (KEY, VALUE, LEVEL) tuples with the matches found.
    :rtype: set(tuple)
    """
    if not word:
        # FIX(review): the original returned a list here but a set on the
        # normal path; keep the return type consistent.
        return set()

    word = str(word)

    m_return = set()
    m_return_add = m_return.add

    # NOTE(review): values must be hashable iterables (e.g. strings or
    # tuples) for the set insertion below to work -- TODO confirm what
    # self.__wordlist actually stores.
    for v in self.__wordlist.values():  # .values(): Py2/Py3 compatible
        if word not in v:
            continue
        for l in v:
            if word == l:
                m_return_add((l, v, get_diff_ratio(word, l)))

    return m_return
def run(self, info):
    """
    Compare the given HTTP response against a database of default error
    page signatures and report a vulnerability when one matches.

    :param info: HTTP response to analyze.
    :type info: HTTP_Response

    :return: the vulnerability found, or None when no signature matches.
    :rtype: DefaultErrorPage | None
    """
    if not isinstance(info, HTTP_Response):
        return

    response = info.data
    url = list(info.associated_resources)[0]

    # Load the signatures database.
    # NOTE(review): pickle.load on a data file is unsafe if the file can
    # be tampered with -- kept as-is, flagged for review.
    try:
        # 'with' guarantees the file is closed (the original leaked it).
        with open(plugin_data, "rb") as f:
            signatures = pickle.load(f)
    except pickle.PickleError:
        signatures = {}

    # Compare the response against every known signature.
    total = float(len(signatures))
    for step, (server_name, server_page) in enumerate(signatures.items()):

        # Report progress to the orchestrator.
        progress = float(step) / total
        self.update_status(progress=progress)

        level = get_diff_ratio(response, server_page)
        Logger.log(level)

        if level > 0.55:  # magic number :)

            # Match found.
            vulnerability = DefaultErrorPage(
                url, server_name,
                title="Default error page for server '%s'" % server_name)
            return vulnerability
def run(self, info):
    """
    Look for a default server error page matching the given HTTP response.

    :param info: HTTP response to analyze.
    :type info: HTTP_Response

    :return: the vulnerability found, or None when nothing matches.
    :rtype: DefaultErrorPage | None
    """
    if not isinstance(info, HTTP_Response):
        return

    page_text = info.data
    target_url = list(info.associated_resources)[0]

    # Load the signature database from disk.
    # NOTE(review): unpickling a tampered file can execute arbitrary code;
    # behavior preserved, flagged for review.
    try:
        with open(plugin_data, "rb") as db_file:  # closed automatically
            signatures = pickle.load(db_file)
    except pickle.PickleError:
        signatures = {}

    signature_count = float(len(signatures))

    # Walk every known default error page signature.
    for index, (server_name, server_page) in enumerate(signatures.items()):

        # Keep the orchestrator informed of our progress.
        self.update_status(progress=float(index) / signature_count)

        similarity = get_diff_ratio(page_text, server_page)
        Logger.log(similarity)

        # Empirically chosen threshold.
        if similarity > 0.55:
            # Match found: report the default error page.
            return DefaultErrorPage(
                target_url, server_name,
                title="Default error page for server '%s'" % server_name)
def __detect_wordpress_installation(self, url, wordpress_urls):
    """
    Try to detect a WordPress installation in the current path.

    :param url: URL where to look for the WordPress installation.
    :type url: str

    :param wordpress_urls: name of the wordlist with WordPress URLs.
    :type wordpress_urls: str

    :return: True if a WordPress installation was found, False otherwise.
    :rtype: bool
    """
    Logger.log_more_verbose(
        "Detecting Wordpress instalation in URI: '%s'." % url)
    total_urls = 0
    urls_found = 0

    error_page = get_error_page(url).raw_data

    for u in WordListLoader.get_wordlist(wordpress_urls):
        total_urls += 1
        tmp_url = urljoin(url, u)

        r = HTTP.get_url(tmp_url, use_cache=False)
        if r.status == "200":

            # Discard generic error pages served with a 200 status.
            ratio = get_diff_ratio(r.raw_response, error_page)
            if ratio < 0.35:
                urls_found += 1

        discard_data(r)

    # More than 85% of the probed URLs look like real pages: WordPress is
    # almost certainly installed.
    # FIX(review): the original fell through and returned None (falsy) in
    # this case, contradicting the check below; also guard total_urls == 0
    # to avoid ZeroDivisionError on an empty wordlist.
    if total_urls and (urls_found / float(total_urls)) >= 0.85:
        return True

    # If all fails, make one last test: the wp-admin login redirect.
    url_wp_admin = urljoin(url, "wp-admin/")

    try:
        p = HTTP.get_url(url_wp_admin, use_cache=False,
                         allow_redirects=False)
        if p:
            discard_data(p)
    except Exception:  # Py3-compatible (was: except Exception, e)
        return False

    # FIX(review): the original dereferenced p.status even when the
    # request returned nothing.
    if not p:
        return False

    return (p.status == "302" and
            "wp-login.php?redirect_to=" in p.headers.get("Location", ""))
def __detect_wordpress_installation(self, url, wordpress_urls):
    """
    Probe a set of well-known WordPress URLs to decide whether a WordPress
    installation lives under the given path.

    :param url: URL where to look for the WordPress installation.
    :type url: str

    :param wordpress_urls: name of the wordlist with WordPress URLs.
    :type wordpress_urls: str

    :return: True if a WordPress installation was found, False otherwise.
    :rtype: bool
    """
    Logger.log_more_verbose(
        "Detecting Wordpress instalation in URI: '%s'." % url)

    probed = 0
    hits = 0

    # Baseline error page, used to tell real pages from soft-404s.
    error_page = get_error_page(url).raw_data

    for entry in WordListLoader.get_wordlist(wordpress_urls):
        probed += 1
        candidate = urljoin(url, entry)

        reply = HTTP.get_url(candidate, use_cache=False)
        if reply.status == "200":
            # A page that differs enough from the error page counts as real.
            if get_diff_ratio(reply.raw_response, error_page) < 0.35:
                hits += 1
        discard_data(reply)

    # FIX(review): the original returned None (falsy) when >= 85% of the
    # URLs answered, defeating its own check; also avoid dividing by zero
    # when the wordlist is empty.
    if probed and (hits / float(probed)) >= 0.85:
        return True

    # Last resort: an installed WordPress redirects wp-admin/ to the login.
    admin_url = urljoin(url, "wp-admin/")
    try:
        admin_reply = HTTP.get_url(admin_url, use_cache=False,
                                   allow_redirects=False)
        if admin_reply:
            discard_data(admin_reply)
    except Exception:  # Py3-compatible (was: except Exception, e)
        return False

    # FIX(review): guard against a missing response before dereferencing.
    if not admin_reply:
        return False

    location = admin_reply.headers.get("Location", "")
    return admin_reply.status == "302" and \
        "wp-login.php?redirect_to=" in location
def recv_info(self, info):
    """
    Fetch the target URL and compare the page against the known default
    error page signatures.

    :param info: resource whose URL will be fetched.

    :return: [vulnerability, response] when a signature matches,
        otherwise None.
    """
    # Get the response page.
    response = HTTP.get_url(info.url, callback=self.check_response)

    if response:
        try:
            # Look for a match.
            page_text = response.data

            total = float(len(signatures))
            # .items() works on both Python 2 and 3 (was iteritems()).
            for step, (server_name, server_page) in \
                    enumerate(signatures.items()):

                # Update status.
                progress = float(step) / total
                self.update_status(progress=progress)

                level = get_diff_ratio(page_text, server_page)

                if level > 0.95:  # magic number :)

                    # Match found.
                    vulnerability = DefaultErrorPage(info, server_name)
                    vulnerability.add_information(response)
                    return [vulnerability, response]

            # Discard the response if no match was found.
            discard_data(response)

        except Exception:
            # Discard the response on error and propagate the exception.
            discard_data(response)
            raise
def recv_info(self, info):
    """
    Download the page for the given resource and match it against the
    default error page signature database.

    :param info: resource whose URL will be fetched.

    :return: [vulnerability, response] when a signature matches,
        otherwise None.
    """
    # Get the response page.
    response = HTTP.get_url(info.url, callback=self.check_response)
    if not response:
        return

    try:
        page_text = response.data
        signature_total = float(len(signatures))

        # .items() replaces the Py2-only iteritems().
        for step, (server_name, server_page) in \
                enumerate(signatures.items()):

            # Report progress.
            self.update_status(progress=float(step) / signature_total)

            if get_diff_ratio(page_text, server_page) > 0.95:  # magic number :)
                # Match found.
                vulnerability = DefaultErrorPage(info, server_name)
                vulnerability.add_information(response)
                return [vulnerability, response]

        # No match: the response is of no further use.
        discard_data(response)

    except Exception:
        # Discard the response on error, then re-raise.
        discard_data(response)
        raise
def is_URL_in_windows(self, main_url):
    """
    Detect if the remote platform is Windows or \\*NIX. To do that, take
    the first in-scope link and request it twice: once as-is and once in
    uppercase. If both responses match, the server resolves paths
    case-insensitively => Windows; otherwise => \\*NIX.

    :returns: True if the remote host is a Windows system, False if it is
        \\*NIX, or None if unknown.
    :rtype: bool | None
    """
    # Links that would log us out if followed.
    m_forbidden = (
        "logout",
        "logoff",
        "exit",
        "sigout",
        "signout",
    )

    # Get the main web page.
    m_r = download(main_url, callback=self.check_download)
    if not m_r:
        return None
    discard_data(m_r)

    # Get the links from the page.
    if m_r.information_type == Information.INFORMATION_HTML:
        m_links = extract_from_html(m_r.raw_data, main_url)
    else:
        m_links = extract_from_text(m_r.raw_data, main_url)

    if not m_links:
        return None

    # Get the first link of the page that's in the scope of the audit.
    m_first_link = None
    for u in m_links:
        if u in Config.audit_scope and not any(x in u for x in m_forbidden):
            m_first_link = u
            break

    if not m_first_link:
        return None

    # Request the same link twice: original and uppercased.

    # Original
    m_response_orig = HTTP.get_url(
        m_first_link, callback=self.check_response)  # FIXME handle exceptions!
    discard_data(m_response_orig)

    # Uppercase
    m_response_upper = HTTP.get_url(
        m_first_link.upper(), callback=self.check_response)  # FIXME handle exceptions!
    discard_data(m_response_upper)

    # Compare them.
    m_orig_data = m_response_orig.raw_response if m_response_orig else ""
    m_upper_data = m_response_upper.raw_response if m_response_upper else ""

    m_match_level = get_diff_ratio(m_orig_data, m_upper_data)

    # If the responses match by more than 95%, both URLs serve the same
    # content => Windows; else => *NIX.
    # (FIX(review): the original comment said 90% but the code uses 0.95.)
    return m_match_level > 0.95
def __find_plugins(self, url, plugins_wordlist, update_func):
    """
    Try to find available WordPress plugins.

    :param url: base URL to test.
    :type url: str

    :param plugins_wordlist: path to the wordlist with the plugins list.
    :type plugins_wordlist: str

    :param update_func: function used to update the plugin status.
    :type update_func: function

    :return: list of lists in the format:
        list([PLUGIN_NAME, PLUGIN_URL, PLUGIN_INSTALLED_VERSION,
              PLUGIN_LAST_VERSION, [CVE1, CVE2...]])
    :rtype: list(list())
    """
    results = []

    # Candidate files that may leak the installed plugin version.
    urls_to_test = {
        "readme.txt": r"(Stable tag:[\svV]*)([0-9\.]+)",
        "README.txt": r"(Stable tag:[\svV]*)([0-9\.]+)",
    }

    # Generate the error page, used to tell real pages from soft-404s.
    error_response = get_error_page(url).raw_data

    # Load the plugins info.
    plugins = []
    with open(plugins_wordlist, "rU") as f:
        for line in f:
            plugins.append(line.replace("\n", ""))

    # Calculate sizes.
    total_plugins = len(plugins)

    # Parse the wordlist entries as CSV rows.
    csv_info = csv.reader(plugins)

    # Process the URLs.
    for i, plugin_row in enumerate(csv_info):

        # Plugin properties.
        plugin_URI = plugin_row[0]
        plugin_name = plugin_row[1]
        plugin_last_version = plugin_row[2]
        plugin_CVEs = [] if plugin_row[3] == "" else plugin_row[3].split("|")

        # Update status (percentage of processed plugins).
        update_func((float(i) * 100.0) / float(total_plugins))

        # Make the plugin base URL.
        partial_plugin_url = "%s/%s" % (
            url, "wp-content/plugins/%s" % plugin_URI)

        # Test each URL that may contain plugin version info.
        for target, regex in urls_to_test.items():

            plugin_url = "%s/%s" % (partial_plugin_url, target)

            # Try to get the plugin file.
            p = None
            try:
                p = HTTP.get_url(plugin_url, use_cache=False)
                if p:
                    discard_data(p)
            except Exception as e:  # Py3-compatible (was: except Exception, e)
                Logger.log_error_more_verbose(
                    "Error while download: '%s': %s" % (plugin_url, str(e)))
                continue

            # FIX(review): the original dereferenced 'p' even when the
            # request returned nothing, raising AttributeError.
            if not p:
                continue

            plugin_installed_version = None

            if p.status == "403":  # Installed, but inaccesible
                plugin_installed_version = "Unknown"

            elif p.status == "200":

                # Check that the page is not a generic "not found" page
                # served with a 200 status code.
                if get_diff_ratio(error_response, p.raw_response) < 0.52:

                    # Find the version.
                    tmp_version = re.search(regex, p.raw_response)
                    if tmp_version is not None:
                        plugin_installed_version = tmp_version.group(2)

            # Store info.
            if plugin_installed_version is not None:
                Logger.log(
                    "Discovered plugin: '%s (installed version: %s)'"
                    " (latest version: %s)"
                    % (plugin_name, plugin_installed_version,
                       plugin_last_version))
                results.append([
                    plugin_name,
                    plugin_url,
                    plugin_installed_version,
                    plugin_last_version,
                    plugin_CVEs,
                ])

                # Plugin found -> no more URL tests for this plugin.
                break

    # FIX(review): the original built 'results' but never returned it,
    # contradicting its own docstring.
    return results
def is_URL_in_windows(self, main_url):
    """
    Detect if the remote platform is Windows or \\*NIX by requesting the
    first in-scope link twice, original-case and uppercased. Matching
    responses mean case-insensitive paths => Windows; else => \\*NIX.

    :returns: True if the remote host is a Windows system, False if it is
        \\*NIX, or None if unknown.
    :rtype: bool | None
    """
    # Links that would end our session if followed.
    forbidden_words = (
        "logout",
        "logoff",
        "exit",
        "sigout",
        "signout",
    )

    # Fetch the main web page.
    main_page = download(main_url, callback=self.check_download)
    if not main_page:
        return None
    discard_data(main_page)

    # Extract the links from the page.
    if main_page.information_type == Information.INFORMATION_HTML:
        links = extract_from_html(main_page.raw_data, main_url)
    else:
        links = extract_from_text(main_page.raw_data, main_url)

    if not links:
        return None

    # Pick the first link that is in scope and not a logout-style URL.
    probe_link = None
    for candidate in links:
        if candidate in Config.audit_scope and \
                not any(w in candidate for w in forbidden_words):
            probe_link = candidate
            break

    if not probe_link:
        return None

    # Request the link twice: as-is and uppercased.
    reply_orig = HTTP.get_url(
        probe_link, callback=self.check_response)  # FIXME handle exceptions!
    discard_data(reply_orig)

    reply_upper = HTTP.get_url(
        probe_link.upper(), callback=self.check_response)  # FIXME handle exceptions!
    discard_data(reply_upper)

    # Compare both responses.
    data_orig = reply_orig.raw_response if reply_orig else ""
    data_upper = reply_upper.raw_response if reply_upper else ""

    # Responses matching by more than 95% => case-insensitive paths =>
    # Windows; otherwise *NIX.
    # (FIX(review): the original comment said 90% but the code uses 0.95.)
    return get_diff_ratio(data_orig, data_upper) > 0.95
def __find_plugins(self, url, plugins_wordlist, update_func):
    """
    Try to find available WordPress plugins and their installed versions.

    :param url: base URL to test.
    :type url: str

    :param plugins_wordlist: path to the wordlist with the plugins list.
    :type plugins_wordlist: str

    :param update_func: function used to update the plugin status.
    :type update_func: function

    :return: list of lists in the format:
        list([PLUGIN_NAME, PLUGIN_URL, PLUGIN_INSTALLED_VERSION,
              PLUGIN_LAST_VERSION, [CVE1, CVE2...]])
    :rtype: list(list())
    """
    results = []

    # Files that commonly leak the installed plugin version.
    urls_to_test = {
        "readme.txt": r"(Stable tag:[\svV]*)([0-9\.]+)",
        "README.txt": r"(Stable tag:[\svV]*)([0-9\.]+)",
    }

    # Baseline error page, used to discard soft-404 responses.
    error_response = get_error_page(url).raw_data

    # Load the plugins wordlist.
    plugins = []
    with open(plugins_wordlist, "rU") as wordlist_file:
        for raw_line in wordlist_file:
            plugins.append(raw_line.replace("\n", ""))

    total_plugins = len(plugins)

    # Each wordlist entry is a CSV row: URI, name, latest version, CVEs.
    for i, plugin_row in enumerate(csv.reader(plugins)):

        plugin_URI = plugin_row[0]
        plugin_name = plugin_row[1]
        plugin_last_version = plugin_row[2]
        plugin_CVEs = [] if plugin_row[3] == "" else plugin_row[3].split("|")

        # Report progress as a percentage.
        update_func((float(i) * 100.0) / float(total_plugins))

        # Base URL of this plugin.
        partial_plugin_url = "%s/%s" % (
            url, "wp-content/plugins/%s" % plugin_URI)

        # Probe each version-revealing file.
        for target, regex in urls_to_test.items():

            plugin_url = "%s/%s" % (partial_plugin_url, target)

            p = None
            try:
                p = HTTP.get_url(plugin_url, use_cache=False)
                if p:
                    discard_data(p)
            except Exception as e:  # Py3-compatible (was: except Exception, e)
                Logger.log_error_more_verbose(
                    "Error while download: '%s': %s" % (plugin_url, str(e)))
                continue

            # FIX(review): guard against a missing response before using
            # p.status (the original raised AttributeError on None).
            if not p:
                continue

            plugin_installed_version = None

            if p.status == "403":
                # Installed, but not accessible.
                plugin_installed_version = "Unknown"

            elif p.status == "200":
                # Make sure this is not a generic error page with a 200.
                if get_diff_ratio(error_response, p.raw_response) < 0.52:
                    # Extract the version number.
                    tmp_version = re.search(regex, p.raw_response)
                    if tmp_version is not None:
                        plugin_installed_version = tmp_version.group(2)

            if plugin_installed_version is not None:
                Logger.log(
                    "Discovered plugin: '%s (installed version: %s)'"
                    " (latest version: %s)"
                    % (plugin_name, plugin_installed_version,
                       plugin_last_version))
                results.append([
                    plugin_name,
                    plugin_url,
                    plugin_installed_version,
                    plugin_last_version,
                    plugin_CVEs,
                ])

                # Plugin found -> stop testing URLs for this plugin.
                break

    # FIX(review): the original never returned 'results' although the
    # docstring promises a list.
    return results
class OSFingerprinting(TestingPlugin):
    """
    Plugin to fingerprint the remote OS.
    """


    #----------------------------------------------------------------------
    def get_accepted_info(self):
        # Resources this plugin knows how to process.
        return [IP, BaseUrl]


    #----------------------------------------------------------------------
    def recv_info(self, info):
        """
        Main function for OS fingerprint. Get a domain or IP and return
        the fingerprint results.

        :param info: resource to fingerprint.
        :type info: IP | BaseUrl

        :return: OS fingerprint, or None if nothing could be detected.
        :rtype: OSFingerprint | None
        """
        #
        # Detection methods and their weights.
        #
        # The weight is a value between 1-5.
        #
        FINGERPRINT_METHODS_OS_AND_VERSION = {
            'ttl': {
                'function': self.ttl_platform_detection,
                'weight': 2
            }
        }

        FUNCTIONS = None  # Fingerprint methods to run
        m_host = None
        is_windows = None

        if isinstance(info, IP):
            m_host = info.address
            FUNCTIONS = ['ttl']
        else:  # BaseUrl
            m_host = info.hostname
            FUNCTIONS = ['ttl']

            # Try to detect if the remote system is a Windows.
            m_windows_host = "%s://%s:%s" % (info.parsed_url.scheme,
                                             info.parsed_url.host,
                                             info.parsed_url.port)
            is_windows = self.is_URL_in_windows(m_windows_host)

        # Logging
        Logger.log_more_verbose(
            "Starting OS fingerprinting plugin for site: %s" % m_host)

        m_counter = Counter()

        # Run the detection functions.
        for f in FUNCTIONS:
            l_function = FINGERPRINT_METHODS_OS_AND_VERSION[f]['function']

            ### For future use
            ### l_weight = FINGERPRINT_METHODS_OS_AND_VERSION[f]['weight']

            # Run
            results = l_function(m_host)
            if results:
                for l_r in results:
                    m_counter[l_r] += 1

        # Return value
        m_return = None

        #
        # Filter the results
        #
        if len(m_counter) > 0:
            # Looking for a Windows system
            if is_windows:
                # If Windows is detected
                l_counter = Counter()

                # Extract Windows systems.
                # NOTE(review): the counter keys are indexed as tuples
                # below (key[0], key[1]), so comparing the whole key to the
                # string "windows" looks always-False -- TODO confirm
                # whether this should be: x[0] == "windows".
                for x, y in m_counter.items():  # .items(): Py2/3 compatible
                    if "windows" == x:
                        l_counter[x] += y

                # Replace the counter with the filtered one.
                m_counter = l_counter

            # Get the most common systems.
            l_most_common = m_counter.most_common(5)

            # The first element is the detected OS.
            m_OS_family = l_most_common[0][0][0]
            m_OS_version = l_most_common[0][0][1]

            # The next 4 are the 'others'.
            m_length = float(len(l_most_common))
            m_others = {
                "%s-%s" % (l_most_common[i][0][0], l_most_common[i][0][1]):
                    float('{:.2f}'.format(l_most_common[i][1] / m_length))
                for i in range(1, len(l_most_common), 1)  # range: Py2/3 safe
            }

            # Create the result data.
            m_return = OSFingerprint(m_OS_family, m_OS_version,
                                     others=m_others)

        elif is_windows is not None:
            if is_windows:
                # Windows system detected.
                m_return = OSFingerprint("windows")
            else:
                # *NIX system detected.
                m_return = OSFingerprint("unix_or_compatible")

        # If there is information, associate it with the resource.
        if m_return:
            info.add_information(m_return)

        return m_return


    #----------------------------------------------------------------------
    #
    # Platform detection methods
    #
    #----------------------------------------------------------------------
    def is_URL_in_windows(self, main_url):
        """
        Detect if the remote platform is Windows or \\*NIX. To do that,
        take the first in-scope link and request it twice: once as-is and
        once in uppercase. If both responses match, the server resolves
        paths case-insensitively => Windows; otherwise => \\*NIX.

        :returns: True if the remote host is a Windows system, False if
            it is \\*NIX, or None if unknown.
        :rtype: bool | None
        """
        # Links that would log us out if followed.
        m_forbidden = (
            "logout",
            "logoff",
            "exit",
            "sigout",
            "signout",
        )

        # Get the main web page.
        m_r = download(main_url, callback=self.check_download)
        if not m_r or not m_r.raw_data:
            return None
        discard_data(m_r)

        # Get the links from the page.
        m_links = None
        try:
            if m_r.information_type == Information.INFORMATION_HTML:
                m_links = extract_from_html(m_r.raw_data, main_url)
            else:
                m_links = extract_from_text(m_r.raw_data, main_url)
        except TypeError:  # Py3-compatible (was: except TypeError, e)
            Logger.log_error_more_verbose("Plugin error: %s" % format_exc())
            return None

        if not m_links:
            return None

        # Get the first link of the page that's in the scope of the audit.
        m_first_link = None
        for u in m_links:
            if u in Config.audit_scope and \
                    not any(x in u for x in m_forbidden):
                m_first_link = u
                break

        if not m_first_link:
            return None

        # Request the same link twice: original and uppercased.

        # Original
        m_response_orig = HTTP.get_url(
            m_first_link, callback=self.check_response)  # FIXME handle exceptions!
        discard_data(m_response_orig)

        # Uppercase
        m_response_upper = HTTP.get_url(
            m_first_link.upper(), callback=self.check_response)  # FIXME handle exceptions!
        discard_data(m_response_upper)

        # Compare them.
        m_orig_data = m_response_orig.raw_response if m_response_orig else ""
        m_upper_data = m_response_upper.raw_response if m_response_upper else ""

        m_match_level = get_diff_ratio(m_orig_data, m_upper_data)

        # Responses matching by more than 95% => same URL served for both
        # => Windows; else *NIX.
        # (FIX(review): the original comment said 90% but the code uses 0.95.)
        return m_match_level > 0.95