def _test_DNS(self, original_response, dns_wildcard_url):
    """
    Compare http://www.domain.tld/ against http://domain.tld/ and record
    in the KB whether the target uses a DNS wildcard configuration.
    """
    headers = Headers([('Host', dns_wildcard_url.get_domain())])
    try:
        modified_response = self._uri_opener.GET(
            original_response.get_url(), cache=True, headers=headers)
    except w3afException:
        return

    bodies_differ = relative_distance_lt(modified_response.get_body(),
                                         original_response.get_body(),
                                         0.35)

    if bodies_differ:
        desc = 'The target site has NO DNS wildcard, and the contents' \
               ' of "%s" differ from the contents of "%s".'
        desc = desc % (dns_wildcard_url, original_response.get_url())

        info = Info('No DNS wildcard', desc, modified_response.id,
                    self.get_name())
        info.set_url(dns_wildcard_url)
    else:
        desc = 'The target site has a DNS wildcard configuration, the'\
               ' contents of "%s" are equal to the ones of "%s".'
        desc = desc % (dns_wildcard_url, original_response.get_url())

        info = Info('DNS wildcard', desc, modified_response.id,
                    self.get_name())
        info.set_url(original_response.get_url())

    # Both outcomes are stored under the same KB key
    kb.kb.append(self, 'dns_wildcard', info)
    om.out.information(info.get_desc())
def _analyze_methods(self, url, allowed_methods, id_list):
    """
    Store the allowed HTTP methods for @url in the KB, using a dedicated
    KB key when DAV methods are among them.
    """
    dav_methods_found = set(allowed_methods).intersection(self.DAV_METHODS)

    if dav_methods_found:
        # dav is enabled!
        # Save the results in the KB so that other plugins can use this
        # information
        desc = 'The URL "%s" has the following allowed methods. These'\
               ' include DAV methods and should be disabled: %s'
        desc = desc % (url, ', '.join(allowed_methods))

        info = Info('DAV methods enabled', desc, id_list, self.get_name())
        info.set_url(url)
        info['methods'] = allowed_methods

        kb.kb.append(self, 'dav-methods', info)
    else:
        # Save the results in the KB so that other plugins can use this
        # information. Do not remove these information, other plugins
        # REALLY use it !
        desc = 'The URL "%s" has the following enabled HTTP methods: %s'
        desc = desc % (url, ', '.join(allowed_methods))

        info = Info('Allowed HTTP methods', desc, id_list, self.get_name())
        info.set_url(url)
        info['methods'] = allowed_methods

        kb.kb.append(self, 'methods', info)
def _PUT(self, domain_path):
    """
    Test the HTTP PUT method against @domain_path and report DAV
    misconfigurations to the KB.
    """
    # Upload a small file with random name and content
    upload_url = domain_path.url_join(rand_alpha(5))
    rnd_content = rand_alnum(6)
    put_response = self._uri_opener.PUT(upload_url, data=rnd_content)

    # Check if the file was actually uploaded
    res = self._uri_opener.GET(upload_url, cache=True)

    if res.get_body() == rnd_content:
        msg = 'File upload with HTTP PUT method was found at resource:' \
              ' "%s". A test file was uploaded to: "%s".'
        msg = msg % (domain_path, res.get_url())

        v = Vuln('Insecure DAV configuration', msg, severity.HIGH,
                 [put_response.id, res.id], self.get_name())
        v.set_url(upload_url)
        v.set_method('PUT')

        self.kb_append(self, 'dav', v)

    # Report some common errors
    elif put_response.get_code() == 500:
        msg = 'DAV seems to be incorrectly configured. The web server' \
              ' answered with a 500 error code. In most cases, this means'\
              ' that the DAV extension failed in some way. This error was'\
              ' found at: "%s".' % put_response.get_url()

        i = Info('DAV incorrect configuration', msg, res.id,
                 self.get_name())
        i.set_url(upload_url)
        i.set_method('PUT')

        self.kb_append(self, 'dav', i)

    # Report some common errors
    elif put_response.get_code() == 403:
        msg = 'DAV seems to be correctly configured and allowing you to'\
              ' use the PUT method but the directory does not have the'\
              ' correct permissions that would allow the web server to'\
              ' write to it. This error was found at: "%s".'
        msg = msg % put_response.get_url()

        i = Info('DAV incorrect configuration', msg,
                 [put_response.id, res.id], self.get_name())
        i.set_url(upload_url)
        i.set_method('PUT')

        self.kb_append(self, 'dav', i)
def _analyze_crossdomain_clientaccesspolicy(self, url, response, file_name):
    """
    Parse a crossdomain.xml / clientaccesspolicy.xml response and report
    insecure (wildcard) or merely interesting cross-domain access rules.

    Fixes vs. the original:
      * If @file_name matched neither known policy file, `url_list` and
        `attribute` were never assigned and the loop raised NameError.
        We now return early for unknown file names.
      * The loop variable shadowed the `url` parameter; it now uses a
        local name.
    """
    try:
        dom = xml.dom.minidom.parseString(response.get_body())
    except Exception:
        # Report this, it may be interesting for the final user
        # not a vulnerability per-se... but... it's information after all
        if 'allow-access-from' in response.get_body() or \
           'cross-domain-policy' in response.get_body() or \
           'cross-domain-access' in response.get_body():
            desc = 'The "%s" file at: "%s" is not a valid XML.'
            desc = desc % (file_name, response.get_url())

            i = Info('Invalid RIA settings file', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
        return

    if file_name == 'crossdomain.xml':
        url_list = dom.getElementsByTagName("allow-access-from")
        attribute = 'domain'
    elif file_name == 'clientaccesspolicy.xml':
        url_list = dom.getElementsByTagName("domain")
        attribute = 'uri'
    else:
        # Unknown policy file name: nothing we know how to analyze.
        # (Previously this raised NameError on url_list/attribute.)
        return

    for access_elem in url_list:
        access_value = access_elem.getAttribute(attribute)

        desc = 'The "%s" file at "%s" allows flash/silverlight'\
               ' access from any site.'
        desc = desc % (file_name, response.get_url())

        if access_value == '*':
            # Wildcard access: any site may interact with this one
            v = Vuln('Insecure RIA settings', desc, severity.LOW,
                     response.id, self.get_name())
            v.set_url(response.get_url())
            v.set_method('GET')

            kb.kb.append(self, 'vuln', v)
            om.out.vulnerability(v.get_desc(), severity=v.get_severity())
        else:
            i = Info('Cross-domain allow ACL', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())
            i.set_method('GET')

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
def _html_in_comment(self, comment, request, response):
    """
    Find HTML code inside HTML comments and report it as interesting.
    """
    html_in_comment = self.HTML_RE.search(comment)

    # NOTE(review): membership is checked with the raw comment but the
    # stripped comment is added below — preserved as-is; confirm intended.
    if html_in_comment is None:
        return
    if (comment, response.get_url()) in self._already_reported_interesting:
        return

    # There is HTML code in the comment.
    comment = comment.replace('\n', '').replace('\r', '')

    desc = 'A comment with the string "%s" was found in: "%s".'\
           ' This could be interesting.'
    desc = desc % (comment, response.get_url())

    info = Info('HTML comment contains HTML code', desc, response.id,
                self.get_name())
    info.set_dc(request.get_dc())
    info.set_uri(response.get_uri())
    info.add_to_highlight(html_in_comment.group(0))

    kb.kb.append(self, 'html_comment_hides_html', info)
    om.out.information(info.get_desc())
    self._already_reported_interesting.add((comment, response.get_url()))
def analyze_document_links(self, request, response):
    """
    Find session IDs in the URI and store them in the KB.

    Fix: the original used a bare ``except:`` which also swallowed
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    """
    try:
        doc_parser = parser_cache.dpc.get_document_parser_for(response)
    except Exception:
        # No suitable parser for this document; nothing to analyze
        return

    parsed_refs, _ = doc_parser.get_references()

    for link_uri in parsed_refs:
        if self._has_sessid(link_uri) and \
           response.get_url() not in self._already_reported:
            # report these informations only once
            self._already_reported.add(response.get_url())

            desc = 'The HTML content at "%s" contains a link (%s)'\
                   ' which holds a session id. The ID could be leaked'\
                   ' to third party domains through the referrer'\
                   ' header.'
            desc = desc % (response.get_url(), link_uri)

            # append the info object to the KB.
            i = Info('Session ID in URL', desc, response.id,
                     self.get_name())
            i.set_uri(response.get_uri())

            self.kb_append(self, 'url_session', i)
            break
def _fingerprint_data(self, domain_path, wp_unique_url, response):
    """
    Fingerprint the WordPress version by hashing well-known static
    resources and comparing against a DB of known md5 hashes.

    Fix: the original reassigned the ``response`` parameter inside the
    loop; a distinct local name is used now.
    """
    for wp_fingerprint in self._get_wp_fingerprints():
        # The URL in the XML is relative AND it has two different variables
        # that we need to replace:
        #   $wp-content$ -> wp-content/
        #   $wp-plugins$ -> wp-content/plugins/
        path = wp_fingerprint.filepath
        path = path.replace('$wp-content$', 'wp-content/')
        path = path.replace('$wp-plugins$', 'wp-content/plugins/')
        test_url = domain_path.url_join(path)

        fp_response = self._uri_opener.GET(test_url, cache=True)
        response_hash = hashlib.md5(fp_response.get_body()).hexdigest()

        if response_hash == wp_fingerprint.hash:
            version = wp_fingerprint.version

            # Save it to the kb!
            desc = 'WordPress version "%s" fingerprinted by matching known md5'\
                   ' hashes to HTTP responses of static resources available at'\
                   ' the remote WordPress install.'
            desc = desc % version

            i = Info('Fingerprinted Wordpress version', desc,
                     fp_response.id, self.get_name())
            i.set_url(test_url)

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
            break
def _fingerprint_meta(self, domain_path, wp_unique_url, response):
    """
    Check if the WordPress version is exposed in the index page's
    <meta name="generator"> header.

    Fix: the regex is now a raw string so ``\\d`` is not treated as a
    (deprecated) string escape.
    """
    # Main scan URL passed from w3af + wp index page
    wp_index_url = domain_path.url_join('index.php')
    response = self._uri_opener.GET(wp_index_url, cache=True)

    # Find the generator meta tag in the response html
    find = r'<meta name="generator" content="[Ww]ord[Pp]ress (\d\.\d\.?\d?)" />'
    m = re.search(find, response.get_body())

    # If string found, group version
    if m:
        version = m.group(1)

        # Save it to the kb!
        desc = 'WordPress version "%s" found in the index header.'
        desc = desc % version

        i = Info('Fingerprinted Wordpress version', desc, response.id,
                 self.get_name())
        i.set_url(wp_index_url)

        kb.kb.append(self, 'info', i)
        om.out.information(i.get_desc())
def _analyze_author(self, response, frontpage_author):
    """
    Analyze the author URL.

    :param response: The http response object for the _vti_inf file.
    :param frontpage_author: A regex match object.
    :return: None. All the info is saved to the kb.

    Fix: the "customized configuration" message used to claim the default
    location is "/_vti_bin/_vti_adm/author.exe", which contradicted the
    path the code actually compares against; the message now matches the
    compared constant ("_vti_bin/_vti_aut/author.exe").
    """
    author_location = response.get_url().get_domain_path().url_join(
        frontpage_author.group(1))

    # Check for anomalies in the location of author.exe
    if frontpage_author.group(1) != '_vti_bin/_vti_aut/author.exe':
        name = 'Customized frontpage configuration'

        desc = 'The FPAuthorScriptUrl is at: "%s" instead of the default'\
               ' location: "/_vti_bin/_vti_aut/author.exe". This is very'\
               ' uncommon.'
        desc = desc % author_location
    else:
        name = 'FrontPage FPAuthorScriptUrl'

        desc = 'The FPAuthorScriptUrl is at: "%s".'
        desc = desc % author_location

    i = Info(name, desc, response.id, self.get_name())
    i.set_url(author_location)
    i['FPAuthorScriptUrl'] = author_location

    kb.kb.append(self, 'frontpage_version', i)
    om.out.information(i.get_desc())
def _do_request(self, url, mutant):
    """
    Perform a simple GET to see if the result is an error or not, and then
    run the actual fuzzing.
    """
    response = self._uri_opener.GET(mutant, cache=True,
                                    headers=self._headers)

    # Skip 404s, auth-protected resources and anything the plugin decided
    # not to evaluate
    if is_404(response) or response.get_code() in (403, 401) \
    or self._return_without_eval(mutant):
        return

    for fr in self._create_fuzzable_requests(response):
        self.output_queue.put(fr)

    #
    #   Save it to the kb (if new)!
    #
    if response.get_url() not in self._seen \
    and response.get_url().get_file_name():
        desc = 'A potentially interesting file was found at: "%s".'
        desc = desc % response.get_url()

        i = Info('Potentially interesting file', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())

        kb.kb.append(self, 'files', i)
        om.out.information(i.get_desc())

        # Report only once
        self._seen.add(response.get_url())
def grep(self, request, response):
    """
    Plugin entry point: flag responses whose <script> blocks contain
    AJAX code.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    url = response.get_url()
    if not response.is_text_or_html() or url in self._already_inspected:
        return

    # Don't repeat URLs
    self._already_inspected.add(url)

    dom = response.get_dom()
    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    for element in self._script_xpath(dom):
        # returns the text between <script> and </script>
        script_content = element.text
        if script_content is None:
            continue

        ajax_match = self._ajax_regex_re.search(script_content)
        if ajax_match:
            desc = 'The URL: "%s" has AJAX code.' % url

            i = Info('AJAX code', desc, response.id, self.get_name())
            i.set_url(url)
            i.add_to_highlight(ajax_match.group(0))

            self.kb_append_uniq(self, 'ajax', i, 'URL')
def discover(self, fuzzable_request):
    """
    Search the MIT PKS key server for email accounts on the target's
    root domain and store each one in the KB.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    root_domain = fuzzable_request.get_url().get_root_domain()

    search_engine = pks(self._uri_opener)
    pks_url = 'http://pgp.mit.edu:11371/'

    for result in search_engine.search(root_domain):
        mail = result.username + '@' + root_domain

        desc = 'The mail account: "%s" was found at: "%s".'
        desc = desc % (mail, pks_url)

        i = Info('Email account', desc, result.id, self.get_name())
        i.set_url(URL(pks_url))
        i['mail'] = mail
        i['user'] = result.username
        i['name'] = result.name
        i['url_list'] = [URL(pks_url), ]

        kb.kb.append('emails', 'emails', i)
        # Don't save duplicated information in the KB. It's useless.
        #kb.kb.append( self, 'emails', i )
        om.out.information(i.get_desc())
def _match_cookie_fingerprint(self, request, response, cookie_obj):
    """
    Analyze the cookie and try to guess the remote web server or
    programming framework based on the cookie that was sent.

    :return: True if the cookie was fingerprinted
    """
    cookie_header = cookie_obj.output(header='')

    for cookie_str_db, system_name in self.COOKIE_FINGERPRINT:
        if cookie_str_db not in cookie_header:
            continue
        if system_name in self._already_reported_server:
            # Already reported this platform; try the next fingerprint
            continue

        desc = 'A cookie matching the cookie fingerprint DB'\
               ' has been found when requesting "%s".'\
               ' The remote platform is: "%s".'
        desc = desc % (response.get_url(), system_name)

        i = Info('Identified cookie', desc, response.id, self.get_name())
        i.set_url(response.get_url())
        i['httpd'] = system_name

        self._set_cookie_to_rep(i, cobj=cookie_obj)

        kb.kb.append(self, 'security', i)
        self._already_reported_server.append(system_name)
        return True

    return False
def _do_request(self, mutated_url, user):
    """
    Perform the request and compare against the non-existent baseline.

    :return: The HTTP response id if the mutated_url is a web user
             directory, None otherwise.
    """
    response = self._uri_opener.GET(mutated_url, cache=True,
                                    headers=self._headers)

    path = mutated_url.get_path()
    response_body = response.get_body().replace(path, '')

    if not relative_distance_lt(response_body, self._non_existent, 0.7):
        return None

    # Avoid duplicates
    known_users = [u['user'] for u in kb.kb.get('user_dir', 'users')]
    if user not in known_users:
        desc = 'A user directory was found at: %s'
        desc = desc % response.get_url()

        i = Info('Web user home directory', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())
        i['user'] = user

        kb.kb.append(self, 'users', i)

        for fr in self._create_fuzzable_requests(response):
            self.output_queue.put(fr)

    return response.id
def grep(self, request, response):
    """
    Plugin entry point, find feeds.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    dom = response.get_dom()
    uri = response.get_uri()

    # In some strange cases, we fail to normalize the document
    if uri in self._already_inspected or dom is None:
        return

    self._already_inspected.add(uri)

    # Find all feed tags
    for element in self._tag_xpath(dom):
        feed_tag = element.tag
        feed_type = self._feed_types[feed_tag.lower()]
        version = element.attrib.get('version', 'unknown')

        fmt = 'The URL "%s" is a %s version %s feed.'
        desc = fmt % (uri, feed_type, version)

        i = Info('Content feed resource', desc, response.id,
                 self.get_name())
        i.set_uri(uri)
        i.add_to_highlight(feed_type)

        self.kb_append_uniq(self, 'feeds', i, 'URL')
def _parse_document(self, response):
    """
    Parse the HTML and add the mail addresses to the kb.
    """
    try:
        document_parser = parser_cache.dpc.get_document_parser_for(response)
    except w3afException:
        # Failed to find a suitable parser for the document
        return

    # Search for email addresses
    for mail in document_parser.get_emails(self._domain_root):
        if mail in self._accounts:
            continue

        self._accounts.append(mail)

        desc = 'The mail account: "%s" was found at: "%s".'
        desc = desc % (mail, response.get_uri())

        i = Info('Email account', desc, response.id, self.get_name())
        i.set_url(response.get_uri())
        i['mail'] = mail
        i['user'] = mail.split('@')[0]
        i['url_list'] = set([response.get_uri(), ])

        self.kb_append('emails', 'emails', i)
        self.kb_append(self, 'emails', i)
def _force_disclosures(self, domain_path, potentially_vulnerable_paths):
    """
    GET each potentially vulnerable path and report a WordPress full path
    disclosure when a PHP fatal error shows up in the response body.

    :param domain_path: The path to wordpress' root directory
    :param potentially_vulnerable_paths: A list with the paths I'll URL-join
                                         with @domain_path, GET and parse.
    """
    for pvuln_path in potentially_vulnerable_paths:
        pvuln_url = domain_path.url_join(pvuln_path)
        response = self._uri_opener.GET(pvuln_url, cache=True)

        if is_404(response):
            continue

        if 'Fatal error: ' not in response.get_body():
            continue

        desc = 'Analyze the HTTP response body to find the full path'\
               ' where wordpress was installed.'
        i = Info('WordPress path disclosure', desc, response.id,
                 self.get_name())
        i.set_url(pvuln_url)

        kb.kb.append(self, 'info', i)
        om.out.information(i.get_desc())
        break
def grep(self, request, response):
    """
    Plugin entry point. Parse the object tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    url = response.get_url()
    dom = response.get_dom()

    if not response.is_text_or_html() or dom is None \
    or url in self._already_analyzed:
        return

    self._already_analyzed.add(url)

    for element in self._tag_xpath(dom):
        tag_name = element.tag

        desc = 'The URL: "%s" has an "%s" tag. We recommend you download'\
               ' the client side code and analyze it manually.'
        desc = desc % (response.get_uri(), tag_name)

        i = Info('Browser plugin content', desc, response.id,
                 self.get_name())
        i.set_url(url)
        i.add_to_highlight(tag_name)

        self.kb_append_uniq(self, tag_name, i, 'URL')
def _analyze_results(self, filtered, not_filtered):
    """
    Analyze the test results and save the conclusion to the kb.

    Fix: the original reused ``i`` as the loop variable, clobbering the
    ``Info`` instance also named ``i``; distinct names are used now.
    """
    # Heuristic: one fifth (or more) of the offending strings blocked
    # means an active filter is present
    if len(filtered) >= len(self._get_offending_strings()) / 5.0:
        desc = 'The remote network has an active filter. IMPORTANT: The'\
               ' result of all the other plugins will be unaccurate, web'\
               ' applications could be vulnerable but "protected" by the'\
               ' active filter.'

        info = Info('Active filter detected', desc, 1, self.get_name())
        info['filtered'] = filtered

        kb.kb.append(self, 'afd', info)
        om.out.information(info.get_desc())

        om.out.information('The following URLs were filtered:')
        for filtered_url in filtered:
            om.out.information('- ' + filtered_url)

        if not_filtered:
            om.out.information(
                'The following URLs passed undetected by the filter:')
            for passed_url in not_filtered:
                om.out.information('- ' + passed_url)
def grep(self, request, response):
    """
    Plugin entry point: detect Symfony-generated pages that contain a
    form which seems to lack CSRF protection.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    url = response.get_url()
    if not response.is_text_or_html() or url in self._already_inspected:
        return

    # Don't repeat URLs
    self._already_inspected.add(url)

    if not self.symfony_detected(response):
        return

    dom = response.get_dom()
    if dom is None or self.csrf_detected(dom):
        return

    desc = 'The URL: "%s" seems to be generated by the'\
           ' Symfony framework and contains a form that'\
           ' perhaps has CSRF protection disabled.'
    desc = desc % url

    i = Info('Symfony Framework with CSRF protection disabled',
             desc, response.id, self.get_name())
    i.set_url(url)

    self.kb_append_uniq(self, 'symfony', i, 'URL')
def grep(self, request, response):
    """
    Plugin entry point, find the blank bodies and report them.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    # Guard clauses preserve the original short-circuit order
    if response.get_body() != '':
        return
    if request.get_method() not in self.METHODS:
        return
    if response.get_code() in self.HTTP_CODES:
        return
    if 'location' in response.get_lower_case_headers():
        return
    if response.get_url() in self._already_reported:
        return

    # report these informations only once
    self._already_reported.add(response.get_url())

    desc = 'The URL: "%s" returned an empty body, this could indicate'\
           ' an application error.'
    desc = desc % response.get_url()

    i = Info('Blank http response body', desc, response.id,
             self.get_name())
    i.set_url(response.get_url())

    self.kb_append(self, 'blank_body', i)
def grep(self, request, response):
    """
    Plugin entry point, verify if the HTML has a form with file uploads.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    url = response.get_url()
    if not response.is_text_or_html() or url in self._already_inspected:
        return

    self._already_inspected.add(url)

    dom = response.get_dom()
    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    # Loop through file inputs tags
    for input_file in self._file_input_xpath(dom):
        msg = 'The URL: "%s" has form with file upload capabilities.'
        msg = msg % url

        i = Info('File upload form', msg, response.id, self.get_name())
        i.set_url(url)
        i.add_to_highlight(etree.tostring(input_file))

        self.kb_append_uniq(self, 'file_upload', i, 'URL')
def discover(self, fuzzable_request):
    """
    Identify server software using favicon.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.

    Fix: the md5 of the response body was computed before the 404 check
    even though it is only used in the non-404 branch; the hash is now
    computed only when the favicon actually exists.
    """
    domain_path = fuzzable_request.get_url().get_domain_path()

    # TODO: Maybe I should also parse the html to extract the favicon location?
    favicon_url = domain_path.url_join('favicon.ico')
    response = self._uri_opener.GET(favicon_url, cache=True)

    if is_404(response):
        return

    remote_fav_md5 = hashlib.md5(response.get_body()).hexdigest()

    # check if MD5 is matched in database/list
    for md5part, favicon_desc in self._read_favicon_db():
        if md5part == remote_fav_md5:
            desc = 'Favicon.ico file was identified as "%s".' % favicon_desc

            i = Info('Favicon identification', desc, response.id,
                     self.get_name())
            i.set_url(favicon_url)

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
            break
    else:
        #
        #   Report to the kb that we failed to ID this favicon.ico
        #   and that the md5 should be sent to the developers.
        #
        desc = 'Favicon identification failed. If the remote site is' \
               ' using framework that is being exposed by its favicon,'\
               ' please send an email to [email protected]'\
               ' including this md5 hash "%s" and the' \
               ' name of the server or Web application it represents.' \
               ' New fingerprints make this plugin more powerful and ' \
               ' accurate.'
        desc = desc % remote_fav_md5

        i = Info('Favicon identification failed', desc, response.id,
                 self.get_name())
        i.set_url(favicon_url)

        kb.kb.append(self, 'info', i)
        om.out.information(i.get_desc())
def grep(self, request, response):
    """
    Plugin entry point: find HTTP requests/responses embedded inside
    response bodies.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    uri = response.get_uri()

    # 501 Code is "Not Implemented" which in some cases responds with
    # this in the body:
    #   <body><h2>HTTP/1.1 501 Not Implemented</h2></body>
    # Which creates a false positive.
    if response.get_code() == 501:
        return
    if not response.is_text_or_html() or uri in self._already_inspected:
        return

    # Don't repeat URLs
    self._already_inspected.add(uri)

    body_without_tags = response.get_clear_text_body()
    if body_without_tags is None:
        return

    for match, _, _, reqres in self._multi_re.query(body_without_tags):
        if reqres == 'REQUEST':
            desc = 'An HTTP request was found in the HTTP body of'\
                   ' a response.'
            i = Info('HTTP Request in HTTP body', desc, response.id,
                     self.get_name())
            i.set_uri(uri)
            i.add_to_highlight(match.group(0))
            kb.kb.append(self, 'request', i)

        if reqres == 'RESPONSE':
            desc = 'An HTTP response was found in the HTTP body of'\
                   ' a response.'
            i = Info('HTTP Response in HTTP body', desc, response.id,
                     self.get_name())
            i.set_uri(uri)
            i.add_to_highlight(match.group(0))
            kb.kb.append(self, 'response', i)
def grep(self, request, response):
    """
    If I find __VIEWSTATE and empty __EVENTVALIDATION => vuln.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    """
    if not response.is_text_or_html():
        return

    # First verify if we havent analyzed this URI yet
    if request.get_url() in self._already_analyzed:
        return

    self._already_analyzed.add(request.get_url())

    viewstate_mo = self._viewstate.search(response.get_body())
    if not viewstate_mo:
        return

    # I have __viewstate!, verify if event validation is enabled
    if not self._eventvalidation.search(response.get_body()):
        desc = 'The URL: "%s" has .NET Event Validation disabled.'\
               ' This programming/configuration error should be'\
               ' manually verified.'
        desc = desc % response.get_url()

        i = Info('.NET Event Validation is disabled', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())
        i.add_to_highlight(viewstate_mo.group())

        self.kb_append(self, 'dot_net_event_validation', i)

    if not self._encryptedVs.search(response.get_body()):
        # Nice! We can decode the viewstate! =)
        desc = 'The URL: "%s" has .NET ViewState encryption disabled.'\
               ' This programming/configuration error could be exploited'\
               ' to decode the viewstate contents.'
        desc = desc % response.get_url()

        i = Info('.NET ViewState encryption is disabled', desc,
                 response.id, self.get_name())
        i.set_url(response.get_url())

        self.kb_append(self, 'dot_net_event_validation', i)
def grep(self, request, response):
    """
    Plugin entry point, search for meta tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    uri = response.get_uri()
    if not response.is_text_or_html() or uri in self._already_inspected\
    or is_404(response):
        return

    self._already_inspected.add(uri)

    try:
        dp = parser_cache.dpc.get_document_parser_for(response)
    except w3afException:
        return

    for tag in dp.get_meta_tags():
        tag_name = self._find_name(tag)

        for key, val in tag.items():
            for word in self.INTERESTING_WORDS:
                # Check if we have something interesting
                # and WHERE that thing actually is
                where = content = None
                if word in key:
                    where, content = 'name', key
                elif word in val:
                    where, content = 'value', val

                if where is None:
                    continue

                # The atribute is interesting!
                fmt = 'The URI: "%s" sent a <meta> tag with attribute'\
                      ' %s set to "%s" which looks interesting.'
                desc = fmt % (response.get_uri(), where, content)

                if self.INTERESTING_WORDS.get(tag_name, None):
                    usage = self.INTERESTING_WORDS[tag_name]
                    desc += ' The tag is used for %s.' % usage

                i = Info('Interesting META tag', desc, response.id,
                         self.get_name())
                i.set_uri(response.get_uri())
                i.add_to_highlight(where, content)

                self.kb_append_uniq(self, 'meta_tags', i, 'URL')
def _check_server_header(self, fuzzable_request):
    """
    HTTP GET and analyze response for server header
    """
    response = self._uri_opener.GET(fuzzable_request.get_url(), cache=True)

    for hname, hvalue in response.get_lower_case_headers().iteritems():
        if hname != 'server':
            continue

        server = hvalue

        desc = 'The server header for the remote web server is: "%s".'
        desc = desc % server

        i = Info('Server header', desc, response.id, self.get_name())
        i['server'] = server
        i.add_to_highlight(hname + ':')

        om.out.information(i.get_desc())

        # Save the results in the KB so the user can look at it
        kb.kb.append(self, 'server', i)

        # Also save this for easy internal use
        # other plugins can use this information
        kb.kb.raw_write(self, 'server_string', server)
        break
    else:
        # strange ! no server header at all
        desc = 'The remote HTTP Server omitted the "server" header in'\
               ' its response.'
        i = Info('Omitted server header', desc, response.id,
                 self.get_name())

        om.out.information(i.get_desc())

        # Save the results in the KB so that other plugins can use this
        # information
        kb.kb.append(self, 'ommited_server_header', i)

        # Also save this for easy internal use
        # other plugins can use this information
        kb.kb.raw_write(self, 'server_string', '')
def grep(self, request, response):
    """
    Plugin entry point, search for the user defined regex.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if self._all_in_one is None:
        return

    if not response.is_text_or_html():
        return

    # TODO: Verify this this is really a performance improvement
    html_string = response.get_body()
    if not self._all_in_one.search(html_string):
        return

    # One of them is in there, now we need to find out which one
    for index, regex_tuple in enumerate(self._regexlist_compiled):
        regex, info_inst = regex_tuple

        match_object = regex.search(html_string)
        if not match_object:
            continue

        with self._plugin_lock:
            # Don't change the next line to "if info_inst:",
            # because the info_inst is an empty dict {}
            # which evaluates to false
            # but an info object is not the same as None
            if info_inst is not None:
                # Already reported once: just accumulate the response id
                ids = info_inst.get_id()
                ids.append(response.id)
                info_inst.set_id(ids)
            else:
                str_match = match_object.group(0)
                if len(str_match) > 20:
                    str_match = str_match[:20] + '...'

                desc = 'User defined regular expression "%s" matched a' \
                       ' response. The matched string is: "%s".'
                desc = desc % (regex.pattern, str_match)

                info_inst = Info('User defined regular expression match',
                                 desc, response.id, self.get_name())
                info_inst.set_url(response.get_url())

                om.out.information(desc)
                self.kb_append_uniq(self, 'user_defined_regex',
                                    info_inst, 'URL')

            # Save the info_inst
            self._regexlist_compiled[index] = (regex, info_inst)
def _ssl_info_to_kb(self, url, domain):
    """
    Fetch the SSL certificate for @domain and record a dump of it in
    both the log and the KB.
    """
    cert, cert_der, cipher = self._get_cert(url, domain)

    # Print the SSL information to the log
    desc = 'This is the information about the SSL certificate used for'\
           ' %s site:\n%s' % (domain,
                              self._dump_ssl_info(cert, cert_der, cipher))
    om.out.information(desc)

    info = Info('SSL Certificate dump', desc, 1, self.get_name())
    info.set_url(url)

    self.kb_append(self, 'certificate', info)
def _report_finding(self, response):
    """
    Save the finding to the kb.

    :param response: The response that triggered the detection
    """
    desc = 'The remote web server seems to have a reverse proxy installed.'

    info = Info('Reverse proxy identified', desc, response.id,
                self.get_name())
    info.set_url(response.get_url())

    kb.kb.append(self, 'detect_reverse_proxy', info)
    om.out.information(info.get_desc())