def grep(self, request, response):
    """
    Check if the header names are common or not

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    # Check for protocol anomalies
    self._content_location_not_300(request, response)

    headers = response.get_headers()

    # Report every header whose name is not in the well-known list
    for header_name in headers.keys():
        if header_name.upper() in self.COMMON_HEADERS:
            continue

        header_value = headers[header_name]

        desc = ('The remote web server sent the HTTP header: "%s"'
                ' with value: "%s", which is quite uncommon and'
                ' requires manual analysis.')
        desc %= (header_name, header_value)

        info = Info('Strange header', desc, response.id, self.get_name())
        info.add_to_highlight(header_value, header_name)
        info.set_url(response.get_url())

        info[StrangeHeaderInfoSet.ITAG] = header_name
        info['header_value'] = header_value

        self.kb_append_uniq_group(self, 'strange_headers', info,
                                  group_klass=StrangeHeaderInfoSet)
def grep(self, request, response):
    """
    Plugin entry point.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    if not response.is_text_or_html():
        return

    dom = response.get_dom()

    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    url = response.get_url()

    for script in self._script_xpath(dom):
        # element.text holds the code between <script> and </script>
        code = script.text
        if code is None:
            continue

        match = self._ajax_regex_re.search(code)
        if match is None:
            continue

        desc = 'The URL: "%s" has AJAX code.' % url

        info = Info('AJAX code', desc, response.id, self.get_name())
        info.set_url(url)
        info.add_to_highlight(match.group(0))

        self.kb_append_uniq(self, 'ajax', info, 'URL')
def grep(self, request, response):
    """
    Plugin entry point. Parse the object tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    dom = response.get_dom()
    if dom is None:
        return

    url = response.get_url()

    for element in self._tag_xpath(dom):
        tag_name = element.tag

        desc = ('The URL: "%s" has an "%s" tag. We recommend you download'
                ' the client side code and analyze it manually.')
        desc %= (response.get_uri(), tag_name)

        info = Info('Browser plugin content', desc, response.id,
                    self.get_name())
        info.set_url(url)
        info.add_to_highlight(tag_name)

        self.kb_append_uniq(self, tag_name, info, 'URL')
def grep(self, request, response):
    """
    Plugin entry point, find feeds.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    dom = response.get_dom()

    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    uri = response.get_uri()

    # Find all feed tags
    for element in self._tag_xpath(dom):
        feed_type = self._feed_types[element.tag.lower()]
        feed_version = element.attrib.get('version', 'unknown')

        desc = 'The URL "%s" is a %s version %s feed.' % (uri,
                                                          feed_type,
                                                          feed_version)

        info = Info('Content feed resource', desc, response.id,
                    self.get_name())
        info.set_uri(uri)
        info.add_to_highlight(feed_type)

        self.kb_append_uniq(self, 'feeds', info, 'URL')
def grep(self, request, response):
    """
    Plugin entry point. Analyze if the HTTP response codes are strange.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    code = response.get_code()
    if code in self.COMMON_HTTP_CODES:
        return

    # Create a new info object from scratch and save it to the kb
    desc = ('The remote Web server sent a strange HTTP response code:'
            ' "%s" with the message: "%s", manual inspection is recommended.')
    desc %= (code, response.get_msg())

    info = Info('Strange HTTP response code', desc, response.id,
                self.get_name())
    info.add_to_highlight(str(code), response.get_msg())
    info.set_url(response.get_url())

    info[StrangeCodesInfoSet.ITAG] = code
    info['message'] = response.get_msg()

    self.kb_append_uniq_group(self, 'strange_http_codes', info,
                              group_klass=StrangeCodesInfoSet)
def _analyze_domain(self, response, script_full_url, script_tag):
    """
    Checks if the domain is the same, or if it's considered secure.
    """
    url = response.get_url()
    script_domain = script_full_url.get_domain()

    if script_domain == response.get_url().get_domain():
        # Same-origin script, nothing to report
        return

    # We do a "in" because the secure js domains list contains entries
    # such as ".google." which should be match. This is to take into
    # account things like ".google.com.br" without having to list all
    # of them.
    #
    # Not the best, could raise some false negatives, but... bleh!
    for trusted_domain in self._secure_js_domains:
        if trusted_domain in script_domain:
            # It's a third party that we trust
            return

    to_highlight = script_tag.attrib.get('src')

    desc = ('The URL: "%s" has a script tag with a source that points'
            ' to a third party site ("%s"). This practice is not'
            ' recommended, the security of the current site is being'
            ' delegated to the external entity.')
    desc %= (url, script_domain)

    info = Info('Cross-domain javascript source', desc, response.id,
                self.get_name())
    info.set_url(url)
    info.add_to_highlight(to_highlight)
    info[CrossDomainInfoSet.ITAG] = script_domain

    self.kb_append_uniq_group(self, 'cross_domain_js', info,
                              group_klass=CrossDomainInfoSet)
def grep(self, request, response):
    """
    Check if the response disables Internet Explorer's XSS filter via
    the X-XSS-Protection header.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    headers = response.get_headers()

    # iget() does a case-insensitive lookup and returns a
    # (value, real_header_name) tuple. The real header name is not
    # needed here.
    #
    # Fix: renamed the misspelled local "heaver_value" to "header_value"
    header_value, _ = headers.iget('x-xss-protection', '')
    header_value = header_value.strip()

    if header_value != '0':
        return

    desc = ('The remote web server sent the HTTP X-XSS-Protection'
            ' header with a 0 value, which disables Internet'
            ' Explorer\'s XSS filter. In most cases, this is a bad'
            ' practice and should be subject to review.')

    i = Info('Insecure X-XSS-Protection header usage', desc,
             response.id, self.get_name())
    i.add_to_highlight('X-XSS-Protection')
    i.set_uri(response.get_uri())

    self.kb_append_uniq_group(self, 'xss_protection_header', i,
                              group_klass=XSSProtectionInfoSet)
def _grep_worker(self, request, response, document_parser, kb_key, domain):
    """
    Extract email addresses from the parsed document and report each
    previously-unseen (address, URL) pair to the KB.
    """
    url = response.get_url()

    for mail_address in set(document_parser.get_emails(domain)):
        # Reduce false positives: skip addresses that we sent ourselves
        if request.sent(mail_address):
            continue

        # Email address are case insensitive
        mail_address = mail_address.lower()

        uniq_key = (mail_address, url)
        if uniq_key in self._already_reported:
            # Avoid duplicates
            continue

        self._already_reported.add(uniq_key)

        # Create a new info object, and report it
        desc = 'The mail account: "%s" was found at "%s".' % (mail_address,
                                                              url)

        info = Info('Email address disclosure', desc, response.id,
                    self.get_name())
        info.add_to_highlight(mail_address)
        info.set_url(url)

        info[EmailInfoSet.ITAG] = mail_address
        info['user'] = mail_address.split('@')[0]

        self.kb_append_uniq_group('emails', kb_key, info,
                                  group_klass=EmailInfoSet)
def _content_location_not_300(self, request, response):
    """
    Report responses that send a "content-location" HTTP header together
    with a 3xx response status code.

    NOTE(review): the original docstring claimed this checks responses
    whose code is NOT in the 300 range, but the guard below only lets
    codes 301..309 through (300 itself is excluded by the strict
    comparison) -- confirm which behavior is intended.

    :param request: The HTTP request object (unused).
    :param response: The HTTP response object to analyze.
    :return: None, all results are saved in the kb.
    """
    # Only continue for redirect-range responses (301..309)
    if not 300 < response.get_code() < 310:
        return

    headers = response.get_headers()
    # iget() is a case-insensitive lookup returning
    # (value, real_header_name)
    header_value, header_name = headers.iget('content-location')

    # Header not present in this response
    if header_value is None:
        return

    desc = ('The URL: "%s" sent the HTTP header: "content-location"'
            ' with value: "%s" in an HTTP response with code %s which'
            ' is a violation to the RFC.')
    desc %= (response.get_url(),
             header_value,
             response.get_code())

    i = Info('Content-Location HTTP header anomaly', desc,
             response.id,
             self.get_name())
    i.set_url(response.get_url())
    i.add_to_highlight('content-location')

    kb.kb.append(self, 'anomaly', i)
def test_to_json(self):
    """
    Serialize an InfoSet to JSON and verify that every exported field
    survives a full json.dumps()/json.loads() round-trip unchanged.
    """
    i = Info('Blind SQL injection vulnerability', MockInfo.LONG_DESC, 1,
             'plugin_name')
    i['test'] = 'foo'
    i.add_to_highlight('abc', 'def')

    # Wrap the single Info in an InfoSet and serialize the set
    iset = InfoSet([i])
    jd = iset.to_json()

    # Round-trip through an actual JSON string to prove the dict is
    # really serializable (not just dict-shaped)
    json_string = json.dumps(jd)
    jd = json.loads(json_string)

    # Each serialized field must match the corresponding accessor
    self.assertEqual(jd['name'], iset.get_name())
    self.assertEqual(jd['url'], str(iset.get_url()))
    self.assertEqual(jd['var'], iset.get_token_name())
    self.assertEqual(jd['response_ids'], iset.get_id())
    self.assertEqual(jd['vulndb_id'], iset.get_vulndb_id())
    self.assertEqual(jd['desc'], iset.get_desc(with_id=False))
    self.assertEqual(jd['long_description'], iset.get_long_description())
    self.assertEqual(jd['fix_guidance'], iset.get_fix_guidance())
    self.assertEqual(jd['fix_effort'], iset.get_fix_effort())
    self.assertEqual(jd['tags'], iset.get_tags())
    self.assertEqual(jd['wasc_ids'], iset.get_wasc_ids())
    self.assertEqual(jd['wasc_urls'], list(iset.get_wasc_urls()))
    self.assertEqual(jd['cwe_urls'], list(iset.get_cwe_urls()))
    self.assertEqual(jd['references'], BLIND_SQLI_REFS)
    self.assertEqual(jd['owasp_top_10_references'], BLIND_SQLI_TOP10_REFS)
    self.assertEqual(jd['plugin_name'], iset.get_plugin_name())
    self.assertEqual(jd['severity'], iset.get_severity())
    self.assertEqual(jd['attributes'], iset.first_info.copy())
    self.assertEqual(jd['highlight'], list(iset.get_to_highlight()))
def grep(self, request, response):
    """
    Plugin entry point. Parse the object tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    url = response.get_url()

    # pylint: disable=E1101
    for tag in parser_cache.dpc.get_tags_by_filter(response, self.TAGS):
        desc = ('The URL: "%s" has an "%s" tag. We recommend you download'
                ' the client side code and analyze it manually.')
        desc %= (response.get_uri(), tag.name)

        info = Info('Browser plugin content', desc, response.id,
                    self.get_name())
        info.set_url(url)
        info.add_to_highlight('<%s' % tag.name)

        self.kb_append_uniq(self, tag.name, info, 'URL')
def grep(self, request, response):
    """
    Plugin entry point, find leaked public/private keys in the response
    body and report them.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    if response.get_code() != 200:
        return

    for _, (key, keypair_type) in self._multi_in.query(response.body):
        desc = u'The URL: "%s" discloses a key of type: "%s"'
        desc %= (response.get_url(), key)

        if keypair_type == self.PUBLIC:
            # Disclosing a public key is only informational
            item = Info('Public key disclosure', desc, response.id,
                        self.get_name())
        elif keypair_type == self.PRIVATE:
            # Disclosing a private key is a high severity vulnerability
            item = Vuln('Private key disclosure', desc, severity.HIGH,
                        response.id, self.get_name())

        item.set_url(response.get_url())
        item.add_to_highlight(key)

        self.kb_append(self, 'keys', item)
def end(self):
    """
    This method is called when the plugin wont be used anymore.

    Flushes the collected potential vulnerabilities to the KB, skipping
    any URL for which another plugin already reported a finding.
    """
    all_findings = kb.kb.get_all_findings()

    for title, desc, _id, url, highlight in self._potential_vulns:
        for info in all_findings:
            # This makes sure that if the sqli plugin found a vulnerability
            # in the same URL as we found a detailed error, we won't report
            # the detailed error.
            #
            # If the user fixes the sqli vulnerability and runs the scan again
            # most likely the detailed error will disappear too. If the sqli
            # vulnerability disappears and this one remains, it will appear
            # as a new vulnerability in the second scan.
            if info.get_url() == url:
                break
        else:
            # for/else: this branch only runs when NO existing finding
            # matched the URL (the inner loop completed without break)
            i = Info(title, desc, _id, self.get_name())
            i.set_url(url)
            i.add_to_highlight(highlight)

            self.kb_append_uniq(self, 'error_page', i)

    # Release the disk-backed storage used for the potential vulns
    self._potential_vulns.cleanup()
def _html_in_comment(self, comment, request, response):
    """
    Find HTML code in HTML comments

    :param comment: The comment text extracted from the response
    :param request: The HTTP request object
    :param response: The HTTP response object
    :return: None, findings are saved to the KB
    """
    html_in_comment = self.HTML_RE.search(comment)
    if html_in_comment is None:
        return

    # Bug fix: build the dedup key from the *original* comment text.
    # The previous code checked the set using the raw comment but stored
    # the stripped/truncated version, so any comment longer than 40
    # characters was re-reported on every response.
    uniq_key = (comment, response.get_url())
    if uniq_key in self._already_reported_interesting:
        return

    self._already_reported_interesting.add(uniq_key)

    # There is HTML code in the comment; clean it up for display
    comment = comment.strip()
    comment = comment.replace('\n', '')
    comment = comment.replace('\r', '')
    comment = comment[:40]

    desc = 'A comment with the string "%s" was found in: "%s".'\
           ' This could be interesting.'
    desc = desc % (comment, response.get_url())

    i = Info('HTML comment contains HTML code', desc,
             response.id, self.get_name())
    i.set_dc(request.get_dc())
    i.set_uri(response.get_uri())
    i.add_to_highlight(html_in_comment.group(0))

    kb.kb.append(self, 'html_comment_hides_html', i)
    om.out.information(i.get_desc())
def test_to_json(self):
    """
    Serialize a single Info to JSON and verify that every exported field
    survives a full json.dumps()/json.loads() round-trip unchanged.
    """
    i = Info("Blind SQL injection vulnerability", MockInfo.LONG_DESC, 1,
             "plugin_name")
    i["test"] = "foo"
    i.add_to_highlight("abc", "def")

    jd = i.to_json()

    # Round-trip through an actual JSON string to prove the dict is
    # really serializable (not just dict-shaped)
    json_string = json.dumps(jd)
    jd = json.loads(json_string)

    # Each serialized field must match the corresponding accessor
    self.assertEqual(jd["name"], i.get_name())
    self.assertEqual(jd["url"], str(i.get_url()))
    self.assertEqual(jd["var"], i.get_token_name())
    self.assertEqual(jd["response_ids"], i.get_id())
    self.assertEqual(jd["vulndb_id"], i.get_vulndb_id())
    self.assertEqual(jd["desc"], i.get_desc(with_id=False))
    self.assertEqual(jd["long_description"], i.get_long_description())
    self.assertEqual(jd["fix_guidance"], i.get_fix_guidance())
    self.assertEqual(jd["fix_effort"], i.get_fix_effort())
    self.assertEqual(jd["tags"], i.get_tags())
    self.assertEqual(jd["wasc_ids"], i.get_wasc_ids())
    self.assertEqual(jd["wasc_urls"], list(i.get_wasc_urls()))
    self.assertEqual(jd["cwe_urls"], list(i.get_cwe_urls()))
    self.assertEqual(jd["references"], BLIND_SQLI_REFS)
    self.assertEqual(jd["owasp_top_10_references"], BLIND_SQLI_TOP10_REFS)
    self.assertEqual(jd["plugin_name"], i.get_plugin_name())
    self.assertEqual(jd["severity"], i.get_severity())
    self.assertEqual(jd["attributes"], i.copy())
    self.assertEqual(jd["highlight"], list(i.get_to_highlight()))
def grep(self, request, response):
    """
    Plugin entry point, find feeds.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    uri = response.get_uri()

    for tag in mp_doc_parser.get_tags_by_filter(response, self.TAGS):
        feed_type = self._feed_types[tag.name.lower()]
        feed_version = tag.attrib.get('version', 'unknown')

        desc = 'The URL "%s" is a %s version %s feed.' % (uri,
                                                          feed_type,
                                                          feed_version)

        info = Info('Content feed resource', desc, response.id,
                    self.get_name())
        info.set_uri(uri)
        info.add_to_highlight(feed_type)

        self.kb_append_uniq(self, 'feeds', info, 'URL')
def _report_generic(self, request, response):
    """
    When we were unable to identify any specific "strange" HTTP response
    codes we call this method to report the generic ones.

    :param request: HTTP request
    :param response: HTTP response
    :return: None, we save the information to the KB
    """
    code = response.get_code()
    msg = response.get_msg()

    # Create a new info object from scratch and save it to the kb
    desc = ('The remote Web server sent a strange HTTP response code:'
            ' "%s" with the message: "%s", manual inspection is'
            ' recommended.')
    desc %= (code, msg)

    info = Info('Strange HTTP response code', desc, response.id,
                self.get_name())
    info.add_to_highlight(str(code), msg)
    info.set_url(response.get_url())

    info[StrangeCodesInfoSet.ITAG] = code
    info['message'] = msg

    self.kb_append_uniq_group(self, 'strange_http_codes', info,
                              group_klass=StrangeCodesInfoSet)
def grep(self, request, response):
    """
    Analyze if the HTTP response reason messages are strange.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    response_code = response.get_code()
    msg_list = W3C_REASONS.get(response_code, None)

    # Unknown response code: nothing to compare the reason against
    if msg_list is None:
        return

    response_reason = response.get_msg().lower()

    if response_reason in msg_list:
        # It's common, nothing to do here.
        return

    # Create a new info object from scratch and save it to the kb.
    #
    # Bug fix: the two implicitly-concatenated string literals were
    # missing the separating space, so the message read
    # "...strange HTTP reasonmessage..."
    desc = ('The remote Web server sent a strange HTTP reason'
            ' message "%s", manual inspection is recommended.')
    desc = desc % response.get_msg()

    i = Info("Strange HTTP Reason message", desc, response.id,
             self.get_name())
    i.set_url(response.get_url())
    i.add_to_highlight(response.get_msg())

    i[StrangeHeaderInfoSet.ITAG] = response.get_msg()

    self.kb_append_uniq_group(self, "strange_reason", i,
                              group_klass=StrangeHeaderInfoSet)
def _check_x_power(self, fuzzable_request, response):
    """
    Analyze X-Powered-By header.

    :param fuzzable_request: The request sent to the server (unused)
    :param response: The HTTP response to analyze
    :return: None, results are saved to the KB
    """
    for header_name in response.get_headers().keys():
        for needle in ['ASPNET', 'POWERED']:
            if needle in header_name.upper():
                powered_by = response.get_headers()[header_name]

                # Only report each unique header value once
                if powered_by in self._x_powered:
                    return

                self._x_powered.add(powered_by)

                desc = 'The %s header for the target HTTP server is "%s".'
                desc %= (header_name, powered_by)

                i = Info('Powered-by header', desc, response.id,
                         self.get_name())
                i['powered_by'] = powered_by
                i.add_to_highlight(header_name + ':')

                om.out.information(i.get_desc())

                # Save the results in the KB so that other plugins can
                # use this information. Before knowing that some servers
                # may return more than one poweredby header I had:
                #
                #     kb.kb.raw_write(self, 'powered_by', powered_by)
                #
                # But I have seen an IIS server with PHP that returns
                # both the ASP.NET and the PHP headers
                kb.kb.append(self, 'powered_by', i)

                # Save all the values seen so far to the KB.
                #
                # Bug fix: the original code did list(powered_by), which
                # splits the header *string* into a list of single
                # characters; the intent is to store the collected set
                # of header values.
                kb.kb.raw_write(self, 'powered_by_string',
                                list(self._x_powered))
def grep(self, request, response):
    """
    Plugin entry point. Analyze if the HTTP response codes are strange.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    response_code = response.get_code()
    if response_code in self.COMMON_HTTP_CODES:
        return

    # Create a new info object from scratch and save it to the kb
    desc = ('The remote Web server sent a strange HTTP response code:'
            ' "%s" with the message: "%s", manual inspection is'
            ' recommended.')
    desc %= (response_code, response.get_msg())

    info = Info('Strange HTTP response code', desc, response.id,
                self.get_name())
    info.add_to_highlight(str(response_code), response.get_msg())
    info.set_url(response.get_url())

    info[StrangeCodesInfoSet.ITAG] = response_code
    info['message'] = response.get_msg()

    self.kb_append_uniq_group(self, 'strange_http_codes', info,
                              group_klass=StrangeCodesInfoSet)
def grep(self, request, response):
    """
    Analyze if the HTTP response reason messages are strange.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    known_reasons = W3C_REASONS.get(response.get_code(), None)
    if known_reasons is None:
        return

    if response.get_msg().lower() in known_reasons:
        # It's common, nothing to do here.
        return

    # Create a new info object from scratch and save it to the kb:
    desc = ('The remote Web server sent a strange HTTP reason'
            ' message "%s", manual inspection is recommended.')
    desc %= response.get_msg()

    info = Info('Strange HTTP Reason message', desc, response.id,
                self.get_name())
    info.set_url(response.get_url())
    info.add_to_highlight(response.get_msg())

    info[StrangeHeaderInfoSet.ITAG] = response.get_msg()

    self.kb_append_uniq_group(self, 'strange_reason', info,
                              group_klass=StrangeHeaderInfoSet)
def grep(self, request, response):
    """
    Plugin entry point, verify if the HTML has a form with file uploads.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    dom = response.get_dom()

    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    url = response.get_url()

    # Loop through file inputs tags
    for input_file in self._file_input_xpath(dom):
        msg = 'The URL: "%s" has form with file upload capabilities.' % url

        info = Info('File upload form', msg, response.id, self.get_name())
        info.set_url(url)
        info.add_to_highlight(etree.tostring(input_file))

        self.kb_append_uniq(self, 'file_upload', info, 'URL')
def _check_server_header(self, fuzzable_request, response):
    """
    HTTP GET and analyze response for server header

    :param fuzzable_request: The request that generated the response (unused).
    :param response: The HTTP response whose Server header is analyzed.
    :return: None, findings are saved to the KB.
    """
    if is_no_content_response(response):
        #
        # UrlOpenerProxy(), a helper class used by most plugins, will
        # generate 204 HTTP responses for HTTP requests that fail.
        # This makes plugins have less error handling code (try/except),
        # and looks like this in the scan log:
        #
        # Generated 204 "No Content" response (id:2131)
        #
        # The problem is that in some strange cases, like this plugin,
        # the 204 response will trigger a false positive. Because of
        # that I had to add this if statement to completely ignore
        # the HTTP responses with 204 status code
        #
        return

    # iget() is a case-insensitive lookup returning (value, real_name)
    server, header_name = response.get_headers().iget('server')

    # Only report each unique server value (including None) once
    if server in self._server_headers:
        return

    self._server_headers.add(server)

    if server:
        desc = 'The server header for the remote web server is: "%s".'
        desc %= server

        i = Info('Server header', desc, response.id, self.get_name())
        i['server'] = server
        i.add_to_highlight(header_name + ':')

        om.out.information(i.get_desc())

        # Save the results in the KB so the user can look at it
        kb.kb.append(self, 'server', i)

        # Also save this for easy internal use
        # other plugins can use this information
        kb.kb.raw_write(self, 'server_string', server)
    else:
        # strange! The header was missing or empty
        desc = ('The remote HTTP Server omitted the "server" header in'
                ' its response.')
        i = Info('Omitted server header', desc, response.id,
                 self.get_name())

        om.out.information(i.get_desc())

        # Save the results in the KB so that other plugins can use this
        # information
        kb.kb.append(self, 'omitted_server_header', i)

        # Also save this for easy internal use
        # other plugins can use this information
        kb.kb.raw_write(self, 'server_string', '')
def grep(self, request, response):
    """
    Plugin entry point, search for meta tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html() or is_404(response):
        return

    try:
        dp = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        # No parser available for this response
        return

    meta_tag_list = dp.get_meta_tags()

    for tag in meta_tag_list:
        for attr_name, attr_value in tag.items():

            # Bug fix: some parsers return None (or empty) attribute
            # names/values, and the "word in attr_value" test below
            # raises TypeError on None.
            # https://github.com/andresriancho/w3af/issues/2012
            if not attr_name or not attr_value:
                continue

            for word in self.INTERESTING_WORDS:

                # Check if we have something interesting and WHERE that
                # thing actually is
                where = content = None
                if word in attr_name:
                    where = ATTR_NAME
                    content = attr_name
                elif word in attr_value:
                    where = ATTR_VALUE
                    content = attr_value

                # Go to the next one if nothing is found
                if where is None:
                    continue

                # Now... if we found something, report it =)
                desc = ('The URI: "%s" sent a <meta> tag with the attribute'
                        ' %s set to "%s" which looks interesting.')
                desc %= (response.get_uri(), where, content)

                tag_name = self._find_tag_name(tag)
                usage = self.INTERESTING_WORDS.get(tag_name, None)
                if usage is not None:
                    desc += ' The tag is used for %s.' % usage

                i = Info('Interesting META tag', desc, response.id,
                         self.get_name())
                i.set_uri(response.get_uri())
                i.add_to_highlight(where, content)

                i[CONTENT] = content
                i[WHERE] = where

                self.kb_append_uniq_group(self, 'meta_tags', i,
                                          group_klass=MetaTagsInfoSet)
def grep(self, request, response):
    """
    Plugin entry point, search for meta tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html() or is_404(response):
        return

    try:
        document_parser = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        return

    for tag in document_parser.get_meta_tags():
        for attr_name, attr_value in tag.items():

            # Parsers may return None/empty attributes
            # https://github.com/andresriancho/w3af/issues/2012
            if not attr_name or not attr_value:
                continue

            for word in self.INTERESTING_WORDS:

                # Check if we have something interesting and WHERE that
                # thing actually is
                if word in attr_name:
                    where, content = ATTR_NAME, attr_name
                elif word in attr_value:
                    where, content = ATTR_VALUE, attr_value
                else:
                    # Go to the next one if nothing is found
                    continue

                # Now... if we found something, report it =)
                desc = ('The URI: "%s" sent a <meta> tag with the attribute'
                        ' %s set to "%s" which looks interesting.')
                desc %= (response.get_uri(), where, content)

                tag_name = self._find_tag_name(tag)
                usage = self.INTERESTING_WORDS.get(tag_name, None)
                if usage is not None:
                    desc += ' The tag is used for %s.' % usage

                info = Info('Interesting META tag', desc, response.id,
                            self.get_name())
                info.set_uri(response.get_uri())
                info.add_to_highlight(where, content)

                info[CONTENT] = content
                info[WHERE] = where

                self.kb_append_uniq_group(self, 'meta_tags', info,
                                          group_klass=MetaTagsInfoSet)
def analyze_disco(self, request, response):
    """
    Report the response as a DISCO file when any of the known DISCO
    strings appears in it. At most one info is created per response.
    """
    for disco_string in self._disco_strings:
        if disco_string not in response:
            continue

        desc = ('The URL: "%s" is a DISCO file that contains'
                ' references to WSDL URLs.') % response.get_url()

        info = Info('DISCO resource', desc, response.id, self.get_name())
        info.set_url(response.get_url())
        info.add_to_highlight(disco_string)

        self.kb_append_uniq(self, 'disco', info, 'URL')
        break
def grep(self, request, response):
    """
    Plugin entry point, search for meta tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html() or is_404(response):
        return

    try:
        dp = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        return

    for tag in dp.get_meta_tags():
        for attr_name, attr_value in tag.items():
            for word in self.INTERESTING_WORDS:

                # Check if we have something interesting
                # and WHERE that thing actually is
                where = content = None
                if word in attr_name:
                    where = self.ATTR_NAME
                    content = attr_name
                elif word in attr_value:
                    where = self.ATTR_VALUE
                    content = attr_value

                # Now... if we found something, report it =)
                if not self._should_report(attr_name, attr_value, where):
                    continue

                # The attribute is interesting!
                desc = ('The URI: "%s" sent a <meta> tag with attribute'
                        ' %s set to "%s" which looks interesting.')
                desc %= (response.get_uri(), where, content)

                tag_name = self._find_name(tag)
                usage = self.INTERESTING_WORDS.get(tag_name, None)
                if usage:
                    desc += ' The tag is used for %s.' % usage

                info = Info('Interesting META tag', desc, response.id,
                            self.get_name())
                info.set_uri(response.get_uri())
                info.add_to_highlight(where, content)

                self.kb_append_uniq(self, 'meta_tags', info, 'URL')
def _save_to_kb(self, request, response, generator):
    """
    Store a 'Generator information' finding for the given generator
    meta tag value.
    """
    desc = 'Found generator meta tag value: "%s"' % generator

    info = Info('Generator information', desc, response.id,
                self.get_name())
    info.set_uri(response.get_uri())
    info.add_to_highlight(generator)

    info[MetaTagsInfoSet.ITAG] = generator

    self.kb_append_uniq_group(self, 'content_generator', info,
                              group_klass=MetaTagsInfoSet)
def analyze_wsdl(self, request, response):
    """
    Report the response as a WSDL document when any known WSDL string is
    found in the body. At most one info is created per response.
    """
    for match in self._multi_in.query(response.body):
        desc = ('The URL: "%s" is a Web Services Description Language'
                ' page. This requires manual analysis to determine the'
                ' security of the web service.') % response.get_url()

        info = Info('WSDL resource', desc, response.id, self.get_name())
        info.set_url(response.get_url())
        info.add_to_highlight(match)

        self.kb_append_uniq(self, 'wsdl', info, 'URL')
        break
def analyze_disco(self, request, response):
    """
    Report the response as a DISCO file when one of the known DISCO
    strings is found in it; only the first match is reported.
    """
    for disco_string in self._disco_strings:
        if disco_string not in response:
            continue

        desc = ('The URL: "%s" is a DISCO file that contains references'
                ' to WSDL URLs.') % response.get_url()

        info = Info('DISCO resource', desc, response.id, self.get_name())
        info.set_url(response.get_url())
        info.add_to_highlight(disco_string)

        self.kb_append_uniq(self, 'disco', info, 'URL')
        break
def grep(self, request, response):
    """
    Plugin entry point, search for meta tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html() or is_404(response):
        return

    try:
        dp = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        return

    for tag in dp.get_meta_tags():
        tag_name = self._find_name(tag)

        for key, val in tag.items():
            for word in self.INTERESTING_WORDS:

                # Check if we have something interesting
                # and WHERE that thing actually is
                if word in key:
                    where, content = "name", key
                elif word in val:
                    where, content = "value", val
                else:
                    continue

                # The attribute is interesting!
                desc = ('The URI: "%s" sent a <meta> tag with attribute'
                        ' %s set to "%s" which looks interesting.')
                desc %= (response.get_uri(), where, content)

                if self.INTERESTING_WORDS.get(tag_name, None):
                    usage = self.INTERESTING_WORDS[tag_name]
                    desc += " The tag is used for %s." % usage

                info = Info("Interesting META tag", desc, response.id,
                            self.get_name())
                info.set_uri(response.get_uri())
                info.add_to_highlight(where, content)

                self.kb_append_uniq(self, "meta_tags", info, "URL")
def analyze_wsdl(self, request, response):
    """
    Report the response as a WSDL document when any known WSDL string
    matches the body; only the first match is reported.
    """
    for match in self._multi_in.query(response.body):
        desc = ('The URL: "%s" is a Web Services Description Language'
                ' page. This requires manual analysis to determine the'
                ' security of the web service.') % response.get_url()

        info = Info('WSDL resource', desc, response.id, self.get_name())
        info.set_url(response.get_url())
        info.add_to_highlight(match)

        self.kb_append_uniq(self, 'wsdl', info, 'URL')
        break
def _grep_worker(self, request, response, kb_key, domain=None):
    """
    Helper method for using in self.grep()

    :param request: The HTTP request
    :param response: The HTTP response
    :param kb_key: Knowledge base dict key
    :param domain: Target domain for get_emails filter
    :return: None
    """
    try:
        dp = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        msg = 'Failed to get document parser for "%s" at get_emails.'
        om.out.debug(msg % response.get_url())
        return

    url = response.get_url()

    for mail_address in set(dp.get_emails(domain)):
        # Reduce false positives
        if request.sent(mail_address):
            continue

        # Email address are case insensitive
        mail_address = mail_address.lower()

        uniq_key = (mail_address, url)
        if uniq_key in self._already_reported:
            # Avoid dups
            continue

        self._already_reported.add(uniq_key)

        # Create a new info object, and report it
        desc = 'The mail account: "%s" was found at "%s".' % (mail_address,
                                                              url)

        info = Info('Email address disclosure', desc, response.id,
                    self.get_name())
        info.add_to_highlight(mail_address)
        info.set_url(url)

        info[EmailInfoSet.ITAG] = mail_address
        info['user'] = mail_address.split('@')[0]

        self.kb_append_uniq_group('emails', kb_key, info,
                                  group_klass=EmailInfoSet)
def grep(self, request, response):
    """
    Plugin entry point, report script tags whose source points to a
    different domain than the response's.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    dom = response.get_dom()

    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    url = response.get_url()

    # Loop through script inputs tags
    for script_src_tag in self._script_src_xpath(dom):

        # This should be always False due to the XPATH we're using
        # but you never know...
        if 'src' not in script_src_tag.attrib:
            continue

        script_src = script_src_tag.attrib['src']

        try:
            script_full_url = response.get_url().url_join(script_src)
        except ValueError:
            om.out.debug('Invalid URL found by cross_domain_js: "%s"'
                         % script_src)
            continue

        script_domain = script_full_url.get_domain()
        if script_domain == response.get_url().get_domain():
            continue

        desc = ('The URL: "%s" has script tag with a source that points'
                ' to a third party site ("%s"). This practice is not'
                ' recommended as security of the current site is being'
                ' delegated to that external entity.')
        desc %= (url, script_domain)

        info = Info('Cross-domain javascript source', desc,
                    response.id, self.get_name())
        info.set_url(url)
        info.add_to_highlight(etree.tostring(script_src_tag))

        self.kb_append_uniq(self, 'cross_domain_js', info, 'URL')
def find_error_page(self, request, response):
    """
    Search the response body for known descriptive error messages and
    report the first match to the KB.
    """
    for msg in self._multi_in.query(response.body):
        desc = ('The URL: "%s" contains the descriptive error: "%s".'
                % (response.get_url(), msg))

        info = Info('Descriptive error page', desc, response.id,
                    self.get_name())
        info.set_url(response.get_url())
        info.add_to_highlight(msg)

        self.kb_append_uniq(self, 'error_page', info, 'URL')

        # There is no need to report more than one info for the same
        # result, the user will read the info object and analyze it even
        # if we report it only once. If we report it twice, he'll get
        # mad ;)
        break
def grep(self, request, response):
    """
    If I find __VIEWSTATE and empty __EVENTVALIDATION => vuln.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    """
    if not response.is_text_or_html():
        return

    viewstate_mo = self._viewstate.search(response.get_body())
    if viewstate_mo is None:
        return

    # I have __viewstate!, verify if event validation is enabled
    if not self._eventvalidation.search(response.get_body()):
        desc = ('The URL: "%s" has .NET Event Validation disabled. This'
                ' programming/configuration error should be manually'
                ' verified.')
        desc %= response.get_url()

        info = Info('.NET Event Validation is disabled', desc,
                    response.id, self.get_name())
        info.set_url(response.get_url())
        info.add_to_highlight(viewstate_mo.group())
        info[EVDisabledInfoSet.ITAG] = response.get_url().get_domain()

        self.kb_append_uniq_group(self, self.get_name(), info,
                                  group_klass=EVDisabledInfoSet)

    if not self._encryptedVs.search(response.get_body()):
        # Nice! We can decode the viewstate! =)
        desc = ('The URL: "%s" has .NET ViewState encryption disabled.'
                ' This programming/configuration error could be'
                ' exploited to decode the viewstate contents.')
        desc %= response.get_url()

        info = Info('.NET ViewState encryption is disabled', desc,
                    response.id, self.get_name())
        info.set_url(response.get_url())
        info[EVClearTextInfoSet.ITAG] = response.get_url().get_domain()

        self.kb_append_uniq_group(self, self.get_name(), info,
                                  group_klass=EVClearTextInfoSet)
def grep(self, request, response):
    """
    Report HTML forms that contain password-type inputs where
    auto-complete is not explicitly disabled (the autocomplete
    attribute is absent or not set to 'off').

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    url = response.get_url()
    dom = response.get_dom()

    if not response.is_text_or_html() or dom is None:
        return

    def allows_autocomplete(input_elem):
        # The autocomplete attribute defaults to "on" when absent
        return input_elem.get('autocomplete', 'on').lower() != 'off'

    # Loop through "auto-completable" forms
    for form in self._autocomplete_forms_xpath(dom):
        passwd_inputs = self._pwd_input_xpath(form)
        if not passwd_inputs:
            continue

        # Every password and text input must be auto-completable
        every_input = chain(passwd_inputs, self._text_input_xpath(form))
        if not all(allows_autocomplete(inp) for inp in every_input):
            continue

        form_str = etree.tostring(form)
        to_highlight = form_str[:form_str.find('>') + 1]

        desc = ('The URL: "%s" has a "<form>" element with '
                'auto-complete enabled.') % url

        info = Info('Auto-completable form', desc, response.id,
                    self.get_name())
        info.set_url(url)
        info.add_to_highlight(to_highlight)

        self.kb_append_uniq(self, 'form_autocomplete', info,
                            filter_by='URL')
        break
def grep(self, request, response):
    """
    Use the document parser to find forms with password fields that
    keep auto-complete enabled (attribute absent or not 'off').

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    if not response.is_text_or_html():
        return

    try:
        doc_parser = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        # Could not parse the document, nothing to analyze
        return

    for form in doc_parser.get_forms():
        # Only analyze forms which have autocomplete enabled at <form>
        if form.get_autocomplete() is False:
            continue

        for field_list in form.meta.itervalues():
            for field in field_list:
                # Only auto-completable password fields are interesting
                if field.input_type != INPUT_TYPE_PASSWD:
                    continue

                if not field.autocomplete:
                    continue

                url = response.get_url()

                desc = ('The URL: "%s" has a "<form>" element with '
                        'auto-complete enabled.') % url

                info = Info('Auto-completable form', desc, response.id,
                            self.get_name())
                info.add_to_highlight('autocomplete')
                info.set_url(url)

                self.kb_append_uniq(self, 'form_autocomplete', info,
                                    filter_by='URL')
                break
def _check_x_power(self, fuzzable_request):
    """
    GET the URL and inspect the response headers for "powered by"
    style banners (e.g. X-Powered-By, X-AspNet-Version).
    """
    response = self._uri_opener.GET(fuzzable_request.get_url(),
                                    cache=True)

    headers = response.get_headers()

    for header_name in headers.keys():
        upper_name = header_name.upper()

        for keyword in ['ASPNET', 'POWERED']:
            if keyword not in upper_name and upper_name not in keyword:
                continue

            powered_by = headers[header_name]

            # Only get the first one
            self._x_powered = False

            # Check if I already have this info in the KB
            pow_by_kb = kb.kb.get('server_header', 'powered_by')
            powered_by_in_kb = [j['powered_by'] for j in pow_by_kb]

            if powered_by in powered_by_in_kb:
                continue

            # Not in the KB yet: create a new Info and store it
            desc = 'The %s header for the target HTTP server is "%s".'
            desc = desc % (header_name, powered_by)

            info = Info('Powered-by header', desc, response.id,
                        self.get_name())
            info['powered_by'] = powered_by
            info.add_to_highlight(header_name + ':')

            om.out.information(info.get_desc())

            # Some servers (e.g. IIS running PHP) return more than one
            # powered-by header, so append instead of overwriting
            kb.kb.append(self, 'powered_by', info)

            # Update the list and save it
            powered_by_in_kb.append(powered_by)
            kb.kb.raw_write(self, 'powered_by_string', powered_by_in_kb)
def grep(self, request, response):
    """
    Find password fields inside forms that keep auto-complete enabled
    and store them in the KB, grouped by form action.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    if not response.is_text_or_html():
        return

    try:
        doc_parser = parser_cache.dpc.get_document_parser_for(response)
    except BaseFrameworkException:
        # Parsing failed, there is nothing we can analyze
        return

    for form in doc_parser.get_forms():
        # Only analyze forms which have autocomplete enabled at <form>
        if form.get_autocomplete() is False:
            continue

        for field_list in form.meta.itervalues():
            for field in field_list:
                # Only auto-completable password fields are reported
                if field.input_type != INPUT_TYPE_PASSWD:
                    continue

                if not field.autocomplete:
                    continue

                url = response.get_url()

                desc = ('The URL: "%s" has a "<form>" element with '
                        'auto-complete enabled.') % url

                info = Info('Auto-completable form', desc, response.id,
                            self.get_name())
                info.add_to_highlight('autocomplete')
                info.set_url(url)
                # Group findings by the form action URL
                info[AutoCompleteInfoSet.ITAG] = form.get_action().uri2url()

                self.kb_append_uniq_group(self, 'form_autocomplete',
                                          info,
                                          group_klass=AutoCompleteInfoSet)
                break
def grep(self, request, response):
    """
    Plugin entry point. Analyze if the HTTP response reason messages
    are strange.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    response_code = response.get_code()
    msg_list = W3C_REASONS.get(response_code, None)

    if msg_list is None:
        # The status code itself is unknown; not handled here
        return

    response_reason = response.get_msg().lower()
    if response_reason in msg_list:
        # The reason matches a W3C-documented one, nothing strange
        return

    # Check if the kb already has an info object with this reason
    strange_reason_infos = kb.kb.get('strange_reason', 'strange_reason')

    corresponding_info = None
    for info_obj in strange_reason_infos:
        if info_obj['reason'] == response.get_msg():
            corresponding_info = info_obj
            break

    if corresponding_info:
        # Work with the "old" info object: just record this response id
        id_list = corresponding_info.get_id()
        id_list.append(response.id)
        corresponding_info.set_id(id_list)
    else:
        # Create a new info object from scratch and save it to the kb.
        #
        # Fix: the continuation line was missing a separating space,
        # which produced the word "reasonmessage" in the report.
        desc = 'The remote Web server sent a strange HTTP reason'\
               ' message: "%s" manual inspection is advised.'
        desc = desc % response.get_msg()

        i = Info('Strange HTTP Reason message', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())
        i['reason'] = response.get_msg()
        i.add_to_highlight(response.get_msg())

        self.kb_append_uniq(self, 'strange_reason', i, 'URL')
def _check_x_power(self, fuzzable_request):
    """
    Retrieve the URL and search the response headers for framework
    disclosure banners such as X-Powered-By / X-AspNet-Version.
    """
    response = self._uri_opener.GET(fuzzable_request.get_url(),
                                    cache=True)

    for header_name, header_value in response.get_headers().items():
        name_upper = header_name.upper()

        # A header is interesting when its name overlaps one of the
        # known banner keywords in either direction
        is_interesting = any(token in name_upper or name_upper in token
                             for token in ['ASPNET', 'POWERED'])
        if not is_interesting:
            continue

        powered_by = header_value

        # Only get the first one
        self._x_powered = False

        # Check if this value is already stored in the KB
        kb_infos = kb.kb.get('server_header', 'powered_by')
        known_values = [info['powered_by'] for info in kb_infos]

        if powered_by in known_values:
            continue

        # Not in the KB yet, so add it
        desc = 'The %s header for the target HTTP server is "%s".'
        desc %= (header_name, powered_by)

        info = Info('Powered-by header', desc, response.id,
                    self.get_name())
        info['powered_by'] = powered_by
        info.add_to_highlight(header_name + ':')

        om.out.information(info.get_desc())

        # Some servers (an IIS box running PHP, for instance) return
        # both the ASP.NET and the PHP banners, so append rather than
        # raw_write a single value
        kb.kb.append(self, 'powered_by', info)

        # Update the list and save it
        known_values.append(powered_by)
        kb.kb.raw_write(self, 'powered_by_string', known_values)
def grep(self, request, response):
    """
    Find HTML forms with password inputs where auto-complete is left
    enabled (attribute absent or not set to 'off') and report them.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    url = response.get_url()
    dom = response.get_dom()

    if not response.is_text_or_html() or dom is None:
        return

    def allows_autocomplete(input_elem):
        # The autocomplete attribute defaults to "on" when absent
        return input_elem.get('autocomplete', 'on').lower() != 'off'

    # Loop through "auto-completable" forms
    for form in self._autocomplete_forms_xpath(dom):
        passwd_inputs = self._pwd_input_xpath(form)
        if not passwd_inputs:
            continue

        # Both password and text inputs must all be auto-completable
        every_input = chain(passwd_inputs, self._text_input_xpath(form))
        if not all(allows_autocomplete(inp) for inp in every_input):
            continue

        desc = ('The URL: "%s" has a "<form>" element with '
                'auto-complete enabled.') % url

        info = Info('Auto-completable form', desc, response.id,
                    self.get_name())
        info.set_url(url)

        # Highlight only the opening <form ...> tag
        form_str = etree.tostring(form)
        info.add_to_highlight(form_str[:form_str.find('>') + 1])

        # Store and print
        kb.kb.append(self, 'form_autocomplete', info)
        om.out.information(desc)
        break
def _analyze_strange(self, request, response, ref, token_name,
                     token_value):
    """
    Create and store a KB Info when the query string parameter value
    looks uncommon.

    :return: True when an Info object was created and stored.
    """
    if not self._is_strange(request, token_name, token_value):
        return False

    desc = ('The URI: "%s" has a parameter named: "%s" with value:'
            ' "%s", which is very uncommon. and requires manual'
            ' verification.') % (response.get_uri(), token_name,
                                 token_value)

    info = Info('Uncommon query string parameter', desc, response.id,
                self.get_name())
    info['parameter_value'] = token_value
    info.add_to_highlight(token_value)
    info.set_uri(ref)

    self.kb_append(self, 'strange_parameters', info)
    return True
def grep(self, request, response):
    """
    Plugin entry point, identify hashes in the HTTP response.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    # I know that by doing this I loose the chance of finding hashes in
    # PDF files, but... this is much faster
    if not response.is_text_or_html():
        return

    body = response.get_body()
    splitted_body = self._split_re.split(body)

    for possible_hash in splitted_body:
        # Performance enhancement: quickly discard tokens whose length
        # can not match any supported hash type.
        #
        # Bug fix: this (and the hash-type check below) used `return`,
        # which aborted the whole scan at the first non-matching token
        # instead of just skipping it.
        if len(possible_hash) < 31 or len(possible_hash) > 129:
            continue

        hash_type = self._get_hash_type(possible_hash)
        if not hash_type:
            continue

        possible_hash = possible_hash.lower()

        if not self._has_hash_distribution(possible_hash):
            continue

        report_key = (possible_hash, response.get_url())
        if report_key in self._already_reported:
            continue

        desc = 'The URL: "%s" returned a response that may contain'\
               ' a "%s" hash. The hash string is: "%s". This is'\
               ' uncommon and requires human verification.'
        desc = desc % (response.get_url(), hash_type, possible_hash)

        i = Info('Hash string in HTML content', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())
        i.add_to_highlight(possible_hash)

        self.kb_append(self, 'hash_analysis', i)

        # Remember the (hash, url) pair to avoid duplicate reports
        self._already_reported.add(report_key)
def grep(self, request, response):
    """
    Check response header names against the list of common ones,
    report any uncommon header, then check for protocol anomalies.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    headers = response.get_headers()

    for header_name in headers.keys():
        if header_name.upper() in self.COMMON_HEADERS:
            continue

        # Look for an existing info object for this header name
        existing = None
        for info_obj in kb.kb.get('strange_headers', 'strange_headers'):
            if info_obj['header_name'] == header_name:
                existing = info_obj
                break

        if existing is not None:
            # Reuse the "old" info object: just record this response id
            id_list = existing.get_id()
            id_list.append(response.id)
            existing.set_id(id_list)
        else:
            # Create a new info object from scratch and save it to
            # the kb
            hvalue = headers[header_name]

            desc = 'The remote web server sent the HTTP header: "%s"'\
                   ' with value: "%s", which is quite uncommon and'\
                   ' requires manual analysis.'
            desc = desc % (header_name, hvalue)

            info = Info('Strange header', desc, response.id,
                        self.get_name())
            info.set_url(response.get_url())
            info['header_name'] = header_name
            info['header_value'] = hvalue
            info.add_to_highlight(hvalue, header_name)

            kb.kb.append(self, 'strange_headers', info)

    # Now check for protocol anomalies
    self._content_location_not_300(request, response)
def _check_server_header(self, fuzzable_request):
    """
    HTTP GET the URL and analyze the response "server" header; report
    its value, or the fact that it was omitted.
    """
    response = self._uri_opener.GET(fuzzable_request.get_url(),
                                    cache=True)

    server = None
    for hname, hvalue in response.get_lower_case_headers().iteritems():
        if hname == 'server':
            server = hvalue
            break

    if server is not None:
        desc = 'The server header for the remote web server is: "%s".'
        desc = desc % server

        info = Info('Server header', desc, response.id, self.get_name())
        info['server'] = server
        info.add_to_highlight(hname + ':')

        om.out.information(info.get_desc())

        # Save the results in the KB so the user can look at it
        kb.kb.append(self, 'server', info)

        # Also save this for easy internal use, other plugins can use
        # this information
        kb.kb.raw_write(self, 'server_string', server)
    else:
        # strange! the header was omitted
        desc = 'The remote HTTP Server omitted the "server" header in'\
               ' its response.'
        info = Info('Omitted server header', desc, response.id,
                    self.get_name())

        om.out.information(info.get_desc())

        # Save the results in the KB so that other plugins can use this
        # information. NOTE: the kb key typo is kept on purpose, other
        # code may read it by this exact name.
        kb.kb.append(self, 'ommited_server_header', info)

        # Also save this for easy internal use, other plugins can use
        # this information
        kb.kb.raw_write(self, 'server_string', '')
def grep(self, request, response):
    """
    Check if the response explicitly disables the browser XSS filter
    via the X-XSS-Protection header.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    headers = response.get_headers()

    # iget() performs a case-insensitive lookup and returns the value
    # together with the real header name
    header_value, header_name = headers.iget('x-xss-protection', '')
    header_value = header_value.strip()

    if header_value != '0':
        return

    desc = 'The remote web server sent the HTTP X-XSS-Protection header'\
           ' with a 0 value, which disables Internet Explorer\'s XSS ' \
           ' filter. In most cases, this is a bad practice and should' \
           ' be subject to review.'

    i = Info('Insecure X-XSS-Protection header usage', desc,
             response.id, self.get_name())
    i.add_to_highlight('X-XSS-Protection')

    # Bug fix: set_url() was missing, but kb_append_uniq() below
    # de-duplicates by the info object's URL
    i.set_url(response.get_url())

    self.kb_append_uniq(self, 'xss_protection_header', i, 'URL')