def _test_DNS(self, original_response, dns_wildcard_url):
    """
    Detect DNS wildcard configurations by checking whether
    http://www.domain.tld/ serves the same content as http://domain.tld/.

    :param original_response: The HTTP response for the original URL.
    :param dns_wildcard_url: URL instance pointing at the wildcard variant.
    :return: None, findings are written to the KB.
    """
    # Request the original URL again, but with the Host header pointing
    # at the wildcard domain variant
    headers = Headers([('Host', dns_wildcard_url.get_domain())])

    try:
        modified_response = self._uri_opener.GET(original_response.get_url(),
                                                 cache=True,
                                                 headers=headers)
    except BaseFrameworkException:
        # Could not send the request, nothing to analyze
        return

    bodies_differ = fuzzy_not_equal(modified_response.get_body(),
                                    original_response.get_body(),
                                    0.35)

    if bodies_differ:
        desc = ('The target site has NO DNS wildcard, and the contents'
                ' of "%s" differ from the contents of "%s".')
        desc = desc % (dns_wildcard_url, original_response.get_url())

        i = Info('No DNS wildcard', desc, modified_response.id,
                 self.get_name())
        i.set_url(dns_wildcard_url)
    else:
        desc = ('The target site has a DNS wildcard configuration, the'
                ' contents of "%s" are equal to the ones of "%s".')
        desc = desc % (dns_wildcard_url, original_response.get_url())

        i = Info('DNS wildcard', desc, modified_response.id,
                 self.get_name())
        i.set_url(original_response.get_url())

    kb.kb.append(self, 'dns_wildcard', i)
    om.out.information(i.get_desc())
def _analyze_methods(self, url, allowed_methods, id_list):
    """
    Save the allowed HTTP methods for `url` to the KB, flagging DAV
    methods separately from plain allowed methods.

    :param url: The URL that was tested.
    :param allowed_methods: List of HTTP method names the server allows.
    :param id_list: HTTP response ids that support this finding.
    :return: None, findings are written to the KB.
    """
    # Sometimes there are no allowed methods, which means that our plugin
    # failed to identify any methods.
    if not allowed_methods:
        return

    dav_enabled = set(allowed_methods).intersection(self.DAV_METHODS)

    if dav_enabled:
        # DAV is enabled! Save the results in the KB so that other
        # plugins can use this information
        desc = ('The URL "%s" has the following allowed methods. These'
                ' include DAV methods and should be disabled: %s')
        desc = desc % (url, ', '.join(allowed_methods))

        i = Info('DAV methods enabled', desc, id_list, self.get_name())
        i.set_url(url)
        i['methods'] = allowed_methods

        kb.kb.append(self, 'dav-methods', i)
    else:
        # Save the results in the KB so that other plugins can use this
        # information. Do not remove these information, other plugins
        # REALLY use it !
        desc = 'The URL "%s" has the following enabled HTTP methods: %s'
        desc = desc % (url, ', '.join(allowed_methods))

        i = Info('Allowed HTTP methods', desc, id_list, self.get_name())
        i.set_url(url)
        i['methods'] = allowed_methods

        kb.kb.append(self, 'methods', i)
def _check_server_header(self, fuzzable_request, response):
    """
    Analyze the Server header of `response` and save the result to the KB.

    :param fuzzable_request: The request that generated `response`.
    :param response: The HTTP response to inspect.
    :return: None, findings are written to the KB.
    """
    if is_no_content_response(response):
        #
        # UrlOpenerProxy(), a helper class used by most plugins, will
        # generate 204 HTTP responses for HTTP requests that fail.
        # This makes plugins have less error handling code (try/except),
        # and looks like this in the scan log:
        #
        #   Generated 204 "No Content" response (id:2131)
        #
        # The problem is that in some strange cases, like this plugin,
        # the 204 response will trigger a false positive. Because of
        # that I had to add this if statement to completely ignore
        # the HTTP responses with 204 status code
        #
        return

    server, header_name = response.get_headers().iget('server')

    # Only report each distinct server banner once per scan
    if server in self._server_headers:
        return

    self._server_headers.add(server)

    if not server:
        # Strange! The header was omitted entirely
        desc = ('The remote HTTP Server omitted the "server" header in'
                ' its response.')
        i = Info('Omitted server header', desc, response.id,
                 self.get_name())

        om.out.information(i.get_desc())

        # Save the results in the KB so that other plugins can use this
        # information
        kb.kb.append(self, 'omitted_server_header', i)

        # Also save this for easy internal use
        # other plugins can use this information
        kb.kb.raw_write(self, 'server_string', '')
        return

    desc = 'The server header for the remote web server is: "%s".'
    desc %= server

    i = Info('Server header', desc, response.id, self.get_name())
    i['server'] = server
    i.add_to_highlight(header_name + ':')

    om.out.information(i.get_desc())

    # Save the results in the KB so the user can look at it
    kb.kb.append(self, 'server', i)

    # Also save this for easy internal use
    # other plugins can use this information
    kb.kb.raw_write(self, 'server_string', server)
def _PUT(self, domain_path):
    """
    Test the HTTP PUT method against `domain_path`.

    Uploads a small random file via PUT and then GETs it back to verify
    whether the upload actually worked; also reports the most common DAV
    misconfiguration error codes (500 and 403).

    :param domain_path: URL instance for the directory to test.
    :return: None, findings are written to the KB.
    """
    # upload a randomly-named file with random content
    url = domain_path.url_join(rand_alpha(5))
    rnd_content = rand_alnum(6)
    headers = Headers([('content-type', 'text/plain')])

    put_response = self._uri_opener.PUT(url, data=rnd_content,
                                        headers=headers)

    # check if uploaded
    res = self._uri_opener.GET(url, cache=True)
    if res.get_body() == rnd_content:
        msg = ('File upload with HTTP PUT method was found at resource:'
               ' "%s". A test file was uploaded to: "%s".')
        msg = msg % (domain_path, res.get_url())

        v = Vuln('Insecure DAV configuration', msg, severity.HIGH,
                 [put_response.id, res.id], self.get_name())

        v.set_url(url)
        v.set_method('PUT')

        self.kb_append(self, 'dav', v)

    # Report some common errors
    elif put_response.get_code() == 500:
        msg = ('DAV seems to be incorrectly configured. The web server'
               ' answered with a 500 error code. In most cases, this means'
               ' that the DAV extension failed in some way. This error was'
               ' found at: "%s".' % put_response.get_url())

        # BUGFIX: the original referenced only res.id (the follow-up GET);
        # the 500 error was produced by the PUT request, so reference both
        # ids just like the 403 branch does
        i = Info('DAV incorrect configuration', msg,
                 [put_response.id, res.id], self.get_name())

        i.set_url(url)
        i.set_method('PUT')

        self.kb_append(self, 'dav', i)

    # Report some common errors
    elif put_response.get_code() == 403:
        msg = ('DAV seems to be correctly configured and allowing you to'
               ' use the PUT method but the directory does not have the'
               ' correct permissions that would allow the web server to'
               ' write to it. This error was found at: "%s".')
        msg = msg % put_response.get_url()

        i = Info('DAV incorrect configuration', msg,
                 [put_response.id, res.id], self.get_name())

        i.set_url(url)
        i.set_method('PUT')

        self.kb_append(self, 'dav', i)
def _analyze_crossdomain_clientaccesspolicy(self, url, response, file_name):
    """
    Analyze a RIA cross-domain policy file (crossdomain.xml or
    clientaccesspolicy.xml) and report insecure or noteworthy ACLs.

    :param url: The URL where the file was requested (unused directly).
    :param response: The HTTP response containing the policy file body.
    :param file_name: The policy file name, must be a FILE_TAG_ATTR key.
    :return: None, findings are written to the KB.
    """
    # https://github.com/andresriancho/w3af/issues/14491
    if file_name not in self.FILE_TAG_ATTR:
        return

    try:
        dom = xml.dom.minidom.parseString(response.get_body())
    except Exception:
        # Report this, it may be interesting for the final user
        # not a vulnerability per-se... but... it's information after all
        body = response.get_body()
        looks_like_policy = ('allow-access-from' in body or
                             'cross-domain-policy' in body or
                             'cross-domain-access' in body)

        if looks_like_policy:
            desc = 'The "%s" file at: "%s" is not a valid XML.'
            desc %= (file_name, response.get_url())

            i = Info('Invalid RIA settings file', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())

        return

    tag, attribute = self.FILE_TAG_ATTR.get(file_name)

    # NOTE: the loop variable intentionally does not reuse the `url`
    # parameter name to avoid shadowing
    for access_element in dom.getElementsByTagName(tag):
        access_url = access_element.getAttribute(attribute)

        if access_url == '*':
            desc = ('The "%s" file at "%s" allows flash/silverlight'
                    ' access from any site.')
            desc %= (file_name, response.get_url())

            v = Vuln('Insecure RIA settings', desc, severity.LOW,
                     response.id, self.get_name())
            v.set_url(response.get_url())
            v.set_method('GET')

            kb.kb.append(self, 'vuln', v)
            om.out.vulnerability(v.get_desc(), severity=v.get_severity())
        else:
            desc = ('The "%s" file at "%s" allows flash/silverlight'
                    ' access from "%s".')
            desc %= (file_name, response.get_url(), access_url)

            i = Info('Cross-domain allow ACL', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())
            i.set_method('GET')

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
def _analyze_crossdomain_clientaccesspolicy(self, url, response, file_name):
    """
    Analyze a RIA cross-domain policy file (crossdomain.xml or
    clientaccesspolicy.xml) and report insecure or noteworthy ACLs.

    :param url: The URL where the file was requested (unused directly).
    :param response: The HTTP response containing the policy file body.
    :param file_name: 'crossdomain.xml' or 'clientaccesspolicy.xml'.
    :return: None, findings are written to the KB.
    """
    try:
        dom = xml.dom.minidom.parseString(response.get_body())
    except Exception:
        # Report this, it may be interesting for the final user
        # not a vulnerability per-se... but... it's information after all
        if 'allow-access-from' in response.get_body() or \
           'cross-domain-policy' in response.get_body() or \
           'cross-domain-access' in response.get_body():
            desc = 'The "%s" file at: "%s" is not a valid XML.'
            desc = desc % (file_name, response.get_url())

            i = Info('Invalid RIA settings file', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
        return

    if file_name == 'crossdomain.xml':
        url_list = dom.getElementsByTagName("allow-access-from")
        attribute = 'domain'
    elif file_name == 'clientaccesspolicy.xml':
        url_list = dom.getElementsByTagName("domain")
        attribute = 'uri'
    else:
        # BUGFIX: without this guard, an unexpected file_name left
        # url_list/attribute undefined and raised NameError below
        return

    for url in url_list:
        url = url.getAttribute(attribute)

        if url == '*':
            desc = ('The "%s" file at "%s" allows flash/silverlight'
                    ' access from any site.')
            desc = desc % (file_name, response.get_url())

            v = Vuln('Insecure RIA settings', desc, severity.LOW,
                     response.id, self.get_name())
            v.set_url(response.get_url())
            v.set_method('GET')

            kb.kb.append(self, 'vuln', v)
            om.out.vulnerability(v.get_desc(), severity=v.get_severity())
        else:
            desc = ('The "%s" file at "%s" allows flash/silverlight'
                    ' access from "%s".')
            desc = desc % (file_name, response.get_url(), url)

            i = Info('Cross-domain allow ACL', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())
            i.set_method('GET')

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
def grep(self, request, response):
    """
    If I find __VIEWSTATE and empty __EVENTVALIDATION => vuln.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, findings are written to the KB.
    """
    if not response.is_text_or_html():
        return

    body = response.get_body()

    viewstate_mo = self._viewstate.search(body)
    if not viewstate_mo:
        return

    # I have __viewstate!, verify if event validation is enabled
    event_validation_mo = self._eventvalidation.search(body)
    if not event_validation_mo:
        desc = ('The URL: "%s" has .NET Event Validation disabled. This'
                ' programming/configuration error should be manually'
                ' verified.')
        desc %= response.get_url()

        i = Info('.NET Event Validation is disabled', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())
        i.add_to_highlight(viewstate_mo.group())
        i[EVDisabledInfoSet.ITAG] = response.get_url().get_domain()

        self.kb_append_uniq_group(self, self.get_name(), i,
                                  group_klass=EVDisabledInfoSet)

    encrypted_mo = self._encryptedVs.search(body)
    if not encrypted_mo:
        # Nice! We can decode the viewstate! =)
        desc = ('The URL: "%s" has .NET ViewState encryption disabled.'
                ' This programming/configuration error could be'
                ' exploited to decode the viewstate contents.')
        desc %= response.get_url()

        i = Info('.NET ViewState encryption is disabled', desc,
                 response.id, self.get_name())
        i.set_url(response.get_url())
        i[EVClearTextInfoSet.ITAG] = response.get_url().get_domain()

        self.kb_append_uniq_group(self, self.get_name(), i,
                                  group_klass=EVClearTextInfoSet)
def discover(self, fuzzable_request, debugging_id):
    """
    Identify server software using favicon.

    :param debugging_id: A unique identifier for this call to discover()
    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    :return: None, findings are written to the KB.
    """
    domain_path = fuzzable_request.get_url().get_domain_path()

    # TODO: Maybe I should also parse the html to extract the favicon location?
    favicon_url = domain_path.url_join('favicon.ico')
    response = self._uri_opener.GET(favicon_url, cache=True)

    if is_404(response):
        # No favicon to fingerprint
        return

    # BUGFIX/perf: only hash the response body when the favicon actually
    # exists; the original computed the md5 even for 404 responses
    remote_fav_md5 = hashlib.md5(response.get_body()).hexdigest()

    # check if MD5 is matched in database/list
    for md5part, favicon_desc in self._read_favicon_db():
        if md5part == remote_fav_md5:
            desc = 'Favicon.ico file was identified as "%s".' % favicon_desc
            i = Info('Favicon identification', desc, response.id,
                     self.get_name())
            i.set_url(favicon_url)

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
            break
    else:
        #
        # Report to the kb that we failed to ID this favicon.ico
        # and that the md5 should be sent to the developers.
        #
        desc = ('Favicon identification failed. If the remote site is'
                ' using framework that is being exposed by its favicon,'
                ' please send an email to [email protected]'
                ' including this md5 hash "%s" and the'
                ' name of the server or Web application it represents.'
                ' New fingerprints make this plugin more powerful and '
                ' accurate.')
        desc = desc % remote_fav_md5

        i = Info('Favicon identification failed', desc, response.id,
                 self.get_name())
        i.set_url(favicon_url)

        kb.kb.append(self, 'info', i)
        om.out.information(i.get_desc())
def _do_request(self, url, mutant):
    """
    Perform a simple GET to see if the result is an error or not, and
    then run the actual fuzzing.

    :param url: The base URL (kept for interface compatibility).
    :param mutant: The mutated URL to request.
    :return: None, results are pushed to the output queue and the KB.
    """
    response = self._uri_opener.GET(mutant,
                                    cache=True,
                                    headers=self._headers)

    # Guard clauses: keep the original short-circuit evaluation order
    if is_404(response):
        return
    if response.get_code() in (403, 401):
        return
    if self._return_without_eval(mutant):
        return

    # Create the fuzzable request and send it to the core
    fr = FuzzableRequest.from_http_response(response)
    self.output_queue.put(fr)

    #
    # Save it to the kb (if new)!
    #
    resp_url = response.get_url()
    if resp_url not in self._seen and resp_url.get_file_name():
        desc = 'A potentially interesting file was found at: "%s".'
        desc = desc % resp_url

        i = Info('Potentially interesting file', desc, response.id,
                 self.get_name())
        i.set_url(resp_url)

        kb.kb.append(self, 'files', i)
        om.out.information(i.get_desc())

        # Report only once
        self._seen.add(resp_url)
def grep(self, request, response):
    """
    Check if the header names are common or not

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    # Check for protocol anomalies
    self._content_location_not_300(request, response)

    # Hoist the headers lookup out of the loop
    headers = response.get_headers()

    # Check header names
    for header_name in headers.keys():
        if header_name.upper() in self.COMMON_HEADERS:
            continue

        # Create a new info object and save it to the KB
        hvalue = headers[header_name]

        desc = ('The remote web server sent the HTTP header: "%s"'
                ' with value: "%s", which is quite uncommon and'
                ' requires manual analysis.')
        desc = desc % (header_name, hvalue)

        i = Info('Strange header', desc, response.id, self.get_name())
        i.add_to_highlight(hvalue, header_name)
        i.set_url(response.get_url())
        i[StrangeHeaderInfoSet.ITAG] = header_name
        i['header_value'] = hvalue

        self.kb_append_uniq_group(self, 'strange_headers', i,
                                  group_klass=StrangeHeaderInfoSet)
def test_to_json(self):
    """
    The JSON produced by InfoSet.to_json() must round-trip through
    json.dumps/json.loads and expose the same values as the InfoSet
    accessor methods.
    """
    info = Info('Blind SQL injection vulnerability', MockInfo.LONG_DESC,
                1, 'plugin_name')
    info['test'] = 'foo'
    info.add_to_highlight('abc', 'def')

    iset = InfoSet([info])

    # Serialize and deserialize to make sure the payload is real JSON
    serialized = json.dumps(iset.to_json())
    data = json.loads(serialized)

    self.assertEqual(data['name'], iset.get_name())
    self.assertEqual(data['url'], str(iset.get_url()))
    self.assertEqual(data['var'], iset.get_token_name())
    self.assertEqual(data['response_ids'], iset.get_id())
    self.assertEqual(data['vulndb_id'], iset.get_vulndb_id())
    self.assertEqual(data['desc'], iset.get_desc(with_id=False))
    self.assertEqual(data['long_description'], iset.get_long_description())
    self.assertEqual(data['fix_guidance'], iset.get_fix_guidance())
    self.assertEqual(data['fix_effort'], iset.get_fix_effort())
    self.assertEqual(data['tags'], iset.get_tags())
    self.assertEqual(data['wasc_ids'], iset.get_wasc_ids())
    self.assertEqual(data['wasc_urls'], list(iset.get_wasc_urls()))
    self.assertEqual(data['cwe_urls'], list(iset.get_cwe_urls()))
    self.assertEqual(data['references'], BLIND_SQLI_REFS)
    self.assertEqual(data['owasp_top_10_references'], BLIND_SQLI_TOP10_REFS)
    self.assertEqual(data['plugin_name'], iset.get_plugin_name())
    self.assertEqual(data['severity'], iset.get_severity())
    self.assertEqual(data['attributes'], iset.first_info.copy())
    self.assertEqual(data['highlight'], list(iset.get_to_highlight()))
def _analyze_results(self, filtered, not_filtered):
    """
    Analyze the test results and save the conclusion to the kb.

    :param filtered: URLs that the active filter blocked.
    :param not_filtered: URLs that passed through undetected.
    :return: None, findings are written to the KB.
    """
    # The filter is considered "active" when at least 20% of the
    # offending strings were blocked
    threshold = len(self._get_offending_strings()) / 5.0

    if len(filtered) >= threshold:
        desc = ('The remote network has an active filter. IMPORTANT: The'
                ' result of all the other plugins will be inaccurate, web'
                ' applications could be vulnerable but "protected" by the'
                ' active filter.')

        # Avoid shadowing: the Info object and the loop variables use
        # different names
        info = Info('Active filter detected', desc, 1, self.get_name())
        info['filtered'] = filtered

        kb.kb.append(self, 'afd', info)
        om.out.information(info.get_desc())

        om.out.information('The following URLs were filtered:')
        for filtered_url in filtered:
            om.out.information('- ' + filtered_url)

        if not_filtered:
            msg = 'The following URLs passed undetected by the filter:'
            om.out.information(msg)
            for passed_url in not_filtered:
                om.out.information('- ' + passed_url)

    # Cleanup some memory
    self._not_filtered = []
    self._filtered = []
def grep(self, request, response):
    """
    Plugin entry point.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    headers = response.get_headers()

    # iget() returns a (value, real_header_name) tuple; the real name is
    # unused here. Note: fixed the original "heaver_value" typo.
    header_value, _ = headers.iget('x-xss-protection', '')
    header_value = header_value.strip()

    if header_value != '0':
        return

    desc = ('The remote web server sent the HTTP X-XSS-Protection'
            ' header with a 0 value, which disables Internet'
            ' Explorer\'s XSS filter. In most cases, this is a bad'
            ' practice and should be subject to review.')

    i = Info('Insecure X-XSS-Protection header usage', desc,
             response.id, self.get_name())
    i.add_to_highlight('X-XSS-Protection')
    i.set_uri(response.get_uri())

    self.kb_append_uniq_group(self, 'xss_protection_header', i,
                              group_klass=XSSProtectionInfoSet)
def _analyze_author(self, response, frontpage_author):
    """
    Analyze the author URL.

    :param response: The http response object for the _vti_inf file.
    :param frontpage_author: A regex match object.
    :return: None. All the info is saved to the kb.
    """
    domain_path = response.get_url().get_domain_path()
    author_location = domain_path.url_join(frontpage_author.group(1))

    # Check for anomalies in the location of author.exe
    if frontpage_author.group(1) != '_vti_bin/_vti_aut/author.exe':
        name = 'Customized frontpage configuration'

        # BUGFIX: the message previously claimed the default location is
        # "/_vti_bin/_vti_adm/author.exe", which does not match the value
        # actually compared above (_vti_aut, the authoring directory)
        desc = ('The FPAuthorScriptUrl is at: "%s" instead of the default'
                ' location: "/_vti_bin/_vti_aut/author.exe". This is very'
                ' uncommon.')
        desc %= author_location
    else:
        name = 'FrontPage FPAuthorScriptUrl'
        desc = 'The FPAuthorScriptUrl is at: "%s".'
        desc %= author_location

    i = Info(name, desc, response.id, self.get_name())
    i.set_url(author_location)
    i['FPAuthorScriptUrl'] = author_location

    kb.kb.append(self, 'frontpage_version', i)
    om.out.information(i.get_desc())
def grep(self, request, response):
    """
    Plugin entry point, find feeds.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    uri = response.get_uri()

    for tag in mp_doc_parser.get_tags_by_filter(response, self.TAGS):
        # Map the tag name (rss / atom / ...) to a human readable type
        feed_type = self._feed_types[tag.name.lower()]
        version = tag.attrib.get('version', 'unknown')

        desc = 'The URL "%s" is a %s version %s feed.' % (uri,
                                                          feed_type,
                                                          version)

        i = Info('Content feed resource', desc, response.id,
                 self.get_name())
        i.set_uri(uri)
        i.add_to_highlight(feed_type)

        self.kb_append_uniq(self, 'feeds', i, 'URL')
def grep(self, request, response):
    """
    Plugin entry point. Analyze if the HTTP response codes are strange.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    code = response.get_code()
    if code in self.COMMON_HTTP_CODES:
        return

    message = response.get_msg()

    # Create a new info object from scratch and save it to the kb
    desc = ('The remote Web server sent a strange HTTP response code:'
            ' "%s" with the message: "%s", manual inspection is recommended.')
    desc = desc % (code, message)

    i = Info('Strange HTTP response code', desc, response.id,
             self.get_name())
    i.add_to_highlight(str(code), message)
    i.set_url(response.get_url())
    i[StrangeCodesInfoSet.ITAG] = code
    i['message'] = message

    self.kb_append_uniq_group(self, 'strange_http_codes', i,
                              group_klass=StrangeCodesInfoSet)
def analyze_document_links(self, request, response):
    """
    Find session IDs in the URI and store them in the KB.

    :param request: The HTTP request object.
    :param response: The HTTP response object.
    :return: None, findings are written to the KB.
    """
    try:
        doc_parser = parser_cache.dpc.get_document_parser_for(response)
    except Exception:
        # BUGFIX: the original used a bare `except: pass`, which also
        # swallows KeyboardInterrupt / SystemExit; narrowed to Exception.
        # A parse failure simply means there is nothing to analyze.
        return

    parsed_refs, _ = doc_parser.get_references()

    for link_uri in parsed_refs:
        if self._has_sessid(link_uri) and \
                response.get_url() not in self._already_reported:
            # report these informations only once
            self._already_reported.add(response.get_url())

            desc = ('The HTML content at "%s" contains a link (%s)'
                    ' which holds a session id. The ID could be leaked'
                    ' to third party domains through the referrer'
                    ' header.')
            desc = desc % (response.get_url(), link_uri)

            # append the info object to the KB.
            i = Info('Session ID in URL', desc, response.id,
                     self.get_name())
            i.set_uri(response.get_uri())

            self.kb_append(self, 'url_session', i)
            break
def test_vulndb_id_get_from_name(self):
    """
    When no vulndb_id is set, the Info name is used to look up the
    vulnerability database entry (blind SQL injection -> id 46).
    """
    # Since there is no vulndb_id set, the name wins:
    info = Info('Blind SQL injection vulnerability', MockInfo.LONG_DESC,
                1, 'plugin_name')

    # lazy calculation: nothing loaded from the db yet
    self.assertIsNone(info._vulndb)

    expected_references = [Reference(ref['url'], ref['title'])
                           for ref in BLIND_SQLI_REFS]

    self.assertTrue(info.has_db_details())
    self.assertEqual(info.get_vulndb_id(), 46)
    self.assertIsInstance(info.get_long_description(), basestring)
    self.assertIsInstance(info.get_fix_guidance(), basestring)
    self.assertEqual(info.get_fix_effort(), 50)
    self.assertEqual(info.get_tags(),
                     [u'web', u'sql', u'blind', u'injection', u'database'])
    self.assertEqual(info.get_wasc_ids(), [])
    self.assertEqual(list(info.get_wasc_urls()), [])
    self.assertEqual(list(info.get_cwe_urls()),
                     [u'https://cwe.mitre.org/data/definitions/89.html'])
    self.assertEqual(info.get_cwe_ids(), [u'89'])
    self.assertEqual(info.get_references(), expected_references)
    self.assertEqual(
        list(info.get_owasp_top_10_references()),
        [(u'2013', 1, 'https://www.owasp.org/index.php/Top_10_2013-A1')])
    self.assertIsInstance(info.get_vuln_info_from_db(), DBVuln)

    # lazy calculation success: the db entry is now cached
    self.assertIsNotNone(info._vulndb)
def grep(self, request, response):
    """
    Plugin entry point, verify if the HTML has a form with file uploads.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    for input_tag in parser_cache.dpc.get_tags_by_filter(response,
                                                         ('input',)):
        input_type = input_tag.attrib.get('type', None)

        # Only <input type="file"> is interesting here
        if input_type is None or input_type.lower() != 'file':
            continue

        url = response.get_url()

        msg = 'A form which allows file uploads was found at "%s"'
        msg %= url

        i = Info('File upload form', msg, response.id, self.get_name())
        i.set_url(url)

        self.kb_append_uniq(self, 'file_upload', i, 'URL')

        # One finding per response is enough
        break
def grep(self, request, response):
    """
    Plugin entry point, find feeds.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    dom = response.get_dom()

    # In some strange cases, we fail to normalize the document
    if dom is None:
        return

    uri = response.get_uri()

    # Find all feed tags
    for element in self._tag_xpath(dom):
        feed_type = self._feed_types[element.tag.lower()]
        version = element.attrib.get('version', 'unknown')

        desc = 'The URL "%s" is a %s version %s feed.' % (uri,
                                                          feed_type,
                                                          version)

        i = Info('Content feed resource', desc, response.id,
                 self.get_name())
        i.set_uri(uri)
        i.add_to_highlight(feed_type)

        self.kb_append_uniq(self, 'feeds', i, 'URL')
def _advanced_identification(self, url, ident):
    """
    Identify the remote OS or installed applications based on the web
    user directories that can be found on the server.

    :param url: The base URL to test.
    :param ident: Either 'os' or an application identifier, selects the
                  CSV data set and the wording of the report.
    :return: None, This method will save the results to the kb and print
             and informational message to the user.
    """
    for data_related_to_user, user in self._get_users_from_csv(ident):
        url_user_list = self._create_dirs(url, user_list=[user, ])

        for user_dir, user in url_user_list:
            http_response_id = self._do_request(user_dir, user)

            if http_response_id is None:
                continue

            if ident == 'os':
                desc = ('The remote OS can be identified as "%s" based'
                        ' on the remote user "%s" information that is'
                        ' exposed by the web server.')
                desc = desc % (data_related_to_user, user)
                name = 'Fingerprinted operating system'
            else:
                desc = ('The remote server has "%s" installed, w3af'
                        ' found this information based on the remote'
                        ' user "%s".')
                desc = desc % (data_related_to_user, user)
                name = 'Identified installed application'

            i = Info(name, desc, http_response_id, self.get_name())
            i[ident] = data_related_to_user

            kb.kb.append(self, ident, i)
def _do_request(self, mutated_url, user):
    """
    Perform the request and compare.

    :param mutated_url: Candidate home-directory URL for `user`.
    :param user: The user name being tested.
    :return: The HTTP response id if the mutated_url is a web user
             directory, None otherwise.
    """
    response = self._uri_opener.GET(mutated_url,
                                    cache=True,
                                    headers=self._headers)

    # Strip the path itself from the body before comparing, so the URL
    # echo does not influence the similarity measure
    path = mutated_url.get_path()
    response_body = response.get_body().replace(path, '')

    if not relative_distance_lt(response_body, self._non_existent, 0.7):
        return None

    # Avoid duplicates
    known_users = [u['user'] for u in kb.kb.get('user_dir', 'users')]

    if user not in known_users:
        desc = 'A user directory was found at: %s'
        desc = desc % response.get_url()

        i = Info('Web user home directory', desc, response.id,
                 self.get_name())
        i.set_url(response.get_url())
        i['user'] = user

        kb.kb.append(self, 'users', i)

        for fr in self._create_fuzzable_requests(response):
            self.output_queue.put(fr)

    return response.id
def grep(self, request, response):
    """
    Plugin entry point.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None, all results are saved in the kb.
    """
    # Guard clauses: only report HTML generated by Symfony that is
    # missing a CSRF token
    if not response.is_text_or_html():
        return

    if not self.symfony_detected(response):
        return

    if self.has_csrf_token(response):
        return

    desc = ('The URL: "%s" seems to be generated by the Symfony framework'
            ' and contains a form that has CSRF protection disabled.')
    desc %= response.get_url()

    i = Info('Symfony Framework with CSRF protection disabled', desc,
             response.id, self.get_name())
    i.set_url(response.get_url())

    self.kb_append_uniq(self, 'symfony', i, 'URL')
def _lowest_privilege_test(self, response):
    """
    Parse the "User/Group" row of a phpinfo() page and report whether
    PHP runs with a privileged user or group.

    :param response: HTTP response containing phpinfo() output.
    :return: None, findings are written to the KB.
    """
    regex_str = 'User/Group </td><td class="v">(.*?)\((\d.*?)\)/(\d.*?)</td>'
    lowest_privilege_test = re.search(regex_str, response.get_body(), re.I)

    if not lowest_privilege_test:
        return

    lpt_uname = lowest_privilege_test.group(1)
    lpt_uid = int(lowest_privilege_test.group(2))
    lpt_gid = lowest_privilege_test.group(3)

    # uid/gid below 99 or a well-known privileged account name means PHP
    # may be running with more privileges than necessary
    if lpt_uid < 99 or lpt_gid < 99 or \
            re.match('root|apache|daemon|bin|operator|adm',
                     lpt_uname, re.I):
        desc = ('phpinfo()::PHP may be executing as a higher privileged'
                ' group. Username: %s, UserID: %s, GroupID: %s.')
        desc = desc % (lpt_uname, lpt_uid, lpt_gid)

        v = Vuln('PHP lowest_privilege_test:fail', desc,
                 severity.MEDIUM, response.id, self.get_name())
        v.set_url(response.get_url())

        kb.kb.append(self, 'phpinfo', v)
        om.out.vulnerability(v.get_desc(), severity=v.get_severity())
    else:
        # BUGFIX: the original contained the syntactically invalid line
        #   lpt_desc += 'username: '******', '
        # (a credential-scrubber artifact); reconstructed to include the
        # parsed user name, matching the uid/gid lines below
        lpt_name = 'privilege:' + lpt_uname
        lpt_desc = 'phpinfo()::PHP is executing under '
        lpt_desc += 'username: ' + lpt_uname + ', '
        lpt_desc += 'userID: ' + str(lpt_uid) + ', '
        lpt_desc += 'groupID: ' + lpt_gid

        i = Info(lpt_name, lpt_desc, response.id, self.get_name())
        i.set_url(response.get_url())

        kb.kb.append(self, 'phpinfo', i)
        om.out.information(i.get_desc())
def grep(self, request, response):
    """
    Plugin entry point. Parse the object tags.

    :param request: The HTTP request object.
    :param response: The HTTP response object
    :return: None
    """
    if not response.is_text_or_html():
        return

    url = response.get_url()

    for tag in parser_cache.dpc.get_tags_by_filter(response, self.TAGS):
        # pylint: disable=E1101
        tag_name = tag.name

        desc = ('The URL: "%s" has an "%s" tag. We recommend you download'
                ' the client side code and analyze it manually.')
        desc %= (response.get_uri(), tag_name)

        i = Info('Browser plugin content', desc, response.id,
                 self.get_name())
        i.set_url(url)
        i.add_to_highlight('<%s' % tag_name)

        self.kb_append_uniq(self, tag_name, i, 'URL')
def _avoid_report(self, request, response, msg):
    """
    Decide whether a new finding for error message `msg` should be
    suppressed because too many identical ones were already recorded.

    :return: True when the finding should NOT be reported.
    """
    # We should avoid multiple reports for the same error message
    # the idea here is that the root cause for the same error
    # message might be the same, and fixing one will fix all.
    #
    # So the user receives the first report with MAX_REPORTED_PER_MSG
    # vulnerabilities, fixes the root cause, scans again and then
    # all those instances go away.
    #
    # Without this code, the scanner will potentially report
    # thousands of issues for the same error message. Which will
    # overwhelm the user.
    count = sum(1 for _, _, _, _, highlight in self._potential_vulns
                if highlight == msg)

    if count < self.MAX_REPORTED_PER_MSG:
        return False

    # Emit the "limit reached" notice only once per error message
    if msg not in self._already_reported_max_msg_exceeded:
        self._already_reported_max_msg_exceeded.append(msg)

        desc = ('The application returned multiple HTTP responses'
                ' containing detailed error pages containing exceptions'
                ' and internal information. The maximum number of'
                ' vulnerabilities for this issue type was reached'
                ' and no more issues will be reported.')
        i = Info('Multiple descriptive error pages', desc,
                 [], self.get_name())
        self.kb_append_uniq(self, 'error_page', i)

    return True
def _cert_expiration_analysis(self, domain, port, cert, cert_der, cipher):
    """
    Report SSL certificates that will expire within self._min_expire_days.

    :param domain: The domain the certificate belongs to.
    :param port: The TCP port the certificate was retrieved from.
    :param cert: Certificate dict; the 'notAfter' key is read here.
    :param cert_der: DER-encoded certificate (unused here, kept for
                     interface compatibility).
    :param cipher: Negotiated cipher (unused here, kept for interface
                   compatibility).
    :return: None, findings are written to the KB.
    """
    try:
        # BUGFIX: the original read cert['notAfter'] before the try
        # block, so the `except KeyError` handler could never fire
        not_after = cert['notAfter']
        exp_date = datetime.strptime(not_after, '%Y%m%d%H%M%SZ')
    except KeyError:
        msg = 'SSL certificate does not have an "notAfter" field.'
        om.out.debug(msg)
        return
    except ValueError:
        msg = 'Invalid SSL certificate date format: %s' % not_after
        om.out.debug(msg)
        return

    exp_date_parsed = date(exp_date.year, exp_date.month, exp_date.day)
    expire_days = (exp_date_parsed - date.today()).days

    if expire_days > self._min_expire_days:
        om.out.debug('Certificate will expire in %s days' % expire_days)
        return

    desc = 'The certificate for "%s" will expire soon.' % domain

    i = Info('Soon to expire SSL certificate', desc, 1, self.get_name())
    i.set_url(self._url_from_parts(domain, port))

    self.kb_append(self, 'ssl_soon_expire', i)
def end(self):
    """
    This method is called when the plugin wont be used anymore.
    """
    all_findings = kb.kb.get_all_findings()

    for title, desc, _id, url, highlight in self._potential_vulns:
        # This makes sure that if the sqli plugin found a vulnerability
        # in the same URL as we found a detailed error, we won't report
        # the detailed error.
        #
        # If the user fixes the sqli vulnerability and runs the scan again
        # most likely the detailed error will disappear too. If the sqli
        # vulnerability disappears and this one remains, it will appear
        # as a new vulnerability in the second scan.
        if any(info.get_url() == url for info in all_findings):
            continue

        i = Info(title, desc, _id, self.get_name())
        i.set_url(url)
        i.add_to_highlight(highlight)

        self.kb_append_uniq(self, 'error_page', i)

    self._potential_vulns.cleanup()
def _parse_document(self, response):
    """
    Parses the HTML and adds the mail addresses to the kb.

    :param response: The HTTP response to parse.
    :return: None, findings are written to the KB.
    """
    try:
        document_parser = parser_cache.dpc.get_document_parser_for(
            response, cache=False)
    except BaseFrameworkException:
        # Failed to find a suitable parser for the document
        return

    #
    # Search for email addresses
    #
    for mail in document_parser.get_emails(self._domain_root):
        # Report each account only once
        if mail in self._accounts:
            continue

        self._accounts.append(mail)

        desc = 'The mail account: "%s" was found at: "%s".'
        desc %= (mail, response.get_uri())

        i = Info('Email account', desc, response.id, self.get_name())
        i.set_url(response.get_uri())
        i['mail'] = mail
        i['user'] = mail.split('@')[0]
        i['url_list'] = {response.get_uri()}

        self.kb_append('emails', 'emails', i)
def _content_location_not_300(self, request, response):
    """
    Check if a redirect response (3xx) carries a content-location header.

    NOTE(review): the original docstring claimed this checks responses
    whose code is "not in the 300 range", but the guard below only
    proceeds for codes 301-309 — the documentation was corrected to
    match the actual behavior; the method name remains misleading but
    is part of the interface.

    :param request: The HTTP request object.
    :param response: The HTTP response object.
    :return: None, all results are saved in the kb.
    """
    # Only responses with a 301..309 status code are analyzed
    if not 300 < response.get_code() < 310:
        return

    headers = response.get_headers()
    header_value, header_name = headers.iget('content-location')

    if header_value is None:
        return

    desc = ('The URL: "%s" sent the HTTP header: "content-location"'
            ' with value: "%s" in an HTTP response with code %s which'
            ' is a violation to the RFC.')
    desc %= (response.get_url(),
             header_value,
             response.get_code())

    i = Info('Content-Location HTTP header anomaly', desc,
             response.id, self.get_name())
    i.set_url(response.get_url())
    i.add_to_highlight('content-location')

    kb.kb.append(self, 'anomaly', i)