Example #1
    def _test_DNS(self, original_response, dns_wildcard_url):
        """
        Check if http://www.domain.tld/ == http://domain.tld/
        """
        headers = Headers([('Host', dns_wildcard_url.get_domain())])
        try:
            modified_response = self._uri_opener.GET(
                original_response.get_url(), cache=True, headers=headers)
        except BaseFrameworkException:
            return
        else:
            if fuzzy_not_equal(modified_response.get_body(),
                               original_response.get_body(), 0.35):
                desc = 'The target site has NO DNS wildcard, and the contents' \
                       ' of "%s" differ from the contents of "%s".'
                desc = desc % (dns_wildcard_url, original_response.get_url())

                i = Info('No DNS wildcard', desc, modified_response.id,
                         self.get_name())
                i.set_url(dns_wildcard_url)

                kb.kb.append(self, 'dns_wildcard', i)
                om.out.information(i.get_desc())
            else:
                desc = 'The target site has a DNS wildcard configuration, the' \
                       ' contents of "%s" are equal to the ones of "%s".'
                desc = desc % (dns_wildcard_url, original_response.get_url())

                i = Info('DNS wildcard', desc, modified_response.id,
                         self.get_name())
                i.set_url(original_response.get_url())

                kb.kb.append(self, 'dns_wildcard', i)
                om.out.information(i.get_desc())
    def _test_DNS(self, original_response, dns_wildcard_url):
        """
        Check if http://www.domain.tld/ == http://domain.tld/
        """
        headers = Headers([("Host", dns_wildcard_url.get_domain())])
        try:
            modified_response = self._uri_opener.GET(original_response.get_url(), cache=True, headers=headers)
        except BaseFrameworkException:
            return
        else:
            if relative_distance_lt(modified_response.get_body(), original_response.get_body(), 0.35):
                desc = (
                    "The target site has NO DNS wildcard, and the contents" ' of "%s" differ from the contents of "%s".'
                )
                desc = desc % (dns_wildcard_url, original_response.get_url())

                i = Info("No DNS wildcard", desc, modified_response.id, self.get_name())
                i.set_url(dns_wildcard_url)

                kb.kb.append(self, "dns_wildcard", i)
                om.out.information(i.get_desc())
            else:
                desc = (
                    "The target site has a DNS wildcard configuration, the"
                    ' contents of "%s" are equal to the ones of "%s".'
                )
                desc = desc % (dns_wildcard_url, original_response.get_url())

                i = Info("DNS wildcard", desc, modified_response.id, self.get_name())
                i.set_url(original_response.get_url())

                kb.kb.append(self, "dns_wildcard", i)
                om.out.information(i.get_desc())
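Both variants of _test_DNS above, and every other snippet on this page, follow the same reporting idiom: build an Info (or Vuln) object, attach the URL it refers to, append it to the knowledge base, and echo it through the output manager. Below is a minimal sketch of that idiom distilled from the examples; the import paths and the standalone class are assumptions made here for illustration and are not part of the excerpts.

# Import paths as used in recent w3af versions (assumption, not shown in the excerpts)
import w3af.core.controllers.output_manager as om
import w3af.core.data.kb.knowledge_base as kb

from w3af.core.data.kb.info import Info


class example_plugin(object):
    """Illustrative stand-in for a w3af plugin class."""

    def get_name(self):
        return 'example_plugin'

    def _report(self, response):
        # Human readable description, interpolated the same way as in the examples
        desc = 'Something interesting was found at: "%s".'
        desc %= response.get_url()

        # Info(name, description, response_id, plugin_name)
        i = Info('Interesting finding', desc, response.id, self.get_name())
        i.set_url(response.get_url())
        i['detail'] = 'arbitrary attributes may be stored on the Info object'

        # Store the finding in the knowledge base and log it
        kb.kb.append(self, 'example', i)
        om.out.information(i.get_desc())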
Example #3
    def _check_server_header(self, fuzzable_request, response):
        """
        HTTP GET and analyze response for server header
        """
        if is_no_content_response(response):
            #
            # UrlOpenerProxy(), a helper class used by most plugins, will
            # generate 204 HTTP responses for HTTP requests that fail.
            # This makes plugins have less error handling code (try/except),
            # and looks like this in the scan log:
            #
            #   Generated 204 "No Content" response (id:2131)
            #
            # The problem is that in some strange cases, like this plugin,
            # the 204 response will trigger a false positive. Because of
            # that I had to add this if statement to completely ignore
            # the HTTP responses with 204 status code
            #
            return

        server, header_name = response.get_headers().iget('server')

        if server in self._server_headers:
            return

        self._server_headers.add(server)

        if server:
            desc = 'The server header for the remote web server is: "%s".'
            desc %= server

            i = Info('Server header', desc, response.id, self.get_name())
            i['server'] = server
            i.add_to_highlight(header_name + ':')

            om.out.information(i.get_desc())

            # Save the results in the KB so the user can look at it
            kb.kb.append(self, 'server', i)

            # Also save this for easy internal use
            # other plugins can use this information
            kb.kb.raw_write(self, 'server_string', server)
        else:
            # strange !
            desc = ('The remote HTTP Server omitted the "server" header in'
                    ' its response.')
            i = Info('Omitted server header', desc, response.id,
                     self.get_name())

            om.out.information(i.get_desc())

            # Save the results in the KB so that other plugins can use this
            # information
            kb.kb.append(self, 'omitted_server_header', i)

            # Also save this for easy internal use
            # other plugins can use this information
            kb.kb.raw_write(self, 'server_string', '')
Example #4
    def _analyze_crossdomain_clientaccesspolicy(self, url, response, file_name):

        # https://github.com/andresriancho/w3af/issues/14491
        if file_name not in self.FILE_TAG_ATTR:
            return

        try:
            dom = xml.dom.minidom.parseString(response.get_body())
        except Exception:
            # Report this, it may be interesting for the final user
            # not a vulnerability per-se... but... it's information after all
            if 'allow-access-from' in response.get_body() or \
            'cross-domain-policy' in response.get_body() or \
            'cross-domain-access' in response.get_body():

                desc = 'The "%s" file at: "%s" is not a valid XML.'
                desc %= (file_name, response.get_url())

                i = Info('Invalid RIA settings file', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())

            return

        tag, attribute = self.FILE_TAG_ATTR.get(file_name)
        url_list = dom.getElementsByTagName(tag)

        for url in url_list:
            url = url.getAttribute(attribute)

            if url == '*':
                desc = 'The "%s" file at "%s" allows flash/silverlight'\
                       ' access from any site.'
                desc %= (file_name, response.get_url())

                v = Vuln('Insecure RIA settings', desc, severity.LOW,
                         response.id, self.get_name())
                v.set_url(response.get_url())
                v.set_method('GET')

                kb.kb.append(self, 'vuln', v)
                om.out.vulnerability(v.get_desc(),
                                     severity=v.get_severity())
            else:
                desc = 'The "%s" file at "%s" allows flash/silverlight'\
                       ' access from "%s".'
                desc %= (file_name, response.get_url(), url)

                i = Info('Cross-domain allow ACL', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())
                i.set_method('GET')

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
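Example #4 switches from Info to Vuln (with a severity constant) when the cross-domain policy allows any origin. A minimal sketch of that variant follows; the import paths are assumptions, and the helper function and its arguments are illustrative only.

import w3af.core.controllers.output_manager as om
import w3af.core.data.kb.knowledge_base as kb

from w3af.core.data.constants import severity      # assumed import path
from w3af.core.data.kb.vuln import Vuln            # assumed import path


def report_insecure_policy(plugin, response, file_name):
    # Vuln takes the same arguments as Info, plus a severity constant
    desc = 'The "%s" file at "%s" allows flash/silverlight access from any site.'
    desc %= (file_name, response.get_url())

    v = Vuln('Insecure RIA settings', desc, severity.LOW,
             response.id, plugin.get_name())
    v.set_url(response.get_url())
    v.set_method('GET')

    # Stored the same way as an Info, but logged with its severity
    kb.kb.append(plugin, 'vuln', v)
    om.out.vulnerability(v.get_desc(), severity=v.get_severity())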
Example #5
    def _analyze_crossdomain_clientaccesspolicy(self, url, response,
                                                file_name):

        # https://github.com/andresriancho/w3af/issues/14491
        if file_name not in self.FILE_TAG_ATTR:
            return

        try:
            dom = xml.dom.minidom.parseString(response.get_body())
        except Exception:
            # Report this, it may be interesting for the final user
            # not a vulnerability per-se... but... it's information after all
            if 'allow-access-from' in response.get_body() or \
            'cross-domain-policy' in response.get_body() or \
            'cross-domain-access' in response.get_body():

                desc = 'The "%s" file at: "%s" is not a valid XML.'
                desc %= (file_name, response.get_url())

                i = Info('Invalid RIA settings file', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())

            return

        tag, attribute = self.FILE_TAG_ATTR.get(file_name)
        url_list = dom.getElementsByTagName(tag)

        for url in url_list:
            url = url.getAttribute(attribute)

            if url == '*':
                desc = 'The "%s" file at "%s" allows flash/silverlight'\
                       ' access from any site.'
                desc %= (file_name, response.get_url())

                v = Vuln('Insecure RIA settings', desc, severity.LOW,
                         response.id, self.get_name())
                v.set_url(response.get_url())
                v.set_method('GET')

                kb.kb.append(self, 'vuln', v)
                om.out.vulnerability(v.get_desc(), severity=v.get_severity())
            else:
                desc = 'The "%s" file at "%s" allows flash/silverlight'\
                       ' access from "%s".'
                desc %= (file_name, response.get_url(), url)

                i = Info('Cross-domain allow ACL', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())
                i.set_method('GET')

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
Example #6
    def _analyze_crossdomain_clientaccesspolicy(self, url, response,
                                                file_name):
        try:
            dom = xml.dom.minidom.parseString(response.get_body())
        except Exception:
            # Report this, it may be interesting for the final user
            # not a vulnerability per-se... but... it's information after all
            if 'allow-access-from' in response.get_body() or \
            'cross-domain-policy' in response.get_body() or \
            'cross-domain-access' in response.get_body():

                desc = 'The "%s" file at: "%s" is not a valid XML.'
                desc = desc % (file_name, response.get_url())

                i = Info('Invalid RIA settings file', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
        else:
            if file_name == 'crossdomain.xml':
                url_list = dom.getElementsByTagName("allow-access-from")
                attribute = 'domain'
            if file_name == 'clientaccesspolicy.xml':
                url_list = dom.getElementsByTagName("domain")
                attribute = 'uri'

            for url in url_list:
                url = url.getAttribute(attribute)

                if url == '*':
                    desc = 'The "%s" file at "%s" allows flash/silverlight'\
                           ' access from any site.'
                    desc = desc % (file_name, response.get_url())

                    v = Vuln('Insecure RIA settings', desc, severity.LOW,
                             response.id, self.get_name())
                    v.set_url(response.get_url())
                    v.set_method('GET')

                    kb.kb.append(self, 'vuln', v)
                    om.out.vulnerability(v.get_desc(),
                                         severity=v.get_severity())
                else:
                    desc = 'The "%s" file at "%s" allows flash/silverlight'\
                           ' access from "%s".'
                    desc = desc % (file_name, response.get_url(), url)

                    i = Info('Cross-domain allow ACL', desc, response.id,
                             self.get_name())
                    i.set_url(response.get_url())
                    i.set_method('GET')

                    kb.kb.append(self, 'info', i)
                    om.out.information(i.get_desc())
Example #7
    def _analyze_crossdomain_clientaccesspolicy(self, url, response, file_name):
        try:
            dom = xml.dom.minidom.parseString(response.get_body())
        except Exception:
            # Report this, it may be interesting for the final user
            # not a vulnerability per-se... but... it's information after all
            if 'allow-access-from' in response.get_body() or \
            'cross-domain-policy' in response.get_body() or \
            'cross-domain-access' in response.get_body():

                desc = 'The "%s" file at: "%s" is not a valid XML.'
                desc = desc % (file_name, response.get_url())
            
                i = Info('Invalid RIA settings file', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())
                
                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
        else:
            if file_name == 'crossdomain.xml':
                url_list = dom.getElementsByTagName("allow-access-from")
                attribute = 'domain'
            if file_name == 'clientaccesspolicy.xml':
                url_list = dom.getElementsByTagName("domain")
                attribute = 'uri'

            for url in url_list:
                url = url.getAttribute(attribute)

                if url == '*':
                    desc = 'The "%s" file at "%s" allows flash/silverlight'\
                           ' access from any site.'
                    desc = desc % (file_name, response.get_url())

                    v = Vuln('Insecure RIA settings', desc, severity.LOW,
                             response.id, self.get_name())
                    v.set_url(response.get_url())
                    v.set_method('GET')

                    kb.kb.append(self, 'vuln', v)
                    om.out.vulnerability(v.get_desc(),
                                         severity=v.get_severity())
                else:
                    desc = 'The "%s" file at "%s" allows flash/silverlight'\
                           ' access from "%s".'
                    desc = desc % (file_name, response.get_url(), url)

                    i = Info('Cross-domain allow ACL', desc, response.id,
                             self.get_name())
                    i.set_url(response.get_url())
                    i.set_method('GET')

                    kb.kb.append(self, 'info', i)
                    om.out.information(i.get_desc())
Example #8
    def discover(self, fuzzable_request, debugging_id):
        """
        Identify server software using favicon.

        :param debugging_id: A unique identifier for this call to discover()
        :param fuzzable_request: A fuzzable_request instance that contains
                                (among other things) the URL to test.
        """
        domain_path = fuzzable_request.get_url().get_domain_path()

        # TODO: Maybe I should also parse the html to extract the favicon location?
        favicon_url = domain_path.url_join('favicon.ico')
        response = self._uri_opener.GET(favicon_url, cache=True)
        remote_fav_md5 = hashlib.md5(response.get_body()).hexdigest()

        if not is_404(response):

            # check if MD5 is matched in database/list
            for md5part, favicon_desc in self._read_favicon_db():

                if md5part == remote_fav_md5:
                    desc = 'Favicon.ico file was identified as "%s".' % favicon_desc
                    i = Info('Favicon identification', desc, response.id,
                             self.get_name())
                    i.set_url(favicon_url)
                    
                    kb.kb.append(self, 'info', i)
                    om.out.information(i.get_desc())
                    break
            else:
                #
                #   Report to the kb that we failed to ID this favicon.ico
                #   and that the md5 should be sent to the developers.
                #
                desc = 'Favicon identification failed. If the remote site is'  \
                       ' using framework that is being exposed by its favicon,'\
                       ' please send an email to [email protected]'\
                       ' including this md5 hash "%s" and the' \
                       ' name of the server or Web application it represents.' \
                       ' New fingerprints make this plugin more powerful and ' \
                       ' accurate.'
                desc = desc % remote_fav_md5
                i = Info('Favicon identification failed', desc, response.id,
                         self.get_name())
                i.set_url(favicon_url)

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
Example #9
    def discover(self, fuzzable_request):
        """
        :param fuzzable_request: A fuzzable_request instance that contains
                                    (among other things) the URL to test.
        """
        root_domain = fuzzable_request.get_url().get_root_domain()

        pks_se = pks(self._uri_opener)
        results = pks_se.search(root_domain)
        pks_url = 'http://pgp.mit.edu:11371/'

        for result in results:
            mail = result.username + '@' + root_domain

            desc = 'The mail account: "%s" was found at: "%s".'
            desc = desc % (mail, pks_url)

            i = Info('Email account', desc, result.id, self.get_name())
            i.set_url(URL(pks_url))
            i['mail'] = mail
            i['user'] = result.username
            i['name'] = result.name
            i['url_list'] = {URL(pks_url)}

            kb.kb.append('emails', 'emails', i)
            om.out.information(i.get_desc())
Example #10
    def _fingerprint_readme(self, domain_path, wp_unique_url, response):
        """
        GET the readme.html file and extract the version information from there.
        """
        wp_readme_url = domain_path.url_join('readme.html')
        response = self._uri_opener.GET(wp_readme_url, cache=True)

        # Find the string in the response html
        find = '<br /> Version (\d\.\d\.?\d?)'
        m = re.search(find, response.get_body())

        # If string found, group version
        if m:
            version = m.group(1)

            desc = 'WordPress version "%s" found in the readme.html file.'
            desc %= version

            i = Info('Fingerprinted WordPress version', desc, response.id,
                     self.get_name())
            i.set_url(wp_readme_url)

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())

            # Send link to core
            fr = FuzzableRequest(response.get_uri())
            self.output_queue.put(fr)
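The version extraction above hinges on a single regular expression. Below is a quick standalone check of the same pattern against an illustrative line of readme.html output; the sample HTML and version number are made up.

import re

# Same pattern as in the snippet above, written as a raw string
find = r'<br /> Version (\d\.\d\.?\d?)'
sample = '<br /> Version 3.9.2'

m = re.search(find, sample)
assert m is not None
assert m.group(1) == '3.9.2'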
Example #11
    def _fingerprint_meta(self, domain_path, wp_unique_url, response):
        """
        Check if the wp version is in index header
        """
        # Main scan URL passed from w3af + wp index page
        wp_index_url = domain_path.url_join('index.php')
        response = self._uri_opener.GET(wp_index_url, cache=True)

        # Find the string in the response html
        find = '<meta name="generator" content="[Ww]ord[Pp]ress (\d\.\d\.?\d?)" />'
        m = re.search(find, response.get_body())

        # If string found, group version
        if m:
            version = m.group(1)

            # Save it to the kb!
            desc = 'WordPress version "%s" found in the index header.'
            desc = desc % version

            i = Info('Fingerprinted Wordpress version', desc, response.id,
                     self.get_name())
            i.set_url(wp_index_url)
            
            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
Example #12
    def _fingerprint_data(self, domain_path, wp_unique_url, response):
        """
        Find wordpress version from data
        """
        for wp_fingerprint in self._get_wp_fingerprints():
            
            # The URL in the XML is relative AND it has two different variables
            # that we need to replace:
            #        $wp-content$    -> wp-content/
            #        $wp-plugins$    -> wp-content/plugins/
            path = wp_fingerprint.filepath
            path = path.replace('$wp-content$', 'wp-content/')
            path = path.replace('$wp-plugins$', 'wp-content/plugins/')
            test_url = domain_path.url_join(path)
            
            response = self._uri_opener.GET(test_url, cache=True)

            response_hash = hashlib.md5(response.get_body()).hexdigest()

            if response_hash == wp_fingerprint.hash:
                version = wp_fingerprint.version

                # Save it to the kb!
                desc = 'WordPress version "%s" fingerprinted by matching known md5'\
                       ' hashes to HTTP responses of static resources available at'\
                       ' the remote WordPress install.'
                desc = desc % version
                i = Info('Fingerprinted Wordpress version', desc, response.id,
                         self.get_name())
                i.set_url(test_url)
        
                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
                
                break
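The comparison in this example reduces to hashing the raw response body and looking the digest up in a fingerprint table. A standalone sketch of how such an md5 fingerprint can be computed for a local copy of a static resource follows; the file path is illustrative.

import hashlib

def md5_fingerprint(path):
    # Hash the raw bytes, exactly as the plugin hashes response.get_body()
    with open(path, 'rb') as handle:
        return hashlib.md5(handle.read()).hexdigest()

# e.g. md5_fingerprint('wp-includes/js/tinymce/tiny_mce.js')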
Example #13
    def test_to_json(self):
        i = Info("Blind SQL injection vulnerability", MockInfo.LONG_DESC, 1, "plugin_name")

        i["test"] = "foo"
        i.add_to_highlight("abc", "def")

        jd = i.to_json()
        json_string = json.dumps(jd)
        jd = json.loads(json_string)

        self.assertEqual(jd["name"], i.get_name())
        self.assertEqual(jd["url"], str(i.get_url()))
        self.assertEqual(jd["var"], i.get_token_name())
        self.assertEqual(jd["response_ids"], i.get_id())
        self.assertEqual(jd["vulndb_id"], i.get_vulndb_id())
        self.assertEqual(jd["desc"], i.get_desc(with_id=False))
        self.assertEqual(jd["long_description"], i.get_long_description())
        self.assertEqual(jd["fix_guidance"], i.get_fix_guidance())
        self.assertEqual(jd["fix_effort"], i.get_fix_effort())
        self.assertEqual(jd["tags"], i.get_tags())
        self.assertEqual(jd["wasc_ids"], i.get_wasc_ids())
        self.assertEqual(jd["wasc_urls"], list(i.get_wasc_urls()))
        self.assertEqual(jd["cwe_urls"], list(i.get_cwe_urls()))
        self.assertEqual(jd["references"], BLIND_SQLI_REFS)
        self.assertEqual(jd["owasp_top_10_references"], BLIND_SQLI_TOP10_REFS)
        self.assertEqual(jd["plugin_name"], i.get_plugin_name())
        self.assertEqual(jd["severity"], i.get_severity())
        self.assertEqual(jd["attributes"], i.copy())
        self.assertEqual(jd["highlight"], list(i.get_to_highlight()))
Example #14
    def _html_in_comment(self, comment, request, response):
        """
        Find HTML code in HTML comments
        """
        html_in_comment = self.HTML_RE.search(comment)
        
        if html_in_comment and \
        (comment, response.get_url()) not in self._already_reported_interesting:
            # There is HTML code in the comment.
            comment = comment.strip()
            comment = comment.replace('\n', '')
            comment = comment.replace('\r', '')
            comment = comment[:40]
            desc = 'A comment with the string "%s" was found in: "%s".'\
                   ' This could be interesting.'
            desc = desc % (comment, response.get_url())

            i = Info('HTML comment contains HTML code', desc,
                     response.id, self.get_name())
            i.set_dc(request.get_dc())
            i.set_uri(response.get_uri())
            i.add_to_highlight(html_in_comment.group(0))
            
            kb.kb.append(self, 'html_comment_hides_html', i)
            om.out.information(i.get_desc())
            self._already_reported_interesting.add(
                (comment, response.get_url()))
Example #15
    def _analyze_author(self, response, frontpage_author):
        """
        Analyze the author URL.

        :param response: The http response object for the _vti_inf file.
        :param frontpage_author: A regex match object.
        :return: None. All the info is saved to the kb.
        """
        domain_path = response.get_url().get_domain_path()
        author_location = domain_path.url_join(frontpage_author.group(1))

        # Check for anomalies in the location of author.exe
        if frontpage_author.group(1) != '_vti_bin/_vti_aut/author.exe':
            name = 'Customized frontpage configuration'

            desc = ('The FPAuthorScriptUrl is at: "%s" instead of the default'
                    ' location: "/_vti_bin/_vti_adm/author.exe". This is very'
                    ' uncommon.')
            desc %= author_location
        else:
            name = 'FrontPage FPAuthorScriptUrl'

            desc = 'The FPAuthorScriptUrl is at: "%s".'
            desc %= author_location

        i = Info(name, desc, response.id, self.get_name())
        i.set_url(author_location)
        i['FPAuthorScriptUrl'] = author_location

        kb.kb.append(self, 'frontpage_version', i)
        om.out.information(i.get_desc())
    def _force_disclosures(self, domain_path, potentially_vulnerable_paths):
        """
        :param domain_path: The path to wordpress' root directory
        :param potentially_vulnerable_paths: A list with the paths I'll URL-join
                                             with @domain_path, GET and parse.
        """
        for pvuln_path in potentially_vulnerable_paths:

            pvuln_url = domain_path.url_join(pvuln_path)
            response = self._uri_opener.GET(pvuln_url, cache=True)

            if is_404(response):
                continue

            response_body = response.get_body()
            if 'Fatal error: ' in response_body:
                desc = 'Analyze the HTTP response body to find the full path'\
                       ' where wordpress was installed.'
                i = Info('WordPress path disclosure', desc, response.id,
                         self.get_name())
                i.set_url(pvuln_url)
                
                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
                break
Example #17
    def send_and_check(self, url):

        response = self.http_get_and_parse(url)

        for regex in self.ORACLE_RE:
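            # NOTE: a compiled pattern's search() takes a start offset as its
            # second argument, so passing re.DOTALL here is read as pos=16
            # rather than as a flag; flags have to be set at compile time.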
            mo = regex.search(response.get_body(), re.DOTALL)

            if mo:
                desc = '"%s" version "%s" was detected at "%s".'
                desc %= (mo.group(1).title(), mo.group(2).title(),
                         response.get_url())

                i = Info('Oracle Application Server', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'oracle_discovery', i)
                om.out.information(i.get_desc())
                break

        else:
            msg = ('oracle_discovery found the URL: "%s" but failed to'
                   ' parse it as an Oracle page. The first 50 bytes of'
                   ' the response body is: "%s".')
            body_start = response.get_body()[:50]
            om.out.debug(msg % (response.get_url(), body_start))
    def _force_disclosures(self, domain_path, potentially_vulnerable_paths):
        """
        :param domain_path: The path to wordpress' root directory
        :param potentially_vulnerable_paths: A list with the paths I'll URL-join
                                             with @domain_path, GET and parse.
        """
        for pvuln_path in potentially_vulnerable_paths:

            pvuln_url = domain_path.url_join(pvuln_path)
            response = self._uri_opener.GET(pvuln_url, cache=True)

            if is_404(response):
                continue

            response_body = response.get_body()
            if 'Fatal error: ' in response_body:
                desc = 'Analyze the HTTP response body to find the full path'\
                       ' where wordpress was installed.'
                i = Info('WordPress path disclosure', desc, response.id,
                         self.get_name())
                i.set_url(pvuln_url)
                
                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
                break
Example #19
    def _lowest_privilege_test(self, response):
        regex_str = 'User/Group </td><td class="v">(.*?)\((\d.*?)\)/(\d.*?)</td>'
        lowest_privilege_test = re.search(regex_str, response.get_body(), re.I)
        if lowest_privilege_test:
            lpt_uname = lowest_privilege_test.group(1)
            lpt_uid = lowest_privilege_test.group(2)
            lpt_uid = int(lpt_uid)
            lpt_gid = lowest_privilege_test.group(3)
            if lpt_uid < 99 or lpt_gid < 99 or \
            re.match('root|apache|daemon|bin|operator|adm', lpt_uname, re.I):

                desc = 'phpinfo()::PHP may be executing as a higher privileged'\
                       ' group. Username: %s, UserID: %s, GroupID: %s.' 
                desc = desc % (lpt_uname, lpt_uid, lpt_gid)
                
                v = Vuln('PHP lowest_privilege_test:fail', desc,
                         severity.MEDIUM, response.id, self.get_name())
                v.set_url(response.get_url())

                kb.kb.append(self, 'phpinfo', v)
                om.out.vulnerability(v.get_desc(), severity=v.get_severity())
            else:
                lpt_name = 'privilege:' + lpt_uname
                lpt_desc = 'phpinfo()::PHP is executing under '
                lpt_desc += 'username: ' + lpt_uname + ', '
                lpt_desc += 'userID: ' + str(lpt_uid) + ', '
                lpt_desc += 'groupID: ' + lpt_gid
                i = Info(lpt_name, lpt_desc, response.id, self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'phpinfo', i)
                om.out.information(i.get_desc())
Example #20
File: afd.py Project: RON313/w3af
    def _analyze_results(self, filtered, not_filtered):
        """
        Analyze the test results and save the conclusion to the kb.
        """
        if len(filtered) >= len(self._get_offending_strings()) / 5.0:
            desc = 'The remote network has an active filter. IMPORTANT: The'\
                   ' result of all the other plugins will be inaccurate, web'\
                   ' applications could be vulnerable but "protected" by the'\
                   ' active filter.'
                   
            i = Info('Active filter detected', desc, 1, self.get_name())
            i['filtered'] = filtered
            
            kb.kb.append(self, 'afd', i)
            om.out.information(i.get_desc())

            om.out.information('The following URLs were filtered:')
            for i in filtered:
                om.out.information('- ' + i)

            if not_filtered:
                om.out.information(
                    'The following URLs passed undetected by the filter:')
                for i in not_filtered:
                    om.out.information('- ' + i)
Example #21
    def discover(self, fuzzable_request):
        """
        :param fuzzable_request: A fuzzable_request instance that contains
                                    (among other things) the URL to test.
        """
        root_domain = fuzzable_request.get_url().get_root_domain()

        pks_se = pks(self._uri_opener)
        results = pks_se.search(root_domain)
        pks_url = 'http://pgp.mit.edu:11371/'

        for result in results:
            mail = result.username + '@' + root_domain
            
            desc = 'The mail account: "%s" was found at: "%s".'
            desc = desc % (mail, pks_url)

            i = Info('Email account', desc, result.id, self.get_name())
            i.set_url(URL(pks_url))
            i['mail'] = mail
            i['user'] = result.username
            i['name'] = result.name
            i['url_list'] = {URL(pks_url)}
            
            kb.kb.append('emails', 'emails', i)
            om.out.information(i.get_desc())
Example #22
    def _do_request(self, url, mutant):
        """
        Perform a simple GET to see if the result is an error or not, and then
        run the actual fuzzing.
        """
        response = self._uri_opener.GET(
            mutant, cache=True, headers=self._headers)

        if not (is_404(response) or
        response.get_code() in (403, 401) or
        self._return_without_eval(mutant)):

            # Create the fuzzable request and send it to the core
            fr = FuzzableRequest.from_http_response(response)
            self.output_queue.put(fr)
            
            #
            #   Save it to the kb (if new)!
            #
            if response.get_url() not in self._seen and response.get_url().get_file_name():
                desc = 'A potentially interesting file was found at: "%s".'
                desc = desc % response.get_url()

                i = Info('Potentially interesting file', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())
                
                kb.kb.append(self, 'files', i)
                om.out.information(i.get_desc())

                # Report only once
                self._seen.add(response.get_url())
Example #23
    def _fingerprint_data(self, domain_path, wp_unique_url, response):
        """
        Find wordpress version from data
        """
        for wp_fingerprint in self._get_wp_fingerprints():

            # The URL in the XML is relative AND it has two different variables
            # that we need to replace:
            #        $wp-content$    -> wp-content/
            #        $wp-plugins$    -> wp-content/plugins/
            path = wp_fingerprint.filepath
            path = path.replace('$wp-content$', 'wp-content/')
            path = path.replace('$wp-plugins$', 'wp-content/plugins/')
            test_url = domain_path.url_join(path)

            response = self._uri_opener.GET(test_url, cache=True)

            response_hash = hashlib.md5(response.get_body()).hexdigest()

            if response_hash == wp_fingerprint.hash:
                version = wp_fingerprint.version

                # Save it to the kb!
                desc = 'WordPress version "%s" fingerprinted by matching known md5'\
                       ' hashes to HTTP responses of static resources available at'\
                       ' the remote WordPress install.'
                desc = desc % version
                i = Info('Fingerprinted Wordpress version', desc, response.id,
                         self.get_name())
                i.set_url(test_url)

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())

                break
Example #24
    def _analyze_results(self, filtered, not_filtered):
        """
        Analyze the test results and save the conclusion to the kb.
        """
        if len(filtered) >= len(self._get_offending_strings()) / 5.0:
            desc = ('The remote network has an active filter. IMPORTANT: The'
                    ' result of all the other plugins will be inaccurate, web'
                    ' applications could be vulnerable but "protected" by the'
                    ' active filter.')

            i = Info('Active filter detected', desc, 1, self.get_name())
            i['filtered'] = filtered

            kb.kb.append(self, 'afd', i)
            om.out.information(i.get_desc())

            om.out.information('The following URLs were filtered:')
            for i in filtered:
                om.out.information('- ' + i)

            if not_filtered:
                msg = 'The following URLs passed undetected by the filter:'
                om.out.information(msg)
                for i in not_filtered:
                    om.out.information('- ' + i)

        # Cleanup some memory
        self._not_filtered = []
        self._filtered = []
Example #25
    def _check_x_power(self, fuzzable_request, response):
        """
        Analyze X-Powered-By header.
        """
        for header_name in response.get_headers().keys():
            for needle in ['ASPNET', 'POWERED']:
                if needle in header_name.upper():
                    powered_by = response.get_headers()[header_name]

                    if powered_by in self._x_powered:
                        return

                    self._x_powered.add(powered_by)

                    desc = 'The %s header for the target HTTP server is "%s".'
                    desc %= (header_name, powered_by)

                    i = Info('Powered-by header', desc, response.id, self.get_name())
                    i['powered_by'] = powered_by
                    i.add_to_highlight(header_name + ':')

                    om.out.information(i.get_desc())

                    # Save the results in the KB so that other plugins can
                    # use this information. Before knowing that some servers
                    # may return more than one poweredby header I had:
                    #
                    #     kb.kb.raw_write( self , 'powered_by' , powered_by )
                    #
                    # But I have seen an IIS server with PHP that returns
                    # both the ASP.NET and the PHP headers
                    kb.kb.append(self, 'powered_by', i)

                    # Save the list to the KB
                    kb.kb.raw_write(self, 'powered_by_string', list(powered_by))
Example #26
    def _fingerprint_meta(self, domain_path, wp_unique_url, response):
        """
        Check if the wp version is in index header
        """
        # Main scan URL passed from w3af + wp index page
        wp_index_url = domain_path.url_join('index.php')
        response = self._uri_opener.GET(wp_index_url, cache=True)

        # Find the string in the response html
        find = '<meta name="generator" content="[Ww]ord[Pp]ress (\d\.\d\.?\d?)" />'
        m = re.search(find, response.get_body())

        # If string found, group version
        if m:
            version = m.group(1)

            # Save it to the kb!
            desc = 'WordPress version "%s" found in the index header.'
            desc = desc % version

            i = Info('Fingerprinted Wordpress version', desc, response.id,
                     self.get_name())
            i.set_url(wp_index_url)

            kb.kb.append(self, 'info', i)
            om.out.information(i.get_desc())
Example #27
    def test_to_json(self):
        i = Info('Blind SQL injection vulnerability', MockInfo.LONG_DESC, 1,
                 'plugin_name')

        i['test'] = 'foo'
        i.add_to_highlight('abc', 'def')

        jd = i.to_json()
        json_string = json.dumps(jd)
        jd = json.loads(json_string)

        self.assertEqual(jd['name'], i.get_name())
        self.assertEqual(jd['url'], str(i.get_url()))
        self.assertEqual(jd['var'], i.get_token_name())
        self.assertEqual(jd['response_ids'], i.get_id())
        self.assertEqual(jd['vulndb_id'], i.get_vulndb_id())
        self.assertEqual(jd['desc'], i.get_desc(with_id=False))
        self.assertEqual(jd['long_description'], i.get_long_description())
        self.assertEqual(jd['fix_guidance'], i.get_fix_guidance())
        self.assertEqual(jd['fix_effort'], i.get_fix_effort())
        self.assertEqual(jd['tags'], i.get_tags())
        self.assertEqual(jd['wasc_ids'], i.get_wasc_ids())
        self.assertEqual(jd['wasc_urls'], list(i.get_wasc_urls()))
        self.assertEqual(jd['cwe_urls'], list(i.get_cwe_urls()))
        self.assertEqual(jd['references'], BLIND_SQLI_REFS)
        self.assertEqual(jd['owasp_top_10_references'], BLIND_SQLI_TOP10_REFS)
        self.assertEqual(jd['plugin_name'], i.get_plugin_name())
        self.assertEqual(jd['severity'], i.get_severity())
        self.assertEqual(jd['attributes'], i.copy())
        self.assertEqual(jd['highlight'], list(i.get_to_highlight()))
Example #28
    def _analyze_author(self, response, frontpage_author):
        """
        Analyze the author URL.

        :param response: The http response object for the _vti_inf file.
        :param frontpage_author: A regex match object.
        :return: None. All the info is saved to the kb.
        """
        author_location = response.get_url().get_domain_path().url_join(
            frontpage_author.group(1))

        # Check for anomalies in the location of author.exe
        if frontpage_author.group(1) != '_vti_bin/_vti_aut/author.exe':
            name = 'Customized frontpage configuration'

            desc = 'The FPAuthorScriptUrl is at: "%s" instead of the default'\
                   ' location: "/_vti_bin/_vti_adm/author.exe". This is very'\
                   ' uncommon.'
            desc = desc % author_location
        else:
            name = 'FrontPage FPAuthorScriptUrl'

            desc = 'The FPAuthorScriptUrl is at: "%s".'
            desc = desc % author_location

        i = Info(name, desc, response.id, self.get_name())
        i.set_url(author_location)
        i['FPAuthorScriptUrl'] = author_location
        
        kb.kb.append(self, 'frontpage_version', i)
        om.out.information(i.get_desc())
Example #29
    def _do_request(self, url, mutant):
        """
        Perform a simple GET to see if the result is an error or not, and then
        run the actual fuzzing.
        """
        response = self._uri_opener.GET(mutant,
                                        cache=True,
                                        headers=self._headers)

        if not (is_404(response) or response.get_code() in (403, 401)
                or self._return_without_eval(mutant)):

            # Create the fuzzable request and send it to the core
            fr = FuzzableRequest.from_http_response(response)
            self.output_queue.put(fr)

            #
            #   Save it to the kb (if new)!
            #
            if response.get_url() not in self._seen and response.get_url(
            ).get_file_name():
                desc = 'A potentially interesting file was found at: "%s".'
                desc = desc % response.get_url()

                i = Info('Potentially interesting file', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'files', i)
                om.out.information(i.get_desc())

                # Report only once
                self._seen.add(response.get_url())
Example #30
    def test_to_json(self):
        i = Info('Blind SQL injection vulnerability', MockInfo.LONG_DESC, 1,
                 'plugin_name')

        i['test'] = 'foo'
        i.add_to_highlight('abc', 'def')

        jd = i.to_json()
        json_string = json.dumps(jd)
        jd = json.loads(json_string)

        self.assertEqual(jd['name'], i.get_name())
        self.assertEqual(jd['url'], str(i.get_url()))
        self.assertEqual(jd['var'], i.get_token_name())
        self.assertEqual(jd['response_ids'], i.get_id())
        self.assertEqual(jd['vulndb_id'], i.get_vulndb_id())
        self.assertEqual(jd['desc'], i.get_desc(with_id=False))
        self.assertEqual(jd['long_description'], i.get_long_description())
        self.assertEqual(jd['fix_guidance'], i.get_fix_guidance())
        self.assertEqual(jd['fix_effort'], i.get_fix_effort())
        self.assertEqual(jd['tags'], i.get_tags())
        self.assertEqual(jd['wasc_ids'], i.get_wasc_ids())
        self.assertEqual(jd['wasc_urls'], list(i.get_wasc_urls()))
        self.assertEqual(jd['cwe_urls'], list(i.get_cwe_urls()))
        self.assertEqual(jd['references'], BLIND_SQLI_REFS)
        self.assertEqual(jd['owasp_top_10_references'], BLIND_SQLI_TOP10_REFS)
        self.assertEqual(jd['plugin_name'], i.get_plugin_name())
        self.assertEqual(jd['severity'], i.get_severity())
        self.assertEqual(jd['attributes'], i.copy())
        self.assertEqual(jd['highlight'], list(i.get_to_highlight()))
Example #31
    def discover(self, fuzzable_request):
        """
        Identify server software using favicon.

        :param fuzzable_request: A fuzzable_request instance that contains
                                (among other things) the URL to test.
        """
        domain_path = fuzzable_request.get_url().get_domain_path()

        # TODO: Maybe I should also parse the html to extract the favicon location?
        favicon_url = domain_path.url_join('favicon.ico')
        response = self._uri_opener.GET(favicon_url, cache=True)
        remote_fav_md5 = hashlib.md5(response.get_body()).hexdigest()

        if not is_404(response):

            # check if MD5 is matched in database/list
            for md5part, favicon_desc in self._read_favicon_db():

                if md5part == remote_fav_md5:
                    desc = 'Favicon.ico file was identified as "%s".' % favicon_desc
                    i = Info('Favicon identification', desc, response.id,
                             self.get_name())
                    i.set_url(favicon_url)
                    
                    kb.kb.append(self, 'info', i)
                    om.out.information(i.get_desc())
                    break
            else:
                #
                #   Report to the kb that we failed to ID this favicon.ico
                #   and that the md5 should be sent to the developers.
                #
                desc = 'Favicon identification failed. If the remote site is'  \
                       ' using framework that is being exposed by its favicon,'\
                       ' please send an email to [email protected]'\
                       ' including this md5 hash "%s" and the' \
                       ' name of the server or Web application it represents.' \
                       ' New fingerprints make this plugin more powerful and ' \
                       ' accurate.'
                desc = desc % remote_fav_md5
                i = Info('Favicon identification failed', desc, response.id,
                         self.get_name())
                i.set_url(favicon_url)

                kb.kb.append(self, 'info', i)
                om.out.information(i.get_desc())
Example #32
    def _extract_version_from_egg(self, query_results):
        """
        Analyzes the eggs and tries to deduce a PHP version number
        ( which is then saved to the kb ).
        """
        if not query_results:
            return None
        else:
            cmp_list = []
            for query_result in query_results:
                body = query_result.http_response.get_body()
                if isinstance(body, unicode): body = body.encode('utf-8')
                hash_str = hashlib.md5(body).hexdigest()
                
                cmp_list.append((hash_str, query_result.egg_desc))
                
            cmp_set = set(cmp_list)

            found = False
            matching_versions = []
            for version in self.EGG_DB:
                version_hashes = set(self.EGG_DB[version])

                if len(cmp_set) == len(cmp_set.intersection(version_hashes)):
                    matching_versions.append(version)
                    found = True

            if matching_versions:
                desc = 'The PHP framework version running on the remote'\
                       ' server was identified as:\n- %s'
                versions = '\n- '.join(matching_versions)
                desc = desc % versions
                
                response_ids = [r.http_response.get_id() for r in query_results]
                
                i = Info('Fingerprinted PHP version', desc, response_ids,
                         self.get_name())
                i['version'] = matching_versions
                
                kb.kb.append(self, 'version', i)
                om.out.information(i.get_desc())

            if not found:
                version = 'unknown'
                powered_by_headers = kb.kb.raw_read('server_header',
                                                    'powered_by_string')
                try:
                    for v in powered_by_headers:
                        if 'php' in v.lower():
                            version = v.split('/')[1]
                except:
                    pass
                
                msg = 'The PHP version could not be identified using PHP eggs,'\
                      ' please send this signature and the PHP version to the'\
                      ' w3af project develop mailing list. Signature:'\
                      ' EGG_DB[\'%s\'] = %s\n'
                msg = msg % (version, str(list(cmp_set)))
                om.out.information(msg)
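The version matching in this example is effectively a subset test: every (hash, description) pair collected from the target must appear in a candidate version's signature set. The same check with plain toy data follows; the hash strings are made up.

collected = {('a1b2c3', 'PHP Credits'), ('d4e5f6', 'PHP Logo')}
signature = {('a1b2c3', 'PHP Credits'), ('d4e5f6', 'PHP Logo'), ('090a0b', 'Zend Logo')}

# len(collected) == len(collected.intersection(signature)) is just a subset test
assert len(collected) == len(collected.intersection(signature))
assert collected.issubset(signature)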
Example #33
    def crawl(self, fuzzable_request):
        """
        Get the execute.xml file and parse it.

        :param fuzzable_request: A fuzzable_request instance that contains
                                (among other things) the URL to test.
        """
        base_url = fuzzable_request.get_url().base_url()

        for file_name in ('execute.xml', 'DeveloperMenu.xml'):
            genexus_url = base_url.url_join(file_name)

            http_response = self._uri_opener.GET(genexus_url, cache=True)

            if '</ObjLink>' in http_response and not is_404(http_response):
                # Save it to the kb!
                desc = 'The "%s" file was found at: "%s", this file might'\
                       ' expose private URLs and requires a manual review. The'\
                       ' scanner will add all URLs listed in this file to the'\
                       ' crawl queue.'
                desc = desc % (file_name, genexus_url)
                title_info = 'GeneXus "%s" file' % file_name

                i = Info(title_info, desc, http_response.id, self.get_name())
                i.set_url(genexus_url)

                kb.kb.append(self, file_name, i)
                om.out.information(i.get_desc())

                # Extract the links
                om.out.debug('Analyzing "%s" file.' % file_name)
                for fr in self._create_fuzzable_requests(http_response):
                    self.output_queue.put(fr)

                om.out.debug('Parsing xml file with xml.dom.minidom.')
                try:
                    dom = xml.dom.minidom.parseString(http_response.get_body())
                except:
                    raise BaseFrameworkException('Error while parsing "%s"' %
                                                 file_name)
                else:
                    raw_url_list = dom.getElementsByTagName("ObjLink")
                    parsed_url_list = []

                    for url in raw_url_list:
                        try:
                            url = url.childNodes[0].data
                            url = base_url.url_join(url)
                        except ValueError, ve:
                            msg = '"%s" file had an invalid URL "%s"'
                            om.out.debug(msg % (file_name, ve))
                        except:
                            msg = '"%s" file had an invalid format'
                            om.out.debug(msg % file_name)
                        else:
                            parsed_url_list.append(url)
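The link extraction in this example is plain xml.dom.minidom traversal. A standalone sketch with a tiny inline document follows; the XML content is illustrative.

import xml.dom.minidom

body = ('<ObjLinks>'
        '<ObjLink>viewcustomer.aspx</ObjLink>'
        '<ObjLink>editcustomer.aspx</ObjLink>'
        '</ObjLinks>')

dom = xml.dom.minidom.parseString(body)
links = [node.childNodes[0].data for node in dom.getElementsByTagName('ObjLink')]
assert links == ['viewcustomer.aspx', 'editcustomer.aspx']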
Example #34
    def _file_uploads(self, response):
        regex_str = 'file_uploads</td><td class="v">(On|<i>no value</i>)</td>'
        file_uploads = re.search(regex_str, response.get_body(), re.IGNORECASE)
        if file_uploads:
            desc = 'The phpinfo()::file_uploads is enabled.'
            i = Info('PHP file_uploads: On', desc, response.id, self.get_name())
            i.set_url(response.get_url())

            kb.kb.append(self, 'phpinfo', i)
            om.out.information(i.get_desc())
Example #35
    def _check_server_header(self, fuzzable_request):
        """
        HTTP GET and analyze response for server header
        """
        response = self._uri_opener.GET(fuzzable_request.get_url(), cache=True)

        for hname, hvalue in response.get_lower_case_headers().iteritems():
            if hname == 'server':
                server = hvalue
                
                desc = 'The server header for the remote web server is: "%s".'
                desc = desc % server
                
                i = Info('Server header', desc, response.id, self.get_name())
                i['server'] = server
                i.add_to_highlight(hname + ':')
                
                om.out.information(i.get_desc())

                # Save the results in the KB so the user can look at it
                kb.kb.append(self, 'server', i)

                # Also save this for easy internal use
                # other plugins can use this information
                kb.kb.raw_write(self, 'server_string', server)
                break

        else:
            # strange !
            desc = 'The remote HTTP Server omitted the "server" header in'\
                  ' its response.'
            i = Info('Omitted server header', desc, response.id,
                     self.get_name())

            om.out.information(i.get_desc())

            # Save the results in the KB so that other plugins can use this
            # information
            kb.kb.append(self, 'ommited_server_header', i)

            # Also save this for easy internal use
            # other plugins can use this information
            kb.kb.raw_write(self, 'server_string', '')
Example #36
    def _check_server_header(self, fuzzable_request, response):
        """
        HTTP GET and analyze response for server header
        """
        server, header_name = response.get_headers().iget('server')

        if server in self._server_headers:
            return

        self._server_headers.add(server)

        if server:
            desc = 'The server header for the remote web server is: "%s".'
            desc %= server

            i = Info('Server header', desc, response.id, self.get_name())
            i['server'] = server
            i.add_to_highlight(header_name + ':')

            om.out.information(i.get_desc())

            # Save the results in the KB so the user can look at it
            kb.kb.append(self, 'server', i)

            # Also save this for easy internal use
            # other plugins can use this information
            kb.kb.raw_write(self, 'server_string', server)
        else:
            # strange !
            desc = ('The remote HTTP Server omitted the "server" header in'
                    ' its response.')
            i = Info('Omitted server header', desc, response.id,
                     self.get_name())

            om.out.information(i.get_desc())

            # Save the results in the KB so that other plugins can use this
            # information
            kb.kb.append(self, 'omitted_server_header', i)

            # Also save this for easy internal use
            # other plugins can use this information
            kb.kb.raw_write(self, 'server_string', '')
Example #37
    def _check_server_header(self, fuzzable_request):
        """
        HTTP GET and analyze response for server header
        """
        response = self._uri_opener.GET(fuzzable_request.get_url(), cache=True)

        for hname, hvalue in response.get_lower_case_headers().iteritems():
            if hname == 'server':
                server = hvalue

                desc = 'The server header for the remote web server is: "%s".'
                desc = desc % server

                i = Info('Server header', desc, response.id, self.get_name())
                i['server'] = server
                i.add_to_highlight(hname + ':')

                om.out.information(i.get_desc())

                # Save the results in the KB so the user can look at it
                kb.kb.append(self, 'server', i)

                # Also save this for easy internal use
                # other plugins can use this information
                kb.kb.raw_write(self, 'server_string', server)
                break

        else:
            # strange !
            desc = 'The remote HTTP Server omitted the "server" header in' \
                   ' its response.'
            i = Info('Omitted server header', desc, response.id,
                     self.get_name())

            om.out.information(i.get_desc())

            # Save the results in the KB so that other plugins can use this
            # information
            kb.kb.append(self, 'omitted_server_header', i)

            # Also save this for easy internal use
            # other plugins can use this information
            kb.kb.raw_write(self, 'server_string', '')
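Both variants above end up storing the Server header value (or an empty string) under server_string. As a rough standalone illustration outside the framework, the same header can be read with the standard library; the URL below is only a placeholder:

# Illustration only: reading the Server header with the standard library.
# 'http://example.com/' is a placeholder target.
try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

response = urlopen('http://example.com/')
server = response.headers.get('Server', '')
# An empty value corresponds to the "omitted server header" case handled above.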
Example #38
0
    def crawl(self, fuzzable_request):
        """
        Get the execute.xml file and parse it.

        :param fuzzable_request: A fuzzable_request instance that contains
                                (among other things) the URL to test.
        """
        base_url = fuzzable_request.get_url().base_url()
        
        for file_name in ('execute.xml', 'DeveloperMenu.xml'):
            genexus_url = base_url.url_join(file_name)
            
            http_response = self._uri_opener.GET(genexus_url, cache=True)
            
            if '</ObjLink>' in http_response and not is_404(http_response):
                # Save it to the kb!
                desc = 'The "%s" file was found at: "%s", this file might'\
                       ' expose private URLs and requires a manual review. The'\
                       ' scanner will add all URLs listed in this file to the'\
                       ' crawl queue.'
                desc = desc % (file_name, genexus_url)
                title_info = 'GeneXus "%s" file' % file_name
            
                i = Info(title_info, desc, http_response.id, self.get_name())
                i.set_url(genexus_url)

                kb.kb.append(self, file_name, i)
                om.out.information(i.get_desc())

                # Extract the links
                om.out.debug('Analyzing "%s" file.' % file_name)
                for fr in self._create_fuzzable_requests(http_response):
                    self.output_queue.put(fr)

                om.out.debug('Parsing xml file with xml.dom.minidom.')
                try:
                    dom = xml.dom.minidom.parseString(http_response.get_body())
                except:
                    raise BaseFrameworkException('Error while parsing "%s"' % file_name)
                else:
                    raw_url_list = dom.getElementsByTagName("ObjLink")
                    parsed_url_list = []
                    
                    for url in raw_url_list:
                        try:
                            url = url.childNodes[0].data
                            url = base_url.url_join(url)
                        except ValueError, ve:
                            msg = '"%s" file had an invalid URL "%s"'
                            om.out.debug(msg % (file_name, ve))
                        except:
                            msg = '"%s" file had an invalid format'
                            om.out.debug(msg % file_name)
                        else:
                            parsed_url_list.append(url)
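The snippet above is truncated before parsed_url_list is used. For context, a minimal sketch of the ObjLink extraction on an invented execute.xml-style fragment (the file contents shown here are made up):

# Illustration only: parsing an invented execute.xml-style fragment.
import xml.dom.minidom

sample = ('<Objects>'
          '<ObjLink>viewcustomer.aspx</ObjLink>'
          '<ObjLink>admin/listorders.aspx</ObjLink>'
          '</Objects>')

dom = xml.dom.minidom.parseString(sample)
links = [node.childNodes[0].data
         for node in dom.getElementsByTagName('ObjLink')]
# links == ['viewcustomer.aspx', 'admin/listorders.aspx']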
Example #39
0
    def _test_dns(self, original_response, dns_wildcard_url):
        """
        Check if http://www.domain.tld/ == http://domain.tld/
        """
        headers = Headers([('Host', dns_wildcard_url.get_domain())])

        try:
            modified_response = self._uri_opener.GET(original_response.get_url(),
                                                     cache=True,
                                                     headers=headers)
        except BaseFrameworkException as bfe:
            msg = ('An error occurred while fetching the IP address URL in'
                   ' the dns_wildcard plugin: "%s"')
            om.out.debug(msg % bfe)
            return

        if fuzzy_not_equal(modified_response.get_body(), original_response.get_body(), 0.35):
            desc = ('The target site has NO DNS wildcard, and the contents'
                    ' of "%s" differ from the contents of "%s".')
            desc %= (dns_wildcard_url, original_response.get_url())

            i = Info('No DNS wildcard',
                     desc,
                     [original_response.id, modified_response.id],
                     self.get_name())
            i.set_url(dns_wildcard_url)

            kb.kb.append(self, 'dns_wildcard', i)
            om.out.information(i.get_desc())
        else:
            desc = ('The target site has a DNS wildcard configuration, the'
                    ' contents of "%s" are equal to the ones of "%s".')
            desc %= (dns_wildcard_url, original_response.get_url())

            i = Info('DNS wildcard',
                     desc,
                     [original_response.id, modified_response.id],
                     self.get_name())
            i.set_url(original_response.get_url())

            kb.kb.append(self, 'dns_wildcard', i)
            om.out.information(i.get_desc())
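The 0.35 value passed to fuzzy_not_equal is a similarity cutoff. A minimal sketch of that kind of comparison using only the standard library (an approximation of the idea, not the framework's own implementation):

# Rough stand-in for the fuzzy body comparison used above.
import difflib

def bodies_differ(body_a, body_b, threshold=0.35):
    # ratio() is 1.0 for identical strings and 0.0 for completely different
    # ones; the bodies are treated as different when similarity drops below
    # the cutoff.
    similarity = difflib.SequenceMatcher(None, body_a, body_b).ratio()
    return similarity < threshold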
Example #40
0
    def _report_to_kb_if_needed(self, http_response, parser):
        """
        If the parser did find something, then we report it to the KB.

        :param http_response: The HTTP response that was parsed
        :param parser: The OpenAPI parser instance
        :return: None
        """
        if not parser.get_api_calls():
            return

        # Save it to the kb!
        desc = ('An Open API specification was found at: "%s", the scanner'
                ' was able to extract %s API endpoints which will be audited'
                ' for vulnerabilities.')
        desc %= (http_response.get_url(), len(parser.get_api_calls()))

        i = Info('Open API specification found', desc, http_response.id,
                 self.get_name())
        i.set_url(http_response.get_url())

        kb.kb.append(self, 'open_api', i)
        om.out.information(i.get_desc())

        # Warn the user about missing credentials
        if self._query_string_auth or self._header_auth:
            return

        desc = (
            'An Open API specification was found at: "%s", but no credentials'
            ' were provided in the `open_api` plugin. The scanner will try'
            ' to audit the identified endpoints but coverage will most likely'
            ' be reduced due to missing authentication.')
        desc %= http_response.get_url()

        i = Info('Open API missing credentials', desc, http_response.id,
                 self.get_name())
        i.set_url(http_response.get_url())

        kb.kb.append(self, 'open_api', i)
        om.out.information(i.get_desc())
Example #41
0
    def _get_dead_domains(self, fuzzable_request):
        """
        Find every link on a HTML document verify if the domain can be resolved

        :return: Yield domains that can not be resolved or resolve to a private
                 IP address
        """
        original_response = self._uri_opener.GET(fuzzable_request.get_uri(),
                                                 cache=True)

        try:
            dp = parser_cache.dpc.get_document_parser_for(original_response)
        except BaseFrameworkException:
            # Failed to find a suitable parser for the document
            return

        # Note:
        #
        # - With parsed_references I'm 100% that it's really something in the
        #   HTML that the developer intended to add.
        #
        # - The re_references are the result of regular expressions, which in
        #   some cases are just false positives.
        #
        # In this case, and because I'm only going to use the domain name of the
        # URL I'm going to trust the re_references also.
        parsed_references, re_references = dp.get_references()
        parsed_references.extend(re_references)

        for link in parsed_references:
            domain = link.get_domain()

            if domain in self._already_queried_dns:
                continue

            self._already_queried_dns.add(domain)

            if not is_private_site(domain):
                continue

            desc = (
                u'The content of "%s" references a non existent domain: "%s".'
                u' This can be a broken link, or an internal domain name.')
            desc %= (fuzzable_request.get_url(), domain)

            i = Info(u'Internal hostname in HTML link', desc,
                     original_response.id, self.get_name())
            i.set_url(fuzzable_request.get_url())

            kb.kb.append(self, 'find_vhosts', i)
            om.out.information(i.get_desc())

            yield domain
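A rough standalone sketch of the kind of check is_private_site performs, assumed here to mean "does not resolve, or resolves to a loopback / RFC 1918 address"; the framework's own helper may behave differently:

# Illustration only: approximate "private or unresolvable hostname" check.
import socket

PRIVATE_PREFIXES = ('10.', '127.', '192.168.', '169.254.')

def looks_private(domain):
    try:
        ip = socket.gethostbyname(domain)
    except socket.error:
        # Names that do not resolve are also worth reporting.
        return True
    if ip.startswith(PRIVATE_PREFIXES):
        return True
    if ip.startswith('172.'):
        # 172.16.0.0/12
        second_octet = int(ip.split('.')[1])
        return 16 <= second_octet <= 31
    return False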
Example #42
0
    def _fingerprint_installer(self, domain_path, wp_unique_url, response):
        """
        GET latest.zip and latest.tar.gz and compare with the hashes from the
        release.db that was previously generated from wordpress.org [0]
        and contains all release hashes.

        This gives the initial wordpress version, not the current one.

        [0] http://wordpress.org/download/release-archive/
        """
        zip_url = domain_path.url_join('latest.zip')
        tar_gz_url = domain_path.url_join('latest.tar.gz')
        install_urls = [zip_url, tar_gz_url]

        for install_url in install_urls:
            response = self._uri_opener.GET(install_url,
                                            cache=True,
                                            respect_size_limit=False)

            # md5sum the response body
            m = hashlib.md5()
            m.update(response.get_body())
            remote_release_hash = m.hexdigest()

            release_db = self._release_db

            for line in file(release_db):
                try:
                    line = line.strip()
                    release_db_hash, release_db_name = line.split(',')
                except:
                    continue

                if release_db_hash == remote_release_hash:

                    desc = (
                        'The sysadmin used WordPress version "%s" during the'
                        ' installation, which was found by matching the contents'
                        ' of "%s" with the hashes of known releases. If the'
                        ' sysadmin did not update wordpress, the current version'
                        ' will still be the same.')
                    desc %= (release_db_name, install_url)

                    i = Info('Fingerprinted Wordpress version', desc,
                             response.id, self.get_name())
                    i.set_url(install_url)

                    kb.kb.append(self, 'info', i)
                    om.out.information(i.get_desc())

                    # Send link to core
                    fr = FuzzableRequest(response.get_uri())
                    self.output_queue.put(fr)
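The lookup above implies that release.db is a plain-text file with one md5_hash,version pair per line (that format follows from the line.split(',') call). A minimal sketch of producing such a line from a locally downloaded archive; the file name is hypothetical:

# Hypothetical helper that produces a release.db-style line from a local
# WordPress release archive.
import hashlib

def release_db_line(archive_path, version_label):
    m = hashlib.md5()
    with open(archive_path, 'rb') as archive:
        m.update(archive.read())
    return '%s,%s' % (m.hexdigest(), version_label)

# Example: release_db_line('wordpress-3.5.1.zip', '3.5.1')
# -> '<md5 hex digest>,3.5.1'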
Example #43
0
    def _report_no_realm(self, response):
        # Report this strange case
        desc = 'The resource: "%s" requires authentication (HTTP Code'\
               ' 401) but the www-authenticate header is not present.'\
               ' This requires human verification.'
        desc = desc % response.get_url() 
        i = Info('Authentication without www-authenticate header', desc,
                 response.id, self.get_name())
        i.set_url(response.get_url())

        kb.kb.append(self, 'non_rfc_auth', i)
        om.out.information(i.get_desc())
Example #45
0
    def _report_finding(self, response):
        """
        Save the finding to the kb.

        :param response: The response that triggered the detection
        """
        desc = 'The remote web server seems to have a reverse proxy installed.'

        i = Info('Reverse proxy identified', desc, response.id, self.get_name())
        i.set_url(response.get_url())

        kb.kb.append(self, 'detect_reverse_proxy', i)
        om.out.information(i.get_desc())
Example #46
0
    def _analyze_gears_manifest(self, url, response, file_name):
        if '"entries":' in response:
            # Save it to the kb!
            desc = 'A gears manifest file was found at: "%s".'\
                   ' Each file should be manually reviewed for sensitive'\
                   ' information that may get cached on the client.'
            desc = desc % url

            i = Info('Gears manifest resource', desc, response.id,
                     self.get_name())
            i.set_url(url)

            kb.kb.append(self, url, i)
            om.out.information(i.get_desc())
Example #47
0
    def crawl(self, fuzzable_request):
        """
        Get the robots.txt file and parse it.

        :param fuzzable_request: A fuzzable_request instance that contains
                                (among other things) the URL to test.
        """
        dirs = []

        base_url = fuzzable_request.get_url().base_url()
        robots_url = base_url.url_join('robots.txt')
        http_response = self._uri_opener.GET(robots_url, cache=True)

        if is_404(http_response):
            return

        # Save it to the kb!
        desc = ('A robots.txt file was found at: "%s", this file might'
                ' expose private URLs and requires a manual review. The'
                ' scanner will add all URLs listed in this file to the'
                ' analysis queue.')
        desc %= robots_url

        i = Info('robots.txt file', desc, http_response.id, self.get_name())
        i.set_url(robots_url)

        kb.kb.append(self, 'robots.txt', i)
        om.out.information(i.get_desc())

        # Work with it...
        dirs.append(robots_url)
        for line in http_response.get_body().split('\n'):

            line = line.strip()

            if len(line) > 0 and line[0] != '#' and \
                (line.upper().find('ALLOW') == 0 or
                 line.upper().find('DISALLOW') == 0):

                url = line[line.find(':') + 1:]
                url = url.strip()
                try:
                    url = base_url.url_join(url)
                except:
                    # Simply ignore the invalid URL
                    pass
                else:
                    dirs.append(url)

        self.worker_pool.map(self.http_get_and_parse, dirs)
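To make the Allow/Disallow handling concrete, this is what that parsing extracts from a made-up robots.txt body:

# Standalone illustration of the Allow/Disallow parsing above, applied to an
# invented robots.txt body.
sample_body = """User-agent: *
Disallow: /admin/
Disallow: /backup/old.tar.gz
Allow: /public/
# comments and blank lines are skipped
"""

paths = []
for line in sample_body.split('\n'):
    line = line.strip()
    if not line or line.startswith('#'):
        continue
    if line.upper().startswith(('ALLOW', 'DISALLOW')):
        paths.append(line[line.find(':') + 1:].strip())

# paths == ['/admin/', '/backup/old.tar.gz', '/public/']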
Example #50
0
    def _kb_info_user(self, url, response_id, username):
        """
        Put user in Kb
        :return: None, everything is saved in kb
        """
        desc = 'WordPress user "%s" found during username enumeration.'
        desc = desc % username

        i = Info('Identified WordPress user', desc, response_id,
                 self.get_name())
        i.set_url(url)

        kb.kb.append(self, 'users', i)
        om.out.information(i.get_desc())
Example #51
0
    def discover(self, fuzzable_request):
        """
        :param fuzzable_request: A fuzzable_request instance that contains
                                 (among other things) the URL to test.
        """
        if self._is_proxyed_conn(fuzzable_request):
            desc = 'Your ISP seems to have a transparent proxy installed,'\
                   ' this can influence scan results in unexpected ways.'

            i = Info('Transparent proxy detected', desc, 1, self.get_name())
            i.set_url(fuzzable_request.get_url())

            kb.kb.append(self, 'detect_transparent_proxy', i)
            om.out.information(i.get_desc())
        else:
            om.out.information('Your ISP has no transparent proxy.')
Example #52
0
    def _fingerprint_installer(self, domain_path, wp_unique_url, response):
        """
        GET latest.zip and latest.tar.gz and compare with the hashes from the
        release.db that was previously generated from wordpress.org [0]
        and contains all release hashes.

        This gives the initial wordpress version, not the current one.

        [0] http://wordpress.org/download/release-archive/
        """
        zip_url = domain_path.url_join('latest.zip')
        tar_gz_url = domain_path.url_join('latest.tar.gz')
        install_urls = [zip_url, tar_gz_url]

        for install_url in install_urls:
            response = self._uri_opener.GET(install_url, cache=True,
                                            respect_size_limit=False)

            # md5sum the response body
            m = hashlib.md5()
            m.update(response.get_body())
            remote_release_hash = m.hexdigest()

            release_db = self._release_db

            for line in file(release_db):
                try:
                    line = line.strip()
                    release_db_hash, release_db_name = line.split(',')
                except:
                    continue

                if release_db_hash == remote_release_hash:

                    desc = 'The sysadmin used WordPress version "%s" during the'\
                           ' installation, which was found by matching the contents'\
                           ' of "%s" with the hashes of known releases. If the'\
                           ' sysadmin did not update wordpress, the current version'\
                           ' will still be the same.'
                    desc = desc % (release_db_name, install_url)

                    i = Info('Fingerprinted Wordpress version', desc, response.id,
                             self.get_name())
                    i.set_url(install_url)
                    
                    kb.kb.append(self, 'info', i)
                    om.out.information(i.get_desc())
Example #53
0
File: zone_h.py  Project: 0x554simon/w3af
    def _parse_zone_h_result(self, response):
        """
        Parse the result from the zone_h site and create the corresponding info
        objects.

        :return: None
        """
        #
        #   I'm going to do only one big "if":
        #
        #       - The target site was hacked more than one time
        #       - The target site was hacked only one time
        #

        # This is the string I have to parse:
        # in the zone_h response there are two strings like this; the first
        # one has to be ignored!
        regex = r'Total notifications: <b>(\d*)</b> of which <b>(\d*)</b> single ip and <b>(\d*)</b> mass'
        regex_result = re.findall(regex, response.get_body())

        try:
            total_attacks = int(regex_result[0][0])
        except IndexError:
            om.out.debug('An error was generated during the parsing of the zone_h website.')
        else:

            # Do the if...
            if total_attacks > 1:
                desc = 'The target site was defaced more than one time in the'\
                       ' past. For more information please visit the following'\
                       ' URL: "%s".' % response.get_url()
                       
                v = Vuln('Previous defacements', desc,
                         severity.MEDIUM, response.id, self.get_name())
                v.set_url(response.get_url())
                
                kb.kb.append(self, 'defacements', v)
                om.out.information(v.get_desc())
            elif total_attacks == 1:
                desc = 'The target site was defaced in the past. For more'\
                       ' information please visit the following URL: "%s".'
                desc = desc % response.get_url()
                i = Info('Previous defacements', desc, response.id,
                         self.get_name())
                i.set_url(response.get_url())

                kb.kb.append(self, 'defacements', i)
                om.out.information(i.get_desc())
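For reference, this is what the regular expression above pulls out of an invented fragment of the zone-h response (the numbers are made up):

# Illustration only: the same pattern applied to an invented snippet.
import re

sample = ('Total notifications: <b>7</b> of which <b>5</b> single ip '
          'and <b>2</b> mass')
regex = r'Total notifications: <b>(\d*)</b> of which <b>(\d*)</b> single ip and <b>(\d*)</b> mass'

matches = re.findall(regex, sample)
# matches == [('7', '5', '2')]; int(matches[0][0]) is the total count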
Example #54
0
    def _check_x_power(self, fuzzable_request):
        """
        Analyze X-Powered-By header.
        """
        response = self._uri_opener.GET(fuzzable_request.get_url(), cache=True)

        for header_name in response.get_headers().keys():
            for i in ['ASPNET', 'POWERED']:
                if i in header_name.upper() or header_name.upper() in i:
                    powered_by = response.get_headers()[header_name]

                    # Only get the first one
                    self._x_powered = False

                    #
                    #    Check if I already have this info in the KB
                    #
                    pow_by_kb = kb.kb.get('server_header', 'powered_by')
                    powered_by_in_kb = [j['powered_by'] for j in pow_by_kb]
                    if powered_by not in powered_by_in_kb:

                        #
                        #    I don't have it in the KB, so I need to add it,
                        #
                        desc = 'The %s header for the target HTTP server is "%s".'
                        desc = desc % (header_name, powered_by)
                        
                        i = Info('Powered-by header', desc, response.id,
                                 self.get_name())
                        i['powered_by'] = powered_by
                        i.add_to_highlight(header_name + ':')
                        
                        om.out.information(i.get_desc())

                        # Save the results in the KB so that other plugins can
                        # use this information. Before knowing that some servers
                        # may return more than one poweredby header I had:
                        #     kb.kb.raw_write( self , 'powered_by' , powered_by )
                        # But I have seen an IIS server with PHP that returns
                        # both the ASP.NET and the PHP headers
                        kb.kb.append(self, 'powered_by', i)
                        
                        # Update the list and save it,
                        powered_by_in_kb.append(powered_by)
                        kb.kb.raw_write(self, 'powered_by_string',
                                        powered_by_in_kb)
Example #55
0
    def _analyze_response(self, response):
        """
        It seems that we have found a _vti_inf file, parse it and analyze the
        content!

        :param response: The http response object for the _vti_inf file.
        :return: None. All the info is saved to the kb.
        """
        version_mo = self.VERSION_RE.search(response.get_body())
        admin_mo = self.ADMIN_URL_RE.search(response.get_body())
        author_mo = self.AUTHOR_URL_RE.search(response.get_body())

        if version_mo and admin_mo and author_mo:
            # Set self._exec to False
            self._exec = False

            desc = 'The FrontPage Configuration Information file was found'\
                   ' at: "%s" and the version of FrontPage Server Extensions'\
                   ' is: "%s".'
            desc = desc % (response.get_url(), version_mo.group(1))

            i = Info('FrontPage configuration information', desc, response.id,
                     self.get_name())
            i.set_url(response.get_url())
            i['version'] = version_mo.group(1)
            
            kb.kb.append(self, 'frontpage_version', i)
            om.out.information(i.get_desc())

            #
            # Handle the admin.exe file
            #
            self._analyze_admin(response, admin_mo)

            #
            # Handle the author.exe file
            #
            self._analyze_author(response, author_mo)

        else:
            # This is strange... we found a _vti_inf file, but there is no
            # frontpage information in it... IPS? WAF? honeypot?
            msg = '[IMPROVEMENT] Invalid frontPage configuration information'\
                  ' found at %s (id: %s).'
            msg = msg % (response.get_url(), response.id)
            om.out.debug(msg)
Example #56
0
    def _extract_server_version(self, fuzzable_request, response):
        """
        Get the server version from the HTML:
            <dl><dt>Server Version: Apache/2.2.9 (Unix)</dt>
        """
        for version in re.findall('<dl><dt>Server Version: (.*?)</dt>',
                                  response.get_body()):
            # Save the results in the KB so the user can look at it
            desc = 'The web server has the apache server status module'\
                   ' enabled which discloses the following remote server'\
                   ' version: "%s".'
            desc %= version
            
            i = Info('Apache Server version', desc, response.id, self.get_name())
            i.set_url(response.get_url())

            om.out.information(i.get_desc())
            kb.kb.append(self, 'server', i)
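For reference, the pattern above matches the version in the server-status line quoted in the docstring:

# Illustration only: applying the same pattern to the docstring's example line.
import re

sample = '<dl><dt>Server Version: Apache/2.2.9 (Unix)</dt>'
versions = re.findall('<dl><dt>Server Version: (.*?)</dt>', sample)
# versions == ['Apache/2.2.9 (Unix)']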