Example #1
    def test_remove_table(self):
        disk_list = DiskList()
        table_name = disk_list.table_name
        db = get_default_temp_db_instance()

        self.assertTrue(db.table_exists(table_name))

        disk_list.cleanup()

        self.assertFalse(db.table_exists(table_name))
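
The test above exercises the basic DiskList lifecycle: instantiating it creates a backing table in the framework's temporary database, and cleanup() drops that table again. The following minimal sketch is not taken from the project; the import paths are assumptions based on the w3af source layout these examples come from, and the appended values are hypothetical.

# Minimal sketch, not from the original project. Import paths are assumed to
# match the w3af source tree that these examples were extracted from.
from core.data.db.disk_list import DiskList
from core.data.db.dbms import get_default_temp_db_instance

# Creating the DiskList registers a table in the temporary database
disk_list = DiskList()
db = get_default_temp_db_instance()
assert db.table_exists(disk_list.table_name)

# Items are appended and later checked with the "in" operator, which is the
# pattern the audit/grep plugins below rely on (values here are hypothetical)
disk_list.append(('http://example.com/', 'param_name'))
assert ('http://example.com/', 'param_name') in disk_list

# cleanup() drops the backing table, as asserted by the test above
disk_list.cleanup()
assert not db.table_exists(disk_list.table_name)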
Example #2
    def test_remove_table(self):
        disk_list = DiskList()
        table_name = disk_list.table_name
        db = get_default_temp_db_instance()

        self.assertTrue(db.table_exists(table_name))

        disk_list.cleanup()

        self.assertFalse(db.table_exists(table_name))
Example #3
File: generic.py  Project: weisst/w3af
class generic(AuditPlugin):
    '''
    Find all kinds of bugs without using a fixed database of errors.
    :author: Andres Riancho ([email protected])
    '''

    ERROR_STRINGS = ['d\'kc"z\'gj\'\"**5*(((;-*`)', '']

    def __init__(self):
        AuditPlugin.__init__(self)

        #   Internal variables
        self._potential_vulns = DiskList()

        #   User configured variables
        self._diff_ratio = 0.30

    def audit(self, freq, orig_response):
        '''
        Find all kinds of bugs without using a fixed database of errors.

        :param freq: A FuzzableRequest
        '''
        # First, get the original response and create the mutants
        mutants = create_mutants(freq, [
            '',
        ], orig_resp=orig_response)

        for m in mutants:

            # First I check that the current modified parameter in the mutant
            # doesn't have an already reported vulnerability. I don't want to
            # report vulnerabilities more than once.
            if (m.get_url(), m.get_var()) in self._potential_vulns:
                continue

            # Now, we request the limit (something that doesn't exist)
            # If http://localhost/a.php?b=1 ; then I should request b=12938795
            #                                                       (random number)
            # If http://localhost/a.php?b=abc ; then I should request b=hnv98yks
            #                                                         (random alnum)
            limit_response = self._get_limit_response(m)

            # Now I request something that could generate an error
            #     If http://localhost/a.php?b=1 ; then I should request b=d'kcz'gj'"**5*(((*)
            #     If http://localhost/a.php?b=abc ; then I should request b=d'kcz'gj'"**5*(((*)
            #
            # I also try to trigger errors by sending empty strings
            #     If http://localhost/a.php?b=1 ; then I should request b=
            #     If http://localhost/a.php?b=abc ; then I should request b=
            for error_string in self.ERROR_STRINGS:

                m.set_mod_value(error_string)
                error_response = self._uri_opener.send_mutant(m)

                # Now I compare responses
                self._analyze_responses(orig_response, limit_response,
                                        error_response, m)

    def _analyze_responses(self, orig_resp, limit_response, error_response,
                           mutant):
        '''
        Analyze responses; if error_response doesn't look like orig_resp nor
        limit_response, then we have a vuln.

        :return: None
        '''
        original_to_error = relative_distance(orig_resp.get_body(),
                                              error_response.get_body())
        limit_to_error = relative_distance(limit_response.get_body(),
                                           error_response.get_body())
        original_to_limit = relative_distance(limit_response.get_body(),
                                              orig_resp.get_body())

        ratio = self._diff_ratio + (1 - original_to_limit)
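        # Worked example (hypothetical numbers): with the default diff_ratio
        # of 0.30 and original_to_limit == 0.80, ratio == 0.30 + (1 - 0.80)
        # == 0.50, so the error response is only treated as interesting below
        # when both original_to_error and limit_to_error fall under 0.50.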

        #om.out.debug('original_to_error: ' +  str(original_to_error) )
        #om.out.debug('limit_to_error: ' +  str(limit_to_error) )
        #om.out.debug('original_to_limit: ' +  str(original_to_limit) )
        #om.out.debug('ratio: ' +  str(ratio) )

        if original_to_error < ratio and limit_to_error < ratio:
            # Maybe the limit I requested wasn't really a non-existent one
            # (and the error page really found the limit),
            # let's request a new limit (one that hopefully doesn't exist)
            # in order to remove some false positives
            limit_response2 = self._get_limit_response(mutant)

            id_list = [orig_resp.id, limit_response.id, error_response.id]

            if relative_distance(limit_response2.get_body(), limit_response.get_body()) > \
                    1 - self._diff_ratio:
                # The two limits are "equal"; It's safe to suppose that we have found the
                # limit here and that the error string really produced an error
                self._potential_vulns.append(
                    (mutant.get_url(), mutant.get_var(), mutant, id_list))

    def _get_limit_response(self, m):
        '''
        We request the limit (something that doesn't exist)
            - If http://localhost/a.php?b=1 ; then I should request b=12938795
                                                                 (random number)
            - If http://localhost/a.php?b=abc ; then I should request b=hnv98yks
                                                                    (random alnum)

        :return: The limit response object
        '''
        # Copy the dc, needed to make a good vuln report
        dc = copy.deepcopy(m.get_dc())

        if m.get_original_value().isdigit():
            m.set_mod_value(rand_number(length=8))
        else:
            m.set_mod_value(rand_alnum(length=8))
        limit_response = self._uri_opener.send_mutant(m)

        # restore the dc
        m.set_dc(dc)
        return limit_response

    def end(self):
        '''
        This method is called when the plugin won't be used anymore.
        '''
        all_vulns_and_infos = kb.kb.get_all_vulns()
        all_vulns_and_infos.extend(kb.kb.get_all_infos())

        for url, variable, mutant, id_list in self._potential_vulns:
            for info in all_vulns_and_infos:
                if info.get_var() == variable and info.get_url() == url:
                    break
            else:
                desc = 'An unidentified vulnerability was found at: %s'
                desc = desc % mutant.found_at()

                v = Vuln.from_mutant('Unidentified vulnerability',
                                     desc, severity.MEDIUM, id_list,
                                     self.get_name(), mutant)

                self.kb_append_uniq(self, 'generic', v)

        self._potential_vulns.cleanup()

    def get_options(self):
        '''
        :return: A list of option objects for this plugin.
        '''
        ol = OptionList()

        d = 'If two strings have a diff ratio less than diff_ratio, then they'\
            ' are really different.'
        o = opt_factory('diff_ratio', self._diff_ratio, d, 'float')
        ol.add(o)

        return ol

    def set_options(self, options_list):
        '''
        This method sets all the options that are configured using the user interface
        generated by the framework using the result of get_options().

        :param options_list: A dictionary with the options for the plugin.
        :return: No value is returned.
        '''
        self._diff_ratio = options_list['diff_ratio'].get_value()

    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #4
class csp(GrepPlugin):
    '''
    This plugin identifies incorrect or too permissive CSP (Content Security Policy) headers returned by the web application under analysis.
    '''
    def __init__(self):
        '''
        Class init
        '''
        GrepPlugin.__init__(self)

        self._total_count = 0
        self._vulns = DiskList()
        self._urls = DiskList()

    def get_long_desc(self):
        return '''
        This plugin identifies incorrect or too permissive CSP (Content Security Policy) headers
        returned by the web application under analysis.

        Additional information: 
        https://www.owasp.org/index.php/Content_Security_Policy
        http://www.w3.org/TR/CSP
        '''

    def grep(self, request, response):
        '''
        Perform search on current HTTP request/response exchange.
        Store information about vulns for further global processing.
        
        @param request: HTTP request
        @param response: HTTP response  
        '''
        #Check that current URL has not already been analyzed
        response_url = str(response.get_url().uri2url())
        if response_url in self._urls:
            return
        else:
            self._urls.append(response_url)

        #Search issues using dedicated module
        csp_vulns = find_vulns(response)

        #Analyze issue list
        if len(csp_vulns) > 0:
            vuln_store_item = DiskCSPVulnStoreItem(response_url, response.id,
                                                   csp_vulns)
            self._vulns.append(vuln_store_item)
            #Increment the vulnerabilities counter
            for csp_directive_name in csp_vulns:
                self._total_count += len(csp_vulns[csp_directive_name])

    def end(self):
        '''
        Perform global analysis for all vulnerabilities found.
        '''
        #Check if vulns have been found
        if self._total_count == 0:
            return

        #Parse vulns collection
        vuln_already_reported = []
        total_url_processed_count = len(self._urls)
        for vuln_store_item in self._vulns:
            for csp_directive_name, csp_vulns_list in vuln_store_item.csp_vulns.iteritems():
                for csp_vuln in csp_vulns_list:
                    #Check if the current vuln is common (shared) to several url processed
                    #and has been already reported
                    if csp_vuln.desc in vuln_already_reported:
                        continue
                    #Search for current vuln occurences in order to know if
                    #the vuln is common (shared) to several url processed
                    occurences = self._find_occurences(csp_vuln.desc)
                    v = None
                    if len(occurences) > 1:
                        #Shared vuln case
                        v = Vuln('CSP vulnerability',
                                 csp_vuln.desc, csp_vuln.severity, occurences,
                                 self.get_name())
                        vuln_already_reported.append(csp_vuln.desc)
                    else:
                        #Isolated vuln case
                        v = Vuln('CSP vulnerability', csp_vuln.desc,
                                 csp_vuln.severity, vuln_store_item.resp_id,
                                 self.get_name())
                    #Report vuln
                    self.kb_append(self, 'csp', v)

        #Cleanup
        self._urls.cleanup()
        self._vulns.cleanup()

    def _find_occurences(self, vuln_desc):
        '''
        Internal utility function to find all occurences of a vuln 
        into the global collection of vulns found by the plugin.
        
        @param vuln_desc: Vulnerability description.
        @return: List of response ID for which the vuln is found.
        '''
        list_resp_id = []

        #Check input for quick exit
        if vuln_desc is None or vuln_desc.strip() == "":
            return list_resp_id

        #Parse vulns collection
        ref = vuln_desc.lower().strip()
        for vuln_store_item in self._vulns:
            for csp_directive_name, csp_vulns_list in vuln_store_item.csp_vulns.iteritems():
                for csp_vuln in csp_vulns_list:
                    if csp_vuln.desc.strip().lower() == ref:
                        if vuln_store_item.resp_id not in list_resp_id:
                            list_resp_id.append(vuln_store_item.resp_id)

        return list_resp_id
Example #5
class path_disclosure(GrepPlugin):
    '''
    Grep every page for traces of path disclosure vulnerabilities.

    :author: Andres Riancho ([email protected])
    '''

    def __init__(self):
        GrepPlugin.__init__(self)

        # Internal variables
        self._already_added = DiskList()

        # Compile all regular expressions and store information to avoid
        # multiple queries to the same function
        self._common_directories = get_common_directories()
        self._compiled_regexes = {}
        self._compile_regex()

    def _compile_regex(self):
        '''
        :return: None, the result is saved in self._compiled_regexes
        '''
        #
        #    I tried to enhance the performance of this plugin by putting
        #    all the regular expressions in one (1|2|3|4...|N)
        #    That gave no visible result.
        #
        for path_disclosure_string in self._common_directories:
            regex_string = '(%s.*?)[^A-Za-z0-9\._\-\\/\+~]'
            regex_string = regex_string % path_disclosure_string
            regex = re.compile(regex_string, re.IGNORECASE)
            
            self._compiled_regexes[path_disclosure_string] = regex

    def _potential_disclosures(self, html_string):
        '''
        Taking into account that regular expressions are slow, we first
        apply this function to check if the HTML string has potential
        path disclosures.

        With this performance enhancement we reduce the plugin run time
        to 1/8 of the time in cases where no potential disclosures are found,
        and around 1/3 when potential disclosures *are* found.

        :return: A list of the potential path disclosures
        '''
        potential_disclosures = []

        for path_disclosure_string in self._common_directories:
            if path_disclosure_string in html_string:
                potential_disclosures.append(path_disclosure_string)

        return potential_disclosures

    def grep(self, request, response):
        '''
        Identify the path disclosure vulnerabilities.

        :param request: The HTTP request object.
        :param response: The HTTP response object
        :return: None, the result is saved in the kb.
        '''
        if not response.is_text_or_html():
            return
        
        if self.find_path_disclosure(request, response):
            self._update_KB_path_list()
        
    def find_path_disclosure(self, request, response):
        '''
        Actually find the path disclosure vulnerabilities
        '''
        html_string = response.get_body()

        for potential_disclosure in self._potential_disclosures(html_string):

            path_disc_regex = self._compiled_regexes[potential_disclosure]
            match_list = path_disc_regex.findall(html_string)

            # Decode the URL, this will transform things like
            #     http://host.tld/?id=%2Fhome
            # into,
            #     http://host.tld/?id=/home
            realurl = response.get_url().url_decode()

            #   Sort by the longest match, this is needed for filtering out
            #   some false positives; please read the note below.
            match_list.sort(self._longest)

            for match in match_list:

                # This if is to avoid false positives
                if not request.sent(match) and not \
                self._attr_value(match, html_string):

                    # Check for dups
                    if (realurl, match) in self._already_added:
                        continue

                    #   There is also a rare bug, which is triggered in cases like this one:
                    #
                    #   >>> import re
                    #   >>> re.findall('/var/www/.*','/var/www/foobar/htdocs/article.php')
                    #   ['/var/www/foobar/htdocs/article.php']
                    #   >>> re.findall('/htdocs/.*','/var/www/foobar/htdocs/article.php')
                    #   ['/htdocs/article.php']
                    #   >>>
                    #
                    #   What I need to do here, is to keep the longest match.
                    for realurl_added, match_added in self._already_added:
                        if match_added.endswith(match):
                            break
                    else:

                        #   Note to self: I get here when "break" is NOT executed.
                        #   It's a new one, report!
                        self._already_added.append((realurl, match))

                        desc = 'The URL: "%s" has a path disclosure'\
                               ' vulnerability which discloses "%s".'
                        desc = desc % (response.get_url(), match)

                        v = Vuln('Path disclosure vulnerability', desc,
                                 severity.LOW, response.id, self.get_name())

                        v.set_url(realurl)
                        v['path'] = match
                        v.add_to_highlight(match)
                        
                        self.kb_append(self, 'path_disclosure', v)
                        return True
                    
        return False

    def _longest(self, a, b):
        '''
        :param a: A string.
        :param b: Another string.
        :return: The result of cmp() applied to the lengths of a and b.
        '''
        return cmp(len(a), len(b))

    def _attr_value(self, path_disclosure_string, response_body):
        '''
        This method was created to remove some false positives.

        :return: True if path_disclosure_string is the value of an attribute inside a tag.

        Examples:
            path_disclosure_string = '/home/image.png'
            response_body = '....<img src="/home/image.png">...'
            return: True

            path_disclosure_string = '/home/image.png'
            response_body = '...<b>Error while processing /home/image.png</b>...'
            return: False
        '''
        regex = '<.+?(["|\']%s["|\']).*?>' % re.escape(path_disclosure_string)
        regex_res = re.findall(regex, response_body)
        in_attr = path_disclosure_string in regex_res
        return in_attr

    def _update_KB_path_list(self):
        '''
        If a path disclosure was found, I can create a list of full paths to
        all URLs ever visited. This method updates that list.
        '''
        path_disc_vulns = kb.kb.get('path_disclosure', 'path_disclosure')
        url_list = kb.kb.get_all_known_urls()
        
        # Now I find the longest match between one of the URLs that w3af has
        # discovered, and one of the path disclosure strings that this plugin
        # has found. I use the longest match because with small match_list I
        # have more probability of making a mistake.
        longest_match = ''
        longest_path_disc_vuln = None
        for path_disc_vuln in path_disc_vulns:
            for url in url_list:
                path_and_file = url.get_path()

                if path_disc_vuln['path'].endswith(path_and_file):
                    if len(longest_match) < len(path_and_file):
                        longest_match = path_and_file
                        longest_path_disc_vuln = path_disc_vuln

        # Now I recalculate the place where all the resources are in disk, all
        # this is done taking the longest_match as a reference, so... if we
        # don't have a longest_match, then nothing is actually done
        if not longest_match:
            return

        # Get the webroot
        webroot = longest_path_disc_vuln['path'].replace(longest_match, '')
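        # Worked example (hypothetical values): if the disclosed path is
        # '/var/www/foobar/htdocs/article.php' and the longest matching known
        # URL path is '/foobar/htdocs/article.php', the webroot computed here
        # is '/var/www'.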

        #
        # This if fixes a strange case reported by Olle
        #         if webroot[0] == '/':
        #         IndexError: string index out of range
        # That seems to be because the webroot == ''
        #
        if not webroot:
            return
        
        # Check what path separator we should use (linux / windows)
        path_sep = '/' if webroot.startswith('/') else '\\'

        # Create the remote locations
        remote_locations = []
        for url in url_list:
            remote_path = url.get_path().replace('/', path_sep)
            remote_locations.append(webroot + remote_path)
        remote_locations = list(set(remote_locations))

        kb.kb.raw_write(self, 'list_files', remote_locations)
        kb.kb.raw_write(self, 'webroot', webroot)

    def end(self):
        self._already_added.cleanup()

    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #6
class generic(AuditPlugin):
    '''
    Find all kinds of bugs without using a fixed database of errors.
    :author: Andres Riancho ([email protected])
    '''

    ERROR_STRINGS = ['d\'kc"z\'gj\'\"**5*(((;-*`)', '']

    def __init__(self):
        AuditPlugin.__init__(self)

        #   Internal variables
        self._potential_vulns = DiskList()

        #   User configured variables
        self._diff_ratio = 0.30

    def audit(self, freq, orig_response):
        '''
        Find all kinds of bugs without using a fixed database of errors.

        :param freq: A FuzzableRequest
        '''
        # First, get the original response and create the mutants
        mutants = create_mutants(freq, ['', ], orig_resp=orig_response)

        for m in mutants:

            # First I check that the current modified parameter in the mutant
            # doesn't have an already reported vulnerability. I don't want to
            # report vulnerabilities more than once.
            if (m.get_url(), m.get_var()) in self._potential_vulns:
                continue

            # Now, we request the limit (something that doesn't exist)
            # If http://localhost/a.php?b=1 ; then I should request b=12938795
            #                                                       (random number)
            # If http://localhost/a.php?b=abc ; then I should request b=hnv98yks
            #                                                         (random alnum)
            limit_response = self._get_limit_response(m)

            # Now I request something that could generate an error
            #     If http://localhost/a.php?b=1 ; then I should request b=d'kcz'gj'"**5*(((*)
            #     If http://localhost/a.php?b=abc ; then I should request b=d'kcz'gj'"**5*(((*)
            #
            # I also try to trigger errors by sending empty strings
            #     If http://localhost/a.php?b=1 ; then I should request b=
            #     If http://localhost/a.php?b=abc ; then I should request b=
            for error_string in self.ERROR_STRINGS:

                m.set_mod_value(error_string)
                error_response = self._uri_opener.send_mutant(m)

                # Now I compare responses
                self._analyze_responses(orig_response, limit_response,
                                        error_response, m)

    def _analyze_responses(self, orig_resp, limit_response, error_response, mutant):
        '''
        Analyze responses; if error_response doesn't look like orig_resp nor
        limit_response, then we have a vuln.

        :return: None
        '''
        original_to_error = relative_distance(
            orig_resp.get_body(), error_response.get_body())
        limit_to_error = relative_distance(
            limit_response.get_body(), error_response.get_body())
        original_to_limit = relative_distance(
            limit_response.get_body(), orig_resp.get_body())

        ratio = self._diff_ratio + (1 - original_to_limit)

        #om.out.debug('original_to_error: ' +  str(original_to_error) )
        #om.out.debug('limit_to_error: ' +  str(limit_to_error) )
        #om.out.debug('original_to_limit: ' +  str(original_to_limit) )
        #om.out.debug('ratio: ' +  str(ratio) )

        if original_to_error < ratio and limit_to_error < ratio:
            # Maybe the limit I requested wasn't really a non-existent one
            # (and the error page really found the limit),
            # let's request a new limit (one that hopefully doesn't exist)
            # in order to remove some false positives
            limit_response2 = self._get_limit_response(mutant)

            id_list = [orig_resp.id, limit_response.id, error_response.id]

            if relative_distance(limit_response2.get_body(), limit_response.get_body()) > \
                    1 - self._diff_ratio:
                # The two limits are "equal"; It's safe to suppose that we have found the
                # limit here and that the error string really produced an error
                self._potential_vulns.append((mutant.get_url(),
                                              mutant.get_var(),
                                              mutant, id_list))


    def _get_limit_response(self, m):
        '''
        We request the limit (something that doesn't exist)
            - If http://localhost/a.php?b=1 ; then I should request b=12938795
                                                                 (random number)
            - If http://localhost/a.php?b=abc ; then I should request b=hnv98yks
                                                                    (random alnum)

        :return: The limit response object
        '''
        # Copy the dc, needed to make a good vuln report
        dc = copy.deepcopy(m.get_dc())

        if m.get_original_value().isdigit():
            m.set_mod_value(rand_number(length=8))
        else:
            m.set_mod_value(rand_alnum(length=8))
        limit_response = self._uri_opener.send_mutant(m)

        # restore the dc
        m.set_dc(dc)
        return limit_response

    def end(self):
        '''
        This method is called when the plugin won't be used anymore.
        '''
        all_vulns_and_infos = kb.kb.get_all_vulns()
        all_vulns_and_infos.extend(kb.kb.get_all_infos())

        for url, variable, mutant, id_list in self._potential_vulns:
            for info in all_vulns_and_infos:
                if info.get_var() == variable and info.get_url() == url:
                    break
            else:
                desc = 'An unidentified vulnerability was found at: %s'
                desc = desc % mutant.found_at()
                
                v = Vuln.from_mutant('Unidentified vulnerability', desc,
                                     severity.MEDIUM, id_list, self.get_name(),
                                     mutant)
        
                self.kb_append_uniq(self, 'generic', v)
        
        self._potential_vulns.cleanup()        
                
    def get_options(self):
        '''
        :return: A list of option objects for this plugin.
        '''
        ol = OptionList()

        d = 'If two strings have a diff ratio less than diff_ratio, then they'\
            ' are really different.'
        o = opt_factory('diff_ratio', self._diff_ratio, d, 'float')
        ol.add(o)

        return ol

    def set_options(self, options_list):
        '''
        This method sets all the options that are configured using the user interface
        generated by the framework using the result of get_options().

        :param options_list: A dictionary with the options for the plugin.
        :return: No value is returned.
        '''
        self._diff_ratio = options_list['diff_ratio'].get_value()

    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #7
class click_jacking(GrepPlugin):
    '''
    Grep every page for X-Frame-Options header.

    :author: Taras ([email protected])
    '''
    def __init__(self):
        GrepPlugin.__init__(self)

        self._total_count = 0
        self._vuln_count = 0
        self._vulns = DiskList()
        self._ids = DiskList()

    def grep(self, request, response):
        '''
        TODO: need to check here for auth cookie?!
        '''
        if not response.is_text_or_html():
            return

        self._total_count += 1

        headers = response.get_lower_case_headers()
        x_frame_options = headers.get('x-frame-options', '')
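        # Hypothetical example: a response sending 'X-Frame-Options: SAMEORIGIN'
        # or 'X-Frame-Options: DENY' passes the check below; a missing header
        # defaults to '' here and is therefore counted as vulnerable.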

        if not x_frame_options.lower() in ('deny', 'sameorigin'):
            self._vuln_count += 1
            if response.get_url() not in self._vulns:
                self._vulns.append(response.get_url())
                self._ids.append(response.id)

    def end(self):
        # If all URLs implement protection, don't report anything.
        if not self._vuln_count:
            return

        response_ids = [_id for _id in self._ids]

        # If none of the URLs implement protection, simply report
        # ONE vulnerability that says that.
        if self._total_count == self._vuln_count:
            desc = 'The whole target has no protection (X-Frame-Options'\
                  ' header) against Click-Jacking attacks'
        # If most of the URLs implement the protection but some
        # don't, report ONE vulnerability saying: "Most are protected,
        # but x, y are not."
        if self._total_count > self._vuln_count:
            desc = 'Some URLs have no protection (X-Frame-Options header)'\
                   ' against Click-Jacking attacks. Among them:\n'
            desc += ' '.join([str(url) + '\n' for url in self._vulns])

        v = Vuln('Click-Jacking vulnerability', desc, severity.MEDIUM,
                 response_ids, self.get_name())

        self.kb_append(self, 'click_jacking', v)

        self._vulns.cleanup()
        self._ids.cleanup()

    def get_long_desc(self):
        return '''
Example #8
class xss(AuditPlugin):
    '''
    Identify cross site scripting vulnerabilities.
    
    :author: Andres Riancho ( [email protected] )
    :author: Taras ( [email protected] )
    '''
    PAYLOADS = [
                'RANDOMIZE</->',
                'RANDOMIZE/*',
                'RANDOMIZE"RANDOMIZE',
                "RANDOMIZE'RANDOMIZE",
                "RANDOMIZE`",
                "RANDOMIZE ="
                ]
        
    def __init__(self):
        AuditPlugin.__init__(self)
        
        self._xss_mutants = DiskList()

        # User configured parameters
        self._check_persistent_xss = True

    def audit(self, freq, orig_response):
        '''
        Tests a URL for XSS vulnerabilities.
        
        :param freq: A FuzzableRequest
        '''
        fake_mutants = create_mutants(freq, ['',])
        
        # Run this in the worker pool in order to get different
        # parameters tested at the same time.
        self.worker_pool.map(self._check_xss_in_parameter, fake_mutants)
        
    def _check_xss_in_parameter(self, mutant):
        '''
        Tries to identify (persistent) XSS in one parameter.
        ''' 
        if not self._identify_trivial_xss(mutant):
            self._search_xss(mutant)

    def _report_vuln(self, mutant, response, mod_value):
        '''
        Create a Vuln object and store it in the KB.
        
        :return: None
        '''
        csp_protects = site_protected_against_xss_by_csp(response)
        vuln_severity = severity.LOW if csp_protects else severity.MEDIUM
        
        desc = 'A Cross Site Scripting vulnerability was found at: %s'
        desc = desc % mutant.found_at()
        
        if csp_protects:
            desc += ' The risk associated with this vulnerability was lowered'\
                    ' because the site correctly implements CSP. The'\
                    ' vulnerability is still a risk for the application since'\
                    ' only the latest versions of some browsers implement CSP'\
                    ' checking.'
        
        v = Vuln.from_mutant('Cross site scripting vulnerability', desc,
                             vuln_severity, response.id, self.get_name(),
                             mutant)
        v.add_to_highlight(mod_value) 
        
        self.kb_append_uniq(self, 'xss', v)

    def _identify_trivial_xss(self, mutant):
        '''
        Identify trivial cases of XSS where all chars are echoed back and no
        filter and/or encoding is in place.
        
        :return: True in the case where a trivial XSS was identified.
        '''
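        # replace_randomize() presumably swaps the RANDOMIZE markers in
        # PAYLOADS for random tokens before the combined payload is sent; if
        # that payload comes back unmodified in the response body (checked
        # below), none of the special characters were filtered or encoded,
        # which makes the XSS trivial to confirm.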
        payload = replace_randomize(''.join(self.PAYLOADS))
        
        trivial_mutant = mutant.copy()
        trivial_mutant.set_mod_value(payload)
        
        response = self._uri_opener.send_mutant(trivial_mutant)

        # Add data for the persistent xss checking
        if self._check_persistent_xss:
            self._xss_mutants.append((trivial_mutant, response.id))
        
        if payload in response.get_body():
            self._report_vuln(mutant, response, payload)
            return True
        
        return False

    def _search_xss(self, mutant):
        '''
        Analyze the mutant for reflected XSS.
        
        @parameter mutant: A mutant that was used to test if the parameter
            was echoed back or not
        '''
        xss_strings = [replace_randomize(i) for i in self.PAYLOADS]
        mutant_list = create_mutants(
                                     mutant.get_fuzzable_req(),
                                     xss_strings,
                                     fuzzable_param_list=[mutant.get_var()]
                                     )

        self._send_mutants_in_threads(self._uri_opener.send_mutant,
                                      mutant_list,
                                      self._analyze_echo_result)

    def _analyze_echo_result(self, mutant, response):
        '''
        Do we have a reflected XSS?
        
        :return: None, record all the results in the kb.
        '''
        # Add data for the persistent xss checking
        if self._check_persistent_xss:
            self._xss_mutants.append((mutant, response.id))
        
        with self._plugin_lock:
            
            if self._has_bug(mutant):
                return
            
            mod_value = mutant.get_mod_value()

            for contexts in get_context(response.get_body(), mod_value):
                for context in contexts:
                    if context.is_executable() or context.can_break(mod_value):
                        self._report_vuln(mutant, response, mod_value)
                        return
       
    def end(self):
        '''
        This method is called when the plugin won't be used anymore.
        '''
        if self._check_persistent_xss:
            self._identify_persistent_xss()
        
        self._xss_mutants.cleanup()
    
    def _identify_persistent_xss(self):
        '''
        This method is called to check for persistent xss. 
    
        Many times an xss isn't on the page we get after the GET/POST of
        the xss string. This method searches for the xss string on all
        the pages that are known to the framework.
        
        :return: None, Vuln (if any) are saved to the kb.
        '''
        # Get all known fuzzable requests from the core
        fuzzable_requests = kb.kb.get_all_known_fuzzable_requests()
        
        self._send_mutants_in_threads(self._uri_opener.send_mutant,
                                      fuzzable_requests,
                                      self._analyze_persistent_result,
                                      grep=False, cache=False)    
    
    def _analyze_persistent_result(self, fuzzable_request, response):
        '''
        After performing an HTTP request to "fuzzable_request" and getting
        "response" analyze if the response contains any of the information sent
        by any of the mutants.
        
        :return: None, Vuln (if any) are saved to the kb.
        '''
        response_body = response.get_body()
        
        for mutant, mutant_response_id in self._xss_mutants:
            
            mod_value = mutant.get_mod_value()
            
            for contexts in get_context(response_body, mod_value):
                for context in contexts:
                    if context.is_executable() or context.can_break(mod_value):
                        self._report_persistent_vuln(mutant, response,
                                                     mutant_response_id,
                                                     mod_value,
                                                     fuzzable_request)
                        break
    
    def _report_persistent_vuln(self, mutant, response, mutant_response_id,
                                mod_value, fuzzable_request):
        '''
        Report a persistent XSS vulnerability to the core.
        
        :return: None, a vulnerability is saved in the KB.
        '''
        response_ids = [response.id, mutant_response_id]
        name = 'Persistent Cross-Site Scripting vulnerability'
        
        desc = 'A persistent Cross Site Scripting vulnerability'\
               ' was found by sending "%s" to the "%s" parameter'\
               ' at %s, which is echoed when browsing to %s.'
        desc = desc % (mod_value, mutant.get_var(), mutant.get_url(),
                       response.get_url())
        
        csp_protects = site_protected_against_xss_by_csp(response)
        vuln_severity = severity.MEDIUM if csp_protects else severity.HIGH
        
        if csp_protects:
            desc += ' The risk associated with this vulnerability was lowered'\
                    ' because the site correctly implements CSP. The'\
                    ' vulnerability is still a risk for the application since'\
                    ' only the latest versions of some browsers implement CSP'\
                    ' checking.'
                    
        v = Vuln.from_mutant(name, desc, vuln_severity,
                             response_ids, self.get_name(),
                             mutant)
        
        v['persistent'] = True
        v['write_payload'] = mutant
        v['read_payload'] = fuzzable_request
        v.add_to_highlight(mutant.get_mod_value())

        om.out.vulnerability(v.get_desc())
        self.kb_append_uniq(self, 'xss', v)
        

    def get_options(self):
        '''
        :return: A list of option objects for this plugin.
        '''
        ol = OptionList()
        
        d1 = 'Identify persistent cross site scripting vulnerabilities'
        h1 = 'If set to True, w3af will navigate all pages of the target one'\
             ' more time, searching for persistent cross site scripting'\
             ' vulnerabilities.'
        o1 = opt_factory('persistent_xss', self._check_persistent_xss, d1,
                         'boolean', help=h1)
        ol.add(o1)
        
        return ol
        
    def set_options(self, options_list):
        '''
        This method sets all the options that are configured using the user
        interface generated by the framework using the result of get_options().
        
        @parameter options_list: A dictionary with the options for the plugin.
        :return: No value is returned.
        '''
        self._check_persistent_xss = options_list['persistent_xss'].get_value()
        
    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #9
class ssi(AuditPlugin):
    '''
    Find server side inclusion vulnerabilities.
    :author: Andres Riancho ([email protected])
    '''
    def __init__(self):
        AuditPlugin.__init__(self)

        # Internal variables
        self._expected_res_mutant = DiskDict()
        self._freq_list = DiskList()

        re_str = '<!--#exec cmd="echo -n (.*?);echo -n (.*?)" -->'
        self._extract_results_re = re.compile(re_str)

    def audit(self, freq, orig_response):
        '''
        Tests a URL for server side inclusion vulnerabilities.

        :param freq: A FuzzableRequest
        '''
        # Create the mutants to send right now,
        ssi_strings = self._get_ssi_strings()
        mutants = create_mutants(freq, ssi_strings, orig_resp=orig_response)

        # Used in end() to detect "persistent SSI"
        for mut in mutants:
            expected_result = self._extract_result_from_payload(
                mut.get_mod_value())
            self._expected_res_mutant[expected_result] = mut

        self._freq_list.append(freq)
        # End of persistent SSI setup

        self._send_mutants_in_threads(self._uri_opener.send_mutant, mutants,
                                      self._analyze_result)

    def _get_ssi_strings(self):
        '''
        This method generates the server side include (SSI) strings to try.

        :return: A generator that yields the SSI payload strings described above.
        '''
        yield '<!--#exec cmd="echo -n %s;echo -n %s" -->' % (rand_alpha(5),
                                                             rand_alpha(5))

        # TODO: Add mod_perl ssi injection support
        # http://www.sens.buffalo.edu/services/webhosting/advanced/perlssi.shtml
        #yield <!--#perl sub="sub {print qq/If you see this, mod_perl is working!/;}" -->

    def _extract_result_from_payload(self, payload):
        '''
        Extract the expected result from the payload we're sending.
        '''
        match = self._extract_results_re.search(payload)
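        # Example (hypothetical rand_alpha() output): for the payload
        #   <!--#exec cmd="echo -n AbCdE;echo -n FgHiJ" -->
        # the two captured groups are 'AbCdE' and 'FgHiJ', so the expected
        # result searched for in the responses is 'AbCdEFgHiJ'.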
        return match.group(1) + match.group(2)

    def _analyze_result(self, mutant, response):
        '''
        Analyze the result of the previously sent request.
        :return: None, save the vuln to the kb.
        '''
        if self._has_no_bug(mutant):
            e_res = self._extract_result_from_payload(mutant.get_mod_value())
            if e_res in response and not e_res in mutant.get_original_response_body():

                desc = 'Server side include (SSI) was found at: %s'
                desc = desc % mutant.found_at()

                v = Vuln.from_mutant('Server side include vulnerability',
                                     desc, severity.HIGH, response.id,
                                     self.get_name(), mutant)

                v.add_to_highlight(e_res)
                self.kb_append_uniq(self, 'ssi', v)

    def end(self):
        '''
        This method is called when the plugin won't be used anymore and is used
        to find persistent SSI vulnerabilities.

        Example where a persistent SSI can be found:

        Say you have a "guestbook" (a CGI application that allows visitors
        to leave messages for everyone to see) on a server that has SSI
        enabled. Most such guestbooks around the Net actually allow visitors
        to enter HTML code as part of their comments. Now, what happens if a
        malicious visitor decides to do some damage by entering the following:

        <!--#exec cmd="ls" -->

        If the guestbook CGI program was designed carefully, to strip SSI
        commands from the input, then there is no problem. But, if it was not,
        there exists the potential for a major headache!

        For a working example please see moth VM.
        '''
        multi_in_inst = multi_in(self._expected_res_mutant.keys())

        def filtered_freq_generator(freq_list):
            already_tested = ScalableBloomFilter()

            for freq in freq_list:
                if freq not in already_tested:
                    already_tested.add(freq)
                    yield freq

        def analyze_persistent(freq, response):

            for matched_expected_result in multi_in_inst.query(
                    response.get_body()):
                # We found one of the expected results, now we search the
                # self._persistent_data to find which of the mutants sent it
                # and create the vulnerability
                mutant = self._expected_res_mutant[matched_expected_result]

                desc = 'Server side include (SSI) was found at: %s' \
                       ' The result of that injection is shown by browsing'\
                       ' to "%s".'
                desc = desc % (mutant.found_at(), freq.get_url())

                v = Vuln.from_mutant(
                    'Persistent server side include vulnerability', desc,
                    severity.HIGH, response.id, self.get_name(), mutant)

                v.add_to_highlight(matched_expected_result)
                self.kb_append(self, 'ssi', v)

        self._send_mutants_in_threads(self._uri_opener.send_mutant,
                                      filtered_freq_generator(self._freq_list),
                                      analyze_persistent,
                                      cache=False)

        self._expected_res_mutant.cleanup()
        self._freq_list.cleanup()

    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #10
File: csp.py  Project: Adastra-thw/w3af
class csp(GrepPlugin):
    '''
    This plugin identifies incorrect or too permissive CSP (Content Security Policy) headers returned by the web application under analysis.
    '''

    def __init__(self):
        '''
        Class init
        '''
        GrepPlugin.__init__(self)

        self._total_count = 0
        self._vulns = DiskList()
        self._urls = DiskList() 
                
    def get_long_desc(self):
        return '''
        This plugin identifies incorrect or too permissive CSP (Content Security Policy) headers
        returned by the web application under analysis.

        Additional information: 
        https://www.owasp.org/index.php/Content_Security_Policy
        http://www.w3.org/TR/CSP
        '''        

    def grep(self, request, response):
        '''
        Perform search on current HTTP request/response exchange.
        Store information about vulns for further global processing.
        
        @param request: HTTP request
        @param response: HTTP response  
        '''
        #Check that current URL has not already been analyzed
        response_url = str(response.get_url().uri2url())
        if response_url in self._urls:
            return        
        else:
            self._urls.append(response_url)    
                
        #Search issues using dedicated module
        csp_vulns = find_vulns(response)
        
        #Analyze issue list
        if len(csp_vulns) > 0:
            vuln_store_item = DiskCSPVulnStoreItem(response_url, response.id, csp_vulns)   
            self._vulns.append(vuln_store_item)
            #Increment the vulnerabilities counter 
            for csp_directive_name in csp_vulns:
                self._total_count += len(csp_vulns[csp_directive_name])
                
    def end(self):
        '''
        Perform global analysis for all vulnerabilities found.
        '''
        #Check if vulns have been found
        if self._total_count == 0:
            return
        
        #Parse vulns collection
        vuln_already_reported = []
        total_url_processed_count = len(self._urls)
        for vuln_store_item in self._vulns:
            for csp_directive_name, csp_vulns_list in vuln_store_item.csp_vulns.iteritems():
                for csp_vuln in csp_vulns_list:
                    #Check if the current vuln is common (shared) to several url processed 
                    #and has been already reported
                    if csp_vuln.desc in vuln_already_reported:
                        continue
                    #Search for current vuln occurences in order to know if 
                    #the vuln is common (shared) to several url processed                                    
                    occurences = self._find_occurences(csp_vuln.desc)
                    v = None
                    if len(occurences) > 1:
                        #Shared vuln case
                        v = Vuln('CSP vulnerability', csp_vuln.desc,
                            csp_vuln.severity, occurences, self.get_name())
                        vuln_already_reported.append(csp_vuln.desc)
                    else:
                        #Isolated vuln case
                        v = Vuln('CSP vulnerability', csp_vuln.desc,
                            csp_vuln.severity, vuln_store_item.resp_id, self.get_name())
                    #Report vuln
                    self.kb_append(self, 'csp', v)
                
        #Cleanup
        self._urls.cleanup()
        self._vulns.cleanup()


    def _find_occurences(self, vuln_desc):
        '''
        Internal utility function to find all occurences of a vuln 
        into the global collection of vulns found by the plugin.
        
        @param vuln_desc: Vulnerability description.
        @return: List of response ID for which the vuln is found.
        '''
        list_resp_id = []

        #Check input for quick exit
        if vuln_desc is None or vuln_desc.strip() == "":
            return list_resp_id
       
        #Parse vulns collection
        ref = vuln_desc.lower().strip()        
        for vuln_store_item in self._vulns:
            for csp_directive_name, csp_vulns_list in vuln_store_item.csp_vulns.iteritems():
                for csp_vuln in csp_vulns_list:        
                    if csp_vuln.desc.strip().lower() == ref:
                        if vuln_store_item.resp_id  not in list_resp_id:
                            list_resp_id.append(vuln_store_item.resp_id)

        return list_resp_id
Example #11
File: ssi.py  Project: Adastra-thw/w3af
class ssi(AuditPlugin):
    '''
    Find server side inclusion vulnerabilities.
    :author: Andres Riancho ([email protected])
    '''

    def __init__(self):
        AuditPlugin.__init__(self)

        # Internal variables
        self._expected_res_mutant = DiskDict()
        self._freq_list = DiskList()
        
        re_str = '<!--#exec cmd="echo -n (.*?);echo -n (.*?)" -->'
        self._extract_results_re = re.compile(re_str) 

    def audit(self, freq, orig_response):
        '''
        Tests a URL for server side inclusion vulnerabilities.

        :param freq: A FuzzableRequest
        '''
        # Create the mutants to send right now,
        ssi_strings = self._get_ssi_strings()
        mutants = create_mutants(freq, ssi_strings, orig_resp=orig_response)

        # Used in end() to detect "persistent SSI"
        for mut in mutants:
            expected_result = self._extract_result_from_payload(
                mut.get_mod_value())
            self._expected_res_mutant[expected_result] = mut

        self._freq_list.append(freq)
        # End of persistent SSI setup

        self._send_mutants_in_threads(self._uri_opener.send_mutant,
                                      mutants,
                                      self._analyze_result)

    def _get_ssi_strings(self):
        '''
        This method generates the server side include (SSI) strings to try.

        :return: A generator that yields the SSI payload strings described above.
        '''
        yield '<!--#exec cmd="echo -n %s;echo -n %s" -->' % (rand_alpha(5),
                                                             rand_alpha(5))

        # TODO: Add mod_perl ssi injection support
        # http://www.sens.buffalo.edu/services/webhosting/advanced/perlssi.shtml
        #yield <!--#perl sub="sub {print qq/If you see this, mod_perl is working!/;}" -->

    def _extract_result_from_payload(self, payload):
        '''
        Extract the expected result from the payload we're sending.
        '''
        match = self._extract_results_re.search(payload)
        return match.group(1) + match.group(2)

    def _analyze_result(self, mutant, response):
        '''
        Analyze the result of the previously sent request.
        :return: None, save the vuln to the kb.
        '''
        if self._has_no_bug(mutant):
            e_res = self._extract_result_from_payload(mutant.get_mod_value())
            if e_res in response and not e_res in mutant.get_original_response_body():
                
                desc = 'Server side include (SSI) was found at: %s'
                desc = desc % mutant.found_at()
                
                v = Vuln.from_mutant('Server side include vulnerability', desc,
                                     severity.HIGH, response.id, self.get_name(),
                                     mutant)

                v.add_to_highlight(e_res)
                self.kb_append_uniq(self, 'ssi', v)

    def end(self):
        '''
        This method is called when the plugin won't be used anymore and is used
        to find persistent SSI vulnerabilities.

        Example where a persistent SSI can be found:

        Say you have a "guestbook" (a CGI application that allows visitors
        to leave messages for everyone to see) on a server that has SSI
        enabled. Most such guestbooks around the Net actually allow visitors
        to enter HTML code as part of their comments. Now, what happens if a
        malicious visitor decides to do some damage by entering the following:

        <!--#exec cmd="ls" -->

        If the guestbook CGI program was designed carefully, to strip SSI
        commands from the input, then there is no problem. But, if it was not,
        there exists the potential for a major headache!

        For a working example please see moth VM.
        '''
        multi_in_inst = multi_in(self._expected_res_mutant.keys())

        def filtered_freq_generator(freq_list):
            already_tested = ScalableBloomFilter()

            for freq in freq_list:
                if freq not in already_tested:
                    already_tested.add(freq)
                    yield freq

        def analyze_persistent(freq, response):

            for matched_expected_result in multi_in_inst.query(response.get_body()):
                # We found one of the expected results, now we search the
                # self._persistent_data to find which of the mutants sent it
                # and create the vulnerability
                mutant = self._expected_res_mutant[matched_expected_result]
                
                desc = 'Server side include (SSI) was found at: %s' \
                       ' The result of that injection is shown by browsing'\
                       ' to "%s".' 
                desc = desc % (mutant.found_at(), freq.get_url())
                
                v = Vuln.from_mutant('Persistent server side include vulnerability',
                                     desc, severity.HIGH, response.id,
                                     self.get_name(), mutant)
                
                v.add_to_highlight(matched_expected_result)
                self.kb_append(self, 'ssi', v)

        self._send_mutants_in_threads(self._uri_opener.send_mutant,
                                      filtered_freq_generator(self._freq_list),
                                      analyze_persistent,
                                      cache=False)
        
        self._expected_res_mutant.cleanup()
        self._freq_list.cleanup()

    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #12
class cache_control(GrepPlugin):
    '''
    Grep every page for Pragma and Cache-Control headers.

    :author: Andres Riancho ([email protected])
    '''
    
    SAFE_CONFIG = {'pragma': 'no-cache',
                   'cache-control': 'no-store'}
    
    def __init__(self):
        GrepPlugin.__init__(self)

        self._total_count = 0
        self._vuln_count = 0
        self._vulns = DiskList()
        self._ids = DiskList()

    def grep(self, request, response):
        if response.is_image() or response.is_swf():
            return

        elif response.get_url().get_protocol() == 'http':
            return
        
        elif response.body == '':
            return
        
        else:
            self._total_count += 1
    
            cache_control_settings = self._get_cache_control(response)
            self._analyze_cache_control(cache_control_settings, response)
    
    def _get_cache_control(self, response):
        '''
        :param response: The http response we want to extract the information
                         from.
        :return: A list with the headers and meta tag information used to
                 configure the browser cache control.
        '''
        res = []
        CacheSettings = namedtuple('CacheSettings', ['type', 'value'])
        cache_control_headers = self.SAFE_CONFIG.keys()
        headers = response.get_headers()
        
        for _type in cache_control_headers:
            header_value, _ = headers.iget(_type, None)
            if header_value is not None:
                res.append( CacheSettings(_type, header_value.lower()) )
                
        try:
            doc_parser = parser_cache.dpc.get_document_parser_for(response)
        except w3afException:
            pass
        else:
            for meta_tag in doc_parser.get_meta_tags():
                header_name = meta_tag.get('http-equiv', None)
                header_value = meta_tag.get('content', None)
                if header_name is not None and header_value is not None:
                    header_name = header_name.lower()
                    header_value = header_value.lower()
                    if header_name in cache_control_headers:
                        res.append( CacheSettings(header_name, header_value) )
        
        return res

    def _analyze_cache_control(self, cache_control_settings, response):
        '''
        Analyze the cache control settings set in headers and meta tags,
        store the information to report the vulnerabilities.
        '''
        received_headers = set()
        
        for cache_setting in cache_control_settings:
            expected_header = self.SAFE_CONFIG[cache_setting.type]
            received_header = cache_setting.value.lower()
            received_headers.add(cache_setting.type)
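            # Substring check: a received value such as 'no-store, no-cache'
            # still satisfies the expected 'no-store' directive.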
            if expected_header not in received_header:
                # The header has an incorrect value
                self.is_vuln(response)
                return
        
        if len(received_headers) != len(self.SAFE_CONFIG):
            # No cache control header found
            self.is_vuln(response)
    
    def is_vuln(self, response):
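        # Count every unprotected response, but store each URL (and a single
        # response id for it) only once so the final report lists unique URLs.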
        self._vuln_count += 1
        if response.get_url() not in self._vulns:
            self._vulns.append(response.get_url())
            self._ids.append(response.id)
    
    def end(self):
        # If all URLs implement protection, don't report anything.
        if not self._vuln_count:
            return

        # If none of the URLs implement protection, simply report
        # ONE vulnerability that says that.
        if self._total_count == self._vuln_count:
            desc = 'The whole target web application has no protection (Pragma'\
                   ' and Cache-Control headers) against sensitive content'\
                   ' caching.'
            
        # If most of the URLs implement the protection but some
        # don't, report ONE vulnerability saying: "Most are protected,
        # but x, y are not."
        if self._total_count > self._vuln_count:
            desc = 'Some URLs have no protection (Pragma and Cache-Control'\
                  ' headers) against sensitive content caching. Among them:\n'
            desc += ' '.join([str(url) + '\n' for url in self._vulns])
        
        response_ids = [_id for _id in self._ids]
        
        v = Vuln('Missing cache control for HTTPS content', desc,
                 severity.LOW, response_ids, self.get_name())
        
        self.kb_append_uniq(self, 'cache_control', v, 'URL')
        
        self._vulns.cleanup()
        self._ids.cleanup()

    def get_long_desc(self):
        return '''\
Example #13
class path_disclosure(GrepPlugin):
    '''
    Grep every page for traces of path disclosure vulnerabilities.

    :author: Andres Riancho ([email protected])
    '''
    def __init__(self):
        GrepPlugin.__init__(self)

        # Internal variables
        self._already_added = DiskList()

        # Compile all regular expressions and store information to avoid
        # multiple queries to the same function
        self._common_directories = get_common_directories()
        self._compiled_regexes = {}
        self._compile_regex()

    def _compile_regex(self):
        '''
        :return: None, the result is saved in self._compiled_regexes
        '''
        #
        #    I tried to enhance the performance of this plugin by putting
        #    all the regular expressions in one (1|2|3|4...|N)
        #    That gave no visible result.
        #
        for path_disclosure_string in self._common_directories:
            regex_string = '(%s.*?)[^A-Za-z0-9\._\-\\/\+~]'
            regex_string = regex_string % path_disclosure_string
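            # e.g. assuming '/var/www' is one of the common directories, the
            # pattern becomes (/var/www.*?)[^A-Za-z0-9\._\-\\/\+~], which
            # captures the path up to the first character that cannot be part
            # of a filesystem path, so 'Warning in /var/www/app.php on line 2'
            # would yield '/var/www/app.php'.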
            regex = re.compile(regex_string, re.IGNORECASE)

            self._compiled_regexes[path_disclosure_string] = regex

    def _potential_disclosures(self, html_string):
        '''
        Taking into account that regular expressions are slow, we first
        apply this function to check if the HTML string has potential
        path disclosures.

        With this performance enhancement we reduce the plugin run time
        to 1/8 of the time in cases where no potential disclosures are found,
        and around 1/3 when potential disclosures *are* found.

        :return: A list of the potential path disclosures
        '''
        potential_disclosures = []
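        # Plain 'in' substring tests are cheap compared to the regular
        # expressions, so only the directories that actually appear in the
        # body will later be matched against their compiled regex.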

        for path_disclosure_string in self._common_directories:
            if path_disclosure_string in html_string:
                potential_disclosures.append(path_disclosure_string)

        return potential_disclosures

    def grep(self, request, response):
        '''
        Identify the path disclosure vulnerabilities.

        :param request: The HTTP request object.
        :param response: The HTTP response object
        :return: None, the result is saved in the kb.
        '''
        if not response.is_text_or_html():
            return

        if self.find_path_disclosure(request, response):
            self._update_KB_path_list()

    def find_path_disclosure(self, request, response):
        '''
        Actually find the path disclosure vulnerabilities
        '''
        html_string = response.get_body()

        for potential_disclosure in self._potential_disclosures(html_string):

            path_disc_regex = self._compiled_regexes[potential_disclosure]
            match_list = path_disc_regex.findall(html_string)

            # Decode the URL, this will transform things like
            #     http://host.tld/?id=%2Fhome
            # into,
            #     http://host.tld/?id=/home
            realurl = response.get_url().url_decode()

            #   Sort the matches by length; this is needed for filtering out
            #   some false positives, please read the note below.
            match_list.sort(self._longest)

            for match in match_list:

                # This if is to avoid false positives
                if not request.sent(match) and not \
                self._attr_value(match, html_string):

                    # Check for dups
                    if (realurl, match) in self._already_added:
                        continue

                    #   There is a rare bug also, which is triggered in cases like this one:
                    #
                    #   >>> import re
                    #   >>> re.findall('/var/www/.*','/var/www/foobar/htdocs/article.php')
                    #   ['/var/www/foobar/htdocs/article.php']
                    #   >>> re.findall('/htdocs/.*','/var/www/foobar/htdocs/article.php')
                    #   ['/htdocs/article.php']
                    #   >>>
                    #
                    #   What I need to do here, is to keep the longest match.
                    for realurl_added, match_added in self._already_added:
                        if match_added.endswith(match):
                            break
                    else:

                        #   Note to self: I get here when "break" is NOT executed.
                        #   It's a new one, report!
                        self._already_added.append((realurl, match))

                        desc = 'The URL: "%s" has a path disclosure'\
                               ' vulnerability which discloses "%s".'
                        desc = desc % (response.get_url(), match)

                        v = Vuln('Path disclosure vulnerability', desc,
                                 severity.LOW, response.id, self.get_name())

                        v.set_url(realurl)
                        v['path'] = match
                        v.add_to_highlight(match)

                        self.kb_append(self, 'path_disclosure', v)
                        return True

        return False

    def _longest(self, a, b):
        '''
        :param a: A string.
        :param b: Another string.
        :return: A cmp-style comparison of the lengths of a and b, used to
                 sort the match list by length.
        '''
        return cmp(len(a), len(b))

    def _attr_value(self, path_disclosure_string, response_body):
        '''
        This method was created to remove some false positives.

        :return: True if path_disclosure_string is the value of an attribute inside a tag.

        Examples:
            path_disclosure_string = '/home/image.png'
            response_body = '....<img src="/home/image.png">...'
            return: True

            path_disclosure_string = '/home/image.png'
            response_body = '...<b>Error while processing /home/image.png</b>...'
            return: False
        '''
        regex = '<.+?["\'](%s)["\'].*?>' % re.escape(path_disclosure_string)
        regex_res = re.findall(regex, response_body)
        in_attr = path_disclosure_string in regex_res
        return in_attr

    def _update_KB_path_list(self):
        '''
        If a path disclosure was found, I can create a list of full paths to
        all URLs ever visited. This method updates that list.
        '''
        path_disc_vulns = kb.kb.get('path_disclosure', 'path_disclosure')
        url_list = kb.kb.get_all_known_urls()

        # Now I find the longest match between one of the URLs that w3af has
        # discovered, and one of the path disclosure strings that this plugin
        # has found. I use the longest match because with small match_list I
        # have more probability of making a mistake.
        longest_match = ''
        longest_path_disc_vuln = None
        for path_disc_vuln in path_disc_vulns:
            for url in url_list:
                path_and_file = url.get_path()

                if path_disc_vuln['path'].endswith(path_and_file):
                    if len(longest_match) < len(path_and_file):
                        longest_match = path_and_file
                        longest_path_disc_vuln = path_disc_vuln

        # Now I recalculate the place where all the resources are in disk, all
        # this is done taking the longest_match as a reference, so... if we
        # don't have a longest_match, then nothing is actually done
        if not longest_match:
            return

        # Get the webroot
        webroot = longest_path_disc_vuln['path'].replace(longest_match, '')
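        # Example with hypothetical values: a disclosed path of
        # '/var/www/site/htdocs/index.php' combined with a known URL path of
        # '/htdocs/index.php' leaves webroot == '/var/www/site'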

        #
        # This if fixes a strange case reported by Olle
        #         if webroot[0] == '/':
        #         IndexError: string index out of range
        # That seems to be because the webroot == ''
        #
        if not webroot:
            return

        # Check what path separator we should use (linux / windows)
        path_sep = '/' if webroot.startswith('/') else '\\'
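        # e.g. '/var/www/site' starts with '/' so '/' is kept, while a windows
        # webroot such as 'C:\xampp\htdocs' gets '\' as the separator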

        # Create the remote locations
        remote_locations = []
        for url in url_list:
            remote_path = url.get_path().replace('/', path_sep)
            remote_locations.append(webroot + remote_path)
        remote_locations = list(set(remote_locations))

        kb.kb.raw_write(self, 'list_files', remote_locations)
        kb.kb.raw_write(self, 'webroot', webroot)

    def end(self):
        self._already_added.cleanup()

    def get_long_desc(self):
        '''
        :return: A DETAILED description of the plugin functions and features.
        '''
        return '''
Example #14
class click_jacking(GrepPlugin):
    '''
    Grep every page for X-Frame-Options header.

    :author: Taras ([email protected])
    '''

    def __init__(self):
        GrepPlugin.__init__(self)

        self._total_count = 0
        self._vuln_count = 0
        self._vulns = DiskList()
        self._ids = DiskList()

    def grep(self, request, response):
        '''
        TODO: need to check here for auth cookie?!
        '''
        if not response.is_text_or_html():
            return

        self._total_count += 1

        headers = response.get_lower_case_headers()
        x_frame_options = headers.get('x-frame-options', '')
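        # Only DENY and SAMEORIGIN count as protection here; a missing or
        # empty header (and any other value, e.g. ALLOW-FROM) is flagged below.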

        if not x_frame_options.lower() in ('deny', 'sameorigin'):
            self._vuln_count += 1
            if response.get_url() not in self._vulns:
                self._vulns.append(response.get_url())
                self._ids.append(response.id)

    def end(self):
        # If all URLs implement protection, don't report anything.
        if not self._vuln_count:
            return

        response_ids = [_id for _id in self._ids]
        
        # If none of the URLs implement protection, simply report
        # ONE vulnerability that says that.
        if self._total_count == self._vuln_count:
            desc = 'The whole target has no protection (X-Frame-Options'\
                   ' header) against Click-Jacking attacks.'
        # If most of the URLs implement the protection but some
        # don't, report ONE vulnerability saying: "Most are protected,
        # but x, y are not."
        if self._total_count > self._vuln_count:
            desc = 'Some URLs have no protection (X-Frame-Options header)'\
                   ' against Click-Jacking attacks. Among them:\n'
            desc += ' '.join([str(url) + '\n' for url in self._vulns])

        v = Vuln('Click-Jacking vulnerability', desc,
                 severity.MEDIUM, response_ids, self.get_name())
        
        self.kb_append(self, 'click_jacking', v)
        
        self._vulns.cleanup()
        self._ids.cleanup()

    def get_long_desc(self):
        return '''
Example #15
class cache_control(GrepPlugin):
    '''
    Grep every page for Pragma and Cache-Control headers.

    :author: Andres Riancho ([email protected])
    '''

    SAFE_CONFIG = {'pragma': 'no-cache', 'cache-control': 'no-store'}

    def __init__(self):
        GrepPlugin.__init__(self)

        self._total_count = 0
        self._vuln_count = 0
        self._vulns = DiskList()
        self._ids = DiskList()

    def grep(self, request, response):
        if response.is_image() or response.is_swf():
            return

        elif response.get_url().get_protocol() == 'http':
            return

        elif response.body == '':
            return

        else:
            self._total_count += 1

            cache_control_settings = self._get_cache_control(response)
            self._analyze_cache_control(cache_control_settings, response)

    def _get_cache_control(self, response):
        '''
        :param response: The http response we want to extract the information
                         from.
        :return: A list with the headers and meta tag information used to
                 configure the browser cache control.
        '''
        res = []
        CacheSettings = namedtuple('CacheSettings', ['type', 'value'])
        cache_control_headers = self.SAFE_CONFIG.keys()
        headers = response.get_headers()

        for _type in cache_control_headers:
            header_value, _ = headers.iget(_type, None)
            if header_value is not None:
                res.append(CacheSettings(_type, header_value.lower()))

        try:
            doc_parser = parser_cache.dpc.get_document_parser_for(response)
        except w3afException:
            pass
        else:
            for meta_tag in doc_parser.get_meta_tags():
                header_name = meta_tag.get('http-equiv', None)
                header_value = meta_tag.get('content', None)
                if header_name is not None and header_value is not None:
                    header_name = header_name.lower()
                    header_value = header_value.lower()
                    if header_name in cache_control_headers:
                        res.append(CacheSettings(header_name, header_value))

        return res

    def _analyze_cache_control(self, cache_control_settings, response):
        '''
        Analyze the cache control settings set in headers and meta tags,
        store the information to report the vulnerabilities.
        '''
        received_headers = set()

        for cache_setting in cache_control_settings:
            expected_header = self.SAFE_CONFIG[cache_setting.type]
            received_header = cache_setting.value.lower()
            received_headers.add(cache_setting.type)
            if expected_header not in received_header:
                # The header has an incorrect value
                self.is_vuln(response)
                return

        if len(received_headers) != len(self.SAFE_CONFIG):
            # No cache control header found
            self.is_vuln(response)

    def is_vuln(self, response):
        self._vuln_count += 1
        if response.get_url() not in self._vulns:
            self._vulns.append(response.get_url())
            self._ids.append(response.id)

    def end(self):
        # If all URLs implement protection, don't report anything.
        if not self._vuln_count:
            return

        # If none of the URLs implement protection, simply report
        # ONE vulnerability that says that.
        if self._total_count == self._vuln_count:
            desc = 'The whole target web application has no protection (Pragma'\
                   ' and Cache-Control headers) against sensitive content'\
                   ' caching.'

        # If most of the URLs implement the protection but some
        # don't, report ONE vulnerability saying: "Most are protected,
        # but x, y are not."
        if self._total_count > self._vuln_count:
            desc = 'Some URLs have no protection (Pragma and Cache-Control'\
                  ' headers) against sensitive content caching. Among them:\n'
            desc += ' '.join([str(url) + '\n' for url in self._vulns])

        response_ids = [_id for _id in self._ids]

        v = Vuln('Missing cache control for HTTPS content', desc, severity.LOW,
                 response_ids, self.get_name())

        self.kb_append_uniq(self, 'cache_control', v, 'URL')

        self._vulns.cleanup()
        self._ids.cleanup()

    def get_long_desc(self):
        return '''\