Example #1
 def discover(self, fuzzableRequest ):
     '''
     Sends the special request.
     
     @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                                 (among other things) the URL to test.
     '''
     domain = urlParser.getDomain(fuzzableRequest.getURL())
     extension = urlParser.getExtension(fuzzableRequest.getURL())
     
     if (domain, extension) not in self._already_tested:
         
         # Do it only one time
         self._already_tested.append( (domain, extension) )
         
         # Generate the new URL
         domain += '.'
         path = urlParser.getPath( fuzzableRequest.getURL() )
         protocol = urlParser.getProtocol( fuzzableRequest.getURL() )
         new_URL = protocol + '://' + domain + path
         try:
             # GET the original response
             original_response = self._urlOpener.GET( fuzzableRequest.getURL(), useCache=False )
             # GET the response with the modified domain (with the trailing dot)
             response = self._urlOpener.GET( new_URL, useCache=False )
         except KeyboardInterrupt,e:
             raise e
         except w3afException,w3:
             om.out.error( str(w3) )
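All of these snippets rely on w3af's urlParser helpers (getDomain, getProtocol, getPath and friends). Purely as a point of reference, a minimal stand-in built on Python 2's standard urlparse module could look like the sketch below; it is not w3af's implementation, only an illustration of what the calls return.

    # Minimal sketch of domain/protocol/path extraction with the standard
    # library; w3af's urlParser has its own, richer implementation.
    from urlparse import urlparse

    def get_domain(url):
        # Host part of the URL, without the port.
        return urlparse(url).netloc.split(':')[0]

    def get_protocol(url):
        return urlparse(url).scheme

    def get_path(url):
        return urlparse(url).path or '/'

    url = 'http://www.example.com:8080/a/b.php?x=1'
    print get_domain(url)    # www.example.com
    print get_protocol(url)  # http
    print get_path(url)      # /a/b.php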
Example #2
 def discover(self, fuzzableRequest ):
     '''
     Get www.site.com and site.com and compare responses.
     
     @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
     '''
     if not self._exec :
         # This will remove the plugin from the discovery plugins to be run.
         raise w3afRunOnce()
         
     else:
         # Only run once
         self._exec = False
         
         if not re.match('\d?\d?\d?\.\d?\d?\d?\.\d?\d?\d?\.\d?\d?\d?',
                                 urlParser.getDomain( fuzzableRequest.getURL() ) ):
             # Only do all this if this is a domain name!
             base_url = urlParser.baseUrl( fuzzableRequest.getURL() )
             original_response = self._urlOpener.GET( base_url, useCache=True )
             
             domain = urlParser.getDomain( fuzzableRequest.getURL() )
             proto = urlParser.getProtocol( fuzzableRequest.getURL() )
             if domain.startswith('www.'):
                 dns_wildcard_url = proto + '://' + domain.replace('www.', '') + '/'
             else:
                 dns_wildcard_url = proto + '://www.' + domain + '/'
             
             self._test_DNS( original_response, dns_wildcard_url )
             self._test_IP( original_response, domain )
             
         return []
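Example #2 only runs its DNS-wildcard checks when the target host is a domain name, using the permissive regex above to recognise raw IPv4 addresses. As a hedged alternative (assuming the regex's only job is "skip the check for IP literals"), socket.inet_aton gives a stricter test:

    # A stricter IPv4-literal check than the regex above (a stand-in, not w3af code).
    import socket

    def is_ipv4_literal(host):
        try:
            socket.inet_aton(host)
        except socket.error:
            return False
        # inet_aton also accepts short forms such as '127.1', so require
        # the usual dotted-quad shape as well.
        return host.count('.') == 3

    print is_ipv4_literal('192.168.1.1')         # True
    print is_ipv4_literal('www.bonsai-sec.com')  # False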
Example #3
    def doAll(self):
        global global_firstRequest
        if global_firstRequest:
            global_firstRequest = False
            om.out.information("The user is navigating through the spiderMan proxy.")

        if self.path == TERMINATE_URL:
            om.out.information("The user terminated the spiderMan session.")
            self._sendEnd()
            self._spiderMan.stopProxy()
        else:
            om.out.debug("[spiderMan] Handling request: " + self.command + " " + self.path)
            #   Send this information to the plugin so it can send it to the core
            freq = self._createFuzzableRequest()
            self._spiderMan.append_fuzzable_request(freq)

            grep = False
            if urlParser.getDomain(self.path) == self.server.w3afLayer.targetDomain:
                grep = True

            try:
                response = self._sendToServer(grep=grep)
            except Exception, e:
                self._sendError(e)
            else:
Example #4
    def discover(self, fuzzableRequest):
        '''
        @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                    (among other things) the URL to test.
        '''
        if not self._run:
            # This will remove the plugin from the discovery plugins to be run.
            raise w3afRunOnce()

        # I will only run this one time. All calls to bing_spider return the same URLs
        self._run = False
        bingSE = bing(self._urlOpener)
        domain = urlParser.getDomain(fuzzableRequest.getURL())

        if is_private_site(domain):
            msg = 'There is no point in searching Bing for "site:'+ domain + '".'
            msg += ' Bing doesn\'t index private pages.'
            raise w3afException( msg )

        results = bingSE.getNResults('site:'+ domain, self._resultLimit)

        for res in results:
            targs = (res.URL,)
            self._tm.startFunction(target=self._genFuzzableRequests,
                                    args=targs, ownerObj=self)
        self._tm.join( self )

        return self._fuzzableRequests
Example #5
 def discover(self, fuzzableRequest ):
     '''
     @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
     '''
     self._fuzzableRequests = []
     if not self._run:
         # This will remove the plugin from the discovery plugins to be run.
         raise w3afRunOnce()
     else:
         
         # update !
         if self._update_ghdb:
             self._update_db()
             
         # I will only run this one time. All calls to ghdb return the same URLs
         self._run = False
         
         # Get the domain and set some parameters
         domain = urlParser.getDomain( fuzzableRequest.getURL() )
         if is_private_site( domain ):
             msg = 'There is no point in searching google for "site:'+ domain
             msg += '" . Google doesnt index private pages.'
             raise w3afException( msg )
         
         return self._do_clasic_GHDB( domain )
     
     return []
Example #6
    def discover(self, fuzzableRequest ):
        '''
        @parameter fuzzableRequest: A fuzzableRequest instance that contains 
                                                    (among other things) the URL to test.
        '''
        if not self._run:
            # This will remove the plugin from the discovery plugins to be run.
            raise w3afRunOnce()
        else:
            # I will only run this one time. All calls to googleSpider return the same URLs
            self._run = False
            
            google_se = google(self._urlOpener)
            
            domain = urlParser.getDomain( fuzzableRequest.getURL() )
            if is_private_site( domain ):
                msg = 'There is no point in searching google for "site:'+ domain + '".'
                msg += ' Google doesn\'t index private pages.'
                raise w3afException( msg )

            try:
                results = google_se.getNResults('site:'+ domain, self._result_limit)
            except w3afException, w3:
                om.out.error(str(w3))
                # If I found an error, I don't want to be run again
                raise w3afRunOnce()
            else:
Example #7
    def discover(self, fuzzableRequest ):
        '''
        @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
        '''
        self._new_fuzzable_requests = []
        if not self._run:
            # This will remove the plugin from the discovery plugins to be run.
            raise w3afRunOnce()
        else:
            # I will only run this one time. All calls to yahooSiteExplorer return the same URLs
            self._run = False
            self._yse = yse( self._urlOpener )
            
            domain = urlParser.getDomain( fuzzableRequest.getURL() )
            if is_private_site( domain ):
                msg = 'There is no point in searching yahoo site explorer for site: "'
                msg += domain + '" . Yahoo doesnt index private pages.'
                raise w3afException(msg)

            results = self._yse.search( domain, 0, self._result_limit )
                
            for res in results:
                #   Send the requests using threads:
                targs = (res.URL,)
                self._tm.startFunction( target=self._generate_fuzzable_requests, \
                                                    args=targs, ownerObj=self )
            
            # Wait for all threads to finish
            self._tm.join( self )
            
        return self._new_fuzzable_requests
Example #8
    def _generic_vhosts( self, fuzzableRequest ):
        '''
        Test some generic virtual hosts, only do this once.
        '''
        res = []
        base_url = urlParser.baseUrl(fuzzableRequest.getURL())
        
        common_vhost_list = self._get_common_virtualhosts(urlParser.getDomain(base_url))
        
        # Get some responses to compare later
        original_response = self._urlOpener.GET(base_url, useCache=True)
        orig_resp_body = original_response.getBody()
        non_existant = 'iDoNotExistPleaseGoAwayNowOrDie' + createRandAlNum(4)
        self._non_existant_response = self._urlOpener.GET(base_url, useCache=False, \
                                                        headers={'Host': non_existant })
        nonexist_resp_body = self._non_existant_response.getBody()
        
        for common_vhost in common_vhost_list:
            try:
                vhost_response = self._urlOpener.GET( base_url, useCache=False, \
                                                headers={'Host': common_vhost } )
            except w3afException:
                pass
            else:
                vhost_resp_body = vhost_response.getBody()

                # If they are *really* different (not just different by some chars)
                if relative_distance_lt(vhost_resp_body, orig_resp_body, 0.35) and \
                    relative_distance_lt(vhost_resp_body, nonexist_resp_body, 0.35):
                    res.append((common_vhost, vhost_response.id))
        
        return res
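relative_distance_lt above comes from w3af's fuzzy-comparison helpers; the snippet treats two response bodies as "really different" when their similarity falls below 0.35. A rough stand-in using difflib (an assumption about the semantics, not w3af's implementation) would be:

    # Hedged stand-in for w3af's relative_distance_lt (assumption: it compares
    # the similarity of two strings against a threshold; this is NOT w3af's code).
    import difflib

    def relative_distance_lt_sketch(body_a, body_b, threshold):
        # True when the two bodies are less similar than `threshold`.
        ratio = difflib.SequenceMatcher(None, body_a, body_b).ratio()
        return ratio < threshold

    print relative_distance_lt_sketch('abcdef', 'abcxyz', 0.35)  # False (ratio 0.5)
    print relative_distance_lt_sketch('abcdef', 'uvwxyz', 0.35)  # True  (ratio 0.0)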
Example #9
 def test_links_results_domain(self):
     domain = "www.bonsai-sec.com"
     query = "site:%s security" % domain
     start = 0
     for searcher in self._get_google_searchers(query, start, self.count):
         # returned URLs' domain should be the expected
         for link in searcher.links:
             link_domain = urlParser.getDomain(link.URL)
             self.assertTrue(link_domain == domain, 
                             "Current link domain is '%s'. Expected: '%s'" % (link_domain, domain))
Example #10
    def discover(self, fuzzableRequest):
        """
        Searches for links in the HTML.

        @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
        """
        om.out.debug("webSpider plugin is testing: " + fuzzableRequest.getURL())

        if self._first_run:
            # I have to set some variables, in order to be able to code the "onlyForward" feature
            self._first_run = False
            self._target_urls = [urlParser.getDomainPath(i) for i in cf.cf.getData("targets")]
            self._target_domain = urlParser.getDomain(cf.cf.getData("targets")[0])

        # If it's a form, then smartFill the Dc.
        original_dc = fuzzableRequest.getDc()
        if isinstance(fuzzableRequest, httpPostDataRequest.httpPostDataRequest):

            # TODO!!!!!!
            if fuzzableRequest.getURL() in self._already_filled_form:
                return []
            else:
                self._already_filled_form.add(fuzzableRequest.getURL())

            to_send = original_dc.copy()
            for parameter_name in to_send:

                # I do not want to mess with the "static" fields
                if isinstance(to_send, form.form):
                    if to_send.getType(parameter_name) in ["checkbox", "file", "radio", "select"]:
                        continue

                #
                #   Set all the other fields, except for the ones that have a value set (example:
                #   hidden fields like __VIEWSTATE).
                #
                for element_index in xrange(len(to_send[parameter_name])):

                    #   should I ignore it because it already has a value?
                    if to_send[parameter_name][element_index] != "":
                        continue

                    #   smartFill it!
                    to_send[parameter_name][element_index] = smartFill(parameter_name)

            fuzzableRequest.setDc(to_send)

        self._fuzzableRequests = []
        response = None

        try:
            response = self._sendMutant(fuzzableRequest, analyze=False)
        except KeyboardInterrupt, e:
            raise e
Example #11
    def _findAccounts(self, page):
        '''
        Finds mails in bing result.

        @return: A list of valid accounts
        '''
        try:
            om.out.debug('Searching for mails in: ' + page.URL)
            if self._domain == urlParser.getDomain(page.URL):
                response = self._urlOpener.GET(page.URL, useCache=True, grepResult=True)
            else:
                response = self._urlOpener.GET(page.URL, useCache=True, grepResult=False)
        except KeyboardInterrupt, e:
            raise e
Example #12
 def discover(self, fuzzableRequest ):
     '''
     Does a search in archive.org and searches for links in the HTML. Then searches those
     URLs in the target site. This is a time machine!
     
     @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things)
                                                   the URL to test.
     '''
     # Get the domain and set some parameters
     domain = urlParser.getDomain( fuzzableRequest.getURL() )
     if is_private_site( domain ):
         msg = 'There is no point in searching archive.org for "'+ domain + '"'
         msg += ' because it is a private site that will never be indexed.'
         raise w3afException(msg)
     else:
         # Work
         om.out.debug( 'archiveDotOrg plugin is testing: ' + fuzzableRequest.getURL() )
         
         start_url = 'http://web.archive.org/web/*/' + fuzzableRequest.getURL()
         domain = urlParser.getDomain( fuzzableRequest.getURL() )
         references = self._spider_archive( [ start_url, ] , self._max_depth, domain )
         
         return self._analyze_urls( references )
Example #13
    def _sslCookieValueUsedInHTTP(self, request, response):
        """
        Analyze if a cookie value, sent in a HTTPS request, is now used for identifying the user in an insecure page.
        Example:
            Login is done over SSL
            The rest of the page is HTTP
        """
        if request.getURL().startswith("http://"):
            for cookie in kb.kb.getData("collectCookies", "cookies"):
                if cookie.getURL().startswith("https://") and urlParser.getDomain(
                    request.getURL()
                ) == urlParser.getDomain(cookie.getURL()):
                    # The cookie was sent using SSL, so I'll check whether the current
                    # request is using these values in the POSTDATA / QS / COOKIE
                    for key in cookie["cookie-object"].keys():
                        # This if is to create less false positives
                        if len(cookie["cookie-object"][key]) > 4:

                            for parameter_name in request.getDc():

                                # added to support repeated parameter names.
                                for parameter_value_i in request.getDc()[parameter_name]:

                                    # The first statement of this if is to make this algorithm faster
                                    if len(parameter_value_i) > 4 and parameter_value_i == cookie["cookie-object"][key]:
                                        v = vuln.vuln()
                                        v.setPluginName(self.getName())
                                        v.setURL(response.getURL())
                                        self._setCookieToRep(v, cobj=cookie)
                                        v.setSeverity(severity.HIGH)
                                        v.setId(response.id)
                                        v.setName("Secure cookies over insecure channel")
                                        msg = "Cookie values that were set over HTTPS, are sent over "
                                        msg += 'an insecure channel when requesting URL: "'
                                        msg += request.getURL() + '" , parameter "' + parameter_name + '"'
                                        v.setDesc(msg)
                                        kb.kb.append(self, "cookies", v)
Example #14
 def endElement(self, name):
     if name == 'phish_detail_url':
         self.inside_detail = False
     if name == 'url':
         self.inside_URL = False
     if name == 'entry':
         self.inside_entry = False
         #
         #   Now I try to match the entry with an element in the to_check_list
         #
         phish_domain = urlParser.getDomain( self.url )
         for url in self._to_check_list:
             if url == phish_domain or phish_domain.endswith('.' + url ):
                 ptm = phishTankMatch( self.url, self.phish_detail_url )
                 self.matches.append( ptm )
Example #15
 def __init__( self, httpResponse ):
     # "setBaseUrl"
     url = httpResponse.getURL()
     redirURL = httpResponse.getRedirURL()
     if redirURL:
         url = redirURL
     self._baseUrl = url
     self._baseDomain = urlParser.getDomain(url)
     self._rootDomain = urlParser.getRootDomain(url)
     
     # A nice default
     self._encoding = 'utf-8'
     
     # To store results
     self._emails = []
     self._re_URLs = []
Example #16
 def _find_accounts(self, googlePage ):
     '''
     Finds mails in google result page.
     
     @return: A list of valid accounts
     '''
     try:
         om.out.debug('Searching for mails in: ' + googlePage.getURI() )
         if self._domain == urlParser.getDomain( googlePage.getURI() ):
             response = self._urlOpener.GET( googlePage.getURI(), useCache=True, \
                                                             grepResult=True )
         else:
             response = self._urlOpener.GET( googlePage.getURI(), useCache=True, \
                                                             grepResult=False )
     except KeyboardInterrupt, e:
         raise e
Example #17
 def _fr2urllibReq( self, fuzzableRequest ):
     '''
     Convert a fuzzableRequest to a urllib2 request object. 
     Used in http_request.
     
     @parameter fuzzableRequest: A fuzzableRequest.
     @return: A urllib2 request obj.
     '''
     host = getDomain( fuzzableRequest.getURL() )
     
     if fuzzableRequest.getMethod().upper() == 'GET':
         data = None
     else:
         data = fuzzableRequest.getData()
     req = HTTPRequest( fuzzableRequest.getURI(), data=data\
     , headers=fuzzableRequest.getHeaders(), origin_req_host=host )
     return req
Example #18
    def _verifyURL(self, targetUrl, fileTarget=True):
        """
        Verify if the URL is valid and raise an exception if w3af doesn't support it.
        """
        protocol = urlParser.getProtocol(targetUrl)
        domain = urlParser.getDomain(targetUrl) or ""

        aFile = fileTarget and protocol == "file" and domain
        aHTTP = protocol in ["http", "https"] and urlParser.isValidURLDomain(targetUrl)

        if not (aFile or aHTTP):
            msg = (
                'Invalid format for target URL "%s", you have to specify '
                "the protocol (http/https/file) and a domain or IP address. "
                "Examples: http://host.tld/ ; https://127.0.0.1/ ." % targetUrl
            )
            raise w3afException(msg)
Example #19
 def grep_wrapper(self, fuzzableRequest, response):
     '''
     This method tries to find patterns on responses.
     
     This method CAN be implemented on a plugin, but it's better to do your searches in _testResponse().
     
     @param response: This is the httpResponse object to test.
     @param fuzzableRequest: This is the fuzzable request object that generated the current response being analyzed.
     @return: If something is found it must be reported to the Output Manager and the KB.
     '''
     if response.getFromCache():
         #om.out.debug('Grep plugins not testing: ' + repr(fuzzableRequest) + ' cause it was already tested.' )
         pass
     elif urlParser.getDomain( fuzzableRequest.getURL() ) in cf.cf.getData('targetDomains'):
         self.grep( fuzzableRequest, response )
     else:
         #om.out.debug('Grep plugins not testing: ' + fuzzableRequest.getURL() + ' cause it aint a target domain.' )
         pass
Example #20
 def _grepResult(self, request, response):
     # The grep process is all done in another thread. This improves the
     # speed of all w3af.
     if len( self._grepPlugins ) and urlParser.getDomain( request.get_full_url() ) in cf.cf.getData('targetDomains'):
         
         # I'll create a fuzzable request based on the urllib2 request object
         fuzzReq = createFuzzableRequestRaw( request.get_method(), request.get_full_url(), request.get_data(), request.headers )
         
         for grep_plugin in self._grepPlugins:
             #
             #   For debugging, do not remove, only comment out if needed.
             #
             self._grep_worker( grep_plugin, fuzzReq, response )
             
             # TODO: Analyze if creating a different threadpool for grep workers speeds up the whole process
             #targs = (grep_plugin, fuzzReq, response)
             #self._tm.startFunction( target=self._grep_worker, args=targs, ownerObj=self, restrict=False )
         
         self._tm.join( self )
Example #21
 def discover(self, fuzzableRequest ):
     '''
     Find virtual hosts.
     
     @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                                 (among other things) the URL to test.
     '''
     vhost_list = []
     if self._first_exec:
         # Only run once
         self._first_exec = False
         vhost_list = self._generic_vhosts( fuzzableRequest )
         
         # Set this for later
         self._can_resolve_domain_names = self._can_resolve_domains()
         
     
     # I also test for ""dead links"" that the web programmer left in the page
     # For example, If w3af finds a link to "http://corporative.intranet.corp/" it will try to
     # resolve the dns name, if it fails, it will try to request that page from the server
     vhost_list.extend( self._get_dead_links( fuzzableRequest ) )
     
     # Report our findings
     for vhost, request_id in vhost_list:
         v = vuln.vuln()
         v.setPluginName(self.getName())
         v.setURL( fuzzableRequest.getURL() )
         v.setMethod( 'GET' )
         v.setName( 'Shared hosting' )
         v.setSeverity(severity.LOW)
         
         domain = urlParser.getDomain(fuzzableRequest.getURL())
         
         msg = 'Found a new virtual host at the target web server, the virtual host name is: "'
         msg += vhost + '". To access this site you might need to change your DNS resolution'
         msg += ' settings in order to point "' + vhost + '" to the IP address of "'
         msg += domain + '".'
         v.setDesc( msg )
         v.setId( request_id )
         kb.kb.append( self, 'findvhost', v )
         om.out.information( v.getDesc() )       
     
     return []
Example #22
 def _local_test_inclusion(self, freq):
     '''
     Check for RFI using a local web server
     
     @param freq: A fuzzableRequest object
     @return: None, everything is saved to the kb
     '''
     #
     # The listen address is an empty string when I have no default route
     #
     # Only work if:
     #   - The listen address is private and the target address is private
     #   - The listen address is public and the target address is public
     #
     if self._listen_address == '':
         return
     
     is_listen_priv = is_private_site(self._listen_address)
     is_target_priv = is_private_site(urlParser.getDomain(freq.getURL()))
         
     if (is_listen_priv and is_target_priv) or \
         not (is_listen_priv or is_target_priv):
         om.out.debug('RFI test using local web server for URL: ' + freq.getURL())
         om.out.debug('w3af is running a webserver')
         try:
             # Create file for remote inclusion
             self._create_file()
             
             # Start web server
             webroot = os.path.join(get_home_dir(), 'webroot')
             webserver.start_webserver(self._listen_address,
                                       self._listen_port, webroot)
             
             # Perform the real work
             self._test_inclusion(freq)
             
             # Wait for threads to finish
             self._tm.join(self)
         finally:
             self._rm_file()
Example #23
    def audit(self, freq ):
        '''
        Get the cert and do some checks against it.

        @param freq: A fuzzableRequest
        '''
        url = freq.getURL()
        
        if 'HTTPS' != getProtocol( url ).upper():
            return

        domain = getDomain(url)
        # We need to check certificate only once per host
        if domain in self._already_tested_domains:
            return

        # Parse the domain:port
        splited = getNetLocation(url).split(':')
        port = 443
        host = splited[0]

        if len( splited ) > 1:
            port = int(splited[1])

        # Create the connection
        socket_obj = socket.socket()
        try:
            socket_obj.connect( ( host , port ) )
            ctx = SSL.Context(SSL.SSLv23_METHOD)
            ssl_conn = SSL.Connection(ctx, socket_obj)

            # Go to client mode
            ssl_conn.set_connect_state()

            # If I don't send something here, the "get_peer_certificate"
            # method returns None. Don't ask me why!
            #ssl_conn.send('GET / HTTP/1.1\r\n\r\n')
            self.ssl_wrapper( ssl_conn, ssl_conn.send, ('GET / HTTP/1.1\r\n\r\n', ), {})
        except Exception, e:
            om.out.error('Error in audit.sslCertificate: "' + repr(e)  +'".')
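The plugin above uses PyOpenSSL and needs the "send something first" workaround noted in its comment before get_peer_certificate returns anything useful. Purely as a point of comparison (not part of the plugin), Python's standard ssl module can fetch the peer certificate in PEM form with a single call:

    # Standard-library way to grab a server certificate in PEM form
    # (comparison only; the sslCertificate plugin itself uses PyOpenSSL).
    import ssl

    pem_cert = ssl.get_server_certificate(('www.example.com', 443))
    print pem_cert.splitlines()[0]   # -----BEGIN CERTIFICATE-----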
Example #24
 def discover(self, fuzzableRequest ):
     '''
     Plugin entry point, perform all the work.
     '''
     
     if not self._run:
         # This will remove the plugin from the discovery plugins to be run.
         raise w3afRunOnce()
     else:
         # Run one time
         self._run = False
         
         if self._update_DB:
             self._do_update()
         
         domain = urlParser.getDomain( fuzzableRequest.getURL() )
         to_check_list = self._get_to_check( domain )
         
         # I found some URLs, create fuzzable requests
         phishtank_matches = self._is_in_phishtank( to_check_list )
         for ptm in phishtank_matches:
             response = self._urlOpener.GET( ptm.url )
             self._fuzzable_requests.extend( self._createFuzzableRequests( response ) )
         
         # Only create the vuln object once
         if phishtank_matches:
             v = vuln.vuln()
             v.setPluginName(self.getName())
             v.setURL( ptm.url )
             v.setId( response.id )
             v.setName( 'Phishing scam' )
             v.setSeverity(severity.LOW)
             desc = 'The URL: "' + ptm.url + '" seems to be involved in a phishing scam. Please see "'
             desc += ptm.more_info_URL + '" for more info.'
             v.setDesc( desc )
             kb.kb.append( self, 'phishtank', v )
             om.out.vulnerability( v.getDesc(), severity=v.getSeverity() )
             
     return self._fuzzable_requests
Example #25
    def discover(self, freq):

        if not self._run:
            # This will remove the plugin from the discovery plugins to be run.
            raise w3afRunOnce()
        else:
            self._run = False

            # Create the proxy server
            self._proxy = proxy(self._listenAddress, self._listenPort, self._urlOpener, self.createPH())
            self._proxy.targetDomain = urlParser.getDomain(freq.getURL())

            # Inform the user
            msg = "spiderMan proxy is running on " + self._listenAddress + ":"
            msg += str(self._listenPort) + ".\nPlease configure your browser to use these proxy"
            msg += " settings and navigate the target site.\nTo exit spiderMan plugin please"
            msg += " navigate to " + TERMINATE_URL + " ."
            om.out.information(msg)

            # Run the server
            self._proxy.run()

        return self._fuzzableRequests
Example #26
 def discover(self, fuzzableRequest ):
     '''
     @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                                 (among other things) the URL to test.
     '''
     if not self._run:
         # This will remove the plugin from the discovery plugins to be run.
         raise w3afRunOnce()
     else:
         # This plugin will only run one time. 
         self._run = False
         
         self._google = google(self._urlOpener)
         self._domain = domain = urlParser.getDomain( fuzzableRequest.getURL() )
         self._domain_root = urlParser.getRootDomain( domain )
         
         if self._fast_search:
             self._do_fast_search( domain )
         else:
             self._do_complete_search( domain )
         
         self._tm.join( self )
         self.printUniq( kb.kb.getData( 'fingerGoogle', 'mails' ), None )
         return []
Example #27
    def discover(self, fuzzableRequest):
        '''
        @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
        '''
        result = []
        # This will remove the plugin from the discovery plugins to be run.
        if not self._run:
            raise w3afRunOnce()

        # This plugin will only run one time. 
        self._run = False
        bingSE = bing(self._urlOpener)
        self._domain = domain = urlParser.getDomain(fuzzableRequest.getURL())
        self._domainRoot = urlParser.getRootDomain(domain)

        results = bingSE.getNResults('@'+self._domainRoot, self._resultLimit)

        for res in results:
            targs = (res,)
            self._tm.startFunction(target=self._findAccounts, args=targs, ownerObj=self)

        self._tm.join(self)
        self.printUniq(kb.kb.getData('fingerBing', 'mails'), None)
        return result
Example #28
    def discover(self, fuzzableRequest ):
        '''
        Get the server-status and parse it.
        
        @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
        '''
        res = []
        if not self._exec :
            # This will remove the plugin from the discovery plugins to be run.
            raise w3afRunOnce()
            
        else:
            # Only run once
            self._exec = False
            
            base_url = urlParser.baseUrl( fuzzableRequest.getURL() )
            server_status_url = urlParser.urlJoin(  base_url , 'server-status' )
            response = self._urlOpener.GET( server_status_url, useCache=True )
            
            if not is_404( response ) and response.getCode() not in range(400, 404):
                msg = 'Apache server-status cgi exists. The URL is: "' + response.getURL() + '".'
                om.out.information( msg )
                
                # Create some simple fuzzable requests
                res.extend( self._createFuzzableRequests( response ) )

                # Get the server version
                # <dl><dt>Server Version: Apache/2.2.9 (Unix)</dt>
                for version in re.findall('<dl><dt>Server Version: (.*?)</dt>', response.getBody()):
                    # Save the results in the KB so the user can look at it
                    i = info.info()
                    i.setPluginName(self.getName())
                    i.setURL( response.getURL() )
                    i.setId( response.id )
                    i.setName( 'Apache Server version' )
                    msg = 'The web server has the apache server status module enabled, '
                    msg += 'which discloses the following remote server version: "' + version + '".'
                    i.setDesc( msg )

                    om.out.information(i.getDesc())
                    kb.kb.append( self, 'server', i )
                
                # Now really parse the file and create custom made fuzzable requests
                regex = '<td>.*?<td nowrap>(.*?)</td><td nowrap>.*? (.*?) HTTP/1'
                for domain, path in re.findall(regex, response.getBody() ):
                    
                    if 'unavailable' in domain:
                        domain = urlParser.getDomain( response.getURL() )
                        
                    foundURL = urlParser.getProtocol( response.getURL() ) + '://' + domain + path
                    # Check if the requested domain and the found one are equal.
                    if urlParser.getDomain( foundURL ) == urlParser.getDomain( response.getURL() ):
                        # They are equal, request the URL and create the fuzzable requests
                        tmpRes = self._urlOpener.GET( foundURL, useCache=True )
                        if not is_404( tmpRes ):
                            res.extend( self._createFuzzableRequests( tmpRes ) )
                    else:
                        # This is a shared hosting server
                        self._shared_hosting_hosts.append( domain )
                
                # Now that we are outside the for loop, we can report the possible vulns
                if len( self._shared_hosting_hosts ):
                    v = vuln.vuln()
                    v.setPluginName(self.getName())
                    v.setURL( fuzzableRequest.getURL() )
                    v.setId( response.id )
                    self._shared_hosting_hosts = list( set( self._shared_hosting_hosts ) )
                    v['alsoInHosting'] = self._shared_hosting_hosts
                    v.setDesc( 'The web application under test seems to be in a shared hosting.' )
                    v.setName( 'Shared hosting' )
                    v.setSeverity(severity.MEDIUM)
                    
                    kb.kb.append( self, 'sharedHosting', v )
                    om.out.vulnerability( v.getDesc(), severity=v.getSeverity() )
                
                    msg = 'This list of domains, and the domain of the web application under test,'
                    msg += ' all point to the same server:'
                    om.out.vulnerability(msg, severity=severity.MEDIUM )
                    for url in self._shared_hosting_hosts:
                        om.out.vulnerability('- ' + url, severity=severity.MEDIUM )
                
                # Check if well parsed
                elif 'apache' in response.getBody().lower():
                    msg = 'Couldn\'t find any URLs in the apache server status page. Two things can'
                    msg += ' trigger this:\n    - The Apache web server sent a server-status page'
                    msg += ' that the serverStatus plugin failed to parse or,\n    - The remote '
                    msg += ' web server has no traffic. If you are sure about the first one, please'
                    msg += ' report a bug.'
                    om.out.information( msg )
                    om.out.debug('The server-status body is: "'+response.getBody()+'"')
        
        return res
Example #29
    def _upload_webshell(self, driver, vuln_obj):
        '''
        First, upload any file to the target webroot.
        
        Once I've found the target webroot (or any other location inside the webroot where I can
        write a file) try to upload a webshell and test for execution.
        
        @parameter driver: The database driver to use in order to upload the file.
        @parameter vuln_obj: The vulnerability that we are exploiting.
        
        @return: The webshell URL if the webshell was uploaded, or None if the process failed.
        '''
        upload_success = False
        
        # First, we test if we can upload a file into a directory we can access:
        webroot_dirs = get_webroot_dirs( urlParser.getDomain(vuln_obj.getURL()) )
        for webroot in webroot_dirs:
            
            if upload_success: break
            
            # w3af found a lot of directories, and we are going to use that knowledgeBase
            # because one of the dirs may be writable and one may not!
            for path in self._get_site_directories():
                
                # Create the remote_path
                remote_path = webroot + '/' + path
                
                # Create the filename
                remote_filename = createRandAlNum( 8 ) + '.' + createRandAlNum(3)
                remote_path += '/' + remote_filename
                # And just in case... remove double slashes
                for i in xrange(3): remote_path = remote_path.replace('//', '/')
                
                # Create the content (which will also act as the test_string)
                test_string = content = createRandAlNum(16)
            
                # Create the test URL
                test_url = urlParser.urlJoin(vuln_obj.getURL(), path + '/' + remote_filename )

                if self._upload_file( driver, remote_path, content, test_url, test_string):
                    upload_success = True
                    om.out.console('Successfully wrote a file to the webroot.')
                    break
        
        # We can upload files, and we know where they are uploaded, now we
        # just need to upload a webshell that works in that environment!
        if upload_success:
            
            om.out.console('Trying to write a webshell.')
            
            # Get the extension from the vulnerable script
            extension = urlParser.getExtension( vuln_obj.getURL() )
            
            for file_content, real_extension in shell_handler.get_webshells( extension ):
                
                # Create the variables to upload the file, based on the success of the
                # previous for loop:
                remote_path = remote_path[:remote_path.rfind('/')]
                filename = createRandAlNum( 8 )
                remote_path += '/' + filename + '.' + real_extension
                
                # And now do "the same thing" with the URL
                test_url = test_url[:test_url.rfind('/')]
                test_url += '/' + filename + '.' + real_extension + '?cmd='
                
                # Upload & test
                if self._upload_file( driver, remote_path, file_content, test_url, shell_handler.SHELL_IDENTIFIER):
                    # Complete success!
                    om.out.console('Successfully installed a webshell in the target server!')
                    return test_url
                    
        return None
Example #30
 def discover(self, fuzzableRequest ):
     '''
     @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test.
     '''
     if not self._run:
         # This will remove the plugin from the discovery plugins to be run.
         raise w3afRunOnce()
     else:
         # I will only run this one time. All calls to sharedHosting return the same URLs
         self._run = False
         
         bing_wrapper = bing( self._urlOpener )
         
         domain = urlParser.getDomain( fuzzableRequest.getURL() )
         if is_private_site( domain ):
             msg = 'sharedHosting plugin is not checking for subdomains for domain: '
             msg += domain + ' because it\'s a private address.'
             om.out.debug(msg)
             
         else:
             # Get the ip and do the search
             addrinfo = None
             try:
                 addrinfo = socket.getaddrinfo(domain, 0)
             except:
                 raise w3afException('Could not resolve hostname: ' + domain )
             ip_address_list = [info[4][0] for info in addrinfo]
             ip_address_list = list( set(ip_address_list) )
             
             # This is the best way to search, one by one!
             for ip_address in ip_address_list:
                 results = bing_wrapper.getNResults('ip:'+ ip_address, self._result_limit )
                 
                 results = [ urlParser.baseUrl( r.URL ) for r in results ]
                 results = list( set( results ) )
                 
                 # not vuln by default
                 is_vulnerable = False
                 
                 if len(results) > 1:
                     # We may have something...
                     is_vulnerable = True
                     
                     if len(results) == 2:
                         # Maybe we have this case:
                         # [Mon 09 Jun 2008 01:08:26 PM ART] - http://216.244.147.14/
                         # [Mon 09 Jun 2008 01:08:26 PM ART] - http://www.business.com/
                         # Where www.business.com resolves to 216.244.147.14; so we don't really
                         # have more than one domain in the same server.
                         res0 = socket.gethostbyname( urlParser.getDomain( results[0] ) )
                         res1 = socket.gethostbyname( urlParser.getDomain( results[1] ) )
                         if res0 == res1:
                             is_vulnerable = False
                 
                 if is_vulnerable:
                     severityOfThisVuln = severity.MEDIUM
                     v = vuln.vuln()
                     v.setPluginName(self.getName())
                     v.setURL(fuzzableRequest.getURL())
                     v.setId(1)
                     
                     v['alsoInHosting'] = results
                     msg = 'The web application under test seems to be in a shared hosting. '
                     msg += 'This list of domains, and the domain of the web application under '
                     msg += 'test, all point to the same IP address (%s):\n' % ip_address
                     for url in results:
                         domain = urlParser.getDomain(url)
                         msg += '- %s\n' % url
                         kb.kb.append( self, 'domains', domain)
                     v.setDesc( msg )
                     v.setName( 'Shared hosting' )
                     v.setSeverity(severityOfThisVuln)
                     om.out.vulnerability( msg, severity=severityOfThisVuln )
                     kb.kb.append( self, 'sharedHosting', v )
             
     return []