def _end(self, exc_inst=None, ignore_err=False):
    '''
    This method is called when the process ends normally or by an error.

    @parameter exc_inst: The exception instance (if any) that ended the
        process; it is only written to the debug log here.
    @parameter ignore_err: When True, swallow any exception raised during
        the shutdown sequence instead of re-raising it.
    '''
    try:
        # End the xUrllib (clear the cache) and create a new one, so it can
        # be used by exploit plugins.
        self.uriOpener.end()
        self.uriOpener = xUrllib()

        # Log the exception that brought us here, if there is one.
        if exc_inst:
            om.out.debug(str(exc_inst))

        # Wait for every worker thread to finish, then stop the daemons.
        tm.join(joinAll=True)
        tm.stopAllDaemons()

        # Grep plugins accumulate findings during the whole scan; end()
        # gives them the chance to flush/report before we shut down.
        for plugin in self.plugins.plugins['grep']:
            plugin.end()

        # Also, close the output manager.
        om.out.endOutputPlugins()
    except Exception:
        if not ignore_err:
            raise
    finally:
        # These cleanups must happen no matter how the shutdown went.
        self.status.stop()
        self.progress.stop()

        # Remove all references to plugins from memory
        self.plugins.zero_enabled_plugins()

        # No targets to be scanned.
        cf.cf.save('targets', [])
def __init__(self ): ''' Init some variables and files. Create the URI opener. ''' # Create some directories self._home_directory() self._tmp_directory() # These are some of the most important moving parts in the w3afCore # they basically handle every aspect of the w3af framework: self.strategy = w3af_core_strategy( self ) self.profiles = w3af_core_profiles( self ) self.plugins = w3af_core_plugins( self ) self.status = w3af_core_status() self.target = w3af_core_target() self.progress = progress() # Init some internal variables self._initializeInternalVariables() self.plugins.zero_enabled_plugins() # I init the 404 detection for the whole framework self.uriOpener = xUrllib() fp_404_db = fingerprint_404_singleton() fp_404_db.set_urlopener( self.uriOpener )
def setUp(self):
    '''Prepare a fresh codeDisclosure plugin over a clean framework state.'''
    from core.controllers.coreHelpers.fingerprint_404 import fingerprint_404_singleton
    from core.data.url.xUrllib import xUrllib

    self.plugin = codeDisclosure()

    # Re-initialize the 404 fingerprint singleton and give it its own opener
    fingerprint_db = fingerprint_404_singleton([False, False, False])
    fingerprint_db.set_urlopener(xUrllib())

    # Start each test with an empty result list in the knowledge base
    kb.kb.save('codeDisclosure', 'codeDisclosure', [])
def _do_req_without_cookies(fuzz_req):
    '''
    Replay fuzz_req through a brand new xUrllib instance, so that no
    previously stored cookies are sent with the request.

    @parameter fuzz_req: The fuzzable request to send.
    @return: The HTTP response (not grepped, not cached).
    '''
    # A freshly created opener carries no session cookies
    opener = xUrllib()

    # Resolve the HTTP verb (typically GET or POST) to the matching method
    send = getattr(opener, fuzz_req.getMethod().upper())

    return send(fuzz_req.getURI(),
                fuzz_req.getData(),
                fuzz_req.getHeaders(),
                grep=False,
                cache=False)
def setUp(self):
    """Instantiate the feeds plugin and reset the shared framework state."""
    from core.controllers.coreHelpers.fingerprint_404 import fingerprint_404_singleton
    from core.data.url.xUrllib import xUrllib

    self.plugin = feeds()

    # The 404 detection singleton needs a working URL opener of its own
    not_found_db = fingerprint_404_singleton([False, False, False])
    not_found_db.set_urlopener(xUrllib())

    # Empty any results left behind by a previous test
    kb.kb.save("feeds", "feeds", [])
def _bruteWorker( self, url, combinations ): ''' @parameter url: A string representation of an URL @parameter combinations: A list of tuples with (user,pass) ''' # get instance outside loop... uriOpener = xUrllib() uriOpener.setGrepPlugins( [] ) # So uriOpener._init is not called uriOpener._cacheOpener = '' for combination in combinations: user = combination[0] passwd = combination[1] om.out.debug('[basicAuthBrute] Testing ' + user + '/' + passwd) uriOpener.settings.setBasicAuth( url, user, passwd ) # The next line replaces the uriOpener opener with a new one that has # the basic auth settings configured #IMPORTANT: This line also calls __init__ on all urllib2 handlers, to have in mind: # the localCache clears the cache when you call init... # this creates problem with multithreading uriOpener.settings.buildOpeners() uriOpener._opener = uriOpener.settings.getCustomUrlopen() # This "if" is for multithreading if not self._found or not self._stopOnFirst: try: response = uriOpener.GET( url, cache=False, grep=False ) except w3afException, w3: msg = 'Exception while bruteforcing basic authentication, error message: ' msg += str(w3) om.out.debug( msg ) else: # GET was OK if response.getCode() == 200: self._found = True v = vuln.vuln() v.setId(response.id) v.setPluginName(self.getName()) v.setURL( url ) v.setDesc( 'Found authentication credentials to: "'+ url + '". A correct user and password combination is: ' + user + '/' + passwd) v['user'] = user v['pass'] = passwd v['response'] = response v.setSeverity(severity.HIGH) v.setName( 'Guessable credentials' ) kb.kb.append( self , 'auth' , v ) om.out.vulnerability( v.getDesc(), severity=v.getSeverity() ) break
def setUp(self):
    '''Spawn the proxy daemon and build an urllib2 opener routed through it.'''
    PyMockTestCase.setUp(self)

    # Start the proxy server
    self._proxy = proxy(self.IP, self.PORT, xUrllib(), w3afProxyHandler)
    self._proxy.start()

    # Build the proxy opener
    proxy_address = "http://%s:%s" % (self.IP, self.PORT)
    proxy_support = urllib2.ProxyHandler({"http": proxy_address})
    self.proxy_opener = urllib2.build_opener(proxy_support,
                                             urllib2.HTTPHandler)
def __init__(self, ip, port, urlOpener=None,
             proxyCert='core/controllers/daemons/mitm.crt'):
    '''
    @parameter ip: IP address to bind
    @parameter port: Port to bind
    @parameter urlOpener: The urlOpener that will be used to open the requests
                          that arrive from the browser. A fresh xUrllib is
                          created when omitted.
    @parameter proxyHandler: A class that will know how to handle requests
                             from the browser
    @parameter proxyCert: Proxy certificate to use, this is needed
                          for proxying SSL connections.
    '''
    # BUGFIX: the default used to be "urlOpener=xUrllib()". Python evaluates
    # default arguments once at definition time, so every instance created
    # without an explicit opener silently SHARED one xUrllib object (and it
    # was built as a side effect of merely importing this module). Build it
    # lazily instead; callers that pass an opener are unaffected.
    if urlOpener is None:
        urlOpener = xUrllib()

    proxy.__init__(self, ip, port, urlOpener, w3afLocalProxyHandler, proxyCert)

    # Internal vars
    self._requestQueue = Queue.Queue()
    self._editedRequests = {}
    self._editedResponses = {}

    # User configured parameters
    self._methodsToTrap = []
    # Trap everything by default...
    self._whatToTrap = re.compile(r'.*')
    # ...except static resources (raw string so the escapes are literal)
    self._whatNotToTrap = re.compile(r'.*\.(gif|jpg|png|css|js|ico|swf|axd|tif)$')
    self._trap = False
    self._fixContentLength = True
def _bruteWorker(self, freq, combinations):
    '''
    Try every (user, password) tuple in "combinations" against the login
    form described by "freq"; report working credentials to the kb.

    @parameter freq: A fuzzableRequest
    @parameter combinations: A list of tuples with (user, pass)
    '''
    def _doPOSTWithoutCookies(urlOpener, fuzz_req):
        # POST through the given opener without grepping or caching the
        # response; a fresh opener carries no cookies.
        url = fuzz_req.getURI()
        data = fuzz_req.getData()
        headers = fuzz_req.getHeaders()
        resp = urlOpener.POST(url, data, headers,
                              grepResult=False, useCache=False)
        return resp

    data_container = freq.getDc()
    data_container = self._true_extra_fields(data_container)

    # Ok, now we start with the real bruteforcing!
    for combination in combinations:
        username = combination[0]
        userpwd = combination[1]

        # Remember that we can have password only forms!
        if self._user_field_name is not None:
            data_container[self._user_field_name][0] = username
        data_container[self._passwd_field_name][0] = userpwd
        freq.setDc(data_container)

        # This "if" is for multithreading
        if not self._found or not self._stopOnFirst:
            # TODO: This is a *hack*. This logic shouldn't be implemented
            # in the plugin but in xUrllib,
            urlOpener = xUrllib()
            try:
                resp = _doPOSTWithoutCookies(urlOpener, freq)
            except w3afMustStopOnUrlError:
                return

            # Remove the credentials themselves from the body so that a
            # login page echoing them back doesn't break the matching below.
            body = resp.getBody()
            body = body.replace(username, '').replace(userpwd, '')

            with self._plugin_lock:
                if not self._matchesFailedLogin(body):
                    # Ok, this might be a valid combination.
                    # Now test with a new invalid password to ensure our
                    # previous possible found credentials are valid
                    data_container[self._passwd_field_name][0] = \
                        createRandAlNum(8)
                    freq.setDc(data_container)
                    verif_resp = _doPOSTWithoutCookies(xUrllib(), freq)
                    body = verif_resp.getBody()
                    body = body.replace(username, '').replace(userpwd, '')

                    # The random password MUST fail; otherwise every
                    # password "works" and the previous hit was noise.
                    if self._matchesFailedLogin(body):
                        self._found = True
                        freq_url = freq.getURL()
                        v = vuln.vuln()
                        v.setId(resp.id)
                        v.setPluginName(self.getName())
                        v.setURL(freq.getURL())

                        if self._user_field_name is not None:
                            msg = ('Found authentication credentials to: '
                                   '"%s". A correct user and password combination'
                                   ' is: %s/%s' % (freq_url, username, userpwd))
                        else:
                            # There is no user field!
                            msg = ('Found authentication credentials to: '
                                   '"%s". The correct password is: "%s".'
                                   % (freq_url, userpwd))

                        v.setDesc(msg)
                        v['user'] = username
                        v['pass'] = combination[1]
                        v['response'] = resp
                        v.setSeverity(severity.HIGH)
                        v.setName('Guessable credentials')
                        kb.kb.append(self, 'auth', v)

                        # Save cookies in the 'main' urlOpenet so the rest
                        # of active plugins use them. This is part of the
                        # previously mentioned hack.
                        headers = resp.getHeaders()
                        additionalHeaders = []
                        for header_name in headers:
                            if 'cookie' in header_name.lower():
                                additionalHeaders.append(
                                    (header_name, headers[header_name]))
                        self._urlOpener.settings.setHeadersList(
                            additionalHeaders)

                        om.out.vulnerability(msg, severity=severity.HIGH)
                        return
for i in xrange(60):
    # NOTE(review): this loop is the tail of a method whose "def" is not
    # visible in this chunk; it busy-waits (~6s max) for an edited response
    # keyed by the id() of the original fuzzable request.
    time.sleep(0.1)
    if id(originalFuzzableRequest) in self._editedResponses:
        res = self._editedResponses[ id(originalFuzzableRequest) ]
        del self._editedResponses[ id(originalFuzzableRequest) ]
        # Now we return it... an Exception stored as the "response" is
        # re-raised in this (the caller's) context.
        if isinstance(res, Exception):
            raise res
        else:
            return res

# I looped and got nothing!
raise w3afException('Timed out waiting for response from remote server.')


# Manual smoke test: run a local proxy for ~10 seconds and echo any
# trapped request (and the raw response to re-sending it) to stdout.
if __name__ == '__main__':
    lp = localproxy('127.0.0.1', 8080, xUrllib() )
    lp.start2()

    for i in xrange(10):
        time.sleep(1)
        tr = lp.getTrappedRequest()
        if tr:
            print tr
            print lp.sendRawRequest( tr, tr.dumpRequestHead(), tr.getData() )
        else:
            print 'Waiting...'

    print 'Exit!'
    lp.stop()
    print 'bye bye...'
'''
import random
import re
import unittest

from core.data.searchEngines.googleSearchEngine import googleSearchEngine, \
    GAjaxSearch, GStandardSearch, GMobileSearch, GSetSearch
from core.data.url.httpResponse import httpResponse
from core.data.url.xUrllib import xUrllib

# Global vars
HEADERS = {'User-Agent':'Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 6.0)'}

# TODO: This needs to be mocked up!
# All tests share one live opener; GET results are cached, not grepped.
URL_OPENER = xUrllib()
URL_OPEN_FUNC = lambda url: URL_OPENER.GET(url, headers=HEADERS,
                                           useCache=True, grepResult=False)
URL_REGEX = re.compile('((http|https)://([a-zA-Z0-9_:@\-\./]*?)/[^ \n\r\t"\'<>]*)')


class test_googleSearchEngine(unittest.TestCase):

    def setUp(self):
        # Pick a random (query, expected minimum result count) pair so
        # repeated runs exercise different searches.
        self.query, self.limit = random.choice([('big bang theory', 200),
                                                ('two and half man', 37),
                                                ('doctor house', 55)])
        self.gse = googleSearchEngine(URL_OPENER)

    def test_get_links_results_len(self):
        # Len of results must be ge. than limit
        # NOTE(review): method body continues past this chunk.
def setUp(self):
    '''Create a fresh URL opener, a unique session name and the temp dir.'''
    self.uri_opener = xUrllib()

    # Timestamp the session name so runs never collide on disk
    stamp = time.strftime('%Y-%b-%d_%H-%M-%S')
    cf.cf.save('sessionName', 'defaultSession' + '-' + stamp)

    create_temp_dir()
from measure import show_stats NUM_REQUESTS = 5000 try: from core.data.url.xUrllib import xUrllib except: print 'This script needs to be located inside the w3af trunk directory to work' sys.exit(1) from core.data.parsers.urlParser import url_object import core.data.kb.config as cf cf.cf.save('sessionName', 'speed') uri_opener = xUrllib() start_time = time.time() for i in xrange(NUM_REQUESTS): url = url_object( 'http://localhost/' + str(i) ) uri_opener.GET( url, useCache=False) end_time = time.time() show_stats( start_time, end_time, NUM_REQUESTS)
    time.sleep(0.1)
    # NOTE(review): these lines are the tail of a wait loop inside a method
    # whose "def" and "for" header are outside this chunk; it polls for an
    # edited response keyed by the id() of the original fuzzable request.
    if id(originalFuzzableRequest) in self._editedResponses:
        res = self._editedResponses[id(originalFuzzableRequest)]
        del self._editedResponses[id(originalFuzzableRequest)]
        # Now we return it... an Exception stored as the "response" is
        # re-raised in this (the caller's) context.
        if isinstance(res, Exception):
            raise res
        else:
            return res

# I looped and got nothing!
raise w3afException("Timed out waiting for response from remote server.")


# Manual smoke test: run a local proxy for ~10 seconds and echo any
# trapped request (and the raw response to re-sending it) to stdout.
if __name__ == "__main__":
    lp = localproxy("127.0.0.1", 8080, xUrllib())
    lp.start2()

    for i in xrange(10):
        time.sleep(1)
        tr = lp.getTrappedRequest()
        if tr:
            print tr
            print lp.sendRawRequest(tr, tr.dumpRequestHead(), tr.getData())
        else:
            print "Waiting..."

    print "Exit!"
    lp.stop()
    print "bye bye..."