def processProxyMessage(self, messageReference, messageIsRequest, remoteHost,
                        remotePort, serviceIsHttps, httpMethod, url,
                        resourceType, statusCode, responseContentType,
                        message, interceptAction):
    # Burp proxy hook: route intercepted requests through the loaded
    # evasion plugins and intercepted text responses through the grep
    # plugins.  Returns the (possibly rebuilt) message back to Burp.
    curl = "%s://%s:%d%s" % ("https" if serviceIsHttps else "http", \
        remoteHost, remotePort, url)
    headers = self.mCallBacks.getHeaders(message)
    if messageIsRequest:
        # Need to build a urllib2 Request object
        wRequest = RequestMessage(message, headers)
        request = urllib2.Request(curl, wRequest.data, wRequest.headers)
        evasion = False
        for plugin in self.loadedPlugins:
            if plugin.getType() == "evasion":
                evasion = True
                request = self.processEvasionPlugin(plugin, request)
        if evasion:
            # At least one evasion plugin modified the request: convert
            # it back to Burp's native request format.
            return self.createBurpRequest(request, httpMethod)
    elif responseContentType and responseContentType.count("text"):
        # Few objets needed by w3af httpResponse object
        wMessage = Message(message, headers)
        response = httpResponse(int(statusCode), wMessage.message, \
            wMessage.headers, curl, curl, wMessage.msg,
            id=messageReference + 1)
        for plugin in self.loadedPlugins:
            if plugin.getType() == "grep":
                self.processGrepPlugin(plugin, response)
    return message
def store_in_cache(request, response):
    '''
    Build a HistoryItem from *request*/*response* and persist it.
    '''
    hi = HistoryItem()

    # Set the request.  Copy the headers dict so the request object is
    # not mutated when merging in urllib2's unredirected headers.
    headers = dict(request.headers)
    headers.update(request.unredirected_hdrs)
    req = createFuzzableRequestRaw(method=request.get_method(),
                                   url=request.url_object,
                                   postData=str(request.get_data() or ''),
                                   headers=headers)
    hi.request = req

    # Set the response.  NOTE: resp.read() consumes the body stream.
    resp = response
    code, msg, hdrs, url, body, id = (resp.code, resp.msg, resp.info(),
                                      resp.geturl(), resp.read(), resp.id)
    # BUGBUG: This is where I create/log the responses that always have
    # 0.2 as the time!
    url_instance = url_object(url)
    resp = httpResponse.httpResponse(code, body, hdrs, url_instance,
                                     request.url_object, msg=msg, id=id,
                                     alias=gen_hash(request))
    hi.response = resp

    # Now save them.  Re-raise Ctrl+C with a bare ``raise`` so the
    # original traceback is preserved (``raise k`` would reset it).
    try:
        hi.save()
    except KeyboardInterrupt:
        raise
def test_find(self):
    """Populate the history with 500 items and exercise find() filters."""
    target_id = random.randint(1, 499)
    url = url_object('http://w3af.org/a/b/foobar.php?foo=123')
    tag_value = createRandAlNum(10)

    for i in xrange(0, 500):
        request = FuzzReq(url, dc={'a': ['1']})
        # Exactly one item gets a 302, a mark and a tag.
        status = 302 if i == target_id else 200
        response = httpResponse(status, '<html>',
                                {'Content-Type': 'text/html'}, url, url)
        item = HistoryItem()
        item.request = request
        response.setId(i)
        item.response = response
        if i == target_id:
            item.toggleMark()
            item.updateTag(tag_value)
        item.save()

    finder = HistoryItem()
    self.assertEqual(len(finder.find([('tag', "%" + tag_value + "%", 'like')])), 1)
    self.assertEqual(len(finder.find([('code', 302, '=')])), 1)
    self.assertEqual(len(finder.find([('mark', 1, '=')])), 1)
    self.assertEqual(len(finder.find([('has_qs', 1, '=')])), 500)
    self.assertEqual(len(finder.find([('has_qs', 1, '=')], resultLimit=10)), 10)
    results = finder.find([('has_qs', 1, '=')], resultLimit=1,
                          orderData=[('id', 'desc')])
    self.assertEqual(results[0].id, 499)
    search_data = [('id', target_id + 1, "<"),
                   ('id', target_id - 1, ">")]
    self.assertEqual(len(finder.find(search_data)), 1)
def processProxyMessage(self, messageReference, messageIsRequest, remoteHost,
                        remotePort, serviceIsHttps, httpMethod, url,
                        resourceType, statusCode, responseContentType,
                        message, interceptAction):
    """Burp proxy hook: feed requests to evasion plugins and text
    responses to grep plugins, then hand the message back to Burp."""
    scheme = "https" if serviceIsHttps else "http"
    curl = "%s://%s:%d%s" % (scheme, remoteHost, remotePort, url)
    headers = self.mCallBacks.getHeaders(message)

    if messageIsRequest:
        # Build a urllib2 Request so evasion plugins can mangle it.
        wrapped = RequestMessage(message, headers)
        request = urllib2.Request(curl, wrapped.data, wrapped.headers)
        evasion_plugins = [p for p in self.loadedPlugins
                           if p.getType() == "evasion"]
        for plugin in evasion_plugins:
            request = self.processEvasionPlugin(plugin, request)
        if evasion_plugins:
            # Convert the mangled request back to Burp's native format.
            return self.createBurpRequest(request, httpMethod)
    elif responseContentType and responseContentType.count("text"):
        # Wrap the raw message in the objects w3af's httpResponse needs.
        wrapped = Message(message, headers)
        response = httpResponse(int(statusCode), wrapped.message,
                                wrapped.headers, curl, curl, wrapped.msg,
                                id=messageReference + 1)
        for plugin in self.loadedPlugins:
            if plugin.getType() == "grep":
                self.processGrepPlugin(plugin, response)

    return message
def _new_no_content_resp(self, uri, log_it=False):
    '''
    Return a new NO_CONTENT httpResponse object. Optionally call the
    subscribed log handlers

    @param uri: URI string or request object
    @param log_it: Boolean that indicated whether to log request and response.
    '''
    # accept a URI or a Request object
    if isinstance(uri, url_object):
        req = HTTPRequest(uri)
    elif isinstance(uri, HTTPRequest):
        req = uri
    else:
        # Fixed: the message used to name the wrong method
        # (_new_content_resp) and read "of ... of" instead of "or".
        msg = 'The uri parameter of xUrllib._new_no_content_resp() has to be'
        msg += ' of HTTPRequest or url_object type.'
        raise Exception(msg)

    # Work
    no_content_response = httpResponse(NO_CONTENT, '', {}, uri, uri,
                                       msg='No Content')

    if log_it:
        # This also assigns the id to both objects.
        logHandler.logHandler().http_response(req, no_content_response)

    if no_content_response.id is None:
        # Not logged above: take the next id from the shared sequence.
        no_content_response.id = seq_gen.inc()

    return no_content_response
def _send(self, req, cache=False, useMultipart=False, grep=True): ''' Actually send the request object. @param req: The HTTPRequest object that represents the request. @return: An httpResponse object. ''' # This is the place where I hook the pause and stop feature # And some other things like memory usage debugging. self._callBeforeSend() # Sanitize the URL self._checkURI(req) # Evasion original_url = req._Request__original original_url_inst = req.url_object req = self._evasion(req) start_time = time.time() res = None req.get_from_cache = cache try: res = self._opener.open(req) except urllib2.HTTPError, e: # We usually get here when response codes in [404, 403, 401,...] msg = '%s %s returned HTTP code "%s" - id: %s' % \ (req.get_method(), original_url, e.code, e.id) if hasattr(e, 'from_cache'): msg += ' - from cache.' om.out.debug(msg) # Return this info to the caller code = int(e.code) info = e.info() geturl_instance = url_object(e.geturl()) read = self._readRespose(e) httpResObj = httpResponse(code, read, info, geturl_instance, original_url_inst, id=e.id, time=time.time()-start_time, msg=e.msg, charset=getattr(e.fp, 'encoding', None)) # Clear the log of failed requests; this request is done! req_id = id(req) if req_id in self._errorCount: del self._errorCount[req_id] # Reset errors counter self._zeroGlobalErrorCount() if grep: self._grep(req, httpResObj) else: om.out.debug('No grep for: "%s", the plugin sent ' 'grep=False.' % geturl_instance) return httpResObj
def _sendEnd(self):
    """
    Sends an HTML indicating that w3af spiderMan plugin has finished its
    execution.
    """
    body = "<html>spiderMan plugin finished its execution.</html>"
    response = httpResponse.httpResponse(200, body,
                                         {"Content-Length": str(len(body))},
                                         TERMINATE_URL, TERMINATE_URL)
    self._sendToBrowser(response)
def test_no_code_disclosure_blank(self):
    """An empty body must not be reported as code disclosure."""
    url = url_object('http://www.w3af.com/')
    response = httpResponse(200, '', {'content-type': 'text/html'},
                            url, url)
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    findings = kb.kb.getData('codeDisclosure', 'codeDisclosure')
    self.assertTrue(len(findings) == 0)
def test_ASP_code_disclosure(self):
    """An ASP server-side snippet in the body must be flagged."""
    url = url_object('http://www.w3af.com/')
    page = 'header <% Response.Write("Hello World!") %> footer'
    response = httpResponse(200, page, {'content-type': 'text/html'},
                            url, url)
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    findings = kb.kb.getData('codeDisclosure', 'codeDisclosure')
    self.assertTrue(len(findings) == 1)
def test_none(self):
    """Tags that merely resemble object/applet must not be reported."""
    url = url_object('http://www.w3af.com/')
    page = '<an object="1"> <or applet=2> <apple>'
    response = httpResponse(200, page, {'content-type': 'text/html'},
                            url, url)
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    self.assertEquals(len(kb.kb.getData('objects', 'objects')), 0)
def test_none(self):
    """A form without a real file input must not be reported."""
    url = url_object('http://www.w3af.com/')
    page = 'header <form><noinput type="file"></form> footer'
    response = httpResponse(200, page, {'content-type': 'text/html'},
                            url, url)
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    self.assertEquals(len(kb.kb.getData('fileUpload', 'fileUpload')), 0)
def setUp(self):
    #
    # Init
    #
    self.url_str = 'http://localhost:631/'
    self.url_inst = url_object(self.url_str)

    # Warm up the is_404 fingerprint with an empty response.  The
    # original code built the identical httpResponse twice; the first,
    # unused instance was dead code and has been removed.
    try:
        spam = httpResponse(200, '', {}, self.url_inst, self.url_inst)
        is_404(spam)
    except:
        # Best effort: is_404 may fail outside a full scan environment.
        pass

    self._w3af = core.controllers.w3afCore.w3afCore()
    self._plugins = []
    for pname in self._w3af.getPluginList('grep'):
        self._plugins.append(self._w3af.getPluginInstance(pname, 'grep'))
def test_no_feeds(self):
    """A tag that is not a real feed marker must not be reported."""
    url = url_object("http://www.w3af.com/")
    page = 'header <nofeed version="3" foo="4"> footer'
    response = httpResponse(200, page, {"content-type": "text/html"},
                            url, url)
    request = fuzzableRequest(url, method="GET")
    self.plugin.grep(request, response)
    self.assertEquals(len(kb.kb.getData("feeds", "feeds")), 0)
def test_PHP_code_disclosure(self):
    """A short-open-tag PHP snippet in the body must be flagged."""
    body = 'header <? echo $a; ?> footer'
    url = url_object('http://www.w3af.com/')
    headers = {'content-type': 'text/html'}
    response = httpResponse(200, body, headers, url, url)
    # Consistency: build the request like the sibling tests do, via the
    # constructor, instead of calling the setURL()/setMethod() mutators.
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    self.assertTrue(
        len(kb.kb.getData('codeDisclosure', 'codeDisclosure')) == 1)
def _sendEnd( self ):
    '''
    Sends an HTML indicating that w3af spiderMan plugin has finished its
    execution.
    '''
    page = '<html>spiderMan plugin finished its execution.</html>'
    hdrs = {'Content-Length': str(len(page))}
    terminate_resp = httpResponse.httpResponse(200, page, hdrs,
                                               TERMINATE_URL,
                                               TERMINATE_URL)
    self._sendToBrowser(terminate_resp)
def test_no_version(self):
    """An <rss> tag without a version attribute is reported as unknown."""
    url = url_object("http://www.w3af.com/")
    page = 'header <rss foo="3"> footer'
    response = httpResponse(200, page, {"content-type": "text/html"},
                            url, url)
    request = fuzzableRequest(url, method="GET")
    self.plugin.grep(request, response)

    feeds = kb.kb.getData("feeds", "feeds")
    self.assertEquals(len(feeds), 1)
    info = feeds[0]
    self.assertTrue("RSS" in info.getDesc())
    self.assertTrue("unknown" in info.getDesc())
def test_save_load(self):
    """An item saved under a random id can be loaded back intact."""
    item_id = random.randint(1, 499)
    url = url_object('http://w3af.com/a/b/c.php')
    request = FuzzReq(url, dc={'a': ['1']})
    response = httpResponse(200, '<html>',
                            {'Content-Type': 'text/html'}, url, url)

    saved = HistoryItem()
    saved.request = request
    response.setId(item_id)
    saved.response = response
    saved.save()

    loaded = HistoryItem()
    loaded.load(item_id)
    self.assertEqual(saved.request, loaded.request)
    self.assertEqual(saved.response.body, loaded.response.body)
def test_mark(self):
    """toggleMark() on one of 500 items survives a save/load cycle."""
    mark_id = random.randint(1, 499)
    url = url_object('http://w3af.org/a/b/c.php')

    for i in xrange(0, 500):
        request = FuzzReq(url, dc={'a': ['1']})
        response = httpResponse(200, '<html>',
                                {'Content-Type': 'text/html'}, url, url)
        item = HistoryItem()
        item.request = request
        response.setId(i)
        item.response = response
        if i == mark_id:
            item.toggleMark()
        item.save()

    loaded = HistoryItem()
    loaded.load(mark_id)
    self.assertTrue(loaded.mark)
def test_delete(self):
    """Deleting a saved item makes a subsequent read() fail."""
    item_id = random.randint(1, 499)
    url = url_object('http://w3af.com/a/b/c.php')
    request = FuzzReq(url, dc={'a': ['1']})
    response = httpResponse(200, '<html>',
                            {'Content-Type': 'text/html'}, url, url)

    item = HistoryItem()
    item.request = request
    response.setId(item_id)
    item.response = response
    item.save()
    item.delete(item_id)

    # read() is expected to fail once the row is gone.
    try:
        loaded = item.read(item_id)
    except:
        loaded = None
    self.assertEqual(loaded, None)
def test_applet(self):
    """An <APPLET> element must be reported under objects/applet."""
    page = '''header
    <APPLET code="XYZApp.class" codebase="html/" align="baseline"
        width="200" height="200">
        <PARAM name="model" value="models/HyaluronicAcid.xyz">
            No Java 2 SDK, Standard Edition v 1.4.2 support for APPLET!!
    </APPLET>
    footer'''
    url = url_object('http://www.w3af.com/')
    response = httpResponse(200, page, {'content-type': 'text/html'},
                            url, url)
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)

    applets = kb.kb.getData('objects', 'applet')
    self.assertEquals(len(applets), 1)
    self.assertTrue('"applet"' in applets[0].getDesc())
def test_object(self):
    """An <OBJECT> element must be reported under objects/object."""
    page = '''header
    <OBJECT
        classid="clsid:8AD9C840-044E-11D1-B3E9-00805F499D93"
        width="200" height="200">
        <PARAM name="code" value="Applet1.class">
    </OBJECT>
    footer'''
    url = url_object('http://www.w3af.com/')
    response = httpResponse(200, page, {'content-type': 'text/html'},
                            url, url)
    request = fuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)

    objects = kb.kb.getData('objects', 'object')
    self.assertEquals(len(objects), 1)
    self.assertTrue('"object"' in objects[0].getDesc())
def profile_me():
    '''
    To be profiled
    '''
    # NOTE(review): this function references ``self`` but declares no
    # ``self`` parameter -- it only works where ``self`` is in scope
    # (e.g. as a closure inside a method).  Confirm the intended context.
    for _ in xrange(1):
        for counter in xrange(1, 5):
            file_name = 'test-' + str(counter) + '.html'
            file_path = os.path.join('plugins', 'tests', 'grep', file_name)

            # Close the fixture explicitly; the original leaked the
            # handle via ``file(file_path).read()``.
            fixture = open(file_path)
            try:
                body = fixture.read()
            finally:
                fixture.close()

            response = httpResponse(200, body,
                                    {'Content-Type': 'text/html'},
                                    url_object(self.url_str + str(counter)),
                                    url_object(self.url_str + str(counter)))

            request = fuzzableRequest(self.url_inst)
            for pinst in self._plugins:
                pinst.grep(request, response)
def http_response(self, request, response):
    """Let the loaded mangle plugins rewrite the response before it is
    handed back to urllib2."""
    # Nothing to do when no plugins are loaded or the connection socket
    # is already gone.
    if not len(self._pluginList) or response._connection.sock is None:
        return response

    # Create the httpResponse object the plugins operate on.
    code, msg, hdrs = response.code, response.msg, response.info()
    url_instance = url_object(response.geturl())
    body = response.read()
    # Id is not here, the mangle is done BEFORE logging
    # id = response.id
    httpRes = httpResponse.httpResponse(code, body, hdrs, url_instance,
                                        request.url_object, msg=msg)

    for plugin in self._pluginList:
        plugin.mangleResponse(httpRes)

    return self._httpResponse2httplib(response, httpRes)
def test_tag(self):
    """updateTag() on one of ~500 items survives a save/load cycle."""
    tag_id = random.randint(501, 999)
    tag_value = createRandAlNum(10)
    url = url_object('http://w3af.org/a/b/c.php')

    for i in xrange(501, 1000):
        request = FuzzReq(url, dc={'a': ['1']})
        response = httpResponse(200, '<html>',
                                {'Content-Type': 'text/html'}, url, url)
        item = HistoryItem()
        item.request = request
        response.setId(i)
        item.response = response
        if i == tag_id:
            item.updateTag(tag_value)
        item.save()

    loaded = HistoryItem()
    loaded.load(tag_id)
    self.assertEqual(loaded.tag, tag_value)
def _new_no_content_resp(self, uri, log_it=False):
    '''
    Return a new NO_CONTENT httpResponse object. Optionally call the
    subscribed log handlers

    @param uri: URI string or request object
    @param log_it: Boolean that indicated whether to log request and response.
    '''
    nc_resp = httpResponse(NO_CONTENT, '', {}, uri, uri, msg='No Content')

    if log_it:
        # accept a URI or a Request object
        if isinstance(uri, basestring):
            req = HTTPRequest(uri)
        else:
            req = uri

        # This also assigns the id to both objects.
        logHandler.logHandler().http_response(req, nc_resp)
    else:
        # Not logged: take the next id from the shared sequence so the
        # response still gets a unique id.
        nc_resp.id = seq_gen.inc()

    return nc_resp
def http_response(self, request, response):
    # Run every loaded mangle plugin over the response before it is
    # handed back to urllib2.
    if len(self._pluginList):
        # Create the httpResponse object
        code, msg, hdrs = response.code, response.msg, response.info()
        url = response.geturl()
        body = response.read()
        # Id is not here, the mangle is done BEFORE logging
        # id = response.id
        httpRes = httpResponse.httpResponse(code, body, hdrs, url, url,
                                            msg=msg)

        for plugin in self._pluginList:
            plugin.mangleResponse(httpRes)

        if response._connection.sock is None:
            # This fixes bug #1982106
            # https://sourceforge.net/tracker/index.php?func=detail&aid=1982106&group_id=170274&atid=853652
            # Returning None is like saying "I don't know what to do with this, let the next handler manage it".
            ### FIXME: Does this work?
            return None
        else:
            response = self._httpResponse2httplib(response, httpRes)

    return response
def _log_request_response( self, request, response ):
    '''
    Send the request and the response to the output manager.
    '''
    method = request.get_method()
    url = request.get_full_url()
    postData = request.get_data()

    # Copy before merging: the original code aliased ``request.headers``
    # and wrote the unredirected headers into it, mutating the request
    # object as a side effect of logging.
    headers = dict(request.headers)
    headers.update(request.unredirected_hdrs)

    fr = createFuzzableRequestRaw(method, url, postData, headers)

    if isinstance(response, httpResponse.httpResponse):
        res = response
    else:
        # Wrap the raw urllib2 response in a w3af httpResponse.
        code, msg, hdrs = response.code, response.msg, response.info()
        url = response.geturl()
        body = response.read()
        id = response.id
        # BUGBUG: This is where I create/log the responses that always have 0.2 as the time!
        res = httpResponse.httpResponse( code, body, hdrs, url, url,
                                         msg=msg, id=id)

    om.out.logHttp( fr, res )
def _log_request_response(self, request, response):
    '''
    Send the request and the response to the output manager.
    '''
    # Merge the headers urllib2 keeps separately into a fresh dict.
    merged_headers = dict(request.headers)
    merged_headers.update(request.unredirected_hdrs)
    fr = createFuzzableRequestRaw(method=request.get_method(),
                                  url=request.url_object,
                                  postData=request.get_data(),
                                  headers=merged_headers)

    if isinstance(response, httpResponse.httpResponse):
        res = response
    else:
        # Wrap the raw urllib2 response in a w3af httpResponse.
        code, msg, hdrs = response.code, response.msg, response.info()
        url = response.geturl()
        body = response.read()
        id = response.id
        # BUGBUG: This is where I create/log the responses that always have 0.2 as the time!
        url_instance = url_object( url )
        res = httpResponse.httpResponse(code, body, hdrs,
                                        request.url_object, url_instance,
                                        msg=msg, id=id)

    om.out.logHttp(fr, res)
screen_width = gtk.gdk.screen_width() screeen_height = gtk.gdk.screen_height() pos_x = treeview.get_bin_window().get_origin()[0] + event.x - popup_width/2 if pos_x < 0: pos_x = 0 elif pos_x + popup_width > screen_width: pos_x = screen_width - popup_width pos_y = cell_y_ + 3 if pos_y + popup_height > screeen_height: pos_y = cell_y - 3 - popup_height return (pos_x , pos_y) def main(): gtk.main() if __name__ == "__main__": # We create the data data = [ httpResponse(200, 'my data1 looks like this and has no errors', {}, 'http://a/index.html', 'http://a/index.html', id=1), httpResponse(200, 'errors? i like errors like this one: SQL', {}, 'http://a/index.html', 'http://a/index.html', id=2), httpResponse(200, 'my data is really happy', {}, 'http://a/index.html', 'http://a/index.html', id=3), httpResponse(200, 'my data1 loves me', {}, 'http://a/index.html', 'http://a/index.html', id=4), httpResponse(200, 'my data likes me', {}, 'http://a/index.html', 'http://a/index.html', id=5) ] cl_win = clusterCellWindow( data=data ) main()
def _build_http_response(url, body_content, headers=None):
    """
    Build a 200 httpResponse for *url* carrying *body_content*.

    @param url: url_object used as both the response URL and redirect URL.
    @param body_content: Response body string.
    @param headers: Optional response headers; a text/html content-type
                    is added when none is present.

    The original signature used a mutable default (``headers={}``) and
    then wrote into it, so the shared default dict was silently mutated
    across calls.  ``None`` plus a fresh dict fixes that; callers that
    pass their own dict see unchanged behavior.
    """
    if headers is None:
        headers = {}
    if "content-type" not in headers:
        headers["content-type"] = "text/html"
    return httpResponse(200, body_content, headers, url, url,
                        charset="utf-8")
res.id) else: msg = '%s %s with data: "%s" returned HTTP code "%s" - id: %s'\ % (req.get_method(), original_url, urllib.unquote_plus(rdata), res.code, res.id) if hasattr(res, 'from_cache'): msg += ' - from cache.' om.out.debug(msg) code = int(res.code) info = res.info() geturl = res.geturl() read = self._readRespose(res) httpResObj = httpResponse(code, read, info, geturl, original_url, id=res.id, time=time.time() - start_time, msg=res.msg) # Let the upper layers know that this response came from the local cache. if isinstance(res, CachedResponse): httpResObj.setFromCache(True) # Clear the log of failed requests; this request is done! req_id = id(req) if req_id in self._errorCount: del self._errorCount[req_id] self._zeroGlobalErrorCount() if grepResult: self._grepResult(req, httpResObj) else: om.out.debug('No grep for : ' + geturl + ' , the plugin sent grepResult=False.')