def test_render_with_unicode_control_chars(self):
    """Control characters in the description are encoded as <character/> nodes."""
    _id = 2
    desc = ('This is a long description that contains some special'
            ' unicode control characters such as \f and \x09')

    vuln = MockVuln(_id=_id)
    vuln.set_desc(desc)

    url = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(url, data='a=1')
    request.set_headers(Headers([('User-Agent', 'w3af')]))

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(_id)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    finding = Finding(xml_file()._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # Raw control characters must be gone, replaced by <character/> nodes
    self.assertNotIn('unicode control characters such as \f and \x09', xml)
    self.assertIn('unicode control characters such as <character code="000c"/>'
                  ' and <character code="0009"/>', xml)
    self.assertValidXML(xml)
def setUp(self):
    """Create the html_file plugin and seed the history DB with two transactions."""
    super(TestHTMLRendering, self).setUp()

    self.plugin = self.w3afcore.plugins.get_plugin_inst('output',
                                                        'html_file')

    HistoryItem().init()

    # (response id, url, post-data, response body)
    transactions = [(1, 'http://w3af.com/a/b/c.php', 'a=1', '<html>'),
                    (4, 'http://w3af.com/foo.py', 'text=xss',
                     '<html>empty</html>')]

    for response_id, url_str, post_data, body in transactions:
        url = URL(url_str)

        request = HTTPRequest(url, data=post_data)

        response = HTTPResponse(200, body,
                                Headers([('Content-Type', 'text/html')]),
                                url, url)
        response.set_id(response_id)

        history = HistoryItem()
        history.request = request
        history.response = response
        history.save()
def test_render_attr_with_special_chars(self):
    """
    XML special characters in the vulnerability name must be entity-encoded
    when rendered into the `name` attribute.

    Bug fix: the expected string was identical to the raw name that is
    asserted NOT to be present right above, so the two assertions could
    never both pass — the entity references had been lost from the
    expected value.
    """
    _id = 2
    name = 'A long description with special characters: <&">'

    vuln = MockVuln(_id=_id)
    vuln.set_name(name)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)
    res.set_id(_id)

    h1 = HistoryItem()
    h1.request = request
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # The raw name must not appear verbatim...
    self.assertNotIn(name, xml)
    # ...only its entity-encoded form (markupsafe escapes `"` as &#34;)
    self.assertIn('A long description with special characters:'
                  ' &lt;&amp;&#34;&gt;', xml)
    self.assertValidXML(xml)
def test_render_attr_with_special_chars(self):
    """
    The vulnerability name with XML special characters must be rendered
    entity-encoded.

    Bug fix: assertIn() used the exact same raw literal that assertNotIn()
    rejects one line above — impossible to satisfy. The entity-encoded
    expected value is restored.
    """
    _id = 2
    name = 'A long description with special characters: <&">'

    vuln = MockVuln(_id=_id)
    vuln.set_name(name)

    url = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(url, data='a=1')
    request.set_headers(Headers([('User-Agent', 'w3af')]))

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(_id)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    finding = Finding(xml_file()._get_jinja2_env(), vuln)
    xml = finding.to_string()

    self.assertNotIn(name, xml)
    # markupsafe / jinja2 autoescape emits &#34; for the double quote
    self.assertIn(
        'A long description with special characters: &lt;&amp;&#34;&gt;',
        xml)
    self.assertValidXML(xml)
def modify_request(self, request):
    """
    Mangles the request

    :param request: HTTPRequest instance that is going to be modified by
                    the evasion plugin
    :return: The modified request
    """
    # Mangle the postdata
    data = str(request.get_data())
    if data:
        try:
            # Only mangle the postdata if it is a url encoded string
            parse_qs(data)
        except Exception:
            # Not url-encoded: leave the body untouched.
            # NOTE: narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer silently swallowed.
            pass
        else:
            data = '\x00' + data

            # The body changed, keep content-length in sync
            headers_copy = copy.deepcopy(request.headers)
            headers_copy['content-length'] = str(len(data))

            request = HTTPRequest(request.url_object, data, headers_copy,
                                  request.get_origin_req_host(),
                                  retries=request.retries_left)

    return request
def test_render_with_unicode_control_chars(self):
    """The XML renderer must replace control chars with <character/> elements."""
    vuln_id = 2
    description = ('This is a long description that contains some special'
                   ' unicode control characters such as \f and \x09')

    mock_vuln = MockVuln(_id=vuln_id)
    mock_vuln.set_desc(description)

    target = URL('http://w3af.com/a/b/c.php')

    http_request = HTTPRequest(target, data='a=1')
    http_request.set_headers(Headers([('User-Agent', 'w3af')]))

    http_response = HTTPResponse(200, '<html>',
                                 Headers([('Content-Type', 'text/html')]),
                                 target, target)
    http_response.set_id(vuln_id)

    history = HistoryItem()
    history.request = http_request
    history.response = http_response
    history.save()

    finding = Finding(xml_file()._get_jinja2_env(), mock_vuln)
    xml = finding.to_string()

    self.assertNotIn('unicode control characters such as \f and \x09', xml)
    self.assertIn('unicode control characters such as'
                  ' <character code="000c"/> and <character code="0009"/>',
                  xml)
    self.assertValidXML(xml)
def test_render_with_special_chars(self):
    """
    XML special characters in the description must be entity-encoded by
    jinja2's autoescape.

    Bug fix: the expected string was identical to the raw string checked
    with assertNotIn() right above (the entity references were lost), so
    the assertion pair could never both pass.
    """
    _id = 2
    desc = ('This is a long description that contains some special'
            ' characters such as <, & and > which MUST be encoded'
            ' by jinja2.')

    vuln = MockVuln(_id=_id)
    vuln.set_desc(desc)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)
    res.set_id(_id)

    h1 = HistoryItem()
    h1.request = request
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # The raw form must be gone...
    self.assertNotIn('such as <, & and > which MUST', xml)
    # ...and the entity-encoded form must be present
    self.assertIn('such as &lt;, &amp; and &gt; which MUST', xml)
    self.assertValidXML(xml)
def modify_request(self, request):
    """
    Mangles the request

    :param request: HTTPRequest instance that is going to be modified by
                    the evasion plugin
    :return: The modified request
    """
    # Mangle the postdata
    data = str(request.get_data())
    if data:
        try:
            # Only mangle the postdata if it is a url encoded string
            parse_qs(data)
        except Exception:
            # Narrowed from a bare `except:` which also caught SystemExit
            # and KeyboardInterrupt; non-urlencoded bodies pass through
            pass
        else:
            data = "\x00" + data

            # Update content-length to match the prepended NULL byte
            headers_copy = copy.deepcopy(request.headers)
            headers_copy["content-length"] = str(len(data))

            request = HTTPRequest(
                request.url_object,
                data,
                headers_copy,
                request.get_origin_req_host(),
                retries=request.retries_left
            )

    return request
def test_low_level_with_cookie_jar(self):
    """Cookies loaded from a Mozilla cookie jar are only sent when cookies=True."""
    # IMPORTANT NOTE: Please remember that the cookie expiration, 2736616305
    # above, is going to limit the date until which this unittest will PASS
    cj_contents = self.COOKIEJAR.replace(' ' * 8, '')

    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.write(cj_contents)
    tmp_file.close()

    cookie_jar = cookielib.MozillaCookieJar()
    cookie_jar.load(tmp_file.name, ignore_discard=True, ignore_expires=True)

    opener = urllib2.build_opener(CookieHandler(cookie_jar))

    # Verify cookie from cookie jar is sent
    sent_body = opener.open(HTTPRequest(self.URL_CHECK_COOKIE,
                                        cookies=True)).read()
    self.assertTrue('Cookie was sent.' in sent_body)

    # And now it will NOT send any cookie because we're setting cookie to False
    suppressed_body = opener.open(HTTPRequest(self.URL_CHECK_COOKIE,
                                              cookies=False)).read()
    self.assertTrue('Cookie was NOT sent.' in suppressed_body)

    os.unlink(tmp_file.name)
def test_low_level(self):
    """CookieHandler stores cookies and honors the per-request cookies flag."""
    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE,
                           adding_headers={'Set-Cookie': self.COOKIE_VALUE})
    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=self.request_callback)

    opener = urllib2.build_opener(CookieHandler)

    # With this request the CookieHandler should store a cookie in its
    # cookiejar
    opener.open(HTTPRequest(URL(self.URL_SEND_COOKIE))).read()

    def check(cookies_flag, expected_substring):
        # Send a request to the check URL and assert on the echoed body
        req = HTTPRequest(URL(self.URL_CHECK_COOKIE), cookies=cookies_flag)
        body = opener.open(req).read()
        self.assertIn(expected_substring, body)

    # And now it will send it because we're setting cookie to True
    check(True, 'Cookie received')

    # And now it will NOT send any cookie because we're setting cookie to False
    check(False, 'Cookie not sent')

    # Cookies are sent again after having been suppressed once
    check(True, 'Cookie received')
def test_basic(self):
    """A bare domain URL is normalized to include the trailing slash."""
    request = HTTPRequest(URL('http://www.w3af.com'))

    self.assertEqual(request.get_full_url(), 'http://www.w3af.com/')
    self.assertEqual(request.get_uri().url_string, 'http://www.w3af.com/')
def test_dump_case01(self):
    """dump() renders the request line, headers and a trailing blank line with CRLF."""
    url = URL('http://w3af.com/a/b/c.php')
    request = HTTPRequest(url, headers=Headers([('Hello', 'World')]))

    expected = ('GET http://w3af.com/a/b/c.php HTTP/1.1\r\n'
                'Hello: World\r\n'
                '\r\n')

    self.assertEqual(request.dump(), expected)
def test_dump_case02(self):
    """Non-ASCII header values are dumped UTF-8 encoded."""
    expected = (u'GET http://w3af.com/a/b/c.php HTTP/1.1\r\n'
                u'Hola: Múndo\r\n'
                u'\r\n')

    url = URL('http://w3af.com/a/b/c.php')
    request = HTTPRequest(url, headers=Headers([('Hola', 'Múndo')]))

    self.assertEqual(request.dump(), expected.encode('utf-8'))
def POST(self, uri, data='', headers=Headers(), grep=True, cache=False,
         cookies=True, error_handling=True, timeout=None, _from=None):
    """
    POST's data to a uri using a proxy, user agents, and other settings
    that where set previously.

    :param uri: This is the url where to post.
    :param data: A string with the data for the POST.
    :see: The GET() for documentation on the other parameters
    :return: An HTTPResponse object.
    """
    if not isinstance(uri, URL):
        raise TypeError('The uri parameter of ExtendedUrllib.POST() must'
                        ' be of url.URL type.')

    if not isinstance(headers, Headers):
        raise TypeError(
            'The header parameter of ExtendedUrllib.POST() must'
            ' be of Headers type.')

    # Validate what I'm sending, init the library (if needed)
    self.setup()

    #
    # Create and send the request
    #
    # Please note that the cache=False overrides the user setting
    # since we *never* want to return cached responses for POST
    # requests.
    #
    data = str(data)

    # A caller-specified timeout forces a new connection so the socket
    # timeout can actually take effect; otherwise the per-host timeout
    # (possibly auto-adjusted) is used
    new_connection = True if timeout is not None else False
    host = uri.get_domain()
    timeout = self.get_timeout(host) if timeout is None else timeout

    req = HTTPRequest(uri, data=data, cookies=cookies, cache=False,
                      error_handling=error_handling, method='POST',
                      retries=self.settings.get_max_retrys(),
                      timeout=timeout,
                      new_connection=new_connection)
    req = self.add_headers(req, headers)

    # Track which component generated this request (used for stats/debugging)
    req._from = _from

    return self.send(req, grep=grep)
def test_dump_case02(self):
    """A header containing non-ASCII characters round-trips via UTF-8 in dump()."""
    lines = [u'GET http://w3af.com/a/b/c.php HTTP/1.1',
             u'Hola: Múndo',
             u'',
             u'']
    expected = u'\r\n'.join(lines)

    target = URL('http://w3af.com/a/b/c.php')
    hdrs = Headers([('Hola', 'Múndo')])
    req = HTTPRequest(target, headers=hdrs)

    self.assertEqual(req.dump(), expected.encode('utf-8'))
def test_dump_case01(self):
    """The ASCII-only request is serialized with CRLF line endings."""
    expected_lines = ['GET http://w3af.com/a/b/c.php HTTP/1.1',
                      'Hello: World',
                      '',
                      '']
    expected = '\r\n'.join(expected_lines)

    target = URL('http://w3af.com/a/b/c.php')
    hdrs = Headers([('Hello', 'World')])
    req = HTTPRequest(target, headers=hdrs)

    self.assertEqual(req.dump(), expected)
def test_render_url_special_chars(self):
    """
    Render a finding whose URL contains a NULL byte and a non-ASCII
    character and compare against the complete expected XML document.
    """
    self.maxDiff = None

    _id = 2
    vuln = MockVuln(_id=_id)

    # URL with an embedded NULL byte and a non-ASCII character
    url = URL(
        u'https://w3af.com/._basebind/node_modules/lodash._basecreate/'
        u'LICENSE.txt\x00=ڞ')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    vuln.set_uri(url)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # NOTE: the NULL byte is rendered as a <character code="0000"/> node in
    # the attribute, while the non-ASCII character is kept as-is; in the
    # request status line both are percent-encoded
    expected = (
        u'<vulnerability id="[2]" method="GET" name="TestCase" plugin="plugin_name" severity="High" url="https://w3af.com/._basebind/node_modules/lodash._basecreate/LICENSE.txt<character code="0000"/>=\u069e" var="None">\n'
        u' <description>Foo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggs</description>\n\n\n'
        u' <http-transactions>\n'
        u' <http-transaction id="2">\n\n'
        u' <http-request>\n'
        u' <status>POST https://w3af.com/._basebind/node_modules/lodash._basecreate/LICENSE.txt%00=%DA%9E HTTP/1.1</status>\n'
        u' <headers>\n'
        u' <header field="User-agent" content="w3af" />\n'
        u' </headers>\n'
        u' <body content-encoding="base64">YT0x\n</body>\n'
        u' </http-request>\n\n'
        u' <http-response>\n'
        u' <status>HTTP/1.1 200 OK</status>\n'
        u' <headers>\n'
        u' <header field="Content-Type" content="text/html" />\n'
        u' </headers>\n'
        u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
        u' </http-response>\n\n'
        u'</http-transaction>\n'
        u' </http-transactions>\n'
        u'</vulnerability>')

    self.assertEqual(xml, expected)
    self.assertValidXML(xml)
def test_to_from_dict(self):
    """An HTTPRequest survives a to_dict -> msgpack -> from_dict round-trip."""
    original = HTTPRequest(URL("http://www.w3af.com/"),
                           data='spameggs',
                           headers=Headers([('Host', 'www.w3af.com')]))

    serialized = msgpack.dumps(original.to_dict())
    restored = HTTPRequest.from_dict(msgpack.loads(serialized))

    self.assertEqual(original, restored)
    self.assertEqual(original.__dict__.values(),
                     restored.__dict__.values())
def test_cache(self):
    """
    to_string() renders the transaction and stores the result in the node
    cache; subsequent lookups and renders must match the first render.
    """
    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    _id = 2

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    http_transaction = HTTPTransaction(x._get_jinja2_env(), _id)

    # Nothing has been rendered yet, so nothing is cached yet
    self.assertIsNone(http_transaction.get_node_from_cache())

    # Writes to cache
    xml = http_transaction.to_string()

    expected = (
        u'<http-transaction id="2">\n\n'
        u' <http-request>\n'
        u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
        u' <headers>\n'
        u' <header field="User-agent" content="w3af" />\n'
        u' </headers>\n'
        u' <body content-encoding="base64">YT0x\n</body>\n'
        u' </http-request>\n\n'
        u' <http-response>\n'
        u' <status>HTTP/1.1 200 OK</status>\n'
        u' <headers>\n'
        u' <header field="Content-Type" content="text/html" />\n'
        u' </headers>\n'
        u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
        u' </http-response>\n\n</http-transaction>')
    self.assertEqual(expected, xml)

    # Yup, we're cached
    self.assertIsNotNone(http_transaction.get_node_from_cache())

    # Make sure they are all the same
    cached_xml = http_transaction.get_node_from_cache()
    self.assertEqual(cached_xml, expected)

    xml = http_transaction.to_string()
    self.assertEqual(expected, xml)
def test_cache(self):
    """
    Verify the HTTPTransaction render cache: empty before the first
    to_string(), populated after it, and always equal to the rendered XML.
    """
    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    _id = 2

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    http_transaction = HTTPTransaction(x._get_jinja2_env(), _id)

    # Cache miss before the first render
    self.assertIsNone(http_transaction.get_node_from_cache())

    # Writes to cache
    xml = http_transaction.to_string()

    expected = (u'<http-transaction id="2">\n\n'
                u' <http-request>\n'
                u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
                u' <headers>\n'
                u' <header field="User-agent" content="w3af" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">YT0x\n</body>\n'
                u' </http-request>\n\n'
                u' <http-response>\n'
                u' <status>HTTP/1.1 200 OK</status>\n'
                u' <headers>\n'
                u' <header field="Content-Type" content="text/html" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
                u' </http-response>\n\n</http-transaction>')
    self.assertEqual(expected, xml)

    # Yup, we're cached
    self.assertIsNotNone(http_transaction.get_node_from_cache())

    # Make sure they are all the same
    cached_xml = http_transaction.get_node_from_cache()
    self.assertEqual(cached_xml, expected)

    xml = http_transaction.to_string()
    self.assertEqual(expected, xml)
def test_render_simple(self):
    """
    Render a plain finding (no special characters anywhere) and compare
    against the complete expected XML document.
    """
    _id = 2
    vuln = MockVuln(_id=_id)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    expected = (
        u'<vulnerability id="[2]" method="GET" name="TestCase" plugin="plugin_name" severity="High" url="None" var="None">\n'
        u' <description>Foo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggs</description>\n\n\n'
        u' <http-transactions>\n'
        u' <http-transaction id="2">\n\n'
        u' <http-request>\n'
        u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
        u' <headers>\n'
        u' <header field="User-agent" content="w3af" />\n'
        u' </headers>\n'
        u' <body content-encoding="base64">YT0x\n</body>\n'
        u' </http-request>\n\n'
        u' <http-response>\n'
        u' <status>HTTP/1.1 200 OK</status>\n'
        u' <headers>\n'
        u' <header field="Content-Type" content="text/html" />\n'
        u' </headers>\n'
        u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
        u' </http-response>\n\n'
        u'</http-transaction>\n'
        u' </http-transactions>\n'
        u'</vulnerability>')

    self.assertEqual(xml, expected)
    self.assertValidXML(xml)
def test_no_duplicate_vuln_reports(self):
    """
    The xml_file plugin had a bug where vulnerabilities were written to
    disk multiple times, this test makes sure I fixed that vulnerability
    """
    # Write the HTTP request / response to the DB
    url = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(url, data='a=1')
    request.set_headers(Headers([('User-Agent', 'w3af')]))

    response = HTTPResponse(200, '<html>syntax error near',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)

    _id = 1
    response.set_id(_id)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    # Create one vulnerability in the KB pointing to the request-
    # response we just created
    desc = 'Just a test for the XML file output plugin.'
    vuln = Vuln('SQL injection', desc, severity.HIGH, _id, 'sqli')
    kb.kb.append('sqli', 'sqli', vuln)

    self.assertEqual(len(kb.kb.get_all_vulns()), 1)

    # Setup the plugin
    plugin_instance = xml_file()
    plugin_instance.set_w3af_core(self.w3af_core)

    # Set the output file for the unittest
    options = OptionList()
    options.add(opt_factory('output_file',
                            self.FILENAME,
                            'Output file name where to write the XML data',
                            OUTPUT_FILE))
    plugin_instance.set_options(options)

    # Flushing multiple times used to reproduce the duplicate-write issue
    plugin_instance.flush()
    plugin_instance.flush()
    plugin_instance.flush()

    # Now we parse the vulnerabilities from disk and confirm only one
    # is there
    file_vulns = get_vulns_from_xml(self.FILENAME)
    self.assertEqual(len(file_vulns), 1, file_vulns)
def test_render_simple(self):
    """
    End-to-end Finding render check: a MockVuln plus one stored HTTP
    transaction must produce exactly the expected XML document.
    """
    _id = 2
    vuln = MockVuln(_id=_id)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    expected = (u'<vulnerability id="[2]" method="GET" name="TestCase" plugin="plugin_name" severity="High" url="None" var="None">\n'
                u' <description>Foo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggs</description>\n\n\n'
                u' <http-transactions>\n'
                u' <http-transaction id="2">\n\n'
                u' <http-request>\n'
                u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
                u' <headers>\n'
                u' <header field="User-agent" content="w3af" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">YT0x\n</body>\n'
                u' </http-request>\n\n'
                u' <http-response>\n'
                u' <status>HTTP/1.1 200 OK</status>\n'
                u' <headers>\n'
                u' <header field="Content-Type" content="text/html" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
                u' </http-response>\n\n'
                u'</http-transaction>\n'
                u' </http-transactions>\n'
                u'</vulnerability>')

    self.assertEqual(xml, expected)
    self.assertValidXML(xml)
def test_no_duplicate_vuln_reports(self):
    """
    The xml_file plugin had a bug where vulnerabilities were written to
    disk multiple times, this test makes sure I fixed that vulnerability
    """
    # Write the HTTP request / response to the DB
    target = URL('http://w3af.com/a/b/c.php')

    http_request = HTTPRequest(target, data='a=1')
    http_request.set_headers(Headers([('User-Agent', 'w3af')]))

    http_response = HTTPResponse(200, '<html>syntax error near',
                                 Headers([('Content-Type', 'text/html')]),
                                 target, target)

    response_id = 1
    http_response.set_id(response_id)

    history = HistoryItem()
    history.request = http_request
    history.response = http_response
    history.save()

    # Create one vulnerability in the KB pointing to the request-
    # response we just created
    vuln = Vuln('SQL injection',
                'Just a test for the XML file output plugin.',
                severity.HIGH,
                response_id,
                'sqli')
    kb.kb.append('sqli', 'sqli', vuln)

    self.assertEqual(len(kb.kb.get_all_vulns()), 1)

    # Setup the plugin and point its output file at the unittest target
    plugin_instance = xml_file()

    option_list = OptionList()
    option_list.add(opt_factory('output_file',
                                self.FILENAME,
                                'Output file name where to write the XML data',
                                OUTPUT_FILE))
    plugin_instance.set_options(option_list)

    # Flushing several times used to reproduce the duplicate-write issue
    plugin_instance.flush()
    plugin_instance.flush()
    plugin_instance.flush()

    # Now we parse the vulnerabilities from disk and confirm only one
    # is there
    file_vulns = get_vulns_from_xml(self.FILENAME)
    self.assertEqual(len(file_vulns), 1, file_vulns)
def create_redirect_request(self, req, fp, code, msg, headers, new_url_str,
                            new_url_obj):
    """
    This is called by the http_error_30x methods when a redirection response
    is received. If a redirection should take place, return a new Request
    to allow http_error_30x to perform the redirect.
    """
    # Drop the headers that must not be forwarded on a redirect
    new_headers = {}
    for header_name, header_value in req.headers.items():
        if header_name.lower() not in REMOVE_ON_REDIRECT:
            new_headers[header_name] = header_value

    # Only GET and HEAD are preserved; any other method is downgraded to GET
    orig_method = req.get_method()
    if orig_method in {'GET', 'HEAD'}:
        method = orig_method
    else:
        method = 'GET'

    return HTTPRequest(new_url_obj,
                       headers=new_headers,
                       origin_req_host=req.get_origin_req_host(),
                       method=method,
                       timeout=req.timeout,
                       unverifiable=True,
                       follow_redirects=True,
                       cookies=req.cookies,
                       cache=req.get_from_cache,
                       error_handling=req.error_handling,
                       retries=req.retries_left,
                       new_connection=req.new_connection,
                       use_basic_auth=req.use_basic_auth)
def test_add_when_qs(self):
    """The evasion plugin adds a parameter while keeping the original one."""
    original_req = HTTPRequest(URL('http://www.w3af.com/?id=1'))

    modified_req = self.eplugin.modify_request(original_req)
    modified_qs = modified_req.url_object.querystring

    self.assertEqual(len(modified_qs), 2)
    self.assertIn('id=1', str(modified_qs))
def create_redirect_request(self, request, response, new_url_obj):
    """
    Create a new HTTP request inheriting all the attributes from the
    original object and setting the target URL to the one received in the
    30x response.
    """
    # Strip headers that must not survive a redirect
    forwarded_headers = {}
    for field, value in request.headers.items():
        if field.lower() not in REMOVE_ON_REDIRECT:
            forwarded_headers[field] = value

    # GET / HEAD are kept; everything else is downgraded to GET
    original_method = request.get_method()
    if original_method in GET_HEAD:
        redirect_method = original_method
    else:
        redirect_method = 'GET'

    return HTTPRequest(new_url_obj,
                       headers=forwarded_headers,
                       origin_req_host=request.get_origin_req_host(),
                       method=redirect_method,
                       timeout=request.timeout,
                       unverifiable=True,
                       follow_redirects=True,
                       cookies=request.cookies,
                       cache=request.get_from_cache,
                       error_handling=request.error_handling,
                       retries=request.retries_left,
                       new_connection=request.new_connection,
                       use_basic_auth=request.use_basic_auth)
def _is_suitable(self, freq):
    """
    For CSRF attack we need request with payload and persistent/session
    cookies.

    :param freq: The fuzzable request to inspect
    :return: True if the request can have a CSRF vulnerability
    """
    # Does the application send cookies?
    #
    # By checking like this we're loosing the opportunity to detect any
    # CSRF vulnerabilities in non-authenticated parts of the application
    for cookie in self._uri_opener.get_cookies():
        if freq.get_url().get_domain() in cookie.domain:
            break
    else:
        return False

    # Strict mode on/off - do we need to audit GET requests? Not always...
    if freq.get_method() == 'GET' and self._strict_mode:
        return False

    # Does the request have a payload?
    #
    # By checking like this we're loosing the opportunity to find CSRF vulns
    # in applications that use mod_rewrite. Example: A CSRF in this URL:
    # http://host.tld/users/remove/id/123
    if not freq.get_uri().has_query_string() and not freq.get_raw_data():
        # Removed dead leftover code that built an unused HTTPRequest for
        # URLs containing 'date' before returning
        return False

    om.out.debug('%s is suitable for CSRF attack' % freq.get_url())
    return True
def modify_request(self, request):
    """
    Mangles the request

    :param request: HTTPRequest instance that is going to be modified by
                    the evasion plugin
    :return: The modified request
    """
    # First we mangle the URL
    path = request.url_object.get_path()
    path = self._mutate(path)

    # Finally, we set all the mutants to the request in order to return it
    new_url = request.url_object.copy()
    new_url.set_path(path)

    # Mangle the postdata
    data = request.get_data()
    if data:
        try:
            # Only mangle the postdata if it is a url encoded string
            parse_qs(data)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; non-urlencoded
            # bodies are deliberately left untouched
            pass
        else:
            data = self._mutate(data)

    new_req = HTTPRequest(new_url, data, request.headers,
                          request.get_origin_req_host(),
                          retries=request.retries_left)

    return new_req
def __call__(self, uri, data=None, headers=Headers(), cache=False,
             grep=True, cookies=True, ignore_errors=False):
    """
    Send an HTTP request using the configured (non GET/POST) method.

    :return: An HTTPResponse object that's the result of sending the
             request with a method different from "GET" or "POST".
    """
    if not isinstance(uri, URL):
        raise TypeError('The uri parameter of AnyMethod.'
                        '__call__() must be of url.URL type.')

    if not isinstance(headers, Headers):
        raise TypeError('The headers parameter of AnyMethod.'
                        '__call__() must be of Headers type.')

    self._xurllib._init()

    req = HTTPRequest(uri, data,
                      cookies=cookies,
                      cache=cache,
                      method=self._method,
                      ignore_errors=ignore_errors,
                      retries=self._xurllib.settings.get_max_retrys())
    req = self._xurllib._add_headers(req, headers or {})

    return self._xurllib._send(req, grep=grep)
def test_clear(self):
    """clear() empties the history rows and session dir but keeps the table."""
    url = URL('http://w3af.com/a/b/c.php')

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(1)

    history = HistoryItem()
    history.request = HTTPRequest(url, data='a=1')
    history.response = response
    history.save()

    table_name = history.get_table_name()
    db = get_default_temp_db_instance()

    self.assertTrue(db.table_exists(table_name))

    self.assertTrue(history.clear())
    self.assertFalse(os.path.exists(history._session_dir),
                     '%s exists.' % history._session_dir)

    # Changed the meaning of clear a little bit... now it simply removes
    # all rows from the table, not the table itself
    self.assertTrue(db.table_exists(table_name))
def modify_request(self, request):
    """
    Mangles the request

    :param request: HTTPRequest instance that is going to be modified by
                    the evasion plugin
    :return: The modified request
    """
    # First we mangle the URL
    qs = request.url_object.querystring.copy()
    qs = self._mutate(qs)

    # Finally, we set all the mutants to the request in order to return it
    new_url = request.url_object.copy()
    new_url.querystring = qs

    # Mangle the postdata
    post_data = request.get_data()
    if post_data:
        try:
            # Only mangle the postdata if it is a url encoded string
            post_data = parse_qs(post_data)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; a body that is
            # not url-encoded is sent unchanged
            pass
        else:
            post_data = str(self._mutate(post_data))

    new_req = HTTPRequest(new_url, post_data, request.headers,
                          request.get_origin_req_host())

    return new_req
def _server_root_path_is_reachable(self, request): """ Sends an HTTP GET to the server's root path to verify that it's reachable from our location. :param request: The original HTTP request :return: True if we were able to get a response """ uri = request.get_uri() root_url = uri.base_url() host = uri.get_domain() req = HTTPRequest(root_url, cookies=True, cache=False, error_handling=False, method='GET', retries=0, timeout=self.get_timeout(host)) req = self.add_headers(req) try: self.send(req, grep=False) except HTTPRequestException, e: msg = 'Remote URL %s is UNREACHABLE due to: "%s"' om.out.debug(msg % (root_url, e)) return False
def GET(self, uri, data=None, headers=Headers(), cache=False,
        grep=True, cookies=True, respect_size_limit=True,
        error_handling=True, timeout=None):
    """
    HTTP GET a URI using a proxy, user agent, and other settings that where
    previously set in opener_settings.py .

    :param uri: This is the URI to GET, with the query string included.
    :param data: Only used if the uri parameter is really a URL. The data
                 will be converted into a string and set as the URL object
                 query string before sending.
    :param headers: Any special headers that will be sent with this request
    :param cache: Should the library search the local cache for a response
                  before sending it to the wire?
    :param grep: Should grep plugins be applied to this request/response?
    :param timeout: If None we'll use the configured (opener settings)
                    timeout or the auto-adjusted value. Otherwise we'll use
                    the defined timeout as the socket timeout value for this
                    request. The timeout is specified in seconds
    :param cookies: Send stored cookies in request (or not)

    :return: An HTTPResponse object.
    """
    if not isinstance(uri, URL):
        raise TypeError('The uri parameter of ExtendedUrllib.GET() must be'
                        ' of url.URL type.')

    if not isinstance(headers, Headers):
        raise TypeError('The header parameter of ExtendedUrllib.GET() must'
                        ' be of Headers type.')

    # Validate what I'm sending, init the library (if needed)
    self.setup()

    # Copy the URL before mutating it so the caller's object is untouched
    if data:
        uri = uri.copy()
        uri.querystring = data

    # A caller-specified timeout forces a fresh connection so the socket
    # timeout can actually be applied to this request
    new_connection = True if timeout is not None else False
    host = uri.get_domain()
    timeout = self.get_timeout(host) if timeout is None else timeout

    req = HTTPRequest(uri, cookies=cookies, cache=cache,
                      error_handling=error_handling, method='GET',
                      retries=self.settings.get_max_retrys(),
                      timeout=timeout,
                      new_connection=new_connection)
    req = self.add_headers(req, headers)

    with raise_size_limit(respect_size_limit):
        return self.send(req, grep=grep)
def test_no_modification(self):
    """A URL without encodable characters passes through unchanged."""
    plugin = rnd_hex_encode()
    original = HTTPRequest(URL('http://www.w3af.com/'))

    modified = plugin.modify_request(original)

    self.assertEqual(modified.url_object.url_string,
                     u'http://www.w3af.com/')
def test_encode_path_case01(self):
    """The path component is either left alone or randomly hex-encoded."""
    plugin = rnd_hex_encode()
    request = HTTPRequest(URL('http://www.w3af.com/a/'))

    modified_path = plugin.modify_request(request).url_object.get_path()

    self.assertIn(modified_path, ['/a/', '/%61/'])
def test_modify_basic(self):
    """'..' in the path is rewritten as .%41%08. (an 'A' plus a backspace)."""
    plugin = backspace_between_dots()
    request = HTTPRequest(URL('http://www.w3af.com/../'))

    modified = plugin.modify_request(request)

    self.assertEqual(modified.url_object.url_string,
                     u'http://www.w3af.com/.%41%08./')
def test_cache_http_errors(self):
    """HTTP error responses (404) must also be stored in the cache."""
    settings = opener_settings.OpenerSettings()
    settings.build_openers()
    opener = settings.get_custom_opener()

    url = URL('http://w3af.org/foo-bar-not-exists.htm')
    request = HTTPRequest(url, cache=False)

    with patch('w3af.core.data.url.handlers.cache.CacheClass') as cc_mock:
        store_in_cache = Mock()
        cc_mock.attach_mock(store_in_cache, 'store_in_cache')

        # If there is a response we should store it, even if it is a 404
        try:
            response = opener.open(request)
        except urllib2.HTTPError:
            pass

        # Make sure the right call was made
        expected_call = _Call(('store_in_cache', (request, response)))
        self.assertEqual(cc_mock.mock_calls, [expected_call])
        cc_mock.reset_mock()

        # And make sure the response was a 404
        self.assertEqual(response.status, 404)
def test_encode_post_data(self):
    """POST data key and value may each independently be hex-encoded."""
    plugin = rnd_hex_encode()
    request = HTTPRequest(URL('http://www.w3af.com/'), data='a=b')

    mangled_data = plugin.modify_request(request).get_data()

    self.assertIn(mangled_data, ['a=b', '%61=b', 'a=%62', '%61=%62'])
def test_to_dict_msgpack_with_data_token(self):
    """to_dict() must stay msgpack-serializable when headers hold a DataToken."""
    token = DataToken('Host', 'www.w3af.com', ('Host',))

    fuzzable = FuzzableRequest(URL("http://www.w3af.com/"),
                               headers=Headers([('Host', token)]))
    request = HTTPRequest.from_fuzzable_request(fuzzable)

    # Must not raise
    msgpack.dumps(request.to_dict())
def test_handler_order_block(self):
    """
    Get an instance of the extended urllib and verify that the blacklist
    handler still works, even when mixed with all the other handlers.
    """
    # Configure the handler
    blocked_url = URL(get_moth_http('/abc/def/'))
    cf.cf.save('non_targets', [blocked_url, ])

    settings = opener_settings.OpenerSettings()
    settings.build_openers()
    opener = settings.get_custom_opener()

    blocked_request = HTTPRequest(blocked_url)
    blocked_request.url_object = blocked_url
    blocked_request.cookies = True
    blocked_request.get_from_cache = False

    blocked_response = opener.open(blocked_request)

    # The blacklist handler short-circuits with an empty response
    self.assertEqual(blocked_response.code, NO_CONTENT)
    self.assertEqual(blocked_response.id, 1)
def http_request(self, request):
    """Run every configured mangle plugin over the outgoing request.

    :param request: The urllib2-level HTTP request about to be sent.
    :return: The (possibly) modified request; unchanged when no mangle
             plugins are configured.
    """
    if not self._plugin_list:
        return request

    fuzzable_request = FuzzableRequest.from_urllib2_request(request)

    for plugin in self._plugin_list:
        fuzzable_request = plugin.mangle_request(fuzzable_request)

    return HTTPRequest.from_fuzzable_request(fuzzable_request)
def test_render_simple(self):
    """Render a stored request/response pair and compare against the
    exact expected XML output."""
    url = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(url, data='a=1')
    request.set_headers(Headers([('User-Agent', 'w3af')]))

    response_headers = Headers([('Content-Type', 'text/html')])
    response = HTTPResponse(200, '<html>', response_headers, url, url)

    _id = 1
    response.set_id(_id)

    history_item = HistoryItem()
    history_item.request = request
    history_item.response = response
    history_item.save()

    plugin = xml_file()
    http_transaction = HTTPTransaction(plugin._get_jinja2_env(), _id)
    xml = http_transaction.to_string()

    expected = (u'<http-transaction id="1">\n\n'
                u'    <http-request>\n'
                u'        <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
                u'        <headers>\n'
                u'            <header field="User-agent" content="w3af" />\n'
                u'        </headers>\n'
                u'        <body content-encoding="base64">YT0x\n</body>\n'
                u'    </http-request>\n\n'
                u'    <http-response>\n'
                u'        <status>HTTP/1.1 200 OK</status>\n'
                u'        <headers>\n'
                u'            <header field="Content-Type" content="text/html" />\n'
                u'        </headers>\n'
                u'        <body content-encoding="base64">PGh0bWw+\n</body>\n'
                u'    </http-response>\n\n</http-transaction>')

    self.assertEqual(expected, xml)
    self.assertValidXML(xml)
def _load_from_file(self, id):
    """Load a request/response trace pair from its msgpack file on disk.

    The writer may still be flushing the file when we first look at it,
    so reads are retried for ~1 second before giving up. A trailing
    canary value is used to detect partially-written files.

    :param id: The trace id (kept as-is for API compatibility, even
               though it shadows the builtin).
    :return: A (HTTPRequest, HTTPResponse) tuple.
    :raise IOError: When the trace file never becomes readable within
                    the retry window.
    """
    fname = self._get_fname_for_id(id)
    WAIT_TIME = 0.05

    #
    # Due to some concurrency issues, we need to perform these checks
    #
    for _ in xrange(int(1 / WAIT_TIME)):
        if not os.path.exists(fname):
            time.sleep(WAIT_TIME)
            continue

        # Ok... the file exists, but it might still be being written.
        # Using a context manager guarantees the handle is closed on
        # every exit path (the original code had to close it by hand
        # in four places and leaked on unexpected exceptions).
        with open(fname, 'rb') as req_res:
            try:
                data = msgpack.load(req_res, use_list=True)
            except ValueError:
                # ValueError: Extra data. returned when msgpack finds
                # invalid data in the file
                time.sleep(WAIT_TIME)
                continue

        try:
            request_dict, response_dict, canary = data
        except TypeError:
            # https://github.com/andresriancho/w3af/issues/1101
            # 'NoneType' object is not iterable
            time.sleep(WAIT_TIME)
            continue

        if canary != self._MSGPACK_CANARY:
            # read failed, most likely because the file write is not
            # complete but for some reason it was a valid msgpack file
            time.sleep(WAIT_TIME)
            continue

        # Success!
        request = HTTPRequest.from_dict(request_dict)
        response = HTTPResponse.from_dict(response_dict)
        return request, response
    else:
        msg = 'Timeout expecting trace file to be ready "%s"' % fname
        raise IOError(msg)
def test_cache_works_as_expected(self):
    """The findings cache should track KB additions and removals."""
    #
    # Cache starts empty
    #
    cache = FindingsCache()
    self.assertEquals(cache.list(), [])

    #
    # Create two vulnerabilities with their HTTP requests and responses
    #
    def save_history(_id):
        # Store a request/response pair with the given id in the history
        url = URL('http://w3af.com/a/b/c.php')

        request = HTTPRequest(url, data='a=1')
        request.set_headers(Headers([('User-Agent', 'w3af')]))

        response_headers = Headers([('Content-Type', 'text/html')])
        response = HTTPResponse(200, '<html>', response_headers, url, url)
        response.set_id(_id)

        history = HistoryItem()
        history.request = request
        history.response = response
        history.save()

    vuln1 = MockVuln(_id=1)
    vuln1.set_name('I have a name')
    save_history(1)

    vuln2 = MockVuln(_id=2)
    vuln2.set_name('Just a name')
    save_history(2)

    #
    # Save one vulnerability to the KB and call the cache-user
    #
    kb.kb.append('a', 'b', vuln1)

    plugin = xml_file()
    list(plugin.findings())

    self.assertEquals(cache.list(), [vuln1.get_uniq_id()])

    #
    # Save another vulnerability to the KB and call the cache-user
    #
    kb.kb.append('a', 'c', vuln2)
    list(plugin.findings())

    expected = {vuln1.get_uniq_id(), vuln2.get_uniq_id()}
    self.assertEquals(set(cache.list()), expected)

    #
    # Remove one vulnerability and see how it is removed from the cache
    #
    kb.kb.raw_write('a', 'c', 'noop')
    list(plugin.findings())

    expected = {vuln1.get_uniq_id()}
    self.assertEquals(set(cache.list()), expected)
class TestSed(unittest.TestCase):
    """Tests for the sed mangle plugin: request/response rewriting."""

    def setUp(self):
        create_temp_dir()
        self.plugin = sed()
        self.url = URL("http://www.w3af.com/")
        self.request = HTTPRequest(self.url)

    def tearDown(self):
        self.plugin.end()

    def _configure(self, expression):
        # Apply a single sed expression to the plugin options
        option_list = self.plugin.get_options()
        option_list["expressions"].set_value(expression)
        self.plugin.set_options(option_list)

    def test_blank_body(self):
        headers = Headers([("content-type", "text/html")])
        response = HTTPResponse(200, "", headers, self.url, self.url, _id=1)

        self._configure("qh/User/NotLuser/")

        mod_request = self.plugin.mangle_request(self.request)
        mod_response = self.plugin.mangle_response(response)

        # Nothing matches, so everything stays the same
        self.assertEqual(mod_request.get_headers(), self.request.get_headers())
        self.assertEqual(mod_response.get_headers(), response.get_headers())
        self.assertEqual(mod_request.get_uri(), self.request.get_uri())
        self.assertEqual(mod_response.get_uri(), response.get_uri())
        self.assertEqual(mod_response.get_body(), response.get_body())

    def test_response_body(self):
        headers = Headers([("content-type", "text/html")])
        response = HTTPResponse(200, "hello user!", headers,
                                self.url, self.url, _id=1)

        self._configure("sb/user/notluser/")

        mod_request = self.plugin.mangle_request(self.request)
        mod_response = self.plugin.mangle_response(response)

        # Only the response body should be rewritten
        self.assertEqual(mod_request.get_headers(), self.request.get_headers())
        self.assertEqual(mod_response.get_headers(), response.get_headers())
        self.assertEqual(mod_request.get_uri(), self.request.get_uri())
        self.assertEqual(mod_response.get_uri(), response.get_uri())
        self.assertEqual(mod_response.get_body(), "hello notluser!")

    def test_request_headers(self):
        headers = Headers([("content-type", "text/html")])
        request = HTTPRequest(self.url, headers=headers)

        self._configure("qh/html/xml/")

        mod_request = self.plugin.mangle_request(request)
        value, _ = mod_request.get_headers().iget("content-type")

        self.assertEqual(value, "text/xml")
        self.assertIs(mod_request, request)
class TestSed(unittest.TestCase):
    """Tests for the sed mangle plugin: request/response rewriting."""

    def setUp(self):
        create_temp_dir()
        self.plugin = sed()
        self.url = URL('http://www.w3af.com/')
        self.request = HTTPRequest(self.url)

    def tearDown(self):
        self.plugin.end()

    def _configure(self, expression):
        # Apply a single sed expression to the plugin options
        option_list = self.plugin.get_options()
        option_list['expressions'].set_value(expression)
        self.plugin.set_options(option_list)

    def test_blank_body(self):
        headers = Headers([('content-type', 'text/html')])
        response = HTTPResponse(200, '', headers, self.url, self.url, _id=1)

        self._configure('qh/User/NotLuser/')

        mod_request = self.plugin.mangle_request(self.request)
        mod_response = self.plugin.mangle_response(response)

        # Nothing matches, so everything stays the same
        self.assertEqual(mod_request.get_headers(), self.request.get_headers())
        self.assertEqual(mod_response.get_headers(), response.get_headers())
        self.assertEqual(mod_request.get_uri(), self.request.get_uri())
        self.assertEqual(mod_response.get_uri(), response.get_uri())
        self.assertEqual(mod_response.get_body(), response.get_body())

    def test_response_body(self):
        headers = Headers([('content-type', 'text/html')])
        response = HTTPResponse(200, 'hello user!', headers,
                                self.url, self.url, _id=1)

        self._configure('sb/user/notluser/')

        mod_request = self.plugin.mangle_request(self.request)
        mod_response = self.plugin.mangle_response(response)

        # Only the response body should be rewritten
        self.assertEqual(mod_request.get_headers(), self.request.get_headers())
        self.assertEqual(mod_response.get_headers(), response.get_headers())
        self.assertEqual(mod_request.get_uri(), self.request.get_uri())
        self.assertEqual(mod_response.get_uri(), response.get_uri())
        self.assertEqual(mod_response.get_body(), 'hello notluser!')

    def test_request_headers(self):
        headers = Headers([('content-type', 'text/html')])
        request = HTTPRequest(self.url, headers=headers)

        self._configure('qh/html/xml/')

        mod_request = self.plugin.mangle_request(request)
        value, _ = mod_request.get_headers().iget('content-type')

        self.assertEqual(value, 'text/xml')
def setUp(self):
    """Create a fresh sed plugin instance and a request to mangle."""
    create_temp_dir()
    self.plugin = sed()
    self.url = URL("http://www.w3af.com/")
    self.request = HTTPRequest(self.url)