def test_render_with_special_chars(self):
    """Special characters in the description must be XML-encoded by jinja2."""
    _id = 2
    desc = ('This is a long description that contains some special'
            ' characters such as <, & and > which MUST be encoded'
            ' by jinja2.')

    vuln = MockVuln(_id=_id)
    vuln.set_desc(desc)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # The raw (un-encoded) text must NOT appear in the rendered XML...
    self.assertNotIn('such as <, & and > which MUST', xml)

    # ...the entity-encoded version must appear instead. The original code
    # asserted the exact same raw string on both lines, which could never
    # pass: assertNotIn(s) and assertIn(s) are mutually exclusive.
    self.assertIn('such as &lt;, &amp; and &gt; which MUST', xml)

    self.assertValidXML(xml)
def test_render_with_unicode_control_chars(self):
    """Control characters in a description render as <character/> nodes."""
    _id = 2
    description = ('This is a long description that contains some special'
                   ' unicode control characters such as \f and \x09')

    vuln = MockVuln(_id=_id)
    vuln.set_desc(description)

    target = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(target, data='a=1')
    request.set_headers(Headers([('User-Agent', 'w3af')]))

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            target, target)
    response.set_id(_id)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    finding = Finding(xml_file()._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # Raw control chars must be gone, replaced by <character/> elements
    self.assertNotIn('unicode control characters such as \f and \x09', xml)
    self.assertIn('unicode control characters such as'
                  ' <character code="000c"/> and <character code="0009"/>',
                  xml)
    self.assertValidXML(xml)
def test_clear(self):
    """clear() empties the history rows but keeps the table itself."""
    url = URL('http://w3af.com/a/b/c.php')
    request = HTTPRequest(url, data='a=1')

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(1)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    table_name = history.get_table_name()
    db = get_default_temp_db_instance()
    self.assertTrue(db.table_exists(table_name))

    self.assertTrue(history.clear())
    self.assertFalse(os.path.exists(history._session_dir),
                     '%s exists.' % history._session_dir)

    # Changed the meaning of clear a little bit... now it simply removes
    # all rows from the table, not the table itself
    self.assertTrue(db.table_exists(table_name))
def test_find(self):
    """find() filters history items by tag, code, mark and has_qs."""
    find_id = random.randint(1, 499)
    url = URL('http://w3af.org/a/b/foobar.php?foo=123')
    tag_value = rand_alnum(10)

    for i in xrange(0, 500):
        request = HTTPRequest(url, data='a=1')
        code = 302 if i == find_id else 200

        response = HTTPResponse(code, '<html>',
                                Headers([('Content-Type', 'text/html')]),
                                url, url)
        response.set_id(i)

        entry = HistoryItem()
        entry.request = request
        entry.response = response

        if i == find_id:
            entry.toggle_mark()
            entry.update_tag(tag_value)

        entry.save()

    finder = HistoryItem()
    self.assertEqual(len(finder.find([('tag', "%" + tag_value + "%", 'like')])), 1)
    self.assertEqual(len(finder.find([('code', 302, '=')])), 1)
    self.assertEqual(len(finder.find([('mark', 1, '=')])), 1)
    self.assertEqual(len(finder.find([('has_qs', 1, '=')])), 500)
    self.assertEqual(len(finder.find([('has_qs', 1, '=')], result_limit=10)), 10)

    results = finder.find([('has_qs', 1, '=')],
                          result_limit=1,
                          orderData=[('id', 'desc')])
    self.assertEqual(results[0].id, 499)

    search_data = [('id', find_id + 1, "<"),
                   ('id', find_id - 1, ">")]
    self.assertEqual(len(finder.find(search_data)), 1)
def test_render_attr_with_special_chars(self):
    """Special characters in attribute values must be XML-encoded."""
    _id = 2
    name = 'A long description with special characters: <&">'

    vuln = MockVuln(_id=_id)
    vuln.set_name(name)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    # The raw name must not appear as-is...
    self.assertNotIn(name, xml)

    # ...the entity-encoded version must appear instead. The original code
    # asserted the exact same raw string on both lines, which could never
    # pass. NOTE(review): markupsafe escapes '"' as '&#34;' — confirm the
    # exact quote entity against the renderer's escaping filter.
    self.assertIn('A long description with special characters: &lt;&amp;&#34;&gt;', xml)

    self.assertValidXML(xml)
def test_dump_response_head_5416(self):
    """
    :see: https://github.com/andresriancho/w3af/issues/5416
    """
    url = URL('http://w3af.com')
    msg = 'D\xe9plac\xe9 Temporairement'

    resp = HTTPResponse(200, '', Headers(), url, url, msg=msg)

    expected_dump = u'HTTP/1.1 200 Déplacé Temporairement\r\n'.encode('utf8')
    self.assertEqual(resp.dump_response_head(), expected_dump)
def _create_terminate_response(self, http_response):
    """Build the 200 OK page shown when the proxy session is terminated."""
    rendered = render('spiderman_end.html', {})

    response_headers = Headers((
        ('Connection', 'close'),
        ('Content-type', 'text/html'),
    ))

    return HTTPResponse(200,
                        rendered.encode('utf-8'),
                        response_headers,
                        http_response.get_uri(),
                        http_response.get_uri(),
                        msg='Ok')
def new_no_content_resp(uri, add_id=False):
    """
    Return a new NO_CONTENT HTTPResponse object.

    :param uri: URI string or request object
    """
    resp = HTTPResponse(NO_CONTENT, '', Headers(), uri, uri,
                        msg='No Content')

    if add_id:
        resp.id = consecutive_number_generator.inc()

    return resp
def test_cache(self):
    """to_string() writes the rendered node to the cache and reads it back."""
    target = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(target, data='a=1')
    request.set_headers(Headers([('User-Agent', 'w3af')]))

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            target, target)

    _id = 2
    response.set_id(_id)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    http_transaction = HTTPTransaction(xml_file()._get_jinja2_env(), _id)
    self.assertIsNone(http_transaction.get_node_from_cache())

    # Writes to cache
    xml = http_transaction.to_string()

    expected = (u'<http-transaction id="2">\n\n'
                u' <http-request>\n'
                u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
                u' <headers>\n'
                u' <header field="User-agent" content="w3af" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">YT0x\n</body>\n'
                u' </http-request>\n\n'
                u' <http-response>\n'
                u' <status>HTTP/1.1 200 OK</status>\n'
                u' <headers>\n'
                u' <header field="Content-Type" content="text/html" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
                u' </http-response>\n\n</http-transaction>')
    self.assertEqual(expected, xml)

    # Yup, we're cached
    self.assertIsNotNone(http_transaction.get_node_from_cache())

    # Make sure the cache entry, the expected value and a fresh render
    # all agree
    self.assertEqual(http_transaction.get_node_from_cache(), expected)
    self.assertEqual(expected, http_transaction.to_string())
def test_clear_clear(self):
    """Calling clear() twice in a row must not raise."""
    url = URL('http://w3af.com/a/b/c.php')
    request = HTTPRequest(url, data='a=1')

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(1)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    history.clear()
    history.clear()
def test_save_load(self):
    """A saved HistoryItem can be loaded back by its id."""
    item_id = random.randint(1, 499)
    url = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(url, data='a=1')
    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(item_id)

    saved = HistoryItem()
    saved.request = request
    saved.response = response
    saved.save()

    loaded = HistoryItem()
    loaded.load(item_id)

    self.assertEqual(saved.request, loaded.request)
    self.assertEqual(saved.response.body, loaded.response.body)
def test_render_simple(self):
    """Render a MockVuln finding and compare against the full expected XML."""
    _id = 2
    vuln = MockVuln(_id=_id)

    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    # Store the request / response in the history DB so the renderer can
    # look it up by id
    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    finding = Finding(x._get_jinja2_env(), vuln)
    xml = finding.to_string()

    expected = (u'<vulnerability id="[2]" method="GET" name="TestCase" plugin="plugin_name" severity="High" url="None" var="None">\n'
                u' <description>Foo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggsFoo bar spam eggs</description>\n\n\n'
                u' <http-transactions>\n'
                u' <http-transaction id="2">\n\n'
                u' <http-request>\n'
                u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
                u' <headers>\n'
                u' <header field="User-agent" content="w3af" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">YT0x\n</body>\n'
                u' </http-request>\n\n'
                u' <http-response>\n'
                u' <status>HTTP/1.1 200 OK</status>\n'
                u' <headers>\n'
                u' <header field="Content-Type" content="text/html" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
                u' </http-response>\n\n'
                u'</http-transaction>\n'
                u' </http-transactions>\n'
                u'</vulnerability>')

    self.assertEqual(xml, expected)
    self.assertValidXML(xml)
def test_no_duplicate_vuln_reports(self):
    """Flushing the xml_file plugin several times must report each vuln once."""
    # The xml_file plugin had a bug where vulnerabilities were written to
    # disk multiple times, this test makes sure I fixed that vulnerability

    # Write the HTTP request / response to the DB
    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>syntax error near', hdr, url, url)

    _id = 1

    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    # Create one vulnerability in the KB pointing to the request-
    # response we just created
    desc = 'Just a test for the XML file output plugin.'
    v = Vuln('SQL injection', desc, severity.HIGH, _id, 'sqli')
    kb.kb.append('sqli', 'sqli', v)

    self.assertEqual(len(kb.kb.get_all_vulns()), 1)

    # Setup the plugin
    plugin_instance = xml_file()

    # Set the output file for the unittest
    ol = OptionList()
    d = 'Output file name where to write the XML data'
    o = opt_factory('output_file', self.FILENAME, d, OUTPUT_FILE)
    ol.add(o)

    # Then we flush() twice to disk, this reproduced the issue
    plugin_instance.set_options(ol)
    plugin_instance.flush()
    plugin_instance.flush()
    plugin_instance.flush()

    # Now we parse the vulnerabilities from disk and confirm only one
    # is there
    file_vulns = get_vulns_from_xml(self.FILENAME)
    self.assertEqual(len(file_vulns), 1, file_vulns)
def _create_favicon_response(self, http_response):
    """
    Return a 200 OK HTTPResponse that serves the bundled favicon.

    :param http_response: The HTTP response whose URI we answer for
    :return: An HTTPResponse with the favicon bytes as body
    """
    favicon = os.path.join(ROOT_PATH, 'plugins/crawl/spider_man/favicon.ico')

    headers = Headers((
        ('Connection', 'close'),
        ('Content-type', 'image/vnd.microsoft.icon'),
    ))

    # Use a context manager so the descriptor is closed: the original
    # file(favicon, 'rb').read() leaked the open file handle.
    with open(favicon, 'rb') as favicon_fh:
        favicon_body = favicon_fh.read()

    http_response = HTTPResponse(200, favicon_body, headers,
                                 http_response.get_uri(),
                                 http_response.get_uri(),
                                 msg='Ok')
    return http_response
def test_dump_response_head_3661(self):
    """
    :see: https://github.com/andresriancho/w3af/issues/3661
    """
    url = URL('http://w3af.com')

    # '\xf3' is o-tilde in windows-1251
    #
    # We get from that arbitrary character to o-tilde in windows-1251 when
    # we fail to decode it, and chardet guesses the encoding.
    resp = HTTPResponse(200, '',
                        Headers([('Content-Type', '\xf3')]),
                        url, url)

    # '\xc3\xb3' is o-tilde in utf-8
    self.assertEqual(resp.dump_response_head(),
                     'HTTP/1.1 200 OK\r\nContent-Type: \xc3\xb3\r\n')
def test_response_body(self):
    """mangle_response() rewrites the body; headers and URI stay intact."""
    headers = Headers([("content-type", "text/html")])
    response = HTTPResponse(200, "hello user!", headers,
                            self.url, self.url, _id=1)

    options = self.plugin.get_options()
    options["expressions"].set_value("sb/user/notluser/")
    self.plugin.set_options(options)

    mod_request = self.plugin.mangle_request(self.request)
    mod_response = self.plugin.mangle_response(response)

    self.assertEqual(mod_request.get_headers(), self.request.get_headers())
    self.assertEqual(mod_response.get_headers(), response.get_headers())
    self.assertEqual(mod_request.get_uri(), self.request.get_uri())
    self.assertEqual(mod_response.get_uri(), response.get_uri())
    self.assertEqual(mod_response.get_body(), "hello notluser!")
def test_response_body(self):
    """Body substitution applies to the response but not to request metadata."""
    original_response = HTTPResponse(200, 'hello user!',
                                     Headers([('content-type', 'text/html')]),
                                     self.url, self.url, _id=1)

    opts = self.plugin.get_options()
    opts['expressions'].set_value('sb/user/notluser/')
    self.plugin.set_options(opts)

    mangled_request = self.plugin.mangle_request(self.request)
    mangled_response = self.plugin.mangle_response(original_response)

    self.assertEqual(mangled_request.get_headers(),
                     self.request.get_headers())
    self.assertEqual(mangled_response.get_headers(),
                     original_response.get_headers())
    self.assertEqual(mangled_request.get_uri(), self.request.get_uri())
    self.assertEqual(mangled_response.get_uri(),
                     original_response.get_uri())
    self.assertEqual(mangled_response.get_body(), 'hello notluser!')
def test_save_load_unicode_decode_error(self):
    """Non-ASCII bytes in the URL must survive a save/load round trip."""
    url = URL('http://w3af.com/a/b/é.php?x=á')
    request = HTTPRequest(url, data='a=1')

    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(1)

    stored = HistoryItem()
    stored.request = request
    stored.response = response
    stored.save()

    fetched = HistoryItem()
    fetched.load(1)

    self.assertEqual(stored.request, fetched.request)
    self.assertEqual(stored.response.body, fetched.response.body)
    self.assertEqual(stored.request.url_object, fetched.request.url_object)
def test_tag(self):
    """update_tag() persists the tag for exactly the tagged item."""
    tag_id = random.randint(501, 999)
    tag_value = rand_alnum(10)
    url = URL('http://w3af.org/a/b/c.php')

    for i in xrange(501, 1000):
        request = HTTPRequest(url, data='a=1')
        response = HTTPResponse(200, '<html>',
                                Headers([('Content-Type', 'text/html')]),
                                url, url)
        response.set_id(i)

        entry = HistoryItem()
        entry.request = request
        entry.response = response
        if i == tag_id:
            entry.update_tag(tag_value)
        entry.save()

    loaded = HistoryItem()
    loaded.load(tag_id)
    self.assertEqual(loaded.tag, tag_value)
def test_delete(self):
    """delete() removes both the DB row and the on-disk trace file."""
    item_id = random.randint(1, 499)
    url = URL('http://w3af.com/a/b/c.php')

    request = HTTPRequest(url, data='a=1')
    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            url, url)
    response.set_id(item_id)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    fname = history._get_fname_for_id(item_id)
    self.assertTrue(os.path.exists(fname))

    history.delete(item_id)

    self.assertRaises(DBException, history.read, item_id)
    self.assertFalse(os.path.exists(fname))
def http_response(self, request, response):
    """Let every mangle plugin rewrite the response before it is returned."""
    if not self._plugin_list:
        return response

    # Create the HTTPResponse object
    http_resp = HTTPResponse.from_httplib_resp(response)

    for plugin in self._plugin_list:
        plugin.mangle_response(http_resp)

    return self._http_resp_2_httplib(response, http_resp)
def setUp(self):
    """Populate the history DB with two request/response pairs to render."""
    super(TestHTMLRendering, self).setUp()
    self.plugin = self.w3afcore.plugins.get_plugin_inst('output',
                                                       'html_file')

    HistoryItem().init()

    first_url = URL('http://w3af.com/a/b/c.php')
    request = HTTPRequest(first_url, data='a=1')
    response = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            first_url, first_url)
    response.set_id(1)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()

    second_url = URL('http://w3af.com/foo.py')
    request = HTTPRequest(second_url, data='text=xss')
    response = HTTPResponse(200, '<html>empty</html>',
                            Headers([('Content-Type', 'text/html')]),
                            second_url, second_url)
    response.set_id(4)

    history = HistoryItem()
    history.request = request
    history.response = response
    history.save()
def test_from_dict_encodings(self):
    """to_dict()/from_dict() must round-trip bodies in every test charset."""
    for body, charset in TEST_RESPONSES.values():
        html = body.encode(charset)
        resp = self.create_resp(Headers([("Content-Type", "text/xml")]),
                                html)

        msg = msgpack.dumps(resp.to_dict())
        loaded_dict = msgpack.loads(msg)
        loaded_resp = HTTPResponse.from_dict(loaded_dict)

        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(
            smart_unicode(html, DEFAULT_CHARSET, ESCAPED_CHAR,
                          on_error_guess=False),
            loaded_resp.body)
def test_render_simple(self):
    """Render one HTTP transaction to XML and compare with the expected text."""
    url = URL('http://w3af.com/a/b/c.php')
    hdr = Headers([('User-Agent', 'w3af')])
    request = HTTPRequest(url, data='a=1')
    request.set_headers(hdr)

    hdr = Headers([('Content-Type', 'text/html')])
    res = HTTPResponse(200, '<html>', hdr, url, url)

    _id = 1

    # Persist the pair so HTTPTransaction can look it up by id
    h1 = HistoryItem()
    h1.request = request
    res.set_id(_id)
    h1.response = res
    h1.save()

    x = xml_file()
    http_transaction = HTTPTransaction(x._get_jinja2_env(), _id)
    xml = http_transaction.to_string()

    expected = (u'<http-transaction id="1">\n\n'
                u' <http-request>\n'
                u' <status>POST http://w3af.com/a/b/c.php HTTP/1.1</status>\n'
                u' <headers>\n'
                u' <header field="User-agent" content="w3af" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">YT0x\n</body>\n'
                u' </http-request>\n\n'
                u' <http-response>\n'
                u' <status>HTTP/1.1 200 OK</status>\n'
                u' <headers>\n'
                u' <header field="Content-Type" content="text/html" />\n'
                u' </headers>\n'
                u' <body content-encoding="base64">PGh0bWw+\n</body>\n'
                u' </http-response>\n\n</http-transaction>')

    self.assertEqual(expected, xml)
    self.assertValidXML(xml)
def test_cookie(self):
    """A redirect header plus a cookie yields one request carrying the cookie."""
    redir_url = '/foo.bar'
    headers = Headers([('content-type', 'text/html'),
                       ('uri', redir_url),
                       ('cookie', 'abc=def')])
    http_response = HTTPResponse(200, '', headers, self.url, self.url)

    redir_fr_cookie = create_fuzzable_requests(http_response,
                                               add_self=False)
    self.assertEqual(len(redir_fr_cookie), 1)

    self.assertEqual(str(redir_fr_cookie[0].get_cookie()), 'abc=def;')
def test_provides_csp_features_no_case03(self):
    """
    Test case in which site provides broken CSP.
    """
    # Note the errors in the directive:
    #     default-src -> default-source
    #     img-src -> image-src
    header_value = "default-src ' '; img-src ' '"
    csp_headers = Headers({CSP_HEADER_W3C: header_value}.items())

    broken_response = HTTPResponse(200, '', csp_headers,
                                   self.url, self.url)

    self.assertFalse(provides_csp_features(broken_response))
def _load_from_file(self, id):
    """
    Read a request/response trace from its msgpack file on disk.

    Retries for up to ~1 second (20 attempts x 0.05s) because another
    thread or process might still be writing the file when we first see it.

    :param id: The trace id, used to compute the trace file name
    :return: A (request, response) tuple
    :raise IOError: When the file never becomes fully readable in time
    """
    fname = self._get_fname_for_id(id)
    WAIT_TIME = 0.05

    #
    # Due to some concurrency issues, we need to perform these checks
    #
    for _ in xrange(int(1 / WAIT_TIME)):
        if not os.path.exists(fname):
            time.sleep(WAIT_TIME)
            continue

        # Ok... the file exists, but it might still be being written
        req_res = open(fname, 'rb')

        try:
            data = msgpack.load(req_res, use_list=True)
        except ValueError:
            # ValueError: Extra data. returned when msgpack finds invalid
            # data in the file
            req_res.close()
            time.sleep(WAIT_TIME)
            continue

        try:
            request_dict, response_dict, canary = data
        except TypeError:
            # https://github.com/andresriancho/w3af/issues/1101
            # 'NoneType' object is not iterable
            req_res.close()
            time.sleep(WAIT_TIME)
            continue

        if not canary == self._MSGPACK_CANARY:
            # read failed, most likely because the file write is not
            # complete but for some reason it was a valid msgpack file
            req_res.close()
            time.sleep(WAIT_TIME)
            continue

        # Success!
        req_res.close()

        request = HTTPRequest.from_dict(request_dict)
        response = HTTPResponse.from_dict(response_dict)
        return request, response
    else:
        # for/else: only reached when every retry attempt was exhausted
        msg = 'Timeout expecting trace file to be ready "%s"' % fname
        raise IOError(msg)
def test_site_protected_against_xss_by_csp_case05(self):
    """
    The site enables unsafe inline scripts and eval() in its CSP script
    policies, BUT we explicitly accept those configurations.
    """
    header_value = "script-src 'self' unsafe-eval unsafe-inline; "\
                   "script-nonce 'AADD'"
    csp_headers = Headers({CSP_HEADER_W3C: header_value}.items())

    http_response = HTTPResponse(200, '', csp_headers, self.url, self.url)

    self.assertTrue(site_protected_against_xss_by_csp(http_response,
                                                      True, True))
def test_redirect_uri_relative(self):
    """A relative 'uri' header is joined against the response URL."""
    spider = web_spider()

    base_url = URL('http://www.w3af.org')
    redir_url = '/redir'
    headers = Headers([('content-type', 'text/html'),
                       ('uri', redir_url)])
    resp = HTTPResponse(200, '', headers, base_url, base_url)

    extracted_data = list(spider._headers_url_generator(resp, None))

    expected_data = [(base_url.url_join(redir_url), None, resp, False)]
    self.assertEqual(extracted_data, expected_data)
def test_handle_really_a_404(self):
    """A 200 response whose body matches the known 404 body is a 404."""
    httpretty.register_uri(httpretty.GET,
                           re.compile('w3af.com/(.*)'),
                           body=self.request_callback,
                           status=200)

    # This is the URL we found during crawling and want to know if is_404()
    query_url = URL('http://w3af.com/path1/path2/')

    success_200 = HTTPResponse(200, self.ALL_SAME_BODY,
                               Headers([('Content-Type', 'text/html')]),
                               query_url, query_url)

    self.assertTrue(self.fingerprint_404.is_404(success_200))
def _handle_send_success(self, req, res, grep, original_url, original_url_inst, start_time): """ Handle the case in "def _send" where the request was successful and we were able to get a valid HTTP response. :return: An HTTPResponse object. """ # Everything went well! rdata = req.get_data() if not rdata: msg = ('%s %s returned HTTP code "%s"' % (req.get_method(), urllib.unquote_plus(original_url), res.code)) else: printable_data = urllib.unquote_plus(rdata) if len(rdata) > 75: printable_data = '%s...' % printable_data[:75] printable_data = printable_data.replace('\n', ' ') printable_data = printable_data.replace('\r', ' ') msg = ('%s %s with data: "%s" returned HTTP code "%s"' % (req.get_method(), original_url, printable_data, res.code)) from_cache = hasattr(res, 'from_cache') and res.from_cache flags = ' (id=%s,from_cache=%i,grep=%i)' % (res.id, from_cache, grep) msg += flags om.out.debug(msg) http_resp = HTTPResponse.from_httplib_resp(res, original_url=original_url_inst) http_resp.set_id(res.id) http_resp.set_wait_time(time.time() - start_time) http_resp.set_from_cache(from_cache) # Clear the log of failed requests; this request is DONE! req_id = id(req) if req_id in self._error_count: del self._error_count[req_id] self._zero_global_error_count() if grep: self._grep(req, http_resp) return http_resp
def test_sl_3(self, *args):
    """
    Static link 3, text/javascript
    """
    js_body = ('function { ws_url ='
               '"wss://www.example.com/socketserver:8080";'
               'wslink = new WebSocket(url); return wslink} ')
    target = URL('https://www.w3af.com/')

    response = HTTPResponse(200, js_body,
                            Headers([('content-type', 'text/javascript')]),
                            target, target, _id=1)

    self.plugin.grep(FuzzableRequest(target, method='GET'), response)

    found = kb.kb.get('websockets_links', 'websockets_links')
    self.assertEqual(len(found), 1)
def test_analyze_cookies_simple_cookie(self):
    """A single Set-Cookie header is stored as one valid cookie."""
    target = URL('http://www.w3af.com/')
    headers = Headers({
        'content-type': 'text/html',
        'Set-Cookie': 'abc=def'
    }.items())

    response = HTTPResponse(200, '', headers, target, target, _id=1)

    self.plugin.grep(FuzzableRequest(target, method='GET'), response)

    self.assertEqual(len(kb.kb.get('analyze_cookies', 'cookies')), 1)
    self.assertEqual(len(kb.kb.get('analyze_cookies', 'invalid-cookies')), 0)
def test_mark(self):
    """toggle_mark() persists only for the marked history item."""
    mark_id = 3
    url = URL('http://w3af.org/a/b/c.php')

    for i in xrange(0, 500):
        request = HTTPRequest(url, data='a=1')
        response = HTTPResponse(200, '<html>',
                                Headers([('Content-Type', 'text/html')]),
                                url, url)
        response.set_id(i)

        item = HistoryItem()
        item.request = request
        item.response = response
        if i == mark_id:
            item.toggle_mark()
        item.save()

    marked = HistoryItem()
    marked.load(mark_id)
    self.assertTrue(marked.mark)

    unmarked = HistoryItem()
    unmarked.load(mark_id - 1)
    self.assertFalse(unmarked.mark)
def test_vs5(self, *args):
    """A password input posted to an http:// form action is reported."""
    body = 'header <form action="http://www.w3af.com/"><div></div>' \
           '</form><input type="password" name="passwd" />footer'
    url = URL('https://www.w3af.com/')
    headers = Headers([('content-type', 'text/html')])
    response = HTTPResponse(200, body, headers, url, url, _id=1)
    request = FuzzableRequest(url, method='GET')

    self.plugin.grep(request, response)

    findings = kb.kb.get('form_cleartext_password',
                         'form_cleartext_password')
    self.assertEqual(len(findings), 1)

    # The original compared a boolean against 1 with assertEqual, which
    # hides the actual name on failure; assert the name directly instead.
    self.assertEqual(findings[0].get_name(),
                     'Insecure password submission over HTTP')
def test_n1(self, *args):
    """
    Not vulnerable
    """
    page = ('header <form action="https://www.w3af.com/">'
            '<input type="text" /></form>footer')
    target = URL('http://www.w3af.com/')

    response = HTTPResponse(200, page,
                            Headers([('content-type', 'text/html')]),
                            target, target, _id=1)

    self.plugin.grep(FuzzableRequest(target, method='GET'), response)

    findings = kb.kb.get('form_cleartext_password',
                         'form_cleartext_password')
    self.assertEqual(len(findings), 0)
def test_mark(self):
    """Only the item whose mark was toggled comes back marked."""
    mark_id = 3
    target = URL('http://w3af.org/a/b/c.php')

    for idx in xrange(0, 500):
        req = HTTPRequest(target, data='a=1')
        resp = HTTPResponse(200, '<html>',
                            Headers([('Content-Type', 'text/html')]),
                            target, target)
        resp.set_id(idx)

        entry = HistoryItem()
        entry.request = req
        entry.response = resp
        if idx == mark_id:
            entry.toggle_mark()
        entry.save()

    with_mark = HistoryItem()
    with_mark.load(mark_id)
    self.assertTrue(with_mark.mark)

    without_mark = HistoryItem()
    without_mark.load(mark_id - 1)
    self.assertFalse(without_mark.mark)
def test_multi(self):
    """Two strange parameters in one page produce two findings."""
    body = ('<html> '
            '<a href="http://moth/abc.jsp?sql=SELECT x FROM TABLE">x</a> '
            '<a href="http://moth/abc.jsp?call=s(12,3)">x</a> '
            '</html>')
    response = HTTPResponse(200, body, self.headers,
                            self.url, self.url, _id=1)

    self.plugin.grep(self.request, response)

    vulns = kb.kb.get('strange_parameters', 'strange_parameters')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(len(vulns), 2, vulns)
def setUp(self):
    """Create the cors_origin plugin plus a baseline request/response."""
    super(TestCORSOrigin, self).setUp()

    self.co = cors_origin()

    self.url = URL('http://moth/')
    self.origin = 'http://moth/'
    self.response = HTTPResponse(200, '', Headers(),
                                 self.url, self.url, _id=3)
    self.request = FuzzableRequest(self.url)
def test_issue_3234(self):
    #
    # is_404 can not handle URLs with : in path #3234
    # https://github.com/andresriancho/w3af/issues/3234
    #
    httpretty.register_uri(httpretty.GET,
                           re.compile("w3af.com/(.*)"),
                           body="404 found",
                           status=404)

    target = URL('http://w3af.com/d:a')
    response = HTTPResponse(200, 'body', Headers(), target, target)

    self.assertFalse(self.fingerprint_404.is_404(response))
def test_image_with_image_content_type(self, *args):
    """
    Verify that our plugins don't break when we send them an image.
    """
    file_path = os.path.join(ROOT_PATH, 'plugins', 'tests', 'grep',
                             'data', 'w3af.png')

    # Read in binary mode and close the handle: the original
    # file(file_path).read() leaked the descriptor and used text mode,
    # which can corrupt the PNG bytes on some platforms.
    with open(file_path, 'rb') as png_fh:
        body = png_fh.read()

    hdrs = Headers({'Content-Type': 'image/png'}.items())
    response = HTTPResponse(200, body, hdrs,
                            self.url_inst, self.url_inst,
                            _id=random.randint(1, 5000))

    request = FuzzableRequest(self.url_inst)

    for pinst in self._plugins:
        pinst.grep(request, response)
def parse(self, filename):
    """
    Parse a SWF file from disk.

    :param filename: Path to the SWF file
    :return: A SWFParser that already processed the file contents
    """
    # Binary read + context manager: the original file(filename).read()
    # leaked the file handle and read the binary SWF in text mode.
    with open(filename, 'rb') as swf_fh:
        body = swf_fh.read()

    swf_mime = 'application/x-shockwave-flash'
    hdrs = Headers({'Content-Type': swf_mime}.items())
    response = HTTPResponse(200, body, hdrs,
                            URL('http://moth/xyz/'),
                            URL('http://moth/xyz/'),
                            _id=1)

    parser = SWFParser(response)
    parser.parse()
    return parser
def test_analyze_cookies_with_httponly_case_sensitive(self):
    """Secure + HttpOnly flags (exact case) raise no security findings."""
    target = URL('https://www.w3af.com/')
    headers = Headers({
        'content-type': 'text/html',
        'Set-Cookie': 'abc=def;Secure;HttpOnly'
    }.items())

    response = HTTPResponse(200, '', headers, target, target, _id=1)

    self.plugin.grep(FuzzableRequest(target, method='GET'), response)

    self.assertEqual(len(kb.kb.get('analyze_cookies', 'cookies')), 1)
    self.assertEqual(len(kb.kb.get('analyze_cookies', 'security')), 0)
def test_find(self):
    """find() supports tag/code/mark/has_qs filters, limits and ordering."""
    find_id = random.randint(1, 499)
    url = URL('http://w3af.org/a/b/foobar.php?foo=123')
    tag_value = rand_alnum(10)

    for i in xrange(0, 500):
        request = HTTPRequest(url, data='a=1')
        status = 302 if i == find_id else 200

        response = HTTPResponse(status, '<html>',
                                Headers([('Content-Type', 'text/html')]),
                                url, url)
        response.set_id(i)

        item = HistoryItem()
        item.request = request
        item.response = response

        if i == find_id:
            item.toggle_mark()
            item.update_tag(tag_value)

        item.save()

    searcher = HistoryItem()

    self.assertEqual(
        len(searcher.find([('tag', "%" + tag_value + "%", 'like')])), 1)
    self.assertEqual(len(searcher.find([('code', 302, '=')])), 1)
    self.assertEqual(len(searcher.find([('mark', 1, '=')])), 1)
    self.assertEqual(len(searcher.find([('has_qs', 1, '=')])), 500)
    self.assertEqual(
        len(searcher.find([('has_qs', 1, '=')], result_limit=10)), 10)

    results = searcher.find([('has_qs', 1, '=')],
                            result_limit=1,
                            order_data=[('id', 'desc')])
    self.assertEqual(results[0].id, 499)

    search_data = [('id', find_id + 1, "<"),
                   ('id', find_id - 1, ">")]
    self.assertEqual(len(searcher.find(search_data)), 1)
def test_save_load_compressed(self):
    """Saving enough traces triggers compression into zip batches, and loading still works."""
    # Save enough items to push the first batch past the compression
    # threshold
    force_compression_count = HistoryItem._UNCOMPRESSED_FILES + HistoryItem._COMPRESSED_FILE_BATCH
    force_compression_count += 150

    url = URL('http://w3af.com/a/b/c.php')
    headers = Headers([('Content-Type', 'text/html')])
    body = '<html>' + LOREM * 20

    for i in xrange(1, force_compression_count):
        request = HTTPRequest(url, data='a=%s' % i)
        response = HTTPResponse(200, body, headers, url, url)
        response.set_id(i)

        h = HistoryItem()
        h.request = request
        h.response = response
        h.save()

    # The first batch of trace files should now live in a single zip
    compressed_file = os.path.join(h.get_session_dir(), '1-150.zip')
    self.assertTrue(os.path.exists(compressed_file))

    expected_files = ['%s.trace' % i
                      for i in range(1, HistoryItem._COMPRESSED_FILE_BATCH + 1)]

    _zip = zipfile.ZipFile(compressed_file, mode='r')
    self.assertEqual(_zip.namelist(), expected_files)

    # Loading items stored inside the zip must work transparently
    for i in xrange(1, 100):
        h = HistoryItem()
        h.load(i)

        self.assertEqual(h.request.get_uri(), url)
        self.assertEqual(h.response.get_headers(), headers)
        self.assertEqual(h.response.get_body(), body)
def test():
    """
    Run using:
        python -m memory_profiler w3af/core/data/parsers/tests/test_htmlparser_performance.py
    That will activate the profiler.
    """
    # Close the handle after reading: file(OUTPUT_FILE).read() leaked it
    with open(OUTPUT_FILE) as html_fh:
        body = html_fh.read()

    url = URL('http://www.clarin.com.ar/')

    headers = Headers()
    headers['content-type'] = 'text/html'

    response = HTTPResponse(200, body, headers, url, url, charset='utf-8')

    p = HTMLParser(response)
    del p
def new_no_content_resp(uri, add_id=False): """ Return a new NO_CONTENT HTTPResponse object. :param uri: URI string or request object :param add_id: Add ID to the HTTP response """ # # WARNING: You are about to change this code? Please read the related # race condition in this commit [0] # # [0] https://github.com/andresriancho/w3af/commit/682bc2e4ad7d075bbdc469bc5d24a28e6d2e7804 # no_content_response = HTTPResponse(code=NO_CONTENT, read='', headers=Headers(), geturl=uri, original_url=uri, msg=NO_CONTENT_MSG) if add_id: no_content_response.id = consecutive_number_generator.inc() return no_content_response
def test_no_clamav_eicar(self, *args):
    """When clamd is unreachable the plugin must neither scan nor report."""
    eicar_body = pyclamd.ClamdAgnostic().EICAR()
    target = URL('http://www.w3af.com/')

    response = HTTPResponse(200, eicar_body,
                            Headers([('content-type', 'text/html')]),
                            target, target, _id=1)
    request = FuzzableRequest(target, method='GET')

    # Simulate that we don't have clamd running
    self.plugin._connection_test = Mock(return_value=False)
    self.plugin._scan_http_response = Mock()

    self.plugin.grep(request, response)

    self.assertEqual(len(kb.kb.get('clamav', 'malware')), 0)
    self.assertEqual(self.plugin._scan_http_response.call_count, 0)
def test_unsafe_inline_enabled_yes_case01(self):
    """
    Test case in which site provides "unsafe-inline" related CSP for
    script.
    """
    hrds = {
        CSP_HEADER_FIREFOX: CSP_DIRECTIVE_SCRIPT + " '" +
                            CSP_DIRECTIVE_VALUE_UNSAFE_INLINE + "'",
        CSP_HEADER_W3C: CSP_DIRECTIVE_SCRIPT + " 'self';" +
                        CSP_DIRECTIVE_REPORT_URI + " /myrelativeuri",
    }
    csp_headers = Headers(hrds.items())

    http_response = HTTPResponse(200, '', csp_headers, self.url, self.url)

    self.assertTrue(unsafe_inline_enabled(http_response))
def test_provides_csp_features_yes_case02(self):
    """
    Test case in which site provides CSP features using only report-only
    policies.
    """
    policy = ("default-src 'self'; img-src *; object-src"
              " media1.example.com media2.example.com"
              " *.cdn.example.com; script-src"
              " trustedscripts.example.com")
    csp_headers = Headers({CSP_HEADER_W3C_REPORT_ONLY: policy}.items())

    http_response = HTTPResponse(200, '', csp_headers, self.url, self.url)

    self.assertTrue(provides_csp_features(http_response))
def test_strange_headers_timing(self):
    """Grep the same response several times and measure the elapsed time."""
    body = 'Hello world'
    url = URL('http://www.w3af.com/')
    headers = Headers([('content-type', 'text/html'),
                       ('hello-world', 'yes!')])

    request = FuzzableRequest(url, method='GET')
    resp_positive = HTTPResponse(200, body, headers, url, url, _id=1)

    start = time.time()

    for _ in xrange(5):
        self.plugin.grep(request, resp_positive)

    spent = time.time() - start
    # NOTE(review): `spent` is computed but never asserted — as written the
    # test only fails if grep() raises. Presumably an upper-bound assertion
    # on `spent` was intended; confirm against the original test suite.
def test_group_info_set(self, *args):
    """Two responses with the same <meta> tag are grouped into one info set."""
    meta_body = '<meta test="user/pass"></script>'
    url_1 = URL('http://www.w3af.com/1')
    url_2 = URL('http://www.w3af.com/2')
    html_headers = Headers([('content-type', 'text/html')])

    request = FuzzableRequest(url_1, method='GET')
    first_response = HTTPResponse(200, meta_body, html_headers,
                                  url_1, url_1, _id=1)
    second_response = HTTPResponse(200, meta_body, html_headers,
                                   url_2, url_2, _id=1)

    self.plugin.grep(request, first_response)
    self.plugin.grep(request, second_response)
    self.plugin.end()

    expected_desc = u'The application sent a <meta> tag with the' \
                    u' attribute value set to "user/pass" which looks' \
                    u' interesting and should be manually reviewed. The' \
                    u' first ten URLs which sent the tag are:\n' \
                    u' - http://www.w3af.com/2\n' \
                    u' - http://www.w3af.com/1\n'

    # pylint: disable=E1103
    info_set = kb.kb.get_one('meta_tags', 'meta_tags')
    self.assertEqual(set(info_set.get_urls()), {url_1, url_2})
    self.assertEqual(info_set.get_desc(), expected_desc)
def test_provides_cors_features_false(self):
    """No CORS headers in the response means no CORS features detected."""
    target = URL('http://moth/')
    fuzzable = FuzzableRequest(target)

    plain_response = HTTPResponse(200, '', Headers(), target, target)

    opener = Mock()
    opener.GET = MagicMock(return_value=plain_response)

    cors = provides_cors_features(fuzzable, opener)

    expected_headers = Headers({'Origin': 'www.w3af.org'}.items())
    opener.GET.assert_called_with(target, headers=expected_headers)

    self.assertFalse(cors)
def test_url_session_in_body_and_url(self):
    """A JSESSIONID present in both the URL and the body yields one finding."""
    url = 'http://www.w3af.com/?JSESSIONID=231badb19b93e44f47da1bd64a8147f2'
    body = 'abc <a href="%s">def</a> footer' % url
    url = URL(url)

    headers = Headers([('content-type', 'text/html')])
    request = FuzzableRequest(url, method='GET')
    resp = HTTPResponse(200, body, headers, url, url, _id=1)

    self.plugin.grep(request, resp)

    infos = kb.kb.get('url_session', 'url_session')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(len(infos), 1)

    info = infos[0]
    self.assertEqual(info.get_name(), 'Session ID in URL')
def test_form_autocomplete_group_info_set(self):
    """Two vulnerable URLs collapse into one info set with a merged description."""
    form_body = '<form action="/login"><input type="password" name="p"></form>'
    url_1 = URL('http://www.w3af.com/1')
    url_2 = URL('http://www.w3af.com/2')
    html_headers = Headers([('content-type', 'text/html')])

    request = FuzzableRequest(url_1, method='GET')
    first = HTTPResponse(200, form_body, html_headers, url_1, url_1, _id=1)
    second = HTTPResponse(200, form_body, html_headers, url_2, url_2, _id=1)

    self.plugin.grep(request, first)
    self.plugin.grep(request, second)
    self.plugin.end()

    expected_desc = (u'The application contains 2 different URLs with a'
                     u' <form> element which has auto-complete enabled'
                     u' for password fields. The first two vulnerable'
                     u' URLs are:\n'
                     u' - http://www.w3af.com/2\n'
                     u' - http://www.w3af.com/1\n')

    # pylint: disable=E1103
    info_set = kb.kb.get_one('form_autocomplete', 'form_autocomplete')
    self.assertEqual(set(info_set.get_urls()), {url_1, url_2})
    self.assertEqual(info_set.get_desc(), expected_desc)
def test_from_dict(self):
    """An HTTPResponse survives a to_dict()/msgpack/from_dict() round trip."""
    html = "header <b>ABC</b>-<b>DEF</b>-<b>XYZ</b> footer"
    original = self.create_resp(Headers([("Content-Type", "text/html")]),
                                html)

    packed = msgpack.dumps(original.to_dict())
    restored = HTTPResponse.from_dict(msgpack.loads(packed))

    self.assertEqual(original, restored)

    # The body lock is a fresh object per instance and never compares
    # equal; drop it before comparing the remaining attributes
    original.__dict__.pop("_body_lock")
    restored.__dict__.pop("_body_lock")

    self.assertEqual(original.__dict__.values(),
                     restored.__dict__.values())
def test_from_dict(self):
    """Round-trip through to_dict()/from_dict() preserves every slot but the lock."""
    html = 'header <b>ABC</b>-<b>DEF</b>-<b>XYZ</b> footer'
    orig_resp = self.create_resp(Headers([('Content-Type', 'text/html')]),
                                 html)

    serialized = msgpack.dumps(orig_resp.to_dict())
    loaded_resp = HTTPResponse.from_dict(msgpack.loads(serialized))

    self.assertEqual(orig_resp, loaded_resp)

    # Compare slot-by-slot, skipping the per-instance body lock which
    # never compares equal between instances
    cmp_attrs = list(orig_resp.__slots__)
    cmp_attrs.remove('_body_lock')

    self.assertEqual({name: getattr(orig_resp, name) for name in cmp_attrs},
                     {name: getattr(loaded_resp, name) for name in cmp_attrs})
def _log_req_resp(self, request, response):
    """
    Send the request and the response to the output manager.
    """
    if isinstance(response, HTTPResponse):
        resp = response
    else:
        # Wrap the raw httplib response so the output manager gets a
        # consistent object type
        url = request.url_object
        resp = HTTPResponse.from_httplib_resp(response, original_url=url)
        resp.set_id(response.id)

    if not isinstance(request, HTTPRequest):
        msg = 'There is something odd going on in OutputManagerHandler,'\
              ' request should be of type HTTPRequest got %s'\
              ' instead.'
        raise TypeError(msg % type(request))

    om.out.log_http(request, resp)