class TestExtendedUrllibProxy(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() # Start the proxy daemon self._proxy = Proxy('127.0.0.1', 0, ExtendedUrllib(), w3afProxyHandler) self._proxy.start() self._proxy.wait_for_start() port = self._proxy.get_port() # Configure the proxy settings = OpenerSettings() options = settings.get_options() proxy_address_opt = options['proxy_address'] proxy_port_opt = options['proxy_port'] proxy_address_opt.set_value('127.0.0.1') proxy_port_opt.set_value(port) settings.set_options(options) self.uri_opener.settings = settings def tearDown(self): self.uri_opener.end() def test_http_default_port_via_proxy(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_http_port_specification_via_proxy(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_https_via_proxy(self): TODO = 'Skip this test because of a strange bug with the extended'\ ' url library and w3af\'s local proxy daemon. More info here:'\ ' https://github.com/andresriancho/w3af/issues/183' raise SkipTest(TODO) url = URL(get_moth_https()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_offline_port_via_proxy(self): url = URL('http://127.0.0.1:8181/') http_response = self.uri_opener.GET(url, cache=False) self.assertEqual(http_response.get_code(), 400) def test_POST_via_proxy(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) http_response = self.uri_opener.POST(url, data='text=123456abc', cache=False) self.assertIn('123456abc', http_response.body)
def test_sessions_simultaneous(self): def request_callback(request, uri, headers): received_cookie_value = request.headers.get('cookie', None) if received_cookie_value is not None: return 200, headers, 'Cookie %s received' % received_cookie_value else: return 200, headers, 'Cookie not sent' httpretty.register_uri(httpretty.GET, self.URL_SEND_COOKIE + '1', adding_headers={'Set-Cookie': '11111111'}) httpretty.register_uri(httpretty.GET, self.URL_SEND_COOKIE + '2', adding_headers={'Set-Cookie': '222222222'}) httpretty.register_uri(httpretty.GET, self.URL_CHECK_COOKIE, body=request_callback) uri_opener = ExtendedUrllib() session_1 = uri_opener.get_new_session() session_2 = uri_opener.get_new_session() uri_opener.GET(URL(self.URL_SEND_COOKIE + '1'), session=session_1) uri_opener.GET(URL(self.URL_SEND_COOKIE + '2'), session=session_2) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), session=session_1) self.assertIn('Cookie 11111111 received', resp.body) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), session=session_2) self.assertIn('Cookie 222222222 received', resp)
def test_sessions_basic(self): httpretty.register_uri( httpretty.GET, self.URL_SEND_COOKIE, adding_headers={'Set-Cookie': self.COOKIE_VALUE}) httpretty.register_uri(httpretty.GET, self.URL_CHECK_COOKIE, body=self.request_callback) uri_opener = ExtendedUrllib() session_1 = uri_opener.get_new_session() session_2 = uri_opener.get_new_session() uri_opener.GET(URL(self.URL_SEND_COOKIE), session=session_1) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True, session=session_1) self.assertIn('Cookie received', resp) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True, session=session_2) self.assertIn('Cookie not sent', resp)
class TestXUrllibIntegration(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() @attr('ci_fails') def test_ntlm_auth_not_configured(self): self.uri_opener = ExtendedUrllib() url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/") http_response = self.uri_opener.GET(url, cache=False) self.assertIn('Must authenticate.', http_response.body) @attr('ci_fails') def test_ntlm_auth_valid_creds(self): self.uri_opener = ExtendedUrllib() settings = OpenerSettings() options = settings.get_options() ntlm_domain = options['ntlm_auth_domain'] ntlm_user = options['ntlm_auth_user'] ntlm_pass = options['ntlm_auth_passwd'] ntlm_url = options['ntlm_auth_url'] ntlm_domain.set_value('moth') ntlm_user.set_value('admin') ntlm_pass.set_value('admin') ntlm_url.set_value('http://moth/w3af/core/ntlm_auth/ntlm_v1/') settings.set_options(options) self.uri_opener.settings = settings url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/") http_response = self.uri_opener.GET(url, cache=False) self.assertIn('You are admin from MOTH/', http_response.body) def test_gzip(self): url = URL(get_moth_http('/core/gzip/gzip.html')) res = self.uri_opener.GET(url, cache=False) headers = res.get_headers() content_encoding, _ = headers.iget('content-encoding', '') test_res = 'gzip' in content_encoding or \ 'compress' in content_encoding self.assertTrue(test_res, content_encoding) def test_get_cookies(self): self.assertEqual(len([c for c in self.uri_opener.get_cookies()]), 0) url_sends_cookie = URL(get_moth_http('/core/cookies/set-cookie.py')) self.uri_opener.GET(url_sends_cookie, cache=False) self.assertEqual(len([c for c in self.uri_opener.get_cookies()]), 1) cookie = [c for c in self.uri_opener.get_cookies()][0] self.assertEqual('127.0.0.1', cookie.domain)
def test_xurllib(self): uri_opener = ExtendedUrllib() uri_opener.GET(self.URL_SENDS_COOKIE) resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=True) self.assertTrue('Cookie was sent.' in resp) resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=False) self.assertTrue('Cookie was NOT sent.' in resp) resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=True) self.assertTrue('Cookie was sent.' in resp)
class TestXUrllibTimeout(unittest.TestCase): def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() def test_timeout(self): upper_daemon = UpperDaemon(TimeoutTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) self.uri_opener.settings.set_configured_timeout(0.5) self.uri_opener.clear_timeout() # We can mock this because it's being tested at TestXUrllibDelayOnError self.uri_opener._pause_on_http_error = Mock() start = time.time() try: self.uri_opener.GET(url) except HTTPRequestException, hre: self.assertEqual(hre.message, 'HTTP timeout error') except Exception, e: msg = 'Not expecting: "%s"' self.assertTrue(False, msg % e.__class__.__name__)
def test_send_mangled(self): xurllib = ExtendedUrllib() xurllib.set_evasion_plugins([ self_reference(), ]) url = URL('http://moth/') http_response = xurllib.GET(url) self.assertEqual(http_response.get_url().url_string, u'http://moth/./')
def test_xurllib(self): httpretty.register_uri( httpretty.GET, self.URL_SEND_COOKIE, adding_headers={'Set-Cookie': self.COOKIE_VALUE}) httpretty.register_uri(httpretty.GET, self.URL_CHECK_COOKIE, body=self.request_callback) uri_opener = ExtendedUrllib() uri_opener.GET(URL(self.URL_SEND_COOKIE)) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True) self.assertIn('Cookie received', resp) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=False) self.assertIn('Cookie not sent', resp) resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True) self.assertIn('Cookie received', resp)
def test_headers_upper_case(self): url = "http://w3af.org/" httpretty.register_uri(httpretty.GET, url, body='hello world', content_type="application/html") uri_opener = ExtendedUrllib() res = uri_opener.GET(URL(url), cache=False) headers = res.get_headers() content_encoding, _ = headers.get('Content-Type', '') self.assertIn('gzip', content_encoding) self.assertIn('View HTTP response headers.', res.get_body())
def test_headers_upper_case(self): """ This unittest is skipped here, but shouldn't be removed, it is a reminder that w3af (and urllib/httplib) does always perform a call to lower() for all the data received over the wire. This gives w3af a modified view of the reality, we never see what was really sent to us. """ url = "http://w3af.org/" httpretty.register_uri(httpretty.GET, url, body='hello world', content_type="application/html") uri_opener = ExtendedUrllib() res = uri_opener.GET(URL(url), cache=False) headers = res.get_headers() content_encoding = headers.get('Content-Type', '') self.assertIn('application/html', content_encoding)
def test_demo_testfire_net(self): # We don't control the demo.testfire.net domain, so we'll check if its # up before doing anything else uri_opener = ExtendedUrllib() login_url = URL(self.demo_testfire + 'login.aspx') try: res = uri_opener.GET(login_url) except: raise SkipTest('demo.testfire.net is unreachable!') else: if not 'Online Banking Login' in res.body: raise SkipTest('demo.testfire.net has changed!') self._scan(self.demo_testfire_net['target'], self.demo_testfire_net['plugins']) urls = self.kb.get_all_known_urls() url_strings = set(str(u) for u in urls) self.assertTrue(self.demo_testfire + 'queryxpath.aspx' in url_strings) self.assertTrue(self.demo_testfire + 'queryxpath.aspx.cs' in url_strings)
class TestCSRF(PluginTest): target_url = 'http://moth/w3af/audit/csrf/' _run_configs = { 'cfg': { 'target': target_url, 'plugins': { 'audit': (PluginConfig('csrf'), ), 'crawl': (PluginConfig('web_spider', ('only_forward', True, PluginConfig.BOOL)), ) } } } def setUp(self): super(TestCSRF, self).setUp() self.csrf_plugin = csrf() self.uri_opener = ExtendedUrllib() self.csrf_plugin.set_url_opener(self.uri_opener) @attr('ci_fails') def test_found_csrf(self): EXPECTED = [ ('/w3af/audit/csrf/vulnerable/buy.php'), ('/w3af/audit/csrf/vulnerable-rnd/buy.php'), #@see: https://github.com/andresriancho/w3af/issues/120 #('/w3af/audit/csrf/vulnerable-token-ignored/buy.php'), ('/w3af/audit/csrf/link-vote/vote.php') ] # Run the scan cfg = self._run_configs['cfg'] self._scan(cfg['target'], cfg['plugins']) # Assert the general results vulns = self.kb.get('csrf', 'csrf') self.assertEquals(set(EXPECTED), set([v.get_url().get_path() for v in vulns])) self.assertTrue( all(['CSRF vulnerability' == v.get_name() for v in vulns])) def test_resp_is_equal(self): url = URL('http://www.w3af.com/') headers = Headers([('content-type', 'text/html')]) r1 = HTTPResponse(200, 'body', headers, url, url) r2 = HTTPResponse(404, 'body', headers, url, url) self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2)) r1 = HTTPResponse(200, 'a', headers, url, url) r2 = HTTPResponse(200, 'b', headers, url, url) self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2)) r1 = HTTPResponse(200, 'a', headers, url, url) r2 = HTTPResponse(200, 'a', headers, url, url) self.assertTrue(self.csrf_plugin._is_resp_equal(r1, r2)) @attr('ci_fails') def test_is_suitable(self): # False because no cookie is set and no QS nor post-data url = URL('http://moth/') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) # False because no cookie is set url = URL('http://moth/?id=3') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) url_sends_cookie = URL( 'http://moth/w3af/core/cookie_handler/set-cookie.php') self.uri_opener.GET(url_sends_cookie) # Still false because it doesn't have any QS or POST data url = URL('http://moth/') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) self.csrf_plugin._strict_mode = True # Still false because of the strict mode url = URL('http://moth/?id=3') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) # False, no items in post-data url = URL('http://moth/') req = FuzzableRequest(url, method='POST', post_data=URLEncodedForm()) suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) # True, items in DC, POST (passes strict mode) and cookies url = URL('http://moth/') form_params = FormParameters() form_params.add_field_by_attr_items([('name', 'test'), ('type', 'text')]) form = URLEncodedForm(form_params) req = FuzzableRequest(url, method='POST', post_data=form) suitable = self.csrf_plugin._is_suitable(req) self.assertTrue(suitable) self.csrf_plugin._strict_mode = False # True now that we have strict mode off, cookies and QS url = URL('http://moth/?id=3') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertTrue(suitable) @attr('ci_fails') def test_is_origin_checked_true_case01(self): url = URL('http://moth/w3af/audit/csrf/referer/buy.php?shares=123') headers = Headers([('Referer', 'http://moth/w3af/audit/csrf/referer/') ]) freq = FuzzableRequest(url, method='GET', headers=headers) orig_response = self.uri_opener.send_mutant(freq) origin_checked = self.csrf_plugin._is_origin_checked( freq, orig_response, None) self.assertTrue(origin_checked) @attr('ci_fails') def test_is_origin_checked_true_case02(self): url = URL('http://moth/w3af/audit/csrf/referer-rnd/buy.php?shares=123') headers = Headers([('Referer', 'http://moth/w3af/audit/csrf/referer-rnd/')]) freq = FuzzableRequest(url, method='GET', headers=headers) orig_response = self.uri_opener.send_mutant(freq) origin_checked = self.csrf_plugin._is_origin_checked( freq, orig_response, None) self.assertTrue(origin_checked) @attr('ci_fails') def test_is_origin_checked_false(self): url = URL('http://moth/w3af/audit/csrf/vulnerable/buy.php?shares=123') headers = Headers([('Referer', 'http://moth/w3af/audit/csrf/referer-rnd/')]) freq = FuzzableRequest(url, method='GET', headers=headers) orig_response = self.uri_opener.send_mutant(freq) origin_checked = self.csrf_plugin._is_origin_checked( freq, orig_response, None) self.assertFalse(origin_checked) @attr('ci_fails') def test_is_token_checked_true(self): generator = URL('http://moth/w3af/audit/csrf/secure-replay-allowed/') http_response = self.uri_opener.GET(generator) # Please note that this freq holds a fresh/valid CSRF token cookie = Cookie.from_http_response(http_response) freq = FuzzableRequest(generator, cookie=cookie) # FIXME: # And I use this token here to get the original response, and if the # application is properly developed, that token will be invalidated # and that's where this algorithm fails. original_response = self.uri_opener.send_mutant(freq) token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'} checked = self.csrf_plugin._is_token_checked(freq, token, original_response) self.assertTrue(checked) @attr('ci_fails') def test_is_token_checked_false(self): """ This covers the case where there is a token but for some reason it is NOT verified by the web application. """ generator = URL( 'http://moth/w3af/audit/csrf/vulnerable-token-ignored/') http_response = self.uri_opener.GET(generator) # Please note that this freq holds a fresh/valid CSRF token cookie = Cookie.from_http_response(http_response) freq = FuzzableRequest(generator, cookie=cookie) # FIXME: # And I use this token here to get the original response, and if the # application is properly developed, that token will be invalidated # and that's where this algorithm fails. original_response = self.uri_opener.send_mutant(freq) token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'} checked = self.csrf_plugin._is_token_checked(freq, token, original_response) self.assertFalse(checked)
class TestXUrllibDelayOnError(unittest.TestCase): def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() def test_increasing_delay_on_errors(self): expected_log = {0: False, 70: False, 40: False, 10: False, 80: False, 50: False, 20: False, 90: False, 60: False, 30: False, 100: False} self.assertEqual(self.uri_opener._sleep_log, expected_log) return_empty_daemon = UpperDaemon(EmptyTCPHandler) return_empty_daemon.start() return_empty_daemon.wait_for_start() port = return_empty_daemon.get_port() # No retries means that the test is easier to read/understand self.uri_opener.settings.set_max_http_retries(0) # We want to keep going, don't test the _should_stop_scan here. self.uri_opener._should_stop_scan = lambda x: False url = URL('http://127.0.0.1:%s/' % port) http_exception_count = 0 loops = 100 # Not check the delays with patch('w3af.core.data.url.extended_urllib.time.sleep') as sleepm: for i in xrange(loops): try: self.uri_opener.GET(url, cache=False) except HTTPRequestException: http_exception_count += 1 except Exception, e: msg = 'Not expecting: "%s"' self.assertTrue(False, msg % e.__class__.__name__) else: self.assertTrue(False, 'Expecting HTTPRequestException') self.assertEqual(loops - 1, i) # Note that the timeouts are increasing based on the error rate and # SOCKET_ERROR_DELAY expected_calls = [call(1.5), call(3.0), call(4.5), call(6.0), call(7.5), call(9.0), call(10.5), call(12.0), call(13.5)] expected_log = {0: False, 70: True, 40: True, 10: True, 80: True, 50: True, 20: True, 90: True, 60: True, 30: True, 100: False} self.assertEqual(expected_calls, sleepm.call_args_list) self.assertEqual(http_exception_count, 100) self.assertEqual(self.uri_opener._sleep_log, expected_log) # This one should also clear the log try: self.uri_opener.GET(url, cache=False) except HTTPRequestException: pass else: self.assertTrue(False, 'Expected HTTPRequestException') # The log was cleared, all values should be False self.assertTrue(all([not v for v in self.uri_opener._sleep_log.values()]))
class TestRedirectHandlerExtendedUrllib(unittest.TestCase): """ Test the redirect handler using ExtendedUrllib """ REDIR_DEST = 'http://w3af.org/dest' REDIR_SRC = 'http://w3af.org/src' OK_BODY = 'Body!' def setUp(self): consecutive_number_generator.reset() self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() @httpretty.activate def test_redirect_302_simple_no_follow(self): httpretty.register_uri(httpretty.GET, self.REDIR_SRC, body='', status=FOUND, adding_headers={'Location': self.REDIR_DEST}) redirect_src = URL(self.REDIR_SRC) response = self.uri_opener.GET(redirect_src) location, _ = response.get_headers().iget('location') self.assertEqual(location, self.REDIR_DEST) self.assertEqual(response.get_code(), FOUND) self.assertEqual(response.get_id(), 1) @httpretty.activate def test_redirect_302_simple_follow(self): httpretty.register_uri(httpretty.GET, self.REDIR_SRC, body='', status=FOUND, adding_headers={'Location': self.REDIR_DEST}) httpretty.register_uri(httpretty.GET, self.REDIR_DEST, body=self.OK_BODY, status=200) redirect_src = URL(self.REDIR_SRC) response = self.uri_opener.GET(redirect_src, follow_redirects=True) self.assertEqual(response.get_code(), OK) self.assertEqual(response.get_body(), self.OK_BODY) self.assertEqual(response.get_redir_uri(), URL(self.REDIR_DEST)) self.assertEqual(response.get_url(), URL(self.REDIR_SRC)) self.assertEqual(response.get_id(), 2) @httpretty.activate def test_redirect_301_loop(self): httpretty.register_uri(httpretty.GET, self.REDIR_SRC, body='', status=MOVED_PERMANENTLY, adding_headers={'Location': self.REDIR_DEST}) httpretty.register_uri(httpretty.GET, self.REDIR_DEST, body='', status=MOVED_PERMANENTLY, adding_headers={'URI': self.REDIR_SRC}) redirect_src = URL(self.REDIR_SRC) response = self.uri_opener.GET(redirect_src, follow_redirects=True) # At some point the handler detects a loop and stops self.assertEqual(response.get_code(), MOVED_PERMANENTLY) self.assertEqual(response.get_body(), '') self.assertEqual(response.get_id(), 9) @httpretty.activate def test_redirect_302_without_location_returns_302_response(self): # Breaks the RFC httpretty.register_uri(httpretty.GET, self.REDIR_SRC, body='', status=FOUND) redirect_src = URL(self.REDIR_SRC) response = self.uri_opener.GET(redirect_src, follow_redirects=True) # Doesn't follow the redirects self.assertEqual(response.get_code(), FOUND) self.assertEqual(response.get_body(), '') self.assertEqual(response.get_id(), 1) @httpretty.activate def test_redirect_no_follow_file_proto(self): httpretty.register_uri( httpretty.GET, self.REDIR_SRC, body='', status=FOUND, adding_headers={'Location': 'file:///etc/passwd'}) redirect_src = URL(self.REDIR_SRC) response = self.uri_opener.GET(redirect_src, follow_redirects=True) self.assertEqual(response.get_code(), FOUND) self.assertEqual(response.get_body(), '') self.assertEqual(response.get_url(), URL(self.REDIR_SRC)) self.assertEqual(response.get_id(), 1)
class TestXUrllib(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' MOCK_URL = 'http://www.w3af.org/' def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() httpretty.reset() def test_basic(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_basic_ssl(self): url = URL(get_moth_https()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_cache(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_qs_params(self): url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('123456abc', http_response.body) url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('root:x:0', http_response.body) @httpretty.activate def test_GET_with_post_data(self): httpretty.register_uri(httpretty.GET, self.MOCK_URL, body=self.MOTH_MESSAGE, status=200) mock_url = URL(self.MOCK_URL) data = 'abc=123&def=456' response = self.uri_opener.GET(mock_url, data=data) # Check the response self.assertEqual(response.get_code(), 200) self.assertEqual(response.get_body(), self.MOTH_MESSAGE) # And use httpretty to check the request self.assertEqual(httpretty.last_request().method, 'GET') request_headers = httpretty.last_request().headers self.assertIn('content-length', request_headers) self.assertEqual(str(len(data)), request_headers['content-length']) self.assertEqual(httpretty.last_request().body, data) self.assertEqual(httpretty.last_request().path, '/') @httpretty.activate def test_GET_with_post_data_and_qs(self): httpretty.register_uri(httpretty.GET, self.MOCK_URL, body=self.MOTH_MESSAGE, status=200) qs = '?qs=1' mock_url = URL(self.MOCK_URL + qs) data = 'abc=123&def=456' response = self.uri_opener.GET(mock_url, data=data) # Check the response self.assertEqual(response.get_code(), 200) self.assertEqual(response.get_body(), self.MOTH_MESSAGE) # And use httpretty to check the request self.assertEqual(httpretty.last_request().method, 'GET') request_headers = httpretty.last_request().headers self.assertIn('content-length', request_headers) self.assertEqual(str(len(data)), request_headers['content-length']) self.assertEqual(httpretty.last_request().body, data) self.assertEqual(httpretty.last_request().path, '/' + qs) def test_post(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) data = URLEncodedForm() data['text'] = ['123456abc'] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn('123456abc', http_response.body) def test_post_special_chars(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) test_data = u'abc<def>"-á-' data = URLEncodedForm() data['text'] = [test_data] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn(test_data, http_response.body) def test_unknown_domain(self): url = URL('http://longsitethatdoesnotexistfoo.com/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_file_proto(self): url = URL('file://foo/bar.txt') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_closed(self): # TODO: Change 2312 by an always closed/non-http port url = URL('http://127.0.0.1:2312/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_not_http(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) try: self.uri_opener.GET(url) except HTTPRequestException, hre: self.assertEqual(hre.value, "Bad HTTP response status line: ''") else:
class TestExtendedUrllibProxy(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() # Start the proxy daemon self._proxy = Proxy('127.0.0.2', 0, ExtendedUrllib(), ProxyHandler) self._proxy.start() self._proxy.wait_for_start() port = self._proxy.get_port() # Configure the proxy settings = OpenerSettings() options = settings.get_options() proxy_address_opt = options['proxy_address'] proxy_port_opt = options['proxy_port'] proxy_address_opt.set_value('127.0.0.2') proxy_port_opt.set_value(port) settings.set_options(options) self.uri_opener.settings = settings def tearDown(self): self.uri_opener.end() def test_http_default_port_via_proxy(self): # TODO: Write this test pass def test_http_port_specification_via_proxy(self): self.assertEqual(self._proxy.total_handled_requests, 0) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertEqual(self._proxy.total_handled_requests, 1) def test_https_via_proxy(self): self.assertEqual(self._proxy.total_handled_requests, 0) url = URL(get_moth_https()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertEqual(self._proxy.total_handled_requests, 1) def test_offline_port_via_proxy(self): url = URL('http://127.0.0.1:8181/') http_response = self.uri_opener.GET(url, cache=False) self.assertEqual(http_response.get_code(), 500) self.assertIn('Connection refused', http_response.body) def test_POST_via_proxy(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) http_response = self.uri_opener.POST(url, data='text=123456abc', cache=False) self.assertIn('123456abc', http_response.body)
class TestXUrllib(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() def test_basic(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_cache(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_qs_params(self): url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('123456abc', http_response.body) url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('root:x:0', http_response.body) def test_post(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) data = URLEncodedForm() data['text'] = ['123456abc'] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn('123456abc', http_response.body) def test_post_special_chars(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) test_data = u'abc<def>"-á-' data = URLEncodedForm() data['text'] = [test_data] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn(test_data, http_response.body) def test_unknown_domain(self): url = URL('http://longsitethatdoesnotexistfoo.com/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_file_proto(self): url = URL('file://foo/bar.txt') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_closed(self): # TODO: Change 2312 by an always closed/non-http port url = URL('http://127.0.0.1:2312/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_not_http(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) try: self.uri_opener.GET(url) except HTTPRequestException, hre: self.assertEqual(hre.value, "Bad HTTP response status line: ''") else:
class TestCSRF(PluginTest): target_url = 'http://moth/w3af/audit/csrf/' _run_configs = { 'cfg': { 'target': target_url, 'plugins': { 'audit': (PluginConfig('csrf'), ), 'crawl': (PluginConfig('web_spider', ('only_forward', True, PluginConfig.BOOL)), ) } } } def setUp(self): super(TestCSRF, self).setUp() self.csrf_plugin = csrf() self.uri_opener = ExtendedUrllib() self.csrf_plugin.set_url_opener(self.uri_opener) @attr('ci_fails') def test_found_csrf(self): EXPECTED = [ ('/w3af/audit/csrf/vulnerable/buy.php'), ('/w3af/audit/csrf/vulnerable-rnd/buy.php'), #@see: https://github.com/andresriancho/w3af/issues/120 #('/w3af/audit/csrf/vulnerable-token-ignored/buy.php'), ('/w3af/audit/csrf/link-vote/vote.php') ] # Run the scan cfg = self._run_configs['cfg'] self._scan(cfg['target'], cfg['plugins']) # Assert the general results vulns = self.kb.get('csrf', 'csrf') self.assertEquals(set(EXPECTED), set([v.get_url().get_path() for v in vulns])) self.assertTrue( all(['CSRF vulnerability' == v.get_name() for v in vulns])) def test_resp_is_equal(self): url = URL('http://www.w3af.com/') headers = Headers([('content-type', 'text/html')]) r1 = HTTPResponse(200, 'body', headers, url, url) r2 = HTTPResponse(404, 'body', headers, url, url) self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2)) r1 = HTTPResponse(200, 'a', headers, url, url) r2 = HTTPResponse(200, 'b', headers, url, url) self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2)) r1 = HTTPResponse(200, 'a', headers, url, url) r2 = HTTPResponse(200, 'a', headers, url, url) self.assertTrue(self.csrf_plugin._is_resp_equal(r1, r2)) @attr('ci_fails') def test_is_suitable(self): # False because no cookie is set and no QS nor post-data url = URL('http://moth/') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) # False because no cookie is set url = URL('http://moth/?id=3') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) url_sends_cookie = URL( 'http://moth/w3af/core/cookie_handler/set-cookie.php') self.uri_opener.GET(url_sends_cookie) # Still false because it doesn't have any QS or POST data url = URL('http://moth/') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) self.csrf_plugin._strict_mode = True # Still false because of the strict mode url = URL('http://moth/?id=3') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) # False, no items in dc url = URL('http://moth/') req = FuzzableRequest(url, method='POST', dc=Form()) suitable = self.csrf_plugin._is_suitable(req) self.assertFalse(suitable) # True, items in DC, POST (passes strict mode) and cookies url = URL('http://moth/') form = Form() form.add_input([('name', 'test'), ('type', 'text')]) req = FuzzableRequest(url, method='POST', dc=form) suitable = self.csrf_plugin._is_suitable(req) self.assertTrue(suitable) self.csrf_plugin._strict_mode = False # True now that we have strict mode off, cookies and QS url = URL('http://moth/?id=3') req = FuzzableRequest(url, method='GET') suitable = self.csrf_plugin._is_suitable(req) self.assertTrue(suitable) @attr('ci_fails') def test_is_origin_checked_true_case01(self): url = URL('http://moth/w3af/audit/csrf/referer/buy.php?shares=123') headers = Headers([('Referer', 'http://moth/w3af/audit/csrf/referer/') ]) freq = FuzzableRequest(url, method='GET', headers=headers) orig_response = self.uri_opener.send_mutant(freq) origin_checked = self.csrf_plugin._is_origin_checked( freq, orig_response) self.assertTrue(origin_checked) @attr('ci_fails') def test_is_origin_checked_true_case02(self): url = URL('http://moth/w3af/audit/csrf/referer-rnd/buy.php?shares=123') headers = Headers([('Referer', 'http://moth/w3af/audit/csrf/referer-rnd/')]) freq = FuzzableRequest(url, method='GET', headers=headers) orig_response = self.uri_opener.send_mutant(freq) origin_checked = self.csrf_plugin._is_origin_checked( freq, orig_response) self.assertTrue(origin_checked) @attr('ci_fails') def test_is_origin_checked_false(self): url = URL('http://moth/w3af/audit/csrf/vulnerable/buy.php?shares=123') headers = Headers([('Referer', 'http://moth/w3af/audit/csrf/referer-rnd/')]) freq = FuzzableRequest(url, method='GET', headers=headers) orig_response = self.uri_opener.send_mutant(freq) origin_checked = self.csrf_plugin._is_origin_checked( freq, orig_response) self.assertFalse(origin_checked) def test_is_csrf_token_true_case01(self): self.csrf_plugin.is_csrf_token('token', 'f842eb01b87a8ee18868d3bf80a558f3') def test_is_csrf_token_true_case02(self): self.csrf_plugin.is_csrf_token('secret', 'f842eb01b87a8ee18868d3bf80a558f3') def test_is_csrf_token_true_case03(self): self.csrf_plugin.is_csrf_token('csrf', 'f842eb01b87a8ee18868d3bf80a558f3') def test_is_csrf_token_false_case01(self): self.csrf_plugin.is_csrf_token('token', '') def test_is_csrf_token_false_case02(self): self.csrf_plugin.is_csrf_token('secret', 'helloworld') def test_is_csrf_token_false_case03(self): self.csrf_plugin.is_csrf_token('secret', 'helloworld123') def test_is_csrf_token_false_case04(self): self.csrf_plugin.is_csrf_token('secret', 'hello world 123') def test_is_csrf_token_false_case05(self): lorem = ('Lorem ipsum dolor sit amet, consectetur adipiscing elit.' ' Curabitur at eros elit, rhoncus feugiat libero. Praesent' ' lobortis ultricies est gravida tempor. Sed tortor mi,' ' euismod at interdum quis, hendrerit vitae risus. Sed' ' iaculis, ante sagittis ullamcorper molestie, metus nibh' ' posuere purus, non tempor massa leo at odio. Duis quis' ' elit enim. Morbi lobortis est sed metus adipiscing in' ' lacinia est porttitor. Suspendisse potenti. Morbi pretium' ' lacinia magna, sit amet tincidunt enim vestibulum sed.') self.csrf_plugin.is_csrf_token('secret', lorem) def test_is_csrf_token_false_case06(self): self.csrf_plugin.is_csrf_token('token', 'f842e') def test_find_csrf_token_true_simple(self): url = URL('http://moth/w3af/audit/csrf/') query_string = parse_qs('secret=f842eb01b87a8ee18868d3bf80a558f3') freq = FuzzableRequest(url, method='GET', dc=query_string) token = self.csrf_plugin._find_csrf_token(freq) self.assertIn('secret', token) def test_find_csrf_token_true_repeated(self): url = URL('http://moth/w3af/audit/csrf/') query_string = parse_qs('secret=f842eb01b87a8ee18868d3bf80a558f3' '&secret=not a token') freq = FuzzableRequest(url, method='GET', dc=query_string) token = self.csrf_plugin._find_csrf_token(freq) self.assertIn('secret', token) def test_find_csrf_token_false(self): url = URL('http://moth/w3af/audit/csrf/') query_string = parse_qs('secret=not a token') freq = FuzzableRequest(url, method='GET', dc=query_string) token = self.csrf_plugin._find_csrf_token(freq) self.assertNotIn('secret', token) @attr('ci_fails') def test_is_token_checked_true(self): generator = URL('http://moth/w3af/audit/csrf/secure-replay-allowed/') http_response = self.uri_opener.GET(generator) # Please note that this freq holds a fresh/valid CSRF token freq_lst = create_fuzzable_requests(http_response, add_self=False) self.assertEqual(len(freq_lst), 1) freq = freq_lst[0] # FIXME: # And I use this token here to get the original response, and if the # application is properly developed, that token will be invalidated # and that's where this algorithm fails. original_response = self.uri_opener.send_mutant(freq) token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'} checked = self.csrf_plugin._is_token_checked(freq, token, original_response) self.assertTrue(checked) @attr('ci_fails') def test_is_token_checked_false(self): """ This covers the case where there is a token but for some reason it is NOT verified by the web application. """ generator = URL( 'http://moth/w3af/audit/csrf/vulnerable-token-ignored/') http_response = self.uri_opener.GET(generator) # Please note that this freq holds a fresh/valid CSRF token freq_lst = create_fuzzable_requests(http_response, add_self=False) self.assertEqual(len(freq_lst), 1) freq = freq_lst[0] # FIXME: # And I use this token here to get the original response, and if the # application is properly developed, that token will be invalidated # and that's where this algorithm fails. original_response = self.uri_opener.send_mutant(freq) token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'} checked = self.csrf_plugin._is_token_checked(freq, token, original_response) self.assertFalse(checked)
class TestXUrllib(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' MOCK_URL = 'http://www.w3af.org/' def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() httpretty.reset() def test_basic(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_basic_ssl(self): url = URL(get_moth_https()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_github_ssl(self): url = URL( 'https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json' ) http_response = self.uri_opener.GET(url, cache=False, binary_response=True, respect_size_limit=False) self.assertIn('jquery', http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_cache(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_qs_params(self): url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('123456abc', http_response.body) url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('root:x:0', http_response.body) @httpretty.activate def test_GET_with_post_data(self): httpretty.register_uri(httpretty.GET, self.MOCK_URL, body=self.MOTH_MESSAGE, status=200) mock_url = URL(self.MOCK_URL) data = 'abc=123&def=456' response = self.uri_opener.GET(mock_url, data=data) # Check the response self.assertEqual(response.get_code(), 200) self.assertEqual(response.get_body(), self.MOTH_MESSAGE) # And use httpretty to check the request self.assertEqual(httpretty.last_request().method, 'GET') request_headers = httpretty.last_request().headers self.assertIn('content-length', request_headers) self.assertEqual(str(len(data)), request_headers['content-length']) self.assertEqual(httpretty.last_request().body, data) self.assertEqual(httpretty.last_request().path, '/') @httpretty.activate def test_GET_with_post_data_and_qs(self): httpretty.register_uri(httpretty.GET, self.MOCK_URL, body=self.MOTH_MESSAGE, status=200) qs = '?qs=1' mock_url = URL(self.MOCK_URL + qs) data = 'abc=123&def=456' response = self.uri_opener.GET(mock_url, data=data) # Check the response self.assertEqual(response.get_code(), 200) self.assertEqual(response.get_body(), self.MOTH_MESSAGE) # And use httpretty to check the request self.assertEqual(httpretty.last_request().method, 'GET') request_headers = httpretty.last_request().headers self.assertIn('content-length', request_headers) self.assertEqual(str(len(data)), request_headers['content-length']) self.assertEqual(httpretty.last_request().body, data) self.assertEqual(httpretty.last_request().path, '/' + qs) def test_post(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) data = URLEncodedForm() data['text'] = ['123456abc'] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn('123456abc', http_response.body) def test_post_special_chars(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) test_data = u'abc<def>"-á-' data = URLEncodedForm() data['text'] = [test_data] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn(test_data, http_response.body) def test_unknown_domain(self): url = URL('http://longsitethatdoesnotexistfoo.com/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_file_proto(self): url = URL('file://foo/bar.txt') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_closed(self): # TODO: Change 2312 by an always closed/non-http port url = URL('http://127.0.0.1:2312/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_not_http(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) try: self.uri_opener.GET(url) except HTTPRequestException as hre: self.assertEqual(hre.value, "Bad HTTP response status line: ''") else: self.assertTrue(False, 'Expected HTTPRequestException.') def test_url_port_not_http_many(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() self.uri_opener.settings.set_max_http_retries(0) url = URL('http://127.0.0.1:%s/' % port) http_request_e = 0 scan_must_stop_e = 0 for _ in xrange(MAX_ERROR_COUNT): try: self.uri_opener.GET(url) except HTTPRequestException: http_request_e += 1 except ScanMustStopException as smse: scan_must_stop_e += 1 break except Exception as e: msg = 'Not expecting "%s".' self.assertTrue(False, msg % e.__class__.__name__) self.assertEqual(scan_must_stop_e, 1) self.assertEqual(http_request_e, 9) def test_get_wait_time(self): """ Asserts that all the responses coming out of the extended urllib have a get_wait_time different from the default. """ url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertNotEqual(http_response.get_wait_time(), DEFAULT_WAIT_TIME) def test_ssl_tls_1_0(self): ssl_daemon = RawSSLDaemon(Ok200Handler, ssl_version=ssl.PROTOCOL_TLSv1) ssl_daemon.start() ssl_daemon.wait_for_start() port = ssl_daemon.get_port() url = URL('https://127.0.0.1:%s/' % port) resp = self.uri_opener.GET(url) self.assertEqual(resp.get_body(), Ok200Handler.body) def test_ssl_v23(self): # https://bugs.kali.org/view.php?id=2160 if not hasattr(ssl, 'PROTOCOL_SSLv23'): return ssl_daemon = RawSSLDaemon(Ok200Handler, ssl_version=ssl.PROTOCOL_SSLv23) ssl_daemon.start() ssl_daemon.wait_for_start() port = ssl_daemon.get_port() url = URL('https://127.0.0.1:%s/' % port) resp = self.uri_opener.GET(url) self.assertEqual(resp.get_body(), Ok200Handler.body) def test_ssl_v3(self): # https://bugs.kali.org/view.php?id=2160 if not hasattr(ssl, 'PROTOCOL_SSLv3'): return # pylint: disable=E1101 ssl_daemon = RawSSLDaemon(Ok200Handler, ssl_version=ssl.PROTOCOL_SSLv3) ssl_daemon.start() ssl_daemon.wait_for_start() # pylint: disable=E1101 port = ssl_daemon.get_port() url = URL('https://127.0.0.1:%s/' % port) resp = self.uri_opener.GET(url) self.assertEqual(resp.get_body(), Ok200Handler.body) @attr('internet') @attr('ci_fails') def test_ssl_sni(self): """ Test is our HTTP client supports SSL SNI """ url = URL('https://sni.velox.ch/') resp = self.uri_opener.GET(url) self.assertIn('<strong>Great!', resp.get_body()) def test_ssl_fail_when_requesting_http(self): http_daemon = UpperDaemon(Ok200Handler) http_daemon.start() http_daemon.wait_for_start() port = http_daemon.get_port() # Note that here I'm using httpS <<---- "S" and that I've started an # HTTP server. We should get an exception url = URL('https://127.0.0.1:%s/' % port) self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_ssl_fail_when_requesting_moth_http(self): """ https://github.com/andresriancho/w3af/issues/7989 This test takes considerable time to run since it needs to timeout the SSL connection for each SSL protocol """ # Note that here I'm using httpS <<---- "S" and that I'm connecting to # the net location (host:port) of an HTTP server. http_url = URL(get_moth_http()) test_url = URL('https://%s' % http_url.get_net_location()) self.uri_opener.settings.set_max_http_retries(0) self.assertRaises(HTTPRequestException, self.uri_opener.GET, test_url, timeout=1) def test_stop(self): self.uri_opener.stop() url = URL(get_moth_http()) self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url) def test_pause_stop(self): self.uri_opener.pause(True) self.uri_opener.stop() url = URL(get_moth_http()) self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url) def test_pause(self): output = Queue.Queue() self.uri_opener.pause(True) def send(uri_opener, output): url = URL(get_moth_http()) try: http_response = uri_opener.GET(url) output.put(http_response) except: output.put(None) th = Process(target=send, args=(self.uri_opener, output)) th.daemon = True th.start() self.assertRaises(Queue.Empty, output.get, True, 2) def test_pause_unpause(self): output = Queue.Queue() self.uri_opener.pause(True) def send(uri_opener, output): url = URL(get_moth_http()) try: http_response = uri_opener.GET(url) output.put(http_response) except: output.put(None) th = Process(target=send, args=(self.uri_opener, output)) th.daemon = True th.start() self.assertRaises(Queue.Empty, output.get, True, 2) self.uri_opener.pause(False) http_response = output.get() self.assertNotIsInstance(http_response, types.NoneType, 'Error in send thread.') th.join() self.assertEqual(http_response.get_code(), 200) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_removes_cache(self): url = URL(get_moth_http()) self.uri_opener.GET(url, cache=False) # Please note that this line, together with the tearDown() act as # a test for a "double call to end()". self.uri_opener.end() db_fmt = 'db_unittest-%s' trace_fmt = 'db_unittest-%s_traces/' temp_dir = get_temp_dir() for i in xrange(100): test_db_path = os.path.join(temp_dir, db_fmt % i) test_trace_path = os.path.join(temp_dir, trace_fmt % i) self.assertFalse(os.path.exists(test_db_path), test_db_path) self.assertFalse(os.path.exists(test_trace_path), test_trace_path) def test_special_char_header(self): url = URL(get_moth_http('/core/headers/echo-headers.py')) header_content = u'name=ábc' headers = Headers([('Cookie', header_content)]) http_response = self.uri_opener.GET(url, cache=False, headers=headers) self.assertIn(header_content, http_response.body) def test_bad_file_descriptor_8125_local(self): """ 8125 is basically an issue with the way HTTP SSL connections handle the Connection: Close header. :see: https://github.com/andresriancho/w3af/issues/8125 """ raw_http_response = ('HTTP/1.1 200 Ok\r\n' 'Connection: close\r\n' 'Content-Type: text/html\r\n' 'Content-Length: 3\r\n\r\nabc') certfile = os.path.join(ROOT_PATH, 'plugins', 'tests', 'audit', 'certs', 'invalid_cert.pem') port = get_unused_port() s = SSLServer('localhost', port, certfile, http_response=raw_http_response) s.start() body = 'abc' mock_url = 'https://localhost:%s/' % port url = URL(mock_url) http_response = self.uri_opener.GET(url, cache=False) self.assertEqual(body, http_response.body) s.stop() # This error is expected, it's generated when the xurllib negotiates # the different SSL protocols with the server self.assertEqual(set([e.strerror for e in s.errors]), {'Bad file descriptor'}) def test_rate_limit_high(self): self.rate_limit_generic(500, 0.009, 0.4) def test_rate_limit_low(self): self.rate_limit_generic(1, 1, 2.2) def test_rate_limit_zero(self): self.rate_limit_generic(0, 0.005, 0.4) @httpretty.activate def rate_limit_generic(self, max_requests_per_second, _min, _max): mock_url = 'http://mock/' url = URL(mock_url) httpretty.register_uri(httpretty.GET, mock_url, body='Body') start_time = time.time() with patch.object(self.uri_opener.settings, 'get_max_requests_per_second') as mrps_mock: mrps_mock.return_value = max_requests_per_second self.uri_opener.GET(url, cache=False) self.uri_opener.GET(url, cache=False) httpretty.reset() end_time = time.time() elapsed_time = end_time - start_time self.assertGreaterEqual(elapsed_time, _min) self.assertLessEqual(elapsed_time, _max)
class TestXUrllib(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() def test_basic(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_cache(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_qs_params(self): url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('123456abc', http_response.body) url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('root:x:0', http_response.body) def test_POST(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) data = URLEncodedForm() data['text'] = ['123456abc'] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn('123456abc', http_response.body) def test_POST_special_chars(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) test_data = u'abc<def>"-á-' data = URLEncodedForm() data['text'] = [test_data] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn(test_data, http_response.body) def test_unknown_url(self): url = URL('http://longsitethatdoesnotexistfoo.com/') self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url) def test_url_port_closed(self): # TODO: Change 2312 by an always closed/non-http port url = URL('http://127.0.0.1:2312/') self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url) def test_url_port_not_http(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url) def test_url_port_not_http_many(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) for _ in xrange(MAX_ERROR_COUNT): try: self.uri_opener.GET(url) except ScanMustStopByUnknownReasonExc: self.assertTrue(False, 'Not expecting this exception type.') except ScanMustStopOnUrlError: self.assertTrue(True) except ScanMustStopException: self.assertTrue(True) break else: self.assertTrue(False) def test_timeout(self): upper_daemon = UpperDaemon(TimeoutTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) self.uri_opener.settings.set_timeout(1) self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url) self.uri_opener.settings.set_default_values() def test_timeout_many(self): upper_daemon = UpperDaemon(TimeoutTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() self.uri_opener.settings.set_timeout(1) url = URL('http://127.0.0.1:%s/' % port) for _ in xrange(MAX_ERROR_COUNT): try: self.uri_opener.GET(url) except ScanMustStopByUnknownReasonExc: self.assertTrue(False, 'Not expecting this exception type.') except ScanMustStopOnUrlError: self.assertTrue(True) except ScanMustStopException: self.assertTrue(True) break else: self.assertTrue(False) self.uri_opener.settings.set_default_values() def test_ignore_errors(self): upper_daemon = UpperDaemon(TimeoutTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() self.uri_opener.settings.set_timeout(1) self.uri_opener._retry = Mock() url = URL('http://127.0.0.1:%s/' % port) try: self.uri_opener.GET(url, ignore_errors=True) except ScanMustStopOnUrlError: self.assertEqual(self.uri_opener._retry.call_count, 0) else: self.assertTrue(False, 'Exception not raised') self.uri_opener.settings.set_default_values() def test_stop(self): self.uri_opener.stop() url = URL(get_moth_http()) self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url) def test_pause_stop(self): self.uri_opener.pause(True) self.uri_opener.stop() url = URL(get_moth_http()) self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url) def test_pause(self): output = Queue.Queue() self.uri_opener.pause(True) def send(uri_opener, output): url = URL(get_moth_http()) try: http_response = uri_opener.GET(url) output.put(http_response) except: output.put(None) th = Process(target=send, args=(self.uri_opener, output)) th.daemon = True th.start() self.assertRaises(Queue.Empty, output.get, True, 2) def test_pause_unpause(self): output = Queue.Queue() self.uri_opener.pause(True) def send(uri_opener, output): url = URL(get_moth_http()) try: http_response = uri_opener.GET(url) output.put(http_response) except: output.put(None) th = Process(target=send, args=(self.uri_opener, output)) th.daemon = True th.start() self.assertRaises(Queue.Empty, output.get, True, 2) self.uri_opener.pause(False) http_response = output.get() self.assertNotIsInstance(http_response, types.NoneType, 'Error in send thread.') th.join() self.assertEqual(http_response.get_code(), 200) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_removes_cache(self): url = URL(get_moth_http()) self.uri_opener.GET(url, cache=False) # Please note that this line, together with the tearDown() act as # a test for a "double call to end()". self.uri_opener.end() db_fmt = 'db_unittest-%s' trace_fmt = 'db_unittest-%s_traces/' temp_dir = get_temp_dir() for i in xrange(100): test_db_path = os.path.join(temp_dir, db_fmt % i) test_trace_path = os.path.join(temp_dir, trace_fmt % i) self.assertFalse(os.path.exists(test_db_path), test_db_path) self.assertFalse(os.path.exists(test_trace_path), test_trace_path) def test_special_char_header(self): url = URL(get_moth_http('/core/headers/echo-headers.py')) header_content = u'name=ábc' headers = Headers([('Cookie', header_content)]) http_response = self.uri_opener.GET(url, cache=False, headers=headers) self.assertIn(header_content, http_response.body)
class TestXUrllibTimeout(unittest.TestCase): def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() def test_timeout(self): upper_daemon = UpperDaemon(TimeoutTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) self.uri_opener.settings.set_configured_timeout(0.5) self.uri_opener.clear_timeout() # We can mock this because it's being tested at TestXUrllibDelayOnError self.uri_opener._pause_on_http_error = Mock() start = time.time() try: self.uri_opener.GET(url) except HTTPRequestException as hre: self.assertEqual(hre.message, 'HTTP timeout error') except Exception as e: msg = 'Not expecting: "%s"' self.assertTrue(False, msg % e.__class__.__name__) else: self.assertTrue(False, 'Expected HTTPRequestException.') end = time.time() self.uri_opener.settings.set_default_values() self.assertLess(end-start, 1.5) def test_timeout_ssl(self): ssl_daemon = RawSSLDaemon(TimeoutTCPHandler) ssl_daemon.start() ssl_daemon.wait_for_start() port = ssl_daemon.get_port() url = URL('https://127.0.0.1:%s/' % port) self.uri_opener.settings.set_max_http_retries(0) self.uri_opener.settings.set_configured_timeout(1) self.uri_opener.clear_timeout() start = time.time() self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) end = time.time() self.uri_opener.settings.set_default_values() # We set the upper limit to 4 because the uri opener needs to timeout # all the connections (one for each SSL protocol) and then, because of # some very relaxed handshake it needs to timeout a SSL protocol 3 # connection which passes handshake phase but then fails to send/get # the headers self.assertLess(end-start, 80) def test_timeout_many(self): upper_daemon = UpperDaemon(TimeoutTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() self.uri_opener.settings.set_configured_timeout(0.5) self.uri_opener.clear_timeout() # We can mock this because it's being tested at TestXUrllibDelayOnError self.uri_opener._pause_on_http_error = Mock() url = URL('http://127.0.0.1:%s/' % port) http_request_e = 0 scan_stop_e = 0 for _ in xrange(MAX_ERROR_COUNT): try: self.uri_opener.GET(url) except HTTPRequestException as hre: http_request_e += 1 self.assertEqual(hre.message, 'HTTP timeout error') except ScanMustStopException: scan_stop_e += 1 self.assertTrue(True) break except Exception as e: msg = 'Not expecting: "%s"' self.assertTrue(False, msg % e.__class__.__name__) else: self.assertTrue(False, 'Expecting timeout') else: self.assertTrue(False, 'Expected ScanMustStopException') self.uri_opener.settings.set_default_values() self.assertEqual(http_request_e, 4) self.assertEqual(scan_stop_e, 1) def test_timeout_auto_adjust(self): upper_daemon = UpperDaemon(Ok200SmallDelayHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() # Enable timeout auto-adjust self.uri_opener.settings.set_configured_timeout(0) self.uri_opener.clear_timeout() # We can mock this because it's being tested at TestXUrllibDelayOnError self.uri_opener._pause_on_http_error = Mock() # Mock to verify the calls self.uri_opener.set_timeout = Mock() # Make sure we start from the desired timeout value self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'), DEFAULT_TIMEOUT) url = URL('http://127.0.0.1:%s/' % port) sent_requests = 0 self.uri_opener.GET(url) time.sleep(TIMEOUT_UPDATE_ELAPSED_MIN + 1) for _ in xrange(TIMEOUT_ADJUST_LIMIT * 3): try: self.uri_opener.GET(url) except Exception: raise else: sent_requests += 1 if self.uri_opener.set_timeout.call_count: break self.assertEqual(self.uri_opener.set_timeout.call_count, 1) # pylint: disable=E1136 rtt = self.uri_opener.get_average_rtt()[0] adjusted_tout = self.uri_opener.set_timeout.call_args[0][0] expected_tout = TIMEOUT_MULT_CONST * rtt delta = rtt * 0.2 # pylint: enable=E1136 self.assertGreaterEqual(adjusted_tout, expected_tout - delta) self.assertLessEqual(adjusted_tout, expected_tout + delta) self.assertLess(adjusted_tout, DEFAULT_TIMEOUT) self.assertEqual(sent_requests, TIMEOUT_ADJUST_LIMIT) def test_timeout_parameter_overrides_global_timeout(self): upper_daemon = UpperDaemon(Ok200SmallDelayWithLongTriggeredTimeoutHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() # Enable timeout auto-adjust self.uri_opener.settings.set_configured_timeout(0) self.uri_opener.clear_timeout() # Make sure we start from the desired timeout value self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'), DEFAULT_TIMEOUT) url = URL('http://127.0.0.1:%s/' % port) self.uri_opener.GET(url) time.sleep(TIMEOUT_UPDATE_ELAPSED_MIN + 1) for _ in xrange(TIMEOUT_ADJUST_LIMIT * 3): self.uri_opener.GET(url) # These make sure that the HTTP connection pool is full, this is # required because we want to check if the timeout applies to # existing connections, not new ones for _ in xrange(ConnectionManager.MAX_CONNECTIONS): self.uri_opener.GET(url) # Make sure we reached the desired timeout after our HTTP # requests to the test server self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'), MIN_TIMEOUT) timeout_url = URL('http://127.0.0.1:%s/timeout' % port) # And now the real test, this one makes sure that the timeout # parameter sent to GET overrides the configured value response = self.uri_opener.GET(timeout_url, timeout=8.0) self.assertEqual(response.get_code(), 200) self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'), MIN_TIMEOUT) # When timeout is not specified and the server returns in more # than the expected time, an exception is raised self.assertRaises(Exception, self.uri_opener.GET, timeout_url)