class Generic404Test(unittest.TestCase):

    def get_body(self, unique_parts):
        # Do not increase this 50 too much, it will exceed the xurllib max
        # HTTP response body length
        parts = [re.__doc__, random.__doc__, unittest.__doc__]
        parts = parts * 50
        parts.extend(unique_parts)

        rnd = random.Random()
        rnd.seed(1)
        rnd.shuffle(parts)

        body = '\n'.join(parts)

        #filename = str(abs(hash(''.join(parts)))) + '-hash.txt'
        #file(filename, 'w').write(body)

        return body

    def setUp(self):
        self.urllib = ExtendedUrllib()

        self.fingerprint_404 = Fingerprint404()
        self.fingerprint_404.set_url_opener(self.urllib)

    def tearDown(self):
        self.urllib.end()
        clear_default_temp_db_instance()
class TestXUrllibTimeout(unittest.TestCase):

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_timeout(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_configured_timeout(0.5)
        self.uri_opener.clear_timeout()

        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()

        start = time.time()

        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            self.assertEqual(hre.message, 'HTTP timeout error')
        except Exception, e:
            msg = 'Not expecting: "%s"'
            self.assertTrue(False, msg % e.__class__.__name__)
def test_delay_controlled_random(self):
    for expected_result, delays in self.TEST_SUITE:
        urllib = ExtendedUrllib()
        side_effect = generate_delays(delays, rand_range=(0, 2))
        urllib.send_mutant = MagicMock(side_effect=side_effect)

        delay_obj = ExactDelay('sleep(%s)')

        url = URL('http://moth/?id=1')
        req = FuzzableRequest(url)
        mutant = QSMutant(req)
        mutant.set_dc(url.querystring)
        mutant.set_token(('id', 0))

        ed = ExactDelayController(mutant, delay_obj, urllib)
        controlled, responses = ed.delay_is_controlled()

        # This is where we change from test_delay_controlled, the basic
        # idea is that we'll allow false negatives but no false positives
        if expected_result:
            expected_result = [True, False]
        else:
            expected_result = [False]

        self.assertIn(controlled, expected_result, delays)
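These delay-controller tests depend on a generate_delays() helper defined elsewhere in the suite and not shown here. A minimal sketch of what such a helper could look like, assuming the controller reads the simulated response time through a get_wait_time() accessor on the object returned by send_mutant() (both the helper's signature and that accessor are assumptions made for illustration):

import random
from mock import MagicMock

def generate_delays(delays, rand_range=(0, 0)):
    # One mocked response per expected delay; rand_range adds uniform
    # noise so the *_random variant can exercise false-negative tolerance.
    for delay in delays:
        response = MagicMock()
        # Assumption: the controller reads elapsed time via get_wait_time()
        response.get_wait_time.return_value = delay + random.uniform(*rand_range)
        yield response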
class TestExtendedUrllibProxy(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

        # Start the proxy daemon
        self._proxy = Proxy('127.0.0.1', 0, ExtendedUrllib(), w3afProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()

        port = self._proxy.get_port()

        # Configure the proxy
        settings = OpenerSettings()
        options = settings.get_options()
        proxy_address_opt = options['proxy_address']
        proxy_port_opt = options['proxy_port']

        proxy_address_opt.set_value('127.0.0.1')
        proxy_port_opt.set_value(port)

        settings.set_options(options)
        self.uri_opener.settings = settings

    def tearDown(self):
        self.uri_opener.end()

    def test_http_default_port_via_proxy(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_http_port_specification_via_proxy(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_https_via_proxy(self):
        TODO = ('Skip this test because of a strange bug with the extended'
                ' url library and w3af\'s local proxy daemon. More info here:'
                ' https://github.com/andresriancho/w3af/issues/183')
        raise SkipTest(TODO)

        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_offline_port_via_proxy(self):
        url = URL('http://127.0.0.1:8181/')
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertEqual(http_response.get_code(), 400)

    def test_POST_via_proxy(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        http_response = self.uri_opener.POST(url, data='text=123456abc',
                                             cache=False)
        self.assertIn('123456abc', http_response.body)
def test_send_mangled(self):
    xurllib = ExtendedUrllib()
    xurllib.set_evasion_plugins([self_reference(), ])

    url = URL('http://moth/')
    http_response = xurllib.GET(url)
    self.assertEqual(http_response.get_url().url_string,
                     u'http://moth/./')
class TestXUrllibIntegration(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    @attr('ci_fails')
    def test_ntlm_auth_not_configured(self):
        self.uri_opener = ExtendedUrllib()
        url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/")
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('Must authenticate.', http_response.body)

    @attr('ci_fails')
    def test_ntlm_auth_valid_creds(self):
        self.uri_opener = ExtendedUrllib()

        settings = OpenerSettings()
        options = settings.get_options()

        ntlm_domain = options['ntlm_auth_domain']
        ntlm_user = options['ntlm_auth_user']
        ntlm_pass = options['ntlm_auth_passwd']
        ntlm_url = options['ntlm_auth_url']

        ntlm_domain.set_value('moth')
        ntlm_user.set_value('admin')
        ntlm_pass.set_value('admin')
        ntlm_url.set_value('http://moth/w3af/core/ntlm_auth/ntlm_v1/')

        settings.set_options(options)
        self.uri_opener.settings = settings

        url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/")
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('You are admin from MOTH/', http_response.body)

    def test_gzip(self):
        url = URL(get_moth_http('/core/gzip/gzip.html'))
        res = self.uri_opener.GET(url, cache=False)

        headers = res.get_headers()
        content_encoding, _ = headers.iget('content-encoding', '')
        test_res = 'gzip' in content_encoding or \
                   'compress' in content_encoding

        self.assertTrue(test_res, content_encoding)

    def test_get_cookies(self):
        self.assertEqual(len([c for c in self.uri_opener.get_cookies()]), 0)

        url_sends_cookie = URL(get_moth_http('/core/cookies/set-cookie.py'))
        self.uri_opener.GET(url_sends_cookie, cache=False)

        self.assertEqual(len([c for c in self.uri_opener.get_cookies()]), 1)
        cookie = [c for c in self.uri_opener.get_cookies()][0]
        self.assertEqual('127.0.0.1', cookie.domain)
def test_headers_upper_case(self):
    url = "http://w3af.org/"
    httpretty.register_uri(httpretty.GET, url, body='hello world',
                           content_type="application/html")

    uri_opener = ExtendedUrllib()
    res = uri_opener.GET(URL(url), cache=False)

    headers = res.get_headers()
    content_type = headers.get('Content-Type', '')

    self.assertIn('application/html', content_type)
    self.assertIn('hello world', res.get_body())
def __init__(self, ip, port, urlOpener=ExtendedUrllib(),
             proxy_cert=Proxy.SSL_CERT):
    """
    :param ip: IP address to bind
    :param port: Port to bind
    :param urlOpener: The urlOpener that will be used to open
        the requests that arrive from the browser
    :param proxy_cert: Proxy certificate to use, this is needed
        for proxying SSL connections.
    """
    Proxy.__init__(self, ip, port, urlOpener, w3afLocalProxyHandler,
                   proxy_cert)

    self.daemon = True
    self.name = 'LocalProxyThread'

    # Internal vars
    self._request_queue = Queue.Queue()
    self._edited_requests = {}
    self._edited_responses = {}

    # User configured parameters
    self._methods_to_trap = set()
    self._what_to_trap = re.compile('.*')
    self._what_not_to_trap = re.compile(
        r'.*\.(gif|jpg|png|css|js|ico|swf|axd|tif)$')
    self._trap = False
    self._fix_content_length = True
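A minimal usage sketch for this constructor, assuming the start(), wait_for_start() and get_port() methods inherited from Proxy behave as they do in the test cases in this suite; stop() and the direct attribute writes below are assumptions made for illustration, not a documented API:

# Run a LocalProxy that traps POST requests only (illustrative sketch)
proxy = LocalProxy('127.0.0.1', 0)
proxy.start()
proxy.wait_for_start()

proxy._trap = True
proxy._methods_to_trap = set(['POST'])

print 'LocalProxy listening on port %s' % proxy.get_port()
# ... point a browser at the proxy, edit trapped requests, then:
proxy.stop()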
def test_pickleable_shells(self):
    pool = Pool(1)
    xurllib = ExtendedUrllib()

    original_shell = Shell(MockVuln(), xurllib, pool)

    kb.append('a', 'b', original_shell)
    unpickled_shell = kb.get('a', 'b')[0]

    self.assertEqual(original_shell, unpickled_shell)
    self.assertEqual(unpickled_shell.worker_pool, None)
    self.assertEqual(unpickled_shell._uri_opener, None)

    pool.terminate()
    pool.join()
    xurllib.end()
def test_delay_controlled(self):
    for expected_result, delays in self.TEST_SUITE:
        urllib = ExtendedUrllib()
        side_effect = generate_delays(delays)
        urllib.send_mutant = MagicMock(side_effect=side_effect)

        delay_obj = ExactDelay('sleep(%s)')

        url = URL('http://moth/?id=1')
        req = FuzzableRequest(url)
        mutant = QSMutant(req)
        mutant.set_dc(url.querystring)
        mutant.set_token(('id', 0))

        ed = ExactDelayController(mutant, delay_obj, urllib)
        controlled, responses = ed.delay_is_controlled()

        self.assertEqual(expected_result, controlled, delays)
def test_raise_on_domain_not_in_archive(self):
    url = URL('http://www.w3af-scanner.org/')
    fr = FuzzableRequest(url, method='GET')

    ado = archive_dot_org()
    uri_opener = ExtendedUrllib()
    ado.set_url_opener(uri_opener)

    self.assertRaises(RunOnce, ado.crawl_wrapper, fr)
def test_false_negative(self):
    server_error = ('500 error that does NOT\n'
                    'look like one\n'
                    'because we want to reproduce the bug\n')
    not_found = ('This is a 404\n'
                 'but it does NOT look like one\n'
                 'because we want to reproduce the bug\n')

    httpretty.register_uri(httpretty.GET,
                           re.compile("w3af.com/foo/(.*)"),
                           body=server_error,
                           status=500)

    httpretty.register_uri(httpretty.GET,
                           re.compile("w3af.com/(.*)"),
                           body=not_found,
                           status=404)

    root_url = URL('http://w3af.com/')
    foo_url = URL('http://w3af.com/foo/phpinfo.php')
    server_error_resp = HTTPResponse(500, server_error, Headers(),
                                     foo_url, foo_url)

    urllib = ExtendedUrllib()
    worker_pool = Pool(processes=2,
                       worker_names='WorkerThread',
                       max_queued_tasks=2,
                       maxtasksperchild=20)

    fingerprint_404 = Fingerprint404()
    fingerprint_404.set_url_opener(urllib)
    fingerprint_404.set_worker_pool(worker_pool)
    fingerprint_404.generate_404_knowledge(root_url)

    self.assertTrue(fingerprint_404.is_404(server_error_resp))

    fingerprint_404.cleanup()
    urllib.clear()
def test_sessions_simultaneous(self):
    def request_callback(request, uri, headers):
        received_cookie_value = request.headers.get('cookie', None)
        if received_cookie_value is not None:
            return 200, headers, 'Cookie %s received' % received_cookie_value
        else:
            return 200, headers, 'Cookie not sent'

    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE + '1',
                           adding_headers={'Set-Cookie': '11111111'})

    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE + '2',
                           adding_headers={'Set-Cookie': '222222222'})

    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=request_callback)

    uri_opener = ExtendedUrllib()

    session_1 = uri_opener.get_new_session()
    session_2 = uri_opener.get_new_session()

    uri_opener.GET(URL(self.URL_SEND_COOKIE + '1'), session=session_1)
    uri_opener.GET(URL(self.URL_SEND_COOKIE + '2'), session=session_2)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), session=session_1)
    self.assertIn('Cookie 11111111 received', resp.body)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), session=session_2)
    self.assertIn('Cookie 222222222 received', resp)
def test_sessions_basic(self):
    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE,
                           adding_headers={'Set-Cookie': self.COOKIE_VALUE})

    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=self.request_callback)

    uri_opener = ExtendedUrllib()

    session_1 = uri_opener.get_new_session()
    session_2 = uri_opener.get_new_session()

    uri_opener.GET(URL(self.URL_SEND_COOKIE), session=session_1)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE),
                          cookies=True, session=session_1)
    self.assertIn('Cookie received', resp)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE),
                          cookies=True, session=session_2)
    self.assertIn('Cookie not sent', resp)
def test_demo_testfire_net(self):
    # We don't control the demo.testfire.net domain, so we'll check if it's
    # up before doing anything else
    uri_opener = ExtendedUrllib()
    login_url = URL(self.demo_testfire + 'login.aspx')
    try:
        res = uri_opener.GET(login_url)
    except:
        raise SkipTest('demo.testfire.net is unreachable!')
    else:
        if 'Online Banking Login' not in res.body:
            raise SkipTest('demo.testfire.net has changed!')

    self._scan(self.demo_testfire_net['target'],
               self.demo_testfire_net['plugins'])

    urls = self.kb.get_all_known_urls()
    url_strings = set(str(u) for u in urls)

    self.assertTrue(self.demo_testfire + 'queryxpath.aspx' in url_strings)
    self.assertTrue(self.demo_testfire + 'queryxpath.aspx.cs' in url_strings)
def test_headers_upper_case(self):
    """
    This unittest is skipped here, but shouldn't be removed, it is a
    reminder that w3af (and urllib/httplib) always performs a call to
    lower() for all the data received over the wire.

    This gives w3af a modified view of the reality, we never see what
    was really sent to us.
    """
    url = "http://w3af.org/"
    httpretty.register_uri(httpretty.GET, url, body='hello world',
                           content_type="application/html")

    uri_opener = ExtendedUrllib()
    res = uri_opener.GET(URL(url), cache=False)

    headers = res.get_headers()
    content_type = headers.get('Content-Type', '')

    self.assertIn('application/html', content_type)
def setUp(self):
    # Start the proxy server
    create_temp_dir()

    self._proxy = InterceptProxy(self.IP, 0, ExtendedUrllib())
    self._proxy.start()
    self._proxy.wait_for_start()

    port = self._proxy.get_port()

    # Build the proxy opener
    proxy_handler = urllib2.ProxyHandler(
        {'http': 'http://%s:%s' % (self.IP, port)})
    self.proxy_opener = urllib2.build_opener(proxy_handler,
                                             urllib2.HTTPHandler)
def test_xurllib(self):
    uri_opener = ExtendedUrllib()

    uri_opener.GET(self.URL_SENDS_COOKIE)

    resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=True)
    self.assertTrue('Cookie was sent.' in resp)

    resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=False)
    self.assertTrue('Cookie was NOT sent.' in resp)

    resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=True)
    self.assertTrue('Cookie was sent.' in resp)
def test_xurllib(self):
    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE,
                           adding_headers={'Set-Cookie': self.COOKIE_VALUE})

    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=self.request_callback)

    uri_opener = ExtendedUrllib()

    uri_opener.GET(URL(self.URL_SEND_COOKIE))

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True)
    self.assertIn('Cookie received', resp)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=False)
    self.assertIn('Cookie not sent', resp)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True)
    self.assertIn('Cookie received', resp)
def setUp(self):
    self.bing_se = bing(ExtendedUrllib())
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below
          for tests on these particular search implementations.

    This base class is not intended to be run by nosetests.
    """
    GoogleApiSearcher = None

    COUNT = 10

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    @attr('fails')
    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return

        keywords = ['pink', 'red', 'blue']
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0

        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            self.assertTrue(URL_REGEX.match(link.URL.url_string) is not None,
                            link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    @attr('fails')
    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return

        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0

        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (link_domain,
                                                                   domain)
            self.assertEqual(link_domain, domain, msg)
class TestXUrllibDelayOnError(unittest.TestCase):

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_increasing_delay_on_errors(self):
        expected_log = {0: False, 10: False, 20: False, 30: False,
                        40: False, 50: False, 60: False, 70: False,
                        80: False, 90: False, 100: False}
        self.assertEqual(self.uri_opener._sleep_log, expected_log)

        return_empty_daemon = UpperDaemon(EmptyTCPHandler)
        return_empty_daemon.start()
        return_empty_daemon.wait_for_start()

        port = return_empty_daemon.get_port()

        # No retries means that the test is easier to read/understand
        self.uri_opener.settings.set_max_http_retries(0)

        # We want to keep going, don't test the _should_stop_scan here.
        self.uri_opener._should_stop_scan = lambda x: False

        url = URL('http://127.0.0.1:%s/' % port)
        http_exception_count = 0
        loops = 100

        # Mock time.sleep() so the test records the delays without waiting
        with patch('w3af.core.data.url.extended_urllib.time.sleep') as sleepm:
            for i in xrange(loops):
                try:
                    self.uri_opener.GET(url, cache=False)
                except HTTPRequestException:
                    http_exception_count += 1
                except Exception, e:
                    msg = 'Not expecting: "%s"'
                    self.assertTrue(False, msg % e.__class__.__name__)
                else:
                    self.assertTrue(False, 'Expecting HTTPRequestException')

        self.assertEqual(loops - 1, i)

        # Note that the timeouts are increasing based on the error rate and
        # SOCKET_ERROR_DELAY
        expected_calls = [call(1.5), call(3.0), call(4.5), call(6.0),
                          call(7.5), call(9.0), call(10.5), call(12.0),
                          call(13.5)]

        expected_log = {0: False, 10: True, 20: True, 30: True,
                        40: True, 50: True, 60: True, 70: True,
                        80: True, 90: True, 100: False}

        self.assertEqual(expected_calls, sleepm.call_args_list)
        self.assertEqual(http_exception_count, 100)
        self.assertEqual(self.uri_opener._sleep_log, expected_log)

        # This one should also clear the log
        try:
            self.uri_opener.GET(url, cache=False)
        except HTTPRequestException:
            pass
        else:
            self.assertTrue(False, 'Expected HTTPRequestException')

        # The log was cleared, all values should be False
        self.assertTrue(all([not v
                             for v in self.uri_opener._sleep_log.values()]))
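The expected_calls assertion above follows a simple pattern: one extra SOCKET_ERROR_DELAY step each time the observed error rate crosses a 10% bucket. A short sketch reproducing that arithmetic (the 1.5s step is inferred from the call list above, not read from the codebase):

# Reproduce the expected sleep() delays asserted above.
SOCKET_ERROR_DELAY = 1.5  # assumption: the step implied by call(1.5) ... call(13.5)

expected_delays = [SOCKET_ERROR_DELAY * bucket for bucket in xrange(1, 10)]
assert expected_delays == [1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5]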
class TestXUrllib(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_POST(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_POST_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_url(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http_many(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

    def test_timeout(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_timeout(1)
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)
        self.uri_opener.settings.set_default_values()

    def test_timeout_many(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_timeout(1)

        url = URL('http://127.0.0.1:%s/' % port)

        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

        self.uri_opener.settings.set_default_values()

    def test_ignore_errors(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_timeout(1)
        self.uri_opener._retry = Mock()

        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url, ignore_errors=True)
        except ScanMustStopOnUrlError:
            self.assertEqual(self.uri_opener._retry.call_count, 0)
        else:
            self.assertTrue(False, 'Exception not raised')

        self.uri_opener.settings.set_default_values()

    def test_stop(self):
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url)

    def test_pause_stop(self):
        self.uri_opener.pause(True)
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url)

    def test_pause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        self.uri_opener.pause(False)

        http_response = output.get()
        self.assertNotIsInstance(http_response, types.NoneType,
                                 'Error in send thread.')

        th.join()

        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_removes_cache(self):
        url = URL(get_moth_http())
        self.uri_opener.GET(url, cache=False)

        # Please note that this line, together with the tearDown() act as
        # a test for a "double call to end()".
        self.uri_opener.end()

        db_fmt = 'db_unittest-%s'
        trace_fmt = 'db_unittest-%s_traces/'
        temp_dir = get_temp_dir()

        for i in xrange(100):
            test_db_path = os.path.join(temp_dir, db_fmt % i)
            test_trace_path = os.path.join(temp_dir, trace_fmt % i)
            self.assertFalse(os.path.exists(test_db_path), test_db_path)
            self.assertFalse(os.path.exists(test_trace_path), test_trace_path)

    def test_special_char_header(self):
        url = URL(get_moth_http('/core/headers/echo-headers.py'))
        header_content = u'name=ábc'
        headers = Headers([('Cookie', header_content)])

        http_response = self.uri_opener.GET(url, cache=False, headers=headers)
        self.assertIn(header_content, http_response.body)
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below
          for tests on these particular search implementations.

    This base class is not intended to be run by nosetests.
    """
    GoogleApiSearcher = None

    COUNT = 10

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return

        keywords = ["pink", "red", "blue"]
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0

        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = ('This test fails randomly based on Google\'s anti automation'
               ' protection, if it fails you should run it again in a couple'
               ' of minutes. Many consecutive failures show that our code is'
               ' NOT working anymore.')
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            self.assertTrue(URL_REGEX.match(link.URL.url_string) is not None,
                            link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return

        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0

        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = ('This test fails randomly based on Google\'s anti automation'
               ' protection, if it fails you should run it again in a couple'
               ' of minutes. Many consecutive failures show that our code is'
               ' NOT working anymore.')
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (
                link_domain, domain)
            self.assertEqual(link_domain, domain, msg)
class TestGetAverageRTT(unittest.TestCase):

    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()

    @httpretty.activate
    def test_get_average_rtt_for_mutant_all_equal(self):
        def request_callback(request, uri, headers):
            time.sleep(0.5)
            body = 'Yup'
            return 200, headers, body

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=request_callback)

        mock_url = URL(self.MOCK_URL)
        fuzzable_request = FuzzableRequest(mock_url)
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Check the response
        self.assertGreater(average_rtt, 0.45)
        self.assertGreater(0.55, average_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_similar(self):
        def request_callback(request, uri, headers):
            time.sleep(0.4 + random.randint(1, 9) / 100.0)
            body = 'Yup'
            return 200, headers, body

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=request_callback)

        mock_url = URL(self.MOCK_URL)
        fuzzable_request = FuzzableRequest(mock_url)
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Check the response
        self.assertGreater(average_rtt, 0.45)
        self.assertGreater(0.55, average_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_one_off(self):
        #
        # TODO: This is one of the cases I need to fix using _has_outliers!
        #       Calculating the average using 0.3 , 0.2 , 2.0 is madness
        #
        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=RequestCallBackWithDelays([0.3, 0.2, 2.0]))

        mock_url = URL(self.MOCK_URL)
        fuzzable_request = FuzzableRequest(mock_url)
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Check the response
        self.assertGreater(average_rtt, 0.80)
        self.assertGreater(0.90, average_rtt)
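The TODO in the last test hints at what _has_outliers should eventually do. One plausible way to flag the 2.0s sample among [0.3, 0.2, 2.0] is a median-absolute-deviation score; this is an illustration of the idea, not the codebase's actual _has_outliers:

def has_outliers(rtts, threshold=3.5):
    # Flag samples whose distance from the median is large relative to
    # the median absolute deviation (MAD).
    rtts_sorted = sorted(rtts)
    median = rtts_sorted[len(rtts_sorted) / 2]
    mad = sorted(abs(rtt - median) for rtt in rtts)[len(rtts) / 2]
    if mad == 0:
        return False
    return any(abs(rtt - median) / mad > threshold for rtt in rtts)

assert has_outliers([0.3, 0.2, 2.0])        # 2.0 stands out
assert not has_outliers([0.45, 0.5, 0.52])  # all close together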
class TestXUrllib(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
            self.assertTrue(False, 'Exception not raised')
class TestXUrllib(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'
    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_basic_ssl(self):
        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    @httpretty.activate
    def test_GET_with_post_data(self):
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        mock_url = URL(self.MOCK_URL)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/')

    @httpretty.activate
    def test_GET_with_post_data_and_qs(self):
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        qs = '?qs=1'
        mock_url = URL(self.MOCK_URL + qs)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/' + qs)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
            self.assertTrue(False, 'Exception not raised')
class TestExtendedUrllibProxy(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

        # Start the proxy daemon
        self._proxy = Proxy('127.0.0.2', 0, ExtendedUrllib(), ProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()

        port = self._proxy.get_port()

        # Configure the proxy
        settings = OpenerSettings()
        options = settings.get_options()
        proxy_address_opt = options['proxy_address']
        proxy_port_opt = options['proxy_port']

        proxy_address_opt.set_value('127.0.0.2')
        proxy_port_opt.set_value(port)

        settings.set_options(options)
        self.uri_opener.settings = settings

    def tearDown(self):
        self.uri_opener.end()

    def test_http_default_port_via_proxy(self):
        # TODO: Write this test
        pass

    def test_http_port_specification_via_proxy(self):
        self.assertEqual(self._proxy.total_handled_requests, 0)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        self.assertEqual(self._proxy.total_handled_requests, 1)

    def test_https_via_proxy(self):
        self.assertEqual(self._proxy.total_handled_requests, 0)

        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        self.assertEqual(self._proxy.total_handled_requests, 1)

    def test_offline_port_via_proxy(self):
        url = URL('http://127.0.0.1:8181/')
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertEqual(http_response.get_code(), 500)
        self.assertIn('Connection refused', http_response.body)

    def test_POST_via_proxy(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        http_response = self.uri_opener.POST(url, data='text=123456abc',
                                             cache=False)
        self.assertIn('123456abc', http_response.body)
class TestSQLMapWrapper(unittest.TestCase):

    SQLI_GET = get_moth_http('/audit/sql_injection/'
                             'where_string_single_qs.py?uname=pablo')

    SSL_SQLI_GET = get_moth_https('/audit/sql_injection/'
                                  'where_string_single_qs.py?uname=pablo')

    SQLI_POST = get_moth_http('/audit/sql_injection/where_integer_form.py')
    DATA_POST = 'text=1'

    def setUp(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener, debug=True)

    def tearDown(self):
        self.uri_opener.end()
        self.sqlmap.cleanup()

    @classmethod
    def setUpClass(cls):
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    @classmethod
    def tearDownClass(cls):
        # Doing this in both setupclass and teardownclass in order to be sure
        # that a ctrl+c doesn't break it
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    def test_verify_vulnerability(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

    def test_verify_vulnerability_ssl(self):
        uri = URL(self.SSL_SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_verify_vulnerability_false(self):
        not_vuln = get_moth_http('/audit/sql_injection/'
                                 'where_string_single_qs.py?fake=pablo')
        uri = URL(not_vuln)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertFalse(vulnerable)

    def test_verify_vulnerability_POST(self):
        target = Target(URL(self.SQLI_POST), self.DATA_POST)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_wrapper_invalid_url(self):
        self.assertRaises(TypeError, SQLMapWrapper,
                          self.SQLI_GET, self.uri_opener)

    def test_stds(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)

        prms = ['--batch', ]
        cmd, process = self.sqlmap.run_sqlmap_with_pipes(prms)

        self.assertIsInstance(process.stdout, file)
        self.assertIsInstance(process.stderr, file)
        self.assertIsInstance(process.stdin, file)
        self.assertIsInstance(cmd, basestring)

        self.assertIn('sqlmap.py', cmd)

    def test_target_basic(self):
        target = Target(URL(self.SQLI_GET))
        params = target.to_params()

        self.assertEqual(params, ["--url=%s" % self.SQLI_GET])

    def test_target_post_data(self):
        target = Target(URL(self.SQLI_GET), self.DATA_POST)
        params = target.to_params()

        self.assertEqual(params, ["--url=%s" % self.SQLI_GET,
                                  "--data=%s" % self.DATA_POST])

    def test_no_coloring(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--disable-coloring', params)

    def test_always_batch(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--batch', params)

    def test_use_proxy(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertTrue(any(i.startswith('--proxy=http://127.0.0.1:')
                            for i in params))

    def test_enable_coloring(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        sqlmap = SQLMapWrapper(target, self.uri_opener, coloring=True)
        params = sqlmap.get_wrapper_params()
        self.assertNotIn('--disable-coloring', params)

    def test_dbs(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.dbs()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate databases',
                      output)

    def test_tables(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.tables()
        output = process.stdout.read()

        self.assertIn('auth_group_permissions', output)
        self.assertIn('Database: SQLite_masterdb', output)
        self.assertIn('django_content_type', output)

    def test_users(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.users()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate the users',
                      output)

    def test_dump(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.dump()
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

    def test_sqlmap(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.direct('--tables')
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

        self.assertNotIn('information_schema', output)
        self.assertNotIn('COLUMN_PRIVILEGES', output)
class TestMultipartPostUpload(unittest.TestCase):
    """
    In the new architecture I've been working on, the HTTP requests are
    almost completely created by serializing two objects:

        * FuzzableRequest
        * DataContainer (stored in FuzzableRequest._post_data)

    There is a special DataContainer sub-class for MultipartPost file
    uploads called MultipartContainer, which holds variables and files
    and when serialized will be encoded as multipart.

    These test cases try to make sure that the file upload feature works
    by sending a POST request with a MultipartContainer to moth.
    """
    MOTH_FILE_UP_URL = URL(get_moth_http('/core/file_upload/upload.py'))

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_multipart_without_file(self):
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = 'this is not a file'
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertNotIn('was successfully uploaded', resp.get_body())

    def test_file_upload(self):
        temp = tempfile.mkstemp(suffix=".tmp")
        os.write(temp[0], 'file content')

        _file = open(temp[1], "rb")
        self.upload_file(_file)

    def test_stringio_upload(self):
        _file = NamedStringIO('file content', name='test.txt')
        self.upload_file(_file)

    def upload_file(self, _file):
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)
        mpc['uploadedfile'][0] = _file

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertIn('was successfully uploaded', resp.get_body())

    def test_upload_file_using_fuzzable_request(self):
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = NamedStringIO('file content',
                                                       name='test.txt')
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)
        freq = FuzzableRequest(self.MOTH_FILE_UP_URL, post_data=mpc,
                               method='POST')

        resp = self.opener.send_mutant(freq)

        self.assertIn('was successfully uploaded', resp.get_body())
class TestCSRF(PluginTest):

    target_url = 'http://moth/w3af/audit/csrf/'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('csrf'),),
                'crawl': (
                    PluginConfig('web_spider',
                                 ('only_forward', True, PluginConfig.BOOL)),
                )
            }
        }
    }

    def setUp(self):
        super(TestCSRF, self).setUp()

        self.csrf_plugin = csrf()
        self.uri_opener = ExtendedUrllib()
        self.csrf_plugin.set_url_opener(self.uri_opener)

    @attr('ci_fails')
    def test_found_csrf(self):
        EXPECTED = [
            ('/w3af/audit/csrf/vulnerable/buy.php'),
            ('/w3af/audit/csrf/vulnerable-rnd/buy.php'),
            #@see: https://github.com/andresriancho/w3af/issues/120
            #('/w3af/audit/csrf/vulnerable-token-ignored/buy.php'),
            ('/w3af/audit/csrf/link-vote/vote.php')
        ]

        # Run the scan
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # Assert the general results
        vulns = self.kb.get('csrf', 'csrf')

        self.assertEquals(set(EXPECTED),
                          set([v.get_url().get_path() for v in vulns]))
        self.assertTrue(
            all(['CSRF vulnerability' == v.get_name() for v in vulns]))

    def test_resp_is_equal(self):
        url = URL('http://www.w3af.com/')
        headers = Headers([('content-type', 'text/html')])

        r1 = HTTPResponse(200, 'body', headers, url, url)
        r2 = HTTPResponse(404, 'body', headers, url, url)
        self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2))

        r1 = HTTPResponse(200, 'a', headers, url, url)
        r2 = HTTPResponse(200, 'b', headers, url, url)
        self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2))

        r1 = HTTPResponse(200, 'a', headers, url, url)
        r2 = HTTPResponse(200, 'a', headers, url, url)
        self.assertTrue(self.csrf_plugin._is_resp_equal(r1, r2))

    @attr('ci_fails')
    def test_is_suitable(self):
        # False because no cookie is set and no QS nor post-data
        url = URL('http://moth/')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        # False because no cookie is set
        url = URL('http://moth/?id=3')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        url_sends_cookie = URL(
            'http://moth/w3af/core/cookie_handler/set-cookie.php')
        self.uri_opener.GET(url_sends_cookie)

        # Still false because it doesn't have any QS or POST data
        url = URL('http://moth/')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        self.csrf_plugin._strict_mode = True

        # Still false because of the strict mode
        url = URL('http://moth/?id=3')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        # False, no items in post-data
        url = URL('http://moth/')
        req = FuzzableRequest(url, method='POST', post_data=URLEncodedForm())
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        # True, items in DC, POST (passes strict mode) and cookies
        url = URL('http://moth/')
        form_params = FormParameters()
        form_params.add_input([('name', 'test'), ('type', 'text')])
        form = URLEncodedForm(form_params)
        req = FuzzableRequest(url, method='POST', post_data=form)
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertTrue(suitable)

        self.csrf_plugin._strict_mode = False

        # True now that we have strict mode off, cookies and QS
        url = URL('http://moth/?id=3')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertTrue(suitable)

    @attr('ci_fails')
    def test_is_origin_checked_true_case01(self):
        url = URL('http://moth/w3af/audit/csrf/referer/buy.php?shares=123')
        headers = Headers([('Referer',
                            'http://moth/w3af/audit/csrf/referer/')])
        freq = FuzzableRequest(url, method='GET', headers=headers)

        orig_response = self.uri_opener.send_mutant(freq)

        origin_checked = self.csrf_plugin._is_origin_checked(freq,
                                                             orig_response)
        self.assertTrue(origin_checked)

    @attr('ci_fails')
    def test_is_origin_checked_true_case02(self):
        url = URL('http://moth/w3af/audit/csrf/referer-rnd/buy.php?shares=123')
        headers = Headers([('Referer',
                            'http://moth/w3af/audit/csrf/referer-rnd/')])
        freq = FuzzableRequest(url, method='GET', headers=headers)

        orig_response = self.uri_opener.send_mutant(freq)

        origin_checked = self.csrf_plugin._is_origin_checked(freq,
                                                             orig_response)
        self.assertTrue(origin_checked)

    @attr('ci_fails')
    def test_is_origin_checked_false(self):
        url = URL('http://moth/w3af/audit/csrf/vulnerable/buy.php?shares=123')
        headers = Headers([('Referer',
                            'http://moth/w3af/audit/csrf/referer-rnd/')])
        freq = FuzzableRequest(url, method='GET', headers=headers)

        orig_response = self.uri_opener.send_mutant(freq)

        origin_checked = self.csrf_plugin._is_origin_checked(freq,
                                                             orig_response)
        self.assertFalse(origin_checked)

    def test_is_csrf_token_true_case01(self):
        self.csrf_plugin.is_csrf_token('token',
                                       'f842eb01b87a8ee18868d3bf80a558f3')

    def test_is_csrf_token_true_case02(self):
        self.csrf_plugin.is_csrf_token('secret',
                                       'f842eb01b87a8ee18868d3bf80a558f3')

    def test_is_csrf_token_true_case03(self):
        self.csrf_plugin.is_csrf_token('csrf',
                                       'f842eb01b87a8ee18868d3bf80a558f3')

    def test_is_csrf_token_false_case01(self):
        self.csrf_plugin.is_csrf_token('token', '')

    def test_is_csrf_token_false_case02(self):
        self.csrf_plugin.is_csrf_token('secret', 'helloworld')

    def test_is_csrf_token_false_case03(self):
        self.csrf_plugin.is_csrf_token('secret', 'helloworld123')

    def test_is_csrf_token_false_case04(self):
        self.csrf_plugin.is_csrf_token('secret', 'hello world 123')

    def test_is_csrf_token_false_case05(self):
        lorem = ('Lorem ipsum dolor sit amet, consectetur adipiscing elit.'
                 ' Curabitur at eros elit, rhoncus feugiat libero. Praesent'
                 ' lobortis ultricies est gravida tempor. Sed tortor mi,'
                 ' euismod at interdum quis, hendrerit vitae risus. Sed'
                 ' iaculis, ante sagittis ullamcorper molestie, metus nibh'
                 ' posuere purus, non tempor massa leo at odio. Duis quis'
                 ' elit enim. Morbi lobortis est sed metus adipiscing in'
                 ' lacinia est porttitor. Suspendisse potenti. Morbi pretium'
                 ' lacinia magna, sit amet tincidunt enim vestibulum sed.')
        self.csrf_plugin.is_csrf_token('secret', lorem)

    def test_is_csrf_token_false_case06(self):
        self.csrf_plugin.is_csrf_token('token', 'f842e')

    def test_find_csrf_token_true_simple(self):
        url = URL('http://moth/w3af/audit/csrf/')
        query_string = parse_qs('secret=f842eb01b87a8ee18868d3bf80a558f3')
        freq = FuzzableRequest(url, method='GET')
        freq.set_querystring(query_string)

        token = self.csrf_plugin._find_csrf_token(freq)
        self.assertIn('secret', token)

    def test_find_csrf_token_true_repeated(self):
        url = URL('http://moth/w3af/audit/csrf/')
        query_string = parse_qs('secret=f842eb01b87a8ee18868d3bf80a558f3'
                                '&secret=not a token')
        freq = FuzzableRequest(url, method='GET')
        freq.set_querystring(query_string)

        token = self.csrf_plugin._find_csrf_token(freq)
        self.assertIn('secret', token)

    def test_find_csrf_token_false(self):
        url = URL('http://moth/w3af/audit/csrf/')
        query_string = parse_qs('secret=not a token')
        freq = FuzzableRequest(url, method='GET')
        freq.set_querystring(query_string)

        token = self.csrf_plugin._find_csrf_token(freq)
        self.assertNotIn('secret', token)

    @attr('ci_fails')
    def test_is_token_checked_true(self):
        generator = URL('http://moth/w3af/audit/csrf/secure-replay-allowed/')
        http_response = self.uri_opener.GET(generator)

        # Please note that this freq holds a fresh/valid CSRF token
        cookie = Cookie.from_http_response(http_response)
        freq = FuzzableRequest(generator, cookie=cookie)

        # FIXME:
        # And I use this token here to get the original response, and if the
        # application is properly developed, that token will be invalidated
        # and that's where this algorithm fails.
        original_response = self.uri_opener.send_mutant(freq)

        token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
        checked = self.csrf_plugin._is_token_checked(freq, token,
                                                     original_response)
        self.assertTrue(checked)

    @attr('ci_fails')
    def test_is_token_checked_false(self):
        """
        This covers the case where there is a token but for some reason it
        is NOT verified by the web application.
        """
        generator = URL('http://moth/w3af/audit/csrf/vulnerable-token-ignored/')
        http_response = self.uri_opener.GET(generator)

        # Please note that this freq holds a fresh/valid CSRF token
        cookie = Cookie.from_http_response(http_response)
        freq = FuzzableRequest(generator, cookie=cookie)

        # FIXME:
        # And I use this token here to get the original response, and if the
        # application is properly developed, that token will be invalidated
        # and that's where this algorithm fails.
        original_response = self.uri_opener.send_mutant(freq)

        token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
        checked = self.csrf_plugin._is_token_checked(freq, token,
                                                     original_response)
        self.assertFalse(checked)
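The is_csrf_token() cases above pin down the expected behaviour: 32-character hex values pass, while empty, short, low-entropy or prose values fail. A deliberately narrow heuristic that is consistent with every case above (illustrative only; the plugin's real check is presumably more general, e.g. entropy based):

import string

HEX_CHARS = set(string.hexdigits.lower())

def looks_like_csrf_token(value, min_length=8):
    # Long enough and made up only of hex digits, which holds for all
    # the positive test cases above and none of the negative ones.
    if len(value) < min_length:
        return False  # rejects '' and 'f842e'
    return all(c in HEX_CHARS for c in value.lower())

assert looks_like_csrf_token('f842eb01b87a8ee18868d3bf80a558f3')
assert not looks_like_csrf_token('hello world 123')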
class TestRedirectHandlerExtendedUrllib(unittest.TestCase):
    """
    Test the redirect handler using ExtendedUrllib
    """
    REDIR_DEST = 'http://w3af.org/dest'
    REDIR_SRC = 'http://w3af.org/src'
    OK_BODY = 'Body!'

    def setUp(self):
        consecutive_number_generator.reset()
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    @httpretty.activate
    def test_redirect_302_simple_no_follow(self):
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src)

        location, _ = response.get_headers().iget('location')
        self.assertEqual(location, self.REDIR_DEST)
        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_302_simple_follow(self):
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})
        httpretty.register_uri(httpretty.GET, self.REDIR_DEST,
                               body=self.OK_BODY, status=200)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), OK)
        self.assertEqual(response.get_body(), self.OK_BODY)
        self.assertEqual(response.get_redir_uri(), URL(self.REDIR_DEST))
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        self.assertEqual(response.get_id(), 2)

    @httpretty.activate
    def test_redirect_301_loop(self):
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=MOVED_PERMANENTLY,
                               adding_headers={'Location': self.REDIR_DEST})
        httpretty.register_uri(httpretty.GET, self.REDIR_DEST,
                               body='', status=MOVED_PERMANENTLY,
                               adding_headers={'URI': self.REDIR_SRC})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # At some point the handler detects a loop and stops
        self.assertEqual(response.get_code(), MOVED_PERMANENTLY)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_id(), 9)

    @httpretty.activate
    def test_redirect_302_without_location_returns_302_response(self):
        # Breaks the RFC
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # Doesn't follow the redirects
        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_no_follow_file_proto(self):
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND,
                               adding_headers={'Location': 'file:///etc/passwd'})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        self.assertEqual(response.get_id(), 1)
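# The five tests above fix the observable contract of the redirect handler:
# redirects are only followed when follow_redirects=True, loops terminate
# after a bounded number of hops, a 302 without Location is returned as-is,
# and non-http(s) destinations are never followed. A sketch of that contract
# under those assumptions (the names and MAX_REDIRECTS value are illustrative,
# not w3af's real handler):
from urlparse import urljoin, urlparse  # Python 2, matching this code base

MAX_REDIRECTS = 8


def follow_redirects(fetch, url):
    # `fetch` is any callable returning an object with .status and .headers
    response = fetch(url)
    for _ in xrange(MAX_REDIRECTS):
        if response.status not in (301, 302, 303, 307):
            break
        location = response.headers.get('Location')
        if location is None:
            # A 302 without Location breaks the RFC; return it unchanged
            break
        target = urljoin(url, location)
        if urlparse(target).scheme not in ('http', 'https'):
            # Never follow file://, ftp:// and friends
            break
        url = target
        response = fetch(url)
    return response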
class FakeCore(object):
    worker_pool = Pool(1)
    uri_opener = ExtendedUrllib()
def setUp(self):
    consecutive_number_generator.reset()
    self.uri_opener = ExtendedUrllib()
def setUp(self):
    self.query, self.limit = random.choice([('big bang theory', 20),
                                            ('two and half man', 20),
                                            ('doctor house', 20)])
    opener = ExtendedUrllib()
    self.gse = google(opener)
def setUp(self):
    self.uri_opener = ExtendedUrllib()
class TestCSRF(PluginTest):

    target_url = 'http://moth/w3af/audit/csrf/'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('csrf'),),
                'crawl': (
                    PluginConfig('web_spider',
                                 ('only_forward', True, PluginConfig.BOOL)),
                )
            }
        }
    }

    def setUp(self):
        super(TestCSRF, self).setUp()

        self.csrf_plugin = csrf()
        self.uri_opener = ExtendedUrllib()
        self.csrf_plugin.set_url_opener(self.uri_opener)

    @attr('ci_fails')
    def test_found_csrf(self):
        EXPECTED = [
            '/w3af/audit/csrf/vulnerable/buy.php',
            '/w3af/audit/csrf/vulnerable-rnd/buy.php',
            # @see: https://github.com/andresriancho/w3af/issues/120
            #'/w3af/audit/csrf/vulnerable-token-ignored/buy.php',
            '/w3af/audit/csrf/link-vote/vote.php'
        ]

        # Run the scan
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # Assert the general results
        vulns = self.kb.get('csrf', 'csrf')

        self.assertEqual(set(EXPECTED),
                         set([v.get_url().get_path() for v in vulns]))
        self.assertTrue(
            all(['CSRF vulnerability' == v.get_name() for v in vulns]))

    def test_resp_is_equal(self):
        url = URL('http://www.w3af.com/')
        headers = Headers([('content-type', 'text/html')])

        r1 = HTTPResponse(200, 'body', headers, url, url)
        r2 = HTTPResponse(404, 'body', headers, url, url)
        self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2))

        r1 = HTTPResponse(200, 'a', headers, url, url)
        r2 = HTTPResponse(200, 'b', headers, url, url)
        self.assertFalse(self.csrf_plugin._is_resp_equal(r1, r2))

        r1 = HTTPResponse(200, 'a', headers, url, url)
        r2 = HTTPResponse(200, 'a', headers, url, url)
        self.assertTrue(self.csrf_plugin._is_resp_equal(r1, r2))

    @attr('ci_fails')
    def test_is_suitable(self):
        # False because no cookie is set and no QS nor post-data
        url = URL('http://moth/')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        # False because no cookie is set
        url = URL('http://moth/?id=3')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        url_sends_cookie = URL(
            'http://moth/w3af/core/cookie_handler/set-cookie.php')
        self.uri_opener.GET(url_sends_cookie)

        # Still false because it doesn't have any QS or POST data
        url = URL('http://moth/')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        self.csrf_plugin._strict_mode = True

        # Still false because of the strict mode
        url = URL('http://moth/?id=3')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        # False, no items in post-data
        url = URL('http://moth/')
        req = FuzzableRequest(url, method='POST', post_data=URLEncodedForm())
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertFalse(suitable)

        # True, items in DC, POST (passes strict mode) and cookies
        url = URL('http://moth/')
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'test'),
                                             ('type', 'text')])
        form = URLEncodedForm(form_params)
        req = FuzzableRequest(url, method='POST', post_data=form)
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertTrue(suitable)

        self.csrf_plugin._strict_mode = False

        # True now that we have strict mode off, cookies and QS
        url = URL('http://moth/?id=3')
        req = FuzzableRequest(url, method='GET')
        suitable = self.csrf_plugin._is_suitable(req)
        self.assertTrue(suitable)

    @attr('ci_fails')
    def test_is_origin_checked_true_case01(self):
        url = URL('http://moth/w3af/audit/csrf/referer/buy.php?shares=123')
        headers = Headers([('Referer',
                            'http://moth/w3af/audit/csrf/referer/')])
        freq = FuzzableRequest(url, method='GET', headers=headers)
        orig_response = self.uri_opener.send_mutant(freq)

        origin_checked = self.csrf_plugin._is_origin_checked(freq,
                                                             orig_response,
                                                             None)
        self.assertTrue(origin_checked)

    @attr('ci_fails')
    def test_is_origin_checked_true_case02(self):
        url = URL('http://moth/w3af/audit/csrf/referer-rnd/buy.php?shares=123')
        headers = Headers([('Referer',
                            'http://moth/w3af/audit/csrf/referer-rnd/')])
        freq = FuzzableRequest(url, method='GET', headers=headers)
        orig_response = self.uri_opener.send_mutant(freq)

        origin_checked = self.csrf_plugin._is_origin_checked(freq,
                                                             orig_response,
                                                             None)
        self.assertTrue(origin_checked)

    @attr('ci_fails')
    def test_is_origin_checked_false(self):
        url = URL('http://moth/w3af/audit/csrf/vulnerable/buy.php?shares=123')
        headers = Headers([('Referer',
                            'http://moth/w3af/audit/csrf/referer-rnd/')])
        freq = FuzzableRequest(url, method='GET', headers=headers)
        orig_response = self.uri_opener.send_mutant(freq)

        origin_checked = self.csrf_plugin._is_origin_checked(freq,
                                                             orig_response,
                                                             None)
        self.assertFalse(origin_checked)

    @attr('ci_fails')
    def test_is_token_checked_true(self):
        generator = URL('http://moth/w3af/audit/csrf/secure-replay-allowed/')
        http_response = self.uri_opener.GET(generator)

        # Please note that this freq holds a fresh/valid CSRF token
        cookie = Cookie.from_http_response(http_response)
        freq = FuzzableRequest(generator, cookie=cookie)

        # FIXME: the token is used here to get the original response; if the
        # application is properly developed, that token will be invalidated
        # after the first use, and that's where this algorithm fails.
        original_response = self.uri_opener.send_mutant(freq)

        token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
        checked = self.csrf_plugin._is_token_checked(freq, token,
                                                     original_response)
        self.assertTrue(checked)

    @attr('ci_fails')
    def test_is_token_checked_false(self):
        """
        This covers the case where there is a token but for some reason it
        is NOT verified by the web application.
        """
        generator = URL('http://moth/w3af/audit/csrf/vulnerable-token-ignored/')
        http_response = self.uri_opener.GET(generator)

        # Please note that this freq holds a fresh/valid CSRF token
        cookie = Cookie.from_http_response(http_response)
        freq = FuzzableRequest(generator, cookie=cookie)

        # FIXME: the token is used here to get the original response; if the
        # application is properly developed, that token will be invalidated
        # after the first use, and that's where this algorithm fails.
        original_response = self.uri_opener.send_mutant(freq)

        token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
        checked = self.csrf_plugin._is_token_checked(freq, token,
                                                     original_response)
        self.assertFalse(checked)
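# The two _is_token_checked tests above rely on a simple idea: resend the
# request with a tampered token and see whether the application notices. A
# framework-free sketch of that idea (all names here are assumptions, not the
# plugin's API):
def is_token_checked(send, params, token_name, original_body):
    # `send` is any callable mapping a param dict to a response body
    tampered = dict(params)
    tampered[token_name] = 'x' * len(params[token_name])
    tampered_body = send(tampered)
    # An application that validates the token rejects the tampered request,
    # so the body differs; one that ignores the token serves the same page.
    # As the FIXME notes, single-use tokens break this approach: the request
    # that fetched original_body already consumed the token, so even the
    # untouched token would be rejected and the comparison misfires.
    return tampered_body != original_body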
def test_ntlm_auth_not_configured(self):
    self.uri_opener = ExtendedUrllib()
    url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/")
    http_response = self.uri_opener.GET(url, cache=False)
    self.assertIn('Must authenticate.', http_response.body)
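# For contrast, a configured-credentials counterpart could follow the usual
# OpenerSettings get_options/set_value/set_options pattern. This is a sketch
# only: the ntlm_auth_* option names and the credentials are assumptions that
# would need to be checked against the real settings module.
def test_ntlm_auth_configured(self):
    self.uri_opener = ExtendedUrllib()

    settings = OpenerSettings()
    options = settings.get_options()
    options['ntlm_auth_url'].set_value('http://moth/w3af/core/ntlm_auth/ntlm_v1/')
    options['ntlm_auth_user'].set_value('moth\\admin')   # assumed test account
    options['ntlm_auth_passwd'].set_value('admin')       # assumed password
    settings.set_options(options)
    self.uri_opener.settings = settings

    url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/")
    http_response = self.uri_opener.GET(url, cache=False)
    self.assertNotIn('Must authenticate.', http_response.body)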