def __init__(self, ip, port, urlOpener=ExtendedUrllib(), proxy_cert=Proxy.SSL_CERT):
    """
    Intercepting local proxy: queues browser requests so the user can
    inspect/edit them before they are sent upstream.

    :param ip: IP address to bind
    :param port: Port to bind
    :param urlOpener: The urlOpener that will be used to open the requests
                      that arrive from the browser
    :param proxy_cert: Proxy certificate to use, this is needed for
                       proxying SSL connections.
    """
    # NOTE(review): the ExtendedUrllib() default is evaluated once at class
    # definition time and shared by every instance created without an
    # explicit urlOpener -- confirm this sharing is intended.
    Proxy.__init__(self, ip, port, urlOpener, w3afLocalProxyHandler,
                   proxy_cert)
    self.daemon = True
    self.name = 'LocalProxyThread'

    # Internal vars
    self._request_queue = Queue.Queue()
    self._edited_requests = {}
    self._edited_responses = {}

    # User configured parameters
    self._methods_to_trap = set()
    self._what_to_trap = re.compile('.*')
    # Raw string so the \. escape is explicit (same pattern bytes as before)
    self._what_not_to_trap = re.compile(
        r'.*\.(gif|jpg|png|css|js|ico|swf|axd|tif)$')
    self._trap = False
    self._fix_content_length = True
def test_sessions_basic(self):
    """Cookies set in one session must never leak into another session."""
    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE,
                           adding_headers={'Set-Cookie': self.COOKIE_VALUE})
    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=self.request_callback)

    uri_opener = ExtendedUrllib()

    first_session = uri_opener.get_new_session()
    second_session = uri_opener.get_new_session()

    # Only the first session receives the Set-Cookie response
    uri_opener.GET(URL(self.URL_SEND_COOKIE), session=first_session)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True,
                          session=first_session)
    self.assertIn('Cookie received', resp)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True,
                          session=second_session)
    self.assertIn('Cookie not sent', resp)
def setUp(self):
    """Build a SQLMapWrapper pointed at the SQL injection test URL."""
    self.uri_opener = ExtendedUrllib()
    target = Target(URL(self.SQLI_GET))
    self.sqlmap = SQLMapWrapper(target, self.uri_opener, debug=True)
def test_sessions_simultaneous(self):
    """
    Two sessions hold two different cookies at the same time; each request
    must carry only the cookie that belongs to its own session.
    """
    def request_callback(request, uri, headers):
        received_cookie_value = request.headers.get('cookie', None)
        if received_cookie_value is not None:
            return 200, headers, 'Cookie %s received' % received_cookie_value
        else:
            return 200, headers, 'Cookie not sent'

    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE + '1',
                           adding_headers={'Set-Cookie': '11111111'})

    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE + '2',
                           adding_headers={'Set-Cookie': '222222222'})

    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=request_callback)

    uri_opener = ExtendedUrllib()

    session_1 = uri_opener.get_new_session()
    session_2 = uri_opener.get_new_session()

    uri_opener.GET(URL(self.URL_SEND_COOKIE + '1'), session=session_1)
    uri_opener.GET(URL(self.URL_SEND_COOKIE + '2'), session=session_2)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), session=session_1)
    self.assertIn('Cookie 11111111 received', resp.body)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), session=session_2)
    # Consistency fix: assert against resp.body like the assertion above,
    # not against the HTTPResponse object itself
    self.assertIn('Cookie 222222222 received', resp.body)
def test_delay_controlled_random(self):
    """
    Like test_delay_controlled, but each mocked response carries random
    extra delay: false negatives are tolerated, false positives are not.
    """
    for expected_result, delays in self.TEST_SUITE:
        mocked_urllib = ExtendedUrllib()
        mocked_urllib.send_mutant = MagicMock(
            side_effect=generate_delays(delays, rand_range=(0, 2)))

        url = URL('http://moth/?id=1')
        mutant = QSMutant(FuzzableRequest(url))
        mutant.set_dc(url.querystring)
        mutant.set_token(('id', 0))

        delay_obj = ExactDelay('sleep(%s)')
        ed = ExactDelayController(mutant, delay_obj, mocked_urllib)
        controlled, responses = ed.delay_is_controlled()

        # This is where we change from test_delay_controlled: with random
        # noise a "controlled" case may legitimately come back False, but
        # an uncontrolled case must never come back True
        acceptable = [True, False] if expected_result else [False]
        self.assertIn(controlled, acceptable, delays)
def test_verify_vulnerability_ssl(self):
    """SQLMapWrapper must confirm the SQL injection over HTTPS as well."""
    target = Target(URL(self.SSL_SQLI_GET))
    self.uri_opener = ExtendedUrllib()
    self.sqlmap = SQLMapWrapper(target, self.uri_opener)

    self.assertTrue(self.sqlmap.is_vulnerable(), self.sqlmap.last_stdout)
def test_raise_on_domain_not_in_archive(self):
    # archive.org holds no copy of w3af-scanner.org, so the plugin is
    # expected to bail out with RunOnce
    ado = archive_dot_org()
    ado.set_url_opener(ExtendedUrllib())

    fr = FuzzableRequest(URL('http://www.w3af-scanner.org/'),
                         method='GET')
    self.assertRaises(RunOnce, ado.crawl_wrapper, fr)
def test_send_mangled(self):
    # The self_reference evasion plugin should rewrite the request path
    # on its way out
    xurllib = ExtendedUrllib()
    xurllib.set_evasion_plugins([self_reference(), ])

    http_response = xurllib.GET(URL('http://moth/'))
    self.assertEqual(http_response.get_url().url_string,
                     u'http://moth/./')
def setUp(self):
    """Start a Proxy daemon and point an ExtendedUrllib instance at it."""
    self.uri_opener = ExtendedUrllib()

    # Start the proxy daemon on an ephemeral port
    self._proxy = Proxy('127.0.0.2', 0, ExtendedUrllib(), ProxyHandler)
    self._proxy.start()
    self._proxy.wait_for_start()
    proxy_port = self._proxy.get_port()

    # Configure the opener's settings to route through the proxy
    settings = OpenerSettings()
    options = settings.get_options()
    options['proxy_address'].set_value('127.0.0.2')
    options['proxy_port'].set_value(proxy_port)
    settings.set_options(options)

    self.uri_opener.settings = settings
def test_xurllib(self):
    uri_opener = ExtendedUrllib()

    # First request stores the cookie in the opener's jar
    uri_opener.GET(self.URL_SENDS_COOKIE)

    # With cookies enabled the server must see the stored cookie...
    resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=True)
    self.assertTrue('Cookie was sent.' in resp)

    # ...with cookies disabled it must not...
    resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=False)
    self.assertTrue('Cookie was NOT sent.' in resp)

    # ...and re-enabling cookies sends it again
    resp = uri_opener.GET(self.URL_CHECK_COOKIE, cookies=True)
    self.assertTrue('Cookie was sent.' in resp)
def test_headers_upper_case(self):
    # NOTE(review): the assertions below do not match the httpretty
    # registration (body='hello world', content_type="application/html"),
    # so this looks like it can only pass against a different/live
    # server -- confirm intent before touching it.
    url = "http://w3af.org/"
    httpretty.register_uri(httpretty.GET, url, body='hello world',
                           content_type="application/html")

    uri_opener = ExtendedUrllib()
    res = uri_opener.GET(URL(url), cache=False)
    headers = res.get_headers()
    # NOTE(review): tuple-unpacking assumes headers.get() returns a pair;
    # the sibling test treats the same call's return value as a plain
    # string -- verify which Headers API is current.
    content_encoding, _ = headers.get('Content-Type', '')

    # NOTE(review): variable is named content_encoding but holds the
    # Content-Type header value, and the body assertion expects text the
    # registered body does not contain.
    self.assertIn('gzip', content_encoding)
    self.assertIn('View HTTP response headers.', res.get_body())
def setUp(self):
    """Start an InterceptProxy and build a urllib2 opener that uses it."""
    create_temp_dir()

    # Start the proxy server on an ephemeral port
    self._proxy = InterceptProxy(self.IP, 0, ExtendedUrllib())
    self._proxy.start()
    self._proxy.wait_for_start()
    proxy_port = self._proxy.get_port()

    # Build an opener that routes HTTP traffic through the proxy
    handler = urllib2.ProxyHandler(
        {'http': 'http://%s:%s' % (self.IP, proxy_port)})
    self.proxy_opener = urllib2.build_opener(handler,
                                             urllib2.HTTPHandler)
def test_pickleable_shells(self):
    # Shells stored in the kb go through pickling; the un-picklable
    # worker pool and uri opener must be dropped on the way in
    worker_pool = Pool(1)
    xurllib = ExtendedUrllib()

    original_shell = Shell(MockVuln(), xurllib, worker_pool)
    kb.append('a', 'b', original_shell)
    unpickled_shell = kb.get('a', 'b')[0]

    self.assertEqual(original_shell, unpickled_shell)
    self.assertEqual(unpickled_shell.worker_pool, None)
    self.assertEqual(unpickled_shell._uri_opener, None)

    worker_pool.terminate()
    worker_pool.join()
    xurllib.end()
def test_delay_controlled(self):
    """ExactDelayController must classify each TEST_SUITE delay pattern."""
    for expected_result, delays in self.TEST_SUITE:
        mocked_urllib = ExtendedUrllib()
        mocked_urllib.send_mutant = MagicMock(
            side_effect=generate_delays(delays))

        url = URL('http://moth/?id=1')
        mutant = QSMutant(FuzzableRequest(url))
        mutant.set_dc(url.querystring)
        mutant.set_token(('id', 0))

        ed = ExactDelayController(mutant, ExactDelay('sleep(%s)'),
                                  mocked_urllib)
        controlled, responses = ed.delay_is_controlled()

        self.assertEqual(expected_result, controlled, delays)
def test_ntlm_auth_valid_creds(self):
    """With valid NTLM credentials configured the page must greet admin."""
    self.uri_opener = ExtendedUrllib()

    settings = OpenerSettings()
    options = settings.get_options()

    # Configure NTLM credentials for the moth test environment
    options['ntlm_auth_domain'].set_value('moth')
    options['ntlm_auth_user'].set_value('admin')
    options['ntlm_auth_passwd'].set_value('admin')
    options['ntlm_auth_url'].set_value(
        'http://moth/w3af/core/ntlm_auth/ntlm_v1/')

    settings.set_options(options)
    self.uri_opener.settings = settings

    url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/")
    http_response = self.uri_opener.GET(url, cache=False)
    self.assertIn('You are admin from MOTH/', http_response.body)
def test_xurllib(self):
    """Cookie handling follows the cookies=True/False flag per request."""
    httpretty.register_uri(httpretty.GET,
                           self.URL_SEND_COOKIE,
                           adding_headers={'Set-Cookie': self.COOKIE_VALUE})
    httpretty.register_uri(httpretty.GET,
                           self.URL_CHECK_COOKIE,
                           body=self.request_callback)

    uri_opener = ExtendedUrllib()

    # Store the cookie in the opener's jar
    uri_opener.GET(URL(self.URL_SEND_COOKIE))

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True)
    self.assertIn('Cookie received', resp)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=False)
    self.assertIn('Cookie not sent', resp)

    resp = uri_opener.GET(URL(self.URL_CHECK_COOKIE), cookies=True)
    self.assertIn('Cookie received', resp)
def test_false_negative(self):
    # Regression test: a 500 response whose body doesn't look like an
    # error page must still be classified as "404" (not a real resource)
    server_error = ('500 error that does NOT\n'
                    'look like one\n'
                    'because we want to reproduce the bug\n')

    not_found = ('This is a 404\n'
                 'but it does NOT look like one\n'
                 'because we want to reproduce the bug\n')

    httpretty.register_uri(httpretty.GET,
                           re.compile("w3af.com/foo/(.*)"),
                           body=server_error,
                           status=500)
    httpretty.register_uri(httpretty.GET,
                           re.compile("w3af.com/(.*)"),
                           body=not_found,
                           status=404)

    root_url = URL('http://w3af.com/')
    foo_url = URL('http://w3af.com/foo/phpinfo.php')
    server_error_resp = HTTPResponse(500, server_error, Headers(),
                                     foo_url, foo_url)

    xurllib = ExtendedUrllib()
    worker_pool = Pool(processes=2,
                       worker_names='WorkerThread',
                       max_queued_tasks=2,
                       maxtasksperchild=20)

    fingerprint_404 = Fingerprint404()
    fingerprint_404.set_url_opener(xurllib)
    fingerprint_404.set_worker_pool(worker_pool)
    fingerprint_404.generate_404_knowledge(root_url)

    self.assertTrue(fingerprint_404.is_404(server_error_resp))

    fingerprint_404.cleanup()
    xurllib.clear()
def test_headers_upper_case(self):
    """
    This unittest is skipped here, but shouldn't be removed: it is a
    reminder that w3af (via urllib/httplib) always lower()s the data
    received over the wire, so we never see exactly what the server sent,
    only a normalized view of it.
    """
    url = "http://w3af.org/"
    httpretty.register_uri(httpretty.GET, url, body='hello world',
                           content_type="application/html")

    res = ExtendedUrllib().GET(URL(url), cache=False)

    content_type = res.get_headers().get('Content-Type', '')
    self.assertIn('application/html', content_type)
def test_demo_testfire_net(self):
    """
    Scan demo.testfire.net and verify the XPath injection pages are found.

    We don't control the demo.testfire.net domain, so first check that it
    is up and still serving the expected login page; skip otherwise.
    """
    uri_opener = ExtendedUrllib()
    login_url = URL(self.demo_testfire + 'login.aspx')

    try:
        res = uri_opener.GET(login_url)
    except Exception:
        # Bug fix: the original bare "except:" also swallowed SystemExit
        # and KeyboardInterrupt; only real errors should trigger the skip
        raise SkipTest('demo.testfire.net is unreachable!')
    else:
        if 'Online Banking Login' not in res.body:
            raise SkipTest('demo.testfire.net has changed!')

    self._scan(self.demo_testfire_net['target'],
               self.demo_testfire_net['plugins'])

    urls = self.kb.get_all_known_urls()
    url_strings = set(str(u) for u in urls)

    self.assertTrue(self.demo_testfire + 'queryxpath.aspx' in url_strings)
    self.assertTrue(self.demo_testfire + 'queryxpath.aspx.cs' in url_strings)
def setUp(self):
    # Each test gets its own fresh url opener
    self.uri_opener = ExtendedUrllib()
def setUp(self):
    # Wire a fresh Fingerprint404 instance to its own url opener
    self.urllib = ExtendedUrllib()

    self.fingerprint_404 = Fingerprint404()
    self.fingerprint_404.set_url_opener(self.urllib)
def setUp(self):
    super(TestDirectoryTraversal, self).setUp()

    # Plugin under test, wired to its own url opener
    self.uri_opener = ExtendedUrllib()
    self.csrf_plugin = directory_traversal()
    self.csrf_plugin.set_url_opener(self.uri_opener)
def setUp(self):
    # Search engine under test, wired to a fresh url opener
    self.bing_se = bing(ExtendedUrllib())
def setUp(self):
    # Pick a random (query, limit) pair so runs don't always hit the
    # search engine with the same terms
    candidates = [('big bang theory', 20),
                  ('two and half man', 20),
                  ('doctor house', 20)]
    self.query, self.limit = random.choice(candidates)

    self.gse = google(ExtendedUrllib())
class FakeCore(object):
    """Minimal w3af core stand-in exposing only the attributes tests use."""
    # Shared, created once at class definition time
    worker_pool = Pool(1)
    uri_opener = ExtendedUrllib()
def setUp(self):
    # pks needs the temp directory to exist before it runs
    create_temp_dir()
    self.pks_se = pks(ExtendedUrllib())
def setUp(self):
    super(TestCSRF, self).setUp()

    # Plugin under test, wired to its own url opener
    self.uri_opener = ExtendedUrllib()
    self.csrf_plugin = csrf()
    self.csrf_plugin.set_url_opener(self.uri_opener)
def setUp(self):
    # Reset the shared id counter so request numbering is predictable
    consecutive_number_generator.reset()
    self.uri_opener = ExtendedUrllib()
def test_ntlm_auth_not_configured(self):
    # Without NTLM credentials configured the server must ask us to log in
    self.uri_opener = ExtendedUrllib()

    response = self.uri_opener.GET(
        URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/"), cache=False)

    self.assertIn('Must authenticate.', response.body)