class Generic404Test(unittest.TestCase):
    """Shared fixture for 404-fingerprinting tests: provides a large,
    deterministic HTTP response body plus an ExtendedUrllib-backed
    Fingerprint404 instance."""

    def get_body(self, unique_parts):
        """Return a big, reproducible body: stdlib docstrings repeated 50
        times plus the caller's unique parts, shuffled with a fixed seed."""
        # Do not increase this 50 too much, it will exceed the xurllib max
        # HTTP response body length
        filler = [re.__doc__, random.__doc__, unittest.__doc__] * 50
        filler.extend(unique_parts)

        # Fixed seed => identical ordering on every call
        shuffler = random.Random()
        shuffler.seed(1)
        shuffler.shuffle(filler)

        return '\n'.join(filler)

    def setUp(self):
        self.urllib = ExtendedUrllib()

        self.fingerprint_404 = Fingerprint404()
        self.fingerprint_404.set_url_opener(self.urllib)

    def tearDown(self):
        self.urllib.end()
        clear_default_temp_db_instance()
class TestExtendedUrllibProxy(unittest.TestCase):
    """Exercise ExtendedUrllib when all of its traffic is routed through a
    local w3af proxy daemon started in setUp()."""

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

        # Start the proxy daemon
        self._proxy = Proxy('127.0.0.1', 0, ExtendedUrllib(), w3afProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()

        proxy_port = self._proxy.get_port()

        # Point the opener's settings at the freshly started proxy
        settings = OpenerSettings()
        options = settings.get_options()
        options['proxy_address'].set_value('127.0.0.1')
        options['proxy_port'].set_value(proxy_port)
        settings.set_options(options)

        self.uri_opener.settings = settings

    def tearDown(self):
        self.uri_opener.end()

    def test_http_default_port_via_proxy(self):
        # Plain HTTP through the proxy should still return the moth homepage
        response = self.uri_opener.GET(URL(get_moth_http()), cache=False)
        self.assertIn(self.MOTH_MESSAGE, response.body)

    def test_http_port_specification_via_proxy(self):
        response = self.uri_opener.GET(URL(get_moth_http()), cache=False)
        self.assertIn(self.MOTH_MESSAGE, response.body)

    def test_https_via_proxy(self):
        TODO = ('Skip this test because of a strange bug with the extended'
                ' url library and w3af\'s local proxy daemon. More info here:'
                ' https://github.com/andresriancho/w3af/issues/183')
        raise SkipTest(TODO)

        # Unreachable until the issue above is fixed
        response = self.uri_opener.GET(URL(get_moth_https()), cache=False)
        self.assertIn(self.MOTH_MESSAGE, response.body)

    def test_offline_port_via_proxy(self):
        # Nothing listens on 8181; the proxy answers with a 400
        response = self.uri_opener.GET(URL('http://127.0.0.1:8181/'),
                                       cache=False)
        self.assertEqual(response.get_code(), 400)

    def test_POST_via_proxy(self):
        target = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        response = self.uri_opener.POST(target, data='text=123456abc',
                                        cache=False)
        self.assertIn('123456abc', response.body)
class TestXUrllibTimeout(unittest.TestCase):
    """Timeout handling in ExtendedUrllib against a TCP server that accepts
    the connection but never sends a response (TimeoutTCPHandler)."""

    def setUp(self):
        # Fresh opener per test; shut down again in tearDown()
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_timeout(self):
        """A GET against a never-answering socket must surface as an
        HTTPRequestException carrying the 'HTTP timeout error' message."""
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()
        url = URL('http://127.0.0.1:%s/' % port)

        # Short 0.5s timeout keeps the test fast
        self.uri_opener.settings.set_configured_timeout(0.5)
        self.uri_opener.clear_timeout()
        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()

        # NOTE(review): `start` is never read in this block — possibly a
        # leftover from an elapsed-time assertion; confirm before removing.
        start = time.time()

        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            # Expected path: socket timeout translated to the opener's
            # own exception type
            self.assertEqual(hre.message, 'HTTP timeout error')
        except Exception, e:
            # Any other exception type fails the test
            msg = 'Not expecting: "%s"'
            self.assertTrue(False, msg % e.__class__.__name__)
class Generic404Test(unittest.TestCase):
    """Base fixture for 404 fingerprinting: builds a reproducible large body
    and wires a Fingerprint404 to an ExtendedUrllib opener."""

    def get_body(self, unique_parts):
        """Compose stdlib docstrings (x50) with `unique_parts` and shuffle
        them deterministically (fixed PRNG seed)."""
        # Do not increase this 50 too much, it will exceed the xurllib max
        # HTTP response body length
        doc_parts = [re.__doc__, random.__doc__, unittest.__doc__]
        all_parts = doc_parts * 50 + list(unique_parts)

        prng = random.Random()
        prng.seed(1)
        prng.shuffle(all_parts)

        return '\n'.join(all_parts)

    def setUp(self):
        self.urllib = ExtendedUrllib()
        self.fingerprint_404 = Fingerprint404()
        self.fingerprint_404.set_url_opener(self.urllib)

    def tearDown(self):
        self.urllib.end()
        clear_default_temp_db_instance()
class TestXUrllibTimeout(unittest.TestCase):
    """Timeout handling in ExtendedUrllib against a TCP server that accepts
    the connection but never answers (TimeoutTCPHandler)."""

    def setUp(self):
        # One opener per test; closed in tearDown()
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_timeout(self):
        """GET on a non-responding socket must raise HTTPRequestException
        with message 'HTTP timeout error'; any other exception fails."""
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()
        url = URL('http://127.0.0.1:%s/' % port)

        # 0.5s timeout so the test completes quickly
        self.uri_opener.settings.set_configured_timeout(0.5)
        self.uri_opener.clear_timeout()
        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()

        # NOTE(review): `start` is unused in the visible code — looks like a
        # leftover from an elapsed-time check; confirm before removing.
        start = time.time()

        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            # Expected: the library translated the socket timeout
            self.assertEqual(hre.message, 'HTTP timeout error')
        except Exception, e:
            msg = 'Not expecting: "%s"'
            self.assertTrue(False, msg % e.__class__.__name__)
class TestExtendedUrllibProxy(unittest.TestCase):
    """ExtendedUrllib behind a local w3af proxy daemon; every request in
    these tests travels through the proxy configured in setUp()."""

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

        # Bring up the local proxy on a random free port
        self._proxy = Proxy('127.0.0.1', 0, ExtendedUrllib(), w3afProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()

        listen_port = self._proxy.get_port()

        # Configure the proxy
        settings = OpenerSettings()
        opts = settings.get_options()

        addr_opt = opts['proxy_address']
        port_opt = opts['proxy_port']
        addr_opt.set_value('127.0.0.1')
        port_opt.set_value(listen_port)

        settings.set_options(opts)
        self.uri_opener.settings = settings

    def tearDown(self):
        self.uri_opener.end()

    def test_http_default_port_via_proxy(self):
        moth_url = URL(get_moth_http())
        resp = self.uri_opener.GET(moth_url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, resp.body)

    def test_http_port_specification_via_proxy(self):
        moth_url = URL(get_moth_http())
        resp = self.uri_opener.GET(moth_url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, resp.body)

    def test_https_via_proxy(self):
        TODO = ('Skip this test because of a strange bug with the extended'
                ' url library and w3af\'s local proxy daemon. More info here:'
                ' https://github.com/andresriancho/w3af/issues/183')
        raise SkipTest(TODO)

        # Dead code until the referenced issue is resolved
        resp = self.uri_opener.GET(URL(get_moth_https()), cache=False)
        self.assertIn(self.MOTH_MESSAGE, resp.body)

    def test_offline_port_via_proxy(self):
        # Closed port behind the proxy => proxy responds 400
        resp = self.uri_opener.GET(URL('http://127.0.0.1:8181/'), cache=False)
        self.assertEqual(resp.get_code(), 400)

    def test_POST_via_proxy(self):
        form_url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        resp = self.uri_opener.POST(form_url, data='text=123456abc',
                                    cache=False)
        self.assertIn('123456abc', resp.body)
def test_pickleable_shells(self):
    """A Shell stored in the KB must survive serialization: it compares
    equal after the round-trip, while its unpicklable members
    (worker pool, uri opener) come back as None."""
    worker_pool = Pool(1)
    opener = ExtendedUrllib()

    stored_shell = Shell(MockVuln(), opener, worker_pool)
    kb.append('a', 'b', stored_shell)

    restored_shell = kb.get('a', 'b')[0]

    self.assertEqual(stored_shell, restored_shell)
    # These attributes cannot be pickled, so the KB drops them
    self.assertEqual(restored_shell.worker_pool, None)
    self.assertEqual(restored_shell._uri_opener, None)

    worker_pool.terminate()
    worker_pool.join()
    opener.end()
class TestRedirectHandlerExtendedUrllib(unittest.TestCase):
    """
    Test the redirect handler using ExtendedUrllib.

    NOTE: the get_id() assertions pin exact response-id counter values, so
    they depend on consecutive_number_generator.reset() in setUp() and on
    the precise number of HTTP requests each test performs.
    """
    REDIR_DEST = 'http://w3af.org/dest'
    REDIR_SRC = 'http://w3af.org/src'
    OK_BODY = 'Body!'

    def setUp(self):
        # Reset the global id counter so get_id() values are predictable
        consecutive_number_generator.reset()
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    @httpretty.activate
    def test_redirect_302_simple_no_follow(self):
        """Without follow_redirects the 302 itself is returned unchanged."""
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='',
                               status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src)

        location, _ = response.get_headers().iget('location')
        self.assertEqual(location, self.REDIR_DEST)
        self.assertEqual(response.get_code(), FOUND)
        # Only one request was made
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_302_simple_follow(self):
        """With follow_redirects=True the destination body is returned,
        while the response still reports the original URL."""
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='',
                               status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})
        httpretty.register_uri(httpretty.GET, self.REDIR_DEST,
                               body=self.OK_BODY,
                               status=200)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), OK)
        self.assertEqual(response.get_body(), self.OK_BODY)
        self.assertEqual(response.get_redir_uri(), URL(self.REDIR_DEST))
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        # Two requests: the 302 plus the followed destination
        self.assertEqual(response.get_id(), 2)

    @httpretty.activate
    def test_redirect_301_loop(self):
        """Two URLs redirecting to each other (one via the legacy 'URI'
        header) must be detected as a loop and the handler must stop."""
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='',
                               status=MOVED_PERMANENTLY,
                               adding_headers={'Location': self.REDIR_DEST})
        httpretty.register_uri(httpretty.GET, self.REDIR_DEST,
                               body='',
                               status=MOVED_PERMANENTLY,
                               adding_headers={'URI': self.REDIR_SRC})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # At some point the handler detects a loop and stops
        self.assertEqual(response.get_code(), MOVED_PERMANENTLY)
        self.assertEqual(response.get_body(), '')
        # The id shows how many requests were made before loop detection
        self.assertEqual(response.get_id(), 9)

    @httpretty.activate
    def test_redirect_302_without_location_returns_302_response(self):
        """A 302 without a Location header (breaks the RFC) is returned
        as-is instead of being followed."""
        # Breaks the RFC
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='',
                               status=FOUND)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # Doesn't follow the redirects
        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_no_follow_file_proto(self):
        """Redirects to non-http(s) schemes (file://) must NOT be followed."""
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='',
                               status=FOUND,
                               adding_headers={'Location': 'file:///etc/passwd'})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        # Only the original request was sent
        self.assertEqual(response.get_id(), 1)
class TestXUrllib(unittest.TestCase):
    """Core ExtendedUrllib behaviour: basic GET/POST, caching, error
    handling (unknown hosts, closed ports, non-HTTP servers, timeouts),
    pause/stop control and temp-DB cleanup."""

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_basic(self):
        """A plain GET returns the moth homepage and a positive id."""
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        """Two cached GETs to the same URL both return the page."""
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        """Query-string parameters are sent and echoed back by moth."""
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_POST(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_POST_special_chars(self):
        """POST data containing markup and non-ASCII chars survives a
        round-trip through the form handler."""
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_url(self):
        """DNS failure surfaces as ScanMustStopOnUrlError."""
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        """A listening socket that speaks no HTTP (EmptyTCPHandler) also
        raises ScanMustStopOnUrlError."""
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http_many(self):
        """After repeated failures (up to MAX_ERROR_COUNT) the opener must
        escalate to ScanMustStopException; the for/else fails the test if
        the break is never reached."""
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                # Failures should be classified, never "unknown"
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                # Escalation happened: stop looping, test passes
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

    def test_timeout(self):
        """A 1s timeout against a never-answering server raises
        ScanMustStopOnUrlError; settings restored afterwards."""
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_timeout(1)

        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

        self.uri_opener.settings.set_default_values()

    def test_timeout_many(self):
        """Like test_url_port_not_http_many but driven by timeouts."""
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_timeout(1)

        url = URL('http://127.0.0.1:%s/' % port)

        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

        self.uri_opener.settings.set_default_values()

    def test_ignore_errors(self):
        """With ignore_errors=True the retry machinery must NOT kick in:
        _retry (mocked) is never called even though the GET fails."""
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_timeout(1)
        self.uri_opener._retry = Mock()

        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url, ignore_errors=True)
        except ScanMustStopOnUrlError:
            self.assertEqual(self.uri_opener._retry.call_count, 0)
        else:
            self.assertTrue(False, 'Exception not raised')

        self.uri_opener.settings.set_default_values()

    def test_stop(self):
        """After stop() every GET raises ScanMustStopByUserRequest."""
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url)

    def test_pause_stop(self):
        """stop() wins even when the opener is paused."""
        self.uri_opener.pause(True)
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest, self.uri_opener.GET, url)

    def test_pause(self):
        """While paused, a GET issued from another thread must block:
        nothing arrives on the output queue within 2 seconds."""
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

    def test_pause_unpause(self):
        """The blocked GET resumes and completes once pause(False) is
        called; the thread then delivers a 200 response."""
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        self.uri_opener.pause(False)

        http_response = output.get()
        self.assertNotIsInstance(http_response, types.NoneType,
                                 'Error in send thread.')

        th.join()

        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_removes_cache(self):
        """end() must remove every temp DB/trace file the opener created."""
        url = URL(get_moth_http())
        self.uri_opener.GET(url, cache=False)

        # Please note that this line, together with the tearDown() act as
        # a test for a "double call to end()".
        self.uri_opener.end()

        db_fmt = 'db_unittest-%s'
        trace_fmt = 'db_unittest-%s_traces/'
        temp_dir = get_temp_dir()

        for i in xrange(100):
            test_db_path = os.path.join(temp_dir, db_fmt % i)
            test_trace_path = os.path.join(temp_dir, trace_fmt % i)
            self.assertFalse(os.path.exists(test_db_path), test_db_path)
            self.assertFalse(os.path.exists(test_trace_path), test_trace_path)

    def test_special_char_header(self):
        """Non-ASCII header values are sent correctly and echoed back."""
        url = URL(get_moth_http('/core/headers/echo-headers.py'))
        header_content = u'name=ábc'
        headers = Headers([('Cookie', header_content)])

        http_response = self.uri_opener.GET(url, cache=False, headers=headers)
        self.assertIn(header_content, http_response.body)
class TestXUrllib(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() def tearDown(self): self.uri_opener.end() def test_basic(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertGreaterEqual(http_response.id, 1) self.assertNotEqual(http_response.id, None) def test_cache(self): url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url) self.assertIn(self.MOTH_MESSAGE, http_response.body) def test_qs_params(self): url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('123456abc', http_response.body) url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0')) http_response = self.uri_opener.GET(url, cache=False) self.assertIn('root:x:0', http_response.body) def test_post(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) data = URLEncodedForm() data['text'] = ['123456abc'] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn('123456abc', http_response.body) def test_post_special_chars(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) test_data = u'abc<def>"-á-' data = URLEncodedForm() data['text'] = [test_data] http_response = self.uri_opener.POST(url, data, cache=False) self.assertIn(test_data, http_response.body) def test_unknown_domain(self): url = URL('http://longsitethatdoesnotexistfoo.com/') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_file_proto(self): url = URL('file://foo/bar.txt') self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_closed(self): # TODO: Change 2312 by an always closed/non-http port url = URL('http://127.0.0.1:2312/') 
self.assertRaises(HTTPRequestException, self.uri_opener.GET, url) def test_url_port_not_http(self): upper_daemon = UpperDaemon(EmptyTCPHandler) upper_daemon.start() upper_daemon.wait_for_start() port = upper_daemon.get_port() url = URL('http://127.0.0.1:%s/' % port) try: self.uri_opener.GET(url) except HTTPRequestException, hre: self.assertEqual(hre.value, "Bad HTTP response status line: ''") else:
class TestExtendedUrllibProxy(unittest.TestCase):
    """ExtendedUrllib routed through a ProxyHandler-based proxy on the
    127.0.0.2 loopback alias; also checks the proxy's request counter to
    prove the traffic really went through it."""

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

        # Start the proxy daemon on 127.0.0.2, random free port
        self._proxy = Proxy('127.0.0.2', 0, ExtendedUrllib(), ProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()

        proxy_port = self._proxy.get_port()

        # Route the opener through the proxy we just started
        settings = OpenerSettings()
        options = settings.get_options()
        options['proxy_address'].set_value('127.0.0.2')
        options['proxy_port'].set_value(proxy_port)
        settings.set_options(options)

        self.uri_opener.settings = settings

    def tearDown(self):
        self.uri_opener.end()

    def test_http_default_port_via_proxy(self):
        # TODO: Write this test
        pass

    def test_http_port_specification_via_proxy(self):
        self.assertEqual(self._proxy.total_handled_requests, 0)

        response = self.uri_opener.GET(URL(get_moth_http()), cache=False)

        self.assertIn(self.MOTH_MESSAGE, response.body)
        # Exactly one request passed through the proxy
        self.assertEqual(self._proxy.total_handled_requests, 1)

    def test_https_via_proxy(self):
        self.assertEqual(self._proxy.total_handled_requests, 0)

        response = self.uri_opener.GET(URL(get_moth_https()), cache=False)

        self.assertIn(self.MOTH_MESSAGE, response.body)
        self.assertEqual(self._proxy.total_handled_requests, 1)

    def test_offline_port_via_proxy(self):
        # The proxy reports the connect failure as a 500 with details
        response = self.uri_opener.GET(URL('http://127.0.0.1:8181/'),
                                       cache=False)

        self.assertEqual(response.get_code(), 500)
        self.assertIn('Connection refused', response.body)

    def test_POST_via_proxy(self):
        target = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        response = self.uri_opener.POST(target, data='text=123456abc',
                                        cache=False)
        self.assertIn('123456abc', response.body)
class TestMultipartPostUpload(unittest.TestCase):
    """
    In the new architecture I've been working on, the HTTP requests are
    almost completely created by serializing two objects:

        * FuzzableRequest
        * DataContainer (stored in FuzzableRequest._post_data)

    There is a special DataContainer sub-class for MultipartPost file uploads
    called MultipartContainer, which holds variables and files and when
    serialized will be encoded as multipart.

    These test cases try to make sure that the file upload feature works by
    sending a POST request with a MultipartContainer to moth.
    """
    MOTH_FILE_UP_URL = URL(get_moth_http('/core/file_upload/upload.py'))

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_multipart_without_file(self):
        # A plain string in the file field must NOT be accepted as an upload
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = 'this is not a file'
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertNotIn('was successfully uploaded', resp.get_body())

    def test_file_upload(self):
        # FIX: mkstemp() returns an OS-level fd that the caller must close,
        # and the temp file is never removed automatically. The previous
        # version leaked the fd, the open file object AND the file itself.
        fd, file_path = tempfile.mkstemp(suffix=".tmp")
        try:
            os.write(fd, 'file content')
        finally:
            os.close(fd)

        _file = open(file_path, "rb")
        try:
            self.upload_file(_file)
        finally:
            _file.close()
            os.remove(file_path)

    def test_stringio_upload(self):
        # In-memory file-like objects need no cleanup
        _file = NamedStringIO('file content', name='test.txt')
        self.upload_file(_file)

    def upload_file(self, _file):
        """POST `_file` as a multipart upload and assert moth accepted it."""
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)
        mpc['uploadedfile'][0] = _file

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertIn('was successfully uploaded', resp.get_body())

    def test_upload_file_using_fuzzable_request(self):
        # Same upload, but serialized through FuzzableRequest/send_mutant
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = NamedStringIO('file content',
                                                       name='test.txt')
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)
        freq = FuzzableRequest(self.MOTH_FILE_UP_URL, post_data=mpc,
                               method='POST')

        resp = self.opener.send_mutant(freq)

        self.assertIn('was successfully uploaded', resp.get_body())
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below
          for tests on these particular search implementations.

    This base class is not intended to be run by nosetests.
    """
    # Subclasses set this to the searcher class under test
    GoogleApiSearcher = None
    # Number of results requested per search
    COUNT = 10

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    @attr('fails')
    def test_len_link_results(self):
        """A generic keyword search returns at least COUNT valid links
        whose URLs relate to the searched keywords."""
        # Base class itself has no searcher configured: nothing to test
        if self.GoogleApiSearcher is None:
            return

        keywords = ['pink', 'red', 'blue']
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search (attribute access has the side
        # effect of performing the HTTP requests)
        searcher.links

        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        # Every returned link must look like a URL
        for link in searcher.links:
            self.assertTrue(
                URL_REGEX.match(link.URL.url_string) is not None,
                link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    @attr('fails')
    def test_links_results_domain(self):
        """A site:domain search returns exactly COUNT links, all from the
        requested domain."""
        if self.GoogleApiSearcher is None:
            return

        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (link_domain,
                                                                   domain)
            self.assertEqual(link_domain, domain, msg)
class TestMultipartPostUpload(unittest.TestCase):
    """
    In the new architecture I've been working on, the HTTP requests are
    almost completely created by serializing two objects:

        * FuzzableRequest
        * DataContainer (stored in FuzzableRequest._post_data)

    There is a special DataContainer sub-class for MultipartPost file uploads
    called MultipartContainer, which holds variables and files and when
    serialized will be encoded as multipart.

    These test cases try to make sure that the file upload feature works by
    sending a POST request with a MultipartContainer to moth.
    """
    MOTH_FILE_UP_URL = URL(get_moth_http('/core/file_upload/upload.py'))

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_multipart_without_file(self):
        # A plain string in the file field must NOT be accepted as an upload
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = 'this is not a file'
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertNotIn('was successfully uploaded', resp.get_body())

    def test_file_upload(self):
        # FIX: close the mkstemp() file descriptor and remove the temp
        # file when done; the previous version leaked the fd, the open
        # file object and the on-disk file.
        fd, file_path = tempfile.mkstemp(suffix=".tmp")
        try:
            os.write(fd, 'file content')
        finally:
            os.close(fd)

        _file = open(file_path, "rb")
        try:
            self.upload_file(_file)
        finally:
            _file.close()
            os.remove(file_path)

    def test_stringio_upload(self):
        # In-memory file-like objects need no cleanup
        _file = NamedStringIO('file content', name='test.txt')
        self.upload_file(_file)

    def upload_file(self, _file):
        """POST `_file` as a multipart upload and assert moth accepted it."""
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)
        mpc['uploadedfile'][0] = _file

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertIn('was successfully uploaded', resp.get_body())

    def test_upload_file_using_fuzzable_request(self):
        # Same upload, but serialized through FuzzableRequest/send_mutant
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = NamedStringIO('file content',
                                                       name='test.txt')
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                                             ('type', 'hidden'),
                                             ('value', '10000')])

        mpc = MultipartContainer(form_params)
        freq = FuzzableRequest(self.MOTH_FILE_UP_URL, post_data=mpc,
                               method='POST')

        resp = self.opener.send_mutant(freq)

        self.assertIn('was successfully uploaded', resp.get_body())
class TestGetAverageRTT(unittest.TestCase):
    """Verify ExtendedUrllib.get_average_rtt_for_mutant() against mocked
    responses with controlled delays."""

    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()

    @httpretty.activate
    def test_get_average_rtt_for_mutant_all_equal(self):
        def delayed_response(request, uri, headers):
            # Constant half-second delay for every request
            time.sleep(0.5)
            return 200, headers, 'Yup'

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=delayed_response)

        freq = FuzzableRequest(URL(self.MOCK_URL))
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(freq)

        # The measured average must stay close to the fixed 0.5s delay
        self.assertGreater(average_rtt, 0.45)
        self.assertGreater(0.55, average_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_similar(self):
        def delayed_response(request, uri, headers):
            # Jittered delays in the 0.41 - 0.49 second range
            time.sleep(0.4 + random.randint(1, 9) / 100.0)
            return 200, headers, 'Yup'

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=delayed_response)

        freq = FuzzableRequest(URL(self.MOCK_URL))
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(freq)

        # Similar delays still average near 0.5s
        self.assertGreater(average_rtt, 0.45)
        self.assertGreater(0.55, average_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_one_off(self):
        #
        # TODO: This is one of the cases I need to fix using _has_outliers!
        #       Calculating the average using 0.3 , 0.2 , 2.0 is madness
        #
        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=RequestCallBackWithDelays([0.3, 0.2, 2.0]))

        freq = FuzzableRequest(URL(self.MOCK_URL))
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(freq)

        # Current (outlier-skewed) behaviour: mean of 0.3/0.2/2.0 ~ 0.83
        self.assertGreater(average_rtt, 0.80)
        self.assertGreater(0.90, average_rtt)
class TestSQLMapWrapper(unittest.TestCase):
    """
    Integration tests for SQLMapWrapper: verify that known SQL injections in
    the moth test web application are detected and that the wrapper builds
    the expected sqlmap command line parameters.
    """

    SQLI_GET = get_moth_http('/audit/sql_injection/'
                             'where_string_single_qs.py?uname=pablo')
    SSL_SQLI_GET = get_moth_https('/audit/sql_injection/'
                                  'where_string_single_qs.py?uname=pablo')
    SQLI_POST = get_moth_http('/audit/sql_injection/where_integer_form.py')
    DATA_POST = 'text=1'

    def setUp(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener, debug=True)

    def tearDown(self):
        self.uri_opener.end()
        self.sqlmap.cleanup()

    @classmethod
    def setUpClass(cls):
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    @classmethod
    def tearDownClass(cls):
        # Doing this in both setupclass and teardownclass in order to be sure
        # that a ctrl+c doesn't break it
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    def test_verify_vulnerability(self):
        vulnerable = self.sqlmap.is_vulnerable()
        # Pass sqlmap's stdout as the assertion message so that failures are
        # debuggable, consistent with the other is_vulnerable() tests below.
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_verify_vulnerability_ssl(self):
        uri = URL(self.SSL_SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_verify_vulnerability_false(self):
        not_vuln = get_moth_http('/audit/sql_injection/'
                                 'where_string_single_qs.py?fake=pablo')
        uri = URL(not_vuln)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertFalse(vulnerable)

    def test_verify_vulnerability_POST(self):
        target = Target(URL(self.SQLI_POST), self.DATA_POST)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_wrapper_invalid_url(self):
        # Target requires a URL instance, not a plain string
        self.assertRaises(TypeError, SQLMapWrapper,
                          self.SQLI_GET, self.uri_opener)

    def test_stds(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)

        prms = ['--batch', ]
        cmd, process = self.sqlmap.run_sqlmap_with_pipes(prms)

        # NOTE: `file` and `basestring` are Python 2 builtins
        self.assertIsInstance(process.stdout, file)
        self.assertIsInstance(process.stderr, file)
        self.assertIsInstance(process.stdin, file)
        self.assertIsInstance(cmd, basestring)

        self.assertIn('sqlmap.py', cmd)

    def test_target_basic(self):
        target = Target(URL(self.SQLI_GET))
        params = target.to_params()

        self.assertEqual(params, ["--url=%s" % self.SQLI_GET])

    def test_target_post_data(self):
        target = Target(URL(self.SQLI_GET), self.DATA_POST)
        params = target.to_params()

        self.assertEqual(params, ["--url=%s" % self.SQLI_GET,
                                  "--data=%s" % self.DATA_POST])

    def test_no_coloring(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--disable-coloring', params)

    def test_always_batch(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--batch', params)

    def test_use_proxy(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertTrue(any(i.startswith('--proxy=http://127.0.0.1:')
                            for i in params))

    def test_enable_coloring(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        sqlmap = SQLMapWrapper(target, self.uri_opener, coloring=True)
        params = sqlmap.get_wrapper_params()
        self.assertNotIn('--disable-coloring', params)

    def test_dbs(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.dbs()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate databases',
                      output)

    def test_tables(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.tables()
        output = process.stdout.read()

        self.assertIn('auth_group_permissions', output)
        self.assertIn('Database: SQLite_masterdb', output)
        self.assertIn('django_content_type', output)

    def test_users(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.users()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate the users',
                      output)

    def test_dump(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.dump()
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

    def test_sqlmap(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.direct('--tables')
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

        # Moth runs on SQLite, so MySQL-style schema tables must not appear
        self.assertNotIn('information_schema', output)
        self.assertNotIn('COLUMN_PRIVILEGES', output)
class TestGetAverageRTT(unittest.TestCase):
    """
    Exercise ExtendedUrllib.get_average_rtt_for_mutant() against httpretty
    mocked endpoints whose handlers sleep for a controlled amount of time.
    """

    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()

    @httpretty.activate
    def test_get_average_rtt_for_mutant_all_equal(self):
        # Every mocked response is delayed by exactly half a second
        def fixed_delay_handler(request, uri, headers):
            time.sleep(0.5)
            return 200, headers, 'Yup'

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=fixed_delay_handler)

        fuzzable_request = FuzzableRequest(URL(self.MOCK_URL))
        measured_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # The measured average must land close to the fixed 0.5s delay
        self.assertGreater(measured_rtt, 0.45)
        self.assertGreater(0.55, measured_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_similar(self):
        # Delays jitter between 0.41 and 0.49 seconds per request
        def jitter_delay_handler(request, uri, headers):
            time.sleep(0.4 + random.randint(1, 9) / 100.0)
            return 200, headers, 'Yup'

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=jitter_delay_handler)

        fuzzable_request = FuzzableRequest(URL(self.MOCK_URL))
        measured_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Despite the jitter, the average should still be ~0.45-0.55s
        self.assertGreater(measured_rtt, 0.45)
        self.assertGreater(0.55, measured_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_one_off(self):
        #
        # TODO: This is one of the cases I need to fix using _has_outliers!
        #       Calculating the average using 0.3 , 0.2 , 2.0 is madness
        #
        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=RequestCallBackWithDelays([0.3, 0.2, 2.0]))

        fuzzable_request = FuzzableRequest(URL(self.MOCK_URL))
        measured_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Current (known-bad) behavior: plain average of the three delays
        self.assertGreater(measured_rtt, 0.80)
        self.assertGreater(0.90, measured_rtt)
class TestXUrllib(unittest.TestCase):
    """
    End-to-end tests for ExtendedUrllib: real requests against the moth test
    web application, local TCP/SSL daemons, and httpretty mocked endpoints.
    """

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'
    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        # Response ids start at 1 and must always be set
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_basic_ssl(self):
        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_github_ssl(self):
        # Requires internet access: downloads a large JSON file over SSL
        url = URL('https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json')

        http_response = self.uri_opener.GET(url, cache=False,
                                            binary_response=True,
                                            respect_size_limit=False)

        self.assertIn('jquery', http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        # Two GETs to the same URL with the cache enabled (default)
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    @httpretty.activate
    def test_GET_with_post_data(self):
        # A GET request MAY carry a body; verify it is sent on the wire
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        mock_url = URL(self.MOCK_URL)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/')

    @httpretty.activate
    def test_GET_with_post_data_and_qs(self):
        # Same as above, but the URL also carries a query string which must
        # be preserved in the request path
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        qs = '?qs=1'
        mock_url = URL(self.MOCK_URL + qs)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/' + qs)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        # file:// is not a supported protocol for GET
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        # EmptyTCPHandler accepts the connection but sends no HTTP response
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        try:
            self.uri_opener.GET(url)
        except HTTPRequestException as hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
            self.assertTrue(False, 'Expected HTTPRequestException.')

    def test_url_port_not_http_many(self):
        # After MAX_ERROR_COUNT consecutive failures the urllib raises
        # ScanMustStopException instead of another HTTPRequestException
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()
        self.uri_opener.settings.set_max_http_retries(0)

        url = URL('http://127.0.0.1:%s/' % port)
        http_request_e = 0
        scan_must_stop_e = 0

        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except HTTPRequestException:
                http_request_e += 1
            except ScanMustStopException as smse:
                scan_must_stop_e += 1
                break
            except Exception as e:
                msg = 'Not expecting "%s".'
                self.assertTrue(False, msg % e.__class__.__name__)

        # Exactly one scan-must-stop, after 9 per-request failures
        self.assertEqual(scan_must_stop_e, 1)
        self.assertEqual(http_request_e, 9)

    def test_get_wait_time(self):
        """
        Asserts that all the responses coming out of the extended urllib have a
        get_wait_time different from the default.
        """
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertNotEqual(http_response.get_wait_time(), DEFAULT_WAIT_TIME)

    def test_ssl_tls_1_0(self):
        ssl_daemon = RawSSLDaemon(Ok200Handler, ssl_version=ssl.PROTOCOL_TLSv1)
        ssl_daemon.start()
        ssl_daemon.wait_for_start()

        port = ssl_daemon.get_port()

        url = URL('https://127.0.0.1:%s/' % port)

        resp = self.uri_opener.GET(url)
        self.assertEqual(resp.get_body(), Ok200Handler.body)

    def test_ssl_v23(self):
        # https://bugs.kali.org/view.php?id=2160
        if not hasattr(ssl, 'PROTOCOL_SSLv23'):
            return

        ssl_daemon = RawSSLDaemon(Ok200Handler, ssl_version=ssl.PROTOCOL_SSLv23)
        ssl_daemon.start()
        ssl_daemon.wait_for_start()

        port = ssl_daemon.get_port()

        url = URL('https://127.0.0.1:%s/' % port)

        resp = self.uri_opener.GET(url)
        self.assertEqual(resp.get_body(), Ok200Handler.body)

    def test_ssl_v3(self):
        # https://bugs.kali.org/view.php?id=2160
        if not hasattr(ssl, 'PROTOCOL_SSLv3'):
            return

        # pylint: disable=E1101
        ssl_daemon = RawSSLDaemon(Ok200Handler, ssl_version=ssl.PROTOCOL_SSLv3)
        ssl_daemon.start()
        ssl_daemon.wait_for_start()
        # pylint: disable=E1101

        port = ssl_daemon.get_port()

        url = URL('https://127.0.0.1:%s/' % port)

        resp = self.uri_opener.GET(url)
        self.assertEqual(resp.get_body(), Ok200Handler.body)

    @attr('internet')
    @attr('ci_fails')
    def test_ssl_sni(self):
        """
        Test is our HTTP client supports SSL SNI
        """
        url = URL('https://sni.velox.ch/')
        resp = self.uri_opener.GET(url)
        self.assertIn('<strong>Great!', resp.get_body())

    def test_ssl_fail_when_requesting_http(self):
        http_daemon = UpperDaemon(Ok200Handler)
        http_daemon.start()
        http_daemon.wait_for_start()

        port = http_daemon.get_port()

        # Note that here I'm using httpS <<---- "S" and that I've started an
        # HTTP server. We should get an exception
        url = URL('https://127.0.0.1:%s/' % port)

        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_ssl_fail_when_requesting_moth_http(self):
        """
        https://github.com/andresriancho/w3af/issues/7989

        This test takes considerable time to run since it needs to timeout
        the SSL connection for each SSL protocol
        """
        # Note that here I'm using httpS <<---- "S" and that I'm connecting to
        # the net location (host:port) of an HTTP server.
        http_url = URL(get_moth_http())
        test_url = URL('https://%s' % http_url.get_net_location())

        self.uri_opener.settings.set_max_http_retries(0)

        self.assertRaises(HTTPRequestException,
                          self.uri_opener.GET,
                          test_url,
                          timeout=1)

    def test_stop(self):
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest,
                          self.uri_opener.GET, url)

    def test_pause_stop(self):
        self.uri_opener.pause(True)
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest,
                          self.uri_opener.GET, url)

    def test_pause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        # NOTE(review): Process is used with thread-style semantics here
        # (daemon attribute, shares the Queue) — defined in the file's imports
        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        # While paused the GET blocks, so nothing arrives in 2 seconds
        self.assertRaises(Queue.Empty, output.get, True, 2)

    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        # Un-pause and the blocked GET completes
        self.uri_opener.pause(False)

        http_response = output.get()
        self.assertNotIsInstance(http_response, types.NoneType,
                                 'Error in send thread.')

        th.join()

        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_removes_cache(self):
        url = URL(get_moth_http())
        self.uri_opener.GET(url, cache=False)

        # Please note that this line, together with the tearDown() act as
        # a test for a "double call to end()".
        self.uri_opener.end()

        db_fmt = 'db_unittest-%s'
        trace_fmt = 'db_unittest-%s_traces/'
        temp_dir = get_temp_dir()

        for i in xrange(100):
            test_db_path = os.path.join(temp_dir, db_fmt % i)
            test_trace_path = os.path.join(temp_dir, trace_fmt % i)
            self.assertFalse(os.path.exists(test_db_path), test_db_path)
            self.assertFalse(os.path.exists(test_trace_path), test_trace_path)

    def test_special_char_header(self):
        url = URL(get_moth_http('/core/headers/echo-headers.py'))
        header_content = u'name=ábc'
        headers = Headers([('Cookie', header_content)])

        http_response = self.uri_opener.GET(url, cache=False, headers=headers)
        self.assertIn(header_content, http_response.body)

    def test_bad_file_descriptor_8125_local(self):
        """
        8125 is basically an issue with the way HTTP SSL connections handle
        the Connection: Close header.

        :see: https://github.com/andresriancho/w3af/issues/8125
        """
        raw_http_response = ('HTTP/1.1 200 Ok\r\n'
                             'Connection: close\r\n'
                             'Content-Type: text/html\r\n'
                             'Content-Length: 3\r\n\r\nabc')
        certfile = os.path.join(ROOT_PATH, 'plugins', 'tests', 'audit',
                                'certs', 'invalid_cert.pem')
        port = get_unused_port()

        s = SSLServer('localhost', port, certfile,
                      http_response=raw_http_response)
        s.start()

        body = 'abc'
        mock_url = 'https://localhost:%s/' % port
        url = URL(mock_url)
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertEqual(body, http_response.body)

        s.stop()
        # This error is expected, it's generated when the xurllib negotiates
        # the different SSL protocols with the server
        self.assertEqual(set([e.strerror for e in s.errors]),
                         {'Bad file descriptor'})

    def test_rate_limit_high(self):
        self.rate_limit_generic(500, 0.009, 0.4)

    def test_rate_limit_low(self):
        self.rate_limit_generic(1, 1, 2.2)

    def test_rate_limit_zero(self):
        # Zero means "no limit": both requests should complete quickly
        self.rate_limit_generic(0, 0.005, 0.4)

    @httpretty.activate
    def rate_limit_generic(self, max_requests_per_second, _min, _max):
        # Issue two requests with the configured rate limit and assert that
        # the elapsed wall-clock time falls within [_min, _max]
        mock_url = 'http://mock/'
        url = URL(mock_url)
        httpretty.register_uri(httpretty.GET, mock_url, body='Body')

        start_time = time.time()

        with patch.object(self.uri_opener.settings,
                          'get_max_requests_per_second') as mrps_mock:
            mrps_mock.return_value = max_requests_per_second

            self.uri_opener.GET(url, cache=False)
            self.uri_opener.GET(url, cache=False)

        httpretty.reset()

        end_time = time.time()
        elapsed_time = end_time - start_time
        self.assertGreaterEqual(elapsed_time, _min)
        self.assertLessEqual(elapsed_time, _max)
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below for
          tests on these particular search implementations.

    This base class is not intended to be run by nosetests.
    """
    # Subclasses set this to a concrete searcher class; None disables the tests
    GoogleApiSearcher = None

    COUNT = 10

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return

        keywords = ["pink", "red", "blue"]
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = 'This test fails randomly based on Google\'s anti automation' \
              ' protection, if it fails you should run it again in a couple'\
              ' of minutes. Many consecutive failures show that our code is'\
              ' NOT working anymore.'
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        # Every result must look like a URL
        for link in searcher.links:
            self.assertTrue(URL_REGEX.match(link.URL.url_string) is not None)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return

        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = 'This test fails randomly based on Google\'s anti automation' \
              ' protection, if it fails you should run it again in a couple of' \
              ' minutes. Many consecutive failures show that our code is NOT' \
              ' working anymore.'
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        # A site: query must only return results from that domain
        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (
                link_domain, domain)
            self.assertEqual(link_domain, domain, msg)
class TestSQLMapWrapper(unittest.TestCase):
    """
    Integration tests for SQLMapWrapper against the moth test web application.

    NOTE(review): this appears to be a near-duplicate of another
    TestSQLMapWrapper class in this file — confirm whether both are needed.
    """

    SQLI_GET = get_moth_http('/audit/sql_injection/'
                             'where_string_single_qs.py?uname=pablo')
    SSL_SQLI_GET = get_moth_https('/audit/sql_injection/'
                                  'where_string_single_qs.py?uname=pablo')
    SQLI_POST = get_moth_http('/audit/sql_injection/where_integer_form.py')
    DATA_POST = 'text=1'

    def setUp(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener, debug=True)

    def tearDown(self):
        self.uri_opener.end()
        self.sqlmap.cleanup()

    @classmethod
    def setUpClass(cls):
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    @classmethod
    def tearDownClass(cls):
        # Doing this in both setupclass and teardownclass in order to be sure
        # that a ctrl+c doesn't break it
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    def test_verify_vulnerability(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

    def test_verify_vulnerability_ssl(self):
        uri = URL(self.SSL_SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_verify_vulnerability_false(self):
        not_vuln = get_moth_http('/audit/sql_injection/'
                                 'where_string_single_qs.py?fake=pablo')
        uri = URL(not_vuln)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertFalse(vulnerable)

    def test_verify_vulnerability_POST(self):
        target = Target(URL(self.SQLI_POST), self.DATA_POST)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_wrapper_invalid_url(self):
        # Target requires a URL instance, not a plain string
        self.assertRaises(TypeError, SQLMapWrapper,
                          self.SQLI_GET, self.uri_opener)

    def test_stds(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)

        prms = [
            '--batch',
        ]
        cmd, process = self.sqlmap.run_sqlmap_with_pipes(prms)

        # NOTE: `file` and `basestring` are Python 2 builtins
        self.assertIsInstance(process.stdout, file)
        self.assertIsInstance(process.stderr, file)
        self.assertIsInstance(process.stdin, file)
        self.assertIsInstance(cmd, basestring)

        self.assertIn('sqlmap.py', cmd)

    def test_target_basic(self):
        target = Target(URL(self.SQLI_GET))
        params = target.to_params()

        self.assertEqual(params, ["--url=%s" % self.SQLI_GET])

    def test_target_post_data(self):
        target = Target(URL(self.SQLI_GET), self.DATA_POST)
        params = target.to_params()

        self.assertEqual(
            params, ["--url=%s" % self.SQLI_GET,
                     "--data=%s" % self.DATA_POST])

    def test_no_coloring(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--disable-coloring', params)

    def test_always_batch(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--batch', params)

    def test_use_proxy(self):
        params = self.sqlmap.get_wrapper_params()
        self.assertTrue(
            any(i.startswith('--proxy=http://127.0.0.1:') for i in params))

    def test_enable_coloring(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        sqlmap = SQLMapWrapper(target, self.uri_opener, coloring=True)
        params = sqlmap.get_wrapper_params()
        self.assertNotIn('--disable-coloring', params)

    def test_dbs(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.dbs()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate databases',
                      output)

    def test_tables(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.tables()
        output = process.stdout.read()

        self.assertIn('auth_group_permissions', output)
        self.assertIn('Database: SQLite_masterdb', output)
        self.assertIn('django_content_type', output)

    def test_users(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.users()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate the users',
                      output)

    def test_dump(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.dump()
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

    def test_sqlmap(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.direct('--tables')
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

        self.assertNotIn('COLUMN_PRIVILEGES', output)
class TestXUrllib(unittest.TestCase):
    """
    Basic end-to-end tests for ExtendedUrllib against the moth test web
    application and local TCP daemons.
    """

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        # Response ids start at 1 and must always be set
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        # Two GETs to the same URL with the cache enabled (default)
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        # file:// is not a supported protocol for GET
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        # EmptyTCPHandler accepts the connection but sends no HTTP response
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        try:
            self.uri_opener.GET(url)
        # Use modern `except ... as ...` syntax, consistent with the rest of
        # the file, instead of the deprecated comma form
        except HTTPRequestException as hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
            # Fail explicitly when no exception was raised (this branch was
            # missing/truncated; mirrors the identical test elsewhere in the
            # file)
            self.assertTrue(False, 'Expected HTTPRequestException.')
class TestXUrllib(unittest.TestCase):
    """
    End-to-end tests for ExtendedUrllib (older revision: error conditions
    raise ScanMustStopOnUrlError rather than HTTPRequestException).
    """

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        # Response ids start at 1 and must always be set
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        # Two GETs to the same URL with the cache enabled (default)
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_POST(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_POST_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_url(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        # EmptyTCPHandler accepts the connection but sends no HTTP response
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http_many(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        # for/else: fail when the loop never hit the ScanMustStopException
        # break — i.e. the error counter never tripped
        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

    def test_timeout(self):
        # TimeoutTCPHandler accepts the connection and then never answers
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_timeout(1)

        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

        self.uri_opener.settings.set_default_values()

    def test_timeout_many(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_timeout(1)

        url = URL('http://127.0.0.1:%s/' % port)
        # Same for/else pattern as test_url_port_not_http_many above
        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

        self.uri_opener.settings.set_default_values()

    def test_ignore_errors(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_timeout(1)
        # Mock _retry so we can verify it is NOT called with ignore_errors
        self.uri_opener._retry = Mock()

        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url, ignore_errors=True)
        except ScanMustStopOnUrlError:
            self.assertEqual(self.uri_opener._retry.call_count, 0)
        else:
            self.assertTrue(False, 'Exception not raised')

        self.uri_opener.settings.set_default_values()

    def test_stop(self):
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest,
                          self.uri_opener.GET, url)

    def test_pause_stop(self):
        self.uri_opener.pause(True)
        self.uri_opener.stop()
        url = URL(get_moth_http())
        self.assertRaises(ScanMustStopByUserRequest,
                          self.uri_opener.GET, url)

    def test_pause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        # While paused the GET blocks, so nothing arrives in 2 seconds
        self.assertRaises(Queue.Empty, output.get, True, 2)

    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        # Un-pause and the blocked GET completes
        self.uri_opener.pause(False)

        http_response = output.get()
        self.assertNotIsInstance(http_response, types.NoneType,
                                 'Error in send thread.')

        th.join()

        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_removes_cache(self):
        url = URL(get_moth_http())
        self.uri_opener.GET(url, cache=False)

        # Please note that this line, together with the tearDown() act as
        # a test for a "double call to end()".
        self.uri_opener.end()

        db_fmt = 'db_unittest-%s'
        trace_fmt = 'db_unittest-%s_traces/'
        temp_dir = get_temp_dir()

        for i in xrange(100):
            test_db_path = os.path.join(temp_dir, db_fmt % i)
            test_trace_path = os.path.join(temp_dir, trace_fmt % i)
            self.assertFalse(os.path.exists(test_db_path), test_db_path)
            self.assertFalse(os.path.exists(test_trace_path), test_trace_path)

    def test_special_char_header(self):
        url = URL(get_moth_http('/core/headers/echo-headers.py'))
        header_content = u'name=ábc'
        headers = Headers([('Cookie', header_content)])

        http_response = self.uri_opener.GET(url, cache=False, headers=headers)
        self.assertIn(header_content, http_response.body)
class BaseGoogleAPISearch(unittest.TestCase):
    """
    Shared test logic for the Google API searcher implementations.

    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below
          for the concrete subclasses. This base class is not intended to be
          run by nosetests on its own (GoogleApiSearcher is None here).
    """
    GoogleApiSearcher = None
    COUNT = 10

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    @attr('fails')
    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return

        # Shuffle so consecutive runs don't send the exact same query
        keywords = ['pink', 'red', 'blue']
        random.shuffle(keywords)
        query = ' '.join(keywords)

        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, 0, self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # Accessing .links is what actually performs the search
        searcher.links
        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        # Every returned link must look like a URL
        for link in searcher.links:
            matched = URL_REGEX.match(link.URL.url_string)
            self.assertTrue(matched is not None, link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Most of the returned URLs should mention at least one keyword
        related = sum(1 for link in searcher.links
                      for key in keywords
                      if key in link.URL.url_string.lower())
        self.assertTrue(related > 5, related)

    @attr('fails')
    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return

        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain

        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, 0, self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # Accessing .links is what actually performs the search
        searcher.links
        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        # A site: query must only return links from that domain
        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (
                link_domain, domain)
            self.assertEqual(link_domain, domain, msg)
class TestRedirectHandlerExtendedUrllib(unittest.TestCase):
    """
    Test the redirect handler using ExtendedUrllib
    """
    # Canned URLs / body used by the httpretty mocks below
    REDIR_DEST = 'http://w3af.org/dest'
    REDIR_SRC = 'http://w3af.org/src'
    OK_BODY = 'Body!'

    def setUp(self):
        # Reset the global HTTP request id counter so the get_id()
        # assertions below are deterministic across test methods
        consecutive_number_generator.reset()
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    @httpretty.activate
    def test_redirect_302_simple_no_follow(self):
        # 302 with a Location header, but follow_redirects is left at its
        # default: the 302 response itself must be returned to the caller
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_SRC,
                               body='',
                               status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src)

        location, _ = response.get_headers().iget('location')
        self.assertEqual(location, self.REDIR_DEST)
        self.assertEqual(response.get_code(), FOUND)
        # Only one request was sent (the redirect was not followed)
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_302_simple_follow(self):
        # src 302-redirects to dest, and this time we ask to follow
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_SRC,
                               body='',
                               status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_DEST,
                               body=self.OK_BODY,
                               status=200)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), OK)
        self.assertEqual(response.get_body(), self.OK_BODY)
        # The response keeps the original URL and records the redirect target
        self.assertEqual(response.get_redir_uri(), URL(self.REDIR_DEST))
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        # Two requests were sent: the redirect and the followed destination
        self.assertEqual(response.get_id(), 2)

    @httpretty.activate
    def test_redirect_301_loop(self):
        # src 301-redirects to dest (Location header) and dest 301-redirects
        # back to src via the alternative URI header: an infinite loop
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_SRC,
                               body='',
                               status=MOVED_PERMANENTLY,
                               adding_headers={'Location': self.REDIR_DEST})
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_DEST,
                               body='',
                               status=MOVED_PERMANENTLY,
                               adding_headers={'URI': self.REDIR_SRC})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # At some point the handler detects a loop and stops
        self.assertEqual(response.get_code(), MOVED_PERMANENTLY)
        self.assertEqual(response.get_body(), '')
        # NOTE(review): 9 == total requests sent before loop detection kicks
        # in — presumably the handler's max-redirect budget; confirm against
        # the redirect handler implementation
        self.assertEqual(response.get_id(), 9)

    @httpretty.activate
    def test_redirect_302_without_location_returns_302_response(self):
        # Breaks the RFC
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_SRC,
                               body='',
                               status=FOUND)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # Doesn't follow the redirects
        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_no_follow_file_proto(self):
        # A redirect to a file:// URL must never be followed
        httpretty.register_uri(httpretty.GET,
                               self.REDIR_SRC,
                               body='',
                               status=FOUND,
                               adding_headers={'Location': 'file:///etc/passwd'})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        # Only the original request was sent; the file:// target was not
        self.assertEqual(response.get_id(), 1)
class TestExtendedUrllibProxy(unittest.TestCase): MOTH_MESSAGE = '<title>moth: vulnerable web application</title>' def setUp(self): self.uri_opener = ExtendedUrllib() # Start the proxy daemon self._proxy = Proxy('127.0.0.2', 0, ExtendedUrllib(), ProxyHandler) self._proxy.start() self._proxy.wait_for_start() port = self._proxy.get_port() # Configure the proxy settings = OpenerSettings() options = settings.get_options() proxy_address_opt = options['proxy_address'] proxy_port_opt = options['proxy_port'] proxy_address_opt.set_value('127.0.0.2') proxy_port_opt.set_value(port) settings.set_options(options) self.uri_opener.settings = settings def tearDown(self): self.uri_opener.end() def test_http_default_port_via_proxy(self): # TODO: Write this test pass def test_http_port_specification_via_proxy(self): self.assertEqual(self._proxy.total_handled_requests, 0) url = URL(get_moth_http()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertEqual(self._proxy.total_handled_requests, 1) def test_https_via_proxy(self): self.assertEqual(self._proxy.total_handled_requests, 0) url = URL(get_moth_https()) http_response = self.uri_opener.GET(url, cache=False) self.assertIn(self.MOTH_MESSAGE, http_response.body) self.assertEqual(self._proxy.total_handled_requests, 1) def test_offline_port_via_proxy(self): url = URL('http://127.0.0.1:8181/') http_response = self.uri_opener.GET(url, cache=False) self.assertEqual(http_response.get_code(), 500) self.assertIn('Connection refused', http_response.body) def test_POST_via_proxy(self): url = URL(get_moth_http('/audit/xss/simple_xss_form.py')) http_response = self.uri_opener.POST(url, data='text=123456abc', cache=False) self.assertIn('123456abc', http_response.body)
class TestXUrllib(unittest.TestCase):
    """
    Integration tests for ExtendedUrllib GET/POST behavior against the moth
    test server, httpretty mocks and local throw-away daemons.
    """

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'
    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        # Make sure no mocked URIs leak into the next test
        httpretty.reset()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        # Every response gets a sequential, non-None id
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_basic_ssl(self):
        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        # Two GETs with caching enabled; the second may be served from cache,
        # both must return the same page
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        # Query string parameters must reach the server unmodified
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    @httpretty.activate
    def test_GET_with_post_data(self):
        # A GET with a body is unusual but valid; the data must be sent
        # together with a matching content-length header
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        mock_url = URL(self.MOCK_URL)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/')

    @httpretty.activate
    def test_GET_with_post_data_and_qs(self):
        # Same as above, but the query string must survive alongside the body
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        qs = '?qs=1'
        mock_url = URL(self.MOCK_URL + qs)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/' + qs)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        # Non-ASCII POST data must round-trip through encoding
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        # A daemon that speaks something other than HTTP on the port
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()
        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url)
        # FIX: "except ... as ..." for consistency with the other test
        # classes in this file (was the deprecated comma form)
        except HTTPRequestException as hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
            # FIX: the block previously ended in a bare, body-less "else:"
            # (a SyntaxError); fail explicitly when no exception is raised,
            # mirroring the pattern used by the sibling tests
            self.assertTrue(False, 'Expected HTTPRequestException')
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below for
    tests on these particular search implementations. This base class is not
    intended to be run by nosetests.
    """
    # Subclasses must set this to a concrete searcher class; None turns the
    # tests below into no-ops so the base class itself passes
    GoogleApiSearcher = None

    # Number of results requested from (and expected back from) the searcher
    COUNT = 10

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return

        # Shuffled so consecutive runs don't send the exact same query
        keywords = ["pink", "red", "blue"]
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = 'This test fails randomly based on Google\'s anti automation' \
              ' protection, if it fails you should run it again in a couple'\
              ' of minutes. Many consecutive failures show that our code is'\
              ' NOT working anymore.'
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        # Every returned link must look like a URL
        for link in searcher.links:
            self.assertTrue(URL_REGEX.match(link.URL.url_string) is not None,
                            link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return

        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start,
                                          self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = 'This test fails randomly based on Google\'s anti automation' \
              ' protection, if it fails you should run it again in a couple of' \
              ' minutes. Many consecutive failures show that our code is NOT' \
              ' working anymore.'
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        # A site: query must only return links from that domain
        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (
                link_domain, domain)
            self.assertEqual(link_domain, domain, msg)
class TestXUrllibDelayOnError(unittest.TestCase):
    """
    Verify that ExtendedUrllib sleeps increasing amounts of time as the HTTP
    error rate grows, and that the sleep log resets after the errors stop
    being new.
    """

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_increasing_delay_on_errors(self):
        # The sleep log starts with every error-rate bucket un-slept
        expected_log = {0: False, 70: False, 40: False, 10: False, 80: False,
                        50: False, 20: False, 90: False, 60: False, 30: False,
                        100: False}
        self.assertEqual(self.uri_opener._sleep_log, expected_log)

        # Daemon that accepts connections but sends nothing back, so every
        # request fails at the HTTP layer
        return_empty_daemon = UpperDaemon(EmptyTCPHandler)
        return_empty_daemon.start()
        return_empty_daemon.wait_for_start()

        port = return_empty_daemon.get_port()

        # No retries means that the test is easier to read/understand
        self.uri_opener.settings.set_max_http_retries(0)

        # We want to keep going, don't test the _should_stop_scan here.
        self.uri_opener._should_stop_scan = lambda x: False

        url = URL('http://127.0.0.1:%s/' % port)
        http_exception_count = 0
        loops = 100

        # Don't actually sleep; capture the delay calls instead
        with patch('w3af.core.data.url.extended_urllib.time.sleep') as sleepm:
            for i in xrange(loops):
                try:
                    self.uri_opener.GET(url, cache=False)
                except HTTPRequestException:
                    http_exception_count += 1
                except Exception, e:
                    msg = 'Not expecting: "%s"'
                    self.assertTrue(False, msg % e.__class__.__name__)
                else:
                    self.assertTrue(False, 'Expecting HTTPRequestException')

            self.assertEqual(loops - 1, i)

            # Note that the timeouts are increasing based on the error rate and
            # SOCKET_ERROR_DELAY
            expected_calls = [call(1.5), call(3.0), call(4.5), call(6.0),
                              call(7.5), call(9.0), call(10.5), call(12.0),
                              call(13.5)]

            # All buckets except 0 and 100 were slept on exactly once
            expected_log = {0: False, 70: True, 40: True, 10: True, 80: True,
                            50: True, 20: True, 90: True, 60: True, 30: True,
                            100: False}

            self.assertEqual(expected_calls, sleepm.call_args_list)
            self.assertEqual(http_exception_count, 100)
            self.assertEqual(self.uri_opener._sleep_log, expected_log)

            # This one should also clear the log
            try:
                self.uri_opener.GET(url, cache=False)
            except HTTPRequestException:
                pass
            else:
                self.assertTrue(False, 'Expected HTTPRequestException')

            # The log was cleared, all values should be False
            self.assertTrue(all([not v
                                 for v in
                                 self.uri_opener._sleep_log.values()]))
class TestXUrllib(unittest.TestCase):
    """
    Integration tests for ExtendedUrllib GET/POST behavior against the moth
    test server, httpretty mocks and local throw-away daemons.
    """

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'
    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        # Make sure no mocked URIs leak into the next test
        httpretty.reset()

    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        # Every response gets a sequential, non-None id
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_basic_ssl(self):
        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        # Two GETs with caching enabled; the second may be served from cache,
        # both must return the same page
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        # Query string parameters must reach the server unmodified
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    @httpretty.activate
    def test_GET_with_post_data(self):
        # A GET with a body is unusual but valid; the data must be sent
        # together with a matching content-length header
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        mock_url = URL(self.MOCK_URL)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/')

    @httpretty.activate
    def test_GET_with_post_data_and_qs(self):
        # Same as above, but the query string must survive alongside the body
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        qs = '?qs=1'
        mock_url = URL(self.MOCK_URL + qs)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/' + qs)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        # Non-ASCII POST data must round-trip through encoding
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        # A daemon that speaks something other than HTTP on the port
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()
        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url)
        # FIX: "except ... as ..." for consistency with the other test
        # classes in this file (was the deprecated comma form)
        except HTTPRequestException as hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
            # FIX: the block previously ended in a bare, body-less "else:"
            # (a SyntaxError); fail explicitly when no exception is raised,
            # mirroring the pattern used by the sibling tests
            self.assertTrue(False, 'Expected HTTPRequestException')
class TestXUrllibTimeout(unittest.TestCase):
    """
    Timeout behavior of ExtendedUrllib: fixed timeouts, repeated timeouts
    leading to scan stop, automatic timeout adjustment based on RTT, and the
    per-request timeout parameter overriding the configured one.
    """

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_timeout(self):
        # Daemon that accepts the connection and then never answers
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()
        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_configured_timeout(0.5)
        self.uri_opener.clear_timeout()
        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()
        start = time.time()

        try:
            self.uri_opener.GET(url)
        except HTTPRequestException as hre:
            self.assertEqual(hre.message, 'HTTP timeout error')
        except Exception as e:
            msg = 'Not expecting: "%s"'
            self.assertTrue(False, msg % e.__class__.__name__)
        else:
            self.assertTrue(False, 'Expected HTTPRequestException.')

        end = time.time()
        self.uri_opener.settings.set_default_values()
        # The 0.5s timeout (plus overhead) must bound the total wait
        self.assertLess(end-start, 1.5)

    def test_timeout_ssl(self):
        ssl_daemon = RawSSLDaemon(TimeoutTCPHandler)
        ssl_daemon.start()
        ssl_daemon.wait_for_start()

        port = ssl_daemon.get_port()
        url = URL('https://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_max_http_retries(0)
        self.uri_opener.settings.set_configured_timeout(1)
        self.uri_opener.clear_timeout()

        start = time.time()

        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

        end = time.time()
        self.uri_opener.settings.set_default_values()
        # We set the upper limit to 4 because the uri opener needs to timeout
        # all the connections (one for each SSL protocol) and then, because of
        # some very relaxed handshake it needs to timeout a SSL protocol 3
        # connection which passes handshake phase but then fails to send/get
        # the headers
        self.assertLess(end-start, 80)

    def test_timeout_many(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_configured_timeout(0.5)
        self.uri_opener.clear_timeout()
        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()

        url = URL('http://127.0.0.1:%s/' % port)
        http_request_e = 0
        scan_stop_e = 0

        # After enough consecutive timeouts the opener must raise
        # ScanMustStopException instead of yet another HTTPRequestException
        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except HTTPRequestException as hre:
                http_request_e += 1
                self.assertEqual(hre.message, 'HTTP timeout error')
            except ScanMustStopException:
                scan_stop_e += 1
                self.assertTrue(True)
                break
            except Exception as e:
                msg = 'Not expecting: "%s"'
                self.assertTrue(False, msg % e.__class__.__name__)
            else:
                self.assertTrue(False, 'Expecting timeout')
        else:
            # The for loop ran out without hitting the break above
            self.assertTrue(False, 'Expected ScanMustStopException')

        self.uri_opener.settings.set_default_values()
        self.assertEqual(http_request_e, 4)
        self.assertEqual(scan_stop_e, 1)

    def test_timeout_auto_adjust(self):
        upper_daemon = UpperDaemon(Ok200SmallDelayHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        # Enable timeout auto-adjust
        self.uri_opener.settings.set_configured_timeout(0)
        self.uri_opener.clear_timeout()
        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()
        # Mock to verify the calls
        self.uri_opener.set_timeout = Mock()

        # Make sure we start from the desired timeout value
        self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'),
                         DEFAULT_TIMEOUT)

        url = URL('http://127.0.0.1:%s/' % port)
        sent_requests = 0

        self.uri_opener.GET(url)
        time.sleep(TIMEOUT_UPDATE_ELAPSED_MIN + 1)

        # Keep sending requests until the opener decides to adjust the timeout
        for _ in xrange(TIMEOUT_ADJUST_LIMIT * 3):
            try:
                self.uri_opener.GET(url)
            except Exception:
                raise
            else:
                sent_requests += 1

            if self.uri_opener.set_timeout.call_count:
                break

        self.assertEqual(self.uri_opener.set_timeout.call_count, 1)

        # pylint: disable=E1136
        # The adjusted timeout must track the measured RTT within 20%
        rtt = self.uri_opener.get_average_rtt()[0]
        adjusted_tout = self.uri_opener.set_timeout.call_args[0][0]
        expected_tout = TIMEOUT_MULT_CONST * rtt
        delta = rtt * 0.2
        # pylint: enable=E1136

        self.assertGreaterEqual(adjusted_tout, expected_tout - delta)
        self.assertLessEqual(adjusted_tout, expected_tout + delta)
        self.assertLess(adjusted_tout, DEFAULT_TIMEOUT)
        self.assertEqual(sent_requests, TIMEOUT_ADJUST_LIMIT)

    def test_timeout_parameter_overrides_global_timeout(self):
        upper_daemon = UpperDaemon(Ok200SmallDelayWithLongTriggeredTimeoutHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        # Enable timeout auto-adjust
        self.uri_opener.settings.set_configured_timeout(0)
        self.uri_opener.clear_timeout()

        # Make sure we start from the desired timeout value
        self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'),
                         DEFAULT_TIMEOUT)

        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.GET(url)
        time.sleep(TIMEOUT_UPDATE_ELAPSED_MIN + 1)

        for _ in xrange(TIMEOUT_ADJUST_LIMIT * 3):
            self.uri_opener.GET(url)

        # These make sure that the HTTP connection pool is full, this is
        # required because we want to check if the timeout applies to
        # existing connections, not new ones
        for _ in xrange(ConnectionManager.MAX_CONNECTIONS):
            self.uri_opener.GET(url)

        # Make sure we reached the desired timeout after our HTTP
        # requests to the test server
        self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'),
                         MIN_TIMEOUT)

        timeout_url = URL('http://127.0.0.1:%s/timeout' % port)

        # And now the real test, this one makes sure that the timeout
        # parameter sent to GET overrides the configured value
        response = self.uri_opener.GET(timeout_url, timeout=8.0)
        self.assertEqual(response.get_code(), 200)

        self.assertEqual(self.uri_opener.get_timeout('127.0.0.1'),
                         MIN_TIMEOUT)

        # When timeout is not specified and the server returns in more
        # than the expected time, an exception is raised
        self.assertRaises(Exception, self.uri_opener.GET, timeout_url)