def _get_all_parameters(self, request):
    """
    Iterate every parameter carried by the HTTP request: query string,
    post-data, headers and cookies.

    :param request: The HTTP request
    :yield: (name, value) tuples for each request parameter; when a value
            looks base64-encoded, an extra tuple with the decoded value is
            yielded too.
    """
    req_headers = request.get_headers()
    qs = request.get_uri().get_querystring()
    post_dc = dc_from_hdrs_post(req_headers, request.get_data())

    raw_cookie, _ = req_headers.iget('cookie', '')
    cookie = Cookie(raw_cookie)

    token_sources = (qs.iter_tokens(),
                     post_dc.iter_tokens(),
                     req_headers.iter_tokens(),
                     cookie.iter_tokens())

    for token in itertools.chain(*token_sources):
        name = token.get_name()
        value = smart_str_ignore(token.get_value())

        yield name, value

        # Handle the case where the parameter is base64 encoded
        is_b64, decoded = maybe_decode_base64(value)
        if is_b64:
            yield name, decoded
def test_create_cookie(self):
    # A Cookie built from an HTTP response must equal one parsed from the
    # equivalent cookie string
    target_url = URL('http://www.w3af.com/')
    hdrs = Headers([('content-type', 'text/html'),
                    ('Cookie', 'abc=def')])
    http_response = HTTPResponse(200, '', hdrs, target_url, target_url)

    parsed_cookie = Cookie.from_http_response(http_response)

    self.assertEqual(parsed_cookie, Cookie('abc=def'))
def test_copy_with_token(self):
    # deepcopy must carry the selected token over to the clone
    original = Cookie('one=123; two=567; path=/')
    original.set_token(('one', 0))

    clone = copy.deepcopy(original)

    self.assertEqual(original.get_token(), clone.get_token())
    self.assertIsNotNone(original.get_token())
    self.assertIsNotNone(clone.get_token())
    self.assertEqual(clone.get_token().get_name(), 'one')
def set_cookie(self, cookie):
    """
    Store the cookie for this request, accepting several input forms.

    :param cookie: A Cookie object as defined in core.data.dc.cookie,
                   or a string.
    """
    # Branches reordered as guard-style dispatch; None can never match
    # the isinstance checks, so behavior is unchanged
    if cookie is None:
        self._cookie = Cookie()
    elif isinstance(cookie, Cookie):
        self._cookie = cookie
    elif isinstance(cookie, basestring):
        self._cookie = Cookie(cookie)
    else:
        fmt = '[FuzzableRequest error] set_cookie received: "%s": "%s".'
        error_str = fmt % (type(cookie), repr(cookie))
        om.out.error(error_str)
        raise BaseFrameworkException(error_str)
def __init__(self, uri, method='GET', headers=None, cookie=None,
             post_data=None):
    """
    :param uri: The URI to request
    :param method: The HTTP method, 'GET' by default
    :param headers: Headers instance, or None for empty headers
    :param cookie: Cookie instance, or None for an empty cookie
    :param post_data: DataContainer instance, or None for an empty one
    """
    super(FuzzableRequest, self).__init__()

    # Note: Do not check for the URI/Headers type here, since I'm doing it
    # in set_uri() and set_headers() already.
    if cookie is not None and not isinstance(cookie, Cookie):
        raise TypeError(TYPE_ERROR % ('cookie', 'Cookie'))

    if post_data is not None and not isinstance(post_data, DataContainer):
        raise TypeError(TYPE_ERROR % ('post_data', 'DataContainer'))

    # Internal variables
    self._method = method

    if cookie is None:
        cookie = Cookie()
    self._cookie = cookie

    if post_data is None:
        post_data = KeyValueContainer()
    self._post_data = post_data

    # Set the headers
    self._headers = None
    self.set_headers(headers if headers is not None else Headers())

    # Set the URL
    self._uri = None
    self._url = None
    self.set_uri(uri)

    # Set the internal variables
    self._sent_info_comp = None
def test_qs_and_cookie(self):
    """
    Even when fuzz_cookies is True, we won't create HeaderMutants based
    on a FuzzableRequest. This is one of the ugly things related with
    https://github.com/andresriancho/w3af/issues/3149

    Which we fixed!
    """
    cf_singleton.save('fuzzable_headers', [])
    cf_singleton.save('fuzz_cookies', True)  # This one changed
    cf_singleton.save('fuzz_url_filenames', False)
    cf_singleton.save('fuzzed_files_extension', 'gif')
    cf_singleton.save('fuzz_form_files', False)
    cf_singleton.save('fuzz_url_parts', False)

    # And now there is a cookie
    freq = FuzzableRequest(URL('http://moth/?id=1'),
                           cookie=Cookie('foo=bar'))

    mutants = create_mutants(freq, self.payloads)

    expected_urls = [u'http://moth/?id=abc',
                     u'http://moth/?id=def',
                     u'http://moth/?id=1',
                     u'http://moth/?id=1']
    self.assertEqual([m.get_uri().url_string for m in mutants],
                     expected_urls)

    self.assertAllInstance(mutants[:2], QSMutant)
    self.assertAllInstance(mutants[2:], CookieMutant)
    self.assertAllHaveTokens(mutants)
def test_repeated(self):
    # Repeated cookie names must keep every value, in order of appearance
    parsed = Cookie('test=123; test=abc def; path=/')

    self.assertIn('test', parsed)
    self.assertIn('path', parsed)
    self.assertEqual(parsed['test'], ['123', 'abc def'])
def test_qs_and_cookie(self):
    cf_singleton.save('fuzzable_headers', [])
    cf_singleton.save('fuzz_cookies', True)  # This one changed
    cf_singleton.save('fuzz_url_filenames', False)
    cf_singleton.save('fuzzed_files_extension', 'gif')
    cf_singleton.save('fuzz_form_files', False)
    cf_singleton.save('fuzz_url_parts', False)

    # And now there is a cookie
    freq = HTTPQSRequest(URL('http://moth/?id=1'),
                         cookie=Cookie('foo=bar'))

    mutants = create_mutants(freq, self.payloads)

    expected_urls = [u'http://moth/?id=abc',
                     u'http://moth/?id=def',
                     u'http://moth/?id=1',
                     u'http://moth/?id=1']
    self.assertEqual([m.get_uri().url_string for m in mutants],
                     expected_urls)

    expected_cookies = ['foo=bar;', 'foo=bar;', 'foo=abc;', 'foo=def;']
    self.assertEqual(expected_cookies,
                     [str(m.get_cookie()) for m in mutants])

    # Every mutant must be either a QS or a Cookie mutant
    self.assertTrue(all(isinstance(m, (QSMutant, CookieMutant))
                        for m in mutants))
def from_http_response(cls, http_response):
    """
    Alternate constructor that mirrors an HTTP response.

    :return: An instance of FuzzableRequest using the URL and cookie from
             the http_response. The method used is "GET", and no post_data
             is set.
    """
    return cls(http_response.get_uri(),
               method='GET',
               cookie=Cookie.from_http_response(http_response))
def test_create_cookie(self):
    # A Cookie parsed out of a response must equal one built from the
    # equivalent string
    url = URL("http://www.w3af.com/")
    hdr_list = [("content-type", "text/html"), ("Cookie", "abc=def")]
    resp = HTTPResponse(200, "", Headers(hdr_list), url, url)

    self.assertEqual(Cookie.from_http_response(resp), Cookie("abc=def"))
def test_basic(self):
    parsed = Cookie('test=123; foobar=abc def; path=/')

    # All three keys must be present after parsing
    for key in ('test', 'foobar', 'path'):
        self.assertIn(key, parsed)

    self.assertEqual(parsed['test'], ['123'])
    self.assertEqual(parsed['foobar'], ['abc def'])
def test_config_true(self):
    # With fuzz_cookies enabled, cookie mutants must be generated
    fuzzer_config = {'fuzz_cookies': True}

    freq = HTTPQSRequest(self.url, cookie=Cookie('foo=bar; spam=eggs'))

    generated_mutants = CookieMutant.create_mutants(freq, self.payloads,
                                                    [], False,
                                                    fuzzer_config)

    self.assertNotEqual(len(generated_mutants), 0, generated_mutants)
def __init__(self, uri, method='GET', headers=None, cookie=None, dc=None):
    """
    :param uri: The URI to request
    :param method: The HTTP method, 'GET' by default
    :param headers: Headers (or iterable of pairs), or None for empty
    :param cookie: Cookie instance, or None for an empty cookie
    :param dc: DataContainer instance, or None for an empty one
    """
    super(FuzzableRequest, self).__init__()

    # Internal variables
    #
    # Explicit "is None" checks instead of the previous "dc or
    # DataContainer()" / "cookie or Cookie()": the truthiness test
    # silently replaced empty (falsy) containers that callers passed in,
    # breaking object-identity expectations. This also matches the newer
    # FuzzableRequest.__init__ which uses "if cookie is None".
    self._dc = DataContainer() if dc is None else dc
    self._method = method
    self._headers = Headers(headers or ())
    self._cookie = Cookie() if cookie is None else cookie
    self._data = None
    self.set_uri(uri)

    # Set the internal variables
    self._sent_info_comp = None
def test_should_inject_cookie_value(self):
    # Build a pickled + base64 encoded payload, like the one the plugin
    # is supposed to detect inside a cookie value
    b64data = base64.b64encode(cPickle.dumps({'data': 'here',
                                              'cookie': 'A' * 16}))

    freq = FuzzableRequest(URL('http://moth/'),
                           cookie=Cookie('foo=%s' % b64data))

    mutant = CookieMutant.create_mutants(freq, self.payloads, [], False,
                                         self.fuzzer_config)[0]

    self.assertTrue(self.plugin._should_inject(mutant, 'python'))
def add_req():
    """Build a FuzzableRequest from the JSON request body and queue it."""
    # Read every field up-front, in the same order as before, so a missing
    # key raises the same KeyError at the same point
    url = request.json["url"]
    method = request.json["method"]
    post_data = request.json["post_data"]
    raw_headers = request.json["headers"]
    cookie_string = request.json['cookie']

    headers = Headers(raw_headers.items())
    freq = FuzzableRequest(URL(url),
                           method,
                           headers,
                           Cookie(cookie_string),
                           dc_from_hdrs_post(headers, post_data))

    urllist.req_queue.put_nowait(freq)
    print("req size %d" % urllist.req_queue.qsize())

    return jsonify({"status": True})
def test_basics(self):
    freq = FuzzableRequest(self.url, cookie=Cookie('foo=bar; spam=eggs'))

    mutant = CookieMutant(freq)
    mutant.get_dc().set_token(('foo', 0))
    mutant.set_token_value('abc')

    self.assertEqual(mutant.get_url().url_string, 'http://moth/')
    self.assertEqual(str(mutant.get_cookie()), 'foo=abc; spam=eggs')

    expected_found_at = ('"http://moth/", using HTTP method GET. The'
                         ' modified parameter was the session cookie with'
                         ' value: "foo=abc; spam=eggs".')
    self.assertEqual(mutant.found_at(), expected_found_at)
def test_is_token_checked_true(self):
    token_url = URL('http://moth/w3af/audit/csrf/secure-replay-allowed/')
    http_response = self.uri_opener.GET(token_url)

    # Please note that this freq holds a fresh/valid CSRF token
    freq = FuzzableRequest(token_url,
                           cookie=Cookie.from_http_response(http_response))

    # FIXME:
    # And I use this token here to get the original response, and if the
    # application is properly developed, that token will be invalidated
    # and that's where this algorithm fails.
    original_response = self.uri_opener.send_mutant(freq)

    token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
    was_checked = self.csrf_plugin._is_token_checked(freq, token,
                                                     original_response)
    self.assertTrue(was_checked)
def test_valid_results(self):
    freq = HTTPQSRequest(self.url, cookie=Cookie('foo=bar; spam=eggs'))

    mutants = CookieMutant.create_mutants(freq, self.payloads, [], False,
                                          self.fuzzer_config)

    # Two payloads x two cookie values = four mutants
    self.assertEqual(len(mutants), 4, mutants)

    expected_cookies = ['foo=abc; spam=eggs;',
                        'foo=def; spam=eggs;',
                        'foo=bar; spam=abc;',
                        'foo=bar; spam=def;']

    self.assertEqual(expected_cookies,
                     [str(m.get_cookie()) for m in mutants])
    # The data container must render exactly like the cookie
    self.assertEqual(expected_cookies,
                     [str(m.get_dc()) for m in mutants])
def test_is_token_checked_false(self):
    """
    This covers the case where there is a token but for some reason it
    is NOT verified by the web application.
    """
    vuln_url = URL('http://moth/w3af/audit/csrf/vulnerable-token-ignored/')
    http_response = self.uri_opener.GET(vuln_url)

    # Please note that this freq holds a fresh/valid CSRF token
    freq = FuzzableRequest(vuln_url,
                           cookie=Cookie.from_http_response(http_response))

    # FIXME:
    # And I use this token here to get the original response, and if the
    # application is properly developed, that token will be invalidated
    # and that's where this algorithm fails.
    original_response = self.uri_opener.send_mutant(freq)

    token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
    was_checked = self.csrf_plugin._is_token_checked(freq, token,
                                                     original_response)
    self.assertFalse(was_checked)
def test_basics(self):
    freq = HTTPQSRequest(self.url, cookie=Cookie('foo=bar; spam=eggs'))

    mutant = CookieMutant(freq)
    mutant.set_var('foo', 0)
    mutant.set_mod_value('abc')

    self.assertEqual(mutant.get_url().url_string, 'http://moth/')
    self.assertEqual(str(mutant.get_cookie()), 'foo=abc; spam=eggs;')

    expected_mod_value = ('The cookie data that was sent is: "foo=abc;'
                          ' spam=eggs;".')
    self.assertEqual(mutant.print_mod_value(), expected_mod_value)

    expected_found_at = ('"http://moth/", using HTTP method GET. The'
                         ' modified parameter was the session cookie with'
                         ' value: "foo=abc; spam=eggs;".')
    self.assertEqual(mutant.found_at(), expected_found_at)
def _create_cookie(http_response):
    """
    Create a cookie object based on a HTTP response.

    >>> from w3af.core.data.parsers.url import URL
    >>> from w3af.core.data.url.HTTPResponse import HTTPResponse
    >>> url = URL('http://www.w3af.com/')
    >>> headers = Headers({'content-type': 'text/html', 'Cookie': 'abc=def' }.items())
    >>> response = HTTPResponse(200, '' , headers, url, url)
    >>> cookie = _create_cookie(response)
    >>> cookie
    Cookie({u'abc': [u'def']})
    """
    cookies = []

    # Get data from RESPONSE
    response_headers = http_response.get_headers()

    for hname, hvalue in response_headers.iteritems():
        if 'cookie' in hname.lower():
            cookies.append(hvalue)

    cookie_inst = Cookie(''.join(cookies))

    #
    # delete everything that the browsers usually keep to themselves, since
    # this cookie object is the one we're going to send to the wire
    #
    for key in ['path', 'expires', 'domain', 'max-age']:
        try:
            del cookie_inst[key]
        # Was a bare "except:", which also swallowed KeyboardInterrupt /
        # SystemExit. The only expected failure here is the key being
        # absent from the cookie.
        except KeyError:
            pass

    return cookie_inst
def _verify_reference(self, reference, original_request, original_response, possibly_broken, be_recursive=True): """ The parameters are: * Newly found URL * The FuzzableRequest instance which generated the response where the new URL was found * The HTTPResponse generated by the FuzzableRequest * Boolean indicating if we trust this reference or not This method GET's every new link and parses it in order to get new links and forms. """ # # Remember that this "breaks" the cache=True in most cases! # headers = { 'Referer': original_url } # # But this does not, and it is friendlier than simply ignoring the # referer # # if self._fuzzy_browser.check_page( str(reference) ): # self._fuzzy_browser.add_page( str(reference) ) # else: # return referer = original_response.get_url().base_url().url_string headers = Headers([('Referer', referer)]) # Note: We're not grep'ing this HTTP request/response now because it # has high probability of being a 404, and the grep plugins # already got enough 404 responses to analyze (from is_404 for # example). If it's not a 404 then we'll push it to the core # and it will come back to this plugin's crawl() where it will # be requested with grep=True self._requests_count += 1 if self._requests_count > self._max_requests_count: return resp = self._uri_opener.GET(reference, cache=True, headers=headers, grep=False) if is_404(resp): # Note: I WANT to follow links that are in the 404 page, but # DO NOT return the 404 itself to the core. # # This will parse the 404 response and add the 404-links in the # output queue, so that the core can get them # if be_recursive: # # Only follow one level of links in 404 pages, this limits the # potential issue when this is found: # # http://foo.com/abc/ => 404 # Body: <a href="def/">link</a> # # Which would lead to this function to perform requests to: # * http://foo.com/abc/ # * http://foo.com/abc/def/ # * http://foo.com/abc/def/def/ # * http://foo.com/abc/def/def/def/ # * ... 
# # Do not use threads here, it will dead-lock (for unknown # reasons). This is tested in TestDeadLock unittest. for args in self._urls_to_verify_generator( resp, original_request): self._verify_reference(*args, be_recursive=False) # Store the broken links if not possibly_broken and resp.get_code( ) not in self.UNAUTH_FORBID: t = (resp.get_url(), original_request.get_uri()) self._broken_links.add(t) else: msg = '[web_spider] Sending link to w3af core: "%s"' om.out.debug(msg % reference) fuzz_req = FuzzableRequest(reference, headers=headers) # These next steps are simple, but actually allows me to set the # referer and cookie for the FuzzableRequest instances I'm sending # to the core, which will then allow the fuzzer to create # CookieMutant and HeadersMutant instances. # # Without setting the Cookie, the CookieMutant would never have any # data to modify; remember that cookies are actually set by the # urllib2 cookie handler when the request already exited the # framework. cookie = Cookie.from_http_response(original_response) fuzz_req.set_referer(referer) fuzz_req.set_cookie(cookie) self.output_queue.put(fuzz_req)
def _verify_reference(self, reference, original_request, original_response, possibly_broken, be_recursive=True): """ The parameters are: * Newly found URL * The FuzzableRequest instance which generated the response where the new URL was found * The HTTPResponse generated by the FuzzableRequest * Boolean indicating if we trust this reference or not This method GET's every new link and parses it in order to get new links and forms. """ # # Remember that this "breaks" the cache=True in most cases! # headers = { 'Referer': original_url } # # But this does not, and it is friendlier than simply ignoring the # referer # referer = original_response.get_url().base_url().url_string headers = Headers([('Referer', referer)]) # Note: We're not grep'ing this HTTP request/response now because it # has high probability of being a 404, and the grep plugins # already got enough 404 responses to analyze (from is_404 for # example). If it's not a 404 then we'll push it to the core # and it will come back to this plugin's crawl() where it will # be requested with grep=True resp = self._uri_opener.GET(reference, cache=True, headers=headers, grep=False) if is_404(resp): # Note: I WANT to follow links that are in the 404 page, but # DO NOT return the 404 itself to the core. # # This will parse the 404 response and add the 404-links in the # output queue, so that the core can get them # if be_recursive: # # Only follow one level of links in 404 pages, this limits the # potential issue when this is found: # # http://foo.com/abc/ => 404 # Body: <a href="def/">link</a> # # Which would lead to this function to perform requests to: # * http://foo.com/abc/ # * http://foo.com/abc/def/ # * http://foo.com/abc/def/def/ # * http://foo.com/abc/def/def/def/ # * ... # # Do not use threads here, it will dead-lock (for unknown # reasons). This is tested in TestDeadLock unittest. 
for args in self._urls_to_verify_generator(resp, original_request): self._verify_reference(*args, be_recursive=False) # Store the broken links if not possibly_broken and resp.get_code() not in self.UNAUTH_FORBID: t = (resp.get_url(), original_request.get_uri()) self._broken_links.add(t) else: msg = '[web_spider] Sending link to w3af core: "%s"' om.out.debug(msg % reference) fuzz_req = FuzzableRequest(reference, headers=headers) # These next steps are simple, but actually allows me to set the # referer and cookie for the FuzzableRequest instances I'm sending # to the core, which will then allow the fuzzer to create # CookieMutant and HeadersMutant instances. # # Without setting the Cookie, the CookieMutant would never have any # data to modify; remember that cookies are actually set by the # urllib2 cookie handler when the request already exited the # framework. cookie = Cookie.from_http_response(original_response) fuzz_req.set_referer(referer) fuzz_req.set_cookie(cookie) self.output_queue.put(fuzz_req)
def _verify_reference(self, reference, original_request,
                      original_response, possibly_broken,
                      be_recursive=True):
    """
    The parameters are:
        * Newly found URL
        * The FuzzableRequest instance which generated the response where
          the new URL was found
        * The HTTPResponse generated by the FuzzableRequest
        * Boolean indicating if we trust this reference or not

    This method GET's every new link and parses it in order to get
    new links and forms.
    """
    #
    # Remember that this "breaks" the cache=True in most cases!
    #     headers = { 'Referer': original_url }
    #
    # But this does not, and it is friendlier than simply ignoring the
    # referer
    #
    referer = original_response.get_url().base_url().url_string
    headers = Headers([('Referer', referer)])

    resp = self._uri_opener.GET(reference, cache=True, headers=headers)

    if is_404(resp):
        # Note: I WANT to follow links that are in the 404 page, but
        # DO NOT return the 404 itself to the core.
        #
        # This will parse the 404 response and add the 404-links in the
        # output queue, so that the core can get them
        #
        if be_recursive:
            #
            # Only follow one level of links in 404 pages, this limits the
            # potential issue when this is found:
            #
            #   http://foo.com/abc/ => 404
            #   Body: <a href="def/">link</a>
            #
            # Which would lead to this function to perform requests to:
            #   * http://foo.com/abc/
            #   * http://foo.com/abc/def/
            #   * http://foo.com/abc/def/def/
            #   * http://foo.com/abc/def/def/def/
            #   * ...
            #
            # Bind be_recursive=False so the worker pool only follows one
            # level of 404 links
            non_recursive_verify_ref = partial(self._verify_reference,
                                               be_recursive=False)

            self.worker_pool.map_multi_args(
                non_recursive_verify_ref,
                self._urls_to_verify_generator(resp, original_request))

        # Store the broken links
        if not possibly_broken and resp.get_code() not in self.UNAUTH_FORBID:
            t = (resp.get_url(), original_request.get_uri())
            self._broken_links.add(t)
    else:
        msg = 'Adding reference "%s" to the result.'
        om.out.debug(msg % reference)

        fuzz_req = FuzzableRequest(reference, headers=headers)

        # These next steps are simple, but actually allows me to set the
        # referer and cookie for the FuzzableRequest instances I'm sending
        # to the core, which will then allow the fuzzer to create
        # CookieMutant and HeadersMutant instances.
        #
        # Without setting the Cookie, the CookieMutant would never have any
        # data to modify; remember that cookies are actually set by the
        # urllib2 cookie handler when the request already exited the
        # framework.
        cookie = Cookie.from_http_response(original_response)

        fuzz_req.set_referer(referer)
        fuzz_req.set_cookie(cookie)

        self.output_queue.put(fuzz_req)
def _verify_reference(self, reference, original_request,
                      original_response, possibly_broken,
                      be_recursive=True):
    """
    The parameters are:
        * Newly found URL
        * The FuzzableRequest instance which generated the response where
          the new URL was found
        * The HTTPResponse generated by the FuzzableRequest
        * Boolean indicating if we trust this reference or not

    This method GET's every new link and parses it in order to get
    new links and forms.
    """
    #
    # Remember that this "breaks" the cache=True in most cases!
    #     headers = { 'Referer': original_url }
    #
    # But this does not, and it is friendlier than simply ignoring the
    # referer
    #
    referer = original_response.get_url().base_url().url_string
    headers = Headers([('Referer', referer)])

    resp = self._uri_opener.GET(reference, cache=True, headers=headers)

    if is_404(resp):
        # Note: I WANT to follow links that are in the 404 page, but
        # DO NOT return the 404 itself to the core.
        #
        # This will parse the 404 response and add the 404-links in the
        # output queue, so that the core can get them
        #
        if be_recursive:
            #
            # Only follow one level of links in 404 pages, this limits the
            # potential issue when this is found:
            #
            #   http://foo.com/abc/ => 404
            #   Body: <a href="def/">link</a>
            #
            # Which would lead to this function to perform requests to:
            #   * http://foo.com/abc/
            #   * http://foo.com/abc/def/
            #   * http://foo.com/abc/def/def/
            #   * http://foo.com/abc/def/def/def/
            #   * ...
            #
            # Bind be_recursive=False so the worker pool only follows one
            # level of 404 links
            non_recursive_verify_ref = partial(self._verify_reference,
                                               be_recursive=False)

            self.worker_pool.map_multi_args(
                non_recursive_verify_ref,
                self._urls_to_verify_generator(resp, original_request))

        # Store the broken links
        if not possibly_broken and resp.get_code() not in self.UNAUTH_FORBID:
            t = (resp.get_url(), original_request.get_uri())
            self._broken_links.add(t)
    else:
        msg = 'Adding reference "%s" to the result.'
        om.out.debug(msg % reference)

        fuzz_req = FuzzableRequest(reference, headers=headers)

        # These next steps are simple, but actually allows me to set the
        # referer and cookie for the FuzzableRequest instances I'm sending
        # to the core, which will then allow the fuzzer to create
        # CookieMutant and HeadersMutant instances.
        #
        # Without setting the Cookie, the CookieMutant would never have any
        # data to modify; remember that cookies are actually set by the
        # urllib2 cookie handler when the request already exited the
        # framework.
        cookie = Cookie.from_http_response(original_response)

        fuzz_req.set_referer(referer)
        fuzz_req.set_cookie(cookie)

        self.output_queue.put(fuzz_req)