def search(self, query, start, count=10):
    '''
    Search the web with Bing.

    This method is based on the msn.py file from the massive enumeration
    toolset, coded by pdp and released under GPL v2.
    '''
    url = 'http://www.bing.com/search?'
    query = urllib.urlencode({'q': query,
                              'first': start + 1,
                              'FORM': 'PERE'})
    url_instance = URL(url + query)
    response = self._uri_opener.GET(url_instance, headers=self._headers,
                                    cache=True, grep=False)

    # This regex might become outdated, but the good thing is that we have
    # test_bing.py which is going to fail and tell us that it's outdated
    re_match = re.findall('<a href="((http|https)(.*?))" h="ID=SERP,',
                          response.get_body())

    results = set()

    for url, _, _ in re_match:
        try:
            url = URL(url)
        except:
            # Invalid URL, skip it
            pass
        else:
            if url.get_domain() not in self.BLACKLISTED_DOMAINS:
                bing_result = BingResult(url)
                results.add(bing_result)

    return results

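# A minimal usage sketch for search() above, not part of the original
# module: `bing_se` stands for an already-configured instance of the
# search class, and the pagination step of 10 matches the page size
# assumed by the `count` parameter.
def collect_bing_results(bing_se, query, pages=3):
    all_results = set()
    for page in xrange(pages):
        # start is the zero-based result offset; search() adds 1 for Bing
        all_results.update(bing_se.search(query, start=page * 10))
    return all_results
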
def test_encode_decode(self):
    '''Encode and Decode should be able to run one on the result of the
    other and return the original'''
    original = URL(u'https://w3af.com:443/file.asp?id=1%202')

    encoded = original.url_encode()
    decoded = URL(encoded).url_decode()

    self.assertEqual(original, decoded)

def test_url_join_case01(self):
    u = URL('http://w3af.com/foo.bar')
    self.assertEqual(u.url_join('abc.html').url_string,
                     u'http://w3af.com/abc.html')
    self.assertEqual(u.url_join('/abc.html').url_string,
                     u'http://w3af.com/abc.html')

def test_simplest_url(self):
    u = URL('http://w3af.com/foo/bar.txt')

    self.assertEqual(u.path, '/foo/bar.txt')
    self.assertEqual(u.scheme, 'http')
    self.assertEqual(u.get_file_name(), 'bar.txt')
    self.assertEqual(u.get_extension(), 'txt')

def from_httplib_resp(cls, httplibresp, original_url=None):
    '''
    Factory function. Build a HTTPResponse object from a
    httplib.HTTPResponse instance.

    :param httplibresp: httplib.HTTPResponse instance
    :param original_url: Optional 'url_object' instance.

    :return: A HTTPResponse instance
    '''
    resp = httplibresp
    code, msg, hdrs, body = (resp.code, resp.msg, resp.info(), resp.read())
    hdrs = Headers(hdrs.items())

    if original_url:
        url_inst = URL(resp.geturl(), original_url.encoding)
        url_inst = url_inst.url_decode()
    else:
        url_inst = original_url = URL(resp.geturl())

    if isinstance(resp, urllib2.HTTPError):
        # This is possible because in errors.py I do:
        # err = urllib2.HTTPError(req.get_full_url(), code, msg, hdrs, resp)
        charset = getattr(resp.fp, 'encoding', None)
    else:
        # The encoding attribute is only set on CachedResponse instances
        charset = getattr(resp, 'encoding', None)

    return cls(code, body, hdrs, url_inst, original_url, msg,
               charset=charset)

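# Hedged usage sketch for the factory above: drive it with a plain
# urllib2 response (Python 2). `HTTPResponse` and `URL` are the
# project's classes; the fetch() helper itself is illustrative.
import urllib2

def fetch(url_string):
    original_url = URL(url_string)
    httplib_resp = urllib2.urlopen(url_string)
    return HTTPResponse.from_httplib_resp(httplib_resp,
                                          original_url=original_url)
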
def do_ALL(self):
    global global_first_request
    if global_first_request:
        global_first_request = False
        om.out.information("The user is navigating through the spider_man proxy.")

    # Convert to url_object
    path = URL(self.path)

    if path == TERMINATE_URL:
        om.out.information("The user terminated the spider_man session.")
        self._send_end()
        self._spider_man.stop_proxy()
        return

    om.out.debug("[spider_man] Handling request: %s %s" %
                 (self.command, path))

    # Send this information to the plugin so it can send it to the core
    freq = self._create_fuzzable_request()
    self._spider_man.append_fuzzable_request(freq)

    grep = True
    if path.get_domain() != self.server.w3afLayer.target_domain:
        grep = False

    try:
        response = self._send_to_server(grep=grep)
    except Exception, e:
        self._send_error(e)

def test_url_join_case03(self):
    u = URL('http://w3af.com/def/jkl/')
    self.assertEqual(u.url_join('/def/abc.html').url_string,
                     u'http://w3af.com/def/abc.html')
    self.assertEqual(u.url_join('def/abc.html').url_string,
                     u'http://w3af.com/def/jkl/def/abc.html')

def test_default_proto(self):
    '''
    http is the default protocol, we can provide URLs with no proto
    '''
    u = URL('w3af.com')
    self.assertEqual(u.get_domain(), 'w3af.com')
    self.assertEqual(u.get_protocol(), 'http')

def http_request(self, req):
    url_instance = URL(req.get_full_url())
    url_instance.set_param(self._url_parameter)

    new_request = HTTPRequest(url_instance, headers=req.headers,
                              origin_req_host=req.get_origin_req_host(),
                              unverifiable=req.is_unverifiable())
    return new_request

def test_remove_fragment(self):
    u = URL('http://w3af.com/foo/bar.txt?id=3#foobar')
    self.assertEqual(u.remove_fragment().url_string,
                     u'http://w3af.com/foo/bar.txt?id=3')

    u = URL('http://w3af.com/foo/bar.txt#foobar')
    self.assertEqual(u.remove_fragment().url_string,
                     u'http://w3af.com/foo/bar.txt')

def test_from_url(self):
    o = URL('http://w3af.com/foo/bar.txt')
    u = URL.from_URL(o)

    self.assertEqual(u.path, '/foo/bar.txt')
    self.assertEqual(u.scheme, 'http')
    self.assertEqual(u.get_file_name(), 'bar.txt')
    self.assertEqual(u.get_extension(), 'txt')

    o = URL('w3af.com')
    u = URL.from_URL(o)
    self.assertEqual(u.get_domain(), 'w3af.com')
    self.assertEqual(u.get_protocol(), 'http')

def test_set_params(self):
    u = URL('http://w3af.com/;id=1')
    u.set_param('file=2')

    self.assertEqual(u.get_params_string(), 'file=2')

    u = URL('http://w3af.com/xyz.txt;id=1?file=2')
    u.set_param('file=3')

    self.assertEqual(u.get_params_string(), 'file=3')
    self.assertEqual(u.get_path_qs(), '/xyz.txt;file=3?file=2')

def test_check_case09(self):
    is_vuln = IsVulnerableHelper(200, 301, re.compile('def'),
                                 re.compile('xyz'), re.compile('spam'))
    url = URL('http://moth/')
    http_response = HTTPResponse(301, 'hello world abc def', Headers(),
                                 url, url)
    self.assertTrue(is_vuln.check(http_response))

def test_find_csrf_token_false(self):
    url = URL('http://moth/w3af/audit/csrf/')
    query_string = parse_qs('secret=not a token')
    freq = FuzzableRequest(url, method='GET', dc=query_string)

    token = self.csrf_plugin._find_csrf_token(freq)
    self.assertNotIn('secret', token)

def test_find_csrf_token_true_simple(self):
    url = URL('http://moth/w3af/audit/csrf/')
    query_string = parse_qs('secret=f842eb01b87a8ee18868d3bf80a558f3')
    freq = FuzzableRequest(url, method='GET', dc=query_string)

    token = self.csrf_plugin._find_csrf_token(freq)
    self.assertIn('secret', token)

def test_verify_vulnerability_POST(self):
    target = Target(URL(self.SQLI_POST), self.DATA_POST)
    self.sqlmap = SQLMapWrapper(target, self.uri_opener)

    vulnerable = self.sqlmap.is_vulnerable()
    self.assertTrue(vulnerable)

def setUp(self):
    uri = URL(self.SQLI_GET)
    target = Target(uri)

    self.uri_opener = ExtendedUrllib()
    self.sqlmap = SQLMapWrapper(target, self.uri_opener)

def test_enable_coloring(self):
    uri = URL(self.SQLI_GET)
    target = Target(uri)

    sqlmap = SQLMapWrapper(target, self.uri_opener, coloring=True)
    params = sqlmap.get_wrapper_params()
    self.assertNotIn('--disable-coloring', params)

def test_target_post_data(self):
    target = Target(URL(self.SQLI_GET), self.DATA_POST)

    params = target.to_params()
    self.assertEqual(params, ["--url=%s" % self.SQLI_GET,
                              "--data=%s" % self.DATA_POST])

class xssed_dot_com(InfrastructurePlugin):
    '''
    Search in xssed.com to find xssed pages.

    :author: Nicolas Crocfer ([email protected])
    :author: Raul Siles: set "." in front of the root domain to limit search
    '''
    def __init__(self):
        InfrastructurePlugin.__init__(self)

        #
        #   Could change in time,
        #
        self._xssed_url = URL("http://www.xssed.com")
        self._fixed = "<img src='http://data.xssed.org/images/fixed.gif'> FIXED</th>"

    @runonce(exc_class=w3afRunOnce)
    def discover(self, fuzzable_request):
        '''
        Search in xssed.com and parse the output.

        :param fuzzable_request: A fuzzable_request instance that contains
                                 (among other things) the URL to test.
        '''
        target_domain = fuzzable_request.get_url().get_root_domain()

        try:
            check_url = self._xssed_url.url_join(
                "/search?key=." + target_domain)
            response = self._uri_opener.GET(check_url)
        except w3afException, e:
            msg = 'An exception was raised while running xssed_dot_com'\
                  ' plugin. Exception: "%s".' % e
            om.out.debug(msg)
        else:

def from_dict(cls, unserialized_dict):
    '''
    * msgpack is MUCH faster than cPickle,
    * msgpack can't serialize python objects,
    * I have to create a dict representation of HTTPResponse to
      serialize it,
    * and a from_dict to have the object back

    :param unserialized_dict: A dict just as returned by to_dict()
    '''
    udict = unserialized_dict

    code, msg, hdrs = udict['code'], udict['msg'], udict['headers']
    body, _time, _id = udict['body'], udict['time'], udict['id']

    headers_inst = Headers(hdrs.items())
    url = URL(udict['uri'])

    return cls(code, body, headers_inst, url, url, msg=msg, _id=_id,
               time=_time)

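# Hedged round-trip sketch for from_dict(): it assumes a to_dict()
# counterpart that emits the keys consumed above ('code', 'msg',
# 'headers', 'body', 'time', 'id', 'uri'), and uses msgpack as the
# docstring suggests. Illustrative only.
import msgpack

def roundtrip(http_response):
    packed = msgpack.packb(http_response.to_dict())
    return HTTPResponse.from_dict(msgpack.unpackb(packed))
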
def test_handle_exception(self):
    url = URL('http://moth/')
    fr = FuzzableRequest(url)
    try:
        raise Exception()
    except Exception, e:
        self.bc.handle_exception('audit', 'sqli', fr, e)

def test_clamav_workers(self, *args):
    WAIT_TIME = 3
    DELTA = WAIT_TIME * 0.1

    # Prepare the mocked plugin
    def wait(x, y):
        time.sleep(WAIT_TIME)

    self.plugin._is_properly_configured = Mock(return_value=True)
    self.plugin._scan_http_response = wait
    self.plugin._report_result = lambda x: 42

    start_time = time.time()

    for i in xrange(3):
        body = ''
        url = URL('http://www.w3af.com/%s' % i)
        headers = Headers([('content-type', 'text/html')])
        response = HTTPResponse(200, body, headers, url, url, _id=1)
        request = FuzzableRequest(url, method='GET')

        self.plugin.grep(request, response)

    # Let the worker pool wait for the clamd response, this is done by
    # the core when run in a real scan
    self.plugin.worker_pool.close()
    self.plugin.worker_pool.join()

    end_time = time.time()
    time_spent = end_time - start_time

    findings = kb.kb.get('clamav', 'malware')

    self.assertEqual(len(findings), 0, findings)
    self.assertLessEqual(time_spent, WAIT_TIME + DELTA)

def test_mutant_creation(self):
    form = Form()
    form.add_input([("name", "username"), ("value", "")])
    form.add_input([("name", "address"), ("value", "")])

    freq = HTTPPostDataRequest(URL('http://www.w3af.com/?id=3'),
                               dc=form, method='PUT')

    created_mutants = PostDataMutant.create_mutants(freq, self.payloads,
                                                    [], False,
                                                    self.fuzzer_config)

    expected_dc_lst = [Form([('username', ['abc']),
                             ('address', ['Bonsai Street 123'])]),
                       Form([('username', ['def']),
                             ('address', ['Bonsai Street 123'])]),
                       Form([('username', ['John8212']),
                             ('address', ['abc'])]),
                       Form([('username', ['John8212']),
                             ('address', ['def'])])]

    created_dc_lst = [i.get_dc() for i in created_mutants]

    self.assertEqual(created_dc_lst, expected_dc_lst)

    self.assertEqual(created_mutants[0].get_var(), 'username')
    self.assertEqual(created_mutants[0].get_var_index(), 0)
    self.assertEqual(created_mutants[0].get_original_value(), '')
    self.assertEqual(created_mutants[2].get_var(), 'address')
    self.assertEqual(created_mutants[2].get_var_index(), 0)
    self.assertEqual(created_mutants[2].get_original_value(), '')

    self.assertTrue(all(isinstance(m, PostDataMutant)
                        for m in created_mutants))
    self.assertTrue(all(m.get_method().startswith('PUT')
                        for m in created_mutants))

def test_add_when_qs(self):
    rp = rnd_param()
    u = URL('http://www.w3af.com/?id=1')
    r = HTTPRequest(u)
    qs = rp.modify_request(r).url_object.querystring
    self.assertEqual(len(qs), 2)

def test_bug_13_Dec_2012(self):
    url1 = URL('http://w3af.com/foo/')
    url2 = URL('http://w3af.com/bar/')

    body = '<a href="?id=1">1</a>'
    resp1 = HTTPResponse(200, body, self.headers, url1, url1)
    resp2 = HTTPResponse(200, body, self.headers, url2, url2)

    parser1 = self.dpc.get_document_parser_for(resp1)
    parser2 = self.dpc.get_document_parser_for(resp2)

    self.assertNotEqual(id(parser1), id(parser2))

    _, parsed_refs_1 = parser1.get_references()
    _, parsed_refs_2 = parser2.get_references()

    self.assertEqual(parsed_refs_1, parsed_refs_2)

def __init__(self):
    super(FileUploadTemplate, self).__init__()
    self.name = self.get_vulnerability_name()
    self.file_vars = []
    self.file_dest = URL('http://host.tld/uploads/file.ext')
    self.method = 'POST'

def test_is_valid_domain_valid(self):
    self.assertTrue(URL("http://1.2.3.4").is_valid_domain())
    self.assertTrue(URL("http://aaa.com").is_valid_domain())
    self.assertTrue(URL("http://aa-bb").is_valid_domain())
    self.assertTrue(URL("http://w3af.com").is_valid_domain())
    self.assertTrue(URL("http://w3af.com:39").is_valid_domain())
    self.assertTrue(URL("http://w3af.com:3932").is_valid_domain())
    self.assertTrue(URL("http://f.o.o.b.a.r.s.p.a.m.e.g.g.s").is_valid_domain())
    self.assertTrue(URL("http://abc:3932").is_valid_domain())

def met_search(self, query):
    """
    Query a Public Key Server.

    This method is based on the pks.py file from the massive enumeration
    toolset, coded by pdp and released under GPL v2.
    """
    url = URL(u'http://pgp.mit.edu:11371/pks/lookup')
    url.querystring = {u'op': u'index', u'search': query}

    response = self._uri_opener.GET(url, headers=self._headers,
                                    cache=True, grep=False)
    content = response.get_body()

    content = re.sub('(<.*?>|&lt;|&gt;)', '', content)

    results = []
    accounts = []

    for line in content.split('\n')[2:]:
        if not line.strip():
            continue

        tokens = line.split()
        if len(tokens) >= 5:
            email = tokens[-1]
            name = ' '.join(tokens[3:-1])

            # Copy+paste from baseparser.py
            email_regex = '([A-Z0-9\._%-]{1,45}@([A-Z0-9\.-]{1,45}\.){1,10}'\
                          '[A-Z]{2,4})'
            if re.match(email_regex, email, re.IGNORECASE):
                account = email.split('@')[0]
                domain = email.split('@')[1]

                if domain == query:
                    if account not in accounts:
                        accounts.append(account)
                        pksr = PKSResult(name, account, domain,
                                         response.id)
                        results.append(pksr)

    return results

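# Standalone sketch of the email filter used above: re.match() anchors
# the regex at the start of the token, so plain user@host.tld addresses
# pass and arbitrary tokens do not.
import re

email_regex = '([A-Z0-9\._%-]{1,45}@([A-Z0-9\.-]{1,45}\.){1,10}[A-Z]{2,4})'

assert re.match(email_regex, 'john.doe@w3af.org', re.IGNORECASE)
assert not re.match(email_regex, 'not-an-email', re.IGNORECASE)
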
def test_from_parts(self):
    u = URL.from_parts('http', 'w3af.com', '/foo/bar.txt', None,
                       'a=b', 'frag')

    self.assertEqual(u.path, '/foo/bar.txt')
    self.assertEqual(u.scheme, 'http')
    self.assertEqual(u.get_file_name(), 'bar.txt')
    self.assertEqual(u.get_extension(), 'txt')

def test_ajax_two(self):
    body = ('<script> ... xhr = new XMLHttpRequest(); ... '
            'xhr = new ActiveXObject("Microsoft.XMLHTTP"); ... </script>')
    url = URL('http://www.w3af.com/')
    headers = Headers([('content-type', 'text/html')])
    response = HTTPResponse(200, body, headers, url, url, _id=1)
    request = FuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    self.assertEquals(len(kb.kb.get('ajax', 'ajax')), 1)

def test_build_cors_request_true(self):
    url = URL('http://moth/')
    fr = build_cors_request(url, 'http://foo.com/')

    self.assertEquals(fr.get_url(), url)
    self.assertEquals(fr.get_method(), 'GET')
    self.assertEquals(fr.get_headers(), {'Origin': 'http://foo.com/'})

def test_no_modification(self):
    fwe = full_width_encode()
    u = URL('http://www.w3af.com/')
    r = HTTPRequest(u)
    self.assertEqual(fwe.modify_request(r).url_object.url_string,
                     u'http://www.w3af.com/')

def test_config_false(self):
    fuzzer_config = {'fuzz_form_files': False}
    freq = HTTPPostDataRequest(URL('http://www.w3af.com/foo/bar'))

    generated_mutants = FileContentMutant.create_mutants(
        freq, self.payloads, [], False, fuzzer_config)

    self.assertEqual(len(generated_mutants), 0, generated_mutants)

def test_config_true(self):
    fuzzer_config = {'fuzz_url_filenames': True}
    freq = HTTPQSRequest(URL('http://www.w3af.com/foo/bar'))

    generated_mutants = FileNameMutant.create_mutants(
        freq, self.payloads, [], False, fuzzer_config)

    self.assertNotEqual(len(generated_mutants), 0, generated_mutants)

def test_basic(self):
    url = URL('http://moth/')
    http_response = self.uri_opener.GET(url, cache=False)

    self.assertIn(self.MOTH_MESSAGE, http_response.body)

    self.assertGreaterEqual(http_response.id, 1)
    self.assertNotEqual(http_response.id, None)

def test_private_ip_find_10(self):
    body = 'header 10.2.34.2 footer'
    url = URL('http://www.w3af.com/')
    headers = Headers([('content-type', 'text/html')])
    response = HTTPResponse(200, body, headers, url, url, _id=1)
    request = FuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    self.assertEquals(len(kb.kb.get('private_ip', 'HTML')), 1)

def test_private_ip_broken_html(self):
    body = '<html><head>192.168.1.1</html>'
    url = URL('http://www.w3af.com/')
    headers = Headers([('content-type', 'text/html')])
    response = HTTPResponse(200, body, headers, url, url, _id=1)
    request = FuzzableRequest(url, method='GET')
    self.plugin.grep(request, response)
    self.assertEquals(len(kb.kb.get('private_ip', 'HTML')), 1)

def test_phishtank_no_match(self):
    phishtank_inst = self.w3afcore.plugins.get_plugin_inst('crawl',
                                                           'phishtank')
    phishtank_inst.crawl(FuzzableRequest(URL(self.safe_url)))

    vulns = self.kb.get('phishtank', 'phishtank')
    self.assertEqual(len(vulns), 0, vulns)

def test_add_path_to_base_url(self):
    rp = rnd_path()
    u = URL('http://www.w3af.com/')
    r = HTTPRequest(u)
    url_string = rp.modify_request(r).url_object.url_string
    self.assertRegexpMatches(url_string, 'http://www.w3af.com/\w*/../')

def test_add_with_filename(self):
    rp = rnd_path()
    u = URL('http://www.w3af.com/abc/def.htm')
    r = HTTPRequest(u)
    url_string = rp.modify_request(r).url_object.url_string
    self.assertRegexpMatches(url_string,
                             'http://www.w3af.com/\w*/../abc/def.htm')

def test_build_cors_request_false(self):
    url = URL('http://moth/')
    fr = build_cors_request(url, None)

    self.assertEquals(fr.get_url(), url)
    self.assertEquals(fr.get_method(), 'GET')
    self.assertEquals(fr.get_headers(), {})

def test_no_modification(self):
    rhe = rnd_hex_encode()
    u = URL('http://www.w3af.com/')
    r = HTTPRequest(u)
    self.assertEqual(rhe.modify_request(r).url_object.url_string,
                     u'http://www.w3af.com/')

def test_get_path_qs(self):
    u = URL(u'https://w3af.com:443/xyz/123/456/789/')
    self.assertEqual(u.get_path(), u'/xyz/123/456/789/')

    u = URL(u'https://w3af.com:443/xyz/123/456/789/')
    self.assertEqual(u.get_path_qs(), u'/xyz/123/456/789/')

    u = URL(u'https://w3af.com:443/xyz/file.asp')
    self.assertEqual(u.get_path_qs(), u'/xyz/file.asp')

    u = URL(u'https://w3af.com:443/xyz/file.asp?id=1')
    self.assertEqual(u.get_path_qs(), u'/xyz/file.asp?id=1')

def from_httplib_resp(cls, httplibresp, original_url=None):
    '''
    Factory function. Build a HTTPResponse object from a
    httplib.HTTPResponse instance.

    :param httplibresp: httplib.HTTPResponse instance
    :param original_url: Optional 'url_object' instance.

    :return: A HTTPResponse instance
    '''
    resp = httplibresp
    code, msg, hdrs, body = (resp.code, resp.msg, resp.info(), resp.read())
    hdrs = Headers(hdrs.items())

    if original_url:
        url_inst = URL(resp.geturl(), original_url.encoding)
        url_inst = url_inst.url_decode()
    else:
        url_inst = original_url = URL(resp.geturl())

    # The encoding attribute is only set on CachedResponse instances
    charset = getattr(resp, 'encoding', None)

    return cls(code, body, hdrs, url_inst, original_url, msg,
               charset=charset)

def test_get_path_without_filename(self):
    u = URL('https://w3af.com:443/xyz/file.asp')
    self.assertEqual(u.get_path_without_file(), '/xyz/')

    u = URL('https://w3af.com:443/xyz/')
    self.assertEqual(u.get_path_without_file(), '/xyz/')

    u = URL('https://w3af.com:443/xyz/123/456/789/')
    self.assertEqual(u.get_path_without_file(), '/xyz/123/456/789/')

def test_has_query_string(self):
    u = URL('http://w3af.com/foo/bar.txt')
    self.assertFalse(u.has_query_string())

    u = URL('http://w3af.com/foo/bar.txt?id=1')
    self.assertTrue(u.has_query_string())

    u = URL('http://w3af.com/foo/bar.txt;par=3')
    self.assertFalse(u.has_query_string())

def _create_file(self):
    '''
    Create a randomly named PHP file with random PHP content. To be used
    in the remote file inclusion test.

    :return: The file content to be served via the webserver.

    Please note that the generated code works both in PHP and JSP
    without any issues, since PHP will run everything between "<?" and
    "?>" and JSP will run code between "<%" and "%>".

    TODO: make this code compatible with: asp/aspx, jsp, js (nodejs),
          pl, py, rb, etc. Some code snippets that might help to achieve
          this task:

          asp_code = 'response.write("%s");\n response.write("%s");' % (
              rand1, rand2)
          asp_code = '<% \n ' + asp_code + '\n %>'
    '''
    with self._plugin_lock:
        # First, generate the php file to be included.
        rfi_result_part_1 = rand1 = rand_alnum(9)
        rfi_result_part_2 = rand2 = rand_alnum(9)
        rfi_result = rand1 + rand2

        filename = rand_alnum(8)
        php_jsp_code = '<? echo "%s"; echo "%s"; ?>'
        php_jsp_code += '<%% out.print("%s"); out.print("%s"); %%>'
        php_jsp_code = php_jsp_code % (rand1, rand2, rand1, rand2)

        # Define the required parameters
        netloc = self._listen_address + ':' + str(self._listen_port)
        path = '/' + filename
        rfi_url = URL.from_parts('http', netloc, path, None, None, None)

        rfi_data = RFIData(rfi_url, rfi_result_part_1, rfi_result_part_2,
                           rfi_result)

        return php_jsp_code, rfi_data

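# Hedged illustration of the PHP/JSP polyglot built in _create_file(),
# with fixed stand-ins for the random tokens (the real code uses
# rand_alnum(9) output):
rand1, rand2 = 'AAAAAAAAA', 'BBBBBBBBB'

php_jsp_code = '<? echo "%s"; echo "%s"; ?>'
php_jsp_code += '<%% out.print("%s"); out.print("%s"); %%>'
php_jsp_code = php_jsp_code % (rand1, rand2, rand1, rand2)

# A PHP interpreter executes the first tag pair, a JSP container the
# second; either way rand1 + rand2 ends up in the response body.
assert php_jsp_code == ('<? echo "AAAAAAAAA"; echo "BBBBBBBBB"; ?>'
                        '<% out.print("AAAAAAAAA"); out.print("BBBBBBBBB"); %>')
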
def endElement(self, name):
    if name == 'phish_detail_url':
        self.inside_detail = False

    if name == 'url':
        self.inside_URL = False

    if name == 'entry':
        self.inside_entry = False

        #
        #   Now I try to match the entry with an element in the
        #   to_check_list
        #
        for target_host in self._to_check:
            if target_host in self.url:
                phish_url = URL(self.url)
                target_host_url = URL(target_host)

                if target_host_url.get_domain() == phish_url.get_domain() or \
                   phish_url.get_domain().endswith('.' + target_host_url.get_domain()):

                    phish_detail_url = URL(self.phish_detail_url)
                    ptm = PhishTankMatch(phish_url, phish_detail_url)
                    self.matches.append(ptm)

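# Hedged illustration of the matching rule above, using the project's
# URL class with made-up hosts: an exact domain match or any subdomain
# of the monitored target counts as a phishing hit.
target = URL('http://w3af.com/')
phish = URL('http://login.w3af.com/fake')

assert phish.get_domain().endswith('.' + target.get_domain())
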
def test_uri2url(self):
    u = URL('http://w3af.com/foo/bar.txt?id=3')
    self.assertEqual(u.uri2url().url_string,
                     u'http://w3af.com/foo/bar.txt')

def test_set_domain(self):
    u = URL('http://w3af.com/def/jkl/')
    self.assertEqual(u.get_domain(), 'w3af.com')

    u.set_domain('host.tld')
    self.assertEqual(u.get_domain(), 'host.tld')

    u.set_domain('foobar')
    self.assertEqual(u.get_domain(), 'foobar')

    u.set_domain('foobar.')
    self.assertEqual(u.get_domain(), 'foobar.')

def test_set_domain_with_port(self):
    u = URL('http://w3af.com:443/def/jkl/')
    self.assertEqual(u.get_domain(), 'w3af.com')

    u.set_domain('host.tld')
    self.assertEqual(u.get_net_location(), 'host.tld:443')

def test_set_protocol(self):
    u = URL("http://1.2.3.4")
    self.assertEqual(u.get_protocol(), 'http')

    u.set_protocol('https')
    self.assertEqual(u.get_protocol(), 'https')

def test_set_filename(self):
    u = URL('https://w3af.com:443/xyz/def.html')
    u.set_file_name('abc.pdf')
    self.assertEqual(u.url_string, 'https://w3af.com/xyz/abc.pdf')
    self.assertEqual(u.get_file_name(), 'abc.pdf')

    u = URL('https://w3af.com/xyz/def.html?id=1')
    u.set_file_name('abc.pdf')
    self.assertEqual(u.url_string, 'https://w3af.com/xyz/abc.pdf?id=1')

    u = URL('https://w3af.com/xyz/def.html?file=/etc/passwd')
    u.set_file_name('abc.pdf')
    self.assertEqual(u.url_string,
                     'https://w3af.com/xyz/abc.pdf?file=/etc/passwd')

    u = URL('https://w3af.com/')
    u.set_file_name('abc.pdf')
    self.assertEqual(u.url_string, 'https://w3af.com/abc.pdf')

def test_set_extension(self):
    u = URL('https://www.w3af.com/xyz/foo')
    self.assertRaises(Exception, u.set_extension, 'xml')

    u = URL('https://w3af.com/xyz/d.html')
    u.set_extension('xml')
    self.assertEqual(u.get_extension(), 'xml')

    u = URL('https://w3af.com/xyz/d.html?id=3')
    u.set_extension('xml')
    self.assertEqual(u.get_extension(), 'xml')

    u = URL('https://w3af.com/xyz/d.html.foo?id=3')
    u.set_extension('xml')
    self.assertEqual(u.get_extension(), 'xml')
    self.assertEqual(u.url_string, u'https://w3af.com/xyz/d.html.xml?id=3')

def normalize_url_case12(self):
    # IPv6 support
    u = URL('http://fe80:0:0:0:202:b3ff:fe1e:8329/')
    u.normalize_url()
    self.assertEqual(u.url_string,
                     u'http://fe80:0:0:0:202:b3ff:fe1e:8329/')

def normalize_url_case11(self):
    u = URL('http://w3af.com/../../f00.b4r')
    u.normalize_url()
    self.assertEqual(u.url_string, u'http://w3af.com/f00.b4r')