def test(self):
    """Run the configured fuzz and compare (code, path) pairs with the expectation.

    When extra_params is set, the same fuzz is repeated with those options
    (rewriting hostnames for the proxy case) and both runs must agree;
    otherwise the first run must match expected_list.
    """
    # Wfuzz results
    with wfuzz.FuzzSession(url=url, **params) as session:
        source = session.fuzz() if payloads is None else session.get_payloads(payloads).fuzz()
        baseline = [(res.code, res.history.urlparse.path) for res in source]

    if not extra_params:
        self.assertEqual(sorted(baseline), sorted(expected_list))
        return

    # Repeat the test with the extra params; when going through a proxy the
    # docker-compose service names replace localhost in URL and payloads alike.
    target_url = url
    target_payloads = payloads
    if "proxies" in extra_params:
        for host_from, host_to in REPLACE_HOSTNAMES:
            target_url = target_url.replace(host_from, host_to)
            if target_payloads:
                target_payloads = [
                    [entry.replace(host_from, host_to) for entry in plist]
                    for plist in target_payloads
                ]
    with wfuzz.FuzzSession(url=target_url) as session:
        repeated = [
            (res.code, res.history.urlparse.path)
            for res in session.get_payloads(target_payloads).fuzz(**extra_params)
        ]
    self.assertEqual(sorted(baseline), sorted(repeated))
def test(self):
    """Fuzz with results saved to a file, then replay the saved requests.

    The saved-request ("wfuzzp") payload is replayed twice — once with a plain
    FUZZ url and once with FUZZ[url] — and both replays must reproduce the
    original (code, path) results.
    """
    if not expected_list:
        return
    # Use the public tempfile API instead of the private
    # _get_candidate_names()/_get_default_tempdir() internals.
    fd, filename = tempfile.mkstemp()
    os.close(fd)  # wfuzz opens the file itself; we only need the name
    try:
        # Wfuzz results, persisted to `filename` via the `save` option.
        with wfuzz.FuzzSession(url=url, **dict(params, save=filename)) as s:
            fuzzed = s.fuzz() if payloads is None else s.get_payloads(payloads).fuzz()
            ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]

        # Repeat the test by performing the same saved requests.
        with wfuzz.FuzzSession(payloads=[("wfuzzp", dict(fn=filename))], url="FUZZ") as s:
            same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
        self.assertEqual(sorted(ret_list), sorted(same_list))

        # Repeat the test performing the FUZZ[url] saved requests.
        with wfuzz.FuzzSession(payloads=[("wfuzzp", dict(fn=filename))], url="FUZZ[url]") as s:
            same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
        self.assertEqual(sorted(ret_list), sorted(same_list))
    finally:
        # Don't leak the temp file, even when an assertion fails.
        if os.path.exists(filename):
            os.remove(filename)
def test(self):
    """Export the session as a recipe, then re-run using only that recipe.

    A session built solely from the exported recipe file must reproduce the
    original session's (code, path) results.
    """
    # Use the public tempfile API instead of the private
    # _get_candidate_names()/_get_default_tempdir() internals.
    fd, filename = tempfile.mkstemp()
    os.close(fd)  # only the name is needed; export_to_file reopens it
    try:
        # Wfuzz results
        with wfuzz.FuzzSession(url=url, **params) as s:
            s.export_to_file(filename)
            fuzzed = s.fuzz() if payloads is None else s.get_payloads(payloads).fuzz()
            ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]

        # Repeat the test with the recipe as the only parameter.
        with wfuzz.FuzzSession(recipe=filename) as s:
            if payloads is None:
                same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
            else:
                same_list = [(x.code, x.history.urlparse.path) for x in s.get_payloads(payloads).fuzz()]
        self.assertEqual(sorted(ret_list), sorted(same_list))
    finally:
        # Don't leak the recipe file, even when an assertion fails.
        if os.path.exists(filename):
            os.remove(filename)
def fuzzing():
    """Fuzz /uploads/ for a PHP file and hand the last hit to exploit().

    Previously this crashed with IndexError when the fuzz produced no result
    or the result contained no quoted string; both cases now return quietly.
    The session is closed via the context manager instead of being leaked.
    """
    last_hit = None
    with wfuzz.FuzzSession(url=baseURL + "/uploads/FUZZ.php") as fuz:
        # Keep only the last non-404 result, as the original loop did.
        for req in fuz.fuzz(hc=[404], payloads=[("file", dict(fn=fuzzFile))]):
            last_hit = req
    if last_hit is None:
        return  # nothing found to exploit
    # First double-quoted token of the result's string form is the file name.
    matches = re.findall(r'"([^"]*)"', str(last_hit))
    if not matches:
        return  # unexpected result format; nothing to extract
    filePath = matches[0] + ".php?cmd"
    exploit(baseURL + "/uploads/" + filePath)
def run_scan(pa):
    """Fuzz the googleapis sandbox Host header and record new API names.

    Writes each name not already in `pa` (previously seen) nor in this run's
    output to args.output, and returns the set of newly discovered names.
    """
    output = set()
    URL = 'https://www.googleapis.com/$discovery'
    headers = [('X-Originating-IP', '127.0.0.1'), ('X-Forwarded-For', '127.0.0.1'),
               ('X-Remote-IP', '127.0.0.1'), ('X-Remote-Addr', '127.0.0.1'),
               ('Accept', '*/*'), ('Content-Type', 'application/json'),
               ('referer', 'www.googleapis.com'),
               ('Host', 'FUZZ-FUZ2ZFUZ3Z-googleapis.sandbox.google.com')]
    # Three payload files drive the FUZZ/FUZ2Z/FUZ3Z placeholders above.
    payloads = [("file", dict(fn=args.static)), ("file", dict(fn=args.input)), ("file", dict(fn=args.end))]
    with wfuzz.FuzzSession(scanmode=True, url=URL, hc=[404, 'XXX'], headers=headers, payloads=payloads) as sess:
        for res in sess.fuzz():
            # Payload element 0 is the static prefix; join the rest as the name.
            w = "".join(res.payload[1:]).lower()
            if w not in pa and w not in output:
                output.add(w)
                args.output.write(w + "\n")
    return output
def brute_directories(self, url_fuzz, concur="1", file_name="directory_brute.txt", format='raw', follow="False"):
    """Brute-force directories on url_fuzz using the configured wordlist.

    Args:
        url_fuzz:  target URL containing the FUZZ placeholder.
        concur:    concurrency level as a string (converted to int).
        file_name: output file for results.
        format:    'raw' writes results manually; anything else defers to
                   the wfuzz printer.
        follow:    "True"/"False" string controlling redirect following.
    """
    # String flag -> bool; the extra bool() round-trip was redundant.
    follow_redirects = follow == "True"
    logger.info("Disable Brute Force")
    sess = wfuzz.FuzzSession(url=url_fuzz, printer=(file_name, format),
                             concurrent=int(concur), follow=follow_redirects)
    target = "Target: {0}".format(url_fuzz)
    if format == 'raw':
        # `with` guarantees the file is closed even if fuzzing raises.
        with open(file_name, 'w+') as f:
            f.write(target + '\n' + header + '\n')
            for req in sess.fuzz(hc=[404], payloads=[("file", dict(fn=self.dirfile))]):
                f.write(str(req) + '\n')
                logger.info(req)
    else:
        for req in sess.fuzz(hc=[404], payloads=[("file", dict(fn=self.dirfile))]):
            logger.info(req)
def test(self):
    """Fuzz and compare (code, path) pairs against the expectation.

    When extra_params is set, the fuzz is repeated with those options and
    the two runs must agree; otherwise results must match expected_list.
    """
    # Wfuzz results
    with wfuzz.FuzzSession(url=url, **params) as s:
        # `is None` instead of `== None`; comprehensions instead of
        # map(lambda ...) — sorted() consumed the map only once, so the
        # materialized lists are behaviorally equivalent and clearer.
        if payloads is None:
            fuzzed = s.fuzz()
        else:
            fuzzed = s.get_payloads(payloads).fuzz()
        ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]

    # Repeat the test with extra params if specified and check against them.
    if extra_params:
        with wfuzz.FuzzSession(url=url) as s:
            same_list = [(x.code, x.history.urlparse.path)
                         for x in s.get_payloads(payloads).fuzz(**extra_params)]
        self.assertEqual(sorted(ret_list), sorted(same_list))
    else:
        self.assertEqual(sorted(ret_list), sorted(expected_list))
def brute_force(filename):
    """POST-fuzz every entry of `filename`, log to CSV, return the hit count.

    Fixes: Python-2 `print r` statement, and the final summary which passed
    %-style arguments to print() (printing a tuple instead of formatting).
    """
    found = 0
    with wfuzz.FuzzSession(scanmode=True, url="FUZZ", hc=[404, 301, 302, 'XXX', '-01'],
                           method="POST", data="{}",
                           payloads=[("file", dict(fn=str(filename)))],
                           printer=(filename + ".post.out", "csv")) as sess:
        for r in sess.fuzz():
            found += 1
            print(r)
        print("Took %d seconds, made %d requests." % (int(sess.stats.totaltime), sess.stats.processed()))
    return found
def run_yolo(headers, payloads):
    """Fuzz the Google APIs discovery endpoint and queue every 200 hit.

    For each matched response, the joined payload parts form the API name
    and a record (key, referer, api, content) is put on message_queue.
    """
    discovery_url = 'http://www.googleapis.com/$discovery/rest'
    logger.info(headers)
    logger.debug(payloads)
    session_options = dict(scanmode=True, url=discovery_url, sc=[200],
                           headers=headers, payloads=payloads)
    with wfuzz.FuzzSession(**session_options) as sess:
        logger.info("YOLO!")
        for res in sess.fuzz():
            api_name = "-".join(part.content for part in res.payload)
            message_queue.put({
                "key": headers[1][1],
                "referer": headers[2][1],
                "api": api_name,
                "content": res.history.content,
            })
def brute_directories(self, url_fuzz, concur="1", file_name="directory_brute.json", follow="False"):
    """Brute-force directories on url_fuzz, writing JSON results to file_name.

    Args:
        url_fuzz:  target URL containing the FUZZ placeholder.
        concur:    concurrency level as a string (converted to int).
        file_name: JSON output file for the wfuzz printer.
        follow:    "True"/"False" string controlling redirect following.
    """
    # BUG FIX: bool("False") is True — any non-empty string is truthy, so the
    # old bool(follow) always enabled redirect following. Compare the string.
    follow_redirects = follow == "True"
    sess = wfuzz.FuzzSession(url=url_fuzz, printer=(file_name, "json"),
                             concurrent=int(concur), follow=follow_redirects)
    for req in sess.fuzz(hc=[404], payloads=[("file", dict(fn=self.dirfile))]):
        logger.info(req)
def brute_force(input_file):
    """Fuzz GCS bucket names from input_file and report status per bucket.

    403/200/400 responses are printed (colored) and collected; optionally the
    bucket's IAM test-permissions are listed and its files reported.
    Fixes the Python-2 `except (...), e` syntax (SyntaxError on Python 3).
    """
    results = []
    h = [("Host", "FUZZ")]
    with wfuzz.FuzzSession(scanmode=True, url="https://storage.googleapis.com", hc=[404],
                           payloads=[("file", dict(fn=input_file))], headers=h) as sess:
        for r in sess.fuzz():
            # red = exists but forbidden, blue = listable, yellow = bad request
            if r.code == 403:
                print(colored(r.description, "red"))
                results.append(r.description)
            elif r.code == 200:
                print(colored(r.description, "blue"))
                results.append(r.description)
            elif r.code == 400:
                print(colored(r.description, "yellow"))
                results.append(r.description)
            if args.list_perms and r.code != 404:
                t = requests.get(
                    "https://content.googleapis.com/storage/v1/b/" + r.description
                    + "/iam/testPermissions?permissions=storage.objects.get&permissions=storage.buckets.delete"
                    + "&permissions=storage.buckets.getIamPolicy&permissions=storage.buckets.setIamPolicy&permissions=storage.buckets.update"
                    + "&permissions=storage.objects.delete&permissions=storage.objects.getIamPolicy&permissions=storage.objects.create"
                    + "&permissions=storage.objects.list&permissions=storage.objects.setIamPolicy&permissions=storage.objects.update"
                )
                if t.status_code == 200:
                    perms = t.json().get("permissions")
                    if perms:
                        for perm in perms:
                            print(perm)
            if r.code != 400 and r.code != -1:
                if args.list_files:
                    try:
                        report_files_buckets(r.description)
                    except (exceptions.Forbidden, exceptions.NotFound, KeyboardInterrupt):
                        # best-effort listing; skip buckets we cannot read
                        pass
def run_wfuzz(wfuzz_opts):
    """Execute the wfuzz function (with standard output) and gather the results for later

    Args:
        wfuzz_opts (Dict): Input to the wfuzz application

    Returns:
        results (Dict): Dict of dictionary responses containing urls, code, content
    """
    results = dict()
    # BUG FIX: the body referenced undefined `wfuzz_options`; the parameter
    # is `wfuzz_opts` — the old code raised NameError on every call.
    with wfuzz.FuzzSession(**wfuzz_opts) as wf:
        for ind, r in enumerate(wf.fuzz()):
            print(r)
            results[ind] = {
                "code": r.code,
                "url": r.url,
                # Strip markup so only the visible page text is stored.
                "content": Soup(r.content, features="html.parser").text,
            }
    return results
def fuzz_first_step(page, specification, specification_codes, domain):
    """
    Fuzz uri, look for undeclared pages, use authorize() to set session,
    use print_fuzz_data() to print data about fuzzing,
    use recursion to fuzz all pages
    :param page: dictionary that contain data about page
    :type: dict
    :param specification: string that can be '', 'hc', 'sc', used to specify wfuzz
    :type: str
    :param specification_codes: list of integers contained status codes,
        used in print_fuzz_data to specify specification of wfuzz
    :type: list
    :param domain: string contained domain name of server
    :type: str
    :return: none
    """
    session = authorize(domain)
    url = domain
    if not page['is_changeable']:
        # Static URI: prefer baseUri, fall back to uri when absent.
        try:
            url += page['baseUri']
        except KeyError:
            url += page['uri']
        url += '/FUZZ'
    else:
        # Templated URI: substitute the {param} segment with a random value
        # matching its declared type, then fuzz one level below it.
        url += page['uri'].replace(
            page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
            rstr.xeger(page['type'])) + '/FUZZ'
    # NOTE(review): 'rleve' looks like a typo for wfuzz's 'rlevel' (recursion
    # depth) option, and 'depth' is not an int — confirm against wfuzz docs.
    fuzz_sess = wfuzz.FuzzSession(url=url,
                                  cookie=convert_cookies_format(
                                      session.cookies.get_dict()),
                                  method='GET',
                                  rleve='depth',
                                  payloads=[("file", dict(fn="big.txt"))])
    print_fuzz_data(page, specification, specification_codes, fuzz_sess, url)
    # Recurse into every child page.
    for i in page['pages']:
        fuzz_first_step(i, specification, specification_codes, domain)
def run_scan(apikey, referer, output):
    """Fuzz the Google APIs discovery endpoint for prod and sandbox hosts.

    Saves every discovery document found (via save_discovery) under
    `output`/prod and `output`/sandbox respectively.
    """
    URL = 'https://www.googleapis.com/$discovery/rest?key=' + apikey
    # Spoofing headers to look like an internal/local client.
    headers = [('X-Originating-IP', '127.0.0.1'), ('X-Forwarded-For', '127.0.0.1'),
               ('X-Remote-IP', '127.0.0.1'), ('X-Remote-Addr', '127.0.0.1'),
               ('Accept', '*/*'), ('Content-Type', 'application/json')]
    headers.append(("referer", referer))
    with wfuzz.FuzzSession(scanmode=True, url=URL, sc=[200]) as sess:
        # prod: single payload file drives the FUZZ placeholder in Host.
        headers.append(("Host", "FUZZ.googleapis.com"))
        payloads = [("file", dict(fn=args.input))]
        for res in sess.fuzz(headers=headers, payloads=payloads):
            save_discovery(output + "/prod", res)
        # sandbox: two payload files drive FUZ2Z and FUZZ.
        # NOTE(review): this appends a SECOND Host header while the prod one
        # is still in the list — confirm wfuzz uses the last Host entry.
        headers.append(("Host", "FUZ2Z-FUZZ-googleapis.sandbox.google.com"))
        payloads = [("file", dict(fn=args.input)), ("file", dict(fn=args.static))]
        for res in sess.fuzz(headers=headers, payloads=payloads):
            save_discovery(output + "/sandbox", res)
import wfuzz

# BUG FIX: the scheme was "http:/" (single slash), which is not a valid URL.
s = wfuzz.FuzzSession(url="http://testphp.vulnweb.com/FUZZ")
# Print every path from the common wordlist that does not return 404.
for r in s.fuzz(hc=[404], payloads=[("file", dict(fn="../wfuzz.code/wordlist/general/common.txt"))]):
    print(r)
def brute_force(input_file):
    """HEAD-fuzz GCS bucket names from input_file and report what exists.

    Fixes: `except requests.exceptions:` caught a *module* (TypeError when an
    exception actually fired) — narrowed to RequestException; the Python-2
    `except (...), e` syntax; and two print() calls that passed %-style
    arguments without formatting (printing tuples).
    """
    results = []
    h = [("Host", "FUZZ")]
    with wfuzz.FuzzSession(scanmode=True, method="HEAD", url="https://storage.googleapis.com",
                           hc=[404], payloads=[("file", dict(fn=input_file))], headers=h) as sess:
        for r in sess.fuzz():
            # red = exists but forbidden, blue = listable, yellow = bad request
            if r.code == 403:
                print(colored(r.description, "red"))
                results.append(r.description)
            elif r.code == 200:
                print(r.history.raw_content)
                print(colored(r.description, "blue"))
                results.append(r.description)
            elif r.code == 400:
                print(colored(r.description, "yellow"))
                try:
                    t = requests.get("https://www.googleapis.com/storage/v1/b/" + r.description)
                    if t.status_code == 200:
                        print(t.text)
                except requests.exceptions.RequestException:
                    pass  # best-effort metadata lookup
            if args.list_perms and r.code != 404:
                try:
                    t = requests.get(
                        "https://content.googleapis.com/storage/v1/b/" + r.description
                        + "/iam/testPermissions?permissions=storage.objects.get&permissions=storage.buckets.delete"
                        + "&permissions=storage.buckets.getIamPolicy&permissions=storage.buckets.setIamPolicy&permissions=storage.buckets.update"
                        + "&permissions=storage.objects.delete&permissions=storage.objects.getIamPolicy&permissions=storage.objects.create"
                        + "&permissions=storage.objects.list&permissions=storage.objects.setIamPolicy&permissions=storage.objects.update"
                    )
                    if t.status_code == 200:
                        perms = t.json().get("permissions")
                        if perms:
                            print("List of permissions: %s." % perms)
                except requests.exceptions.RequestException:
                    pass  # best-effort permission probe
            if r.code != 400 and r.code != -1:
                t = requests.get("https://" + r.description)  # + ".appspot.com")
                if not t.status_code == 404:
                    print("Get https://%s.appspot.com: %s" % (r.description, t.status_code))
                if args.list_files:
                    try:
                        report_files_buckets(r.description)
                    except (TypeError, requests.exceptions.RequestException,
                            exceptions.Forbidden, exceptions.NotFound,
                            exceptions.ServiceUnavailable, KeyboardInterrupt):
                        pass  # best-effort file listing
def fuzz_dirs(self):
    """Yield the URL of every discovered (non-404) path under the target."""
    session = wfuzz.FuzzSession(url=f"{self.ip_add}FUZZ")
    wordlist = [("file", dict(fn=self.wl))]
    for result in session.fuzz(hc=[404], payloads=wordlist):
        yield result.url
def fuzz_second_step(page, specification, specification_codes, domain):
    """
    Fuzz parameters, look for undeclared status codes, use authorize() to set session,
    use print_fuzz_data() to print data, about fuzzing
    use recursion to fuzz all pages
    :param page: dictionary that contain data about page
    :type: dict
    :param specification: string that can be '', 'hc', 'sc', used to specify wfuzz
    :type: str
    :param specification_codes: list of integers contained status codes,
        used in print_fuzz_data to specify specification of wfuzz
    :type: list
    :param domain: string contained domain name of server
    :type: str
    :return: none
    """
    session = authorize(domain)
    for method in page['methods']:
        print(method['method'])
        # --- GET, static URI: fuzz each query parameter's value in turn ---
        if method['method'] == 'get' and not page['is_changeable']:
            params = method['queryParameters']
            for item in params:
                uri = urllib.parse.quote(parse_params(params, item), safe='=&~._')
                url = domain + page['uri'] + '?' + uri + item['name'] + '=FUZZ'
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    method='GET').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'])
        # --- GET, templated URI: fuzz each query param with a random path
        # value, then fuzz the {param} path segment itself ---
        elif method['method'] == 'get' and page['is_changeable']:
            params = method['queryParameters']
            for item in params:
                uri = urllib.parse.quote(parse_params(params, item), safe='=&~._')
                url = domain + page['uri'].replace(
                    page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                    rstr.xeger(types[page['type']])
                ) + '?' + uri + item['name'] + '=FUZZ'
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    method='GET').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'])
            uri = urllib.parse.quote(parse_params(params), safe='=&~._')
            url = domain + page['uri'].replace(
                page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                'FUZZ') + '?' + uri
            fuzz_sess = wfuzz.FuzzSession(url=url,
                                          cookie=convert_cookies_format(
                                              session.cookies.get_dict()),
                                          method='GET').get_payload(req_types)
            print_fuzz_data(page, specification, specification_codes,
                            fuzz_sess, url, method['method'])
        # --- POST, static URI: fuzz query params, then body params ---
        elif method['method'] == 'post' and not page['is_changeable']:
            params_body = method['body']['properties']
            params_query = method['queryParameters']
            for item in params_query:
                uri = urllib.parse.quote(parse_params(params_query, item), safe='=&~.')
                url = domain + page['uri'] + '?' + uri + item['name'] + '=FUZZ'
                postdata = parse_params(params_body)[:-1]
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='POST').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
            for item in params_body:
                uri = urllib.parse.quote(parse_params(params_query), safe='=&~.')
                url = domain + page['uri'] + '?' + uri
                postdata = parse_params(params_body, item) + item['name'] + '=FUZZ'
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='POST').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
        # --- POST, templated URI: fuzz body params, query params, then the
        # {param} path segment itself ---
        elif method['method'] == 'post' and page['is_changeable']:
            params_body = method['body']['properties']
            params_query = method['queryParameters']
            for item in params_body:
                uri = urllib.parse.quote(parse_params(params_query), safe='=&~.')
                url = domain + page['uri'].replace(
                    page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                    rstr.xeger(types[page['type']])) + '?' + uri
                postdata = parse_params(params_body, item) + item['name'] + '=FUZZ'
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='POST').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
            for item in params_query:
                uri = urllib.parse.quote(parse_params(params_query, item), safe='=&~.')
                url = domain + page['uri'].replace(
                    page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                    rstr.xeger(types[page['type']])
                ) + '?' + uri + item['name'] + '=FUZZ'
                postdata = parse_params(params_body)
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='POST').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
            uri = urllib.parse.quote(parse_params(params_query), safe='=&~.')
            url = domain + page['uri'].replace(
                page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                'FUZZ') + '?' + uri
            postdata = parse_params(params_body)
            fuzz_sess = wfuzz.FuzzSession(url=url,
                                          cookie=convert_cookies_format(
                                              session.cookies.get_dict()),
                                          postdata=postdata,
                                          method='POST').get_payload(req_types)
            print_fuzz_data(page, specification, specification_codes,
                            fuzz_sess, url, method['method'], postdata)
        # --- PUT, static URI: body is optional (KeyError -> empty) ---
        elif method['method'] == 'put' and not page['is_changeable']:
            try:
                params_body = method['body']['properties']
            except KeyError:
                params_body = {}
            params_query = method['queryParameters']
            for item in params_query:
                uri = urllib.parse.quote(parse_params(params_query, item), safe='=&~.')
                url = domain + page['uri'] + '?' + uri
                postdata = parse_params(params_body)[:-1]
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='PUT').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
            for item in params_body:
                uri = urllib.parse.quote(parse_params(params_query), safe='=&~.')
                url = domain + page['uri'] + '?' + uri
                postdata = parse_params(params_body, item) + item['name'] + '=FUZZ'
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='PUT').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
        # --- PUT, templated URI: body params, query params, then the
        # {param} path segment itself ---
        elif method['method'] == 'put' and page['is_changeable']:
            print('put')
            try:
                params_body = method['body']['properties']
            except KeyError:
                params_body = {}
            params_query = method['queryParameters']
            for item in params_body:
                uri = urllib.parse.quote(parse_params(params_query), safe='=&~.')
                url = domain + page['uri'].replace(
                    page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                    rstr.xeger(types[page['type']])) + '?' + uri
                postdata = parse_params(params_body, item) + item['name'] + '=FUZZ'
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='PUT').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
            for item in params_query:
                uri = urllib.parse.quote(parse_params(params_query, item), safe='=&~.')
                url = domain + page['uri'].replace(
                    page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                    rstr.xeger(types[page['type']])
                ) + '?' + uri + item['name'] + '=FUZZ'
                postdata = parse_params(params_body)
                fuzz_sess = wfuzz.FuzzSession(
                    url=url,
                    cookie=convert_cookies_format(session.cookies.get_dict()),
                    postdata=postdata, method='PUT').get_payload(req_types)
                print_fuzz_data(page, specification, specification_codes,
                                fuzz_sess, url, method['method'], postdata)
            uri = urllib.parse.quote(parse_params(params_query), safe='=&~.')
            url = domain + page['uri'].replace(
                page['uri'][page['uri'].index('{'):page['uri'].index('}') + 1],
                'FUZZ') + '?' + uri
            postdata = parse_params(params_body)
            fuzz_sess = wfuzz.FuzzSession(url=url,
                                          cookie=convert_cookies_format(
                                              session.cookies.get_dict()),
                                          postdata=postdata,
                                          method='PUT').get_payload(req_types)
            print_fuzz_data(page, specification, specification_codes,
                            fuzz_sess, url, method['method'], postdata)
        # --- DELETE: fuzz the last path segment ---
        elif method['method'] == 'delete':
            url = domain + page['uri'].replace(page['relativeUri'], '/FUZZ')
            fuzz_sess = wfuzz.FuzzSession(
                url=url,
                cookie=convert_cookies_format(session.cookies.get_dict()),
                method='DELETE').get_payload(req_types)
            print_fuzz_data(page, specification, specification_codes,
                            fuzz_sess, url, method['method'])
    # Recurse into every child page.
    for item in page['pages']:
        fuzz_second_step(item, specification, specification_codes, domain)