# Module path for the parser classes is an assumption; adjust to the project layout.
from parsers import BurpRequestParser, RequestParser


def test_parser_get_request():
    # Parse a saved GET request and verify every field the parser extracts.
    backend_parser = BurpRequestParser
    rp = RequestParser("tests/request_get.txt", backend_parser)
    request = rp.parse_text()
    data = request.get_data()

    assert data.get("method") == "GET"
    assert data.get("path") == "/get?test=FUZZ"
    assert data.get("http_version") == "HTTP/1.1"
    assert data.get("headers")
    assert len(data.get("headers")) == 6
    assert data.get("url") == "httpbin.org/get?test=FUZZ"
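# A plausible tests/request_get.txt fixture, assuming BurpRequestParser consumes a
# raw HTTP request as exported from Burp Suite. Only the request line and the Host
# header are implied by the assertions above; the remaining five headers are
# assumptions chosen to satisfy the expected count of six.
#
#   GET /get?test=FUZZ HTTP/1.1
#   Host: httpbin.org
#   User-Agent: Mozilla/5.0
#   Accept: */*
#   Accept-Encoding: gzip, deflate
#   Connection: close
#   Cache-Control: max-age=0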
import asyncio
import sys

import aiohttp

# Project-internal imports; module paths below are assumptions.
from parsers import BurpRequestParser, RequestParser
from request import Request, find_world_in_request, replace_world_in_request
from proxies_handler import ProxiesHandler
from response_handler import ResponseHandler
from requester import do_request, get_values_from_range


async def main(args):
    backend_parser = BurpRequestParser
    rp = RequestParser(args.frequest, backend_parser)
    request = rp.parse_text()
    request.url = args.protocol + "://" + request.url

    # Locate the FUZZ placeholder so each iteration can substitute a value into it.
    key_request, subkey_request = find_world_in_request(request)

    ph = ProxiesHandler(args.fproxies) if args.fproxies else None

    response_handler = ResponseHandler(
        stop_status=args.stop_status,
        output_type=args.output_type,
        print_request=args.print_request,
    )

    range_step = args.range_step or 1
    async with aiohttp.ClientSession() as session:
        for value in get_values_from_range(
            args.range_start, args.range_end, range_step, args.min_len
        ):
            # Mutate a copy so the parsed template request stays intact.
            replica_request = Request(request.get_data().copy())
            replica_request.url = request.url
            replace_world_in_request(replica_request, key_request, subkey_request, value)

            no_answer = True
            attempt_request_limit = 10
            request_count = 0
            response = None
            while no_answer:
                proxy = None
                if ph:
                    if not ph.proxies:
                        print("The list of working proxies is empty")
                        sys.exit()
                    proxy = ph.get_random_proxy()
                try:
                    response = await do_request(session, replica_request, proxy=proxy)
                except (
                    aiohttp.client_exceptions.ClientProxyConnectionError,
                    asyncio.TimeoutError,
                ):
                    print(f"Request attempt failed with the proxy {proxy}")
                    if ph:
                        ph.exclude_not_working_proxy(proxy)
                else:
                    no_answer = False
                finally:
                    request_count += 1
                if no_answer and request_count > attempt_request_limit:
                    print("You have exceeded the maximum request limit")
                    break

            if response is None:
                # Every attempt failed for this value; skip response handling.
                continue
            await response_handler.handle(replica_request, response)
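# A minimal sketch of a CLI entrypoint for main(), assuming the attribute names
# read above (frequest, protocol, fproxies, stop_status, output_type,
# print_request, range_start, range_end, range_step, min_len). The flag
# spellings, types, and defaults are assumptions, not the tool's actual CLI.
if __name__ == "__main__":
    import argparse

    cli = argparse.ArgumentParser(description="Fuzz a saved request over a numeric range")
    cli.add_argument("--frequest", required=True, help="file with the raw request template")
    cli.add_argument("--protocol", default="http", choices=("http", "https"))
    cli.add_argument("--fproxies", help="optional file with a list of proxies")
    cli.add_argument("--stop-status", dest="stop_status", type=int)
    cli.add_argument("--output-type", dest="output_type", default="text")
    cli.add_argument("--print-request", dest="print_request", action="store_true")
    cli.add_argument("--range-start", dest="range_start", type=int, required=True)
    cli.add_argument("--range-end", dest="range_end", type=int, required=True)
    cli.add_argument("--range-step", dest="range_step", type=int, default=1)
    cli.add_argument("--min-len", dest="min_len", type=int, default=0)

    asyncio.run(main(cli.parse_args()))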