class RequestParserTest(unittest.TestCase):
    """Tests for RequestParser extraction of modified files from push payloads."""

    def setUp(self):
        # Minimal WSGI-like environment; 'wsgi.input' is mocked so each test
        # controls the raw request body via read.return_value.
        self.env = {}
        self.env['wsgi.input'] = Mock()

    def test_ignores_requests_without_json_payload(self):
        """A non-dict environment (no JSON payload) parses to None."""
        self.env = 'some_text'
        self.request_parser = RequestParser(self.env)
        self.request = self.request_parser.parse()
        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
        self.assertEqual(self.request, None)

    def test_detects_modified_files_in_single_commit(self):
        """A single-commit payload yields exactly that commit's modified files."""
        self.env['wsgi.input'].read.return_value = simple_payload
        self.request_parser = RequestParser(self.env)
        self.request = self.request_parser.parse()
        modified_files = {'file4.py', 'file3.py'}
        self.assertEqual(self.request.modified_files, modified_files)

    def test_same_modified_file_is_detected_only_once_in_multiple_commits(self):
        """Files touched in several commits appear once in the de-duplicated set."""
        self.env['wsgi.input'].read.return_value = complex_payload
        self.request_parser = RequestParser(self.env)
        self.request = self.request_parser.parse()
        modified_files = {'file4.py', 'file3.py', 'file5.py'}
        self.assertEqual(self.request.modified_files, modified_files)
def test_try_get_headers_ok():
    """try_get_headers on a well-formed raw request exposes the Host header."""
    parser = RequestParser()
    raw_request = bytearray(good_req, 'utf-8')
    result = parser.try_get_headers(raw_request)
    assert result.headers["Host"] == 'www.google.com'
def test_response_parse():
    """parse() on a well-formed raw request yields a request with the Host header."""
    parser = RequestParser()
    raw_request = bytearray(good_req, 'utf-8')
    _, result = parser.parse(raw_request)
    assert result.headers["Host"] == 'www.google.com'
def listen(env, start_response):
    """
    Basic POST request listener.

    Just forwards the request payload to RequestParser.
    Response is irrelevant, nobody's listening...

    :param env: WSGI environment dict for the incoming request
    :param start_response: WSGI callable used to begin the response
    :return: iterable containing a single empty bytes body
    """
    request_parser = RequestParser(env)
    request_parser.parse()
    response_headers = [('Content-Type', 'text/plain')]
    start_response('200 OK', response_headers)
    # PEP 3333: a WSGI application must return an iterable of *bytes*;
    # the original returned the str "" which is not a valid WSGI body.
    return [b""]
def test_parser_get_request():
    """Parsing a recorded Burp GET request yields the expected request fields."""
    parser = RequestParser("tests/request_get.txt", BurpRequestParser)
    parsed = parser.parse_text()
    fields = parsed.get_data()
    assert fields.get("method") == "GET"
    assert fields.get("path") == "/get?test=FUZZ"
    assert fields.get("http_version") == "HTTP/1.1"
    headers = fields.get("headers")
    assert headers
    assert len(headers) == 6
    assert fields.get("url") == "httpbin.org/get?test=FUZZ"
def swipe_card():
    """Drive a simulated card swipe: acquire the device, send MSR and touch
    commands, then release it."""
    parser = RequestParser('sjcwvcorafw1', 55555)
    ack = parser.send_acquire_command("401-181-390")
    print(str(ack))
    parser.send_msr_command("401-181-390", 'ALL_TRACKS', 'N', '200', '1', '0')
    time.sleep(1)
    parser.send_touch_command("401-181-390", 'S', '(25, 14)', '200', '1', '0')
    parser.send_release_command("401-181-390")
def test_detects_modified_files_in_single_commit(self):
    """A single-commit payload yields exactly that commit's modified files."""
    self.env['wsgi.input'].read.return_value = simple_payload
    self.request_parser = RequestParser(self.env)
    self.request = self.request_parser.parse()
    modified_files = {'file4.py', 'file3.py'}
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(self.request.modified_files, modified_files)
def test_same_modified_file_is_detected_only_once_in_multiple_commits(self):
    """Files touched in several commits appear once in the de-duplicated set."""
    self.env['wsgi.input'].read.return_value = complex_payload
    self.request_parser = RequestParser(self.env)
    self.request = self.request_parser.parse()
    modified_files = {'file4.py', 'file3.py', 'file5.py'}
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(self.request.modified_files, modified_files)
def contactless():
    """Drive a simulated contactless (tap) payment: acquire the device, send
    the CTLS command, then release it."""
    parser = RequestParser('sjcwvcorafw1', 55555)
    ack = parser.send_acquire_command("401-181-390")
    print(str(ack))
    parser.send_ctls_command("401-181-390", "2", "F", "1", "1", "0")
    time.sleep(1)
    parser.send_release_command("401-181-390")
def chip_insert():
    """Drive a simulated chip (SCR) insert: acquire the device, send the SCR
    command, then release it."""
    parser = RequestParser('sjcwvcorafw1', 55555)
    ack = parser.send_acquire_command("401-181-390")
    print(str(ack))
    parser.send_scr_command("401-181-390", '1', 'F', '6', '1', '0')
    time.sleep(1)
    parser.send_release_command("401-181-390")
def get_requests_important_items(self, requests):
    """Extract the important entries from a mapping of requests.

    For each (request_type, request) pair, iterates the request's items
    (or the request itself when it is not a dict) and collects either the
    gatherer's parsed result (when the gatherer implements parse_response)
    or the raw entry.

    :param requests: mapping of request_type -> request payload
        (payload may be a dict or another iterable — TODO confirm shape)
    :return: list of parsed results / raw entries
    """
    # NOTE(review): `parsed` starts as a RequestParser instance and is grown
    # with `+=` (result objects and str(entry)) but is never returned or read
    # afterwards — presumably RequestParser defines __iadd__ with side effects;
    # verify, otherwise this accumulator looks dead.
    parsed = RequestParser()
    output = []
    for request_type, request in requests.items():
        iteration_type = request
        if isinstance(request, dict):
            iteration_type = request.items()
        for entry in iteration_type:
            # Parse request is implemented
            if "parse_response" in dir(self._gatherer):
                result = self._gatherer.parse_response(request_type, entry)
                if result:
                    output.append(result)
                    parsed += result
            # Otherwise append whole entry
            else:
                if entry:
                    output.append(entry)
                    parsed += str(entry)
    return output
def webhook():
    """Dialogflow webhook endpoint: route the incoming intent to its
    matching Service handler and return the handler's JSON response."""
    print(request)
    # Get the request object
    request_parser_object = RequestParser(request)
    # Map intent display names to their Service handlers.
    intent_handlers = {
        "order_intent": Service.order_intent,
        "order_intent.no": Service.order_intent_no,
        "cancel_item_intent": Service.cancel_item_intent,
        "cancel_item_intent.continue": Service.cancel_item_intent_continue,
        "cancel_order_intent.yes": Service.cancel_order_intent_yes,
        "complete_order_intent": Service.complete_order_intent,
        "complete_order_intent.yes": Service.complete_order_intent_yes,
        "Default Welcome Intent": Service.default_welcome_intent,
        "sign_in_intent": Service.sign_in_intent,
        "Default Fallback Intent": Service.fallback_intent,
    }
    handler = intent_handlers.get(request_parser_object.intent["displayName"])
    # Unrecognized intents fall through with a None body, as before.
    response_json = handler(request_parser_object) if handler else None
    response = make_response(jsonify(response_json))
    return response
class ProxyPasser:
    """Forwards one HTTP request from a client socket to its target host and
    relays the response back, handling Content-Length and chunked bodies."""

    # Shared, stateless helpers/config for all ProxyPasser instances.
    __config = ServerConfig()
    __req_parser = RequestParser()
    __res_parser = ResponseParser()

    def __init__(self, client_socket):
        self.__client = client_socket
        self.__target = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def run(self):
        """
        Init connection client <> target
        :return:
        """
        logging.info(f"Client <> Target opened")
        raw_req = self.__client.recv(8192)
        # FIX: socket.recv() returns b'' on orderly shutdown, never None,
        # so the original `if raw_req is None` guard could never fire.
        if not raw_req:
            logging.info(f"Client terminated (RECV)")
            self.__client.close()
            return
        _, req = self.__req_parser.parse(raw_req)
        host, port = self.__req_parser.get_destination(req)
        # Read the rest of the request body according to its framing headers.
        full_body = None
        content_len = req.headers.get('Content-Length')
        if content_len:
            full_body = self._read_http_message_content_length(
                self.__client, req.body, int(content_len))
        transfer_enc = req.headers.get('Transfer-Encoding')
        if transfer_enc == 'chunked':
            full_body = self._read_http_message_chunked_encoding(
                self.__client, req.body)
        if req.method not in ['POST', 'PUT', 'PATCH']:
            # Body-less methods: forward headers only.
            raw_req = req.headers_raw
        else:
            if not full_body:
                # Body-bearing method with no readable body: give up.
                self.__client.close()
                return
            raw_req = req.headers_raw + full_body
        target_host_socket = self.__target
        target_host_socket.connect((host, port))
        target_host_socket.sendall(raw_req)
        raw_rsp = target_host_socket.recv(8192)
        # FIX: same recv()-returns-b'' issue as above.
        if not raw_rsp:
            logging.info(f"Server terminated (RECV)")
            self.__close_conn(target_host_socket)
            return
        orig_headers, mod_headers, body = self.__res_parser.parse(raw_rsp)
        full_body = None
        content_len = orig_headers.get('Content-Length')
        if content_len:
            full_body = self._read_http_message_content_length(
                target_host_socket, body, int(content_len))
        transfer_enc = orig_headers.get('Transfer-Encoding')
        if transfer_enc == 'chunked':
            full_body = self._read_http_message_chunked_encoding(
                target_host_socket, body)
        # FIX: when the response carries neither Content-Length nor chunked
        # framing, full_body stayed None and `mod_headers + None` raised
        # TypeError; fall back to the body bytes already received.
        if full_body is None:
            full_body = body
        final_response = mod_headers + full_body
        self.__client.sendall(final_response)
        self.__close_conn(target_host_socket)

    def _read_http_message_content_length(self, client, body_bytes, total_len):
        """Read a message body of exactly total_len bytes, continuing from the
        body bytes already received with the headers.

        :param client: socket to keep reading from
        :param body_bytes: body fragment received so far
        :param total_len: declared Content-Length
        :return: the accumulated body bytes (may be short if the peer closes)
        """
        result = body_bytes
        curr_len = len(body_bytes)
        while curr_len < total_len:
            chunk = client.recv(8192)
            if not chunk:
                break
            result += chunk
            curr_len += len(chunk)
        return result

    def _read_http_message_chunked_encoding(self, client, body_bytes):
        """Read a Transfer-Encoding: chunked message body, continuing from the
        body bytes already received with the headers.

        Parses chunk-size lines (hex, optional ';' extensions) and keeps
        receiving until the terminating zero-length chunk.

        :param client: socket to keep reading from
        :param body_bytes: body fragment received so far
        :return: the raw (still chunk-framed) body bytes
        """
        result = bytearray()
        avail_read = 0  # bytes of the current chunk still expected
        while True:
            if len(body_bytes) < 2:
                chunk = client.recv(8192)
                if not chunk:
                    break
                body_bytes += chunk
            if avail_read > 0:
                # Still inside a chunk: consume up to avail_read bytes.
                fragment = body_bytes[:avail_read]
                result += fragment
                body_bytes = body_bytes[avail_read:]
                avail_read -= len(fragment)
                if avail_read < 0:
                    avail_read = 0
            else:
                # At a chunk boundary: the next non-empty line is the hex size.
                body_decoded = body_bytes.decode('utf-8', errors='ignore').splitlines()
                chunk_line = body_decoded[0]
                if chunk_line == '':
                    chunk_line = body_decoded[1]
                length = int(chunk_line.strip().split(';')[0], 16)
                chunk_line_len = len(chunk_line)
                if length == 0:
                    # Zero-length chunk terminates the message.
                    return result + body_bytes
                result += body_bytes[:length + chunk_line_len]
                avail_read = length + chunk_line_len - len(
                    body_bytes[chunk_line_len:length + chunk_line_len])
                body_bytes = body_bytes[length + chunk_line_len:]
        return result

    def __close_conn(self, target_host_socket):
        """
        Close target and client connections
        :param target_host_socket:
        :return:
        """
        self.__client.close()
        target_host_socket.close()
        logging.info("Client <> Target closed")
class HttpServer:
    """Threaded HTTP server: accepts connections and dispatches requests to
    RequestHandler, or hands the whole connection to ProxyPasser in proxy mode."""

    # Shared, stateless parser/handler for all HttpServer instances.
    __request_parser: RequestParser = RequestParser()
    __request_processor: RequestHandler = RequestHandler()

    def __init__(self, config_name="config.json"):
        self.__config = ServerConfig(config_name)
        logging.basicConfig(filename=self.__config.log_file,
                            level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        self.thread_pool = ThreadPool()

    def run(self):
        """Binds, listens, processing HTTP requests on socket"""
        s = socket(AF_INET, SOCK_STREAM)
        s.bind((self.__config.host, self.__config.port))
        s.listen(self.__config.queue_size)
        logging.info(f'Launched at {self.__config.port}')
        while True:
            try:
                client_connection, _ = s.accept()
            except Exception as e:
                # Any accept failure shuts the server down.
                logging.info(e)
                s.close()
                break
            client_connection.settimeout(self.__config.max_req_time)
            self.thread_pool.add_task(self.__route_request, client_connection)

    def __route_request(self, client):
        """Routes request to handler if exists, then closes the connection"""
        if self.__config.proxy_pass_mode:
            __proxy_passer = ProxyPasser(client)
            __proxy_passer.run()
            return
        # keep-alive loop: serve requests until timeout, error, or close.
        while True:
            try:
                raw_request = self.__read_from_socket(client)
            except timeout:
                logging.info("Caught timeout waiting for socket connection")
                break
            except ReadSocketError:
                bad_response = Response(code=400)
                client.sendall(bad_response.get_bytes())
                client.close()
                logging.info(f'Failed to read request. Returned response'
                             f' {bad_response.code}')
                return
            req = self.__request_parser.parse(raw_request)
            if req.method == "GET":
                response_func, response = self.__request_processor.handle_get(
                    req)
                logging.info(f'Received GET {req.path}, '
                             f'returned response {response.code}')
                response_func(client=client)
            if "Connection" not in req.headers \
                    or req.headers["Connection"].lower() != "keep-alive":
                break
        client.close()

    def __read_from_socket(self, client):
        """Reads request data from socket.
        If request method or protocol are not supported, rejects it

        :param client: client socket to read from
        :raises ReadSocketError: for unsupported methods/protocols
        :return: the raw request bytes
        """
        result = bytearray()
        req = None
        head_len = 0
        total_len = None
        # NOTE(review): head_len counts *all* received bytes (headers included)
        # while total_len is the body's Content-Length — the comparison is a
        # conservative stop condition, not an exact body-length check; confirm.
        while not total_len or head_len < total_len:
            chunk = client.recv(8192)
            if not chunk:
                break
            result += chunk
            head_len += len(chunk)
            if not req:
                req = self.__request_parser.try_get_headers(result)
                if not req:
                    continue
                if req.method not in self.__config.supported_methods or \
                        req.proto not in self.__config.supported_protos:
                    logging.info(f'Received unsupported request {req}')
                    raise ReadSocketError("Request of this type not supported")
                total_len_raw = req.headers.get("Content-Length")
                if not total_len_raw:
                    break
                # FIX: Content-Length is a string; the original kept it as-is,
                # making `head_len < total_len` an int-vs-str comparison that
                # raises TypeError on the next loop pass. Convert like
                # ProxyServer.__read_from_socket does.
                total_len = int(total_len_raw)
        return result
def test_ignores_requests_without_json_payload(self):
    """A non-dict environment (no JSON payload) parses to None."""
    self.env = 'some_text'
    self.request_parser = RequestParser(self.env)
    self.request = self.request_parser.parse()
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(self.request, None)
class ProxyServer:
    """Threaded proxy server: accepts client connections and hands each one to
    a ProxyPasser; shuts its worker threads down cleanly on SIGINT."""

    # Shared, stateless parsers for all ProxyServer instances.
    __request_parser: RequestParser = RequestParser()
    __response_parser: ResponseParser = ResponseParser()

    def __init__(self, config_name="config.json"):
        self.__config = ServerConfig(config_name)
        logging.basicConfig(filename=self.__config.log_file,
                            level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        self.thread_pool = ThreadPool()

    def run(self):
        """Binds, listens, processing HTTP requests on socket"""
        # Install the SIGINT handler so Ctrl-C drains the thread pool.
        signal.signal(signal.SIGINT, self.__handle_exit)
        s = socket(AF_INET, SOCK_STREAM)
        s.bind((self.__config.host, self.__config.port))
        s.listen(self.__config.queue_size)
        logging.info(f'Launched at {self.__config.port}')
        while True:
            try:
                client_connection, _ = s.accept()
            except SystemExit as e:
                # Raised by __handle_exit via sys.exit(); close and stop.
                s.close()
                break
            except Exception as e:
                logging.info(e)
                s.close()
                break
            # client_connection.settimeout(self.__config.max_req_time)
            self.thread_pool.add_task(self.__route_request, client_connection)

    def __route_request(self, client):
        """Routes request to handler if exists, then closes the connection"""
        proxy_passer = ProxyPasser(client)
        proxy_passer.run()

    def __read_from_socket(self, sock, is_response):
        """Reads request data from socket.
        If request method or protocol are not supported, rejects it

        :param sock: socket to read from
        :param is_response: True to parse as an HTTP response, False as a request
        :return: the raw message bytes read so far
        """
        # NOTE(review): this method is not called from run()/__route_request()
        # in the visible code — possibly dead or used by code outside this view.
        result = bytearray()
        headers = None
        head_len = 0
        total_len = None
        # Read until Content-Length bytes have arrived (or the peer closes).
        while not total_len or head_len < total_len:
            chunk = sock.recv(8192)
            if not chunk:
                break
            result += chunk
            head_len += len(chunk)
            if not headers:
                headers = self.try_get_headers(result, is_response)
                if not headers:
                    continue
                total_len_raw = headers.get("Content-Length")
                if total_len_raw:
                    total_len = int(total_len_raw)
                else:
                    # No Content-Length: keep reading until the peer closes.
                    continue
        return result

    def try_get_headers(self, data, is_response):
        """Attempt to parse headers from a (possibly partial) raw message.

        :param data: raw bytes received so far
        :param is_response: selects the response vs request parser
        :return: parsed headers, or None if parsing is not yet possible
        """
        try:
            if is_response:
                headers, parsed_msg = self.__response_parser.parse(data)
            else:
                headers, parsed_msg = self.__request_parser.parse(data)
            if parsed_msg:
                return headers
        except ValueError:
            # Incomplete/garbled data — caller will retry with more bytes.
            return None

    def __handle_exit(self, signal, frame):
        """SIGINT handler: drain queued tasks, stop workers, then exit."""
        logging.info("Received SIGINT, shutting down threads...")
        print("shutting down...")
        self.thread_pool.tasks.join()
        self.thread_pool.terminate_all_workers()
        logging.info("Threads stopped")
        sys.exit(0)
def send_message():
    """Acquire the device and print the acquire-command response."""
    rp = RequestParser('sjcwvcorafw1', 55555)
    response = rp.send_acquire_command("401-181-390")
    print(str(response))
    # FIX: `print "this is test"` is Python 2 syntax and a SyntaxError under
    # Python 3 (which the rest of this file targets — it uses f-strings).
    print("this is test")
async def main(args):
    """Fuzz a request template over a numeric range of payload values.

    Parses the template request, substitutes each value from the configured
    range into the marked (FUZZ) position, sends it (optionally through a
    rotating proxy), and hands each response to the ResponseHandler.

    :param args: parsed CLI namespace — frequest, protocol, fproxies,
        stop_status, output_type, print_request, range_start/end/step, min_len
    """
    backend_parser = BurpRequestParser
    rp = RequestParser(args.frequest, backend_parser)
    request = rp.parse_text()
    request.url = args.protocol + "://" + request.url
    # Locate the fuzz marker position within the parsed request.
    key_request, subkey_request = find_world_in_request(request)
    if args.fproxies:
        ph = ProxiesHandler(args.fproxies)
    else:
        ph = None
    response_handler = ResponseHandler(
        stop_status=args.stop_status,
        output_type=args.output_type,
        print_request=args.print_request,
    )
    range_step = args.range_step or 1
    async with aiohttp.ClientSession() as session:
        for value in get_values_from_range(args.range_start, args.range_end,
                                           range_step, args.min_len):
            # Work on a copy so the template request stays pristine.
            replica_request = Request(request.get_data().copy())
            replica_request.url = request.url
            replace_world_in_request(replica_request, key_request,
                                     subkey_request, value)
            no_answer = True
            attempt_request_limit = 10
            request_count = 0
            response = None
            # Retry loop: on proxy/timeout failure, drop the proxy and retry
            # until a response arrives or the attempt limit is exceeded.
            while no_answer:
                proxy = None
                if ph:
                    if not ph.proxies:
                        print("List of working proxies is empty")
                        sys.exit()
                    proxy = ph.get_random_proxy()
                try:
                    response = await do_request(session, replica_request,
                                                proxy=proxy)
                except (aiohttp.client_exceptions.ClientProxyConnectionError,
                        asyncio.TimeoutError):
                    print(
                        f"There was failed request attempt with the proxy {proxy}"
                    )
                    if ph:
                        ph.exclude_not_working_proxy(proxy)
                else:
                    no_answer = False
                finally:
                    request_count += 1
                if no_answer and request_count > attempt_request_limit:
                    print("you have exceeded maximum request limit")
                    break
            # NOTE(review): when the retry limit is hit, response may still be
            # None here — ResponseHandler.handle is presumably None-tolerant;
            # confirm.
            await response_handler.handle(replica_request, response)