def run(self) -> None:
    """Parse the buffered request in ``self.req`` and hand the client
    connection to the matching proxy routine.

    CONNECT requests are tunnelled via ``https_proxy`` (default port 443);
    everything else goes through plain ``proxy`` (default port 80). Any
    parsing/dispatch failure is printed and swallowed so one bad request
    cannot kill the handler.
    """
    parser = HttpParser()
    try:
        parser.execute(self.req, len(self.req))
        method = parser.get_method()
        target = parser.get_url()

        # Strip a leading "scheme://" from the request target, if present.
        scheme_at = target.find('://')
        remainder = target if scheme_at == -1 else target[scheme_at + 3:]

        # An explicit port only counts when the ':' appears before the path.
        colon_at = remainder.find(':')
        slash_at = remainder.find('/')
        if slash_at == -1:
            slash_at = len(remainder)
        if colon_at != -1 and colon_at < slash_at:
            port = int(remainder[colon_at + 1:slash_at])
        else:
            port = 443 if method == "CONNECT" else 80

        # Host comes from the Host header; drop any ":port" suffix.
        host = parser.get_headers()['host']
        sep = host.find(':')
        if sep != -1:
            host = host[:sep]

        if method == "CONNECT":
            https_proxy(host, port, self.client)
        else:
            proxy(host, port, self.client, self.req)
    except Exception as e:
        print(e)
def componentShown(self, e):
    """Swing visibility callback: reposition the divider and, when flagged,
    rebuild the request table from the recorded messages.

    Each row is ``[host, method, url]`` extracted by parsing the raw request
    bytes of every stored message. Errors are printed, never raised, so the
    UI callback cannot blow up the event thread.
    """
    self._split_pane_horizontal.setDividerLocation(0.25)
    # populate the table with the selected requests\response
    try:
        if self._reload_table:
            print("reload")
            rows = []  # rebuilt from scratch (not too cool but quick)
            for entry in self._messages:
                message = entry[0]
                raw_request = converter._byte_array_to_string(
                    message.getRequest())
                parser = HttpParser()
                parser.execute(raw_request, len(raw_request))
                rows.append([
                    message.getHttpService().getHost(),
                    parser.get_method(),
                    parser.get_url(),
                ])
            self._table_data = rows
            self._table.getModel().setDataVector(self._table_data,
                                                 self._columns_names)
            self._reload_table = False
    except Exception as e:
        print(e)
def parse_start_string(con, data):
    """Parse a raw HTTP request and dispatch the connection to a proxy.

    Extracts method, host and port from the request in ``data`` and hands
    ``con`` to ``https_proxy`` for CONNECT requests (default port 443) or
    to ``proxy`` otherwise (default port 80).

    :param con:  client connection object passed through to the proxy.
    :param data: raw request bytes/str fed to ``HttpParser``.
    """
    parser = HttpParser()
    try:
        parser.execute(data, len(data))
        url = parser.get_url()
        method = parser.get_method()

        # Drop a leading "scheme://" from the request target, if any.
        http_pos = url.find('://')
        temp = url if http_pos == -1 else url[(http_pos + 3):]

        # A port is only explicit when ':' occurs before the first '/'.
        port_pos = temp.find(':')
        host_pos = temp.find('/')
        if host_pos == -1:
            host_pos = len(temp)
        if port_pos == -1 or host_pos < port_pos:
            port = 443 if method == "CONNECT" else 80
        else:
            port = int(temp[port_pos + 1:host_pos])

        # Host from the Host header, stripped of any ":port" suffix.
        host = parser.get_headers()['host']
        port_ind = host.find(':')
        if port_ind != -1:
            host = host[:port_ind]

        if method == "CONNECT":
            https_proxy(host, port, con)
        else:
            proxy(host, port, con, data)
    except Exception as e:
        # Was a fully silent swallow (`pass` with the print commented out),
        # which hides malformed requests entirely; log like the sibling
        # handlers do while still keeping the server alive.
        print(e)
def understand_request(self, data):
    """Parse a raw HTTP request and route it to the matching handler.

    Returns an HTTP-status-style string for error/unsupported input, or the
    selected handler's return value for a recognised JSON POST path.

    :param data: raw request bytes fed to ``HttpParser``.
    """
    parser = HttpParser()
    try:
        parser.execute(data, len(data))
        content_type = parser.get_headers()["CONTENT-TYPE"]
        method = parser.get_method()
        body = str(parser.recv_body(), "utf-8")
        path = parser.get_path()
    except Exception:
        # Was a bare `except:` whose return line crashed with a TypeError
        # (str + tuple concatenation); format the exception into the
        # response string instead.
        print(sys.exc_info())
        return '400: Bad Request: {}'.format(sys.exc_info()[1])
    req = myRequest(content_type, method, body, path)
    req.toPrint()
    if req._data == '':  # no request body supplied
        return '204: No Content'
    if req._content_type != 'application/json':
        return '501: Not Implemented'
    if req._method != 'POST':
        return '501: Not Implemented'
    if req._path == '/users/add':
        return self.users_add(req)
    if req._path == '/chats/add':
        return self.chats_add(req)
    if req._path == '/messages/add':
        return self.messages_add(req)
    if req._path == '/chats/get':
        return self.chats_get(req)
    if req._path == '/messages/get':
        return self.messages_get(req)
    # NOTE(review): unknown paths fall through and return None — probably
    # should be a '404: Not Found' string; confirm caller expectations
    # before changing.
def tx_req_from_raw_request(filename):
    """Replay a plaintext HTTP request stored in *filename* over HTTPS.

    Parses the raw request, rebuilds the URL from the HOST header and path,
    re-issues it with ``requests`` and dumps the full exchange.

    :param filename: path to a file containing the raw HTTP request bytes.
    """
    # Removed the dead `headers_raw = ""` str-initialisation (immediately
    # overwritten with bytes) and the pointless single-argument
    # os.path.join(), which is an identity call.
    with open(filename, "rb") as fr:
        headers_raw = fr.read()
    hp = HttpParser()
    nparsed = hp.execute(headers_raw, len(headers_raw))
    print("{} len={} parsed={}".format(filename, len(headers_raw), nparsed))
    headers = dict(hp.get_headers())
    body = hp.recv_body()
    # assumes the parser upper-cases header names ("HOST") — TODO confirm
    url = f'''https://{headers.get("HOST", "")}{hp.get_path()}'''
    method = hp.get_method().lower()
    resp = requests.request(method=method, url=url, headers=headers, data=body)
    print(resp_dump.dump_all(resp))
    print("\n\n")
class HttpProtocolHandler:
    """Per-connection HTTP proxy handler.

    Owns the client socket, a request parser and a response parser, derives
    the upstream host/port from the request, dispatches CONNECT requests to
    ``https_proxy`` and everything else to ``proxy``, and can emit a
    one-line access log for the exchange.
    """

    # Canned reply sent once a CONNECT tunnel has been set up.
    PROXY_TUNNEL_ESTABLISHED_RESPONSE_PKT = build_http_response(
        200, reason=b'Connection established')

    def __init__(self, client_conn: socket.socket, client_addr, flags, req):
        """Capture the client connection plus the already-read request."""
        self.start_time: float = time.time()
        self.client = client_conn
        self.req = req
        self.client_addr = client_addr  # host and socket_fd
        self.flags = flags
        self.request_parser = HttpParser(0)   # 0 - parse only requests
        self.response_parser = HttpParser(1)  # 1 - parse only responses
        self.total_response_size: int = 0
        self.upstream: Optional[urlparse.SplitResultBytes] = None
        self.host = None
        self.port = None
        self.upstream_url = None
        self.server: Optional[socket.socket] = None

    def parse_url(self, parser):
        """Derive ``(host, port)`` from the parser's request target.

        Also populates ``self.upstream`` (via ``urlsplit``) and
        ``self.upstream_url``. An explicit port is honoured only when the
        ':' precedes the first '/'; otherwise 443 is used for CONNECT and
        DEFAULT_HTTP_PORT for everything else.
        """
        target = parser.get_url()
        method = parser.get_method()

        # Drop any leading "scheme://".
        scheme_end = target.find('://')
        if scheme_end != -1:
            target = target[scheme_end + 3:]

        colon = target.find(':')
        slash = target.find('/')
        if slash == -1:
            slash = len(target)
        if colon != -1 and colon < slash:
            port = int(target[colon + 1:slash])
        else:
            port = 443 if method == "CONNECT" else DEFAULT_HTTP_PORT

        # Truncate at the first ':' (removes port and anything after it).
        if colon != -1:
            target = target[:colon]

        self.upstream = urlparse.urlsplit('http://' + target + '/')
        self.upstream_url = self.build_upstream_relative_path()

        # hostname from urlsplit; the ':' strip below mirrors the original
        # code even though urlsplit hostnames never carry a port.
        host = self.upstream.hostname
        sep = host.find(':')
        if sep != -1:
            host = host[:sep]
        return host, port

    def run(self) -> None:
        """Parse ``self.req`` and hand the client socket to a proxy routine.

        CONNECT goes to ``https_proxy`` (default 443), anything else to
        ``proxy`` (default 80). Failures are printed and swallowed.
        """
        parser = HttpParser()
        try:
            parser.execute(self.req, len(self.req))
            method = parser.get_method()
            target = parser.get_url()

            scheme_at = target.find('://')
            remainder = target if scheme_at == -1 else target[scheme_at + 3:]

            colon_at = remainder.find(':')
            slash_at = remainder.find('/')
            if slash_at == -1:
                slash_at = len(remainder)
            if colon_at != -1 and colon_at < slash_at:
                port = int(remainder[colon_at + 1:slash_at])
            else:
                port = 443 if method == "CONNECT" else 80

            host = parser.get_headers()['host']
            sep = host.find(':')
            if sep != -1:
                host = host[:sep]

            if method == "CONNECT":
                https_proxy(host, port, self.client)
            else:
                proxy(host, port, self.client, self.req)
        except Exception as e:
            print(e)

    def access_log(self):
        """Print one log line for a completed non-CONNECT exchange."""
        server_host = self.upstream.hostname
        server_port = self.upstream.port if self.upstream.port else DEFAULT_HTTP_PORT
        connection_time_ms = (time.time() - self.start_time) * 1000
        method = self.request_parser.get_method()
        # CONNECT tunnels are deliberately not logged.
        if method != httpMethods.CONNECT and method:
            print('pid:%s | %s:%s - %s %s:%s%s - %s %s - %s bytes - %.2f ms' %
                  (str(getpid()), self.client_addr[0], self.client_addr[1],
                   method, server_host, server_port,
                   self.request_parser.get_path(),
                   self.response_parser.get_status_code(),
                   self.response_parser.get_errno(),
                   self.total_response_size, connection_time_ms))
def _parseHttpRequestResponse(model, http_request, http_response, protocol):
    """Parse an HTTP request/response pair and generate its ASLan++ translation.

    Builds the ASLan++ webapp/client branches and the concretization mapping
    for one exchange, registers the tag on *model* and returns it.

    :param model:         target model object (pages, branches, taglist, ...).
    :param http_request:  raw request bytes/str.
    :param http_response: raw response bytes/str.
    :param protocol:      scheme string ("http"/"https") used for the URL.
    :returns: the generated tag string, or None when parsing failed.
    """
    global i_tag
    global var_i
    try:
        request_parser = HttpParser()
        request_parser.execute(http_request, len(http_request))
        var_i = 0
        # concretization details
        concrete = dict()
        # concretization TAG
        returntag = "tag{}".format(i_tag)

        # URL for concretization
        url = (protocol + "://" + request_parser.get_headers()['Host'] +
               "/" + request_parser.get_url())
        concrete['url'] = url

        # page identifier: replace non-alphanumerics with '_', then strip
        # any leading non-letters (ASLan++ constants must start with a char)
        page = re.sub("^[^a-z]*", "",
                      re.sub("[^a-zA-Z0-9]", "_", urlparse(url).path))
        model._page_constants.add(page)

        # method for concretization
        method = request_parser.get_method()
        concrete['method'] = method

        # query strings
        post_query_string = ""
        get_query_string = request_parser.get_query_string()
        headers = request_parser.get_headers()
        # BUG FIX: the membership test used "Content-type" while the index
        # used 'Content-Type', so the two could never agree; use one key.
        if (method == "POST" and "Content-Type" in headers
                and "multipart/form-data" not in headers['Content-Type']):
            # POST parameters, multipart/form-data not yet supported
            post_query_string = request_parser.recv_body()
        if ("Content-Type" in headers
                and "multipart/form-data" in headers['Content-Type']):
            print("multipart/form-data not yet supported")

        # per request/response accumulators
        aslanpp_params_no_questionmark = ""
        aslanpp_params_questionmark = ""
        aslanpp_cookie_no_questionmark = ""
        aslanpp_cookie_questionmark = ""
        aslanpp_cookie2_no_questionmark = ""
        aslanpp_cookie2_questionmark = ""

        # convert GET parameters
        if get_query_string:
            concrete_get_params = [couple.split("=")
                                   for couple in get_query_string.split("&")]
            aslanpp_no_questionmark, aslanpp_questionmark, mapping_get = \
                _parse_parameters(model, concrete_get_params)
            aslanpp_params_no_questionmark += aslanpp_no_questionmark
            aslanpp_params_questionmark += aslanpp_questionmark
            concrete['get_params'] = mapping_get

        # convert POST parameters
        if post_query_string:
            concrete_post_params = [couple.split("=")
                                    for couple in post_query_string.split("&")]
            aslanpp_no_questionmark, aslanpp_questionmark, mapping_post = \
                _parse_parameters(model, concrete_post_params)
            aslanpp_params_no_questionmark += aslanpp_no_questionmark
            aslanpp_params_questionmark += aslanpp_questionmark
            concrete['post_params'] = mapping_post

        # trim the trailing separator (5 chars) or fall back to "none"
        if aslanpp_params_no_questionmark == "":
            aslanpp_params_no_questionmark = "none"
        else:
            aslanpp_params_no_questionmark = aslanpp_params_no_questionmark[:-5]
        if aslanpp_params_questionmark == "":
            aslanpp_params_questionmark = "none"
        else:
            aslanpp_params_questionmark = aslanpp_params_questionmark[:-5]

        # convert cookies in the request
        try:
            cookie_request = request_parser.get_headers()['Cookie']
            simple_cookie = Cookie.SimpleCookie(cookie_request)
            concrete_cookie = [[item, simple_cookie[item].value]
                               for item in simple_cookie]
            cookie_no_questionmark, cookie_questionmark, cookie_mapping = \
                _parse_parameters(model, concrete_cookie)
            aslanpp_cookie_no_questionmark += cookie_no_questionmark[:-5]
            aslanpp_cookie_questionmark += cookie_questionmark[:-5]
            concrete['cookies'] = cookie_mapping
        except KeyError:
            aslanpp_cookie_no_questionmark = "none"
            aslanpp_cookie_questionmark = "none"

        # check the response
        response_parser = HttpParser()
        response_parser.execute(http_response, len(http_response))

        # Location: a 302 redirect means the client gets a different page
        try:
            location = response_parser.get_headers()['Location']
            # prepend 'p' since ASLan++ constants must start with a char
            return_page = "p{}".format(
                urlparse(location).path.partition("?")[0]
                .replace(".", "_").replace("/", "_"))
            model._page_constants.add(return_page)
        except KeyError:
            return_page = page

        # parse cookies in the response
        try:
            set_cookie_header = response_parser.get_headers()['Set-Cookie']
            simple_cookie = Cookie.SimpleCookie(set_cookie_header)
            cookies = [[item, simple_cookie[item].value]
                       for item in simple_cookie]
            aslanpp_cookie2_no_questionmark, aslanpp_cookie2_questionmark, \
                cookie2_mapping = _parse_parameters(model, cookies)
            # BUG FIX: this branch appended the *request* cookie strings
            # (cookie_no_questionmark/cookie_questionmark) — wrong data, and
            # a NameError when the request carried no Cookie header. Trim
            # this branch's own strings instead, like every other branch.
            aslanpp_cookie2_no_questionmark = aslanpp_cookie2_no_questionmark[:-5]
            aslanpp_cookie2_questionmark = aslanpp_cookie2_questionmark[:-5]
            # NOTE(review): this overwrites the request-cookie mapping under
            # the same 'cookies' key — confirm that is intended.
            concrete['cookies'] = cookie2_mapping
        except KeyError:
            aslanpp_cookie2_no_questionmark = "none"
            # BUG FIX: was the typo "non"; every other fallback uses "none".
            aslanpp_cookie2_questionmark = "none"

        model._webapp_branch += request_skeleton.format(
            page, aslanpp_params_questionmark, aslanpp_cookie_questionmark,
            returntag, return_page, "none", aslanpp_cookie2_no_questionmark,
            returntag)
        model._client_branch += client_skeleton.format(
            page, aslanpp_params_no_questionmark,
            aslanpp_cookie_no_questionmark, returntag, return_page, returntag)
        model._concretization[returntag] = concrete

        # save tag in taglist and increment the tag number
        model._taglist.add(returntag)
        i_tag += 1
        return returntag
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)