def expected_http_body_size(
    request: request.Request,
    response: typing.Optional[response.Response] = None,
    expect_continue_as_0: bool = True
):
    """
    Args:
        - expect_continue_as_0: If true, incorrectly predict a body size of 0 for
          requests which are waiting for a 100 Continue response.

    Returns:
        The expected body length:
        - a positive integer, if the size is known in advance
        - None, if the size is unknown in advance (chunked encoding)
        - -1, if all data should be read until end of stream.

    Raises:
        exceptions.HttpSyntaxException, if the content length header is invalid
    """
    # Determine response size according to
    # http://tools.ietf.org/html/rfc7230#section-3.3
    if not response:
        headers = request.headers
        if expect_continue_as_0 and headers.get("expect", "").lower() == "100-continue":
            return 0
    else:
        headers = response.headers
        if request.method.upper() == "HEAD":
            return 0
        if 100 <= response.status_code <= 199:
            return 0
        if response.status_code == 200 and request.method.upper() == "CONNECT":
            return 0
        if response.status_code in (204, 304):
            return 0

    if "chunked" in headers.get("transfer-encoding", "").lower():
        return None
    if "content-length" in headers:
        try:
            sizes = headers.get_all("content-length")
            different_content_length_headers = any(x != sizes[0] for x in sizes)
            if different_content_length_headers:
                raise exceptions.HttpSyntaxException("Conflicting Content Length Headers")
            size = int(sizes[0])
            if size < 0:
                raise ValueError()
            return size
        except ValueError as e:
            raise exceptions.HttpSyntaxException("Unparseable Content Length") from e
    if not response:
        return 0
    return -1
def expected_http_body_size(request, response=None):
    """
    Returns:
        The expected body length:
        - a positive integer, if the size is known in advance
        - None, if the size is unknown in advance (chunked encoding)
        - -1, if all data should be read until end of stream.

    Raises:
        exceptions.HttpSyntaxException, if the content length header is invalid
    """
    # Determine response size according to
    # http://tools.ietf.org/html/rfc7230#section-3.3
    if not response:
        headers = request.headers
        response_code = None
        is_request = True
    else:
        headers = response.headers
        response_code = response.status_code
        is_request = False

    if is_request:
        if headers.get("expect", "").lower() == "100-continue":
            return 0
    else:
        if request.method.upper() == "HEAD":
            return 0
        if 100 <= response_code <= 199:
            return 0
        if response_code == 200 and request.method.upper() == "CONNECT":
            return 0
        if response_code in (204, 304):
            return 0

    if "chunked" in headers.get("transfer-encoding", "").lower():
        return None
    if "content-length" in headers:
        try:
            sizes = headers.get_all("content-length")
            different_content_length_headers = any(x != sizes[0] for x in sizes)
            if different_content_length_headers:
                raise exceptions.HttpSyntaxException("Conflicting Content Length Headers")
            size = int(sizes[0])
            if size < 0:
                raise ValueError()
            return size
        except ValueError:
            raise exceptions.HttpSyntaxException("Unparseable Content Length")
    if is_request:
        return 0
    return -1
def expected_http_body_size(request, response=None):
    """
    Returns:
        The expected body length:
        - a positive integer, if the size is known in advance
        - None, if the size is unknown in advance (chunked encoding)
        - -1, if all data should be read until end of stream.

    Raises:
        exceptions.HttpSyntaxException, if the content length header is invalid
    """
    # Determine response size according to
    # http://tools.ietf.org/html/rfc7230#section-3.3
    if not response:
        headers = request.headers
        response_code = None
        is_request = True
    else:
        headers = response.headers
        response_code = response.status_code
        is_request = False

    if is_request:
        if headers.get("expect", "").lower() == "100-continue":
            return 0
    else:
        if request.method.upper() == "HEAD":
            return 0
        if 100 <= response_code <= 199:
            return 0
        if response_code == 200 and request.method.upper() == "CONNECT":
            return 0
        if response_code in (204, 304):
            return 0

    if "chunked" in headers.get("transfer-encoding", "").lower():
        return None
    if "content-length" in headers:
        try:
            size = int(headers["content-length"])
            if size < 0:
                raise ValueError()
            return size
        except ValueError:
            raise exceptions.HttpSyntaxException("Unparseable Content Length")
    if is_request:
        return 0
    return -1
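# A minimal usage sketch for expected_http_body_size, assuming one of the
# variants above is in scope. FakeHeaders, FakeRequest and FakeResponse are
# hypothetical stand-ins for mitmproxy's Headers/Request/Response objects, not
# its real API; they only provide the attributes the function reads. Header
# keys must be lowercase here because a plain dict, unlike mitmproxy's
# Headers class, is case-sensitive.
class FakeHeaders(dict):
    def get_all(self, name):
        # mimic the multi-value header access used by the newer variants
        return [self[name]] if name in self else []


class FakeRequest:
    def __init__(self, method, headers=None):
        self.method = method
        self.headers = FakeHeaders(headers or {})


class FakeResponse:
    def __init__(self, status_code, headers=None):
        self.status_code = status_code
        self.headers = FakeHeaders(headers or {})


req = FakeRequest("GET")
# request without a body
assert expected_http_body_size(req) == 0
# explicit Content-Length
assert expected_http_body_size(FakeRequest("POST", {"content-length": "12"})) == 12
# chunked transfer encoding: size unknown in advance
assert expected_http_body_size(req, FakeResponse(200, {"transfer-encoding": "chunked"})) is None
# no framing information on a response: read until end of stream
assert expected_http_body_size(req, FakeResponse(200, {})) == -1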
def read_response(
        self,
        __rfile,
        request_method=b'',
        body_size_limit=None,
        include_body=True,
        stream_id=None,
):
    if body_size_limit is not None:
        # enforcing a body size limit is not supported for HTTP/2 here
        raise NotImplementedError()

    # make sure the HTTP/2 connection preface has been exchanged before reading
    self.perform_connection_preface()

    timestamp_start = time.time()
    if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
        self.tcp_handler.rfile.reset_timestamps()

    stream_id, headers, body = self._receive_transmission(
        stream_id=stream_id,
        include_body=include_body,
    )

    if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
        # more accurate timestamp_start
        timestamp_start = self.tcp_handler.rfile.first_byte_timestamp

    if include_body:
        timestamp_end = time.time()
    else:
        timestamp_end = None

    # HTTP/2 carries the status code in the ":status" pseudo-header;
    # fall back to 502 if the peer did not send one
    response = mitmproxy.net.http.response.Response(
        http_version=b"HTTP/2.0",
        status_code=int(headers.get(':status', 502)),
        reason=b'',
        headers=headers,
        content=body,
        trailers=None,
        timestamp_start=timestamp_start,
        timestamp_end=timestamp_end,
    )
    response.stream_id = stream_id
    return response
def read_response(
        self,
        __rfile,
        request_method=b'',
        body_size_limit=None,
        include_body=True,
        stream_id=None,
):
    if body_size_limit is not None:
        raise NotImplementedError()

    self.perform_connection_preface()

    timestamp_start = time.time()
    if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
        self.tcp_handler.rfile.reset_timestamps()

    stream_id, headers, body = self._receive_transmission(
        stream_id=stream_id,
        include_body=include_body,
    )

    if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
        # more accurate timestamp_start
        timestamp_start = self.tcp_handler.rfile.first_byte_timestamp

    if include_body:
        timestamp_end = time.time()
    else:
        timestamp_end = None

    response = mitmproxy.net.http.response.Response(
        b"HTTP/2.0",
        int(headers.get(':status', 502)),
        b'',
        headers,
        body,
        timestamp_start=timestamp_start,
        timestamp_end=timestamp_end,
    )
    response.stream_id = stream_id
    return response
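# A minimal sketch of the timestamp hooks that read_response probes for with
# hasattr(): an rfile wrapper may expose reset_timestamps() and
# first_byte_timestamp so that timestamp_start reflects the arrival of the
# first response byte rather than the moment the read started. TimedReader is
# a hypothetical stand-in, not mitmproxy's actual reader class.
import time


class TimedReader:
    """Wrap a file-like object and record when the first byte arrives."""

    def __init__(self, rfile):
        self._rfile = rfile
        self.first_byte_timestamp = None

    def reset_timestamps(self):
        self.first_byte_timestamp = None

    def read(self, n=-1):
        data = self._rfile.read(n)
        if data and self.first_byte_timestamp is None:
            self.first_byte_timestamp = time.time()
        return data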