def read_request_body(self, request):
    """Read the HTTP request body from the client connection.

    The amount to read is derived from the request headers; reading is
    capped by the configured ``body_size_limit`` option.
    """
    size = http1.expected_http_body_size(request)
    limit = self.config.options.body_size_limit
    return http1.read_body(self.client_conn.rfile, size, limit)
def read_headers(
        self,
        event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
    """Parse response headers arriving from the server.

    On success, hands off to `read_body` and replays the current event so
    any already-buffered body bytes are consumed immediately.
    """
    if isinstance(event, events.DataReceived):
        if not self.request:
            # we just received some data for an unknown request.
            yield commands.Log(
                f"Unexpected data from server: {bytes(self.buf)!r}")
            yield commands.CloseConnection(self.conn)
            return
        assert self.stream_id
        response_head = self.buf.maybe_extract_lines()
        if response_head:
            response_head = [
                bytes(x) for x in response_head
            ]  # TODO: Make url.parse compatible with bytearrays
            try:
                self.response = http1.read_response_head(response_head)
                if self.context.options.validate_inbound_headers:
                    http1.validate_headers(self.response.headers)
                expected_size = http1.expected_http_body_size(
                    self.request, self.response)
            except ValueError as e:
                # Malformed response head: tear down the connection and
                # surface the parse error to the stream.
                yield commands.CloseConnection(self.conn)
                yield ReceiveHttp(
                    ResponseProtocolError(
                        self.stream_id,
                        f"Cannot parse HTTP response: {e}"))
                return
            # expected_size == 0 signals "end of message already reached".
            yield ReceiveHttp(
                ResponseHeaders(self.stream_id, self.response,
                                expected_size == 0))
            self.body_reader = make_body_reader(expected_size)
            self.state = self.read_body
            # Re-dispatch the same event so buffered body data is processed.
            yield from self.state(event)
        else:
            pass  # FIXME: protect against header size DoS
    elif isinstance(event, events.ConnectionClosed):
        if self.conn.state & ConnectionState.CAN_WRITE:
            yield commands.CloseConnection(self.conn)
        if self.stream_id:
            if self.buf:
                # Partial (unparseable) response data remained in the buffer.
                yield ReceiveHttp(
                    ResponseProtocolError(
                        self.stream_id,
                        f"unexpected server response: {bytes(self.buf)!r}")
                )
            else:
                # The server has closed the connection to prevent us from continuing.
                # We need to signal that to the stream.
                # https://tools.ietf.org/html/rfc7231#section-6.5.11
                yield ReceiveHttp(
                    ResponseProtocolError(self.stream_id,
                                          "server closed connection"))
        else:
            return
    else:
        raise AssertionError(f"Unexpected event: {event}")
def read_response_body(self, request, response):
    """Read the HTTP response body from the server connection.

    The expected length is derived from both request and response headers;
    reading is capped by the configured ``body_size_limit`` option.
    """
    size = http1.expected_http_body_size(request, response)
    limit = self.config.options.body_size_limit
    return http1.read_body(self.server_conn.rfile, size, limit)
def read_response_body(self, request, response):
    """Read the HTTP response body from the server connection.

    ``body_size_limit`` is a human-readable size string (e.g. "3m") and is
    parsed into a byte count before being applied.
    """
    size = http1.expected_http_body_size(request, response)
    limit = human.parse_size(self.config.options.body_size_limit)
    return http1.read_body(self.server_conn.rfile, size, limit)
def read_headers(self, event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
    """Parse request headers arriving from the client.

    On success, hands off to `read_body` and replays the current event so
    any already-buffered body bytes are consumed immediately.
    """
    if isinstance(event, events.DataReceived):
        request_head = self.buf.maybe_extract_lines()
        if request_head:
            request_head = [bytes(x) for x in request_head]  # TODO: Make url.parse compatible with bytearrays
            try:
                self.request = http1.read_request_head(request_head)
                # expect_continue_as_0=False: a 100-continue request still has
                # a body to read here.
                expected_body_size = http1.expected_http_body_size(self.request, expect_continue_as_0=False)
            except ValueError as e:
                # Unparseable request head: log, close, and stop processing.
                yield commands.Log(f"{human.format_address(self.conn.peername)}: {e}")
                yield commands.CloseConnection(self.conn)
                self.state = self.done
                return
            # expected_body_size == 0 signals "end of message already reached".
            yield ReceiveHttp(RequestHeaders(self.stream_id, self.request, expected_body_size == 0))
            self.body_reader = make_body_reader(expected_body_size)
            self.state = self.read_body
            # Re-dispatch the same event so buffered body data is processed.
            yield from self.state(event)
        else:
            pass  # FIXME: protect against header size DoS
    elif isinstance(event, events.ConnectionClosed):
        buf = bytes(self.buf)
        if buf.strip():
            yield commands.Log(f"Client closed connection before completing request headers: {buf!r}")
        yield commands.CloseConnection(self.conn)
    else:
        raise AssertionError(f"Unexpected event: {event}")
def read_request_body(self, request):
    """Read the HTTP request body from the client connection.

    ``body_size_limit`` is a human-readable size string (e.g. "3m") and is
    parsed into a byte count before being applied.
    """
    size = http1.expected_http_body_size(request)
    limit = human.parse_size(self.config.options.body_size_limit)
    return http1.read_body(self.client_conn.rfile, size, limit)
def mark_done(self, *, request: bool = False, response: bool = False) -> layer.CommandGenerator[None]:
    """Mark the request and/or response as completed.

    Once both sides are done, either tears the connection down (pipe
    upgrade, close semantics, or HTTP/2-to-HTTP/1 proxying) or resets
    per-message state to read the next message on the same connection.
    """
    if request:
        self.request_done = True
    if response:
        self.response_done = True
    if self.request_done and self.response_done:
        assert self.request
        assert self.response
        if should_make_pipe(self.request, self.response):
            yield from self.make_pipe()
            return
        try:
            read_until_eof_semantics = http1.expected_http_body_size(self.request, self.response) == -1
        except ValueError:
            # this may raise only now (and not earlier) because an addon set invalid headers,
            # in which case it's not really clear what we are supposed to do.
            read_until_eof_semantics = False
        connection_done = (
            read_until_eof_semantics
            or http1.connection_close(self.request.http_version, self.request.headers)
            or http1.connection_close(self.response.http_version, self.response.headers)
            # If we proxy HTTP/2 to HTTP/1, we only use upstream connections for one request.
            # This simplifies our connection management quite a bit as we can rely on
            # the proxyserver's max-connection-per-server throttling.
            or (self.request.is_http2 and isinstance(self, Http1Client))
        )
        if connection_done:
            yield commands.CloseConnection(self.conn)
            self.state = self.done
            return
        # Keep-alive: reset per-message state for the next exchange.
        self.request_done = self.response_done = False
        self.request = self.response = None
        if isinstance(self, Http1Server):
            self.stream_id += 2
        else:
            self.stream_id = None
        self.state = self.read_headers
        if self.buf:
            # Pipelined data is already buffered; kick the new state machine.
            yield from self.state(events.DataReceived(self.conn, b""))
def run(self, f, is_request):
    """Mark a message for streaming if its expected body exceeds max_size.

    Kills the flow when the expected body size cannot be determined.
    """
    if not self.max_size:
        return
    message = f.request if is_request else f.response
    try:
        expected_size = http1.expected_http_body_size(
            f.request, None if is_request else f.response
        )
    except exceptions.HttpException:
        f.reply.kill()
        return
    if expected_size and not message.raw_content and not (0 <= expected_size <= self.max_size):
        # message.stream may already be a callable, which we want to preserve.
        message.stream = message.stream or True
        direction = "response from" if not is_request else "request to"
        ctx.log.info(f"Streaming {direction} {f.request.host}")
def run(self, f, is_request):
    """Mark a message for streaming if its expected body exceeds max_size.

    Kills the flow when the expected body size cannot be determined from
    the headers.
    """
    if self.max_size:
        r = f.request if is_request else f.response
        try:
            expected_size = http1.expected_http_body_size(
                f.request, f.response if not is_request else None)
        # Fixed: was `exceptions.HTTPException`, which does not exist
        # (sibling code uses `HttpException`) and would raise AttributeError
        # instead of killing the flow.
        except exceptions.HttpException:
            f.reply.kill()
            return
        if expected_size and not r.raw_content and not (0 <= expected_size <= self.max_size):
            # r.stream may already be a callable, which we want to preserve.
            r.stream = r.stream or True
            # FIXME: make message generic when we add request streaming
            ctx.log.info("Streaming response from %s" % f.request.host)
def run(self, f, is_request):
    """Mark a message for streaming if its expected body exceeds max_size.

    Kills the flow when the expected body size cannot be determined from
    the headers.
    """
    if self.max_size:
        r = f.request if is_request else f.response
        try:
            expected_size = http1.expected_http_body_size(
                f.request, f.response if not is_request else None
            )
        except exceptions.HttpException:
            f.reply.kill()
            return
        # A negative expected_size (read-until-EOF) also triggers streaming.
        if expected_size and not r.raw_content and not (0 <= expected_size <= self.max_size):
            # r.stream may already be a callable, which we want to preserve.
            r.stream = r.stream or True
            # FIXME: make message generic when we add request streaming
            ctx.log.info("Streaming response from %s" % f.request.host)
def check_close_connection(self, flow):
    """Return True if the connection must be closed after this exchange."""
    # Evaluate all three criteria up front (no short-circuiting), matching
    # the original evaluation order.
    request_wants_close = http1.connection_close(flow.request.http_version, flow.request.headers)
    response_wants_close = http1.connection_close(flow.response.http_version, flow.response.headers)
    body_until_eof = http1.expected_http_body_size(flow.request, flow.response) == -1
    if flow.request.first_line_format == "authority" and flow.response.status_code == 200:
        # Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
        # Charles Proxy sends a CONNECT response with HTTP/1.0
        # and no Content-Length header
        return False
    return request_wants_close or response_wants_close or body_until_eof
def read_headers(
        self,
        event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
    """Parse request headers arriving from the client.

    Invalid heads are answered with an HTTP 400 before closing. On success,
    hands off to `read_body` and replays the current event so any
    already-buffered body bytes are consumed immediately.
    """
    if isinstance(event, events.DataReceived):
        request_head = self.buf.maybe_extract_lines()
        if request_head:
            request_head = [
                bytes(x) for x in request_head
            ]  # TODO: Make url.parse compatible with bytearrays
            try:
                self.request = http1.read_request_head(request_head)
                if self.context.options.validate_inbound_headers:
                    http1.validate_headers(self.request.headers)
                expected_body_size = http1.expected_http_body_size(
                    self.request)
            except ValueError as e:
                # Reject the malformed request with a 400 and close.
                yield commands.SendData(self.conn, make_error_response(400, str(e)))
                yield commands.CloseConnection(self.conn)
                if self.request:
                    # we have headers that we can show in the ui
                    yield ReceiveHttp(
                        RequestHeaders(self.stream_id, self.request, False))
                    yield ReceiveHttp(
                        RequestProtocolError(self.stream_id, str(e), 400))
                else:
                    yield commands.Log(
                        f"{human.format_address(self.conn.peername)}: {e}")
                self.state = self.done
                return
            # expected_body_size == 0 signals "end of message already reached".
            yield ReceiveHttp(
                RequestHeaders(self.stream_id, self.request,
                               expected_body_size == 0))
            self.body_reader = make_body_reader(expected_body_size)
            self.state = self.read_body
            # Re-dispatch the same event so buffered body data is processed.
            yield from self.state(event)
        else:
            pass  # FIXME: protect against header size DoS
    elif isinstance(event, events.ConnectionClosed):
        buf = bytes(self.buf)
        if buf.strip():
            yield commands.Log(
                f"Client closed connection before completing request headers: {buf!r}"
            )
        yield commands.CloseConnection(self.conn)
    else:
        raise AssertionError(f"Unexpected event: {event}")
def run(self, f, is_request):
    """Flag over-sized message bodies for streaming instead of buffering.

    Kills the flow when the expected body size cannot be determined.
    """
    if self.max_size:
        msg = f.request if is_request else f.response
        response_part = f.response if not is_request else None
        try:
            expected_size = http1.expected_http_body_size(f.request, response_part)
        except exceptions.HttpException:
            f.reply.kill()
            return
        if expected_size and not msg.raw_content and not (0 <= expected_size <= self.max_size):
            # msg.stream may already be a callable, which we want to preserve.
            msg.stream = msg.stream or True
            direction = "response from" if not is_request else "request to"
            ctx.log.info("Streaming {} {}".format(direction, f.request.host))
def check_close_connection(self, flow):
    """Decide whether this connection must close after the current flow."""
    req, resp = flow.request, flow.response
    # All three checks are computed unconditionally, preserving the
    # original's evaluation behavior.
    close_by_request = http1.connection_close(req.http_version, req.headers)
    close_by_response = http1.connection_close(resp.http_version, resp.headers)
    close_by_eof = http1.expected_http_body_size(req, resp) == -1
    should_close = close_by_request or close_by_response or close_by_eof
    if req.first_line_format == "authority" and resp.status_code == 200:
        # Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
        # Charles Proxy sends a CONNECT response with HTTP/1.0
        # and no Content-Length header
        return False
    return should_close
def mark_done(self, *, request: bool = False, response: bool = False) -> layer.CommandGenerator[None]:
    """Mark the request and/or response as completed.

    Once both sides are done, either tears the connection down (pipe
    upgrade, close semantics, or HTTP/2-to-HTTP/1 proxying) or resets
    per-message state to read the next message on the same connection.
    """
    if request:
        self.request_done = True
    if response:
        self.response_done = True
    if self.request_done and self.response_done:
        assert self.request
        assert self.response
        if should_make_pipe(self.request, self.response):
            yield from self.make_pipe()
            return
        try:
            read_until_eof_semantics = http1.expected_http_body_size(
                self.request, self.response) == -1
        except ValueError:
            # this may raise only now (and not earlier) because an addon set invalid headers,
            # in which case it's not really clear what we are supposed to do.
            read_until_eof_semantics = False
        connection_done = (
            read_until_eof_semantics
            or http1.connection_close(
                self.request.http_version, self.request.headers)
            or http1.connection_close(self.response.http_version,
                                      self.response.headers)
            # If we proxy HTTP/2 to HTTP/1, we only use upstream connections for one request.
            # This simplifies our connection management quite a bit as we can rely on
            # the proxyserver's max-connection-per-server throttling.
            or (self.request.is_http2 and isinstance(self, Http1Client)))
        if connection_done:
            yield commands.CloseConnection(self.conn)
            self.state = self.done
            return
        # Keep-alive: reset per-message state for the next exchange.
        self.request_done = self.response_done = False
        self.request = self.response = None
        if isinstance(self, Http1Server):
            self.stream_id += 2
        else:
            self.stream_id = None
        self.state = self.read_headers
        if self.buf:
            # Pipelined data is already buffered; kick the new state machine.
            yield from self.state(events.DataReceived(self.conn, b""))
def send(self, event: HttpEvent) -> layer.CommandGenerator[None]:
    """Serialize an outgoing HTTP request event onto the wire.

    Handles head assembly (including an HTTP/2 -> HTTP/1 downgrade),
    chunked-encoded body data, and end-of-message framing.
    """
    if isinstance(event, RequestProtocolError):
        yield commands.CloseConnection(self.conn)
        return
    if not self.stream_id:
        # First event for this connection: adopt its stream id and request.
        assert isinstance(event, RequestHeaders)
        self.stream_id = event.stream_id
        self.request = event.request
    assert self.stream_id == event.stream_id
    if isinstance(event, RequestHeaders):
        request = event.request
        if request.is_http2:
            # Convert to an HTTP/1 request.
            request = request.copy(
            )  # (we could probably be a bit more efficient here.)
            request.http_version = "HTTP/1.1"
            if "Host" not in request.headers and request.authority:
                request.headers.insert(0, "Host", request.authority)
            request.authority = ""
        raw = http1.assemble_request_head(request)
        yield commands.SendData(self.conn, raw)
    elif isinstance(event, RequestData):
        assert self.request
        if "chunked" in self.request.headers.get("transfer-encoding", "").lower():
            # Frame the data as an HTTP/1 chunk: size in hex, CRLF, payload, CRLF.
            raw = b"%x\r\n%s\r\n" % (len(event.data), event.data)
        else:
            raw = event.data
        if raw:
            yield commands.SendData(self.conn, raw)
    elif isinstance(event, RequestEndOfMessage):
        assert self.request
        if "chunked" in self.request.headers.get("transfer-encoding", "").lower():
            # Terminating zero-length chunk.
            yield commands.SendData(self.conn, b"0\r\n\r\n")
        elif http1.expected_http_body_size(self.request, self.response) == -1:
            # Body length is signalled by EOF: half-close our write side.
            yield commands.CloseConnection(self.conn, half_close=True)
        yield from self.mark_done(request=True)
    else:
        raise AssertionError(f"Unexpected event: {event}")
def read_response_body(self, request, response):
    """Read the HTTP response body from the server connection.

    The byte limit comes from the pre-processed ``body_size_limit`` option
    (``None`` when the option was not set).
    """
    size = http1.expected_http_body_size(request, response)
    limit = self.config.options._processed.get("body_size_limit")
    return http1.read_body(self.server_conn.rfile, size, limit)
def read_request_body(self, request):
    """Read the HTTP request body from the client connection.

    The byte limit comes from the pre-processed ``body_size_limit`` option
    (``None`` when the option was not set).
    """
    size = http1.expected_http_body_size(request)
    limit = self.config.options._processed.get("body_size_limit")
    return http1.read_body(self.client_conn.rfile, size, limit)
def check_body_size(self, request: bool) -> layer.CommandGenerator[bool]:
    """
    Check if the body size exceeds limits imposed by stream_large_bodies or body_size_limit.

    Returns `True` if the body size exceeds body_size_limit and further processing should be stopped.
    """
    if not (self.context.options.stream_large_bodies or self.context.options.body_size_limit):
        return False
    # Step 1: Determine the expected body size. This can either come from a known content-length header,
    # or from the amount of currently buffered bytes (e.g. for chunked encoding).
    response = not request
    expected_size: Optional[int]
    # the 'late' case: we already started consuming the body
    if request and self.request_body_buf:
        expected_size = len(self.request_body_buf)
    elif response and self.response_body_buf:
        expected_size = len(self.response_body_buf)
    else:
        # the 'early' case: we have not started consuming the body
        try:
            expected_size = expected_http_body_size(
                self.flow.request, self.flow.response if response else None)
        except ValueError:  # pragma: no cover
            # we just don't stream/kill malformed content-length headers.
            expected_size = None
    if expected_size is None or expected_size <= 0:
        return False
    # Step 2: Do we need to abort this?
    max_total_size = human.parse_size(self.context.options.body_size_limit)
    if max_total_size is not None and expected_size > max_total_size:
        # Fire the headers hooks first if they have not run yet, so addons
        # still observe the message before the error hook.
        if request and not self.request_body_buf:
            yield HttpRequestHeadersHook(self.flow)
        if response and not self.response_body_buf:
            yield HttpResponseHeadersHook(self.flow)
        err_msg = f"{'Request' if request else 'Response'} body exceeds mitmproxy's body_size_limit."
        # 413 Payload Too Large toward the client; 502 when the server's
        # response is the offender.
        err_code = 413 if request else 502
        self.flow.error = flow.Error(err_msg)
        yield HttpErrorHook(self.flow)
        yield SendHttp(
            ResponseProtocolError(self.stream_id, err_msg, err_code),
            self.context.client)
        self.client_state = self.state_errored
        if response:
            yield SendHttp(
                RequestProtocolError(self.stream_id, err_msg, err_code),
                self.context.server)
            self.server_state = self.state_errored
        self.flow.live = False
        return True
    # Step 3: Do we need to stream this?
    max_stream_size = human.parse_size(
        self.context.options.stream_large_bodies)
    if max_stream_size is not None and expected_size > max_stream_size:
        if request:
            self.flow.request.stream = True
            if self.request_body_buf:
                # clear buffer and then fake a DataReceived event with everything we had in the buffer so far.
                body_buf = self.request_body_buf
                self.request_body_buf = b""
                yield from self.start_request_stream()
                yield from self.handle_event(
                    RequestData(self.stream_id, body_buf))
        if response:
            assert self.flow.response
            self.flow.response.stream = True
            if self.response_body_buf:
                # Same replay trick for buffered response bytes.
                body_buf = self.response_body_buf
                self.response_body_buf = b""
                yield from self.start_response_stream()
                yield from self.handle_event(
                    ResponseData(self.stream_id, body_buf))
    return False