Example #1
def mark_done(self, *, request: bool = False, response: bool = False) -> layer.CommandGenerator[None]:
    if request:
        self.request_done = True
    if response:
        self.response_done = True
    if self.request_done and self.response_done:
        assert self.request
        assert self.response
        if should_make_pipe(self.request, self.response):
            yield from self.make_pipe()
            return
        connection_done = (
                http1.expected_http_body_size(self.request, self.response) == -1
                or http1.connection_close(self.request.http_version, self.request.headers)
                or http1.connection_close(self.response.http_version, self.response.headers)
                # If we proxy HTTP/2 to HTTP/1, we only use upstream connections for one request.
                # This simplifies our connection management quite a bit as we can rely on
                # the proxyserver's max-connection-per-server throttling.
                or (self.request.is_http2 and isinstance(self, Http1Client))
        )
        if connection_done:
            yield commands.CloseConnection(self.conn)
            self.state = self.done
            return
        self.request_done = self.response_done = False
        self.request = self.response = None
        if isinstance(self, Http1Server):
            self.stream_id += 2
        else:
            self.stream_id = None
        self.state = self.read_headers
        if self.buf:
            yield from self.state(events.DataReceived(self.conn, b""))
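For context on the connection_done check above: http1.connection_close follows the standard HTTP/1.x keep-alive rules, where HTTP/1.0 closes by default unless keep-alive is negotiated and HTTP/1.1 persists unless "Connection: close" is sent. The sketch below is only an illustrative approximation of that rule, not mitmproxy's actual implementation; the helper name should_close_after_exchange is made up for this example.

# Illustrative sketch only, not mitmproxy's http1.connection_close: it
# approximates the HTTP/1.x keep-alive rules the checks above rely on.
# `headers` is a plain dict here; real header objects are case-insensitive.
def should_close_after_exchange(http_version: str, headers: dict) -> bool:
    connection = headers.get("Connection", "").lower()
    if "close" in connection:
        return True  # an explicit "Connection: close" always ends reuse
    if http_version == "HTTP/1.0":
        # HTTP/1.0 connections are non-persistent unless keep-alive is negotiated
        return "keep-alive" not in connection
    return False  # HTTP/1.1 and later default to persistent connections


print(should_close_after_exchange("HTTP/1.1", {"Connection": "close"}))  # True
print(should_close_after_exchange("HTTP/1.0", {}))                       # True
print(should_close_after_exchange("HTTP/1.1", {}))                       # False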
Example #2
    def check_close_connection(self, flow):
        request_close = http1.connection_close(flow.request.http_version,
                                               flow.request.headers)
        response_close = http1.connection_close(flow.response.http_version,
                                                flow.response.headers)
        read_until_eof = http1.expected_http_body_size(flow.request,
                                                       flow.response) == -1
        close_connection = request_close or response_close or read_until_eof
        if flow.request.first_line_format == "authority" and flow.response.status_code == 200:
            # Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
            # Charles Proxy sends a CONNECT response with HTTP/1.0
            # and no Content-Length header

            return False
        return close_connection
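The first_line_format == "authority" test above identifies CONNECT requests, whose request line carries only host:port. A 200 reply to CONNECT establishes a tunnel, so the connection has to stay open even when the response, like the HTTP/1.0 reply without Content-Length that Charles Proxy sends, would otherwise look non-persistent. A minimal sketch of that special case, using a made-up SimpleFlow stand-in rather than mitmproxy's flow objects:

from dataclasses import dataclass


# SimpleFlow is a made-up stand-in for illustration only; mitmproxy's real
# flow objects carry request/response objects with far more state.
@dataclass
class SimpleFlow:
    first_line_format: str  # "authority" for CONNECT requests (host:port only)
    status_code: int


def keep_tunnel_open(flow: SimpleFlow) -> bool:
    # A 200 reply to CONNECT establishes a tunnel, so the connection must stay
    # open even if ordinary keep-alive heuristics would say otherwise.
    return flow.first_line_format == "authority" and flow.status_code == 200


print(keep_tunnel_open(SimpleFlow("authority", 200)))  # True  -> check_close_connection returns False
print(keep_tunnel_open(SimpleFlow("relative", 200)))   # False -> fall through to close_connection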
Example #3
    def check_close_connection(self, flow):
        request_close = http1.connection_close(
            flow.request.http_version,
            flow.request.headers
        )
        response_close = http1.connection_close(
            flow.response.http_version,
            flow.response.headers
        )
        read_until_eof = http1.expected_http_body_size(flow.request, flow.response) == -1
        close_connection = request_close or response_close or read_until_eof
        if flow.request.first_line_format == "authority" and flow.response.status_code == 200:
            # Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
            # Charles Proxy sends a CONNECT response with HTTP/1.0
            # and no Content-Length header

            return False
        return close_connection
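The read_until_eof condition reflects HTTP/1.x message framing: when a response carries neither a Content-Length nor "Transfer-Encoding: chunked", its body can only be delimited by closing the connection, so expected_http_body_size reports -1 and the connection cannot be reused. Below is a hedged, simplified illustration of that rule; body_is_eof_delimited is a hypothetical helper, not part of mitmproxy's http1 module.

# Hedged illustration of when expected_http_body_size yields -1 ("read until
# EOF"). RFC 7230 section 3.3.3 has more cases (e.g. bodiless 204/304
# responses); this only covers the gist, and the helper name is invented.
def body_is_eof_delimited(response_headers: dict) -> bool:
    if "chunked" in response_headers.get("Transfer-Encoding", "").lower():
        return False  # chunked framing marks the end of the body itself
    if "Content-Length" in response_headers:
        return False  # a declared length tells the reader where the body stops
    return True  # only closing the connection can signal end-of-body


print(body_is_eof_delimited({"Content-Length": "42"}))          # False
print(body_is_eof_delimited({"Transfer-Encoding": "chunked"}))  # False
print(body_is_eof_delimited({}))                                # True -> connection cannot be reused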
Example #4
def mark_done(self,
              *,
              request: bool = False,
              response: bool = False) -> layer.CommandGenerator[None]:
    if request:
        self.request_done = True
    if response:
        self.response_done = True
    if self.request_done and self.response_done:
        assert self.request
        assert self.response
        if should_make_pipe(self.request, self.response):
            yield from self.make_pipe()
            return
        try:
            read_until_eof_semantics = http1.expected_http_body_size(
                self.request, self.response) == -1
        except ValueError:
            # this may raise only now (and not earlier) because an addon set invalid headers,
            # in which case it's not really clear what we are supposed to do.
            read_until_eof_semantics = False
        connection_done = (
            read_until_eof_semantics or http1.connection_close(
                self.request.http_version, self.request.headers)
            or http1.connection_close(self.response.http_version,
                                      self.response.headers)
            # If we proxy HTTP/2 to HTTP/1, we only use upstream connections for one request.
            # This simplifies our connection management quite a bit as we can rely on
            # the proxyserver's max-connection-per-server throttling.
            or (self.request.is_http2 and isinstance(self, Http1Client)))
        if connection_done:
            yield commands.CloseConnection(self.conn)
            self.state = self.done
            return
        self.request_done = self.response_done = False
        self.request = self.response = None
        if isinstance(self, Http1Server):
            self.stream_id += 2
        else:
            self.stream_id = None
        self.state = self.read_headers
        if self.buf:
            yield from self.state(events.DataReceived(self.conn, b""))
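Example #4 wraps expected_http_body_size in a try/except because, as its comment notes, an addon may have set headers that no longer frame a valid body, and the call then raises ValueError. A minimal sketch of that failure mode, assuming a hypothetical parse_content_length helper rather than mitmproxy's real code:

# Minimal sketch of the failure mode guarded by the try/except in Example #4.
# parse_content_length is hypothetical; mitmproxy's expected_http_body_size
# performs comparable validation and raises ValueError on malformed headers.
def parse_content_length(headers: dict):
    raw = headers.get("Content-Length")
    if raw is None:
        return None
    size = int(raw)  # raises ValueError for values like "abc" set by a buggy addon
    if size < 0:
        raise ValueError(f"negative Content-Length: {raw!r}")
    return size


try:
    parse_content_length({"Content-Length": "abc"})
except ValueError:
    # Example #4 falls back to read_until_eof_semantics = False in this case.
    pass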