Example #1
 def on_header(hdr):
     try:
         hdr = parse_response_start_line(hdr)
     except HTTPInputError:
         # Not the first line, do nothing
         return
     write_body[0] = hdr.code not in [301, 302, 303, 307]
Example #2
    def handle_webhdfs_stream_header(self, header_line):
        """Handles the incoming first lines of the response being headers and status code
        """
        headers = httputil.HTTPHeaders()
        header_line = header_line.rstrip()

        if not header_line:
            return

        self.log.debug(
            'HDFSBrowserHandler webhdfs {0} header'.format(header_line))

        if header_line.startswith('HTTP/'):
            try:
                status = httputil.parse_response_start_line(header_line)
                self.set_status(status.code)
                return
            except httputil.HTTPInputError:
                return

        headers.parse_line(header_line)

        # currently we are interested in propagating content-type and content-length
        if headers.get('Content-Type'):
            self.set_header('Content-Type', headers.get('Content-Type'))
        if headers.get('Content-Length'):
            self.set_header('Content-Length', headers.get('Content-Length'))
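
For context, a per-line handler like handle_webhdfs_stream_header above is normally registered as the header_callback of a Tornado HTTP request, which is invoked once for the status line, once for each header, and once for the trailing blank line. Below is a minimal standalone sketch of that wiring (the URL and the module-level bookkeeping are assumptions, not part of the original project):

from tornado import httpclient, httputil, ioloop

# Collected here only for illustration; a proxy handler like the one above
# would call self.set_status / self.set_header instead.
status = []
headers = httputil.HTTPHeaders()

def on_header_line(line):
    line = line.rstrip()
    if not line:
        return
    if line.startswith("HTTP/"):
        try:
            status.append(httputil.parse_response_start_line(line))
        except httputil.HTTPInputError:
            pass
        return
    headers.parse_line(line)

async def main():
    client = httpclient.AsyncHTTPClient()
    # header_callback receives the header lines as they arrive; the body is
    # delivered separately through streaming_callback.
    await client.fetch(
        "http://example.com/",                  # hypothetical URL
        header_callback=on_header_line,
        streaming_callback=lambda chunk: None,  # body is ignored in this sketch
    )
    print(status[-1].code, dict(headers))

ioloop.IOLoop.current().run_sync(main)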
Example #3
            def on_header(hdr):
                if write_body[1] is not False and write_body[2] is None:
                    # Try to find out what content type encoding is used if this is a text file
                    write_body[1].parse_line(hdr)  # pylint: disable=no-member
                    if 'Content-Type' in write_body[1]:
                        content_type = write_body[1].get('Content-Type')  # pylint: disable=no-member
                        if not content_type.startswith('text'):
                            write_body[1] = write_body[2] = False
                        else:
                            encoding = 'utf-8'
                            fields = content_type.split(';')
                            for field in fields:
                                if 'encoding' in field:
                                    encoding = field.split('encoding=')[-1]
                            write_body[2] = encoding
                            # We have found our encoding. Stop processing headers.
                            write_body[1] = False

                if write_body[0] is not None:
                    # We already parsed the first line. No need to run the code below again
                    return

                try:
                    hdr = parse_response_start_line(hdr)
                except HTTPInputError:
                    # Not the first line, do nothing
                    return
                write_body[0] = hdr.code not in [301, 302, 303, 307]
                write_body[1] = HTTPHeaders()
Example #4
            def on_header(hdr):
                if write_body[1] is not False and write_body[2] is None:
                    # Try to find out what content type encoding is used if
                    # this is a text file
                    write_body[1].parse_line(hdr)  # pylint: disable=no-member
                    if 'Content-Type' in write_body[1]:
                        content_type = write_body[1].get('Content-Type')  # pylint: disable=no-member
                        if not content_type.startswith('text'):
                            write_body[1] = write_body[2] = False
                        else:
                            encoding = 'utf-8'
                            fields = content_type.split(';')
                            for field in fields:
                                if 'encoding' in field:
                                    encoding = field.split('encoding=')[-1]
                            write_body[2] = encoding
                            # We have found our encoding. Stop processing headers.
                            write_body[1] = False

                        # If write_body[0] is False, this means that this
                        # header is a 30x redirect, so we need to reset
                        # write_body[0] to None so that we parse the HTTP
                        # status code from the redirect target.
                        if write_body[0] is write_body[1] is False:
                            write_body[0] = None

                # Check the status line of the HTTP request
                if write_body[0] is None:
                    try:
                        hdr = parse_response_start_line(hdr)
                    except HTTPInputError:
                        # Not the first line, do nothing
                        return
                    write_body[0] = hdr.code not in [301, 302, 303, 307]
                    write_body[1] = HTTPHeaders()
Example #5
 def on_header(hdr):
     try:
         hdr = parse_response_start_line(hdr)
     except HTTPInputError:
         # Not the first line, do nothing
         return
     write_body[0] = hdr.code not in [301, 302, 303, 307]
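
This minimal on_header relies on two behaviors of parse_response_start_line: a valid status line yields a ResponseStartLine namedtuple, while any other line raises HTTPInputError, which is why the callback simply returns for ordinary header lines. A quick illustration:

from tornado.httputil import HTTPInputError, parse_response_start_line

start = parse_response_start_line("HTTP/1.1 302 Found")
print(start)   # ResponseStartLine(version='HTTP/1.1', code=302, reason='Found')
# 302 is in [301, 302, 303, 307], so write_body[0] would become False here.
print(start.code not in [301, 302, 303, 307])   # False

try:
    parse_response_start_line("Location: http://example.com/new")
except HTTPInputError:
    print("not a status line")   # ordinary header lines are skipped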
Example #6
 def _header_callback(self, header_line):
     if not self.in_request_headers:
         start_line = parse_response_start_line(header_line)
         self.set_status(start_line.code, start_line.reason)
         self.in_request_headers = True
     elif not HTTPHeaderEndLineRex.match(header_line):
         self.proxy_headers.parse_line(header_line)
Example #7
 def header_callback(self, data):
     if self.header_lines == 0:
         status = httputil.parse_response_start_line(data)
         self.set_status(status.code, status.reason)
     else:
         for name, value in httputil.HTTPHeaders.parse(data).get_all():
             self.add_header(name, value)
     self.header_lines += 1
Example #8
  def header_callback(self, headers):
    # The header callback will be called multiple times, once for the initial
    # HTTP status line and once for each received header.
    header_lines = headers
    if self._response_status is None:
      status_line, _, header_lines = headers.partition('\r\n')
      self._response_status = httputil.parse_response_start_line(status_line)

    self._response_headers.update(httputil.HTTPHeaders.parse(header_lines))
Example #9
 def header_callback(self, line):
     try:
         if self.headers is None:
             start_line = parse_response_start_line(line)
             self.http_version, self.status_code, self.http_reason = start_line
             self.headers = HTTPHeaders()
         else:
             self.headers.parse_line(line)
     except Exception:
         app_log.exception('Cannot parse header %s' % line)
Example #10
 def got_header(self, header):
     if not self.headers:
         first_line = httputil.parse_response_start_line(header)
         self.set_status(first_line.code, first_line.reason)
         self.headers.append(header)
     elif header != '\r\n':
         self.headers.append(header)
     else:
         for line in self.headers[1:]:
             name, value = line.split(":", 1)
             self.set_header(name, value.strip())
Example #11
    def read_headers(self, delegate):
        try:
            _delegate, delegate = self._parse_delegate(delegate)
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n", max_bytes=self.params.max_header_size)
            header_data = yield header_future
            start_line, headers = self._parse_headers(header_data)

            start_line = parse_response_start_line(start_line)
            self._response_start_line = start_line

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)

            with _ExceptionLoggingContext(app_log):
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                raise gen.Return(False)

            # determine body skip
            #TODO: 100 <= code < 200
            if (self._request_start_line is not None
                    and self._request_start_line.method == 'HEAD'):
                _delegate.skip_body = True
            code = start_line.code
            if code == 304:
                # 304 responses may include the content-length header
                # but do not actually have a body.
                # http://tools.ietf.org/html/rfc7230#section-3.3
                _delegate.skip_body = True
            if code >= 100 and code < 200:
                # 1xx responses should never indicate the presence of
                # a body.
                if ('Content-Length' in headers
                        or 'Transfer-Encoding' in headers):
                    raise HTTPInputError("Response code %d cannot have body" %
                                         code)
                # TODO: client delegates will get headers_received twice
                # in the case of a 100-continue.  Document or change?
                yield self.read_headers(delegate)
        except HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
            self.close()

            self._clear_callbacks()

            raise gen.Return(False)
        finally:
            header_future = None
        raise gen.Return(True)
Example #12
 def header_callback(self, data):
     if not self.status_code_read:
         status = httputil.parse_response_start_line(data)
         self.set_status(status.code, status.reason)
         self.status_code_read = True
     else:
         for name, value in httputil.HTTPHeaders.parse(data).get_all():
             if name in self.header_keys:
                 self.add_header(name, value)
             else:
                 self.set_header(name, value)
                 self.header_keys.add(name)
Example #13
def _curl_header_callback(headers, header_line):
    # header_line as returned by curl includes the end-of-line characters.
    header_line = header_line.strip()
    if header_line.startswith("HTTP/"):
        headers.clear()
        try:
            (__, __, reason) = httputil.parse_response_start_line(header_line)
            header_line = "X-Http-Reason: %s" % reason
        except httputil.HTTPInputError:
            return
    if not header_line:
        return
    headers.parse_line(header_line)
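
For reference, a module-level callback with this signature is typically attached through pycurl's HEADERFUNCTION option. A minimal, hypothetical wiring that reuses the _curl_header_callback defined above (the URL, the latin1 decode, and the discarded body are assumptions):

import pycurl
from tornado import httputil

headers = httputil.HTTPHeaders()

curl = pycurl.Curl()
curl.setopt(pycurl.URL, "http://example.com/")  # hypothetical URL
# pycurl hands each raw header line to the callback as bytes; decode it first.
curl.setopt(
    pycurl.HEADERFUNCTION,
    lambda line: _curl_header_callback(headers, line.decode("latin1")),
)
curl.setopt(pycurl.WRITEFUNCTION, lambda chunk: None)  # ignore the body
curl.perform()
curl.close()

# The reason phrase survives as the synthetic X-Http-Reason header.
print(headers.get("X-Http-Reason"), dict(headers))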
Example #15
            def on_header(hdr):
                if write_body[1] is not False and write_body[2] is None:
                    if not hdr.strip() and 'Content-Type' not in write_body[1]:
                        # If write_body[0] is True, then we are not following a
                        # redirect (initial response was a 200 OK). So there is
                        # no need to reset write_body[0].
                        if write_body[0] is not True:
                            # We are following a redirect, so we need to reset
                            # write_body[0] so that we properly follow it.
                            write_body[0] = None
                        # We don't need the HTTPHeaders object anymore
                        write_body[1] = False
                        return
                    # Try to find out what content type encoding is used if
                    # this is a text file
                    write_body[1].parse_line(hdr)  # pylint: disable=no-member
                    if 'Content-Type' in write_body[1]:
                        content_type = write_body[1].get('Content-Type')  # pylint: disable=no-member
                        if not content_type.startswith('text'):
                            write_body[1] = write_body[2] = False
                        else:
                            encoding = 'utf-8'
                            fields = content_type.split(';')
                            for field in fields:
                                if 'encoding' in field:
                                    encoding = field.split('encoding=')[-1]
                            write_body[2] = encoding
                            # We have found our encoding. Stop processing headers.
                            write_body[1] = False

                        # If write_body[0] is False, this means that this
                        # header is a 30x redirect, so we need to reset
                        # write_body[0] to None so that we parse the HTTP
                        # status code from the redirect target. Additionally,
                        # we need to reset write_body[2] so that we inspect the
                        # headers for the Content-Type of the URL we're
                        # following.
                        if write_body[0] is write_body[1] is False:
                            write_body[0] = write_body[2] = None

                # Check the status line of the HTTP request
                if write_body[0] is None:
                    try:
                        hdr = parse_response_start_line(hdr)
                    except HTTPInputError:
                        # Not the first line, do nothing
                        return
                    write_body[0] = hdr.code not in [301, 302, 303, 307]
                    write_body[1] = HTTPHeaders()
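
These on_header callbacks share a three-slot write_body list: slot 0 records whether the body should be written (None until a status line has been parsed), slot 1 holds an HTTPHeaders instance while the headers are still being inspected, and slot 2 ends up holding the detected text encoding. A hypothetical harness that feeds the callback raw header lines, assuming on_header and write_body are defined at module level and that write_body starts as [None, False, None]:

# Names used inside on_header.
from tornado.httputil import HTTPHeaders, HTTPInputError, parse_response_start_line

write_body = [None, False, None]   # assumed initial state: [should_write, headers, encoding]

# Drive the callback the way a header_callback would, one raw line at a time.
for line in ("HTTP/1.1 200 OK\r\n",
             "Content-Type: text/html; encoding=iso-8859-1\r\n",
             "\r\n"):
    on_header(line)

print(write_body)   # expected: [True, False, 'iso-8859-1']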
Example #16
 def _handle_headers(self, headers):
     if hasattr(self, "_theaders"):
         if headers == "\r\n":
             for kv in self._theaders.get_all():
                 self.log.info("%s:%s" % kv)
                 self.add_header(kv[0], kv[1])
             del self._theaders
             return
         try:
             self._theaders.parse_line(headers)
         except:
             return
     else:
         r = parse_response_start_line(headers)
         self.set_status(r.code, r.reason)
         self._theaders = HTTPHeaders()
Example #17
 def _curl_header_callback(self, headers, header_callback, header_line):
     header_line = native_str(header_line)
     if header_callback is not None:
         self.io_loop.add_callback(header_callback, header_line)
     # header_line as returned by curl includes the end-of-line characters.
     # whitespace at the start should be preserved to allow multi-line headers
     header_line = header_line.rstrip()
     if header_line.startswith("HTTP/"):
         headers.clear()
         try:
             (__, __, reason) = httputil.parse_response_start_line(header_line)
             header_line = "X-Http-Reason: %s" % reason
         except httputil.HTTPInputError:
             return
     if not header_line:
         return
     headers.parse_line(header_line)
Example #18
 def _curl_header_callback(self, headers, header_callback, header_line):
     header_line = native_str(header_line.decode('latin1'))
     if header_callback is not None:
         self.io_loop.add_callback(header_callback, header_line)
     # header_line as returned by curl includes the end-of-line characters.
     # whitespace at the start should be preserved to allow multi-line headers
     header_line = header_line.rstrip()
     if header_line.startswith("HTTP/"):
         headers.clear()
         try:
             (__, __, reason) = httputil.parse_response_start_line(header_line)
             header_line = "X-Http-Reason: %s" % reason
         except httputil.HTTPInputError:
             return
     if not header_line:
         return
     headers.parse_line(header_line)
Example #19
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n",
                max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() + self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop,
                        quiet_exceptions=iostream.StreamClosedError)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext():
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None and
                        self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    skip_body = True
                if code >= 100 and code < 200:
                    if ('Content-Length' in headers or
                            'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue" and
                        not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers, delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() + self._body_timeout,
                                body_future, self.stream.io_loop,
                                quiet_exceptions=iostream.StreamClosedError)
                        except gen.TimeoutError:
                            print("Timeout reading body from %s" % self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext():
                    delegate.finish()
            if (not self._finish_future.done() and
                    self.stream is not None and
                    not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            print("Malformed HTTP message from %s: %s", (self.context, e))
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext():
                    delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
Example #20
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n",
                max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() + self.params.header_timeout,
                        header_future,
                        quiet_exceptions=iostream.StreamClosedError)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext(app_log):
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None and
                        self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    # 304 responses may include the content-length header
                    # but do not actually have a body.
                    # http://tools.ietf.org/html/rfc7230#section-3.3
                    skip_body = True
                if code >= 100 and code < 200:
                    # 1xx responses should never indicate the presence of
                    # a body.
                    if ('Content-Length' in headers or
                            'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue" and
                        not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers, delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() + self._body_timeout,
                                body_future,
                                quiet_exceptions=iostream.StreamClosedError)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()
            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            if (not self._finish_future.done() and
                    self.stream is not None and
                    not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s",
                         self.context, e)
            if not self.is_client:
                yield self.stream.write(b'HTTP/1.1 400 Bad Request\r\n\r\n')
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            header_future = None
            self._clear_callbacks()
        raise gen.Return(True)
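
As these _read_message implementations show, the same header block is parsed differently depending on the side of the connection: parse_response_start_line produces a ResponseStartLine on the client, while parse_request_start_line produces a RequestStartLine on the server. A small demonstration of the two namedtuples:

from tornado import httputil

# Client side: the status-line of a response.
resp = httputil.parse_response_start_line("HTTP/1.1 304 Not Modified")
print(resp)        # ResponseStartLine(version='HTTP/1.1', code=304, reason='Not Modified')
print(resp.code)   # 304 -> skip_body is set in the code above

# Server side: the request-line of an incoming request.
req = httputil.parse_request_start_line("GET /index.html HTTP/1.1")
print(req)         # RequestStartLine(method='GET', path='/index.html', version='HTTP/1.1')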
Example #21
    def response(self, conn, src, dst):
        keep_alive = True
        resp = None
        conn = conn.copy()

        start_line = yield 
        while keep_alive and start_line is not None:
            body = ""
            headers = MultiOrderedDict()
            try:
                resp = httputil.parse_response_start_line(start_line.strip())
            except httputil.HTTPInputError:
                if start_line != "":
                    self.log("HTTP Error: Malformed response start line: '{}'", start_line)
                start_line = yield
                continue
            while True:
                header_line = yield
                if header_line is None:
                    self.log("HTTP Warning: Terminated early?")
                    return
                if not header_line.strip():
                    break
                self.parse_header_line(headers, header_line.strip())

            if resp.version == "HTTP/1.0":
                keep_alive = headers.last("connection", "").lower().strip() == "keep-alive"
            else:
                keep_alive = headers.last("connection", "").lower().strip() != "close"

            if "content-length" in headers:
                try:
                    content_length = int(headers.last("content-length"))
                except ValueError:
                    content_length = None
            else:
                content_length = None


            if header_line is not None:
                #body += conn["lbl_buffers"][dst]
                #conn["lbl_buffers"][dst] = ""
                conn["lbl_disable"](dst)
                while len(body) < content_length or content_length is None:
                    data = yield
                    if data is None:
                        break
                    body += data

            if "content-encoding" in headers:
                conn["http_decoded"] = False
                encoding = headers.last("content-encoding")
                self.log("req encoding: {}", encoding)
                if encoding in self.DECODERS:
                    try:
                        body = self.DECODERS[encoding](body)
                        conn["http_decoded"] = True
                    except:
                        self.log("Unable to decode content '{}' len={}/{}", encoding, len(body), content_length)
            else:
                # Technically it was decoded with the 'identity' encoding...
                conn["http_decoded"] = True

            conn["lbl_enable"](dst)
            conn["http_headers"] = headers
            conn["http_response"] = resp
            start_line = yield self.bubble(dst, conn, body)
Example #22
File: http.py  Project: zeusk/lens
    def response(self, conn, src, dst):
        keep_alive = True
        resp = None
        conn = conn.copy()

        start_line = yield
        while keep_alive and start_line is not None:
            body = ""
            headers = MultiOrderedDict()
            try:
                resp = httputil.parse_response_start_line(start_line.strip())
            except httputil.HTTPInputError:
                if start_line != "":
                    self.log("HTTP Error: Malformed response start line: '{}'",
                             start_line)
                start_line = yield
                continue
            while True:
                header_line = yield
                if header_line is None:
                    self.log("HTTP Warning: Terminated early?")
                    return
                if not header_line.strip():
                    break
                self.parse_header_line(headers, header_line.strip())

            if resp.version == "HTTP/1.0":
                keep_alive = headers.last("connection",
                                          "").lower().strip() == "keep-alive"
            else:
                keep_alive = headers.last("connection",
                                          "").lower().strip() != "close"

            if "content-length" in headers:
                try:
                    content_length = int(headers.last("content-length"))
                except ValueError:
                    content_length = None
            else:
                content_length = None

            if header_line is not None:
                #body += conn["lbl_buffers"][dst]
                #conn["lbl_buffers"][dst] = ""
                conn["lbl_disable"](dst)
                while len(body) < content_length or content_length is None:
                    data = yield
                    if data is None:
                        break
                    body += data

            if "content-encoding" in headers:
                conn["http_decoded"] = False
                encoding = headers.last("content-encoding")
                self.log("req encoding: {}", encoding)
                if encoding in self.DECODERS:
                    try:
                        body = self.DECODERS[encoding](body)
                        conn["http_decoded"] = True
                    except:
                        self.log("Unable to decode content '{}' len={}/{}",
                                 encoding, len(body), content_length)
            else:
                # Technically it was decoded with the 'identity' encoding...
                conn["http_decoded"] = True

            conn["lbl_enable"](dst)
            conn["http_headers"] = headers
            conn["http_response"] = resp
            start_line = yield self.bubble(dst, conn, body)
Example #23
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            # This call reads the header data asynchronously from the iostream
            # and returns a future that will be resolved with that data.
            # In HTTP, the headers and the body are separated by a blank line
            # (a CRLF pair, i.e. \r\n\r\n), so the regex below ensures we read
            # exactly up to the end of the headers.
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n", max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                # If a timeout is configured, wrap the future with
                # gen.with_timeout so the iostream must resolve it within the
                # allotted time; otherwise a timeout error is raised.
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() +
                        self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop,
                        quiet_exceptions=iostream.StreamClosedError)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)

            # Turn the header data into objects describing the message. The
            # start line has a different shape on the client and server sides,
            # so it is parsed according to which side of the connection we are:
            # the client sees a status-line, e.g. "HTTP/1.1 200 OK", while the
            # server sees a request-line, e.g. "GET / HTTP/1.1".
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            # Decide from the headers whether the connection can be kept alive.
            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True

            # Within this context, any exception raised is logged via app_log.
            with _ExceptionLoggingContext(app_log):
                # Hand the parsed headers to the delegate; on the server side
                # this is where the path in the start line is matched to the
                # Handler that will process the request.
                header_future = delegate.headers_received(start_line, headers)

                # In the streaming case, header_future will not be None.
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)

            skip_body = False

            if self.is_client:
                # On the client side, the headers and the HTTP status code
                # determine whether the body should be read at all.
                if (self._request_start_line is not None
                        and self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    # 304 responses may include the content-length header
                    # but do not actually have a body.
                    # http://tools.ietf.org/html/rfc7230#section-3.3
                    skip_body = True
                if code >= 100 and code < 200:
                    # 1xx responses should never indicate the presence of
                    # a body.
                    if ('Content-Length' in headers
                            or 'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    yield self._read_message(delegate)
            else:
                # On the server side there is never a need to skip the body.
                if (headers.get("Expect") == "100-continue" and
                        # Handle client requests whose headers include
                        # "Expect: 100-continue".
                        not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")

            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers,
                    delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() +
                                self._body_timeout,
                                body_future,
                                self.stream.io_loop,
                                quiet_exceptions=iostream.StreamClosedError)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                # Here the server invokes the matched handler to write the
                # response back to the requester. At this point the
                # _RequestDispatcher already holds the header and body data,
                # which can be lightly processed (e.g. parsing form data)
                # before being handed to the Handler.
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()

            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            if (not self._finish_future.done() and self.stream is not None
                    and not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
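
The server-side branch above answers HTTP/1.1 100 (Continue) when a request carries Expect: 100-continue. On the client side, Tornado's HTTPRequest can trigger that exchange through its expect_100_continue option (supported by the simple HTTP client); a hedged sketch with a hypothetical upload endpoint:

from tornado import httpclient, ioloop

async def upload():
    client = httpclient.AsyncHTTPClient()
    request = httpclient.HTTPRequest(
        "http://example.com/upload",   # hypothetical endpoint
        method="POST",
        body=b"x" * (1 << 20),
        # Send the headers first and wait for "HTTP/1.1 100 (Continue)"
        # before transmitting the body.
        expect_100_continue=True,
    )
    response = await client.fetch(request, raise_error=False)
    print(response.code)

ioloop.IOLoop.current().run_sync(upload)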
Example #24
 async def _read_message(self, delegate: httputil.HTTPMessageDelegate) -> bool:
     need_delegate_close = False
     try:
         header_future = self.stream.read_until_regex(
             b"\r?\n\r?\n", max_bytes=self.params.max_header_size
         )
         if self.params.header_timeout is None:
             header_data = await header_future
         else:
             try:
                 header_data = await gen.with_timeout(
                     self.stream.io_loop.time() + self.params.header_timeout,
                     header_future,
                     quiet_exceptions=iostream.StreamClosedError,
                 )
             except gen.TimeoutError:
                 self.close()
                 return False
         start_line_str, headers = self._parse_headers(header_data)
         if self.is_client:
             resp_start_line = httputil.parse_response_start_line(start_line_str)
             self._response_start_line = resp_start_line
             start_line = (
                 resp_start_line
             )  # type: Union[httputil.RequestStartLine, httputil.ResponseStartLine]
             # TODO: this will need to change to support client-side keepalive
             self._disconnect_on_finish = False
         else:
             req_start_line = httputil.parse_request_start_line(start_line_str)
             self._request_start_line = req_start_line
             self._request_headers = headers
             start_line = req_start_line
             self._disconnect_on_finish = not self._can_keep_alive(
                 req_start_line, headers
             )
         need_delegate_close = True
         with _ExceptionLoggingContext(app_log):
             header_recv_future = delegate.headers_received(start_line, headers)
             if header_recv_future is not None:
                 await header_recv_future
         if self.stream is None:
             # We've been detached.
             need_delegate_close = False
             return False
         skip_body = False
         if self.is_client:
             assert isinstance(start_line, httputil.ResponseStartLine)
             if (
                 self._request_start_line is not None
                 and self._request_start_line.method == "HEAD"
             ):
                 skip_body = True
             code = start_line.code
             if code == 304:
                 # 304 responses may include the content-length header
                 # but do not actually have a body.
                 # http://tools.ietf.org/html/rfc7230#section-3.3
                 skip_body = True
             if 100 <= code < 200:
                 # 1xx responses should never indicate the presence of
                 # a body.
                 if "Content-Length" in headers or "Transfer-Encoding" in headers:
                     raise httputil.HTTPInputError(
                         "Response code %d cannot have body" % code
                     )
                 # TODO: client delegates will get headers_received twice
                 # in the case of a 100-continue.  Document or change?
                 await self._read_message(delegate)
         else:
             if headers.get("Expect") == "100-continue" and not self._write_finished:
                 self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
         if not skip_body:
             body_future = self._read_body(
                 resp_start_line.code if self.is_client else 0, headers, delegate
             )
             if body_future is not None:
                 if self._body_timeout is None:
                     await body_future
                 else:
                     try:
                         await gen.with_timeout(
                             self.stream.io_loop.time() + self._body_timeout,
                             body_future,
                             quiet_exceptions=iostream.StreamClosedError,
                         )
                     except gen.TimeoutError:
                         gen_log.info("Timeout reading body from %s", self.context)
                         self.stream.close()
                         return False
         self._read_finished = True
         if not self._write_finished or self.is_client:
             need_delegate_close = False
             with _ExceptionLoggingContext(app_log):
                 delegate.finish()
         # If we're waiting for the application to produce an asynchronous
         # response, and we're not detached, register a close callback
         # on the stream (we didn't need one while we were reading)
         if (
             not self._finish_future.done()
             and self.stream is not None
             and not self.stream.closed()
         ):
             self.stream.set_close_callback(self._on_connection_close)
             await self._finish_future
         if self.is_client and self._disconnect_on_finish:
             self.close()
         if self.stream is None:
             return False
     except httputil.HTTPInputError as e:
         gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
         if not self.is_client:
             await self.stream.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
         self.close()
         return False
     finally:
         if need_delegate_close:
             with _ExceptionLoggingContext(app_log):
                 delegate.on_connection_close()
         header_future = None  # type: ignore
         self._clear_callbacks()
     return True
Example #25
 def _handle_response(self, data):
     try:
         response = httputil.parse_response_start_line(data)
         Log.d(TAG, b"_handle_response %r" % response)
     except httputil.HTTPInputError as e:
         Log.e(TAG, b"_handle_response error", e)
Example #26
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                        b"\r?\n\r?\n",
                        max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() + self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            header_future = delegate.headers_received(start_line, headers)
            if header_future is not None:
                yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None and
                    self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    skip_body = True
                if code >= 100 and code < 200:
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue" and
                    not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(headers, delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() + self._body_timeout,
                                body_future, self.stream.io_loop)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                delegate.finish()
            yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputException as e:
            gen_log.info("Malformed HTTP message from %s: %s",
                         self.context, e)
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
Example #27
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            # The headers and the body are separated by a blank line.
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n",
                max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() + self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            # Parse the header block, separating the header fields from the
            # start line (request-line / status-line).
            start_line, headers = self._parse_headers(header_data)
            # As a client we parse the server's response; as a server we parse
            # the client's request. Their start lines differ:
            # 1. a response's status-line: HTTP-Version SP Status-Code SP Reason-Phrase CRLF
            # 2. a request's request-line: Method SP Request-URI SP HTTP-Version CRLF
            # start_line is a namedtuple.
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            # For non-keep-alive requests or responses, close the connection
            # once processing is complete.
            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext(app_log):
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    # If header_future is a `Future`, wait for it to complete
                    # before reading the body.
                    yield header_future
            # websocket ???
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                # As a client, if we issued a HEAD request the server response
                # should have no body.
                if (self._request_start_line is not None and
                        self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    # 304 responses may include the content-length header
                    # but do not actually have a body.
                    # http://tools.ietf.org/html/rfc7230#section-3.3
                    skip_body = True
                if code >= 100 and code < 200:
                    # 1xx responses should never indicate the presence of
                    # a body.
                    if ('Content-Length' in headers or
                        'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    yield self._read_message(delegate)
            else:
                # The 100-continue status code exists in HTTP/1.1 to improve
                # transfer efficiency. When a client wants to POST a large body
                # it can include "Expect: 100-continue" in the request headers;
                # if the server accepts, it answers ``HTTP/1.1 100 (Continue)``
                # and the client then sends the request body, otherwise it
                # answers ``HTTP/1.1 417 Expectation Failed`` and the client
                # abandons sending the rest of the data. (Note: the Expect
                # header field, which states special server behavior required
                # by the client, uses an extensible syntax.)
                if (headers.get("Expect") == "100-continue" and
                        not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers, delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() + self._body_timeout,
                                body_future, self.stream.io_loop)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            # In client mode it is appropriate to call
            # HTTPMessageDelegate.finish() as soon as the response has been
            # parsed. In server mode, _write_finished indicates whether the
            # response has been fully sent; finish() is called before that so
            # the delegate can produce the response to the request.
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()
            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            #
            # NOTE:_finish_future resolves when all data has been written and flushed
            # to the IOStream.
            #
            # Suspend here until the asynchronous response completes and all
            # data has been written to the fd; the caller normally resolves
            # `_finish_future` by calling `finish` (see the `finish` and
            # `_finish_request` implementations for details).
            if (not self._finish_future.done() and
                    self.stream is not None and
                    not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            # In client mode, disconnect after handling the response unless the
            # connection is keep-alive. In server mode, the connection is only
            # closed once the response has completed; see _finish_request and
            # finish for details.
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s",
                         self.context, e)
            self.close()
            raise gen.Return(False)
        finally:
            # If the request could not be finished (HTTPMessageDelegate.finish())
            # before the connection was closed, call
            # HTTPMessageDelegate.on_connection_close().
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
Example #28
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n", max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() +
                        self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            header_future = delegate.headers_received(start_line, headers)
            if header_future is not None:
                yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None
                        and self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    skip_body = True
                if code >= 100 and code < 200:
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue"
                        and not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(headers, delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() +
                                self._body_timeout, body_future,
                                self.stream.io_loop)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                delegate.finish()
            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            if (not self._finish_future.done() and self.stream is not None
                    and not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
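
Both the header read and the body read in these examples are guarded by the same pattern: an absolute deadline computed as io_loop.time() + timeout, passed to gen.with_timeout around the read future. A standalone sketch of that pattern (fetch_line, host and port are illustrative names, not part of the snippets):

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.tcpclient import TCPClient


@gen.coroutine
def fetch_line(host, port, timeout=5.0):
    # Connect, then read a single line, giving up after `timeout` seconds.
    stream = yield TCPClient().connect(host, port)
    try:
        read_future = stream.read_until(b"\n")
        # Absolute deadline, exactly as in the snippets above.
        line = yield gen.with_timeout(IOLoop.current().time() + timeout,
                                      read_future)
    except gen.TimeoutError:
        stream.close()
        raise gen.Return(None)
    raise gen.Return(line)

The quiet_exceptions=iostream.StreamClosedError argument that appears in the later snippets only suppresses the error log entry produced when the abandoned future eventually fails with that exception; it does not change the control flow.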
Exemplo n.º 29
0
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n", max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() +
                        self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop,
                        quiet_exceptions=iostream.StreamClosedError)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext(app_log):
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None
                        and self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    # 304 responses may include the content-length header
                    # but do not actually have a body.
                    # http://tools.ietf.org/html/rfc7230#section-3.3
                    skip_body = True
                if code >= 100 and code < 200:
                    # 1xx responses should never indicate the presence of
                    # a body.
                    if ('Content-Length' in headers
                            or 'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue"
                        and not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers,
                    delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() +
                                self._body_timeout,
                                body_future,
                                self.stream.io_loop,
                                quiet_exceptions=iostream.StreamClosedError)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()
            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            if (not self._finish_future.done() and self.stream is not None
                    and not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
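
The client-side skip_body logic above encodes three protocol rules: responses to HEAD requests, 304 responses, and 1xx interim responses never carry a body, and a 1xx response that advertises one via Content-Length or Transfer-Encoding is malformed. Pulled out into a standalone helper (a sketch, not part of the original code), the same rules read:

from tornado import httputil


def response_has_body(method, code, headers):
    # Mirrors the skip_body decisions made in the snippets above.
    if method == 'HEAD':
        return False
    if code == 304:
        # 304 may include Content-Length yet carries no body (RFC 7230, 3.3).
        return False
    if 100 <= code < 200:
        if 'Content-Length' in headers or 'Transfer-Encoding' in headers:
            raise httputil.HTTPInputError(
                "Response code %d cannot have body" % code)
        return False
    return True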
Exemplo n.º 30
0
    def _read_message(self, delegate):
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n", max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() +
                        self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop,
                        quiet_exceptions=iostream.StreamClosedError)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext(app_log):
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None
                        and self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    skip_body = True
                if code >= 100 and code < 200:
                    if ('Content-Length' in headers
                            or 'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue"
                        and not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers,
                    delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() +
                                self._body_timeout,
                                body_future,
                                self.stream.io_loop,
                                quiet_exceptions=iostream.StreamClosedError)
                        except gen.TimeoutError:
                            print("Timeout reading body from %s" %
                                  self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()
            if (not self._finish_future.done() and self.stream is not None
                    and not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            print("Malformed HTTP message from %s: %s", (self.context, e))
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)
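
Finally, the True/False result these coroutines return via gen.Return tells the caller whether the connection is still usable for another message. A hypothetical driver loop illustrating that contract (connection and delegate are assumptions here: an object exposing a _read_message coroutine like the ones above, and an httputil.HTTPMessageDelegate):

from tornado import gen


@gen.coroutine
def read_messages(connection, delegate):
    # Keep reading messages on the same connection until _read_message
    # reports it cannot be reused (timeout, malformed input, detach/close,
    # or a non keep-alive exchange).
    while True:
        reuse = yield connection._read_message(delegate)
        if not reuse:
            break

In Tornado itself this loop is normally hidden behind higher-level wrappers (read_response on the client side, a server-side request loop on the server side), so application code rarely calls _read_message directly.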