Example #1
 def resolve(
         self,
         host: str,
         port: int,
         family: int = 0) -> "Generator[Any, Any, List[Tuple[int, Any]]]":
     if is_valid_ip(host):
         addresses = [host]
     else:
         # gethostbyname doesn't take callback as a kwarg
         fut = Future()  # type: Future[Tuple[Any, Any]]
         self.channel.gethostbyname(
             host, family, lambda result, error: fut.set_result(
                 (result, error)))
         result, error = yield fut
         if error:
             raise IOError(
                 "C-Ares returned error %s: %s while resolving %s" %
                 (error, pycares.errno.strerror(error), host))
         addresses = result.addresses
     addrinfo = []
     for address in addresses:
         if "." in address:
             address_family = socket.AF_INET
         elif ":" in address:
             address_family = socket.AF_INET6
         else:
             address_family = socket.AF_UNSPEC
         if family != socket.AF_UNSPEC and family != address_family:
             raise IOError("Requested socket family %d but got %d" %
                           (family, address_family))
         addrinfo.append((typing.cast(int,
                                      address_family), (address, port)))
     return addrinfo
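For context, a minimal usage sketch of the public resolver interface that backs implementations like this one, via `tornado.netutil.Resolver` (the hostname and port below are placeholders):

import socket

from tornado.ioloop import IOLoop
from tornado.netutil import Resolver


async def lookup() -> None:
    # Resolver() returns whichever implementation is currently configured
    # (threaded default, c-ares, Twisted, ...).
    resolver = Resolver()
    # resolve() produces a list of (family, address) pairs, the same
    # addrinfo shape assembled by the example above.
    addrinfo = await resolver.resolve("localhost", 80, socket.AF_UNSPEC)
    for family, address in addrinfo:
        print(family, address)


if __name__ == "__main__":
    IOLoop.current().run_sync(lookup)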
Example #2
    def get(
        self,
        timeout: Optional[Union[float, datetime.timedelta]] = None
    ) -> Awaitable[_T]:
        """Remove and return an item from the queue.

        Returns an awaitable which resolves once an item is available, or raises
        `tornado.util.TimeoutError` after a timeout.

        ``timeout`` may be a number denoting a time (on the same
        scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a
        `datetime.timedelta` object for a deadline relative to the
        current time.

        .. note::

           The ``timeout`` argument of this method differs from that
           of the standard library's `queue.Queue.get`. That method
           interprets numeric values as relative timeouts; this one
           interprets them as absolute deadlines and requires
           ``timedelta`` objects for relative timeouts (consistent
           with other timeouts in Tornado).

        """
        future = Future()  # type: Future[_T]
        try:
            future.set_result(self.get_nowait())
        except QueueEmpty:
            self._getters.append(future)
            _set_timeout(future, timeout)
        return future
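A small sketch of the timeout behaviour described above, assuming a `tornado.queues.Queue` and a relative `datetime.timedelta` deadline:

import datetime

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.queues import Queue


async def consume() -> None:
    q = Queue()
    try:
        # The timedelta is interpreted as a deadline relative to now.
        item = await q.get(timeout=datetime.timedelta(seconds=0.1))
        print("got", item)
    except gen.TimeoutError:
        print("queue stayed empty past the deadline")


if __name__ == "__main__":
    IOLoop.current().run_sync(consume)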
Example #3
 def handle_exception(self, typ: Type[Exception], value: Exception,
                      tb: types.TracebackType) -> bool:
     if not self.running and not self.finished:
         self.future = Future()
         future_set_exc_info(self.future, (typ, value, tb))
         self.ctx_run(self.run)
         return True
     else:
         return False
Example #4
 def error_callback(future: Future) -> None:
     try:
         future.result()
     except asyncio.CancelledError:
         pass
     except Exception as e:
         if not isinstance(e, quiet_exceptions):
             app_log.error(
                 "Exception in Future %r after timeout", future, exc_info=True
             )
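This callback belongs to `gen.with_timeout`, whose ``quiet_exceptions`` argument controls which late errors get logged here. A minimal sketch of that interaction (the wrapped operation is just a placeholder sleep):

import datetime

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError


async def main() -> None:
    try:
        # If the wrapped future later fails with StreamClosedError, the
        # error_callback above skips logging it because that type is
        # listed in quiet_exceptions.
        await gen.with_timeout(
            datetime.timedelta(seconds=0.1),
            gen.sleep(10),
            quiet_exceptions=StreamClosedError,
        )
    except gen.TimeoutError:
        print("timed out")


if __name__ == "__main__":
    IOLoop.current().run_sync(main)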
Example #5
    def next(self) -> Future:
        """Returns a `.Future` that will yield the next available result.

        Note that this `.Future` will not be the same object as any of
        the inputs.
        """
        self._running_future = Future()

        if self._finished:
            self._return_result(self._finished.popleft())

        return self._running_future
Example #6
def _set_timeout(future: Future, timeout: Union[None, float,
                                                datetime.timedelta]) -> None:
    if timeout:

        def on_timeout() -> None:
            if not future.done():
                future.set_exception(gen.TimeoutError())

        io_loop = ioloop.IOLoop.current()
        timeout_handle = io_loop.add_timeout(timeout, on_timeout)
        future.add_done_callback(
            lambda _: io_loop.remove_timeout(timeout_handle))
Example #7
 def __init__(
     self,
     stream: iostream.IOStream,
     is_client: bool,
     params: Optional[HTTP1ConnectionParameters] = None,
     context: Optional[object] = None,
 ) -> None:
     """
     :arg stream: an `.IOStream`
     :arg bool is_client: client or server
     :arg params: a `.HTTP1ConnectionParameters` instance or ``None``
     :arg context: an opaque application-defined object that can be accessed
         as ``connection.context``.
     """
     self.is_client = is_client
     self.stream = stream
     if params is None:
         params = HTTP1ConnectionParameters()
     self.params = params
     self.context = context
     self.no_keep_alive = params.no_keep_alive
     # The body limits can be altered by the delegate, so save them
     # here instead of just referencing self.params later.
     self._max_body_size = self.params.max_body_size or self.stream.max_buffer_size
     self._body_timeout = self.params.body_timeout
     # _write_finished is set to True when finish() has been called,
     # i.e. there will be no more data sent.  Data may still be in the
     # stream's write buffer.
     self._write_finished = False
     # True when we have read the entire incoming body.
     self._read_finished = False
     # _finish_future resolves when all data has been written and flushed
     # to the IOStream.
     self._finish_future = Future()  # type: Future[None]
     # If true, the connection should be closed after this request
     # (after the response has been written on the server side,
     # and after it has been read on the client side)
     self._disconnect_on_finish = False
     self._clear_callbacks()
     # Save the start lines after we read or write them; they
     # affect later processing (e.g. 304 responses and HEAD methods
     # have content-length but no bodies)
     self._request_start_line = None  # type: Optional[httputil.RequestStartLine]
     self._response_start_line = None  # type: Optional[httputil.ResponseStartLine]
     self._request_headers = None  # type: Optional[httputil.HTTPHeaders]
     # True if we are writing output with chunked encoding.
     self._chunking_output = False
     # While reading a body with a content-length, this is the
     # amount left to read.
     self._expected_content_remaining = None  # type: Optional[int]
     # A Future for our outgoing writes, returned by IOStream.write.
     self._pending_write = None  # type: Optional[Future[None]]
Example #8
    def wait(
        self,
        timeout: Optional[Union[float, datetime.timedelta]] = None
    ) -> Awaitable[None]:
        """Block until the internal flag is true.

        Returns an awaitable, which raises `tornado.util.TimeoutError` after a
        timeout.
        """
        fut = Future()  # type: Future[None]
        if self._value:
            fut.set_result(None)
            return fut
        self._waiters.add(fut)
        fut.add_done_callback(lambda fut: self._waiters.remove(fut))
        if timeout is None:
            return fut
        else:
            timeout_fut = gen.with_timeout(timeout, fut)
            # This is a slightly clumsy workaround for the fact that
            # gen.with_timeout doesn't cancel its futures. Cancelling
            # fut will remove it from the waiters list.
            timeout_fut.add_done_callback(lambda tf: fut.cancel()
                                          if not fut.done() else None)
            return timeout_fut
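A usage sketch of the timeout path described in the docstring, assuming a `tornado.locks.Event`:

import datetime

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.locks import Event


async def main() -> None:
    event = Event()
    # set() resolves the waiter; comment this out to exercise the timeout.
    IOLoop.current().call_later(0.05, event.set)
    try:
        await event.wait(timeout=datetime.timedelta(seconds=1))
        print("event was set")
    except gen.TimeoutError:
        print("timed out waiting for the event")


if __name__ == "__main__":
    IOLoop.current().run_sync(main)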
Example #9
 def resolve(
         self,
         host: str,
         port: int,
         family: int = 0) -> "Generator[Any, Any, List[Tuple[int, Any]]]":
     # getHostByName doesn't accept IP addresses, so if the input
     # looks like an IP address just return it immediately.
     if twisted.internet.abstract.isIPAddress(host):
         resolved = host
         resolved_family = socket.AF_INET
     elif twisted.internet.abstract.isIPv6Address(host):
         resolved = host
         resolved_family = socket.AF_INET6
     else:
         deferred = self.resolver.getHostByName(utf8(host))
         fut = Future()  # type: Future[Any]
         deferred.addBoth(fut.set_result)
         resolved = yield fut
         if isinstance(resolved, failure.Failure):
             try:
                 resolved.raiseException()
             except twisted.names.error.DomainError as e:
                 raise IOError(e)
         elif twisted.internet.abstract.isIPAddress(resolved):
             resolved_family = socket.AF_INET
         elif twisted.internet.abstract.isIPv6Address(resolved):
             resolved_family = socket.AF_INET6
         else:
             resolved_family = socket.AF_UNSPEC
     if family != socket.AF_UNSPEC and family != resolved_family:
         raise Exception("Requested socket family %d but got %d" %
                         (family, resolved_family))
     result = [(typing.cast(int, resolved_family), (resolved, port))]
     return result
Example #10
    def run_in_executor(
        self,
        executor: Optional[concurrent.futures.Executor],
        func: Callable[..., _T],
        *args: Any
    ) -> Awaitable[_T]:
        """Runs a function in a ``concurrent.futures.Executor``. If
        ``executor`` is ``None``, the IO loop's default executor will be used.

        Use `functools.partial` to pass keyword arguments to ``func``.

        .. versionadded:: 5.0
        """
        if executor is None:
            if not hasattr(self, "_executor"):
                from tornado_py3.process import cpu_count

                self._executor = concurrent.futures.ThreadPoolExecutor(
                    max_workers=(cpu_count() * 5)
                )  # type: concurrent.futures.Executor
            executor = self._executor
        c_future = executor.submit(func, *args)
        # Concurrent Futures are not usable with await. Wrap this in a
        # Tornado Future instead, using self.add_future for thread-safety.
        t_future = Future()  # type: Future[_T]
        self.add_future(c_future, lambda f: chain_future(f, t_future))
        return t_future
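The code above comes from a vendored copy (``tornado_py3``); the public API is `tornado.ioloop.IOLoop.run_in_executor` with the same signature. A minimal sketch of calling it with the default executor:

import hashlib

from tornado.ioloop import IOLoop


def blocking_digest(data: bytes) -> str:
    # Stands in for CPU-bound or blocking work that should stay off the loop.
    return hashlib.sha256(data).hexdigest()


async def main() -> None:
    # Passing None selects the loop's default ThreadPoolExecutor,
    # as shown in the code above.
    digest = await IOLoop.current().run_in_executor(None, blocking_digest, b"payload")
    print(digest)


if __name__ == "__main__":
    IOLoop.current().run_sync(main)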
Example #11
    def wait_for_exit(self, raise_error: bool = True) -> "Future[int]":
        """Returns a `.Future` which resolves when the process exits.

        Usage::

            ret = yield proc.wait_for_exit()

        This is a coroutine-friendly alternative to `set_exit_callback`
        (and a replacement for the blocking `subprocess.Popen.wait`).

        By default, raises `subprocess.CalledProcessError` if the process
        has a non-zero exit status. Use ``wait_for_exit(raise_error=False)``
        to suppress this behavior and return the exit status without raising.

        .. versionadded:: 4.2

        Availability: Unix
        """
        future = Future()  # type: Future[int]

        def callback(ret: int) -> None:
            if ret != 0 and raise_error:
                # Unfortunately we don't have the original args any more.
                future_set_exception_unless_cancelled(
                    future, CalledProcessError(ret, "unknown")
                )
            else:
                future_set_result_unless_cancelled(future, ret)

        self.set_exit_callback(callback)
        return future
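A usage sketch of the coroutine-friendly interface from the docstring, assuming a Unix system with the ``true`` binary on ``PATH``:

from tornado.ioloop import IOLoop
from tornado.process import Subprocess


async def main() -> None:
    proc = Subprocess(["true"])
    # raise_error=False returns the exit status instead of raising
    # CalledProcessError for non-zero codes.
    ret = await proc.wait_for_exit(raise_error=False)
    print("exit status:", ret)


if __name__ == "__main__":
    IOLoop.current().run_sync(main)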
Example #12
    def fetch(
        self,
        request: Union[str, "HTTPRequest"],
        raise_error: bool = True,
        **kwargs: Any
    ) -> Awaitable["HTTPResponse"]:
        """Executes a request, asynchronously returning an `HTTPResponse`.

        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``

        This method returns a `.Future` whose result is an
        `HTTPResponse`. By default, the ``Future`` will raise an
        `HTTPError` if the request returned a non-200 response code
        (other errors may also be raised if the server could not be
        contacted). Instead, if ``raise_error`` is set to False, the
        response will always be returned regardless of the response
        code.

        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.

        .. versionchanged:: 6.0

           The ``callback`` argument was removed. Use the returned
           `.Future` instead.

           The ``raise_error=False`` argument only affects the
           `HTTPError` raised when a non-200 response code is used,
           instead of suppressing all errors.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        else:
            if kwargs:
                raise ValueError(
                    "kwargs can't be used if request is an HTTPRequest object"
                )
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object.  This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request_proxy = _RequestProxy(request, self.defaults)
        future = Future()  # type: Future[HTTPResponse]

        def handle_response(response: "HTTPResponse") -> None:
            if response.error:
                if raise_error or not response._error_is_response_code:
                    future_set_exception_unless_cancelled(future, response.error)
                    return
            future_set_result_unless_cancelled(future, response)

        self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response)
        return future
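A usage sketch of the error-handling contract described in the docstring (the URL is a placeholder):

from tornado.httpclient import AsyncHTTPClient, HTTPError
from tornado.ioloop import IOLoop


async def main() -> None:
    client = AsyncHTTPClient()
    try:
        # With the default raise_error=True, a non-200 status raises HTTPError.
        response = await client.fetch("http://example.com/")
        print(response.code, len(response.body))
    except HTTPError as e:
        print("request failed:", e)


if __name__ == "__main__":
    IOLoop.current().run_sync(main)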
Example #13
    def __init__(self, *args: Future, **kwargs: Future) -> None:
        if args and kwargs:
            raise ValueError("You must provide args or kwargs, not both")

        if kwargs:
            self._unfinished = dict((f, k) for (k, f) in kwargs.items())
            futures = list(kwargs.values())  # type: Sequence[Future]
        else:
            self._unfinished = dict((f, i) for (i, f) in enumerate(args))
            futures = args

        self._finished = collections.deque()  # type: Deque[Future]
        self.current_index = None  # type: Optional[Union[str, int]]
        self.current_future = None  # type: Optional[Future]
        self._running_future = None  # type: Optional[Future]

        for future in futures:
            future_add_done_callback(future, self._done_callback)
Example #14
    def write(self, chunk: bytes) -> "Future[None]":
        """Implements `.HTTPConnection.write`.

        For backwards compatibility it is allowed but deprecated to
        skip `write_headers` and instead call `write()` with a
        pre-encoded header block.
        """
        future = None
        if self.stream.closed():
            future = self._write_future = Future()
            self._write_future.set_exception(iostream.StreamClosedError())
            self._write_future.exception()
        else:
            future = self._write_future = Future()
            self._pending_write = self.stream.write(self._format_chunk(chunk))
            future_add_done_callback(self._pending_write,
                                     self._on_write_complete)
        return future
Example #15
    def acquire(
        self,
        timeout: Optional[Union[float, datetime.timedelta]] = None
    ) -> Awaitable[_ReleasingContextManager]:
        """Decrement the counter. Returns an awaitable.

        Block if the counter is zero and wait for a `.release`. The awaitable
        raises `.TimeoutError` after the deadline.
        """
        waiter = Future()  # type: Future[_ReleasingContextManager]
        if self._value > 0:
            self._value -= 1
            waiter.set_result(_ReleasingContextManager(self))
        else:
            self._waiters.append(waiter)
            if timeout:

                def on_timeout() -> None:
                    if not waiter.done():
                        waiter.set_exception(gen.TimeoutError())
                    self._garbage_collect()

                io_loop = ioloop.IOLoop.current()
                timeout_handle = io_loop.add_timeout(timeout, on_timeout)
                waiter.add_done_callback(
                    lambda _: io_loop.remove_timeout(timeout_handle))
        return waiter
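A small sketch of the blocking-and-timeout behaviour described in the docstring, using `tornado.locks.Semaphore`:

import datetime

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.locks import Semaphore


async def main() -> None:
    sem = Semaphore(1)
    await sem.acquire()  # counter drops to zero
    try:
        # The counter is zero and nobody releases, so the deadline expires.
        await sem.acquire(timeout=datetime.timedelta(seconds=0.1))
    except gen.TimeoutError:
        print("semaphore was not released in time")
    finally:
        sem.release()


if __name__ == "__main__":
    IOLoop.current().run_sync(main)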
Example #16
def _create_future() -> Future:
    future = Future()  # type: Future
    # Fixup asyncio debug info by removing extraneous stack entries
    source_traceback = getattr(future, "_source_traceback", ())
    while source_traceback:
        # Each traceback entry is equivalent to a
        # (filename, self.lineno, self.name, self.line) tuple
        filename = source_traceback[-1][0]
        if filename == __file__:
            del source_traceback[-1]
        else:
            break
    return future
Example #17
    def _(d: Deferred) -> Future:
        f = Future()  # type: Future[Any]

        def errback(failure: failure.Failure) -> None:
            try:
                failure.raiseException()
                # Should never happen, but just in case
                raise Exception("errback called without error")
            except:
                future_set_exc_info(f, sys.exc_info())

        d.addCallbacks(f.set_result, errback)
        return f
Example #18
    def handle_yield(self, yielded: _Yieldable) -> bool:
        try:
            self.future = convert_yielded(yielded)
        except BadYieldError:
            self.future = Future()
            future_set_exc_info(self.future, sys.exc_info())

        if self.future is moment:
            self.io_loop.add_callback(self.run)
            return False
        elif self.future is None:
            raise Exception("no pending future")
        elif not self.future.done():

            def inner(f: Any) -> None:
                # Break a reference cycle to speed GC.
                f = None  # noqa: F841
                self.run()

            self.io_loop.add_future(self.future, inner)
            return False
        return True
Example #19
    def put(
        self, item: _T, timeout: Optional[Union[float, datetime.timedelta]] = None
    ) -> "Future[None]":
        """Put an item into the queue, perhaps waiting until there is room.

        Returns a Future, which raises `tornado.util.TimeoutError` after a
        timeout.

        ``timeout`` may be a number denoting a time (on the same
        scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a
        `datetime.timedelta` object for a deadline relative to the
        current time.
        """
        future = Future()  # type: Future[None]
        try:
            self.put_nowait(item)
        except QueueFull:
            self._putters.append((item, future))
            _set_timeout(future, timeout)
        else:
            future.set_result(None)
        return future
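The mirror image of ``get``: a sketch of ``put`` timing out on a full queue, assuming a bounded `tornado.queues.Queue`:

import datetime

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.queues import Queue


async def main() -> None:
    q = Queue(maxsize=1)
    await q.put(1)  # fills the queue
    try:
        # The queue is full and no consumer runs, so this times out.
        await q.put(2, timeout=datetime.timedelta(seconds=0.1))
    except gen.TimeoutError:
        print("no room in the queue before the deadline")


if __name__ == "__main__":
    IOLoop.current().run_sync(main)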
Example #20
    def wait(
        self,
        timeout: Optional[Union[float, datetime.timedelta]] = None
    ) -> Awaitable[bool]:
        """Wait for `.notify`.

        Returns a `.Future` that resolves ``True`` if the condition is notified,
        or ``False`` after a timeout.
        """
        waiter = Future()  # type: Future[bool]
        self._waiters.append(waiter)
        if timeout:

            def on_timeout() -> None:
                if not waiter.done():
                    future_set_result_unless_cancelled(waiter, False)
                self._garbage_collect()

            io_loop = ioloop.IOLoop.current()
            timeout_handle = io_loop.add_timeout(timeout, on_timeout)
            waiter.add_done_callback(
                lambda _: io_loop.remove_timeout(timeout_handle))
        return waiter
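A sketch of the True/False result described in the docstring, using `tornado.locks.Condition`:

import datetime

from tornado.ioloop import IOLoop
from tornado.locks import Condition


async def main() -> None:
    condition = Condition()
    # Notify shortly after we start waiting; without this call wait()
    # would resolve to False once the timeout fires.
    IOLoop.current().call_later(0.05, condition.notify)
    notified = await condition.wait(timeout=datetime.timedelta(seconds=1))
    print("notified" if notified else "timed out")


if __name__ == "__main__":
    IOLoop.current().run_sync(main)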
Example #21
 def _create_stream(
     self,
     max_buffer_size: int,
     af: socket.AddressFamily,
     addr: Tuple,
     source_ip: Optional[str] = None,
     source_port: Optional[int] = None,
 ) -> Tuple[IOStream, "Future[IOStream]"]:
     # Always connect in plaintext; we'll convert to ssl if necessary
     # after one connection has completed.
     source_port_bind = source_port if isinstance(source_port, int) else 0
     source_ip_bind = source_ip
     if source_port_bind and not source_ip:
         # The user requested a specific port but no source IP,
         # so bind to the default loopback address.
         source_ip_bind = "::1" if af == socket.AF_INET6 else "127.0.0.1"
         # Trying to use the same address family as the requested af socket:
         # - 127.0.0.1 for IPv4
         # - ::1 for IPv6
     socket_obj = socket.socket(af)
     set_close_exec(socket_obj.fileno())
     if source_port_bind or source_ip_bind:
         # If the user requires binding also to a specific IP/port.
         try:
             socket_obj.bind((source_ip_bind, source_port_bind))
         except socket.error:
             socket_obj.close()
             # Fail loudly if unable to use the IP/port.
             raise
     try:
         stream = IOStream(socket_obj, max_buffer_size=max_buffer_size)
     except socket.error as e:
         fu = Future()  # type: Future[IOStream]
         fu.set_exception(e)
         return stream, fu
     else:
         return stream, stream.connect(addr)
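This helper is driven by `tornado.tcpclient.TCPClient.connect`, which is the public entry point. A hedged sketch of that call (host and port are placeholders):

from tornado.ioloop import IOLoop
from tornado.tcpclient import TCPClient


async def main() -> None:
    # connect() resolves the host, builds candidate streams via
    # _create_stream above, and returns the first one that connects.
    stream = await TCPClient().connect("example.com", 80)
    try:
        print("connected from", stream.socket.getsockname())
    finally:
        stream.close()


if __name__ == "__main__":
    IOLoop.current().run_sync(main)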
Example #22
    def __init__(
        self,
        addrinfo: List[Tuple],
        connect: Callable[[socket.AddressFamily, Tuple],
                          Tuple[IOStream, "Future[IOStream]"]],
    ) -> None:
        self.io_loop = IOLoop.current()
        self.connect = connect

        self.future = (
            Future()
        )  # type: Future[Tuple[socket.AddressFamily, Any, IOStream]]
        self.timeout = None  # type: Optional[object]
        self.connect_timeout = None  # type: Optional[object]
        self.last_error = None  # type: Optional[Exception]
        self.remaining = len(addrinfo)
        self.primary_addrs, self.secondary_addrs = self.split(addrinfo)
        self.streams = set()  # type: Set[IOStream]
Example #23
        def run() -> None:
            try:
                result = func()
                if result is not None:
                    from tornado_py3.gen import convert_yielded

                    result = convert_yielded(result)
            except Exception:
                fut = Future()  # type: Future[Any]
                future_cell[0] = fut
                future_set_exc_info(fut, sys.exc_info())
            else:
                if is_future(result):
                    future_cell[0] = result
                else:
                    fut = Future()
                    future_cell[0] = fut
                    fut.set_result(result)
            assert future_cell[0] is not None
            self.add_future(future_cell[0], lambda future: self.stop())
Example #24
 def write_headers(
     self,
     start_line: Union[httputil.RequestStartLine,
                       httputil.ResponseStartLine],
     headers: httputil.HTTPHeaders,
     chunk: Optional[bytes] = None,
 ) -> "Future[None]":
     """Implements `.HTTPConnection.write_headers`."""
     lines = []
     if self.is_client:
         assert isinstance(start_line, httputil.RequestStartLine)
         self._request_start_line = start_line
         lines.append(
             utf8("%s %s HTTP/1.1" % (start_line[0], start_line[1])))
         # Client requests with a non-empty body must have either a
         # Content-Length or a Transfer-Encoding.
         self._chunking_output = (
             start_line.method in ("POST", "PUT", "PATCH")
             and "Content-Length" not in headers
             and ("Transfer-Encoding" not in headers
                  or headers["Transfer-Encoding"] == "chunked"))
     else:
         assert isinstance(start_line, httputil.ResponseStartLine)
         assert self._request_start_line is not None
         assert self._request_headers is not None
         self._response_start_line = start_line
         lines.append(
             utf8("HTTP/1.1 %d %s" % (start_line[1], start_line[2])))
         self._chunking_output = (
             # TODO: should this use
             # self._request_start_line.version or
             # start_line.version?
             self._request_start_line.version == "HTTP/1.1"
             # 1xx, 204 and 304 responses have no body (not even a zero-length
             # body), and so should not have either Content-Length or
             # Transfer-Encoding headers.
             and start_line.code not in (204, 304) and
             (start_line.code < 100 or start_line.code >= 200)
             # No need to chunk the output if a Content-Length is specified.
             and "Content-Length" not in headers
             # Applications are discouraged from touching Transfer-Encoding,
             # but if they do, leave it alone.
             and "Transfer-Encoding" not in headers)
         # If connection to a 1.1 client will be closed, inform client
         if (self._request_start_line.version == "HTTP/1.1"
                 and self._disconnect_on_finish):
             headers["Connection"] = "close"
         # If a 1.0 client asked for keep-alive, add the header.
         if (self._request_start_line.version == "HTTP/1.0"
                 and self._request_headers.get("Connection",
                                               "").lower() == "keep-alive"):
             headers["Connection"] = "Keep-Alive"
     if self._chunking_output:
         headers["Transfer-Encoding"] = "chunked"
     if not self.is_client and (self._request_start_line.method == "HEAD"
                                or cast(httputil.ResponseStartLine,
                                        start_line).code == 304):
         self._expected_content_remaining = 0
     elif "Content-Length" in headers:
         self._expected_content_remaining = int(headers["Content-Length"])
     else:
         self._expected_content_remaining = None
     # TODO: headers are supposed to be of type str, but we still have some
     # cases that let bytes slip through. Remove these native_str calls when those
     # are fixed.
     header_lines = (native_str(n) + ": " + native_str(v)
                     for n, v in headers.get_all())
     lines.extend(l.encode("latin1") for l in header_lines)
     for line in lines:
         if b"\n" in line:
             raise ValueError("Newline in header: " + repr(line))
     future = None
     if self.stream.closed():
         future = self._write_future = Future()
         future.set_exception(iostream.StreamClosedError())
         future.exception()
     else:
         future = self._write_future = Future()
         data = b"\r\n".join(lines) + b"\r\n\r\n"
         if chunk:
             data += self._format_chunk(chunk)
         self._pending_write = self.stream.write(data)
         future_add_done_callback(self._pending_write,
                                  self._on_write_complete)
     return future
Example #25
 def _discard_future_result(self, future: Future) -> None:
     """Avoid unhandled-exception warnings from spawned coroutines."""
     future.result()
Example #26
class HTTP1Connection(httputil.HTTPConnection):
    """Implements the HTTP/1.x protocol.

    This class can be used on its own for clients, or via `HTTP1ServerConnection`
    for servers.
    """
    def __init__(
        self,
        stream: iostream.IOStream,
        is_client: bool,
        params: Optional[HTTP1ConnectionParameters] = None,
        context: Optional[object] = None,
    ) -> None:
        """
        :arg stream: an `.IOStream`
        :arg bool is_client: client or server
        :arg params: a `.HTTP1ConnectionParameters` instance or ``None``
        :arg context: an opaque application-defined object that can be accessed
            as ``connection.context``.
        """
        self.is_client = is_client
        self.stream = stream
        if params is None:
            params = HTTP1ConnectionParameters()
        self.params = params
        self.context = context
        self.no_keep_alive = params.no_keep_alive
        # The body limits can be altered by the delegate, so save them
        # here instead of just referencing self.params later.
        self._max_body_size = self.params.max_body_size or self.stream.max_buffer_size
        self._body_timeout = self.params.body_timeout
        # _write_finished is set to True when finish() has been called,
        # i.e. there will be no more data sent.  Data may still be in the
        # stream's write buffer.
        self._write_finished = False
        # True when we have read the entire incoming body.
        self._read_finished = False
        # _finish_future resolves when all data has been written and flushed
        # to the IOStream.
        self._finish_future = Future()  # type: Future[None]
        # If true, the connection should be closed after this request
        # (after the response has been written on the server side,
        # and after it has been read on the client side)
        self._disconnect_on_finish = False
        self._clear_callbacks()
        # Save the start lines after we read or write them; they
        # affect later processing (e.g. 304 responses and HEAD methods
        # have content-length but no bodies)
        self._request_start_line = None  # type: Optional[httputil.RequestStartLine]
        self._response_start_line = None  # type: Optional[httputil.ResponseStartLine]
        self._request_headers = None  # type: Optional[httputil.HTTPHeaders]
        # True if we are writing output with chunked encoding.
        self._chunking_output = False
        # While reading a body with a content-length, this is the
        # amount left to read.
        self._expected_content_remaining = None  # type: Optional[int]
        # A Future for our outgoing writes, returned by IOStream.write.
        self._pending_write = None  # type: Optional[Future[None]]

    def read_response(
            self, delegate: httputil.HTTPMessageDelegate) -> Awaitable[bool]:
        """Read a single HTTP response.

        Typical client-mode usage is to write a request using `write_headers`,
        `write`, and `finish`, and then call ``read_response``.

        :arg delegate: a `.HTTPMessageDelegate`

        Returns a `.Future` that resolves to a bool after the full response has
        been read. The result is true if the stream is still open.
        """
        if self.params.decompress:
            delegate = _GzipMessageDelegate(delegate, self.params.chunk_size)
        return self._read_message(delegate)

    async def _read_message(self,
                            delegate: httputil.HTTPMessageDelegate) -> bool:
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n", max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = await header_future
            else:
                try:
                    header_data = await gen.with_timeout(
                        self.stream.io_loop.time() +
                        self.params.header_timeout,
                        header_future,
                        quiet_exceptions=iostream.StreamClosedError,
                    )
                except gen.TimeoutError:
                    self.close()
                    return False
            start_line_str, headers = self._parse_headers(header_data)
            if self.is_client:
                resp_start_line = httputil.parse_response_start_line(
                    start_line_str)
                self._response_start_line = resp_start_line
                start_line = (
                    resp_start_line
                )  # type: Union[httputil.RequestStartLine, httputil.ResponseStartLine]
                # TODO: this will need to change to support client-side keepalive
                self._disconnect_on_finish = False
            else:
                req_start_line = httputil.parse_request_start_line(
                    start_line_str)
                self._request_start_line = req_start_line
                self._request_headers = headers
                start_line = req_start_line
                self._disconnect_on_finish = not self._can_keep_alive(
                    req_start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext(app_log):
                header_recv_future = delegate.headers_received(
                    start_line, headers)
                if header_recv_future is not None:
                    await header_recv_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                return False
            skip_body = False
            if self.is_client:
                assert isinstance(start_line, httputil.ResponseStartLine)
                if (self._request_start_line is not None
                        and self._request_start_line.method == "HEAD"):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    # 304 responses may include the content-length header
                    # but do not actually have a body.
                    # http://tools.ietf.org/html/rfc7230#section-3.3
                    skip_body = True
                if code >= 100 and code < 200:
                    # 1xx responses should never indicate the presence of
                    # a body.
                    if "Content-Length" in headers or "Transfer-Encoding" in headers:
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue.  Document or change?
                    await self._read_message(delegate)
            else:
                if headers.get(
                        "Expect"
                ) == "100-continue" and not self._write_finished:
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    resp_start_line.code if self.is_client else 0, headers,
                    delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        await body_future
                    else:
                        try:
                            await gen.with_timeout(
                                self.stream.io_loop.time() +
                                self._body_timeout,
                                body_future,
                                quiet_exceptions=iostream.StreamClosedError,
                            )
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            return False
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()
            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            if (not self._finish_future.done() and self.stream is not None
                    and not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                await self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                return False
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
            if not self.is_client:
                await self.stream.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
            self.close()
            return False
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            header_future = None  # type: ignore
            self._clear_callbacks()
        return True

    def _clear_callbacks(self) -> None:
        """Clears the callback attributes.

        This allows the request handler to be garbage collected more
        quickly in CPython by breaking up reference cycles.
        """
        self._write_callback = None
        self._write_future = None  # type: Optional[Future[None]]
        self._close_callback = None  # type: Optional[Callable[[], None]]
        if self.stream is not None:
            self.stream.set_close_callback(None)

    def set_close_callback(self, callback: Optional[Callable[[],
                                                             None]]) -> None:
        """Sets a callback that will be run when the connection is closed.

        Note that this callback is slightly different from
        `.HTTPMessageDelegate.on_connection_close`: The
        `.HTTPMessageDelegate` method is called when the connection is
        closed while receiving a message. This callback is used when
        there is not an active delegate (for example, on the server
        side this callback is used if the client closes the connection
        after sending its request but before receiving the full
        response).
        """
        self._close_callback = callback

    def _on_connection_close(self) -> None:
        # Note that this callback is only registered on the IOStream
        # when we have finished reading the request and are waiting for
        # the application to produce its response.
        if self._close_callback is not None:
            callback = self._close_callback
            self._close_callback = None
            callback()
        if not self._finish_future.done():
            future_set_result_unless_cancelled(self._finish_future, None)
        self._clear_callbacks()

    def close(self) -> None:
        if self.stream is not None:
            self.stream.close()
        self._clear_callbacks()
        if not self._finish_future.done():
            future_set_result_unless_cancelled(self._finish_future, None)

    def detach(self) -> iostream.IOStream:
        """Take control of the underlying stream.

        Returns the underlying `.IOStream` object and stops all further
        HTTP processing.  May only be called during
        `.HTTPMessageDelegate.headers_received`.  Intended for implementing
        protocols like websockets that tunnel over an HTTP handshake.
        """
        self._clear_callbacks()
        stream = self.stream
        self.stream = None  # type: ignore
        if not self._finish_future.done():
            future_set_result_unless_cancelled(self._finish_future, None)
        return stream

    def set_body_timeout(self, timeout: float) -> None:
        """Sets the body timeout for a single request.

        Overrides the value from `.HTTP1ConnectionParameters`.
        """
        self._body_timeout = timeout

    def set_max_body_size(self, max_body_size: int) -> None:
        """Sets the body size limit for a single request.

        Overrides the value from `.HTTP1ConnectionParameters`.
        """
        self._max_body_size = max_body_size

    def write_headers(
        self,
        start_line: Union[httputil.RequestStartLine,
                          httputil.ResponseStartLine],
        headers: httputil.HTTPHeaders,
        chunk: Optional[bytes] = None,
    ) -> "Future[None]":
        """Implements `.HTTPConnection.write_headers`."""
        lines = []
        if self.is_client:
            assert isinstance(start_line, httputil.RequestStartLine)
            self._request_start_line = start_line
            lines.append(
                utf8("%s %s HTTP/1.1" % (start_line[0], start_line[1])))
            # Client requests with a non-empty body must have either a
            # Content-Length or a Transfer-Encoding.
            self._chunking_output = (
                start_line.method in ("POST", "PUT", "PATCH")
                and "Content-Length" not in headers
                and ("Transfer-Encoding" not in headers
                     or headers["Transfer-Encoding"] == "chunked"))
        else:
            assert isinstance(start_line, httputil.ResponseStartLine)
            assert self._request_start_line is not None
            assert self._request_headers is not None
            self._response_start_line = start_line
            lines.append(
                utf8("HTTP/1.1 %d %s" % (start_line[1], start_line[2])))
            self._chunking_output = (
                # TODO: should this use
                # self._request_start_line.version or
                # start_line.version?
                self._request_start_line.version == "HTTP/1.1"
                # 1xx, 204 and 304 responses have no body (not even a zero-length
                # body), and so should not have either Content-Length or
                # Transfer-Encoding headers.
                and start_line.code not in (204, 304) and
                (start_line.code < 100 or start_line.code >= 200)
                # No need to chunk the output if a Content-Length is specified.
                and "Content-Length" not in headers
                # Applications are discouraged from touching Transfer-Encoding,
                # but if they do, leave it alone.
                and "Transfer-Encoding" not in headers)
            # If connection to a 1.1 client will be closed, inform client
            if (self._request_start_line.version == "HTTP/1.1"
                    and self._disconnect_on_finish):
                headers["Connection"] = "close"
            # If a 1.0 client asked for keep-alive, add the header.
            if (self._request_start_line.version == "HTTP/1.0"
                    and self._request_headers.get("Connection",
                                                  "").lower() == "keep-alive"):
                headers["Connection"] = "Keep-Alive"
        if self._chunking_output:
            headers["Transfer-Encoding"] = "chunked"
        if not self.is_client and (self._request_start_line.method == "HEAD"
                                   or cast(httputil.ResponseStartLine,
                                           start_line).code == 304):
            self._expected_content_remaining = 0
        elif "Content-Length" in headers:
            self._expected_content_remaining = int(headers["Content-Length"])
        else:
            self._expected_content_remaining = None
        # TODO: headers are supposed to be of type str, but we still have some
        # cases that let bytes slip through. Remove these native_str calls when those
        # are fixed.
        header_lines = (native_str(n) + ": " + native_str(v)
                        for n, v in headers.get_all())
        lines.extend(l.encode("latin1") for l in header_lines)
        for line in lines:
            if b"\n" in line:
                raise ValueError("Newline in header: " + repr(line))
        future = None
        if self.stream.closed():
            future = self._write_future = Future()
            future.set_exception(iostream.StreamClosedError())
            future.exception()
        else:
            future = self._write_future = Future()
            data = b"\r\n".join(lines) + b"\r\n\r\n"
            if chunk:
                data += self._format_chunk(chunk)
            self._pending_write = self.stream.write(data)
            future_add_done_callback(self._pending_write,
                                     self._on_write_complete)
        return future

    def _format_chunk(self, chunk: bytes) -> bytes:
        if self._expected_content_remaining is not None:
            self._expected_content_remaining -= len(chunk)
            if self._expected_content_remaining < 0:
                # Close the stream now to stop further framing errors.
                self.stream.close()
                raise httputil.HTTPOutputError(
                    "Tried to write more data than Content-Length")
        if self._chunking_output and chunk:
            # Don't write out empty chunks because that means END-OF-STREAM
            # with chunked encoding
            return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n"
        else:
            return chunk

    def write(self, chunk: bytes) -> "Future[None]":
        """Implements `.HTTPConnection.write`.

        For backwards compatibility it is allowed but deprecated to
        skip `write_headers` and instead call `write()` with a
        pre-encoded header block.
        """
        future = None
        if self.stream.closed():
            future = self._write_future = Future()
            self._write_future.set_exception(iostream.StreamClosedError())
            self._write_future.exception()
        else:
            future = self._write_future = Future()
            self._pending_write = self.stream.write(self._format_chunk(chunk))
            future_add_done_callback(self._pending_write,
                                     self._on_write_complete)
        return future

    def finish(self) -> None:
        """Implements `.HTTPConnection.finish`."""
        if (self._expected_content_remaining is not None
                and self._expected_content_remaining != 0
                and not self.stream.closed()):
            self.stream.close()
            raise httputil.HTTPOutputError(
                "Tried to write %d bytes less than Content-Length" %
                self._expected_content_remaining)
        if self._chunking_output:
            if not self.stream.closed():
                self._pending_write = self.stream.write(b"0\r\n\r\n")
                self._pending_write.add_done_callback(self._on_write_complete)
        self._write_finished = True
        # If the app finished the request while we're still reading,
        # divert any remaining data away from the delegate and
        # close the connection when we're done sending our response.
        # Closing the connection is the only way to avoid reading the
        # whole input body.
        if not self._read_finished:
            self._disconnect_on_finish = True
        # No more data is coming, so instruct TCP to send any remaining
        # data immediately instead of waiting for a full packet or ack.
        self.stream.set_nodelay(True)
        if self._pending_write is None:
            self._finish_request(None)
        else:
            future_add_done_callback(self._pending_write, self._finish_request)

    def _on_write_complete(self, future: "Future[None]") -> None:
        exc = future.exception()
        if exc is not None and not isinstance(exc, iostream.StreamClosedError):
            future.result()
        if self._write_callback is not None:
            callback = self._write_callback
            self._write_callback = None
            self.stream.io_loop.add_callback(callback)
        if self._write_future is not None:
            future = self._write_future
            self._write_future = None
            future_set_result_unless_cancelled(future, None)

    def _can_keep_alive(self, start_line: httputil.RequestStartLine,
                        headers: httputil.HTTPHeaders) -> bool:
        if self.params.no_keep_alive:
            return False
        connection_header = headers.get("Connection")
        if connection_header is not None:
            connection_header = connection_header.lower()
        if start_line.version == "HTTP/1.1":
            return connection_header != "close"
        elif ("Content-Length" in headers
              or headers.get("Transfer-Encoding", "").lower() == "chunked"
              or getattr(start_line, "method", None) in ("HEAD", "GET")):
            # start_line may be a request or response start line; only
            # the former has a method attribute.
            return connection_header == "keep-alive"
        return False

    def _finish_request(self, future: "Optional[Future[None]]") -> None:
        self._clear_callbacks()
        if not self.is_client and self._disconnect_on_finish:
            self.close()
            return
        # Turn Nagle's algorithm back on, leaving the stream in its
        # default state for the next request.
        self.stream.set_nodelay(False)
        if not self._finish_future.done():
            future_set_result_unless_cancelled(self._finish_future, None)

    def _parse_headers(self, data: bytes) -> Tuple[str, httputil.HTTPHeaders]:
        # The lstrip removes newlines that some implementations sometimes
        # insert between messages of a reused connection.  Per RFC 7230,
        # we SHOULD ignore at least one empty line before the request.
        # http://tools.ietf.org/html/rfc7230#section-3.5
        data_str = native_str(data.decode("latin1")).lstrip("\r\n")
        # RFC 7230 allows for both CRLF and bare LF line endings.
        eol = data_str.find("\n")
        start_line = data_str[:eol].rstrip("\r")
        headers = httputil.HTTPHeaders.parse(data_str[eol:])
        return start_line, headers

    def _read_body(
        self,
        code: int,
        headers: httputil.HTTPHeaders,
        delegate: httputil.HTTPMessageDelegate,
    ) -> Optional[Awaitable[None]]:
        if "Content-Length" in headers:
            if "Transfer-Encoding" in headers:
                # Response cannot contain both Content-Length and
                # Transfer-Encoding headers.
                # http://tools.ietf.org/html/rfc7230#section-3.3.3
                raise httputil.HTTPInputError(
                    "Response with both Transfer-Encoding and Content-Length")
            if "," in headers["Content-Length"]:
                # Proxies sometimes cause Content-Length headers to get
                # duplicated.  If all the values are identical then we can
                # use them but if they differ it's an error.
                pieces = re.split(r",\s*", headers["Content-Length"])
                if any(i != pieces[0] for i in pieces):
                    raise httputil.HTTPInputError(
                        "Multiple unequal Content-Lengths: %r" %
                        headers["Content-Length"])
                headers["Content-Length"] = pieces[0]

            try:
                content_length = int(
                    headers["Content-Length"])  # type: Optional[int]
            except ValueError:
                # Handles non-integer Content-Length value.
                raise httputil.HTTPInputError(
                    "Only integer Content-Length is allowed: %s" %
                    headers["Content-Length"])

            if cast(int, content_length) > self._max_body_size:
                raise httputil.HTTPInputError("Content-Length too long")
        else:
            content_length = None

        if code == 204:
            # This response code is not allowed to have a non-empty body,
            # and has an implicit length of zero instead of read-until-close.
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
            if "Transfer-Encoding" in headers or content_length not in (None,
                                                                        0):
                raise httputil.HTTPInputError(
                    "Response with code %d should not have body" % code)
            content_length = 0

        if content_length is not None:
            return self._read_fixed_body(content_length, delegate)
        if headers.get("Transfer-Encoding", "").lower() == "chunked":
            return self._read_chunked_body(delegate)
        if self.is_client:
            return self._read_body_until_close(delegate)
        return None

    async def _read_fixed_body(self, content_length: int,
                               delegate: httputil.HTTPMessageDelegate) -> None:
        while content_length > 0:
            body = await self.stream.read_bytes(min(self.params.chunk_size,
                                                    content_length),
                                                partial=True)
            content_length -= len(body)
            if not self._write_finished or self.is_client:
                with _ExceptionLoggingContext(app_log):
                    ret = delegate.data_received(body)
                    if ret is not None:
                        await ret

    async def _read_chunked_body(
            self, delegate: httputil.HTTPMessageDelegate) -> None:
        # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1
        total_size = 0
        while True:
            chunk_len_str = await self.stream.read_until(b"\r\n", max_bytes=64)
            chunk_len = int(chunk_len_str.strip(), 16)
            if chunk_len == 0:
                crlf = await self.stream.read_bytes(2)
                if crlf != b"\r\n":
                    raise httputil.HTTPInputError(
                        "improperly terminated chunked request")
                return
            total_size += chunk_len
            if total_size > self._max_body_size:
                raise httputil.HTTPInputError("chunked body too large")
            bytes_to_read = chunk_len
            while bytes_to_read:
                chunk = await self.stream.read_bytes(min(
                    bytes_to_read, self.params.chunk_size),
                                                     partial=True)
                bytes_to_read -= len(chunk)
                if not self._write_finished or self.is_client:
                    with _ExceptionLoggingContext(app_log):
                        ret = delegate.data_received(chunk)
                        if ret is not None:
                            await ret
            # chunk ends with \r\n
            crlf = await self.stream.read_bytes(2)
            assert crlf == b"\r\n"

    async def _read_body_until_close(
            self, delegate: httputil.HTTPMessageDelegate) -> None:
        body = await self.stream.read_until_close()
        if not self._write_finished or self.is_client:
            with _ExceptionLoggingContext(app_log):
                ret = delegate.data_received(body)
                if ret is not None:
                    await ret
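A client-mode sketch following the flow named in the ``read_response`` docstring (``write_headers``, ``finish``, then ``read_response``); the host is a placeholder and ``PrintDelegate`` is a hypothetical delegate:

from tornado import httputil
from tornado.http1connection import HTTP1Connection
from tornado.ioloop import IOLoop
from tornado.tcpclient import TCPClient


class PrintDelegate(httputil.HTTPMessageDelegate):
    # Hypothetical delegate that just reports what it receives.
    def headers_received(self, start_line, headers):
        print(start_line)

    def data_received(self, chunk):
        print("body chunk of %d bytes" % len(chunk))

    def finish(self):
        print("response finished")


async def main() -> None:
    stream = await TCPClient().connect("example.com", 80)
    conn = HTTP1Connection(stream, is_client=True)
    start_line = httputil.RequestStartLine("GET", "/", "HTTP/1.1")
    headers = httputil.HTTPHeaders({"Host": "example.com", "Connection": "close"})
    await conn.write_headers(start_line, headers)
    conn.finish()
    await conn.read_response(PrintDelegate())


if __name__ == "__main__":
    IOLoop.current().run_sync(main)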
Example #27
class Runner(object):
    """Internal implementation of `tornado.gen.coroutine`.

    Maintains information about pending callbacks and their results.

    The results of the generator are stored in ``result_future`` (a
    `.Future`)
    """

    def __init__(
        self,
        gen: "Generator[_Yieldable, Any, _T]",
        result_future: "Future[_T]",
        first_yielded: _Yieldable,
    ) -> None:
        self.gen = gen
        self.result_future = result_future
        self.future = _null_future  # type: Union[None, Future]
        self.running = False
        self.finished = False
        self.io_loop = IOLoop.current()
        if self.handle_yield(first_yielded):
            gen = result_future = first_yielded = None  # type: ignore
            self.run()

    def run(self) -> None:
        """Starts or resumes the generator, running until it reaches a
        yield point that is not ready.
        """
        if self.running or self.finished:
            return
        try:
            self.running = True
            while True:
                future = self.future
                if future is None:
                    raise Exception("No pending future")
                if not future.done():
                    return
                self.future = None
                try:
                    exc_info = None

                    try:
                        value = future.result()
                    except Exception:
                        exc_info = sys.exc_info()
                    future = None

                    if exc_info is not None:
                        try:
                            yielded = self.gen.throw(*exc_info)  # type: ignore
                        finally:
                            # Break up a reference to itself
                            # for faster GC on CPython.
                            exc_info = None
                    else:
                        yielded = self.gen.send(value)

                except (StopIteration, Return) as e:
                    self.finished = True
                    self.future = _null_future
                    future_set_result_unless_cancelled(
                        self.result_future, _value_from_stopiteration(e)
                    )
                    self.result_future = None  # type: ignore
                    return
                except Exception:
                    self.finished = True
                    self.future = _null_future
                    future_set_exc_info(self.result_future, sys.exc_info())
                    self.result_future = None  # type: ignore
                    return
                if not self.handle_yield(yielded):
                    return
                yielded = None
        finally:
            self.running = False

    def handle_yield(self, yielded: _Yieldable) -> bool:
        try:
            self.future = convert_yielded(yielded)
        except BadYieldError:
            self.future = Future()
            future_set_exc_info(self.future, sys.exc_info())

        if self.future is moment:
            self.io_loop.add_callback(self.run)
            return False
        elif self.future is None:
            raise Exception("no pending future")
        elif not self.future.done():

            def inner(f: Any) -> None:
                # Break a reference cycle to speed GC.
                f = None  # noqa: F841
                self.run()

            self.io_loop.add_future(self.future, inner)
            return False
        return True

    def handle_exception(
        self, typ: Type[Exception], value: Exception, tb: types.TracebackType
    ) -> bool:
        if not self.running and not self.finished:
            self.future = Future()
            future_set_exc_info(self.future, (typ, value, tb))
            self.run()
            return True
        else:
            return False
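The Runner is the machinery behind decorated generator coroutines. A minimal sketch of the interface it drives:

from tornado import gen
from tornado.ioloop import IOLoop


@gen.coroutine
def add_later(a, b):
    # Each yield hands a Future to the Runner, which resumes the
    # generator once that Future resolves.
    yield gen.sleep(0.01)
    return a + b


if __name__ == "__main__":
    print(IOLoop.current().run_sync(lambda: add_later(2, 3)))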
Example #28
class WaitIterator(object):
    """Provides an iterator to yield the results of awaitables as they finish.

    Yielding a set of awaitables like this:

    ``results = yield [awaitable1, awaitable2]``

    pauses the coroutine until both ``awaitable1`` and ``awaitable2``
    return, and then restarts the coroutine with the results of both
    awaitables. If either awaitable raises an exception, the
    expression will raise that exception and all the results will be
    lost.

    If you need to get the result of each awaitable as soon as possible,
    or if you need the result of some awaitables even if others produce
    errors, you can use ``WaitIterator``::

      wait_iterator = gen.WaitIterator(awaitable1, awaitable2)
      while not wait_iterator.done():
          try:
              result = yield wait_iterator.next()
          except Exception as e:
              print("Error {} from {}".format(e, wait_iterator.current_future))
          else:
              print("Result {} received from {} at {}".format(
                  result, wait_iterator.current_future,
                  wait_iterator.current_index))

    Because results are returned as soon as they are available the
    output from the iterator *will not be in the same order as the
    input arguments*. If you need to know which future produced the
    current result, you can use the attributes
    ``WaitIterator.current_future``, or ``WaitIterator.current_index``
    to get the index of the awaitable from the input list. (if keyword
    arguments were used in the construction of the `WaitIterator`,
    ``current_index`` will use the corresponding keyword).

    On Python 3.5, `WaitIterator` implements the async iterator
    protocol, so it can be used with the ``async for`` statement (note
    that in this version the entire iteration is aborted if any value
    raises an exception, while the previous example can continue past
    individual errors)::

      async for result in gen.WaitIterator(future1, future2):
          print("Result {} received from {} at {}".format(
              result, wait_iterator.current_future,
              wait_iterator.current_index))

    .. versionadded:: 4.1

    .. versionchanged:: 4.3
       Added ``async for`` support in Python 3.5.

    """

    _unfinished = {}  # type: Dict[Future, Union[int, str]]

    def __init__(self, *args: Future, **kwargs: Future) -> None:
        if args and kwargs:
            raise ValueError("You must provide args or kwargs, not both")

        if kwargs:
            self._unfinished = dict((f, k) for (k, f) in kwargs.items())
            futures = list(kwargs.values())  # type: Sequence[Future]
        else:
            self._unfinished = dict((f, i) for (i, f) in enumerate(args))
            futures = args

        self._finished = collections.deque()  # type: Deque[Future]
        self.current_index = None  # type: Optional[Union[str, int]]
        self.current_future = None  # type: Optional[Future]
        self._running_future = None  # type: Optional[Future]

        for future in futures:
            future_add_done_callback(future, self._done_callback)

    def done(self) -> bool:
        """Returns True if this iterator has no more results."""
        if self._finished or self._unfinished:
            return False
        # Clear the 'current' values when iteration is done.
        self.current_index = self.current_future = None
        return True

    def next(self) -> Future:
        """Returns a `.Future` that will yield the next available result.

        Note that this `.Future` will not be the same object as any of
        the inputs.
        """
        self._running_future = Future()

        if self._finished:
            self._return_result(self._finished.popleft())

        return self._running_future

    def _done_callback(self, done: Future) -> None:
        if self._running_future and not self._running_future.done():
            self._return_result(done)
        else:
            self._finished.append(done)

    def _return_result(self, done: Future) -> None:
        """Called set the returned future's state that of the future
        we yielded, and set the current future for the iterator.
        """
        if self._running_future is None:
            raise Exception("no future is running")
        chain_future(done, self._running_future)

        self.current_future = done
        self.current_index = self._unfinished.pop(done)

    def __aiter__(self) -> typing.AsyncIterator:
        return self

    def __anext__(self) -> Future:
        if self.done():
            # Lookup by name to silence pyflakes on older versions.
            raise getattr(builtins, "StopAsyncIteration")()
        return self.next()
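A runnable variant of the ``async for`` usage from the docstring; ``work`` is a stand-in awaitable, and `gen.convert_yielded` turns the coroutines into the `.Future` objects that `WaitIterator` expects:

from tornado import gen
from tornado.ioloop import IOLoop


async def work(delay, value):
    await gen.sleep(delay)
    return value


async def main() -> None:
    wait_iterator = gen.WaitIterator(
        fast=gen.convert_yielded(work(0.1, "fast")),
        slow=gen.convert_yielded(work(0.2, "slow")),
    )
    # Results arrive in completion order, not argument order.
    async for result in wait_iterator:
        print(result, "from", wait_iterator.current_index)


if __name__ == "__main__":
    IOLoop.current().run_sync(main)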