Example #1
def test_join_from_rfc_3986_abnormal(url, expected):
    # test case from https://tools.ietf.org/html/rfc3986.html#section-5.4.2
    base = URL('http://a/b/c/d;p?q')
    url = URL(url)
    expected = URL(expected)
    assert base.join(url) == expected
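The test above relies on a parametrize decorator that is not shown in this listing. A minimal sketch of how it could be driven, using a handful of the abnormal-reference cases from RFC 3986 section 5.4.2 (the full case list in the original suite may differ):

import pytest
from yarl import URL


# A few abnormal references from RFC 3986 section 5.4.2, resolved against
# the base "http://a/b/c/d;p?q".
@pytest.mark.parametrize('url,expected', [
    ('../../../g', 'http://a/g'),
    ('/./g', 'http://a/g'),
    ('/../g', 'http://a/g'),
    ('g.', 'http://a/b/c/g.'),
    ('./../g', 'http://a/b/g'),
])
def test_join_from_rfc_3986_abnormal(url, expected):
    base = URL('http://a/b/c/d;p?q')
    assert base.join(URL(url)) == URL(expected)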
Example #2
class BaseTestServer(ABC):
    def __init__(self, *, scheme=sentinel, loop=None, host='127.0.0.1'):
        self._loop = loop  # event loop used by start_server() and the sync context manager
        self.port = None
        self.server = None
        self.handler = None
        self._root = None
        self.host = host
        self._closed = False
        self.scheme = scheme

    @asyncio.coroutine
    def start_server(self, **kwargs):
        if self.server:
            return
        self.port = unused_port()
        self._ssl = kwargs.pop('ssl', None)
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme,
                                             self.host,
                                             self.port))

        handler = yield from self._make_factory(**kwargs)
        self.server = yield from self._loop.create_server(handler,
                                                          self.host,
                                                          self.port,
                                                          ssl=self._ssl)

    @abstractmethod  # pragma: no cover
    @asyncio.coroutine
    def _make_factory(self, **kwargs):
        pass

    def make_url(self, path):
        url = URL(path)
        assert not url.is_absolute()
        return self._root.join(url)

    @property
    def started(self):
        return self.server is not None

    @property
    def closed(self):
        return self._closed

    @asyncio.coroutine
    def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            self.server.close()
            yield from self.server.wait_closed()
            self._root = None
            self.port = None
            yield from self._close_hook()
            self._closed = True

    @abstractmethod
    @asyncio.coroutine
    def _close_hook(self):
        pass  # pragma: no cover

    def __enter__(self):
        self._loop.run_until_complete(self.start_server())
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._loop.run_until_complete(self.close())

    if PY_35:
        @asyncio.coroutine
        def __aenter__(self):
            yield from self.start_server()
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_value, traceback):
            yield from self.close()
Example #3
class BaseTestServer(ABC):
    __test__ = False

    def __init__(
        self,
        *,
        scheme: Union[str, object] = sentinel,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self._loop = loop
        self.runner = None  # type: Optional[BaseRunner]
        self._root = None  # type: Optional[URL]
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            version = 4
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if self.scheme is sentinel:
            if self._ssl:
                scheme = "https"
            else:
                scheme = "http"
            self.scheme = scheme
        self._root = URL(f"{self.scheme}://{self.host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        pass

    def make_url(self, path: str) -> URL:
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self) -> bool:
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but is never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
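BaseTestServer itself is abstract; aiohttp ships TestServer as a concrete subclass, and it is driven through the __aenter__/__aexit__ pair shown above. A minimal usage sketch (the application, route, and handler are made up for illustration):

import asyncio

from aiohttp import ClientSession, web
from aiohttp.test_utils import TestServer


async def ping(request: web.Request) -> web.Response:
    return web.Response(text="pong")


async def main() -> None:
    app = web.Application()
    app.router.add_get("/ping", ping)

    # __aenter__ calls start_server(); __aexit__ calls close().
    async with TestServer(app) as server:
        async with ClientSession() as session:
            async with session.get(server.make_url("/ping")) as resp:
                print(resp.status, await resp.text())


asyncio.run(main())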
Example #4
def test_join_absolute():
    base = URL('http://www.cwi.nl/%7Eguido/Python.html')
    url = URL('//www.python.org/%7Eguido')
    url2 = base.join(url)
    assert str(url2) == 'http://www.python.org/~guido'
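The reference above is a network-path reference (it starts with //), so join() keeps the base scheme but replaces the authority. For contrast, a quick sketch of an ordinary relative reference against the same base; the output noted in the comment is an expectation, assuming yarl's usual decoding of the unreserved %7E to ~:

from yarl import URL

base = URL('http://www.cwi.nl/%7Eguido/Python.html')
# A plain relative reference only replaces the last path segment.
print(base.join(URL('FAQ.html')))  # expected: http://www.cwi.nl/~guido/FAQ.html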
Example #5
def test_join_non_url():
    base = URL("http://example.com")
    with pytest.raises(TypeError):
        base.join("path/to")
Example #6
    def _request(self, method, url, *,
                 params=None,
                 data=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 allow_redirects=True,
                 max_redirects=10,
                 encoding='utf-8',
                 version=None,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 read_until_eof=True,
                 proxy=None,
                 proxy_auth=None,
                 timeout=DEFAULT_TIMEOUT):

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if version is not None:
            warnings.warn("HTTP version should be specified "
                          "by ClientSession constructor", DeprecationWarning)
        else:
            version = self._version

        if self.closed:
            raise RuntimeError('Session is closed')

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn(
                'Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        if auth is None:
            auth = self._default_auth
        # Supporting an explicit Authorization header together with the
        # `auth` argument would be confusing, so it is rejected
        if (headers is not None and
                auth is not None and
                hdrs.AUTHORIZATION in headers):
            raise ValueError("Can't combine `Authorization` header with "
                             "`auth` argument")

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            proxy = URL(proxy)

        # request timeout
        if timeout is None:
            timeout = self._read_timeout
        if timeout is None:
            timeout = self._connector.conn_timeout
        elif self._connector.conn_timeout is not None:
            timeout = max(timeout, self._connector.conn_timeout)

        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        timer = self._time_service.timeout(timeout)

        with timer:
            while True:
                url = URL(url).with_fragment(None)

                cookies = self._cookie_jar.filter_cookies(url)

                req = self._request_class(
                    method, url, params=params, headers=headers,
                    skip_auto_headers=skip_headers, data=data,
                    cookies=cookies, encoding=encoding,
                    auth=auth, version=version, compress=compress,
                    chunked=chunked, expect100=expect100,
                    loop=self._loop, response_class=self._response_class,
                    proxy=proxy, proxy_auth=proxy_auth, timer=timer)

                conn = yield from self._connector.connect(req)
                conn.writer.set_tcp_nodelay(True)
                try:
                    resp = req.send(conn)
                    try:
                        yield from resp.start(conn, read_until_eof)
                    except:
                        resp.close()
                        conn.close()
                        raise
                except ClientError:
                    raise
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        code=exc.code,
                        message=exc.message, headers=exc.headers) from exc
                except OSError as exc:
                    raise ClientOSError(*exc.args) from exc

                self._cookie_jar.update_cookies(resp.cookies, resp.url)

                # redirects
                if resp.status in (301, 302, 303, 307) and allow_redirects:
                    redirects += 1
                    history.append(resp)
                    if max_redirects and redirects >= max_redirects:
                        resp.close()
                        break
                    else:
                        yield from resp.release()

                    # For 301 and 302, mimic IE behaviour, now changed in RFC.
                    # Info: https://github.com/kennethreitz/requests/pull/269
                    if (resp.status == 303 and resp.method != hdrs.METH_HEAD) \
                       or (resp.status in (301, 302) and
                           resp.method == hdrs.METH_POST):
                        method = hdrs.METH_GET
                        data = None
                        if headers.get(hdrs.CONTENT_LENGTH):
                            headers.pop(hdrs.CONTENT_LENGTH)

                    r_url = (resp.headers.get(hdrs.LOCATION) or
                             resp.headers.get(hdrs.URI))
                    if r_url is None:
                        raise RuntimeError("{0.method} {0.url} returns "
                                           "a redirect [{0.status}] status "
                                           "but response lacks a Location "
                                           "or URI HTTP header".format(resp))
                    r_url = URL(r_url)

                    scheme = r_url.scheme
                    if scheme not in ('http', 'https', ''):
                        resp.close()
                        raise ValueError('Can redirect only to http or https')
                    elif not scheme:
                        r_url = url.join(r_url)

                    url = r_url
                    params = None
                    yield from resp.release()
                    continue

                break

        resp._history = tuple(history)
        return resp
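The relative-redirect branch above (r_url = url.join(r_url)) is plain RFC 3986 reference resolution, and the same step can be reproduced in isolation with yarl. The request URL and Location value below are made up for illustration:

from yarl import URL

current_url = URL('http://example.com/app/login')   # hypothetical request URL
location = '../dashboard?welcome=1'                 # hypothetical Location header

r_url = URL(location)
if not r_url.scheme:                 # a relative redirect target has no scheme
    r_url = current_url.join(r_url)  # resolve it against the current URL
print(r_url)  # expected: http://example.com/dashboard?welcome=1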
Example #7
class BaseTestServer(ABC):
    __test__ = False

    def __init__(self,
                 *,
                 scheme: Union[str, object] = sentinel,
                 host: str = '127.0.0.1',
                 port: Optional[int] = None,
                 skip_url_asserts: bool = False,
                 **kwargs: Any) -> None:
        self.runner = None  # type: Optional[BaseRunner]
        self._root = None  # type: Optional[URL]
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts

    async def start_server(self, **kwargs: Any) -> None:
        if self.runner:
            return
        self._ssl = kwargs.pop('ssl', None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        absolute_host = self.host
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            version = 4
        if version == 6:
            absolute_host = f"[{self.host}]"
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = get_port_socket(self.host, self.port, family=family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme, absolute_host,
                                             self.port))

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        pass

    def make_url(self, path: str) -> URL:
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self) -> bool:
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    async def __aenter__(self) -> 'BaseTestServer':
        await self.start_server()
        return self

    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
                        exc_value: Optional[BaseException],
                        traceback: Optional[TracebackType]) -> None:
        await self.close()
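The main difference from Example #3 is that this version brackets IPv6 literals before building the root URL, since a bare colon-separated address would collide with the port separator. A quick standalone check of that formatting (the host and port values are made up):

import ipaddress

from yarl import URL

host, port, scheme = "::1", 8080, "http"  # hypothetical values
absolute_host = f"[{host}]" if ipaddress.ip_address(host).version == 6 else host
print(URL(f"{scheme}://{absolute_host}:{port}"))  # expected: http://[::1]:8080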
Example #9
def transform_to_paginated_response(
        data: list,
        page: int,
        page_size: int,
        total_pages: int,
        total_items: int,
        has_next: bool,
        has_prev: bool,
        url_for: _t.Callable,
        request_url: URL,
        **kwargs) -> dict:
    """
    Transform paginated data to json-api-like format

    :param data: items list
    :param page: page number
    :param page_size: page size
    :param total_pages: total pages count
    :param total_items: total items count
    :param has_next: has next page
    :param has_prev: has previous page
    :param url_for: url_for method for url resolution
    :param request_url: original request.url to get the host
    :param kwargs: extra keyword arguments for url query
    :return: dict
    """
    data = {
        'data': data,
        'meta': {
            'page': page,
            'page_size': page_size,
            'total_pages': total_pages,
            'total_items': total_items
        },
        'links': {
            'self': str(
                request_url.join(
                    url_for().with_query(
                        page=page,
                        page_size=page_size,
                        **kwargs
                    )
                )
            ),
            'next': str(
                request_url.join(
                    url_for().with_query(
                        page=page + 1,
                        page_size=page_size,
                        **kwargs
                    )
                )
            ) if has_next else None,
            'prev': str(
                request_url.join(
                    url_for().with_query(
                        page=page - 1,
                        page_size=page_size,
                        **kwargs
                    )
                )
            ) if has_prev else None
        }
    }
    return data
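A usage sketch for the helper above, assuming it is in scope (e.g. the same module); the url_for stand-in, the request URL, and the extra sort parameter are made up for illustration:

from yarl import URL


def url_for() -> URL:
    # Stand-in for an aiohttp-style resource.url_for(); returns a relative URL.
    return URL('/items')


payload = transform_to_paginated_response(
    data=[{'id': 1}, {'id': 2}],
    page=2, page_size=2, total_pages=5, total_items=10,
    has_next=True, has_prev=True,
    url_for=url_for,
    request_url=URL('http://api.example.com/items?page=2&page_size=2'),
    sort='id',  # extra kwargs are carried into each link's query string
)
print(payload['links']['next'])
# expected: http://api.example.com/items?page=3&page_size=2&sort=id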
Example #10
class Horizon(object):
    def __init__(self,
                 horizon_uri: Optional[str] = None,
                 pool_size: Optional[int] = None,
                 num_retries: Optional[int] = DEFAULT_NUM_RETRIES,
                 request_timeout: Optional[Union[int, None]] = DEFAULT_REQUEST_TIMEOUT,
                 backoff_factor: Optional[float] = DEFAULT_BACKOFF_FACTOR):
        """The :class:`Horizon` object, which represents the interface for
        making requests to a Horizon server instance.

        This class aims to be up to date with Horizon's API endpoints; however,
        you can utilize the internal session via ``self.session`` (which is a
        :class:`requests.Session` object) to make arbitrary requests to
        a Horizon instance's API.

        In general, on HTTP errors (non 2XX/3XX responses), no exception is
        raised, and the return dictionary must be checked to see if it is an
        error or a valid response. Any other errors however are raised by this
        class.

        :param horizon_uri: The horizon base URL
        :param request_timeout: The timeout for all requests.
        :param pool_size: The size of the connection pool used for persistent connections to Horizon.
        :param num_retries: The number of times to retry a failed request.
        :param backoff_factor: a backoff factor to apply between attempts after the second try

        """
        if horizon_uri is None:
            self.horizon_uri = URL(HORIZON_TEST)
        else:
            self.horizon_uri = URL(horizon_uri)

        self.num_retries = num_retries
        self.backoff_factor = backoff_factor

        # init session
        if pool_size is None:
            connector = aiohttp.TCPConnector()
        else:
            connector = aiohttp.TCPConnector(limit=pool_size)
        session = aiohttp.ClientSession(headers=HEADERS,
                                        connector=connector,
                                        timeout=aiohttp.ClientTimeout(total=request_timeout))

        self._session = session
        self._sse_session = None

    async def _init_sse_session(self) -> None:
        """Init the sse session """
        if self._sse_session is None:
            self._sse_session = aiohttp.ClientSession(headers={'User-Agent': USER_AGENT})  # No timeout, no special connector
            # Other headers such as "Accept: text/event-stream" are added by the SSEClient

    async def __aenter__(self) -> 'Horizon':
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        await self.close()

    async def submit(self, te: str) -> dict:
        """Submit the transaction using a pooled connection, and retry on failure.

        `POST /transactions
        <https://www.stellar.org/developers/horizon/reference/endpoints/transactions-create.html>`_

        Uses form-encoded data to send over to Horizon.
        :param te: The transaction envelope encoded in base64
        :return: The JSON response indicating the success/failure of the
            submitted transaction.

        """
        params = {'tx': te}
        abs_url = self.horizon_uri.join(URL('/transactions'))
        try:
            reply = await self._post(abs_url, params)
        except (aiohttp.ClientConnectionError, aiohttp.ContentTypeError, asyncio.TimeoutError) as e:
            raise HorizonRequestError(e)

        return check_horizon_reply(reply)

    async def query(self, rel_url: URL, params: Optional[dict] = None, sse: Optional[bool] = False,
                    sse_timeout: Optional[Union[float, None]] = None) -> Union[dict, AsyncGenerator]:
        """
        Send a query to horizon
        :param rel_url: The relative path
        :param params: Parameters to include in the query
        :param sse: Should SSE be used
        :param sse_timeout: How long to wait for a new sse event
        :return: The response from horizon
        """
        abs_url = self.horizon_uri.join(rel_url)
        try:
            reply = await self._get(abs_url, params, sse, sse_timeout=sse_timeout)
        except (aiohttp.ClientConnectionError, aiohttp.ContentTypeError, asyncio.TimeoutError) as e:
            raise HorizonRequestError(e)

        return check_horizon_reply(reply) if not sse else reply

    @_retry
    async def _get(self, url: URL, params: Optional[dict] = None, sse: Optional[bool] = False,
                   sse_timeout: Optional[Union[float, None]] = None) -> Union[dict, AsyncGenerator]:
        """
        Send a get request
        :param url: The url to send a request to
        :param params: Parameters to include in the request
        :param sse: Should SSE be used
        :param sse_timeout: How long to wait for a new sse event
        :return: The response from the http request
        """
        if not sse:
            async with self._session.get(url, params=params) as response:
                return await response.json(encoding='utf-8')
        return self.sse_generator(url, sse_timeout)

    @_retry
    async def _post(self, url: URL, params: Optional[dict] = None) -> dict:
        """
        Send a post request
        :param url: The url to send a request to
        :param params: Parameters to include
        :return: The response from the http request
        """
        async with self._session.post(url, params=params) as response:
            return await response.json(encoding='utf-8')

    async def sse_generator(self, url: Union[str, URL], timeout: Union[float, None]) -> AsyncGenerator:
        """
        SSE generator with timeout between events
        :param url: URL to send SSE request to
        :param timeout: The time to wait for a new event
        :return: AsyncGenerator[dict]
        """
        async def _sse_generator() -> AsyncGenerator:
            """
            Generator for sse events
            :rtype AsyncGenerator[dict]
            """
            last_id = 'now'  # Start monitoring from now.
            retry = 0.1
            while True:
                try:
                    """
                    Create a new SSEClient:
                    Using the last id as the cursor
                    Headers are needed because of a bug that makes "params" override the default headers
                    """
                    async with SSEClient(url, session=self._sse_session,
                                         params={'cursor': last_id},
                                         headers=HEADERS.copy()) as client:
                        """
                        We want to throw a TimeoutError if we didn't get any event in the last x seconds.
                        read_timeout in aiohttp is not implemented correctly https://github.com/aio-libs/aiohttp/issues/1954
                        So we will create our own way to do that.

                        Note that the timeout starts from the first event forward. There is no timeout until we get the first event.
                        """
                        async for event in client:
                            if event.last_event_id != '':
                                # Events that don't have an id are not useful for us (hello/byebye events)
                                # Save the last event id and retry time
                                last_id = event.last_event_id
                                retry = client._reconnection_time.total_seconds()
                                try:
                                    yield json.loads(event.data)
                                except json.JSONDecodeError:
                                    # Content was not json-decodable
                                    pass
                except aiohttp.ClientPayloadError:
                    # Retry if the connection dropped after we got the initial response
                    logger.debug('Resetting SSE connection for {} after timeout'.format(url))
                    await asyncio.sleep(retry)

        await self._init_sse_session()
        gen = _sse_generator()
        while True:
            yield await asyncio.wait_for(gen.__anext__(), timeout)

    async def account(self, address: str) -> dict:
        """Returns information and links relating to a single account.

        `GET /accounts/{account}
        <https://www.stellar.org/developers/horizon/reference/endpoints/accounts-single.html>`_

        :param address: The account ID to retrieve details about.
        :return: The account details in a JSON response.
        :rtype: dict

        """
        endpoint = URL('/accounts/{account_id}'.format(account_id=address))
        return await self.query(endpoint)

    async def account_data(self, address, key):
        """This endpoint represents a single data associated with a given
        account.

        `GET /accounts/{account}/data/{key}
        <https://www.stellar.org/developers/horizon/reference/endpoints/data-for-account.html>`_

        :param str address: The account ID to look up a data item from.
        :param str key: The name of the key for the data item in question.
        :return: The value of the data field for the given account and data key.
        :rtype: dict

        """
        endpoint = URL('/accounts/{account_id}/data/{data_key}'.format(
            account_id=address, data_key=key))
        return await self.query(endpoint)

    async def account_effects(self, address, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all effects that changed a given account.

        `GET /accounts/{account}/effects{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-account.html>`_

        :param str address: The account ID to look up effects for.
        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: The list of effects in a JSON response.
        :rtype: dict

        """
        endpoint = URL('/accounts/{account_id}/effects'.format(account_id=address))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def account_offers(self, address, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all the offers a particular account makes.

        `GET /accounts/{account}/offers{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/offers-for-account.html>`_

        :param str address: The account ID to retrieve offers from.
        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: The list of offers for an account in a JSON response.
        :rtype: dict

        """
        endpoint = URL('/accounts/{account_id}/offers'.format(account_id=address))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def account_operations(self, address, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all operations that were included in valid
        transactions that affected a particular account.

        `GET /accounts/{account}/operations{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/operations-for-account.html>`_

        :param str address: The account ID to list operations on.
        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: The list of operations for an account in a JSON response.
        :rtype: dict

        """
        endpoint = URL('/accounts/{account_id}/operations'.format(
            account_id=address))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def account_transactions(self, address: str, cursor: Optional[int] = None, order: str = 'asc', limit: Optional[int] = 10, sse: Optional[bool] = False,
                                   sse_timeout: Optional[Union[float, None]] = None) -> Union[dict, AsyncGenerator]:
        """This endpoint represents all transactions that affected a given
        account.

        `GET /accounts/{account_id}/transactions{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/transactions-for-account.html>`_

        :param address: The account ID to list transactions from.
        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :param order: The order in which to return rows, "asc" or "desc".
        :param limit: Maximum number of records to return.
        :param sse: Use server side events for streaming responses.
        :param sse_timeout: How long to wait between events
        :return: The list of transactions for an account in a JSON response.
        :rtype: dict

        """
        endpoint = URL('/accounts/{account_id}/transactions'.format(
            account_id=address))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)

        return await self.query(endpoint, params, sse, sse_timeout)

    async def account_payments(self, address: str, cursor: Optional[int] = None, order: str = 'asc', limit: Optional[int] = 10, sse: Optional[bool] = False,
                               sse_timeout: Optional[Union[float, None]] = None) -> Union[dict, AsyncGenerator]:
        """This endpoint responds with a collection of Payment operations where
        the given account was either the sender or receiver.

        `GET /accounts/{id}/payments{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/payments-for-account.html>`_

        :param address: The account ID to list payments to/from.
        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :param order: The order in which to return rows, "asc" or "desc".
        :param limit: Maximum number of records to return.
        :param sse: Use server side events for streaming responses.
        :param sse_timeout: How long to wait between events
        :return: The list of payments for an account in a JSON response.
        :rtype: dict
        """
        endpoint = URL('/accounts/{account_id}/payments'.format(account_id=address))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def account_trades(self, address, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint responds with a collection of Trades where
        the given account was either the taker or the maker

        `GET /accounts/{id}/trades{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/trades-for-account.html>`_

        :param str address: The account ID to list trades to/from.
        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: The list of payments for an account in a JSON response.
        :rtype: dict
        """
        endpoint = URL('/accounts/{account_id}/trades'.format(account_id=address))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def assets(self, asset_code=None, asset_issuer=None, cursor=None, order='asc', limit=10):
        """This endpoint represents all assets. It will give you all the assets
        in the system along with various statistics about each.

        See the documentation below for details on query parameters that are
        available.

        `GET /assets{?asset_code,asset_issuer,cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/assets-all.html>`_

        :param str asset_code: Code of the Asset to filter by.
        :param str asset_issuer: Issuer of the Asset to filter by.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc",
            ordered by asset_code then by asset_issuer.
        :param int limit: Maximum number of records to return.

        :return: A list of all valid payment operations
        :rtype: dict

        """
        endpoint = URL('/assets')
        params = self.__query_params(asset_code=asset_code, asset_issuer=asset_issuer, cursor=cursor, order=order,
                                     limit=limit)
        return await self.query(endpoint, params)

    async def transactions(self, cursor: Optional[int] = None, order: str = 'asc', limit: Optional[int] = 10, sse: Optional[bool] = False,
                           sse_timeout: Optional[Union[float, None]] = None) -> Union[dict, AsyncGenerator]:
        """This endpoint represents all validated transactions.

        `GET /transactions{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/transactions-all.html>`_

        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :param sse_timeout: How long to wait between events
        :return: The list of all transactions
        :rtype: dict

        """
        endpoint = URL('/transactions')
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def transaction(self, tx_hash: str) -> dict:
        """The transaction details endpoint provides information on a single
        transaction.

        `GET /transactions/{hash}
        <https://www.stellar.org/developers/horizon/reference/endpoints/transactions-single.html>`_

        :param tx_hash: The hex-encoded transaction hash.
        :return: A single transaction's details.
        :rtype: dict

        """
        endpoint = URL('/transactions/{tx_hash}'.format(tx_hash=tx_hash))
        return await self.query(endpoint)

    async def transaction_operations(self, tx_hash, cursor=None, order='asc', limit=10):
        """This endpoint represents all operations that are part of a given
        transaction.

        `GET /transactions/{hash}/operations{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/operations-for-transaction.html>`_

        :param str tx_hash: The hex-encoded transaction hash.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A single transaction's operations.
        :rtype: dict

        """
        endpoint = URL('/transactions/{tx_hash}/operations'.format(tx_hash=tx_hash))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def transaction_effects(self, tx_hash, cursor=None, order='asc', limit=10):
        """This endpoint represents all effects that occurred as a result of a
        given transaction.

        `GET /transactions/{hash}/effects{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-transaction.html>`_

        :param str tx_hash: The hex-encoded transaction hash.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A single transaction's effects.
        :rtype: dict

        """
        endpoint = URL('/transactions/{tx_hash}/effects'.format(tx_hash=tx_hash))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def transaction_payments(self, tx_hash, cursor=None, order='asc', limit=10):
        """This endpoint represents all payment operations that are part of a
        given transaction.

        `GET /transactions/{hash}/payments{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/payments-for-transaction.html>`_

        :param str tx_hash: The hex-encoded transaction hash.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A single transaction's payment operations.
        :rtype: dict

        """
        endpoint = URL('/transactions/{tx_hash}/payments'.format(tx_hash=tx_hash))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def order_book(self, selling_asset_code, buying_asset_code, selling_asset_issuer=None, buying_asset_issuer=None,
                   limit=10):
        """Return, for each orderbook, a summary of the orderbook and the bids
        and asks associated with that orderbook.

        See the external docs below for information on the arguments required.

        `GET /order_book
        <https://www.stellar.org/developers/horizon/reference/endpoints/orderbook-details.html>`_

        :param str selling_asset_code: Code of the Asset being sold.
        :param str buying_asset_code: Code of the Asset being bought.
        :param str selling_asset_issuer: Account ID of the issuer of the Asset being sold,
            if it is a native asset, let it be `None`.
        :param str buying_asset_issuer: Account ID of the issuer of the Asset being bought,
            if it is a native asset, let it be `None`.
        :param int limit: Limit the number of items returned.
        :return: A list of orderbook summaries as a JSON object.
        :rtype: dict

        """
        selling_asset = Asset(selling_asset_code, selling_asset_issuer)
        buying_asset = Asset(buying_asset_code, buying_asset_issuer)
        asset_params = {
            'selling_asset_type': selling_asset.type,
            'selling_asset_code': None if selling_asset.is_native() else selling_asset.code,
            'selling_asset_issuer': selling_asset.issuer,
            'buying_asset_type': buying_asset.type,
            'buying_asset_code': None if buying_asset.is_native() else buying_asset.code,
            'buying_asset_issuer': buying_asset.issuer,
        }
        endpoint = URL('/order_book')
        params = self.__query_params(limit=limit, **asset_params)
        return await self.query(endpoint, params)

    async def ledgers(self, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all ledgers.

        `GET /ledgers{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-all.html>`_

        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: All ledgers on the network.
        :rtype: dict

        """
        endpoint = URL('/ledgers')
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def ledger(self, ledger_id: int) -> dict:
        """The ledger details endpoint provides information on a single ledger.

        `GET /ledgers/{sequence}
        <https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-single.html>`_

        :param ledger_id: The id of the ledger to look up.
        :return: The details of a single ledger.
        :rtype: dict

        """
        endpoint = URL('/ledgers/{ledger_id}'.format(ledger_id=ledger_id))
        return await self.query(endpoint)

    async def ledger_transactions(self, ledger_id, cursor=None, order='asc', limit=10):
        """This endpoint represents all transactions that occurred in the given
        ledger.

        `GET /ledgers/{id}/transactions{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-ledger.html>`_

        :param int ledger_id: The id of the ledger to look up.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: The transactions for a single ledger.
        :rtype: dict

        """
        endpoint = URL('/ledgers/{ledger_id}/transactions'.format(ledger_id=ledger_id))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def ledger_effects(self, ledger_id, cursor=None, order='asc', limit=10):
        """This endpoint represents all effects that occurred in the given
        ledger.

        `GET /ledgers/{id}/effects{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-ledger.html>`_

        :param int ledger_id: The id of the ledger to look up.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: The effects for a single ledger.
        :rtype: dict

        """
        endpoint = URL('/ledgers/{ledger_id}/effects'.format(ledger_id=ledger_id))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def ledger_operations(self, ledger_id, cursor=None, order='asc', limit=10):
        """This endpoint returns all operations that occurred in a given
        ledger.

        `GET /ledgers/{id}/operations{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/operations-for-ledger.html>`_

        :param int ledger_id: The id of the ledger to look up.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: The operations contained in a single ledger.
        :rtype: dict

        """
        endpoint = URL('/ledgers/{ledger_id}/operations'.format(
            ledger_id=ledger_id))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def ledger_payments(self, ledger_id, cursor=None, order='asc', limit=10):
        """This endpoint represents all payment operations that are part of a
        valid transactions in a given ledger.

        `GET /ledgers/{id}/payments{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/payments-for-ledger.html>`_

        :param int ledger_id: The id of the ledger to look up.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: The payments contained in a single ledger.
        :rtype: dict

        """
        endpoint = URL('/ledgers/{ledger_id}/payments'.format(ledger_id=ledger_id))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def effects(self, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all effects.

        `GET /effects{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/effects-all.html>`_

        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: A list of all effects.
        :rtype: dict

        """
        endpoint = URL('/effects')
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def operations(self, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all operations that are part of validated
        transactions.

        `GET /operations{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/operations-all.html>`_

        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: A list of all operations.
        :rtype: dict

        """
        endpoint = URL('/operations')
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def operation(self, op_id):
        """The operation details endpoint provides information on a single
        operation.

        `GET /operations/{id}
        <https://www.stellar.org/developers/horizon/reference/endpoints/operations-single.html>`_

        :param id op_id: The operation ID to get details on.
        :return: Details on a single operation.
        :rtype: dict
        """
        endpoint = URL('/operations/{op_id}'.format(op_id=op_id))
        return await self.query(endpoint)

    async def operation_effects(self, op_id, cursor=None, order='asc', limit=10):
        """This endpoint represents all effects that occurred as a result of a
        given operation.

        `GET /operations/{id}/effects{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-operation.html>`_

        :param int op_id: The operation ID to get effects on.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A list of effects on the given operation.
        :rtype: dict

        """
        endpoint = URL('/operations/{op_id}/effects'.format(op_id=op_id))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def payments(self, cursor=None, order='asc', limit=10, sse=False, sse_timeout=None):
        """This endpoint represents all payment operations that are part of
        validated transactions.

        `GET /payments{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/payments-all.html>`_

        :param cursor: A paging token, specifying where to start returning records from.
            When streaming this can be set to "now" to stream objects created since your request time.
        :type cursor: int, str
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :param bool sse: Use server side events for streaming responses.
        :return: A list of all valid payment operations.
        :rtype: dict

        """
        endpoint = URL('/payments')
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params, sse, sse_timeout)

    async def paths(self, destination_account, destination_amount, source_account, destination_asset_code,
              destination_asset_issuer=None):
        """Load a list of assets available to the source account id and find
        any payment paths from those source assets to the desired
        destination asset.

        See the below docs for more information on required and optional
        parameters for further specifying your search.

        `GET /paths
        <https://www.stellar.org/developers/horizon/reference/endpoints/path-finding.html>`_

        :param str destination_account: The destination account that any returned path should use.
        :param str destination_amount: The amount, denominated in the destination asset,
            that any returned path should be able to satisfy.
        :param str source_account: The sender's account id. Any returned path must use a source that the sender can hold.
        :param str destination_asset_code: The asset code for the destination.
        :param destination_asset_issuer: The asset issuer for the destination, if it is a native asset, let it be `None`.
        :type destination_asset_issuer: str, None


        :return: A list of paths that can be used to complete a payment based
            on a given query.
        :rtype: dict

        """
        destination_asset = Asset(destination_asset_code, destination_asset_issuer)
        destination_asset_params = {
            'destination_asset_type': destination_asset.type,
            'destination_asset_code': None if destination_asset.is_native() else destination_asset.code,
            'destination_asset_issuer': destination_asset.issuer
        }
        endpoint = URL('/paths')
        params = self.__query_params(destination_account=destination_account,
                                     source_account=source_account,
                                     destination_amount=destination_amount,
                                     **destination_asset_params
                                     )
        return await self.query(endpoint, params)

    async def trades(self, base_asset_code=None, counter_asset_code=None, base_asset_issuer=None, counter_asset_issuer=None,
               offer_id=None, cursor=None, order='asc', limit=10):
        """Load a list of trades, optionally filtered by an orderbook.

        See the below docs for more information on required and optional
        parameters for further specifying your search.

        `GET /trades
        <https://www.stellar.org/developers/horizon/reference/endpoints/trades.html>`_

        :param str base_asset_code: Code of base asset.
        :param str base_asset_issuer: Issuer of the base asset; use `None` if it is the native asset.
        :param str counter_asset_code: Code of counter asset.
        :param str counter_asset_issuer: Issuer of the counter asset; use `None` if it is the native asset.
        :param int offer_id: Filter by a specific offer id.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A list of trades filtered by a given query.
        :rtype: dict

        """
        base_asset = Asset(base_asset_code, base_asset_issuer)
        counter_asset = Asset(counter_asset_code, counter_asset_issuer)
        asset_params = {
            'base_asset_type': base_asset.type,
            'base_asset_code': None if base_asset.is_native() else base_asset.code,
            'base_asset_issuer': base_asset.issuer,
            'counter_asset_type': counter_asset.type,
            'counter_asset_code': None if counter_asset.is_native() else counter_asset.code,
            'counter_asset_issuer': counter_asset.issuer
        }
        endpoint = URL('/trades')
        params = self.__query_params(offer_id=offer_id, cursor=cursor, order=order, limit=limit, **asset_params)
        return await self.query(endpoint, params)

    async def trade_aggregations(self, resolution, base_asset_code, counter_asset_code,
                           base_asset_issuer=None, start_time=None, end_time=None,
                           counter_asset_issuer=None, order='asc', limit=10):
        """Load a list of aggregated historical trade data, optionally filtered
        by an orderbook.

        `GET /trade_aggregations
        <https://www.stellar.org/developers/horizon/reference/endpoints/trade_aggregations.html>`_

        :param int start_time: Lower time boundary represented as millis since epoch.
        :param int end_time: Upper time boundary represented as millis since epoch.
        :param int resolution: Segment duration in milliseconds. Supported values
            are 1 minute (60000), 5 minutes (300000), 15 minutes (900000), 1 hour (3600000),
            1 day (86400000) and 1 week (604800000).
        :param str base_asset_code: Code of base asset.
        :param str base_asset_issuer: Issuer of the base asset; use `None` if it is the native asset.
        :param str counter_asset_code: Code of counter asset.
        :param str counter_asset_issuer: Issuer of the counter asset; use `None` if it is the native asset.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A list of collected trade aggregations.
        :rtype: dict

        """
        base_asset = Asset(base_asset_code, base_asset_issuer)
        counter_asset = Asset(counter_asset_code, counter_asset_issuer)
        asset_params = {
            'base_asset_type': base_asset.type,
            'base_asset_code': None if base_asset.is_native() else base_asset.code,
            'base_asset_issuer': base_asset.issuer,
            'counter_asset_type': counter_asset.type,
            'counter_asset_code': None if counter_asset.is_native() else counter_asset.code,
            'counter_asset_issuer': counter_asset.issuer
        }
        endpoint = URL('/trade_aggregations')
        params = self.__query_params(start_time=start_time, end_time=end_time, resolution=resolution, order=order,
                                     limit=limit, **asset_params)
        return await self.query(endpoint, params)

    async def offer_trades(self, offer_id, cursor=None, order='asc', limit=10):
        """This endpoint represents all trades for a given offer.

        `GET /offers/{offer_id}/trades{?cursor,limit,order}
        <https://www.stellar.org/developers/horizon/reference/endpoints/trades-for-offer.html>`_

        :param int offer_id: The offer ID to get trades on.
        :param int cursor: A paging token, specifying where to start returning records from.
        :param str order: The order in which to return rows, "asc" or "desc".
        :param int limit: Maximum number of records to return.
        :return: A list of trades for the given offer.
        :rtype: dict

        """
        endpoint = URL('/offers/{offer_id}/trades'.format(offer_id=offer_id))
        params = self.__query_params(cursor=cursor, order=order, limit=limit)
        return await self.query(endpoint, params)

    async def metrics(self):
        """The metrics endpoint returns a host of useful data points for monitoring the health
        of the underlying Horizon process.

        `GET /metrics
        <https://www.stellar.org/developers/horizon/reference/endpoints/metrics.html>`_

        :return: A host of useful data points for monitoring the health of the underlying Horizon process
        :rtype: dict
        """

        endpoint = URL('/metrics')
        return await self.query(endpoint)

    @staticmethod
    def __query_params(**kwargs) -> Union[dict, None]:
        params = {k: v for k, v in kwargs.items() if v is not None}
        return params

    async def close(self) -> None:
        """Close the connection to horizon"""
        await self._session.__aexit__(None, None, None)
        if self._sse_session is not None:
            await self._sse_session.__aexit__(None, None, None)
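A minimal sketch, not taken from the client above, of the URL-building pattern its methods share: a relative endpoint is joined onto an absolute Horizon base with yarl, and the None-filtered parameters (as in __query_params) become the query string. The base URL here is illustrative.

from yarl import URL

base = URL('https://horizon.stellar.org')   # illustrative Horizon instance
endpoint = URL('/trades')

# Mirror __query_params: keep only the parameters the caller actually supplied.
raw = {'cursor': None, 'order': 'asc', 'limit': '10'}
params = {k: v for k, v in raw.items() if v is not None}

request_url = base.join(endpoint).with_query(params)
print(request_url)  # https://horizon.stellar.org/trades?order=asc&limit=10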
Example #11
0
class Proxy:
    """This is basically a reverse proxy that translates some headers. We don't care about cookies or sessions.

    This takes the OIDC data from the load balancer, validates it, and adds new headers as expected by Grafana.
    Some form of key caching may be useful and will be implemented later.
    """

    def __init__(
        self,
        upstream: str,
        aws_region: str,
        header_name: str = "X-WEBAUTH-USER",
        header_property: str = "email",
        ignore_auth: bool = False,
    ):
        """Creates a server for a given AWS region.

        :param upstream: The URL of the upstream server
        :param aws_region: The AWS region where this is running, used to fetch the public key.
        :param header_name: HTTP header name to send, as configured in ``grafana.ini``.
        :param header_property: The header property to use from the payload. Should match what Grafana expects.
        :param ignore_auth: Whether to run without authentication. Should only be used in testing.
        """
        self._ignore_auth = ignore_auth
        self._upstream = URL(upstream)
        self._key_url = URL(f"https://public-keys.auth.elb.{aws_region}.amazonaws.com")
        self._header_name = header_name
        self._header_property = header_property

    async def _setup_session(self, app):
        """Handle context sessions nicely.

        `See docs <https://docs.aiohttp.org/en/latest/client_advanced.html#persistent-session>`_"""
        self._key_session = ClientSession(raise_for_status=True)
        self._upstream_session = ClientSession(
            raise_for_status=False, cookie_jar=DummyCookieJar(), auto_decompress=False
        )
        yield
        await asyncio.gather(self._key_session.close(), self._upstream_session.close())

    def runner(self):
        app = web.Application(middlewares=[self.auth_middleware], logger=logger)
        app.router.add_route("*", "/{tail:.*}", self.handle_request)
        app.cleanup_ctx.append(self._setup_session)
        return web.AppRunner(app)

    async def _decode_payload(self, oidc_data: str) -> Mapping[str, str]:
        """ Returns the payload of the OIDC data sent by the ALB

        `Relevant AWS Documentation
        <https://docs.aws.amazon.com/elasticloadbalancing/latest/application/listener-authenticate-users.html#user-claims-encoding>`_

        :param oidc_data: OIDC data from the ALB
        :return: The value of the configured header property from the decoded payload
        :raises jwt.exceptions.ExpiredSignatureError: If the token is no longer valid
        """
        header = jwt.get_unverified_header(oidc_data)
        kid = header["kid"]
        alg = header["alg"]

        async with self._key_session.get(self._key_url.join(URL(kid))) as response:
            pub_key = await response.text()

        payload = jwt.decode(oidc_data, pub_key, algorithms=[alg])
        try:
            return payload[self._header_property]
        except KeyError:
            logger.warning(f"Could not find '{self._header_property}' key in OIDC Data.")
            raise HTTPBadRequest

    async def _add_auth_info(self, request: web.Request):
        """Adds the authentication information, if any, to the request.

        Catches exceptions from decoding the payload and converts them to HTTP exceptions to be propagated.
        If authentication is disabled via :attr:`~_ignore_auth`, this does nothing.

        Headers are kept in a `CIMultiDictProxy`_ so the case of the header name is not important.

        .. _CIMultiDictProxy: https://multidict.readthedocs.io/en/stable/multidict.html#multidict.CIMultiDictProxy
        """
        if self._ignore_auth:
            return None

        try:
            oidc_data = request.headers["X-Amzn-Oidc-Data"]
        except KeyError:
            logger.warning("No 'X-Amzn-Oidc-Data' header present. Dropping request.")
            raise HTTPProxyAuthenticationRequired()
        try:
            request["auth_payload"] = (self._header_name, await self._decode_payload(oidc_data))
        except ExpiredSignatureError:
            logger.warning("Got expired token. Dropping request.")
            raise HTTPUnauthorized()
        except DecodeError as e:
            logger.warning("Couldn't decode token. Dropping request.")
            logger.debug("Couldn't decode token: %s" % e)
            raise HTTPBadRequest()

    @REQUEST_HISTOGRAM.time()
    async def handle_request(self, request: web.Request) -> web.StreamResponse:
        upstream_url = self._upstream.join(request.url.relative())
        upstream_request = self._upstream_session.request(
            url=upstream_url,
            method=request.method,
            headers=clean_response_headers(request),
            params=request.query,
            data=request.content,
            allow_redirects=False,
        )
        async with upstream_request as upstream_response:
            UPSTREAM_STATUS_COUNTER.labels(method=upstream_response.method, status=upstream_response.status).inc()
            response = web.StreamResponse(status=upstream_response.status, headers=upstream_response.headers)
            await response.prepare(request)
            async for data in upstream_response.content.iter_any():
                await response.write(data)
            await response.write_eof()
            return response

    @web.middleware
    async def auth_middleware(self, request, handler):
        await self._add_auth_info(request)
        return await handler(request)
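A hedged sketch of wiring the Proxy above into a running aiohttp application; the upstream address, listen port, and region are placeholders, ignore_auth=True is only reasonable for local testing, and the module-level names used by the class (logger, metrics, sessions) are assumed to be those defined alongside it.

import asyncio
from aiohttp import web

async def main():
    # Placeholders: a local Grafana upstream and ALB auth switched off for testing.
    proxy = Proxy(upstream="http://127.0.0.1:3000", aws_region="eu-west-1", ignore_auth=True)
    runner = proxy.runner()            # AppRunner with middleware and cleanup_ctx attached
    await runner.setup()
    site = web.TCPSite(runner, "127.0.0.1", 8080)
    await site.start()
    try:
        await asyncio.Event().wait()   # serve until the task is cancelled
    finally:
        await runner.cleanup()

# asyncio.run(main())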
Example #12
0
class API:
    """Low level client."""

    DEFAULT_TIMEOUT = 10

    def __init__(  # pylint: disable=too-many-arguments
        self,
        api_key,
        url="http://127.0.0.1:8384",
        timeout=DEFAULT_TIMEOUT,
        verify_ssl=True,
        loop=None,
        session=None,
    ):
        """Initialize the client."""
        self._api_key = api_key
        self._url = URL(url)
        self._timeout = aiohttp.ClientTimeout(total=timeout)
        self._verify_ssl = verify_ssl

        self._loop = loop or asyncio.get_event_loop()
        self._session = session
        self._close_session = False

        if self._session is None:
            self._session = aiohttp.ClientSession(loop=self._loop)
            self._close_session = True

    @property
    def url(self):
        """Get URL."""
        return str(self._url)

    async def request(self, *args, **kwargs):
        """Perform request with error wrapping."""
        try:
            return await self.raw_request(*args, **kwargs)
        except aiohttp.client_exceptions.ClientResponseError as error:
            if error.status in [401, 403]:
                raise UnauthorizedError from error
            if error.status == 404:
                raise NotFoundError from error
            raise SyncthingError from error
        except Exception as error:
            raise SyncthingError from error

    async def raw_request(self, uri, params=None, data=None, method="GET"):
        """Perform request."""
        async with self._session.request(
                method,
                self._url.join(URL(uri)).update_query(params),
                json=data,
                headers={
                    "Accept": "application/json",
                    "X-API-Key": self._api_key,
                },
                timeout=self._timeout,
                verify_ssl=self._verify_ssl,
        ) as response:
            response.raise_for_status()
            if "Content-Type" in response.headers and "application/json" in response.headers[
                    "Content-Type"]:
                return await response.json()
            return await response.read()

    async def close(self):
        """Close the session."""
        if self._session and self._close_session:
            await self._session.close()
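A small usage sketch for the client above: the API key is a placeholder and rest/system/version is only an illustrative endpoint; relative URIs are resolved against the base URL with yarl's join inside raw_request.

import asyncio

async def show_version():
    api = API("my-api-key")   # placeholder key, default base URL http://127.0.0.1:8384
    try:
        # raw_request joins the relative URI onto the base:
        # URL("http://127.0.0.1:8384").join(URL("rest/system/version"))
        version = await api.request("rest/system/version")
        print(version)
    finally:
        await api.close()

# asyncio.run(show_version())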
Example #13
0
    async def _request(self, method, url, *,
                       params=None,
                       data=None,
                       json=None,
                       headers=None,
                       skip_auto_headers=None,
                       auth=None,
                       allow_redirects=True,
                       max_redirects=10,
                       compress=None,
                       chunked=None,
                       expect100=False,
                       read_until_eof=True,
                       proxy=None,
                       proxy_auth=None,
                       timeout=sentinel,
                       verify_ssl=None,
                       fingerprint=None,
                       ssl_context=None,
                       ssl=None,
                       proxy_headers=None,
                       trace_request_ctx=None):

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError('Session is closed')

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        if data is not None and json is not None:
            raise ValueError(
                'data and json parameters can not be used at the same time')
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn(
                'Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = URL(url)
        except ValueError:
            raise InvalidURL(url)

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError:
                raise InvalidURL(proxy)

        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(
            self._loop,
            timeout if timeout is not sentinel else self._read_timeout)
        handle = tm.start()

        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(
                    trace_request_ctx=trace_request_ctx)
            )
            for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(
                method,
                url,
                headers
            )

        timer = tm.timer()
        try:
            with timer:
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError("Cannot combine AUTH argument with "
                                         "credentials encoded in URL")

                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (headers is not None and
                            auth is not None and
                            hdrs.AUTHORIZATION in headers):
                        raise ValueError("Cannot combine AUTHORIZATION header "
                                         "with AUTH argument or credentials "
                                         "encoded in URL")

                    url = url.with_fragment(None)
                    cookies = self._cookie_jar.filter_cookies(url)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        for scheme, proxy_info in proxies_from_env().items():
                            if scheme == url.scheme:
                                proxy = proxy_info.proxy
                                proxy_auth = proxy_info.proxy_auth
                                break

                    req = self._request_class(
                        method, url, params=params, headers=headers,
                        skip_auto_headers=skip_headers, data=data,
                        cookies=cookies, auth=auth, version=version,
                        compress=compress, chunked=chunked,
                        expect100=expect100, loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy, proxy_auth=proxy_auth, timer=timer,
                        session=self, auto_decompress=self._auto_decompress,
                        ssl=ssl, proxy_headers=proxy_headers)

                    # connection timeout
                    try:
                        with CeilTimeout(self._conn_timeout, loop=self._loop):
                            conn = await self._connector.connect(
                                req,
                                traces=traces
                            )
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            'Connection timeout '
                            'to host {0}'.format(url)) from exc

                    tcp_nodelay(conn.transport, True)
                    tcp_cork(conn.transport, False)
                    try:
                        resp = req.send(conn)
                        try:
                            await resp.start(conn, read_until_eof)
                        except Exception:
                            resp.close()
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (
                            301, 302, 303, 307, 308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method,
                                url,
                                headers,
                                resp
                            )

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            break
                        else:
                            resp.release()

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and
                                resp.method != hdrs.METH_HEAD) \
                                or (resp.status in (301, 302) and
                                    resp.method == hdrs.METH_POST):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = (resp.headers.get(hdrs.LOCATION) or
                                 resp.headers.get(hdrs.URI))
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break

                        try:
                            r_url = URL(
                                r_url, encoded=not self.requote_redirect_url)

                        except ValueError:
                            raise InvalidURL(r_url)

                        scheme = r_url.scheme
                        if scheme not in ('http', 'https', ''):
                            resp.close()
                            raise ValueError(
                                'Can redirect only to http or https')
                        elif not scheme:
                            r_url = url.join(r_url)

                        if url.origin() != r_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = r_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if self._raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)

            for trace in traces:
                await trace.send_request_end(
                    method,
                    url,
                    headers,
                    resp
                )
            return resp

        except Exception as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            for trace in traces:
                await trace.send_request_exception(
                    method,
                    url,
                    headers,
                    e
                )
            raise
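Not part of the session code above, just an illustration of the redirect branch where a scheme-less Location value is resolved against the current request URL via url.join(r_url); the host and paths are made up.

from yarl import URL

current = URL('https://example.com/a/b?x=1')   # URL of the request that was redirected
location = URL('/login')                       # relative Location header value
print(current.join(location))                  # https://example.com/login
print(current.join(URL('next')))               # https://example.com/a/next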
Example #14
0
class BaseTestServer(ABC):
    def __init__(self,
                 *,
                 scheme=sentinel,
                 loop=None,
                 host='127.0.0.1',
                 skip_url_asserts=False,
                 **kwargs):
        self._loop = loop
        self.port = None
        self.server = None
        self.handler = None
        self._root = None
        self.host = host
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts

    @asyncio.coroutine
    def start_server(self, loop=None, **kwargs):
        if self.server:
            return
        self._loop = loop
        self.port = unused_port()
        self._ssl = kwargs.pop('ssl', None)
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme, self.host,
                                             self.port))

        handler = yield from self._make_factory(**kwargs)
        self.server = yield from self._loop.create_server(handler,
                                                          self.host,
                                                          self.port,
                                                          ssl=self._ssl)

    @abstractmethod  # pragma: no cover
    @asyncio.coroutine
    def _make_factory(self, **kwargs):
        pass

    def make_url(self, path):
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self):
        return self.server is not None

    @property
    def closed(self):
        return self._closed

    @asyncio.coroutine
    def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            self.server.close()
            yield from self.server.wait_closed()
            self._root = None
            self.port = None
            yield from self._close_hook()
            self._closed = True

    @abstractmethod
    @asyncio.coroutine
    def _close_hook(self):
        pass  # pragma: no cover

    def __enter__(self):
        self._loop.run_until_complete(self.start_server(loop=self._loop))
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._loop.run_until_complete(self.close())

    if PY_35:

        @asyncio.coroutine
        def __aenter__(self):
            yield from self.start_server(loop=self._loop)
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_value, traceback):
            yield from self.close()
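A quick illustration of what make_url does with the root built in start_server: a relative test path is joined onto the scheme/host/port root, so tests never hard-code the port; the root shown here is illustrative.

from yarl import URL

root = URL('http://127.0.0.1:8080')   # stand-in for self._root
path = URL('/api/items')
assert not path.is_absolute()         # the non-skip_url_asserts branch requires this
print(root.join(path))                # http://127.0.0.1:8080/api/items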
Example #15
0
class BaseTestServer(ABC):
    def __init__(self, *, scheme=sentinel, loop=None,
                 host='127.0.0.1', skip_url_asserts=False, **kwargs):
        self._loop = loop
        self.port = None
        self.server = None
        self.handler = None
        self._root = None
        self.host = host
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts

    async def start_server(self, loop=None, **kwargs):
        if self.server:
            return
        self._loop = loop
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.bind((self.host, 0))
        self.port = self._socket.getsockname()[1]
        self._ssl = kwargs.pop('ssl', None)
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme,
                                             self.host,
                                             self.port))

        handler = await self._make_factory(**kwargs)
        self.server = await self._loop.create_server(
            handler, ssl=self._ssl, sock=self._socket)

    @abstractmethod  # pragma: no cover
    async def _make_factory(self, **kwargs):
        pass

    def make_url(self, path):
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self):
        return self.server is not None

    @property
    def closed(self):
        return self._closed

    async def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            self.server.close()
            await self.server.wait_closed()
            self._root = None
            self.port = None
            await self._close_hook()
            self._closed = True

    @abstractmethod
    async def _close_hook(self):
        pass  # pragma: no cover

    def __enter__(self):
        raise TypeError("Use async with instead")

    def __exit__(self, exc_type, exc_value, traceback):
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self):
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.close()
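start_server above reserves a free port by binding a socket to port 0 and reading the assigned port back before handing the socket to create_server; a standalone sketch of just that trick:

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('127.0.0.1', 0))          # port 0: the OS picks any free port
print(sock.getsockname()[1])         # the port the test server would advertise in its root URL
sock.close()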
Example #16
0
    def _request(self,
                 method,
                 url,
                 *,
                 params=None,
                 data=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 allow_redirects=True,
                 max_redirects=10,
                 encoding='utf-8',
                 version=None,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 read_until_eof=True,
                 proxy=None,
                 proxy_auth=None,
                 timeout=DEFAULT_TIMEOUT):

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if version is not None:
            warnings.warn(
                "HTTP version should be specified "
                "by ClientSession constructor", DeprecationWarning)
        else:
            version = self._version

        if self.closed:
            raise RuntimeError('Session is closed')

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        if auth is None:
            auth = self._default_auth
        # It would be confusing if we support explicit Authorization header
        # with `auth` argument
        if (headers is not None and auth is not None
                and hdrs.AUTHORIZATION in headers):
            raise ValueError("Can't combine `Authorization` header with "
                             "`auth` argument")

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            proxy = URL(proxy)

        # request timeout
        if timeout is None:
            timeout = self._read_timeout
        if timeout is None:
            timeout = self._connector.conn_timeout
        elif self._connector.conn_timeout is not None:
            timeout = max(timeout, self._connector.conn_timeout)

        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        timer = self._time_service.timeout(timeout)

        with timer:
            while True:
                url = URL(url).with_fragment(None)

                cookies = self._cookie_jar.filter_cookies(url)

                req = self._request_class(method,
                                          url,
                                          params=params,
                                          headers=headers,
                                          skip_auto_headers=skip_headers,
                                          data=data,
                                          cookies=cookies,
                                          encoding=encoding,
                                          auth=auth,
                                          version=version,
                                          compress=compress,
                                          chunked=chunked,
                                          expect100=expect100,
                                          loop=self._loop,
                                          response_class=self._response_class,
                                          proxy=proxy,
                                          proxy_auth=proxy_auth,
                                          timer=timer)

                conn = yield from self._connector.connect(req)
                conn.writer.set_tcp_nodelay(True)
                try:
                    resp = req.send(conn)
                    try:
                        yield from resp.start(conn, read_until_eof)
                    except:
                        resp.close()
                        conn.close()
                        raise
                except aiohttp.ServerDisconnectedError:
                    raise
                except aiohttp.HttpProcessingError as exc:
                    raise aiohttp.ClientResponseError() from exc
                except OSError as exc:
                    raise aiohttp.ClientOSError(*exc.args) from exc

                self._cookie_jar.update_cookies(resp.cookies, resp.url)

                # redirects
                if resp.status in (301, 302, 303, 307) and allow_redirects:
                    redirects += 1
                    history.append(resp)
                    if max_redirects and redirects >= max_redirects:
                        resp.close()
                        break
                    else:
                        yield from resp.release()

                    # For 301 and 302, mimic IE behaviour, now changed in RFC.
                    # Info: https://github.com/kennethreitz/requests/pull/269
                    if (resp.status == 303 and resp.method != hdrs.METH_HEAD) \
                       or (resp.status in (301, 302) and
                           resp.method == hdrs.METH_POST):
                        method = hdrs.METH_GET
                        data = None
                        if headers.get(hdrs.CONTENT_LENGTH):
                            headers.pop(hdrs.CONTENT_LENGTH)

                    r_url = (resp.headers.get(hdrs.LOCATION)
                             or resp.headers.get(hdrs.URI))
                    if r_url is None:
                        raise RuntimeError("{0.method} {0.url} returns "
                                           "a redirect [{0.status}] status "
                                           "but response lacks a Location "
                                           "or URI HTTP header".format(resp))
                    r_url = URL(r_url)

                    scheme = r_url.scheme
                    if scheme not in ('http', 'https', ''):
                        resp.close()
                        raise ValueError('Can redirect only to http or https')
                    elif not scheme:
                        r_url = url.join(r_url)

                    url = r_url
                    params = None
                    yield from resp.release()
                    continue

                break

        resp._history = tuple(history)
        return resp
Example #17
0
import re
import traceback
import logging

import aiohttp
from yarl import URL
from bs4 import BeautifulSoup, element

base_url = URL("https://www.ktu.edu.in")
ktu_url = base_url.join(URL("/eu/core/announcements.htm"))
headers = {
    "User-Agent":
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:68.0) Gecko/20100101 Firefox/68.0"
}
logger = logging.getLogger(__name__)


def get_string(tag):
    r_string = ""
    for i in tag.children:
        if isinstance(i, element.Comment):
            continue
        elif isinstance(i, element.NavigableString):
            r_string += re.sub(r"[\n\r]", "", i.string)
        else:
            if i.name in ["a"]:
                href = URL(i.get("href", "").strip())
                if not href.is_absolute():
                    href = base_url.join(href)
                href_str = "".join([x.strip() for x in i.stripped_strings])
                r_string += f"<a href='{href}'>{href_str}</a>"
    # The snippet is truncated here; returning the assembled string is assumed
    # so the helper can be exercised below.
    return r_string
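An illustrative run of get_string on a small made-up snippet, reusing the imports and base_url above; no request to ktu_url is made.

html = '<p>Exam schedule <a href="/downloads/notice.pdf">PDF</a></p>'
soup = BeautifulSoup(html, "html.parser")
print(get_string(soup.p))
# Exam schedule <a href='https://www.ktu.edu.in/downloads/notice.pdf'>PDF</a>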
Example #18
0
def test_join_absolute():
    base = URL("http://www.cwi.nl/%7Eguido/Python.html")
    url = URL("//www.python.org/%7Eguido")
    url2 = base.join(url)
    assert str(url2) == "http://www.python.org/~guido"
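A few more joins in the same spirit, showing how relative, root-relative, and protocol-relative references resolve under RFC 3986; the hosts are illustrative.

from yarl import URL

base = URL("http://example.com/docs/index.html")
assert str(base.join(URL("guide.html"))) == "http://example.com/docs/guide.html"   # sibling document
assert str(base.join(URL("/about"))) == "http://example.com/about"                 # root-relative path
assert str(base.join(URL("//other.example/x"))) == "http://other.example/x"        # replaces authority, keeps scheme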
Example #19
0
    def _request(self, method, url, *,
                 params=None,
                 data=None,
                 json=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 allow_redirects=True,
                 max_redirects=10,
                 encoding=None,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 read_until_eof=True,
                 proxy=None,
                 proxy_auth=None,
                 timeout=DEFAULT_TIMEOUT):

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if encoding is not None:
            warnings.warn(
                "encoding parameter is not supported, "
                "please use FormData(charset='utf-8') instead",
                DeprecationWarning)

        if self.closed:
            raise RuntimeError('Session is closed')

        if data is not None and json is not None:
            raise ValueError(
                'data and json parameters can not be used at the same time')
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn(
                'Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        if auth is None:
            auth = self._default_auth
        # It would be confusing if we support explicit Authorization header
        # with `auth` argument
        if (headers is not None and
                auth is not None and
                hdrs.AUTHORIZATION in headers):
            raise ValueError("Can't combine `Authorization` header with "
                             "`auth` argument")

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            proxy = URL(proxy)

        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(
            self._loop, timeout if timeout is not None else self._read_timeout)
        handle = tm.start()

        timer = tm.timer()
        try:
            with timer:
                while True:
                    url = URL(url).with_fragment(None)
                    cookies = self._cookie_jar.filter_cookies(url)

                    req = self._request_class(
                        method, url, params=params, headers=headers,
                        skip_auto_headers=skip_headers, data=data,
                        cookies=cookies, auth=auth, version=version,
                        compress=compress, chunked=chunked,
                        expect100=expect100, loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy, proxy_auth=proxy_auth, timer=timer)

                    # connection timeout
                    try:
                        with CeilTimeout(self._conn_timeout, loop=self._loop):
                            conn = yield from self._connector.connect(req)
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            'Connection timeout '
                            'to host {0}'.format(url)) from exc

                    conn.writer.set_tcp_nodelay(True)
                    try:
                        resp = req.send(conn)
                        try:
                            yield from resp.start(conn, read_until_eof)
                        except:
                            resp.close()
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (301, 302, 303, 307) and allow_redirects:
                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            break
                        else:
                            resp.release()

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and
                                resp.method != hdrs.METH_HEAD) \
                                or (resp.status in (301, 302) and
                                    resp.method == hdrs.METH_POST):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = (resp.headers.get(hdrs.LOCATION) or
                                 resp.headers.get(hdrs.URI))
                        if r_url is None:
                            raise RuntimeError(
                                "{0.method} {0.url} returns "
                                "a redirect [{0.status}] status "
                                "but response lacks a Location "
                                "or URI HTTP header".format(resp))
                        r_url = URL(
                            r_url, encoded=not self.requote_redirect_url)

                        scheme = r_url.scheme
                        if scheme not in ('http', 'https', ''):
                            resp.close()
                            raise ValueError(
                                'Can redirect only to http or https')
                        elif not scheme:
                            r_url = url.join(r_url)

                        url = r_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if self._raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)
            return resp

        except:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            raise
Example #20
0
def test_join_from_rfc_3986_abnormal(url, expected):
    # test case from https://tools.ietf.org/html/rfc3986.html#section-5.4.2
    base = URL("http://a/b/c/d;p?q")
    url = URL(url)
    expected = URL(expected)
    assert base.join(url) == expected
Example #21
0
class ClientSession:
    """First-class interface for making HTTP requests."""

    ATTRS = frozenset([
        "_base_url",
        "_source_traceback",
        "_connector",
        "requote_redirect_url",
        "_loop",
        "_cookie_jar",
        "_connector_owner",
        "_default_auth",
        "_version",
        "_json_serialize",
        "_requote_redirect_url",
        "_timeout",
        "_raise_for_status",
        "_auto_decompress",
        "_trust_env",
        "_default_headers",
        "_skip_auto_headers",
        "_request_class",
        "_response_class",
        "_ws_response_class",
        "_trace_configs",
        "_read_bufsize",
    ])

    _source_traceback = None

    def __init__(
        self,
        base_url: Optional[StrOrURL] = None,
        *,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        json_serialize: JSONEncoder = json.dumps,
        request_class: Type[ClientRequest] = ClientRequest,
        response_class: Type[ClientResponse] = ClientResponse,
        ws_response_class: Type[
            ClientWebSocketResponse] = ClientWebSocketResponse,
        version: HttpVersion = http.HttpVersion11,
        cookie_jar: Optional[AbstractCookieJar] = None,
        connector_owner: bool = True,
        raise_for_status: bool = False,
        read_timeout: Union[float, object] = sentinel,
        conn_timeout: Optional[float] = None,
        timeout: Union[object, ClientTimeout] = sentinel,
        auto_decompress: bool = True,
        trust_env: bool = False,
        requote_redirect_url: bool = True,
        trace_configs: Optional[List[TraceConfig]] = None,
        read_bufsize: int = 2**16,
    ) -> None:
        if loop is None:
            if connector is not None:
                loop = connector._loop

        loop = get_running_loop(loop)

        if base_url is None or isinstance(base_url, URL):
            self._base_url: Optional[URL] = base_url
        else:
            self._base_url = URL(base_url)
            assert (self._base_url.origin() == self._base_url
                    ), "Only absolute URLs without path part are supported"

        if connector is None:
            connector = TCPConnector(loop=loop)

        if connector._loop is not loop:
            raise RuntimeError(
                "Session and connector has to use same event loop")

        self._loop = loop

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar

        if cookies is not None:
            self._cookie_jar.update_cookies(cookies)

        self._connector = connector  # type: Optional[BaseConnector]
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        if timeout is sentinel:
            self._timeout = DEFAULT_TIMEOUT
            if read_timeout is not sentinel:
                warnings.warn(
                    "read_timeout is deprecated, "
                    "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
                self._timeout = attr.evolve(self._timeout, total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout,
                                            connect=conn_timeout)
                warnings.warn(
                    "conn_timeout is deprecated, "
                    "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
        else:
            self._timeout = timeout  # type: ignore[assignment]
            if read_timeout is not sentinel:
                raise ValueError("read_timeout and timeout parameters "
                                 "conflict, please setup "
                                 "timeout.read")
            if conn_timeout is not None:
                raise ValueError("conn_timeout and timeout parameters "
                                 "conflict, please setup "
                                 "timeout.connect")
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env
        self._requote_redirect_url = requote_redirect_url
        self._read_bufsize = read_bufsize

        # Convert to list of tuples
        if headers:
            real_headers = CIMultiDict(headers)  # type: CIMultiDict[str]
        else:
            real_headers = CIMultiDict()
        self._default_headers = real_headers  # type: CIMultiDict[str]
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(
                istr(i) for i in skip_auto_headers)
        else:
            self._skip_auto_headers = frozenset()

        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class

        self._trace_configs = trace_configs or []
        for trace_config in self._trace_configs:
            trace_config.freeze()

    def __init_subclass__(cls: Type["ClientSession"]) -> None:
        warnings.warn(
            "Inheritance class {} from ClientSession "
            "is discouraged".format(cls.__name__),
            DeprecationWarning,
            stacklevel=2,
        )

    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom ClientSession.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    def __del__(self, _warnings: Any = warnings) -> None:
        if not self.closed:
            if PY_36:
                kwargs = {"source": self}
            else:
                kwargs = {}
            _warnings.warn(f"Unclosed client session {self!r}",
                           ResourceWarning, **kwargs)
            context = {
                "client_session": self,
                "message": "Unclosed client session"
            }
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    def request(self, method: str, url: StrOrURL,
                **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP request."""
        return _RequestContextManager(self._request(method, url, **kwargs))

    def _build_url(self, str_or_url: StrOrURL) -> URL:
        url = URL(str_or_url)
        if self._base_url is None:
            return url
        else:
            assert not url.is_absolute() and url.path.startswith("/")
            return self._base_url.join(url)

    async def _request(
        self,
        method: str,
        str_or_url: StrOrURL,
        *,
        params: Optional[Mapping[str, str]] = None,
        data: Any = None,
        json: Any = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        allow_redirects: bool = True,
        max_redirects: int = 10,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        raise_for_status: Optional[bool] = None,
        read_until_eof: bool = True,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timeout: Union[ClientTimeout, object] = sentinel,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        trace_request_ctx: Optional[SimpleNamespace] = None,
        read_bufsize: Optional[int] = None,
    ) -> ClientResponse:

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError("Session is closed")

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        if data is not None and json is not None:
            raise ValueError(
                "data and json parameters can not be used at the same time")
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = self._build_url(str_or_url)
        except ValueError as e:
            raise InvalidURL(str_or_url) from e

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError as e:
                raise InvalidURL(proxy) from e

        if timeout is sentinel:
            real_timeout = self._timeout  # type: ClientTimeout
        else:
            if not isinstance(timeout, ClientTimeout):
                real_timeout = ClientTimeout(
                    total=timeout)  # type: ignore[arg-type]
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(self._loop, real_timeout.total)
        handle = tm.start()

        if read_bufsize is None:
            read_bufsize = self._read_bufsize

        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(
                    trace_request_ctx=trace_request_ctx),
            ) for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(method, url.update_query(params),
                                           headers)

        timer = tm.timer()
        try:
            with timer:
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError("Cannot combine AUTH argument with "
                                         "credentials encoded in URL")

                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (headers is not None and auth is not None
                            and hdrs.AUTHORIZATION in headers):
                        raise ValueError("Cannot combine AUTHORIZATION header "
                                         "with AUTH argument or credentials "
                                         "encoded in URL")

                    all_cookies = self._cookie_jar.filter_cookies(url)

                    if cookies is not None:
                        tmp_cookie_jar = CookieJar()
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            all_cookies.load(req_cookies)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        with suppress(LookupError):
                            proxy, proxy_auth = get_env_proxy_for_url(url)

                    req = self._request_class(
                        method,
                        url,
                        params=params,
                        headers=headers,
                        skip_auto_headers=skip_headers,
                        data=data,
                        cookies=all_cookies,
                        auth=auth,
                        version=version,
                        compress=compress,
                        chunked=chunked,
                        expect100=expect100,
                        loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy,
                        proxy_auth=proxy_auth,
                        timer=timer,
                        session=self,
                        ssl=ssl,
                        proxy_headers=proxy_headers,
                        traces=traces,
                    )

                    # connection timeout
                    try:
                        async with ceil_timeout(real_timeout.connect):
                            assert self._connector is not None
                            conn = await self._connector.connect(
                                req, traces=traces, timeout=real_timeout)
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            "Connection timeout "
                            "to host {}".format(url)) from exc

                    assert conn.transport is not None

                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method.upper() == "HEAD",
                        read_until_eof=read_until_eof,
                        auto_decompress=self._auto_decompress,
                        read_timeout=real_timeout.sock_read,
                        read_bufsize=read_bufsize,
                    )

                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (301, 302, 303, 307,
                                       308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method, url.update_query(params), headers,
                                resp)

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(history[0].request_info,
                                                   tuple(history))

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303
                                and resp.method != hdrs.METH_HEAD) or (
                                    resp.status in (301, 302)
                                    and resp.method == hdrs.METH_POST):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = resp.headers.get(
                            hdrs.LOCATION) or resp.headers.get(hdrs.URI)
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break
                        else:
                            # reading from correct redirection
                            # response is forbidden
                            resp.release()

                        try:
                            parsed_url = URL(
                                r_url, encoded=not self._requote_redirect_url)

                        except ValueError as e:
                            raise InvalidURL(r_url) from e

                        scheme = parsed_url.scheme
                        if scheme not in ("http", "https", ""):
                            resp.close()
                            raise ValueError(
                                "Can redirect only to http or https")
                        elif not scheme:
                            parsed_url = url.join(parsed_url)

                        if url.origin() != parsed_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = parsed_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if raise_for_status is None:
                raise_for_status = self._raise_for_status
            if raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)

            for trace in traces:
                await trace.send_request_end(method, url.update_query(params),
                                             headers, resp)
            return resp

        except BaseException as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            for trace in traces:
                await trace.send_request_exception(method,
                                                   url.update_query(params),
                                                   headers, e)
            raise

    def ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> "_WSRequestContextManager":
        """Initiate websocket connection."""
        return _WSRequestContextManager(
            self._ws_connect(
                url,
                method=method,
                protocols=protocols,
                timeout=timeout,
                receive_timeout=receive_timeout,
                autoclose=autoclose,
                autoping=autoping,
                heartbeat=heartbeat,
                auth=auth,
                origin=origin,
                params=params,
                headers=headers,
                proxy=proxy,
                proxy_auth=proxy_auth,
                ssl=ssl,
                verify_ssl=verify_ssl,
                fingerprint=fingerprint,
                ssl_context=ssl_context,
                proxy_headers=proxy_headers,
                compress=compress,
                max_msg_size=max_msg_size,
            ))
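
    # A minimal usage sketch for ws_connect (assumptions: an existing ``session``
    # instance and a placeholder echo endpoint; not part of this class):
    #
    #     async with session.ws_connect('ws://example.com/ws') as ws:
    #         await ws.send_str('ping')
    #         msg = await ws.receive()
    #         if msg.type == aiohttp.WSMsgType.TEXT:
    #             print(msg.data)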

    async def _ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> ClientWebSocketResponse:

        if headers is None:
            real_headers = CIMultiDict()  # type: CIMultiDict[str]
        else:
            real_headers = CIMultiDict(headers)

        default_headers = {
            hdrs.UPGRADE: "websocket",
            hdrs.CONNECTION: "upgrade",
            hdrs.SEC_WEBSOCKET_VERSION: "13",
        }

        for key, value in default_headers.items():
            real_headers.setdefault(key, value)

        sec_key = base64.b64encode(os.urandom(16))
        real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

        if protocols:
            real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
        if origin is not None:
            real_headers[hdrs.ORIGIN] = origin
        if compress:
            extstr = ws_ext_gen(compress=compress)
            real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        # send request
        resp = await self.request(
            method,
            url,
            params=params,
            headers=real_headers,
            read_until_eof=False,
            auth=auth,
            proxy=proxy,
            proxy_auth=proxy_auth,
            ssl=ssl,
            proxy_headers=proxy_headers,
        )

        try:
            # check handshake
            if resp.status != 101:
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid response status",
                    status=resp.status,
                    headers=resp.headers,
                )

            if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid upgrade header",
                    status=resp.status,
                    headers=resp.headers,
                )

            if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid connection header",
                    status=resp.status,
                    headers=resp.headers,
                )

            # key calculation
            r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
            match = base64.b64encode(hashlib.sha1(sec_key +
                                                  WS_KEY).digest()).decode()
            if r_key != match:
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid challenge response",
                    status=resp.status,
                    headers=resp.headers,
                )
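            # Worked example of this accept-key check, using the sample values
            # from RFC 6455, section 1.3 (assumption: WS_KEY is the standard
            # websocket GUID b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"):
            #   sec_key = b"dGhlIHNhbXBsZSBub25jZQ=="
            #   base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest())
            #   -> b"s3pPLMBiTxaQ9kYGzzhZRbK+xOo="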

            # websocket protocol
            protocol = None
            if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
                resp_protocols = [
                    proto.strip() for proto in resp.headers[
                        hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
                ]

                for proto in resp_protocols:
                    if proto in protocols:
                        protocol = proto
                        break

            # websocket compress
            notakeover = False
            if compress:
                compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
                if compress_hdrs:
                    try:
                        compress, notakeover = ws_ext_parse(compress_hdrs)
                    except WSHandshakeError as exc:
                        raise WSServerHandshakeError(
                            resp.request_info,
                            resp.history,
                            message=exc.args[0],
                            status=resp.status,
                            headers=resp.headers,
                        ) from exc
                else:
                    compress = 0
                    notakeover = False

            conn = resp.connection
            assert conn is not None
            conn_proto = conn.protocol
            assert conn_proto is not None
            transport = conn.transport
            assert transport is not None
            reader = FlowControlDataQueue(
                conn_proto, 2**16,
                loop=self._loop)  # type: FlowControlDataQueue[WSMessage]
            conn_proto.set_parser(WebSocketReader(reader, max_msg_size),
                                  reader)
            writer = WebSocketWriter(
                conn_proto,
                transport,
                use_mask=True,
                compress=compress,
                notakeover=notakeover,
            )
        except BaseException:
            resp.close()
            raise
        else:
            return self._ws_response_class(
                reader,
                writer,
                protocol,
                resp,
                timeout,
                autoclose,
                autoping,
                self._loop,
                receive_timeout=receive_timeout,
                heartbeat=heartbeat,
                compress=compress,
                client_notakeover=notakeover,
            )

    def _prepare_headers(
            self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
        """Add default headers and transform it to CIMultiDict"""
        # Convert headers to MultiDict
        result = CIMultiDict(self._default_headers)
        if headers:
            if not isinstance(headers, (MultiDictProxy, MultiDict)):
                headers = CIMultiDict(headers)
            added_names = set()  # type: Set[str]
            for key, value in headers.items():
                if key in added_names:
                    result.add(key, value)
                else:
                    result[key] = value
                    added_names.add(key)
        return result
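
    # Merge behaviour sketch (assumption: purely illustrative values; not part
    # of this class):
    #
    #     session = ClientSession(headers={'X-Token': 'abc'})
    #     merged = session._prepare_headers({'Accept': 'text/html'})
    #     # ``merged`` is a CIMultiDict holding both X-Token and Accept; the
    #     # first occurrence of a passed key overrides any session default of
    #     # the same name, and repeated occurrences are preserved via add().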

    def get(self,
            url: StrOrURL,
            *,
            allow_redirects: bool = True,
            **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP GET request."""
        return _RequestContextManager(
            self._request(hdrs.METH_GET,
                          url,
                          allow_redirects=allow_redirects,
                          **kwargs))

    def options(self,
                url: StrOrURL,
                *,
                allow_redirects: bool = True,
                **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP OPTIONS request."""
        return _RequestContextManager(
            self._request(hdrs.METH_OPTIONS,
                          url,
                          allow_redirects=allow_redirects,
                          **kwargs))

    def head(self,
             url: StrOrURL,
             *,
             allow_redirects: bool = False,
             **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP HEAD request."""
        return _RequestContextManager(
            self._request(hdrs.METH_HEAD,
                          url,
                          allow_redirects=allow_redirects,
                          **kwargs))

    def post(self,
             url: StrOrURL,
             *,
             data: Any = None,
             **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP POST request."""
        return _RequestContextManager(
            self._request(hdrs.METH_POST, url, data=data, **kwargs))

    def put(self,
            url: StrOrURL,
            *,
            data: Any = None,
            **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP PUT request."""
        return _RequestContextManager(
            self._request(hdrs.METH_PUT, url, data=data, **kwargs))

    def patch(self,
              url: StrOrURL,
              *,
              data: Any = None,
              **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP PATCH request."""
        return _RequestContextManager(
            self._request(hdrs.METH_PATCH, url, data=data, **kwargs))

    def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP DELETE request."""
        return _RequestContextManager(
            self._request(hdrs.METH_DELETE, url, **kwargs))
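
    # Usage sketch for the verb helpers above (assumptions: an existing
    # ``session`` instance and placeholder URLs; not part of this class):
    #
    #     async with session.post('http://example.com/items', json={'k': 'v'}) as resp:
    #         created = await resp.json()
    #
    #     async with session.get('http://example.com/items', allow_redirects=False) as resp:
    #         print(resp.status)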

    async def close(self) -> None:
        """Close underlying connector.

        Release all acquired resources.
        """
        if not self.closed:
            if self._connector is not None and self._connector_owner:
                await self._connector.close()
            self._connector = None

    @property
    def closed(self) -> bool:
        """Is client session closed.

        A readonly property.
        """
        return self._connector is None or self._connector.closed

    @property
    def connector(self) -> Optional[BaseConnector]:
        """Connector instance used for the session."""
        return self._connector

    @property
    def cookie_jar(self) -> AbstractCookieJar:
        """The session cookies."""
        return self._cookie_jar

    @property
    def version(self) -> Tuple[int, int]:
        """The session HTTP protocol version."""
        return self._version

    @property
    def requote_redirect_url(self) -> bool:
        """Do URL requoting on redirection handling."""
        return self._requote_redirect_url

    @requote_redirect_url.setter
    def requote_redirect_url(self, val: bool) -> None:
        """Do URL requoting on redirection handling."""
        warnings.warn(
            "session.requote_redirect_url modification "
            "is deprecated #2778",
            DeprecationWarning,
            stacklevel=2,
        )
        self._requote_redirect_url = val

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Session's loop."""
        warnings.warn("client.loop property is deprecated",
                      DeprecationWarning,
                      stacklevel=2)
        return self._loop

    @property
    def timeout(self) -> Union[object, ClientTimeout]:
        """Timeout for the session."""
        return self._timeout

    @property
    def headers(self) -> "CIMultiDict[str]":
        """The default headers of the client session."""
        return self._default_headers

    @property
    def skip_auto_headers(self) -> FrozenSet[istr]:
        """Headers for which autogeneration should be skipped"""
        return self._skip_auto_headers

    @property
    def auth(self) -> Optional[BasicAuth]:
        """An object that represents HTTP Basic Authorization"""
        return self._default_auth

    @property
    def json_serialize(self) -> JSONEncoder:
        """Json serializer callable"""
        return self._json_serialize

    @property
    def connector_owner(self) -> bool:
        """Should connector be closed on session closing"""
        return self._connector_owner

    @property
    def raise_for_status(
        self,
    ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
        """Should `ClientResponse.raise_for_status()` be called for each response."""
        return self._raise_for_status

    @property
    def auto_decompress(self) -> bool:
        """Should the body response be automatically decompressed."""
        return self._auto_decompress

    @property
    def trust_env(self) -> bool:
        """
        Should proxies information from environment or netrc be trusted.

        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
        or ~/.netrc file if present.
        """
        return self._trust_env

    @property
    def trace_configs(self) -> List[TraceConfig]:
        """A list of TraceConfig instances used for client tracing"""
        return self._trace_configs

    def detach(self) -> None:
        """Detach connector from session without closing the former.

        Session is switched to closed state anyway.
        """
        self._connector = None

    def __enter__(self) -> None:
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "ClientSession":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        await self.close()
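
A hedged usage sketch for the session above (assumptions: aiohttp installed and example.com used only as a placeholder host); it exercises the request, timeout and redirect machinery implemented in _request:

import asyncio

import aiohttp


async def main():
    timeout = aiohttp.ClientTimeout(total=30)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(
                'http://example.com/',  # placeholder URL
                allow_redirects=True,
                max_redirects=5) as resp:
            resp.raise_for_status()
            body = await resp.text()
            # resp.history holds the intermediate redirect responses
            print(resp.status, len(body), [r.status for r in resp.history])


asyncio.run(main())
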
Пример #22
0
class BaseTestServer(ABC):
    def __init__(self, *, scheme=sentinel, loop=None,
                 host='127.0.0.1', port=None, skip_url_asserts=False,
                 **kwargs):
        self._loop = loop
        self.runner = None
        self._root = None
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts

    async def start_server(self, loop=None, **kwargs):
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop('ssl', None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = unused_port()
        site = TCPSite(self.runner, host=self.host, port=self.port,
                       ssl_context=self._ssl)
        await site.start()
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme,
                                             self.host,
                                             self.port))

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs):
        pass

    def make_url(self, path):
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self):
        return self.runner is not None

    @property
    def closed(self):
        return self._closed

    @property
    def handler(self):
        # for backward compatibility
        # web.Server instance
        return self.runner.server

    async def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self):
        raise TypeError("Use async with instead")

    def __exit__(self, exc_type, exc_value, traceback):
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self):
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.close()
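
A hedged sketch of how such a test server is typically driven in a test (assumptions: aiohttp's concrete TestServer/TestClient pair and a trivial application; the handler name is illustrative):

import asyncio

from aiohttp import web
from aiohttp.test_utils import TestClient, TestServer


async def demo():
    async def hello(request):
        return web.Response(text='ok')

    app = web.Application()
    app.router.add_get('/', hello)

    async with TestServer(app) as server:
        async with TestClient(server) as client:
            resp = await client.get('/')
            assert resp.status == 200


asyncio.run(demo())
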
Пример #23
0
    def _request(self,
                 method,
                 url,
                 *,
                 params=None,
                 data=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 allow_redirects=True,
                 max_redirects=10,
                 encoding='utf-8',
                 version=None,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 read_until_eof=True,
                 proxy=None,
                 proxy_auth=None,
                 timeout=5 * 60):

        if version is not None:
            warnings.warn(
                "HTTP version should be specified "
                "by ClientSession constructor", DeprecationWarning)
        else:
            version = self._version

        if self.closed:
            raise RuntimeError('Session is closed')

        redirects = 0
        history = []

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        if auth is None:
            auth = self._default_auth
        # It would be confusing if we support explicit Authorization header
        # with `auth` argument
        if (headers is not None and auth is not None
                and hdrs.AUTHORIZATION in headers):
            raise ValueError("Can't combine `Authorization` header with "
                             "`auth` argument")

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            proxy = URL(proxy)

        while True:
            url = URL(url).with_fragment(None)

            cookies = self._cookie_jar.filter_cookies(url)

            req = self._request_class(method,
                                      url,
                                      params=params,
                                      headers=headers,
                                      skip_auto_headers=skip_headers,
                                      data=data,
                                      cookies=cookies,
                                      encoding=encoding,
                                      auth=auth,
                                      version=version,
                                      compress=compress,
                                      chunked=chunked,
                                      expect100=expect100,
                                      loop=self._loop,
                                      response_class=self._response_class,
                                      proxy=proxy,
                                      proxy_auth=proxy_auth,
                                      timeout=timeout)

            with Timeout(timeout, loop=self._loop):
                conn = yield from self._connector.connect(req)
            conn.writer.set_tcp_nodelay(True)
            try:
                resp = req.send(conn.writer, conn.reader)
                try:
                    yield from resp.start(conn, read_until_eof)
                except BaseException:
                    resp.close()
                    conn.close()
                    raise
            except (aiohttp.HttpProcessingError,
                    aiohttp.ServerDisconnectedError) as exc:
                raise aiohttp.ClientResponseError() from exc
            except OSError as exc:
                raise aiohttp.ClientOSError(*exc.args) from exc

            self._cookie_jar.update_cookies(resp.cookies, resp.url_obj)

            # redirects
            if resp.status in (301, 302, 303, 307) and allow_redirects:
                redirects += 1
                history.append(resp)
                if max_redirects and redirects >= max_redirects:
                    resp.close()
                    break
                else:
                    # TODO: close the connection if BODY is large enough
                    # Redirect with big BODY is forbidden by HTTP protocol
                    # but malformed server may send illegal response.
                    # Small BODIES with text like "Not Found" are still
                    # perfectly fine and should be accepted.
                    yield from resp.release()

                # For 301 and 302, mimic IE behaviour, now changed in RFC.
                # Details: https://github.com/kennethreitz/requests/pull/269
                if (resp.status == 303 and resp.method != hdrs.METH_HEAD) \
                   or (resp.status in (301, 302) and
                       resp.method == hdrs.METH_POST):
                    method = hdrs.METH_GET
                    data = None
                    if headers.get(hdrs.CONTENT_LENGTH):
                        headers.pop(hdrs.CONTENT_LENGTH)

                r_url = URL(
                    resp.headers.get(hdrs.LOCATION)
                    or resp.headers.get(hdrs.URI))

                scheme = r_url.scheme
                if scheme not in ('http', 'https', ''):
                    resp.close()
                    raise ValueError('Can redirect only to http or https')
                elif not scheme:
                    r_url = url.join(r_url)

                url = r_url
                params = None
                yield from resp.release()
                continue

            break

        resp._history = tuple(history)
        return resp
Пример #24
0
def test_join():
    base = URL('http://www.cwi.nl/%7Eguido/Python.html')
    url = URL('FAQ.html')
    url2 = base.join(url)
    assert str(url2) == 'http://www.cwi.nl/~guido/FAQ.html'
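
join follows the RFC 3986 resolution rules: a relative reference is merged with the base path, a rooted path replaces the base path, and an absolute URL replaces the base entirely. A small illustrative sketch (assumption: yarl installed; hosts are placeholders):

from yarl import URL

base = URL('http://example.com/a/b')
assert str(base.join(URL('c'))) == 'http://example.com/a/c'
assert str(base.join(URL('/c'))) == 'http://example.com/c'
assert str(base.join(URL('http://other.example/x'))) == 'http://other.example/x'
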
Пример #25
0
class SpiderCrawler:
    def __init__(self, start_url, database, depth):
        self.client = httpx.AsyncClient()
        self.url = URL(start_url)
        self.db = database
        self.depth = depth

    @timer
    async def get_data_from_url(self):
        calls = 0

        @not_retries
        async def load(url_: URL, level_):
            nonlocal calls
            calls += 1
            try:
                title, html_body, soup = await self._load_and_parse(url_)
            except TypeError:
                # Can't download
                return

            asyncio.ensure_future(
                self.db.save_to_db(url_,
                                   title,
                                   html_body,
                                   parent=self.url.human_repr()))

            if level_ >= self.depth:
                return

            refs = self._ref_generator(soup.findAll('a'))
            todos = [load(ref, level_ + 1) for ref in refs]
            await asyncio.gather(*todos)

        try:
            await load(self.url, 0)
        finally:
            print("CALLS: ", calls)
            await self.client.aclose()
            await self.db.pg.pool.close()

    async def _load_and_parse(self, url_: URL):
        try:
            res = await self.client.get(str(url_))
        except httpx.HTTPError:
            return
        except ValueError:
            return

        soup = BeautifulSoup(res, 'lxml')

        try:
            title = soup.title.text
        except AttributeError:
            title = None
        html_body = res.text

        return title, html_body, soup

    def _ref_generator(self, bs_result_set):

        for ref in bs_result_set:
            try:
                href = URL(ref.attrs['href'])

                if href.query_string:  # Without QS
                    continue

                if not href.is_absolute():
                    href = self.url.join(href)

                if href != self.url:
                    yield href
            except KeyError:
                continue
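
A hedged sketch of how this crawler might be driven (assumption: db is a hypothetical object exposing the save_to_db coroutine and pg.pool attribute that SpiderCrawler relies on; it is not defined here):

import asyncio


async def run_crawl(db):
    # db is a placeholder matching the interface SpiderCrawler expects above
    crawler = SpiderCrawler('https://example.com/', db, depth=2)
    await crawler.get_data_from_url()


# asyncio.run(run_crawl(make_db()))  # make_db() is a hypothetical factory
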
Пример #26
0
def test_join_non_url():
    base = URL('http://example.com')
    with pytest.raises(TypeError):
        base.join('path/to')
Пример #27
0
class BaseTestServer(ABC):
    def __init__(self,
                 *,
                 scheme: Union[str, object]=sentinel,
                 loop: Optional[asyncio.AbstractEventLoop]=None,
                 host: str='127.0.0.1',
                 port: Optional[int]=None,
                 skip_url_asserts: bool=False,
                 **kwargs: Any) -> None:
        self._loop = loop
        self.runner = None  # type: Optional[BaseRunner]
        self._root = None  # type: Optional[URL]
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts

    async def start_server(self,
                           loop: Optional[asyncio.AbstractEventLoop]=None,
                           **kwargs: Any) -> None:
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop('ssl', None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        _sock = get_port_socket(self.host, self.port)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme,
                                             self.host,
                                             self.port))

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        pass

    def make_url(self, path: str) -> URL:
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self) -> bool:
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        raise TypeError("Use async with instead")

    def __exit__(self,
                 exc_type: Optional[Type[BaseException]],
                 exc_value: Optional[BaseException],
                 traceback: Optional[TracebackType]) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> 'BaseTestServer':
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(self,
                        exc_type: Optional[Type[BaseException]],
                        exc_value: Optional[BaseException],
                        traceback: Optional[TracebackType]) -> None:
        await self.close()
Пример #28
0
class BaseTestServer(ABC):
    def __init__(self,
                 *,
                 scheme=sentinel,
                 loop=None,
                 host='127.0.0.1',
                 port=None,
                 skip_url_asserts=False,
                 **kwargs):
        self._loop = loop
        self.runner = None
        self._root = None
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts

    async def start_server(self, loop=None, **kwargs):
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop('ssl', None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = unused_port()
        site = TCPSite(self.runner,
                       host=self.host,
                       port=self.port,
                       ssl_context=self._ssl)
        await site.start()
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme, self.host,
                                             self.port))

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs):
        pass

    def make_url(self, path):
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self):
        return self.runner is not None

    @property
    def closed(self):
        return self._closed

    @property
    def handler(self):
        # for backward compatibility
        # web.Server instance
        return self.runner.server

    async def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self):
        raise TypeError("Use async with instead")

    def __exit__(self, exc_type, exc_value, traceback):
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self):
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.close()
Пример #29
0
class TestServer:
    def __init__(self, app, *, scheme=sentinel, host='127.0.0.1'):
        self.app = app
        self._loop = app.loop
        self.port = None
        self.server = None
        self.handler = None
        self._root = None
        self.host = host
        self._closed = False
        self.scheme = scheme

    @asyncio.coroutine
    def start_server(self, **kwargs):
        if self.server:
            return
        self.port = unused_port()
        self._ssl = kwargs.pop('ssl', None)
        if self.scheme is sentinel:
            if self._ssl:
                scheme = 'https'
            else:
                scheme = 'http'
            self.scheme = scheme
        self._root = URL('{}://{}:{}'.format(self.scheme,
                                             self.host,
                                             self.port))
        yield from self.app.startup()
        self.handler = self.app.make_handler(**kwargs)
        self.server = yield from self._loop.create_server(self.handler,
                                                          self.host,
                                                          self.port,
                                                          ssl=self._ssl)

    def make_url(self, path):
        url = URL(path)
        assert not url.is_absolute()
        return self._root.join(url)

    @asyncio.coroutine
    def close(self):
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.server is not None and not self._closed:
            self.server.close()
            yield from self.server.wait_closed()
            yield from self.app.shutdown()
            yield from self.handler.finish_connections()
            yield from self.app.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self):
        self._loop.run_until_complete(self.start_server())
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._loop.run_until_complete(self.close())

    if PY_35:
        @asyncio.coroutine
        def __aenter__(self):
            yield from self.start_server()
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_value, traceback):
            yield from self.close()
Пример #30
0
    def _request(self, method, url, *,
                 params=None,
                 data=None,
                 json=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 allow_redirects=True,
                 max_redirects=10,
                 encoding=None,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 read_until_eof=True,
                 proxy=None,
                 proxy_auth=None,
                 timeout=sentinel,
                 verify_ssl=None,
                 fingerprint=None,
                 ssl_context=None,
                 proxy_headers=None):

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if encoding is not None:
            warnings.warn(
                "encoding parameter is not supported, "
                "please use FormData(charset='utf-8') instead",
                DeprecationWarning)

        if self.closed:
            raise RuntimeError('Session is closed')

        if data is not None and json is not None:
            raise ValueError(
                'data and json parameters can not be used at the same time')
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn(
                'Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = URL(url)
        except ValueError:
            raise InvalidURL(url)

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError:
                raise InvalidURL(proxy)

        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(
            self._loop,
            timeout if timeout is not sentinel else self._read_timeout)
        handle = tm.start()

        url = URL(url)
        timer = tm.timer()
        try:
            with timer:
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError("Cannot combine AUTH argument with "
                                         "credentials encoded in URL")

                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (headers is not None and
                            auth is not None and
                            hdrs.AUTHORIZATION in headers):
                        raise ValueError("Cannot combine AUTHORIZATION header "
                                         "with AUTH argument or credentials "
                                         "encoded in URL")

                    url = url.with_fragment(None)
                    cookies = self._cookie_jar.filter_cookies(url)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        for scheme, proxy_info in proxies_from_env().items():
                            if scheme == url.scheme:
                                proxy, proxy_auth = proxy_info
                                break

                    req = self._request_class(
                        method, url, params=params, headers=headers,
                        skip_auto_headers=skip_headers, data=data,
                        cookies=cookies, auth=auth, version=version,
                        compress=compress, chunked=chunked,
                        expect100=expect100, loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy, proxy_auth=proxy_auth, timer=timer,
                        session=self, auto_decompress=self._auto_decompress,
                        verify_ssl=verify_ssl, fingerprint=fingerprint,
                        ssl_context=ssl_context, proxy_headers=proxy_headers)

                    # connection timeout
                    try:
                        with CeilTimeout(self._conn_timeout, loop=self._loop):
                            conn = yield from self._connector.connect(req)
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            'Connection timeout '
                            'to host {0}'.format(url)) from exc

                    conn.writer.set_tcp_nodelay(True)
                    try:
                        resp = req.send(conn)
                        try:
                            yield from resp.start(conn, read_until_eof)
                        except BaseException:
                            resp.close()
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (
                            301, 302, 303, 307, 308) and allow_redirects:
                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            break
                        else:
                            resp.release()

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and
                                resp.method != hdrs.METH_HEAD) \
                                or (resp.status in (301, 302) and
                                    resp.method == hdrs.METH_POST):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = (resp.headers.get(hdrs.LOCATION) or
                                 resp.headers.get(hdrs.URI))
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break

                        try:
                            r_url = URL(
                                r_url, encoded=not self.requote_redirect_url)

                        except ValueError:
                            raise InvalidURL(r_url)

                        scheme = r_url.scheme
                        if scheme not in ('http', 'https', ''):
                            resp.close()
                            raise ValueError(
                                'Can redirect only to http or https')
                        elif not scheme:
                            r_url = url.join(r_url)

                        if url.origin() != r_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = r_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if self._raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)
            return resp

        except BaseException:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            raise
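
From the caller's side, the redirect loop above surfaces as resp.history; a minimal sketch using the modern async/await client API (assumption: httpbin.org is only a placeholder endpoint that issues two redirects):

import asyncio

import aiohttp


async def show_redirects():
    async with aiohttp.ClientSession() as session:
        async with session.get('https://httpbin.org/redirect/2',
                               max_redirects=10) as resp:
            # each intermediate 30x response is kept in resp.history
            print(resp.status, [r.status for r in resp.history])


asyncio.run(show_redirects())
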
Пример #31
0
    async def _request(
            self,
            method: str,
            str_or_url: StrOrURL, *,
            params: Optional[Mapping[str, str]]=None,
            data: Any=None,
            json: Any=None,
            cookies: Optional[LooseCookies]=None,
            headers: LooseHeaders=None,
            skip_auto_headers: Optional[Iterable[str]]=None,
            auth: Optional[BasicAuth]=None,
            allow_redirects: bool=True,
            max_redirects: int=10,
            compress: Optional[str]=None,
            chunked: Optional[bool]=None,
            expect100: bool=False,
            raise_for_status: Optional[bool]=None,
            read_until_eof: bool=True,
            proxy: Optional[StrOrURL]=None,
            proxy_auth: Optional[BasicAuth]=None,
            timeout: Union[ClientTimeout, object]=sentinel,
            ssl: Optional[Union[SSLContext, bool, Fingerprint]]=None,
            proxy_headers: Optional[LooseHeaders]=None,
            trace_request_ctx: Optional[SimpleNamespace]=None
    ) -> ClientResponse:

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError('Session is closed')

        if not isinstance(ssl, SSL_ALLOWED_TYPES):
            raise TypeError("ssl should be SSLContext, bool, Fingerprint, "
                            "or None, got {!r} instead.".format(ssl))

        if data is not None and json is not None:
            raise ValueError(
                'data and json parameters can not be used at the same time')
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn(
                'Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = URL(str_or_url)
        except ValueError:
            raise InvalidURL(str_or_url)

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError:
                raise InvalidURL(proxy)

        if timeout is sentinel:
            real_timeout = self._timeout  # type: ClientTimeout
        else:
            if not isinstance(timeout, ClientTimeout):
                real_timeout = ClientTimeout(total=timeout)  # type: ignore
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(self._loop, real_timeout.total)
        handle = tm.start()

        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(
                    trace_request_ctx=trace_request_ctx)
            )
            for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(
                method,
                url,
                headers
            )

        timer = tm.timer()
        try:
            with timer:
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError("Cannot combine AUTH argument with "
                                         "credentials encoded in URL")

                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (headers is not None and
                            auth is not None and
                            hdrs.AUTHORIZATION in headers):
                        raise ValueError("Cannot combine AUTHORIZATION header "
                                         "with AUTH argument or credentials "
                                         "encoded in URL")

                    all_cookies = self._cookie_jar.filter_cookies(url)

                    if cookies is not None:
                        tmp_cookie_jar = CookieJar()
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            all_cookies.load(req_cookies)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        for scheme, proxy_info in proxies_from_env().items():
                            if scheme == url.scheme:
                                proxy = proxy_info.proxy
                                proxy_auth = proxy_info.proxy_auth
                                break

                    req = self._request_class(
                        method, url, params=params, headers=headers,
                        skip_auto_headers=skip_headers, data=data,
                        cookies=all_cookies, auth=auth, version=version,
                        compress=compress, chunked=chunked,
                        expect100=expect100, loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy, proxy_auth=proxy_auth, timer=timer,
                        session=self,
                        ssl=ssl, proxy_headers=proxy_headers, traces=traces)

                    # connection timeout
                    try:
                        with CeilTimeout(real_timeout.connect,
                                         loop=self._loop):
                            assert self._connector is not None
                            conn = await self._connector.connect(
                                req,
                                traces=traces,
                                timeout=real_timeout
                            )
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            'Connection timeout '
                            'to host {0}'.format(url)) from exc

                    assert conn.transport is not None

                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method.upper() == 'HEAD',
                        read_until_eof=read_until_eof,
                        auto_decompress=self._auto_decompress,
                        read_timeout=real_timeout.sock_read)

                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (
                            301, 302, 303, 307, 308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method,
                                url,
                                headers,
                                resp
                            )

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(
                                history[0].request_info, tuple(history))

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and
                                resp.method != hdrs.METH_HEAD) \
                                or (resp.status in (301, 302) and
                                    resp.method == hdrs.METH_POST):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = (resp.headers.get(hdrs.LOCATION) or
                                 resp.headers.get(hdrs.URI))
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break
                        else:
                            # reading from correct redirection
                            # response is forbidden
                            resp.release()

                        try:
                            r_url = URL(
                                r_url, encoded=not self._requote_redirect_url)

                        except ValueError:
                            raise InvalidURL(r_url)

                        scheme = r_url.scheme
                        if scheme not in ('http', 'https', ''):
                            resp.close()
                            raise ValueError(
                                'Can redirect only to http or https')
                        elif not scheme:
                            r_url = url.join(r_url)

                        if url.origin() != r_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = r_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if raise_for_status is None:
                raise_for_status = self._raise_for_status
            if raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)

            for trace in traces:
                await trace.send_request_end(
                    method,
                    url,
                    headers,
                    resp
                )
            return resp

        except BaseException as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            for trace in traces:
                await trace.send_request_exception(
                    method,
                    url,
                    headers,
                    e
                )
            raise
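The redirect handling above parses the Location (or URI) header, rejects non-HTTP schemes, and joins a relative target onto the current request URL. A minimal standalone sketch of that resolution step, assuming only yarl; the helper name resolve_redirect is hypothetical:

from yarl import URL

def resolve_redirect(current: URL, location: str, requote: bool = True) -> URL:
    # Parse the header value; `requote` mirrors `encoded=not self._requote_redirect_url`.
    target = URL(location, encoded=not requote)
    # Only http/https (or scheme-less, i.e. relative) targets are allowed.
    if target.scheme not in ('http', 'https', ''):
        raise ValueError('Can redirect only to http or https')
    # A relative reference is resolved against the current request URL.
    if not target.scheme:
        target = current.join(target)
    return target

# A path-only redirect is joined onto the current URL ...
assert resolve_redirect(URL('http://a/b/c?q=1'), '/login') == URL('http://a/login')
# ... while an absolute redirect replaces it entirely.
assert resolve_redirect(URL('http://a/b'), 'https://b.example/x') == URL('https://b.example/x')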
Example #32
    async def _request(
            self,
            method: str,
            str_or_url: StrOrURL,
            *,
            params: Optional[Mapping[str, str]] = None,
            data: Any = None,
            json: Any = None,
            cookies: Optional[LooseCookies] = None,
            headers: Optional[LooseHeaders] = None,
            skip_auto_headers: Optional[Iterable[str]] = None,
            auth: Optional[BasicAuth] = None,
            allow_redirects: bool = True,
            max_redirects: int = 10,
            compress: Optional[str] = None,
            chunked: Optional[bool] = None,
            expect100: bool = False,
            raise_for_status: Optional[bool] = None,
            read_until_eof: bool = True,
            proxy: Optional[StrOrURL] = None,
            proxy_auth: Optional[BasicAuth] = None,
            timeout: Union[ClientTimeout, object] = sentinel,
            ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
            proxy_headers: Optional[LooseHeaders] = None,
            trace_request_ctx: Optional[SimpleNamespace] = None
    ) -> ClientResponse:

        # NOTE: timeout clamps existing connect and read timeouts.  We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError('Session is closed')

        if not isinstance(ssl, SSL_ALLOWED_TYPES):
            raise TypeError("ssl should be SSLContext, bool, Fingerprint, "
                            "or None, got {!r} instead.".format(ssl))

        if data is not None and json is not None:
            raise ValueError(
                'data and json parameters can not be used at the same time')
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = URL(str_or_url)
        except ValueError:
            raise InvalidURL(str_or_url)

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError:
                raise InvalidURL(proxy)

        if timeout is sentinel:
            real_timeout = self._timeout  # type: ClientTimeout
        else:
            if not isinstance(timeout, ClientTimeout):
                real_timeout = ClientTimeout(total=timeout)  # type: ignore
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(self._loop, real_timeout.total)
        handle = tm.start()

        traces = [
            Trace(
                self, trace_config,
                trace_config.trace_config_ctx(
                    trace_request_ctx=trace_request_ctx))
            for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(method, url, headers)

        timer = tm.timer()
        try:
            with timer:
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError("Cannot combine AUTH argument with "
                                         "credentials encoded in URL")

                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (headers is not None and auth is not None
                            and hdrs.AUTHORIZATION in headers):
                        raise ValueError("Cannot combine AUTHORIZATION header "
                                         "with AUTH argument or credentials "
                                         "encoded in URL")

                    session_cookies = self._cookie_jar.filter_cookies(url)

                    if cookies is not None:
                        tmp_cookie_jar = CookieJar()
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            session_cookies.load(req_cookies)

                    cookies = session_cookies

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        for scheme, proxy_info in proxies_from_env().items():
                            if scheme == url.scheme:
                                proxy = proxy_info.proxy
                                proxy_auth = proxy_info.proxy_auth
                                break

                    req = self._request_class(
                        method,
                        url,
                        params=params,
                        headers=headers,
                        skip_auto_headers=skip_headers,
                        data=data,
                        cookies=cookies,
                        auth=auth,
                        version=version,
                        compress=compress,
                        chunked=chunked,
                        expect100=expect100,
                        loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy,
                        proxy_auth=proxy_auth,
                        timer=timer,
                        session=self,
                        ssl=ssl,
                        proxy_headers=proxy_headers,
                        traces=traces)

                    # connection timeout
                    try:
                        with CeilTimeout(real_timeout.connect,
                                         loop=self._loop):
                            assert self._connector is not None
                            conn = await self._connector.connect(
                                req, traces=traces, timeout=real_timeout)
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            'Connection timeout '
                            'to host {0}'.format(url)) from exc

                    assert conn.transport is not None

                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method.upper() == 'HEAD',
                        read_until_eof=read_until_eof,
                        auto_decompress=self._auto_decompress,
                        read_timeout=real_timeout.sock_read)

                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (301, 302, 303, 307,
                                       308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method, url, headers, resp)

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(history[0].request_info,
                                                   tuple(history))

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and
                                resp.method != hdrs.METH_HEAD) \
                                or (resp.status in (301, 302) and
                                    resp.method == hdrs.METH_POST):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = (resp.headers.get(hdrs.LOCATION)
                                 or resp.headers.get(hdrs.URI))
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break
                        else:
                            # reading from correct redirection
                            # response is forbidden
                            resp.release()

                        try:
                            r_url = URL(r_url,
                                        encoded=not self._requote_redirect_url)

                        except ValueError:
                            raise InvalidURL(r_url)

                        scheme = r_url.scheme
                        if scheme not in ('http', 'https', ''):
                            resp.close()
                            raise ValueError(
                                'Can redirect only to http or https')
                        elif not scheme:
                            r_url = url.join(r_url)

                        if url.origin() != r_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = r_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if raise_for_status is None:
                raise_for_status = self._raise_for_status
            if raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)

            for trace in traces:
                await trace.send_request_end(method, url, headers, resp)
            return resp

        except BaseException as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            for trace in traces:
                await trace.send_request_exception(method, url, headers, e)
            raise
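Both variants above first call strip_auth_from_url so that credentials embedded in the URL (http://user:pass@host/) never conflict with an explicit auth argument. A rough standalone equivalent of that split, built only on yarl and aiohttp.BasicAuth (a sketch, not aiohttp's own helper):

from typing import Optional, Tuple

from aiohttp import BasicAuth
from yarl import URL

def split_url_auth(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    # No userinfo in the URL: nothing to strip.
    if url.user is None and url.password is None:
        return url, None
    # Turn the userinfo part into BasicAuth and drop it from the URL;
    # with_user(None) removes both user and password.
    auth = BasicAuth(url.user or '', url.password or '')
    return url.with_user(None), auth

url, auth_from_url = split_url_auth(URL('http://user:secret@example.com/path'))
assert str(url) == 'http://example.com/path'
assert auth_from_url == BasicAuth('user', 'secret')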
Example #33
def test_join():
    base = URL("http://www.cwi.nl/%7Eguido/Python.html")
    url = URL("FAQ.html")
    url2 = base.join(url)
    assert str(url2) == "http://www.cwi.nl/~guido/FAQ.html"
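A small extension of the same test, sketching the RFC 3986 reference-resolution rules that yarl's join follows (relative references resolve against the base, absolute ones replace it):

from yarl import URL

base = URL("http://www.cwi.nl/%7Eguido/Python.html")

# A relative reference replaces the last path segment of the base.
assert str(base.join(URL("FAQ.html"))) == "http://www.cwi.nl/~guido/FAQ.html"

# An absolute reference simply replaces the base.
assert str(base.join(URL("http://other.example/x"))) == "http://other.example/x"

# A network-path reference ("//host/...") keeps only the base scheme.
assert str(base.join(URL("//example.com/y"))) == "http://example.com/y"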
Example #34
    def _request(self, method, url, *,
                 params=None,
                 data=None,
                 headers=None,
                 skip_auto_headers=None,
                 auth=None,
                 allow_redirects=True,
                 max_redirects=10,
                 encoding='utf-8',
                 version=None,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 read_until_eof=True,
                 proxy=None,
                 proxy_auth=None,
                 timeout=5*60):

        if version is not None:
            warnings.warn("HTTP version should be specified "
                          "by ClientSession constructor", DeprecationWarning)
        else:
            version = self._version

        if self.closed:
            raise RuntimeError('Session is closed')

        redirects = 0
        history = []

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        if auth is None:
            auth = self._default_auth
        # It would be confusing if we support explicit Authorization header
        # with `auth` argument
        if (headers is not None and
                auth is not None and
                hdrs.AUTHORIZATION in headers):
            raise ValueError("Can't combine `Authorization` header with "
                             "`auth` argument")

        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            proxy = URL(proxy)

        while True:
            url = URL(url).with_fragment(None)

            cookies = self._cookie_jar.filter_cookies(url)

            req = self._request_class(
                method, url, params=params, headers=headers,
                skip_auto_headers=skip_headers, data=data,
                cookies=cookies, encoding=encoding,
                auth=auth, version=version, compress=compress, chunked=chunked,
                expect100=expect100,
                loop=self._loop, response_class=self._response_class,
                proxy=proxy, proxy_auth=proxy_auth, timeout=timeout)

            with Timeout(timeout, loop=self._loop):
                conn = yield from self._connector.connect(req)
            try:
                resp = req.send(conn.writer, conn.reader)
                try:
                    yield from resp.start(conn, read_until_eof)
                except:
                    resp.close()
                    conn.close()
                    raise
            except (aiohttp.HttpProcessingError,
                    aiohttp.ServerDisconnectedError) as exc:
                raise aiohttp.ClientResponseError() from exc
            except OSError as exc:
                raise aiohttp.ClientOSError(*exc.args) from exc

            self._cookie_jar.update_cookies(resp.cookies, resp.url_obj)

            # redirects
            if resp.status in (301, 302, 303, 307) and allow_redirects:
                redirects += 1
                history.append(resp)
                if max_redirects and redirects >= max_redirects:
                    resp.close()
                    break
                else:
                    # TODO: close the connection if BODY is large enough
                    # Redirect with big BODY is forbidden by HTTP protocol
                    # but malformed server may send illegal response.
                    # Small BODIES with text like "Not Found" are still
                    # perfectly fine and should be accepted.
                    yield from resp.release()

                # For 301 and 302, mimic IE behaviour, now changed in RFC.
                # Details: https://github.com/kennethreitz/requests/pull/269
                if (resp.status == 303 and resp.method != hdrs.METH_HEAD) \
                   or (resp.status in (301, 302) and
                       resp.method == hdrs.METH_POST):
                    method = hdrs.METH_GET
                    data = None
                    if headers.get(hdrs.CONTENT_LENGTH):
                        headers.pop(hdrs.CONTENT_LENGTH)

                r_url = URL(resp.headers.get(hdrs.LOCATION) or
                            resp.headers.get(hdrs.URI))

                scheme = r_url.scheme
                if scheme not in ('http', 'https', ''):
                    resp.close()
                    raise ValueError('Can redirect only to http or https')
                elif not scheme:
                    r_url = url.join(r_url)

                url = r_url
                params = None
                yield from resp.release()
                continue

            break

        resp._history = tuple(history)
        return resp
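This older generator-based variant accepts a single numeric timeout, whereas the sessions above take either the sentinel (use the session default) or a ClientTimeout. A minimal sketch of that normalization step, mirroring the `timeout is sentinel` branch above; the helper name and _SENTINEL object are illustrative only:

from aiohttp import ClientTimeout

_SENTINEL = object()

def normalize_timeout(timeout, session_default: ClientTimeout) -> ClientTimeout:
    # The sentinel means "no explicit timeout passed": fall back to the session default.
    if timeout is _SENTINEL:
        return session_default
    # A bare number is interpreted as a total timeout for the whole request
    # (connect, redirects, response, body reading).
    if not isinstance(timeout, ClientTimeout):
        return ClientTimeout(total=timeout)
    return timeout

assert normalize_timeout(_SENTINEL, ClientTimeout(total=300)).total == 300
assert normalize_timeout(5 * 60, ClientTimeout(total=300)).total == 300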
Example #35
def get_node_url(identifier: str, request: web.Request) -> str:
    base = request.url.origin()
    url = base.join(
        request.app.router['get_node'].url_for(node_id=identifier))
    return str(url)
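The example above builds an absolute link by joining a router-generated relative URL onto the request's origin. The same URL arithmetic can be shown with yarl alone (the URLs below are made up for illustration):

from yarl import URL

# request.url as a handler would see it, including userinfo, path and query.
request_url = URL("https://user:pass@api.example.com/v1/nodes?page=2")

# origin() keeps only scheme and authority, dropping userinfo, path, query and fragment.
base = request_url.origin()
assert str(base) == "https://api.example.com"

# request.app.router['get_node'].url_for(...) returns a relative URL like this one.
relative = URL("/v1/nodes/abc123")

# Joining it onto the origin yields the absolute URL returned to the client.
assert str(base.join(relative)) == "https://api.example.com/v1/nodes/abc123"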