Example 1
    async def request(
        self,
        method: bytes,
        url: Tuple[bytes, bytes, Optional[int], bytes],
        headers: List[Tuple[bytes, bytes]] = None,
        stream: httpcore.AsyncByteStream = None,
        timeout: Mapping[str, Optional[float]] = None,
    ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]],
               httpcore.AsyncByteStream]:
        content = (httpcore.PlainByteStream(b"".join([
            part async for part in stream
        ])) if stream else httpcore.PlainByteStream(b""))

        (
            http_version,
            status_code,
            reason_phrase,
            headers,
            response_stream,
        ) = self.impl.request(method,
                              url,
                              headers=headers,
                              stream=content,
                              timeout=timeout)

        content = httpcore.PlainByteStream(b"".join(
            [part for part in response_stream]))

        return (
            http_version,
            status_code,
            reason_phrase,
            headers,
            content,
        )
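
The snippet above relies on PlainByteStream being drainable and then re-iterable. A minimal, self-contained sketch of that buffering pattern (the helper names are assumed for illustration, and this assumes an httpcore release that still exposes PlainByteStream with both sync and async iteration, as the examples here do):

import asyncio

import httpcore


async def buffer_stream(stream: httpcore.AsyncByteStream) -> httpcore.PlainByteStream:
    # Drain the async stream into memory and re-wrap it so it can be replayed.
    body = b"".join([part async for part in stream])
    return httpcore.PlainByteStream(body)


async def main() -> None:
    original = httpcore.PlainByteStream(b"hello, world")
    buffered = await buffer_stream(original)
    print(b"".join(buffered))  # b'hello, world'


asyncio.run(main())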
Example 2
    def prepare_response(self, cached_data: dict):
        """Construct a response from cached data"""

        cached_response = cached_data["response"]

        status_code = cached_response["status_code"]
        headers = cached_response["headers"]
        stream = httpcore.PlainByteStream(cached_response["body"])
        ext = cached_response["ext"]

        response = Response.from_raw((status_code, headers, stream, ext))

        # The cached body is fully buffered, so a chunked transfer-encoding
        # header no longer applies.
        if response.headers.get("transfer-encoding", "") == "chunked":
            response.headers.pop("transfer-encoding")

        return response, cached_data["vary"]
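
For context, a self-contained sketch of the round trip this snippet performs, with a plain dict standing in for the serialized cache entry (the field layout is assumed for illustration, not the library's actual format):

import httpcore

# Hypothetical cached entry: the body is stored as raw bytes, so the stream
# can be rebuilt with PlainByteStream when the response is served from cache.
cached_response = {
    "status_code": 200,
    "headers": [(b"content-type", b"text/plain")],
    "body": b"cached payload",
    "ext": {"http_version": "HTTP/1.1"},
}

stream = httpcore.PlainByteStream(cached_response["body"])
raw = (
    cached_response["status_code"],
    cached_response["headers"],
    stream,
    cached_response["ext"],
)
print(raw[0], b"".join(raw[2]))  # 200 b'cached payload'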
Example 3
def stream(
    data, files, json: Any, boundary: bytes
) -> Union[httpcore.AsyncByteStream, httpcore.SyncByteStream]:
    if files:
        # TODO: Get rid of this internal import.
        # The import happens at runtime, only when needed, to limit the impact
        # of internal changes in httpx.
        from httpx._multipart import MultipartStream

        return MultipartStream(data=data or {}, files=files, boundary=boundary)

    if json is not None:
        data = dumps(json).encode("utf-8")
    elif isinstance(data, str):
        data = data.encode("utf-8")
    elif data is None:
        data = b""

    if isinstance(data, bytes):
        return httpcore.PlainByteStream(data)

    return IteratorStream(data)
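
A stripped-down, self-contained variant of the same dispatch logic, covering only the bytes/str/JSON branches (the multipart and iterator branches above need httpx internals; the function name here is hypothetical):

from json import dumps
from typing import Any, Optional, Union

import httpcore


def encode_body(data: Optional[Union[str, bytes]] = None, json: Any = None) -> httpcore.PlainByteStream:
    # JSON takes precedence, str is UTF-8 encoded, and None becomes an empty body.
    if json is not None:
        data = dumps(json).encode("utf-8")
    elif isinstance(data, str):
        data = data.encode("utf-8")
    elif data is None:
        data = b""
    return httpcore.PlainByteStream(data)


print(b"".join(encode_body(json={"hello": "world"})))  # b'{"hello": "world"}'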
Example 4
    async def arequest(
        self,
        method: bytes,
        url: Tuple[bytes, bytes, Optional[int], bytes],
        headers: List[Tuple[bytes, bytes]] = None,
        stream: httpcore.AsyncByteStream = None,
        ext: dict = None,
    ) -> Tuple[int, List[Tuple[bytes, bytes]], httpcore.AsyncByteStream, dict]:
        headers = [] if headers is None else headers
        stream = httpcore.PlainByteStream(content=b"") if stream is None else stream

        # ASGI scope.
        scheme, host, port, full_path = url
        path, _, query = full_path.partition(b"?")
        scope = {
            "type": "http",
            "asgi": {"version": "3.0"},
            "http_version": "1.1",
            "method": method.decode(),
            "headers": [(k.lower(), v) for (k, v) in headers],
            "scheme": scheme.decode("ascii"),
            "path": unquote(path.decode("ascii")),
            "raw_path": path,
            "query_string": query,
            "server": (host.decode("ascii"), port),
            "client": self.client,
            "root_path": self.root_path,
        }

        # Request.
        request_body_chunks = stream.__aiter__()
        request_complete = False

        # Response.
        status_code = None
        response_headers = None
        body_parts = []
        response_started = False
        response_complete = create_event()

        # ASGI callables.

        async def receive() -> dict:
            nonlocal request_complete

            if request_complete:
                await response_complete.wait()
                return {"type": "http.disconnect"}

            try:
                body = await request_body_chunks.__anext__()
            except StopAsyncIteration:
                request_complete = True
                return {"type": "http.request", "body": b"", "more_body": False}
            return {"type": "http.request", "body": body, "more_body": True}

        async def send(message: dict) -> None:
            nonlocal status_code, response_headers, response_started

            if message["type"] == "http.response.start":
                assert not response_started

                status_code = message["status"]
                response_headers = message.get("headers", [])
                response_started = True

            elif message["type"] == "http.response.body":
                assert not response_complete.is_set()
                body = message.get("body", b"")
                more_body = message.get("more_body", False)

                if body and method != b"HEAD":
                    body_parts.append(body)

                if not more_body:
                    response_complete.set()

        try:
            await self.app(scope, receive, send)
        except Exception:
            if self.raise_app_exceptions or not response_complete.is_set():
                raise

        assert response_complete.is_set()
        assert status_code is not None
        assert response_headers is not None

        stream = httpcore.PlainByteStream(content=b"".join(body_parts))
        ext = {}

        return (status_code, response_headers, stream, ext)
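
The receive and send callables above drive an ASGI application. A toy ASGI app that would work against them (assumed here purely for illustration, not part of the example's codebase):

async def app(scope, receive, send):
    # Drain the request body via the transport's receive() callable.
    body = b""
    more_body = True
    while more_body:
        message = await receive()
        body += message.get("body", b"")
        more_body = message.get("more_body", False)

    # Reply through send(); the transport joins the body parts it receives
    # into a single httpcore.PlainByteStream.
    await send({
        "type": "http.response.start",
        "status": 200,
        "headers": [(b"content-type", b"text/plain")],
    })
    await send({
        "type": "http.response.body",
        "body": b"echo: " + body,
        "more_body": False,
    })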
Example 5
    def request(
        self,
        method: bytes,
        url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
        headers: typing.List[typing.Tuple[bytes, bytes]] = None,
        stream: httpcore.SyncByteStream = None,
        ext: dict = None,
    ) -> typing.Tuple[
        int, typing.List[typing.Tuple[bytes, bytes]], httpcore.SyncByteStream, dict
    ]:
        headers = [] if headers is None else headers
        stream = httpcore.PlainByteStream(content=b"") if stream is None else stream

        scheme, host, port, full_path = url
        path, _, query = full_path.partition(b"?")
        environ = {
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": scheme.decode("ascii"),
            "wsgi.input": io.BytesIO(b"".join(stream)),
            "wsgi.errors": io.BytesIO(),
            "wsgi.multithread": True,
            "wsgi.multiprocess": False,
            "wsgi.run_once": False,
            "REQUEST_METHOD": method.decode(),
            "SCRIPT_NAME": self.script_name,
            "PATH_INFO": unquote(path.decode("ascii")),
            "QUERY_STRING": query.decode("ascii"),
            "SERVER_NAME": host.decode("ascii"),
            "SERVER_PORT": str(port),
            "REMOTE_ADDR": self.remote_addr,
        }
        for header_key, header_value in headers:
            key = header_key.decode("ascii").upper().replace("-", "_")
            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
                key = "HTTP_" + key
            environ[key] = header_value.decode("ascii")

        seen_status = None
        seen_response_headers = None
        seen_exc_info = None

        def start_response(
            status: str, response_headers: list, exc_info: typing.Any = None
        ) -> None:
            nonlocal seen_status, seen_response_headers, seen_exc_info
            seen_status = status
            seen_response_headers = response_headers
            seen_exc_info = exc_info

        result = self.app(environ, start_response)
        # This is needed because the status returned by start_response
        # shouldn't be used until the first non-empty chunk has been served.
        result = _skip_leading_empty_chunks(result)

        assert seen_status is not None
        assert seen_response_headers is not None
        if seen_exc_info and self.raise_app_exceptions:
            raise seen_exc_info[1]

        status_code = int(seen_status.split()[0])
        headers = [
            (key.encode("ascii"), value.encode("ascii"))
            for key, value in seen_response_headers
        ]
        stream = httpcore.IteratorByteStream(iterator=result)
        ext = {}

        return (status_code, headers, stream, ext)
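
Similarly, a toy WSGI app that this transport could drive (assumed for illustration only):

def app(environ, start_response):
    # Read the request body that the transport exposed via wsgi.input.
    length = int(environ.get("CONTENT_LENGTH") or 0)
    body = environ["wsgi.input"].read(length)

    start_response("200 OK", [("Content-Type", "text/plain")])
    # The returned iterable is what ends up wrapped in IteratorByteStream above.
    return [b"echo: " + body]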
Example 6
def cache_response_action(
    request: Request,
    server_response: Response,
    cache_exists: bool,
    cache_etags: bool,
    cacheable_status_codes: Iterable[int],
) -> Optional[Union[CacheSet, CacheDelete]]:
    """
    Algorithm for caching responses.

    Runs a series of checks on the request and response, and deletes the
    existing cache entry when appropriate.

    Then it does one of three things:
    - does not cache at all,
    - caches immediately, with no body, for permanent redirects, or
    - caches with the body, which must be deferred until the stream is read.

    Returns:
    A CacheSet or CacheDelete action, or None when the response should not be
    cached. A deferred CacheSet means the response stream still has to be
    wrapped so that caching is triggered once it has been fully read.
    """
    cache_key = get_cache_key(request)

    # From httplib2: Don't cache 206's since we aren't going to
    #                handle byte range requests
    if server_response.status_code not in cacheable_status_codes:
        logger.debug(
            "Status code %s not in %s",
            server_response.status_code,
            cacheable_status_codes,
        )
        return None

    logger.debug('Updating cache with response from "%s"', cache_key)

    # TODO: Do this once on the request/response?
    cc_req = parse_cache_control_directives(request.headers)
    cc = parse_cache_control_directives(server_response.headers)

    # Delete it from the cache if we happen to have it stored there
    no_store = False
    if "no-store" in cc:
        no_store = True
        logger.debug('Response header has "no-store"')
    if "no-store" in cc_req:
        no_store = True
        logger.debug('Request header has "no-store"')
    if no_store and cache_exists:
        logger.debug('Purging existing cache entry to honor "no-store"')
        return CacheDelete(cache_key)
    if no_store:
        return None

    # https://tools.ietf.org/html/rfc7234#section-4.1:
    # A Vary header field-value of "*" always fails to match.
    # Storing such a response leads to a deserialization warning
    # during cache lookup and is not allowed to ever be served,
    # so storing it can be avoided.
    if "*" in server_response.headers.get("vary", ""):
        logger.debug('Response header has "Vary: *"')
        return None

    # If we've been given an etag, then keep the response
    if cache_etags and "etag" in server_response.headers:
        logger.debug("Caching due to etag")

    # Add to the cache any permanent redirects. We do this before looking
    # at the Date header.
    elif int(server_response.status_code) in PERMANENT_REDIRECT_STATUSES:
        logger.debug("Caching permanent redirect")
        response_body = b""
        response = Response(
            server_response.status_code,
            server_response.headers,
            # TODO: This is naff, maybe we just use httpx.Response
            httpcore.PlainByteStream(response_body),
        )
        vary_header_values = get_vary_headers(request.headers, response)
        return CacheSet(cache_key, response, vary_header_values)

    # Add to the cache if the response headers demand it. If there
    # is no date header then we can't do anything about expiring
    # the cache.
    elif "date" in server_response.headers:
        # cache when there is a max-age > 0
        if "max-age" in cc and cc["max-age"] > 0:
            logger.debug("Caching b/c date exists and max-age > 0")

        # If the response can expire, it means we should cache it
        # in the meantime.
        elif "expires" in server_response.headers:
            if server_response.headers["expires"]:
                logger.debug("Caching b/c of expires header")
        else:
            return None
    else:
        return None

    vary_header_values = get_vary_headers(request.headers, server_response)
    return CacheSet(cache_key,
                    server_response,
                    vary_header_values,
                    deferred=True)
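
The Cache-Control handling above assumes parse_cache_control_directives yields a mapping of directive names to values. A simplified, standalone stand-in (hypothetical, operating on plain (name, value) pairs rather than the library's header type):

def parse_cache_control_directives(headers) -> dict:
    # Hypothetical simplified parser: flatten Cache-Control headers into a
    # {directive: value-or-None} mapping; numeric values become ints so that
    # checks like cc["max-age"] > 0 work as in the code above.
    directives = {}
    for name, value in headers:
        if name.lower() != "cache-control":
            continue
        for part in value.split(","):
            key, _, val = part.strip().partition("=")
            directives[key.lower()] = int(val) if val.isdigit() else (val or None)
    return directives


cc = parse_cache_control_directives([("Cache-Control", "max-age=300, no-store")])
print("no-store" in cc, cc.get("max-age"))  # True 300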