Example #1
async def _write_async(self, response: httpx.Response) -> None:
    filepath = self._get_filepath(response.url.path)
    async with aiofiles.open(filepath, "wb") as f:
        # httpx doesn't yet support chunk_size arg
        async for chunk in response.aiter_bytes():
            if chunk:
                await f.write(chunk)
Example #2
async def _retrieve_and_write_to_disk(*, response: httpx.Response,
                                      outfile: Path, mode: Literal['ab', 'wb'],
                                      desc: str, local_file_size: int,
                                      remote_file_size: int,
                                      remote_file_hash: Optional[str],
                                      verify_hash: bool,
                                      verify_size: bool) -> None:
    hash = hashlib.md5()

    # If we're resuming a download, ensure the already-downloaded
    # parts of the file are fed into the hash function before
    # we continue.
    if verify_hash and local_file_size > 0:
        async with aiofiles.open(outfile, 'rb') as f:
            while True:
                data = await f.read(65536)
                if not data:
                    break
                hash.update(data)

    async with aiofiles.open(outfile, mode=mode) as f:
        with tqdm(desc=desc,
                  initial=local_file_size,
                  total=remote_file_size,
                  unit='B',
                  unit_scale=True,
                  unit_divisor=1024,
                  leave=False) as progress:
            num_bytes_downloaded = response.num_bytes_downloaded

            # TODO Add timeout handling here, too.
            async for chunk in response.aiter_bytes():
                await f.write(chunk)
                progress.update(response.num_bytes_downloaded -
                                num_bytes_downloaded)
                num_bytes_downloaded = response.num_bytes_downloaded
                if verify_hash:
                    hash.update(chunk)

        if verify_hash and remote_file_hash is not None:
            assert hash.hexdigest() == remote_file_hash

        # Check the file was completely downloaded.
        if verify_size:
            await f.flush()
            local_file_size = outfile.stat().st_size
            if local_file_size != remote_file_size:
                raise RuntimeError(
                    f'Server claimed file size would be {remote_file_size} '
                    f'bytes, but downloaded {local_file_size} bytes.')
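
For orientation, a minimal usage sketch for the helper above, assuming it is in scope. The URL, the HEAD request used to learn the remote size, and the Range header used to resume are illustrative assumptions, not part of the original snippet.

import asyncio
from pathlib import Path

import httpx


async def main() -> None:
    url = "https://example.com/archive.zip"  # placeholder URL
    outfile = Path("archive.zip")
    local_size = outfile.stat().st_size if outfile.exists() else 0

    async with httpx.AsyncClient() as client:
        # Assumes the server reports Content-Length on a HEAD request.
        head = await client.head(url)
        remote_size = int(head.headers["Content-Length"])

        # When resuming, request only the bytes we don't have yet.
        headers = {"Range": f"bytes={local_size}-"} if local_size else {}
        async with client.stream("GET", url, headers=headers) as response:
            await _retrieve_and_write_to_disk(
                response=response,
                outfile=outfile,
                mode="ab" if local_size else "wb",
                desc=outfile.name,
                local_file_size=local_size,
                remote_file_size=remote_size,
                remote_file_hash=None,  # pass an MD5 hex digest here to verify it
                verify_hash=False,
                verify_size=True,
            )


asyncio.run(main())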
Example #3
async def read_json_resp(resp: httpx.Response) -> Dict[str, Any]:
    # Accumulate the streamed body in memory, then decode it in one pass.
    data = b""
    async for chunk in resp.aiter_bytes():
        data += chunk
    return orjson.loads(data)
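
For orientation, a minimal sketch of driving read_json_resp from httpx's streaming client. The URL and the main() scaffolding are illustrative assumptions, not part of the original snippet.

import asyncio

import httpx


async def main() -> None:
    async with httpx.AsyncClient() as client:
        # stream() leaves the body unread, so aiter_bytes() inside
        # read_json_resp pulls it from the wire chunk by chunk.
        async with client.stream("GET", "https://example.com/data.json") as resp:
            resp.raise_for_status()
            payload = await read_json_resp(resp)
    print(payload)


asyncio.run(main())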