async def test_response_iter_lines(httpbin):
    r = await get(httpbin + '/get', stream=True)
    body = []
    async with finalize(r.iter_lines()) as gen:
        async for chunk in gen:
            body.append(chunk)
    assert r.connection.closed
async def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
    """Iterates over the response data, one line at a time.  When
    stream=True is set on the request, this avoids reading the
    content at once into memory for large responses.

    .. note:: This method is not reentrant safe.
    """
    pending = None

    gen = self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode)
    async with finalize(gen) as gen:
        async for chunk in gen:

            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

    if pending is not None:
        yield pending
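# Hedged usage sketch for iter_lines: stream a response and split on an explicit
# delimiter instead of splitlines(). The `get` helper and response object are the
# ones used in the tests in this file; `collect_lines` and the delimiter value
# are illustrative assumptions, not library API.
async def collect_lines(url):
    r = await get(url, stream=True)
    lines = []
    async with finalize(r.iter_lines(delimiter=b'\n')) as gen:
        async for line in gen:
            lines.append(line)
    return lines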
async def test_response_iter_stream(httpbin):
    r = await get(httpbin + f'/bytes/{80 * 1024}', stream=True)
    body = []
    async with finalize(r.__aiter__()) as gen:
        async for chunk in gen:
            body.append(chunk)
    assert r.connection.closed
    assert len(b''.join(body)) == 80 * 1024
async def test_decode_unicode(httpbin):
    r = await get(httpbin + '/encoding/utf8', stream=True)
    body = []
    async with finalize(r.iter_content(decode_unicode=True)) as gen:
        async for chunk in gen:
            body.append(chunk)
    assert r.connection.closed
    body = ''.join(body).encode('utf-8')
    assert len(body) == int(r.headers['content-length'])
async def test_chunked(httpbin_both):
    r = await get(httpbin_both + '/stream/1', stream=True)
    assert r.status_code == 200
    body = []
    async with finalize(r.iter_content()) as gen:
        async for chunk in gen:
            body.append(chunk)
    body = b''.join(body).decode('utf-8')
    assert json.loads(body)
async def generate():
    async with self:
        async with finalize(self.raw.stream(chunk_size)) as gen:
            try:
                async for chunk in gen:
                    yield chunk
            except ProtocolError as e:
                raise ChunkedEncodingError(e)
            except DecodeError as e:
                raise ContentDecodingError(e)
            except ReadTimeoutError as e:
                raise ConnectionError(e)
    self._content_consumed = True
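# Hedged sketch of how a nested generate() like the one above is typically
# handed back by an iter_content()-style method. The iter_content name and
# the decode_unicode wiring follow the requests convention; they are
# assumptions here, not confirmed by this source.
def iter_content(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False):
    async def generate():
        # trimmed body of the generate() shown above (error translation omitted)
        async with self:
            async with finalize(self.raw.stream(chunk_size)) as gen:
                async for chunk in gen:
                    yield chunk
        self._content_consumed = True

    chunks = generate()
    if decode_unicode:
        chunks = stream_decode_response_unicode(chunks, self)
    return chunks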
async def stream_decode_response_unicode(iterator, r):
    """Stream decodes an iterator."""
    async with finalize(iterator) as iterator:
        if r.encoding is None:
            async for item in iterator:
                yield item
            return

        decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
        async for chunk in iterator:
            rv = decoder.decode(chunk)
            if rv:
                yield rv
        rv = decoder.decode(b'', final=True)
        if rv:
            yield rv
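# Hedged usage sketch for stream_decode_response_unicode: the incremental decoder
# correctly joins a multi-byte character split across two chunks. FakeResponse,
# byte_chunks and demo are illustrative stand-ins; only the `.encoding` attribute
# is consulted by the helper.
class FakeResponse:
    encoding = 'utf-8'

async def byte_chunks():
    yield b'caf\xc3'    # trailing byte is the first half of 'é'
    yield b'\xa9'       # the incremental decoder joins it with the previous chunk

async def demo():
    out = []
    gen = stream_decode_response_unicode(byte_chunks(), FakeResponse())
    async with finalize(gen) as gen:
        async for piece in gen:
            out.append(piece)
    assert ''.join(out) == 'café'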
def _gen(*args, **kwargs):
    k = get_kernel()
    it = asyncfunc(*args, **kwargs)
    f = finalize(it)
    sentinel = object()

    async def _next():
        try:
            return await it.__anext__()
        except StopAsyncIteration:
            return sentinel

    k.run(f.__aenter__)
    try:
        while True:
            item = k.run(_next)
            if item is sentinel:
                return
            yield item
    finally:
        k.run(f.__aexit__, *sys.exc_info())
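# The same bridging pattern, shown as a self-contained sketch using asyncio's
# event loop in place of the kernel above (a swapped-in runtime for illustration
# only, not this library's API): drive __anext__ until a sentinel signals
# exhaustion, and always close the async generator in the finally block.
import asyncio

def sync_iter(agen):
    loop = asyncio.new_event_loop()
    sentinel = object()

    async def _next():
        try:
            return await agen.__anext__()
        except StopAsyncIteration:
            return sentinel

    try:
        while True:
            item = loop.run_until_complete(_next())
            if item is sentinel:
                return
            yield item
    finally:
        loop.run_until_complete(agen.aclose())
        loop.close()


async def squares(n):
    for i in range(n):
        yield i * i

assert list(sync_iter(squares(4))) == [0, 1, 4, 9]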
async def main():
    async with finalize(countdown(5)) as c:
        nums = [n async for n in c]
        assert nums == [5, 4, 3, 2, 1]
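# The example above assumes a countdown async generator along these lines
# (a minimal sketch); if this runs under curio, curio.run(main()) would drive it.
async def countdown(n):
    while n > 0:
        yield n
        n -= 1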