def test_decode_brotli(self):
    data = brotli.compress(b"foo")

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "br"})
    r.preload_content()
    assert r.data == b"foo"
def test_deflate_streaming_tell_intermediate_point(self):
    # Ensure that ``tell()`` returns the correct number of bytes when
    # part-way through streaming compressed content.
    NUMBER_OF_READS = 10

    class MockCompressedDataReading(BytesIO):
        """
        A BytesIO-like reader returning ``payload`` in ``NUMBER_OF_READS``
        calls to ``read``.
        """

        def __init__(self, payload, payload_part_size):
            self.payloads = [
                payload[i * payload_part_size:(i + 1) * payload_part_size]
                for i in range(NUMBER_OF_READS + 1)
            ]
            self.consumed = 0

            assert b"".join(self.payloads) == payload

        def read(self, _):
            # Amount is unused.
            if len(self.payloads) > 0:
                payload = self.payloads.pop(0)
                self.consumed += len(payload)
                return payload
            return b""

        def __iter__(self):
            return self

        def __next__(self):
            if not self.payloads:
                raise StopIteration()
            return self.read(None)

        next = __next__

    uncompressed_data = zlib.decompress(ZLIB_PAYLOAD)

    payload_part_size = len(ZLIB_PAYLOAD) // NUMBER_OF_READS
    fp = MockCompressedDataReading(ZLIB_PAYLOAD, payload_part_size)
    resp = HTTPResponse(fp, headers={"content-encoding": "deflate"})

    parts = []
    stream = resp.stream(1)

    for part in stream:
        parts.append(part)
        assert resp.tell() == fp.consumed

    end_of_stream = resp.tell()

    with pytest.raises(StopIteration):
        next(stream)

    # Check that the payload is equal to the uncompressed data
    payload = b"".join(parts)
    assert uncompressed_data == payload

    # Check that the end of the stream is in the correct place
    assert len(ZLIB_PAYLOAD) == end_of_stream
def test_close_midstream(self):
    # A mock fp object that wraps a list and allows closing.
    class MockFP(object):
        list = None

        def close(self):
            self.list = None

        def __iter__(self):
            return self

        def __next__(self):
            if not self.list:
                raise StopIteration()
            return self.list.pop(0)

        next = __next__

    data = [b"fo", b"o"]
    fp = MockFP()
    fp.list = data
    resp = HTTPResponse(fp)
    stream = resp.stream()

    assert next(stream) == b"fo"
    resp.close()
    with pytest.raises(StopIteration):
        next(stream)
def test_empty_stream(self):
    fp = BytesIO(b"")
    resp = HTTPResponse(fp)
    stream = resp.stream(decode_content=False)

    with pytest.raises(StopIteration):
        next(stream)
def test_retries(self):
    fp = BytesIO(b"")
    resp = HTTPResponse(fp)
    assert resp.retries is None

    retry = Retry()
    resp = HTTPResponse(fp, retries=retry)
    assert resp.retries == retry
def test_io_bufferedreader(self):
    fp = BytesIO(b"foo")
    resp = HTTPResponse(fp)
    br = BufferedReader(resp)

    assert br.read() == b"foo"
    br.close()
    assert resp.closed

    # HTTPResponse.read() by default closes the response
    # https://github.com/urllib3/urllib3/issues/1305
    fp = BytesIO(b"hello\nworld")
    resp = HTTPResponse(fp)
    with pytest.raises(ValueError) as ctx:
        list(BufferedReader(resp))
    assert str(ctx.value) == "readline of closed file"

    b = b"!tenbytes!"
    fp = BytesIO(b)
    resp = HTTPResponse(fp)
    br = BufferedReader(resp, 5)

    # This is necessary to make sure the "no bytes left" part of `readinto`
    # gets tested.
    assert len(br.read(5)) == 5
    assert len(br.read(5)) == 5
    assert len(br.read(5)) == 0
def test_multi_decoding_deflate_deflate(self):
    data = zlib.compress(zlib.compress(b"foo"))

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "deflate, deflate"})
    r.preload_content()
    assert r.data == b"foo"
def test_reference_read(self):
    fp = BytesIO(b"foo")
    r = HTTPResponse(fp)

    assert r.read(1) == b"f"
    assert r.read(2) == b"oo"
    assert r.read() == b""
    assert r.read() == b""
def test_preload(self):
    fp = BytesIO(b"foo")
    r = HTTPResponse(fp)
    r.preload_content()

    assert fp.tell() == len(b"foo")
    assert r.data == b"foo"
def test_decode_deflate_case_insensitive(self):
    data = zlib.compress(b"foo")

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "DeFlAtE"})
    r.preload_content()
    assert r.data == b"foo"
def test_streaming(self):
    fp = [b"fo", b"o"]
    resp = HTTPResponse(fp)
    stream = resp.stream(decode_content=False)

    assert next(stream) == b"fo"
    assert next(stream) == b"o"

    with pytest.raises(StopIteration):
        next(stream)
def test_double_streaming(self):
    fp = [b"fo", b"o"]
    resp = HTTPResponse(fp)

    stream = list(resp.stream(decode_content=False))
    assert stream == fp

    stream = list(resp.stream(decode_content=False))
    assert stream == []
def test_deflate_streaming(self):
    data = zlib.compress(b"foo")

    fp = BytesIO(data)
    resp = HTTPResponse(fp, headers={"content-encoding": "deflate"})
    stream = resp.stream()

    assert next(stream) == b"foo"
    with pytest.raises(StopIteration):
        next(stream)
def test_multi_decoding_deflate_gzip(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    data = compress.compress(zlib.compress(b"foo"))
    data += compress.flush()

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "deflate, gzip"})
    r.preload_content()
    assert r.data == b"foo"
def test_decode_gzip_multi_member(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    data = compress.compress(b"foo")
    data += compress.flush()
    data = data * 3

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "gzip"})
    r.preload_content()
    assert r.data == b"foofoofoo"
def test_chunked_decoding_gzip_swallow_garbage(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    data = compress.compress(b"foo")
    data += compress.flush()
    data = data * 3 + b"foo"

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "gzip"})
    r.preload_content()
    assert r.data == b"foofoofoo"
def test_chunked_decoding_gzip(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    data = compress.compress(b"foo")
    data += compress.flush()

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "gzip"})

    assert r.read(1) == b"f"
    assert r.read(2) == b"oo"
    assert r.read() == b""
    assert r.read() == b""
def test_deflate2_streaming(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
    data = compress.compress(b"foo")
    data += compress.flush()

    fp = BytesIO(data)
    resp = HTTPResponse(fp, headers={"content-encoding": "deflate"})
    stream = resp.stream()

    assert next(stream) == b"foo"
    with pytest.raises(StopIteration):
        next(stream)
def test_chunked_decoding_deflate(self):
    data = zlib.compress(b"foo")

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "deflate"})

    assert r.read(1) == b"f"
    # Buffer in case we need to switch to the raw stream
    assert r._decoder._data is None
    assert r.read(2) == b"oo"
    assert r.read() == b""
    assert r.read() == b""
def test_chunked_decoding_brotli(self):
    data = brotli.compress(b"foobarbaz")

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "br"})

    ret = b""
    for _ in range(100):
        ret += r.read(1)
        if r.closed:
            break
    assert ret == b"foobarbaz"
def test_chunked_decoding_deflate2(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
    data = compress.compress(b"foo")
    data += compress.flush()

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "deflate"})

    assert r.read(1) == b"f"
    # Once we've decoded data, we just stream to the decoder; no buffering
    assert r._decoder._data is None
    assert r.read(2) == b"oo"
    assert r.read() == b""
    assert r.read() == b""
def test_history(self):
    retry = Retry(total=10, method_whitelist=frozenset(["GET", "POST"]))
    assert retry.history == tuple()

    connection_error = ConnectTimeoutError("conntimeout")
    retry = retry.increment("GET", "/test1", None, connection_error)
    history = (RequestHistory("GET", "/test1", connection_error, None, None),)
    assert retry.history == history

    read_error = ReadTimeoutError(None, "/test2", "read timed out")
    retry = retry.increment("POST", "/test2", None, read_error)
    history = (
        RequestHistory("GET", "/test1", connection_error, None, None),
        RequestHistory("POST", "/test2", read_error, None, None),
    )
    assert retry.history == history

    response = HTTPResponse(status=500)
    retry = retry.increment("GET", "/test3", response, None)
    history = (
        RequestHistory("GET", "/test1", connection_error, None, None),
        RequestHistory("POST", "/test2", read_error, None, None),
        RequestHistory("GET", "/test3", None, 500, None),
    )
    assert retry.history == history
def test_error_message(self):
    retry = Retry(total=0)
    with pytest.raises(MaxRetryError) as e:
        retry = retry.increment(
            method="GET", error=ReadTimeoutError(None, "/", "read timed out")
        )
    assert "Caused by redirect" not in str(e.value)
    assert str(e.value.reason) == "None: read timed out"

    retry = Retry(total=1)
    with pytest.raises(MaxRetryError) as e:
        retry = retry.increment("POST", "/")
        retry = retry.increment("POST", "/")
    assert "Caused by redirect" not in str(e.value)
    assert isinstance(e.value.reason, ResponseError)
    assert str(e.value.reason) == ResponseError.GENERIC_ERROR

    retry = Retry(total=1)
    response = HTTPResponse(status=500)
    with pytest.raises(MaxRetryError) as e:
        retry = retry.increment("POST", "/", response=response)
        retry = retry.increment("POST", "/", response=response)
    assert "Caused by redirect" not in str(e.value)
    msg = ResponseError.SPECIFIC_ERROR.format(status_code=500)
    assert str(e.value.reason) == msg

    retry = Retry(connect=1)
    with pytest.raises(MaxRetryError) as e:
        retry = retry.increment(error=ConnectTimeoutError("conntimeout"))
        retry = retry.increment(error=ConnectTimeoutError("conntimeout"))
    assert "Caused by redirect" not in str(e.value)
    assert str(e.value.reason) == "conntimeout"
def test_no_preload(self):
    fp = BytesIO(b"foo")
    r = HTTPResponse(fp)

    assert fp.tell() == 0
    assert r.data == b"foo"
    assert fp.tell() == len(b"foo")
def test_streaming_tell(self):
    fp = [b"fo", b"o"]
    resp = HTTPResponse(fp)
    stream = resp.stream(decode_content=False)

    position = 0

    position += len(next(stream))
    assert 2 == position
    assert 2 == resp.tell()

    position += len(next(stream))
    assert 3 == position
    assert 3 == resp.tell()

    with pytest.raises(StopIteration):
        next(stream)
def test_decode_gzip_swallow_garbage(self):
    # When data comes from multiple calls to read(), data after
    # the first zlib error (here triggered by garbage) should be
    # ignored.
    compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    data = compress.compress(b"foo")
    data += compress.flush()
    data = data * 3 + b"foo"

    fp = BytesIO(data)
    r = HTTPResponse(fp, headers={"content-encoding": "gzip"})

    ret = b""
    for _ in range(100):
        ret += r.read(1)
        if r.closed:
            break

    assert ret == b"foofoofoo"
def test_gzipped_streaming_tell(self):
    compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    uncompressed_data = b"foo"
    data = compress.compress(uncompressed_data)
    data += compress.flush()

    fp = BytesIO(data)
    resp = HTTPResponse(fp, headers={"content-encoding": "gzip"})
    stream = resp.stream()

    # Read everything
    payload = next(stream)
    assert payload == uncompressed_data

    assert len(data) == resp.tell()

    with pytest.raises(StopIteration):
        next(stream)
def test_status_counter(self):
    resp = HTTPResponse(status=400)
    retry = Retry(status=2)
    retry = retry.increment(response=resp)
    retry = retry.increment(response=resp)

    with pytest.raises(MaxRetryError) as e:
        retry.increment(response=resp)

    assert str(e.value.reason) == ResponseError.SPECIFIC_ERROR.format(
        status_code=400
    )
def test_backoff_reset_after_redirect(self):
    retry = Retry(total=100, redirect=5, backoff_factor=0.2)

    # Two consecutive errors: backoff is 0.2 * 2 ** 1 == 0.4
    retry = retry.increment(method="GET")
    retry = retry.increment(method="GET")
    assert retry.get_backoff_time() == 0.4

    # A redirect response resets the backoff...
    redirect_response = HTTPResponse(status=302, headers={"location": "test"})
    retry = retry.increment(method="GET", response=redirect_response)
    assert retry.get_backoff_time() == 0

    # ...after which consecutive errors build it up again.
    retry = retry.increment(method="GET")
    retry = retry.increment(method="GET")
    assert retry.get_backoff_time() == 0.4
def test_extract(self):
    request = urllib.request.Request("http://google.com")
    cookiejar = http_cookiejar.CookieJar()
    response = HTTPResponse()

    cookies = [
        "sessionhash=abcabcabcabcab; path=/; HttpOnly",
        "lastvisit=1348253375; expires=Sat, 21-Sep-2050 18:49:35 GMT; path=/",
    ]

    for c in cookies:
        response.headers.add("set-cookie", c)

    cookiejar.extract_cookies(response, request)
    assert len(cookiejar) == len(cookies)