def __init__(self, immediate_ms = 5000):
    """Initialise the firelog probe component.

    Parses command-line arguments, builds an HTTP(S) connection pool
    toward the supervisor, reads the server DN, and registers a
    FirelogService for the configured URL.

    :param immediate_ms: default duration in milliseconds for immediate
        specifications — presumably consumed by the scheduler; TODO confirm.
    """
    parse_args()
    self.dn = None
    # check if security is enabled, if so read certificate files
    self.security = not args.DISABLE_SSL
    if self.security:
        # Certificate, key and CA-chain paths come from the CERTFILE config.
        mplane.utils.check_file(args.CERTFILE)
        self.cert = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "cert"))
        self.key = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "key"))
        self.ca = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "ca-chain"))
        mplane.utils.check_file(self.cert)
        mplane.utils.check_file(self.key)
        mplane.utils.check_file(self.ca)
        self.pool = HTTPSConnectionPool(args.SUPERVISOR_IP4, args.SUPERVISOR_PORT, key_file=self.key, cert_file=self.cert, ca_certs=self.ca)
    else:
        self.pool = HTTPConnectionPool(args.SUPERVISOR_IP4, args.SUPERVISOR_PORT)
        # No client certificate when SSL is disabled; Scheduler below
        # still expects the attribute to exist.
        self.cert = None
    # get server DN, for Access Control purposes
    self.dn = self.get_dn()
    # generate a Service for each capability
    self.immediate_ms = immediate_ms
    self.scheduler = mplane.scheduler.Scheduler(self.security, self.cert)
    self.scheduler.add_service(FirelogService(firelog_capability(args.URL)))
def test_response_headers_are_returned_in_the_original_order(self):
    """Response header order must be preserved exactly as the server sent it."""
    # NOTE: the chance of a false negative (an accidental sort matching
    # the reversed input) is 1/(K!).
    K = 16
    # Provide the headers in non-sorted (reversed) order so that any
    # internal sorting by the implementation is detected.
    expected_response_headers = [
        ('X-Header-%d' % n, str(n)) for n in reversed(range(K))
    ]

    def socket_handler(listener):
        conn = listener.accept()[0]
        request = b''
        while not request.endswith(b'\r\n\r\n'):
            request += conn.recv(65536)
        header_block = b'\r\n'.join(
            name.encode('utf8') + b': ' + value.encode('utf8')
            for name, value in expected_response_headers
        )
        conn.send(b'HTTP/1.1 200 OK\r\n' + header_block + b'\r\n')
        conn.close()

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)
    response = pool.request('GET', '/', retries=0)
    actual_response_headers = [
        item for item in response.headers.items()
        if item[0].startswith('X-Header-')
    ]
    self.assertEqual(expected_response_headers, actual_response_headers)
def test_connection_refused(self):
    """A request to a port with no listener must raise MaxRetryError."""
    # Note: the socket server is deliberately started only AFTER the
    # request, so the connection is refused when the pool connects.
    pool = HTTPConnectionPool(self.host, self.port)
    with self.assertRaises(MaxRetryError):
        pool.request('GET', '/')
    self._start_server(lambda x: None)
def test_source_address_ignored(self):
    """Invalid source addresses are ignored (Python 2.6 and older behaviour)."""
    for bad_addr in INVALID_SOURCE_ADDRESSES:
        pool = HTTPConnectionPool(self.host, self.port,
                                  source_address=bad_addr)
        response = pool.request('GET', '/source_address')
        # The request still succeeds because the bad address is ignored.
        assert response.status == 200
def test_bad_connect(self):
    """An unresolvable host must surface a ProtocolError via MaxRetryError."""
    pool = HTTPConnectionPool('badhost.invalid', self.port)
    try:
        pool.request('GET', '/', retries=5)
    except MaxRetryError as e:
        self.assertTrue(isinstance(e.reason, ProtocolError), e.reason)
    else:
        self.fail("should raise timeout exception here")
def __init__(self, immediate_ms=5000):
    """Initialise the tStat probe component.

    Parses command-line arguments, builds an HTTP(S) connection pool
    toward the supervisor, reads the server DN, and registers one
    tStatService per tStat capability.

    :param immediate_ms: default duration in milliseconds for immediate
        specifications — presumably consumed by the scheduler; TODO confirm.
    """
    parse_args()
    self.dn = None
    # check if security is enabled, if so read certificate files
    self.security = not args.DISABLE_SSL
    if self.security:
        mplane.utils.check_file(args.CERTFILE)
        self.cert = mplane.utils.normalize_path(
            mplane.utils.read_setting(args.CERTFILE, "cert"))
        self.key = mplane.utils.normalize_path(
            mplane.utils.read_setting(args.CERTFILE, "key"))
        self.ca = mplane.utils.normalize_path(
            mplane.utils.read_setting(args.CERTFILE, "ca-chain"))
        mplane.utils.check_file(self.cert)
        mplane.utils.check_file(self.key)
        mplane.utils.check_file(self.ca)
        self.pool = HTTPSConnectionPool(args.SUPERVISOR_IP4,
                                        args.SUPERVISOR_PORT,
                                        key_file=self.key,
                                        cert_file=self.cert,
                                        ca_certs=self.ca)
    else:
        self.pool = HTTPConnectionPool(args.SUPERVISOR_IP4,
                                       args.SUPERVISOR_PORT)
        # BUG FIX: self.cert must exist even when SSL is disabled because
        # it is passed to the Scheduler below; previously this path left
        # the attribute unset and raised AttributeError.
        self.cert = None
    # get server DN, for Access Control purposes
    self.dn = self.get_dn()
    # generate a Service for each capability
    self.immediate_ms = immediate_ms
    self.scheduler = mplane.scheduler.Scheduler(self.security, self.cert)
    self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_flows_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
    self.scheduler.add_service(tStatService(mplane.tstat_caps.e2e_tcp_flows_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
    self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_options_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
    self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_p2p_stats_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
    self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_layer7_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
def test_bad_connect(self):
    """An unresolvable hostname must fail with a NewConnectionError reason."""
    pool = HTTPConnectionPool('badhost.invalid', self.port)
    try:
        pool.request('GET', '/', retries=5)
    except MaxRetryError as e:
        self.assertEqual(type(e.reason), NewConnectionError)
    else:
        self.fail("should raise timeout exception here")
def test_timeout(self):
    """A response slower than the pool timeout must raise TimeoutError.

    FIX: the original used Python-2-only ``except TimeoutError, e:``
    syntax, which is a SyntaxError on Python 3, and bound unused names
    ``r`` and ``e``.
    """
    pool = HTTPConnectionPool(HOST, PORT, timeout=0.1)
    try:
        # /sleep stalls for 0.2s, longer than the 0.1s pool timeout.
        pool.get_url('/sleep', fields={'seconds': 0.2})
    except TimeoutError:
        pass
    else:
        self.fail("Failed to raise TimeoutError exception")
def test_incomplete_response(self):
    """Reading a body shorter than Content-Length must raise ProtocolError."""
    body = 'Response'
    partial_body = body[:2]

    def socket_handler(listener):
        sock = listener.accept()[0]

        # Consume request.
        # FIX: accumulate with += — the original reassigned ``buf``,
        # discarding earlier chunks if the request arrived in more than
        # one recv() (sibling handlers in this file accumulate).
        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        # Send partial response and close socket.
        sock.send((
            'HTTP/1.1 200 OK\r\n'
            'Content-Type: text/plain\r\n'
            'Content-Length: %d\r\n'
            '\r\n'
            '%s' % (len(body), partial_body)).encode('utf-8')
        )
        sock.close()

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)
    response = pool.request('GET', '/', retries=0, preload_content=False)
    self.assertRaises(ProtocolError, response.read)
def test_delayed_body_read_timeout(self):
    """Delaying the body beyond the read timeout must raise ReadTimeoutError."""
    timed_out = Event()

    def socket_handler(listener):
        sock = listener.accept()[0]
        body = 'Hi'
        # FIX: accumulate with += — the original reassigned ``buf``,
        # discarding earlier chunks if the request arrived in more than
        # one recv() (sibling handlers in this file accumulate).
        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)
        # Headers announce a body that is withheld until the client has
        # already timed out on the read.
        sock.send(('HTTP/1.1 200 OK\r\n'
                   'Content-Type: text/plain\r\n'
                   'Content-Length: %d\r\n'
                   '\r\n' % len(body)).encode('utf-8'))
        timed_out.wait()
        sock.send(body.encode('utf-8'))
        sock.close()

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)
    response = pool.urlopen('GET', '/', retries=0, preload_content=False,
                            timeout=Timeout(connect=1, read=0.001))
    try:
        self.assertRaises(ReadTimeoutError, response.read)
    finally:
        # Always unblock the handler so the server thread can exit.
        timed_out.set()
def test_headers_are_sent_with_the_original_case(self):
    """Header names must reach the wire with their original casing."""
    headers = {"foo": "bar", "bAz": "quux"}
    parsed_headers = {}

    def socket_handler(listener):
        sock = listener.accept()[0]
        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += sock.recv(65536)
        # Skip the request line; record every "Name: value" pair as-is.
        for raw_header in buf.split(b"\r\n")[1:]:
            if not raw_header:
                continue
            key, value = raw_header.split(b": ")
            parsed_headers[key.decode()] = value.decode()
        # Send incomplete message (note Content-Length)
        sock.send(("HTTP/1.1 204 No Content\r\n"
                   "Content-Length: 0\r\n"
                   "\r\n").encode("utf-8"))
        sock.close()

    self._start_server(socket_handler)
    expected_headers = {"Accept-Encoding": "identity",
                        "Host": "{0}:{1}".format(self.host, self.port)}
    expected_headers.update(headers)
    pool = HTTPConnectionPool(self.host, self.port, retries=False)
    pool.request("GET", "/", headers=HTTPHeaderDict(headers))
    self.assertEqual(expected_headers, parsed_headers)
def test_headers_are_sent_with_the_original_case(self):
    """Verify the outgoing request preserves header-name casing on the wire."""
    headers = {'foo': 'bar', 'bAz': 'quux'}
    parsed_headers = {}

    def socket_handler(listener):
        sock = listener.accept()[0]
        request = b''
        while not request.endswith(b'\r\n\r\n'):
            request += sock.recv(65536)
        # Everything after the request line is a header; empty strings
        # come from the terminating blank line.
        for line in request.split(b'\r\n')[1:]:
            if line:
                key, value = line.split(b': ')
                parsed_headers[key.decode('ascii')] = value.decode('ascii')
        sock.send((
            'HTTP/1.1 204 No Content\r\n'
            'Content-Length: 0\r\n'
            '\r\n').encode('utf-8'))
        sock.close()

    self._start_server(socket_handler)
    expected_headers = {'Accept-Encoding': 'identity',
                        'Host': '{0}:{1}'.format(self.host, self.port)}
    expected_headers.update(headers)
    pool = HTTPConnectionPool(self.host, self.port, retries=False)
    pool.request('GET', '/', headers=HTTPHeaderDict(headers))
    self.assertEqual(expected_headers, parsed_headers)
def test_none_total_applies_connect(self):
    """With total=None the connect timeout must still be enforced."""
    timeout = Timeout(total=None, connect=0.001)
    pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
    conn = pool._get_conn()
    # TARPIT_HOST never completes the handshake, so the 1ms connect
    # timeout must fire.
    self.assertRaises(ConnectTimeoutError, pool._make_request,
                      conn, 'GET', '/sleep?seconds=0.005')
def test_enforce_content_length_no_body(self):
    """Streaming a HEAD response must yield nothing despite Content-Length."""
    done_event = Event()

    def socket_handler(listener):
        sock = listener.accept()[0]
        request = b''
        while not request.endswith(b'\r\n\r\n'):
            request += sock.recv(65536)
        # Advertise a 22-byte body that is never sent.
        sock.send(
            b'HTTP/1.1 200 OK\r\n'
            b'Content-Length: 22\r\n'
            b'Content-type: text/plain\r\n'
            b'\r\n'
        )
        done_event.wait(1)
        sock.close()

    self._start_server(socket_handler)
    conn = HTTPConnectionPool(self.host, self.port, maxsize=1)
    self.addCleanup(conn.close)

    # Test stream on 0 length body
    head_response = conn.request('HEAD', url='/', preload_content=False,
                                 enforce_content_length=True)
    chunks = list(head_response.stream(1))
    self.assertEqual(len(chunks), 0)
    done_event.set()
def test_stream_none_unchunked_response_does_not_hang(self):
    """stream(None) on a Content-Length response must read it all and stop."""
    done_event = Event()

    def socket_handler(listener):
        sock = listener.accept()[0]
        request = b''
        while not request.endswith(b'\r\n\r\n'):
            request += sock.recv(65536)
        sock.send(
            b'HTTP/1.1 200 OK\r\n'
            b'Content-Length: 12\r\n'
            b'Content-type: text/plain\r\n'
            b'\r\n'
            b'hello, world'
        )
        done_event.wait(5)
        sock.close()

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port, retries=False)
    self.addCleanup(pool.close)
    response = pool.request('GET', '/', timeout=1, preload_content=False)

    # Stream should read to the end without blocking.
    self.assertEqual([b'hello, world'], list(response.stream(None)))
    done_event.set()
def test_release_conn_parameter(self):
    """release_conn=False must keep the connection checked out of the pool."""
    MAXSIZE = 5
    pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
    self.assertEqual(pool.pool.qsize(), MAXSIZE)

    # The connection is not returned, so the free queue shrinks by one.
    pool.request('GET', '/', release_conn=False, preload_content=False)
    self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)
def test_source_address(self):
    """The server must observe the client bound to each valid source address."""
    for valid_addr in VALID_SOURCE_ADDRESSES:
        pool = HTTPConnectionPool(self.host, self.port,
                                  source_address=valid_addr, retries=False)
        response = pool.request('GET', '/source_address')
        expected = b(valid_addr[0])
        assert response.data == expected, (
            "expected the response to contain the source address {addr}, "
            "but was {data}".format(data=response.data, addr=expected))
def test_timeout(self):
    """A slow endpoint must trip the pool-level timeout."""
    pool = HTTPConnectionPool(self.host, self.port, timeout=0.01)
    try:
        # /sleep stalls longer than the 10ms pool timeout.
        pool.request('GET', '/sleep', fields={'seconds': '0.02'})
    except TimeoutError:
        pass
    else:
        self.fail("Failed to raise TimeoutError exception")
def test_httplib_headers_case_insensitive(self):
    """Parsed response headers must keep the server's exact casing."""
    self.start_response_handler(
        b"HTTP/1.1 200 OK\r\n"
        b"Content-Length: 0\r\n"
        b"Content-type: text/plain\r\n"
        b"\r\n"
    )
    pool = HTTPConnectionPool(self.host, self.port, retries=False)
    expected = {"Content-Length": "0", "Content-type": "text/plain"}
    response = pool.request("GET", "/")
    # to preserve case sensitivity
    self.assertEqual(expected, dict(response.headers.items()))
def test_connection_error_retries(self):
    """ECONNREFUSED must end in MaxRetryError wrapping NewConnectionError."""
    unused_port = find_unused_port()
    pool = HTTPConnectionPool(self.host, unused_port)
    try:
        pool.request('GET', '/', retries=Retry(connect=3))
    except MaxRetryError as e:
        self.assertEqual(type(e.reason), NewConnectionError)
    else:
        self.fail("Should have failed with a connection error.")
def test_connection_count_bigpool(self):
    """Sequential requests reuse one connection even with a large pool."""
    http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)
    for _ in range(3):
        http_pool.request('GET', '/')
    self.assertEqual(http_pool.num_connections, 1)
    self.assertEqual(http_pool.num_requests, 3)
def test_connection_count(self):
    """Three sequential requests must share a single pooled connection."""
    pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
    for _ in range(3):
        pool.request('GET', '/')
    self.assertEqual(pool.num_connections, 1)
    self.assertEqual(pool.num_requests, 3)
def test_socket_options(self):
    """Connections must honour user-supplied socket options."""
    # This test needs to be here in order to be run: socket.create_connection
    # actually connects to the host, so the dummyserver must be running.
    keepalive_option = (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    pool = HTTPConnectionPool(self.host, self.port,
                              socket_options=[keepalive_option])
    s = pool._new_conn()._new_conn()  # Get the socket
    keepalive_value = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE)
    self.assertTrue(keepalive_value > 0)
    s.close()
def test_keepalive(self):
    """Keep-alive requests over a blocking single-slot pool reuse one socket."""
    pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)
    pool.request('GET', '/keepalive?close=0')
    response = pool.request('GET', '/keepalive?close=0')
    self.assertEqual(response.status, 200)
    self.assertEqual(pool.num_connections, 1)
    self.assertEqual(pool.num_requests, 2)
def test_disable_default_socket_options(self):
    """socket_options=None must disable the defaults, leaving Nagle enabled."""
    # This test needs to be here in order to be run: socket.create_connection
    # actually connects to the host, so the dummyserver must be running.
    pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
    s = pool._new_conn()._new_conn()
    nagle_enabled = s.getsockopt(socket.IPPROTO_TCP,
                                 socket.TCP_NODELAY) == 0
    self.assertTrue(nagle_enabled)
    s.close()
def test_nagle(self):
    """New connections must have TCP_NODELAY turned on."""
    # This test needs to be here in order to be run: socket.create_connection
    # actually connects to the host, so the dummyserver must be running.
    pool = HTTPConnectionPool(self.host, self.port)
    conn = pool._get_conn()
    pool._make_request(conn, 'GET', '/')
    nodelay = conn.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
    self.assertTrue(nodelay)
def test_empty_head_response_does_not_hang(self):
    """HEAD with a nonzero Content-Length must not block when streamed."""
    self.start_response_handler(
        b"HTTP/1.1 200 OK\r\n"
        b"Content-Length: 256\r\n"
        b"Content-type: text/plain\r\n"
        b"\r\n"
    )
    pool = HTTPConnectionPool(self.host, self.port, retries=False)
    response = pool.request("HEAD", "/", timeout=1, preload_content=False)

    # stream will use the read method here; a HEAD body must be empty.
    self.assertEqual([], list(response.stream()))
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
    """File-like request bodies must be rewound before a retry or a
    redirect re-sends them."""

    def setUp(self):
        # Short timeout so a stalled re-send fails the test quickly.
        self.pool = HTTPConnectionPool(self.host, self.port, timeout=0.1)
        self.addCleanup(self.pool.close)

    def test_retries_put_filehandle(self):
        """HTTP PUT retry with a file-like object should not timeout"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b'A' * content_length
        uploaded_file = io.BytesIO(data)
        headers = {'test-name': 'test_retries_put_filehandle',
                   'Content-Length': str(content_length)}
        resp = self.pool.urlopen('PUT', '/successful_retry',
                                 headers=headers,
                                 retries=retry,
                                 body=uploaded_file,
                                 assert_same_host=False,
                                 redirect=False)
        self.assertEqual(resp.status, 200)

    def test_redirect_put_file(self):
        """PUT with file object should work with a redirection response"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b'A' * content_length
        uploaded_file = io.BytesIO(data)
        headers = {'test-name': 'test_redirect_put_file',
                   'Content-Length': str(content_length)}
        url = '/redirect?target=/echo&status=307'
        resp = self.pool.urlopen('PUT', url,
                                 headers=headers,
                                 retries=retry,
                                 body=uploaded_file,
                                 assert_same_host=False,
                                 redirect=True)
        self.assertEqual(resp.status, 200)
        # The echoed body proves the file was rewound and re-sent in full.
        self.assertEqual(resp.data, data)

    def test_redirect_with_failed_tell(self):
        """Abort request if failed to get a position from tell()"""
        class BadTellObject(io.BytesIO):
            def tell(self):
                raise IOError

        body = BadTellObject(b'the data')
        url = '/redirect?target=/successful_retry'
        # httplib uses fileno if Content-Length isn't supplied,
        # which is unsupported by BytesIO.
        headers = {'Content-Length': '8'}
        try:
            self.pool.urlopen('PUT', url, headers=headers, body=body)
            self.fail('PUT successful despite failed rewind.')
        except UnrewindableBodyError as e:
            self.assertIn('Unable to record file position for', str(e))
def test_timeout_reset(self):
    """With no read timeout configured, the socket timeout must reset."""
    timeout = util.Timeout(connect=0.001)
    pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
    conn = pool._get_conn()
    try:
        # A tiny connect timeout must not leak into the read phase.
        pool._make_request(conn, 'GET', '/sleep?seconds=0.005')
    except ReadTimeoutError:
        self.fail("This request shouldn't trigger a read timeout.")
def test_partial_response(self):
    """read(n) then read() must yield consecutive slices of the echo body."""
    pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
    fields = {'lol': 'cat'}
    echoed = urlencode(fields).encode('utf-8')
    response = pool.request('GET', '/echo', fields=fields,
                            preload_content=False)
    # First five bytes, then the remainder.
    self.assertEqual(response.read(5), echoed[:5])
    self.assertEqual(response.read(), echoed[5:])
def test_max_retry(self):
    """retries=0 must turn a redirect response into MaxRetryError."""
    with HTTPConnectionPool(self.host, self.port) as pool:
        with pytest.raises(MaxRetryError):
            pool.request("GET", "/redirect", fields={"target": "/"},
                         retries=0)
import json
import case_parsers
from urllib3 import HTTPConnectionPool
from typing import List
import numpy as np

# Failure counter — presumably incremented by the calling code on model
# errors; not updated in this fragment. TODO confirm against callers.
failed_times = 0

# Connection pool toward the locally served controversy-marking model.
try:
    pool = HTTPConnectionPool('127.0.0.1', port=8502, maxsize=32)
except Exception as e:
    print('Can not found controversy marking model!', e)
    case_parsers.CON_MARK_MODEL_AVALIABLE = False


def check_con_mark_model() -> bool:
    """Probe the model's metadata endpoint; return True when reachable.

    Side effect: on any failure, clears
    ``case_parsers.CON_MARK_MODEL_AVALIABLE``.
    """
    print('Checking controversy marking model...')
    try:
        res = pool.request("GET", '/v1/models/con_mark/metadata')
        if res.status == 200:
            print('ok!')
            return True
        else:
            print('HTTP status code not 200 is', res.status)
            return False
    except Exception as e:
        print('Can not found controversy marking model!', e)
        case_parsers.CON_MARK_MODEL_AVALIABLE = False
        return False


def con_mark(text: str) -> List[str]:
def test_for_double_release(self): MAXSIZE = 5 # Check default state pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE) self.assertEqual(pool.num_connections, 0) self.assertEqual(pool.pool.qsize(), MAXSIZE) # Make an empty slot for testing pool.pool.get() self.assertEqual(pool.pool.qsize(), MAXSIZE - 1) # Check state after simple request pool.urlopen('GET', '/') self.assertEqual(pool.pool.qsize(), MAXSIZE - 1) # Check state without release pool.urlopen('GET', '/', preload_content=False) self.assertEqual(pool.pool.qsize(), MAXSIZE - 2) pool.urlopen('GET', '/') self.assertEqual(pool.pool.qsize(), MAXSIZE - 2) # Check state after read pool.urlopen('GET', '/').data self.assertEqual(pool.pool.qsize(), MAXSIZE - 2) pool.urlopen('GET', '/') self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)
def test_post_with_urlencode(self):
    """POST with encode_multipart=False must urlencode the fields."""
    with HTTPConnectionPool(self.host, self.port) as pool:
        payload = {"banana": "hammock", "lol": "cat"}
        response = pool.request("POST", "/echo", fields=payload,
                                encode_multipart=False)
        assert response.data.decode("utf-8") == urlencode(payload)
class TestRetry(HTTPDummyServerTestCase):
    """Exercise Retry configuration (counts, whitelists, history) against
    the dummy server's /successful_retry and /redirect endpoints."""

    def setUp(self):
        # Fresh pool per test.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_max_retry(self):
        """retries=0 must turn a redirect into MaxRetryError."""
        try:
            r = self.pool.request('GET', '/redirect',
                                  fields={'target': '/'},
                                  retries=0)
            self.fail("Failed to raise MaxRetryError exception, returned %r" % r.status)
        except MaxRetryError:
            pass

    def test_disabled_retry(self):
        """ Disabled retries should disable redirect handling. """
        r = self.pool.request('GET', '/redirect',
                              fields={'target': '/'},
                              retries=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request('GET', '/redirect',
                              fields={'target': '/'},
                              retries=Retry(redirect=False))
        self.assertEqual(r.status, 303)

        # With retries disabled the connection error surfaces directly
        # instead of being wrapped in MaxRetryError.
        pool = HTTPConnectionPool('thishostdoesnotexist.invalid',
                                  self.port, timeout=0.001)
        self.assertRaises(NewConnectionError, pool.request, 'GET',
                          '/test', retries=False)

    def test_read_retries(self):
        """ Should retry for status codes in the whitelist """
        retry = Retry(read=1, status_forcelist=[418])
        resp = self.pool.request('GET', '/successful_retry',
                                 headers={'test-name': 'test_read_retries'},
                                 retries=retry)
        self.assertEqual(resp.status, 200)

    def test_read_total_retries(self):
        """ HTTP response w/ status code in the whitelist should be retried """
        headers = {'test-name': 'test_read_total_retries'}
        retry = Retry(total=1, status_forcelist=[418])
        resp = self.pool.request('GET', '/successful_retry',
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)

    def test_retries_wrong_whitelist(self):
        """HTTP response w/ status code not in whitelist shouldn't be retried"""
        retry = Retry(total=1, status_forcelist=[202])
        resp = self.pool.request('GET', '/successful_retry',
                                 headers={'test-name': 'test_wrong_whitelist'},
                                 retries=retry)
        self.assertEqual(resp.status, 418)

    def test_default_method_whitelist_retried(self):
        """ urllib3 should retry methods in the default method whitelist """
        retry = Retry(total=1, status_forcelist=[418])
        resp = self.pool.request('OPTIONS', '/successful_retry',
                                 headers={'test-name': 'test_default_whitelist'},
                                 retries=retry)
        self.assertEqual(resp.status, 200)

    def test_retries_wrong_method_list(self):
        """Method not in our whitelist should not be retried, even if code matches"""
        headers = {'test-name': 'test_wrong_method_whitelist'}
        retry = Retry(total=1, status_forcelist=[418],
                      method_whitelist=['POST'])
        resp = self.pool.request('GET', '/successful_retry',
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 418)

    def test_read_retries_unsuccessful(self):
        """Without a matching forcelist, the 418 is returned un-retried."""
        headers = {'test-name': 'test_read_retries_unsuccessful'}
        resp = self.pool.request('GET', '/successful_retry',
                                 headers=headers, retries=1)
        self.assertEqual(resp.status, 418)

    def test_retry_reuse_safe(self):
        """ It should be possible to reuse a Retry object across requests """
        headers = {'test-name': 'test_retry_safe'}
        retry = Retry(total=1, status_forcelist=[418])
        resp = self.pool.request('GET', '/successful_retry',
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)

        # A second request with the same Retry object must also succeed.
        resp = self.pool.request('GET', '/successful_retry',
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)

    def test_retry_return_in_response(self):
        """The response must carry the (consumed) Retry object and history."""
        headers = {'test-name': 'test_retry_return_in_response'}
        retry = Retry(total=2, status_forcelist=[418])
        resp = self.pool.request('GET', '/successful_retry',
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.retries.total, 1)
        self.assertEqual(
            resp.retries.history,
            (RequestHistory('GET', '/successful_retry', None, 418, None),))

    def test_retry_redirect_history(self):
        """A followed redirect must be recorded in retries.history."""
        resp = self.pool.request('GET', '/redirect', fields={'target': '/'})
        self.assertEqual(resp.status, 200)
        self.assertEqual(
            resp.retries.history,
            (RequestHistory('GET', '/redirect?target=%2F', None, 303, '/'),))

    def test_multi_redirect_history(self):
        """Each hop of a redirect chain must appear in retries.history."""
        r = self.pool.request('GET', '/multi_redirect',
                              fields={'redirect_codes': '303,302,200'},
                              redirect=False)
        self.assertEqual(r.status, 303)
        self.assertEqual(r.retries.history, tuple())

        r = self.pool.request('GET', '/multi_redirect', retries=10,
                              fields={'redirect_codes':
                                      '303,302,301,307,302,200'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b'Done redirecting')
        self.assertEqual(
            [(request_history.status, request_history.redirect_location)
             for request_history in r.retries.history],
            [(303, '/multi_redirect?redirect_codes=302,301,307,302,200'),
             (302, '/multi_redirect?redirect_codes=301,307,302,200'),
             (301, '/multi_redirect?redirect_codes=307,302,200'),
             (307, '/multi_redirect?redirect_codes=302,200'),
             (302, '/multi_redirect?redirect_codes=200')])
def test_read_chunked_on_closed_response(self):
    """Iterating chunks of a closed response must raise StopIteration."""
    with HTTPConnectionPool(self.host, self.port) as pool:
        response = pool.request("GET", "/chunked", preload_content=False)
        response.close()
        with pytest.raises(StopIteration):
            next(response.read_chunked())
def test_invalid_method_not_allowed(self, char):
    """Method names containing invalid characters must raise ValueError."""
    with pytest.raises(ValueError):
        with HTTPConnectionPool(self.host, self.port) as pool:
            pool.request("GET" + char, "/")
def test_timeout(self):
    """Requests should time out when expected — at pool level, at
    request level, and for bare int/float timeout values."""
    # block_event stalls the handler; ready_event signals it accepted.
    block_event = Event()
    ready_event = self.start_basic_handler(block_send=block_event, num=6)

    # Pool-global timeout
    timeout = Timeout(read=SHORT_TIMEOUT)
    pool = HTTPConnectionPool(self.host, self.port, timeout=timeout,
                              retries=False)
    wait_for_socket(ready_event)
    conn = pool._get_conn()
    self.assertRaises(ReadTimeoutError, pool._make_request,
                      conn, 'GET', '/')
    pool._put_conn(conn)
    block_event.set()  # Release request

    wait_for_socket(ready_event)
    block_event.clear()
    self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/')
    block_event.set()  # Release request

    # Request-specific timeouts should raise errors
    pool = HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT,
                              retries=False)
    conn = pool._get_conn()
    wait_for_socket(ready_event)
    now = time.time()
    self.assertRaises(ReadTimeoutError, pool._make_request,
                      conn, 'GET', '/', timeout=timeout)
    delta = time.time() - now
    block_event.set()  # Release request

    # The elapsed time proves the per-request SHORT_TIMEOUT won over the
    # pool-level LONG_TIMEOUT.
    self.assertTrue(
        delta < LONG_TIMEOUT,
        "timeout was pool-level LONG_TIMEOUT rather than request-level SHORT_TIMEOUT"
    )
    pool._put_conn(conn)

    wait_for_socket(ready_event)
    now = time.time()
    self.assertRaises(ReadTimeoutError, pool.request,
                      'GET', '/', timeout=timeout)
    delta = time.time() - now

    self.assertTrue(
        delta < LONG_TIMEOUT,
        "timeout was pool-level LONG_TIMEOUT rather than request-level SHORT_TIMEOUT"
    )
    block_event.set()  # Release request

    # Timeout int/float passed directly to request and _make_request should
    # raise a request timeout
    wait_for_socket(ready_event)
    self.assertRaises(ReadTimeoutError, pool.request,
                      'GET', '/', timeout=SHORT_TIMEOUT)
    block_event.set()  # Release request

    wait_for_socket(ready_event)
    conn = pool._new_conn()
    # FIXME: This assert flakes sometimes. Not sure why.
    self.assertRaises(ReadTimeoutError, pool._make_request,
                      conn, 'GET', '/', timeout=SHORT_TIMEOUT)
    block_event.set()  # Release request
def setUp(self):
    """Create a pool toward the dummy server; close it at teardown."""
    self.pool = HTTPConnectionPool(self.host, self.port)
    self.addCleanup(self.pool.close)
def setUp(self):
    """Create a short-timeout (0.1s) pool toward the dummy server."""
    self.pool = HTTPConnectionPool(self.host, self.port, timeout=0.1)
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
    """File-like request bodies must be rewound before a retry or a
    redirect re-sends them."""

    def setUp(self):
        # Short timeout so a stalled re-send fails the test quickly.
        self.pool = HTTPConnectionPool(self.host, self.port, timeout=0.1)
        # FIX: close the pool at teardown (previously leaked sockets;
        # the sibling variant of this class already does this).
        self.addCleanup(self.pool.close)

    def test_retries_put_filehandle(self):
        """HTTP PUT retry with a file-like object should not timeout"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b'A' * content_length
        uploaded_file = io.BytesIO(data)
        headers = {
            'test-name': 'test_retries_put_filehandle',
            'Content-Length': str(content_length)
        }
        resp = self.pool.urlopen('PUT', '/successful_retry',
                                 headers=headers,
                                 retries=retry,
                                 body=uploaded_file,
                                 assert_same_host=False,
                                 redirect=False)
        self.assertEqual(resp.status, 200)

    def test_redirect_put_file(self):
        """PUT with file object should work with a redirection response"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b'A' * content_length
        uploaded_file = io.BytesIO(data)
        headers = {
            'test-name': 'test_redirect_put_file',
            'Content-Length': str(content_length)
        }
        url = '/redirect?target=/echo&status=307'
        resp = self.pool.urlopen('PUT', url,
                                 headers=headers,
                                 retries=retry,
                                 body=uploaded_file,
                                 assert_same_host=False,
                                 redirect=True)
        self.assertEqual(resp.status, 200)
        # The echoed body proves the file was rewound and re-sent in full.
        self.assertEqual(resp.data, data)

    def test_redirect_with_failed_tell(self):
        """Abort request if failed to get a position from tell()"""
        class BadTellObject(io.BytesIO):
            def tell(self):
                raise IOError

        body = BadTellObject(b'the data')
        url = '/redirect?target=/successful_retry'
        # httplib uses fileno if Content-Length isn't supplied,
        # which is unsupported by BytesIO.
        headers = {'Content-Length': '8'}
        try:
            # FIX: dropped the unused ``resp`` binding — the expected
            # outcome here is the exception, not a response.
            self.pool.urlopen('PUT', url, headers=headers, body=body)
            self.fail('PUT successful despite failed rewind.')
        except UnrewindableBodyError as e:
            # FIX: assertIn gives a clearer failure message than
            # assertTrue('...' in str(e)).
            self.assertIn('Unable to record file position for', str(e))
class TestConnectionPool(HTTPDummyServerTestCase):
    """Functional tests for HTTPConnectionPool against a live dummy server:
    request methods, uploads, socket options, keep-alive, retries,
    connection accounting and pool-slot release.
    """

    def setUp(self):
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        r = self.pool.request('GET', '/specific_method',
                              fields={'method': 'GET'})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        r = self.pool.request('POST', '/specific_method',
                              fields={'method': 'POST'})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        r = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request('GET', '/specific_method',
                              fields={'method': 'POST'})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request('POST', '/specific_method',
                              fields={'method': 'GET'})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(data),
            'filefield': ('lolcat.txt', data),
        }
        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_one_name_multiple_values(self):
        # Repeated field names must survive both encodings.
        fields = [
            ('foo', 'a'),
            ('foo', 'b'),
        ]

        # urlencode
        r = self.pool.request('GET', '/echo', fields=fields)
        self.assertEqual(r.data, b'foo=a&foo=b')

        # multipart
        r = self.pool.request('POST', '/echo', fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    def test_request_method_body(self):
        body = b'hi'
        r = self.pool.request('POST', '/echo', body=body)
        self.assertEqual(r.data, body)

        # body and fields are mutually exclusive.
        fields = [('hi', 'hello')]
        self.assertRaises(TypeError, self.pool.request, 'POST', '/echo',
                          body=body, fields=fields)

    def test_unicode_upload(self):
        fieldname = u('myfile')
        filename = u('\xe2\x99\xa5.txt')
        data = u('\xe2\x99\xa5').encode('utf8')
        size = len(data)

        fields = {
            u('upload_param'): fieldname,
            u('upload_filename'): filename,
            u('upload_size'): size,
            fieldname: (filename, data),
        }
        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        conn = pool._get_conn()
        pool._make_request(conn, 'GET', '/')
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP,
                                                   socket.TCP_NODELAY)
        self.assertTrue(tcp_nodelay_setting)

    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=[
            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        ])
        s = pool._new_conn()._new_conn()  # Get the socket
        using_keepalive = s.getsockopt(socket.SOL_SOCKET,
                                       socket.SO_KEEPALIVE) > 0
        self.assertTrue(using_keepalive)
        s.close()

    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        s = pool._new_conn()._new_conn()
        # TCP_NODELAY unset means Nagle is back on (the default was disabled).
        using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0
        self.assertTrue(using_nagle)
        s.close()

    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET,
                                         socket.SO_KEEPALIVE, 1)]
        s = conn._new_conn()
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP,
                                      socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET,
                                       socket.SO_KEEPALIVE) > 0
        self.assertTrue(nagle_disabled)
        self.assertTrue(using_keepalive)

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        port = find_unused_port()
        pool = HTTPConnectionPool(self.host, port)
        try:
            pool.request('GET', '/', retries=Retry(connect=3))
            self.fail("Should have failed with a connection error.")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_timeout_success(self):
        timeout = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')
        # This should not raise a "Timeout already started" error
        pool.request('GET', '/')

        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        # This should also not raise a "Timeout already started" error
        pool.request('GET', '/')

        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')

    def test_tunnel(self):
        # note the actual httplib.py has no tests for this functionality
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError:  # python 2.6
            conn._set_tunnel(self.host, self.port)

        # Mock _tunnel so we can observe whether _make_request invokes it.
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        self.assertEqual(conn._tunnel.called, False)

    def test_redirect(self):
        r = self.pool.request('GET', '/redirect',
                              fields={'target': '/'},
                              redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request('GET', '/redirect', fields={'target': '/'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b'Dummy server!')

    def test_bad_connect(self):
        pool = HTTPConnectionPool('badhost.invalid', self.port)
        try:
            pool.request('GET', '/', retries=5)
            self.fail("should raise timeout exception here")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_keepalive(self):
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        r = pool.request('GET', '/keepalive?close=0')
        r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        # Two requests, one reused connection.
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo', fields=data,
                              encode_multipart=False)
        self.assertEqual(r.data.decode('utf-8'), urlencode(data))

    def test_post_with_multipart(self):
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo',
                              fields=data,
                              encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            # Boundary lines contain the timestamp, so skip them.
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        r = self.pool.request('GET', '/encodingrequest',
                              headers={'accept-encoding': 'gzip'})
        self.assertEqual(r.headers.get('content-encoding'), 'gzip')
        self.assertEqual(r.data, b'hello, world!')

    def test_check_deflate(self):
        r = self.pool.request('GET', '/encodingrequest',
                              headers={'accept-encoding': 'deflate'})
        self.assertEqual(r.headers.get('content-encoding'), 'deflate')
        self.assertEqual(r.data, b'hello, world!')

    def test_bad_decode(self):
        self.assertRaises(DecodeError, self.pool.request,
                          'GET', '/encodingrequest',
                          headers={'accept-encoding': 'garbage-deflate'})

        self.assertRaises(DecodeError, self.pool.request,
                          'GET', '/encodingrequest',
                          headers={'accept-encoding': 'garbage-gzip'})

    def test_connection_count(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        pool.request('GET', '/')
        pool.request('GET', '/')
        pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        http_pool.request('GET', '/')
        http_pool.request('GET', '/')
        http_pool.request('GET', '/')

        # Even with room for 16 connections, sequential requests reuse one.
        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {'lol': 'cat'}
        resp_data = urlencode(req_data).encode('utf-8')

        r = pool.request('GET', '/echo', fields=req_data,
                         preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)
        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST', '/echo', fields=req_data,
                          multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # pool_timeout is tiny: the single pooled connection is still
            # busy serving r1, so this normally raises EmptyPoolError.
            r2 = pool.request('POST', '/echo', fields=req2_data,
                              multipart_boundary=boundary,
                              preload_content=False, pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

    def test_dns_error(self):
        pool = HTTPConnectionPool('thishostdoesnotexist.invalid',
                                  self.port, timeout=0.001)
        self.assertRaises(MaxRetryError, pool.request,
                          'GET', '/test', retries=2)

    def test_source_address(self):
        for addr, is_ipv6 in VALID_SOURCE_ADDRESSES:
            if is_ipv6 and not HAS_IPV6_AND_DNS:
                warnings.warn("No IPv6 support: skipping.", NoIPv6Warning)
                continue
            pool = HTTPConnectionPool(self.host, self.port,
                                      source_address=addr, retries=False)
            r = pool.request('GET', '/source_address')
            self.assertEqual(r.data, b(addr[0]))

    def test_source_address_error(self):
        for addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host, self.port,
                                      source_address=addr, retries=False)
            # FIXME: This assert flakes sometimes. Not sure why.
            self.assertRaises(NewConnectionError,
                              pool.request,
                              'GET', '/source_address?{0}'.format(addr))

    def test_stream_keepalive(self):
        x = 2

        for _ in range(x):
            response = self.pool.request(
                'GET',
                '/chunked',
                headers={
                    'Connection': 'keep-alive',
                },
                preload_content=False,
                retries=False,
            )
            for chunk in response.stream():
                self.assertEqual(chunk, b'123')

        # Streaming to completion must release the connection for reuse.
        self.assertEqual(self.pool.num_connections, 1)
        self.assertEqual(self.pool.num_requests, x)

    def test_chunked_gzip(self):
        response = self.pool.request(
            'GET',
            '/chunked_gzip',
            preload_content=False,
            decode_content=True,
        )

        self.assertEqual(b'123' * 4, response.read())

    def test_cleanup_on_connection_error(self):
        '''
        Test that connections are recycled to the pool on
        connection errors where no http response is received.
        '''
        poolsize = 3
        with HTTPConnectionPool(self.host, self.port,
                                maxsize=poolsize, block=True) as http:
            self.assertEqual(http.pool.qsize(), poolsize)

            # force a connection error by supplying a non-existent
            # url. We won't get a response for this and so the
            # conn won't be implicitly returned to the pool.
            self.assertRaises(MaxRetryError,
                              http.request, 'GET', '/redirect',
                              fields={'target': '/'},
                              release_conn=False, retries=0)

            r = http.request('GET', '/redirect',
                             fields={'target': '/'},
                             release_conn=False, retries=1)
            r.release_conn()

            # the pool should still contain poolsize elements
            self.assertEqual(http.pool.qsize(), http.pool.maxsize)

    def test_mixed_case_hostname(self):
        pool = HTTPConnectionPool("LoCaLhOsT", self.port)
        response = pool.request('GET', "http://LoCaLhOsT:%d/" % self.port)
        self.assertEqual(response.status, 200)
def setUp(self):
    # Fresh pool per test against the dummy server; default timeouts.
    self.pool = HTTPConnectionPool(self.host, self.port)
class TestRetryAfter(HTTPDummyServerTestCase):
    """Tests that the Retry-After response header is honored when retrying
    429/503 responses and following redirects.
    """

    def setUp(self):
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_retry_after(self):
        # Request twice in a second to get a 429 response.
        r = self.pool.request('GET', '/retry_after',
                              fields={'status': '429 Too Many Requests'},
                              retries=False)
        r = self.pool.request('GET', '/retry_after',
                              fields={'status': '429 Too Many Requests'},
                              retries=False)
        self.assertEqual(r.status, 429)

        # With retries enabled the Retry-After wait is respected and the
        # retried request succeeds.
        r = self.pool.request('GET', '/retry_after',
                              fields={'status': '429 Too Many Requests'},
                              retries=True)
        self.assertEqual(r.status, 200)

        # Request twice in a second to get a 503 response.
        r = self.pool.request('GET', '/retry_after',
                              fields={'status': '503 Service Unavailable'},
                              retries=False)
        r = self.pool.request('GET', '/retry_after',
                              fields={'status': '503 Service Unavailable'},
                              retries=False)
        self.assertEqual(r.status, 503)

        r = self.pool.request('GET', '/retry_after',
                              fields={'status': '503 Service Unavailable'},
                              retries=True)
        self.assertEqual(r.status, 200)

        # Ignore Retry-After header on status which is not defined in
        # Retry.RETRY_AFTER_STATUS_CODES.
        r = self.pool.request('GET', '/retry_after',
                              fields={'status': "418 I'm a teapot"},
                              retries=True)
        self.assertEqual(r.status, 418)

    def test_redirect_after(self):
        r = self.pool.request('GET', '/redirect_after', retries=False)
        self.assertEqual(r.status, 303)

        # Following the redirect must wait at least the Retry-After delay.
        t = time.time()
        r = self.pool.request('GET', '/redirect_after')
        self.assertEqual(r.status, 200)
        delta = time.time() - t
        self.assertTrue(delta >= 1)

        # Retry-After given as an HTTP-date in the near future.
        t = time.time()
        timestamp = t + 2
        r = self.pool.request('GET', '/redirect_after?date=' + str(timestamp))
        self.assertEqual(r.status, 200)
        delta = time.time() - t
        self.assertTrue(delta >= 1)

        # Retry-After is past
        t = time.time()
        timestamp = t - 1
        r = self.pool.request('GET', '/redirect_after?date=' + str(timestamp))
        delta = time.time() - t
        self.assertEqual(r.status, 200)
        self.assertTrue(delta < 1)
def test_preserves_path_dot_segments(self):
    """Dot segments ("..") in the request path must be sent as-is,
    not normalized away by the pool.
    """
    raw_path = "/echo_uri/seg0/../seg2"
    with HTTPConnectionPool(self.host, self.port) as pool:
        echoed = pool.request("GET", raw_path)
        # The server echoes the URI it received; it must match verbatim.
        assert echoed.data == b"/echo_uri/seg0/../seg2"
class TestRetry(HTTPDummyServerTestCase):
    """Tests for Retry configuration: max retries, whitelists, reuse,
    and the retry/redirect history recorded on the response.
    """

    def setUp(self):
        self.pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(self.pool.close)

    def test_max_retry(self):
        # retries=0 means the redirect itself exhausts the budget.
        try:
            r = self.pool.request("GET", "/redirect",
                                  fields={"target": "/"},
                                  retries=0)
            self.fail("Failed to raise MaxRetryError exception, returned %r"
                      % r.status)
        except MaxRetryError:
            pass

    def test_disabled_retry(self):
        """ Disabled retries should disable redirect handling. """
        r = self.pool.request("GET", "/redirect",
                              fields={"target": "/"},
                              retries=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request("GET", "/redirect",
                              fields={"target": "/"},
                              retries=Retry(redirect=False))
        self.assertEqual(r.status, 303)

        # With retries disabled the raw connection error propagates
        # instead of being wrapped in MaxRetryError.
        pool = HTTPConnectionPool("thishostdoesnotexist.invalid",
                                  self.port, timeout=0.001)
        self.assertRaises(NewConnectionError, pool.request,
                          "GET", "/test", retries=False)

    def test_read_retries(self):
        """ Should retry for status codes in the whitelist """
        retry = Retry(read=1, status_forcelist=[418])
        resp = self.pool.request(
            "GET",
            "/successful_retry",
            headers={"test-name": "test_read_retries"},
            retries=retry,
        )
        self.assertEqual(resp.status, 200)

    def test_read_total_retries(self):
        """ HTTP response w/ status code in the whitelist should be retried """
        headers = {"test-name": "test_read_total_retries"}
        retry = Retry(total=1, status_forcelist=[418])
        resp = self.pool.request("GET", "/successful_retry",
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)

    def test_retries_wrong_whitelist(self):
        """HTTP response w/ status code not in whitelist shouldn't be retried"""
        retry = Retry(total=1, status_forcelist=[202])
        resp = self.pool.request(
            "GET",
            "/successful_retry",
            headers={"test-name": "test_wrong_whitelist"},
            retries=retry,
        )
        self.assertEqual(resp.status, 418)

    def test_default_method_whitelist_retried(self):
        """ urllib3 should retry methods in the default method whitelist """
        retry = Retry(total=1, status_forcelist=[418])
        resp = self.pool.request(
            "OPTIONS",
            "/successful_retry",
            headers={"test-name": "test_default_whitelist"},
            retries=retry,
        )
        self.assertEqual(resp.status, 200)

    def test_retries_wrong_method_list(self):
        """Method not in our whitelist should not be retried, even if code matches"""
        headers = {"test-name": "test_wrong_method_whitelist"}
        retry = Retry(total=1, status_forcelist=[418],
                      method_whitelist=["POST"])
        resp = self.pool.request("GET", "/successful_retry",
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 418)

    def test_read_retries_unsuccessful(self):
        # One retry is not enough for this endpoint; final status is 418.
        headers = {"test-name": "test_read_retries_unsuccessful"}
        resp = self.pool.request("GET", "/successful_retry",
                                 headers=headers, retries=1)
        self.assertEqual(resp.status, 418)

    def test_retry_reuse_safe(self):
        """ It should be possible to reuse a Retry object across requests """
        headers = {"test-name": "test_retry_safe"}
        retry = Retry(total=1, status_forcelist=[418])
        resp = self.pool.request("GET", "/successful_retry",
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)

        resp = self.pool.request("GET", "/successful_retry",
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)

    def test_retry_return_in_response(self):
        headers = {"test-name": "test_retry_return_in_response"}
        retry = Retry(total=2, status_forcelist=[418])
        resp = self.pool.request("GET", "/successful_retry",
                                 headers=headers, retries=retry)
        self.assertEqual(resp.status, 200)
        # One retry was consumed, and its history is recorded.
        self.assertEqual(resp.retries.total, 1)
        self.assertEqual(
            resp.retries.history,
            (RequestHistory("GET", "/successful_retry", None, 418, None), ),
        )

    def test_retry_redirect_history(self):
        resp = self.pool.request("GET", "/redirect", fields={"target": "/"})
        self.assertEqual(resp.status, 200)
        self.assertEqual(
            resp.retries.history,
            (RequestHistory("GET", "/redirect?target=%2F", None, 303, "/"), ),
        )

    def test_multi_redirect_history(self):
        # With redirect=False no history accumulates.
        r = self.pool.request(
            "GET",
            "/multi_redirect",
            fields={"redirect_codes": "303,302,200"},
            redirect=False,
        )
        self.assertEqual(r.status, 303)
        self.assertEqual(r.retries.history, tuple())

        r = self.pool.request(
            "GET",
            "/multi_redirect",
            retries=10,
            fields={"redirect_codes": "303,302,301,307,302,200"},
        )
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b"Done redirecting")

        # Each hop is recorded in order with its status and target.
        expected = [
            (303, "/multi_redirect?redirect_codes=302,301,307,302,200"),
            (302, "/multi_redirect?redirect_codes=301,307,302,200"),
            (301, "/multi_redirect?redirect_codes=307,302,200"),
            (307, "/multi_redirect?redirect_codes=302,200"),
            (302, "/multi_redirect?redirect_codes=200"),
        ]
        actual = [(history.status, history.redirect_location)
                  for history in r.retries.history]
        self.assertEqual(actual, expected)
def test_mixed_case_hostname(self):
    """A pool created with a mixed-case hostname must still serve a
    request whose URL uses the same mixed-case host.
    """
    url = "http://LoCaLhOsT:%d/" % self.port
    with HTTPConnectionPool("LoCaLhOsT", self.port) as pool:
        resp = pool.request("GET", url)
        assert resp.status == 200
def test_timeout_success(self):
    """A Timeout object must be reusable: across requests on one pool,
    and across separate pools sharing the same instance.
    """
    finite = Timeout(connect=3, read=5, total=None)

    first_pool = HTTPConnectionPool(self.host, self.port, timeout=finite)
    self.addCleanup(first_pool.close)
    first_pool.request("GET", "/")
    # A second request on the same pool must not raise a
    # "Timeout already started" error.
    first_pool.request("GET", "/")

    # Sharing the same Timeout instance with a new pool must also work.
    second_pool = HTTPConnectionPool(self.host, self.port, timeout=finite)
    self.addCleanup(second_pool.close)
    second_pool.request("GET", "/")

    # An unbounded total timeout behaves the same way.
    unbounded = Timeout(total=None)
    third_pool = HTTPConnectionPool(self.host, self.port, timeout=unbounded)
    self.addCleanup(third_pool.close)
    third_pool.request("GET", "/")
def test_percent_encode_invalid_target_chars(self):
    """CR and LF characters in the request target must be percent-encoded
    so the echoed query parameters still round-trip intact.
    """
    target = "/echo_params?q=\r&k=\n \n"
    with HTTPConnectionPool(self.host, self.port) as pool:
        resp = pool.request("GET", target)
        assert resp.data == b"[('k', '\\n \\n'), ('q', '\\r')]"
def test_bad_connect(self):
    """An unresolvable host must exhaust retries and surface the
    underlying NewConnectionError as the MaxRetryError reason.
    """
    with HTTPConnectionPool("badhost.invalid", self.port) as pool:
        with pytest.raises(MaxRetryError) as exc_info:
            pool.request("GET", "/", retries=5)
        assert type(exc_info.value.reason) == NewConnectionError
def test_dns_error(self):
    """DNS resolution failure must raise MaxRetryError after the
    configured retries are exhausted.
    """
    bad_host = "thishostdoesnotexist.invalid"
    with HTTPConnectionPool(bad_host, self.port, timeout=0.001) as pool:
        with pytest.raises(MaxRetryError):
            pool.request("GET", "/test", retries=2)
def test_post_url(self):
    """A POST whose 'method' field matches the endpoint expectation
    must return 200.
    """
    fields = {"method": "POST"}
    with HTTPConnectionPool(self.host, self.port) as pool:
        resp = pool.request("POST", "/specific_method", fields=fields)
        assert resp.status == 200, resp.data
class TestConnectionPool(HTTPDummyServerTestCase):
    """Integration tests for HTTPConnectionPool against the dummy HTTP server.

    Each test exercises the pool end-to-end over a real socket: request
    dispatch, uploads, redirects, keep-alive/connection reuse, timeouts,
    decoding, chunked transfer, and pool bookkeeping
    (num_connections / num_requests / queue size).
    """

    def setUp(self):
        # One shared pool per test, closed automatically on teardown.
        self.pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(self.pool.close)

    def test_get(self):
        r = self.pool.request("GET", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        r = self.pool.request("POST", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        r = self.pool.urlopen("PUT", "/specific_method?method=PUT")
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request("GET", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 400, r.data)
        r = self.pool.request("POST", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        # Multipart file upload: the server validates param name, filename,
        # and payload size from the fields below.
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            "upload_param": "filefield",
            "upload_filename": "lolcat.txt",
            "upload_size": len(data),
            "filefield": ("lolcat.txt", data),
        }
        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_one_name_multiple_values(self):
        # Repeated field names must be preserved in both encodings.
        fields = [("foo", "a"), ("foo", "b")]
        # urlencode
        r = self.pool.request("GET", "/echo", fields=fields)
        self.assertEqual(r.data, b"foo=a&foo=b")
        # multipart
        r = self.pool.request("POST", "/echo", fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    def test_request_method_body(self):
        body = b"hi"
        r = self.pool.request("POST", "/echo", body=body)
        self.assertEqual(r.data, body)
        # Supplying both body and fields is ambiguous and must be rejected.
        fields = [("hi", "hello")]
        self.assertRaises(TypeError, self.pool.request, "POST", "/echo", body=body, fields=fields)

    def test_unicode_upload(self):
        # Non-ASCII field names, filenames and payloads must survive upload.
        fieldname = u("myfile")
        filename = u("\xe2\x99\xa5.txt")
        data = u("\xe2\x99\xa5").encode("utf8")
        size = len(data)
        fields = {
            u("upload_param"): fieldname,
            u("upload_filename"): filename,
            u("upload_size"): size,
            fieldname: (filename, data),
        }
        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)
        pool._make_request(conn, "GET", "/")
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        self.assertTrue(tcp_nodelay_setting)

    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(
            self.host,
            self.port,
            socket_options=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)],
        )
        s = pool._new_conn()._new_conn()  # Get the socket
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(using_keepalive)
        s.close()

    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        s = pool._new_conn()._new_conn()
        # With no options set, Nagle's algorithm stays enabled (TCP_NODELAY off).
        using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0
        self.assertTrue(using_nagle)
        s.close()

    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        self.addCleanup(conn.close)
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
        s = conn._new_conn()
        self.addCleanup(s.close)
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(nagle_disabled)
        self.assertTrue(using_keepalive)

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        port = find_unused_port()
        pool = HTTPConnectionPool(self.host, port)
        try:
            pool.request("GET", "/", retries=Retry(connect=3))
            self.fail("Should have failed with a connection error.")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_timeout_success(self):
        # A Timeout object must be reusable across requests and pools.
        timeout = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        pool.request("GET", "/")
        # This should not raise a "Timeout already started" error
        pool.request("GET", "/")
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        # This should also not raise a "Timeout already started" error
        pool.request("GET", "/")
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        pool.request("GET", "/")

    def test_tunnel(self):
        # note the actual httplib.py has no tests for this functionality
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)
        conn.set_tunnel(self.host, self.port)
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        conn._tunnel.assert_called_once_with()
        # test that it's not called when tunnel is not set
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        self.assertFalse(conn._tunnel.called)

    def test_redirect(self):
        r = self.pool.request("GET", "/redirect", fields={"target": "/"}, redirect=False)
        self.assertEqual(r.status, 303)
        r = self.pool.request("GET", "/redirect", fields={"target": "/"})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b"Dummy server!")

    def test_bad_connect(self):
        pool = HTTPConnectionPool("badhost.invalid", self.port)
        try:
            pool.request("GET", "/", retries=5)
            self.fail("should raise timeout exception here")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_keepalive(self):
        # Two requests on a maxsize=1 blocking pool should reuse one socket.
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)
        self.addCleanup(pool.close)
        r = pool.request("GET", "/keepalive?close=0")
        r = pool.request("GET", "/keepalive?close=0")
        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)
        self.addCleanup(pool.close)
        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})
        self.assertEqual(pool.num_connections, 1)
        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.
        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertIsNone(conn.sock)
        pool._put_conn(conn)
        # Now with keep-alive
        r = pool.request("GET", "/keepalive?close=0", retries=0, headers={"Connection": "keep-alive"})
        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertIsNotNone(conn.sock)
        pool._put_conn(conn)
        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})
        self.assertEqual(r.status, 200)
        conn = pool.pool.get()
        self.assertIsNone(conn.sock)
        pool._put_conn(conn)
        # Next request
        r = pool.request("GET", "/keepalive?close=0")

    def test_post_with_urlencode(self):
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=False)
        self.assertEqual(r.data.decode("utf-8"), urlencode(data))

    def test_post_with_multipart(self):
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=True)
        body = r.data.split(b"\r\n")
        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b"\r\n")
        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith(b"--"):
                continue
            self.assertEqual(body[i], expected_body[i])

    def test_post_with_multipart__iter__(self):
        # Iterating a non-preloaded multipart response yields line-sized chunks.
        data = {"hello": "world"}
        r = self.pool.request(
            "POST",
            "/echo",
            fields=data,
            preload_content=False,
            multipart_boundary="boundary",
            encode_multipart=True,
        )
        chunks = [chunk for chunk in r]
        assert chunks == [
            b"--boundary\r\n",
            b'Content-Disposition: form-data; name="hello"\r\n',
            b"\r\n",
            b"world\r\n",
            b"--boundary--\r\n",
        ]

    def test_check_gzip(self):
        r = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "gzip"})
        self.assertEqual(r.headers.get("content-encoding"), "gzip")
        self.assertEqual(r.data, b"hello, world!")

    def test_check_deflate(self):
        r = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "deflate"})
        self.assertEqual(r.headers.get("content-encoding"), "deflate")
        self.assertEqual(r.data, b"hello, world!")

    def test_bad_decode(self):
        # Server advertises an encoding it then corrupts; decoding must fail loudly.
        self.assertRaises(
            DecodeError,
            self.pool.request,
            "GET",
            "/encodingrequest",
            headers={"accept-encoding": "garbage-deflate"},
        )
        self.assertRaises(
            DecodeError,
            self.pool.request,
            "GET",
            "/encodingrequest",
            headers={"accept-encoding": "garbage-gzip"},
        )

    def test_connection_count(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)
        pool.request("GET", "/")
        pool.request("GET", "/")
        pool.request("GET", "/")
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)
        self.addCleanup(http_pool.close)
        http_pool.request("GET", "/")
        http_pool.request("GET", "/")
        http_pool.request("GET", "/")
        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)
        req_data = {"lol": "cat"}
        resp_data = urlencode(req_data).encode("utf-8")
        r = pool.request("GET", "/echo", fields=req_data, preload_content=False)
        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)
        payload_size = 1024 * 2
        first_chunk = 512
        boundary = "foo"
        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]
        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]
        r1 = pool.request(
            "POST",
            "/echo",
            fields=req_data,
            multipart_boundary=boundary,
            preload_content=False,
        )
        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])
        try:
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )
            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.
            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)
        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)
        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        MAXSIZE = 5
        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.addCleanup(pool.close)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)
        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)
        # Check state after simple request
        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)
        # Check state without release
        pool.urlopen("GET", "/", preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)
        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)
        # Check state after read
        pool.urlopen("GET", "/").data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)
        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)
        # Make request without releasing connection
        pool.request("GET", "/", release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

    def test_dns_error(self):
        pool = HTTPConnectionPool("thishostdoesnotexist.invalid", self.port, timeout=0.001)
        self.assertRaises(MaxRetryError, pool.request, "GET", "/test", retries=2)

    def test_source_address(self):
        for addr, is_ipv6 in VALID_SOURCE_ADDRESSES:
            if is_ipv6 and not HAS_IPV6_AND_DNS:
                warnings.warn("No IPv6 support: skipping.", NoIPv6Warning)
                continue
            pool = HTTPConnectionPool(self.host, self.port, source_address=addr, retries=False)
            self.addCleanup(pool.close)
            r = pool.request("GET", "/source_address")
            self.assertEqual(r.data, b(addr[0]))

    def test_source_address_error(self):
        for addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host, self.port, source_address=addr, retries=False)
            # FIXME: This assert flakes sometimes. Not sure why.
            self.assertRaises(
                NewConnectionError,
                pool.request,
                "GET",
                "/source_address?{0}".format(addr),
            )

    def test_stream_keepalive(self):
        # Streaming a chunked body to completion must leave the connection
        # reusable for the next request.
        x = 2
        for _ in range(x):
            response = self.pool.request(
                "GET",
                "/chunked",
                headers={"Connection": "keep-alive"},
                preload_content=False,
                retries=False,
            )
            for chunk in response.stream():
                self.assertEqual(chunk, b"123")
        self.assertEqual(self.pool.num_connections, 1)
        self.assertEqual(self.pool.num_requests, x)

    def test_read_chunked_short_circuit(self):
        # After a full read(), read_chunked() has nothing left to yield.
        response = self.pool.request("GET", "/chunked", preload_content=False)
        response.read()
        with pytest.raises(StopIteration):
            next(response.read_chunked())

    def test_read_chunked_on_closed_response(self):
        response = self.pool.request("GET", "/chunked", preload_content=False)
        response.close()
        with pytest.raises(StopIteration):
            next(response.read_chunked())

    def test_chunked_gzip(self):
        response = self.pool.request("GET", "/chunked_gzip", preload_content=False, decode_content=True)
        self.assertEqual(b"123" * 4, response.read())

    def test_cleanup_on_connection_error(self):
        """
        Test that connections are recycled to the pool on
        connection errors where no http response is received.
        """
        poolsize = 3
        with HTTPConnectionPool(self.host, self.port, maxsize=poolsize, block=True) as http:
            self.assertEqual(http.pool.qsize(), poolsize)
            # force a connection error by supplying a non-existent
            # url. We won't get a response for this and so the
            # conn won't be implicitly returned to the pool.
            self.assertRaises(
                MaxRetryError,
                http.request,
                "GET",
                "/redirect",
                fields={"target": "/"},
                release_conn=False,
                retries=0,
            )
            r = http.request(
                "GET",
                "/redirect",
                fields={"target": "/"},
                release_conn=False,
                retries=1,
            )
            r.release_conn()
            # the pool should still contain poolsize elements
            self.assertEqual(http.pool.qsize(), http.pool.maxsize)

    def test_mixed_case_hostname(self):
        pool = HTTPConnectionPool("LoCaLhOsT", self.port)
        self.addCleanup(pool.close)
        response = pool.request("GET", "http://LoCaLhOsT:%d/" % self.port)
        self.assertEqual(response.status, 200)
def test_timeout_success(self):
    """A Timeout object can be reused across pools and requests without
    raising a "Timeout already started" error.

    Fix: the three pools created here were never closed, leaking sockets
    into subsequent tests; they are now registered with addCleanup, which
    keeps this block consistent with the other test_timeout_success variant.
    """
    timeout = Timeout(connect=3, read=5, total=None)
    pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
    self.addCleanup(pool.close)
    pool.request('GET', '/')
    # This should not raise a "Timeout already started" error
    pool.request('GET', '/')

    pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
    self.addCleanup(pool.close)
    # This should also not raise a "Timeout already started" error
    pool.request('GET', '/')

    timeout = Timeout(total=None)
    pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
    self.addCleanup(pool.close)
    pool.request('GET', '/')
def test_urlopen_put(self):
    """urlopen dispatches a PUT and the server sees the PUT method."""
    with HTTPConnectionPool(self.host, self.port) as pool:
        resp = pool.urlopen("PUT", "/specific_method?method=PUT")
        assert resp.status == 200, resp.data
class TestRetryAfter(HTTPDummyServerTestCase):
    """Tests for honoring the Retry-After response header on retries
    and redirects."""

    def setUp(self):
        self.pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(self.pool.close)

    def test_retry_after(self):
        """Retries sleep out Retry-After for 429/503 and ignore it for
        statuses outside Retry.RETRY_AFTER_STATUS_CODES."""
        # For each throttling status: request twice in a second so the
        # server answers with that status, then confirm retries=True waits
        # and ultimately gets a 200.
        for status_line, status_code in (
            ("429 Too Many Requests", 429),
            ("503 Service Unavailable", 503),
        ):
            fields = {"status": status_line}
            r = self.pool.request("GET", "/retry_after", fields=fields, retries=False)
            r = self.pool.request("GET", "/retry_after", fields=fields, retries=False)
            self.assertEqual(r.status, status_code)

            r = self.pool.request("GET", "/retry_after", fields=fields, retries=True)
            self.assertEqual(r.status, 200)

        # Ignore Retry-After header on status which is not defined in
        # Retry.RETRY_AFTER_STATUS_CODES.
        r = self.pool.request("GET", "/retry_after", fields={"status": "418 I'm a teapot"}, retries=True)
        self.assertEqual(r.status, 418)

    def test_redirect_after(self):
        """Redirect responses carrying Retry-After delay the follow-up
        request; an already-past date causes no delay."""
        resp = self.pool.request("GET", "/redirect_after", retries=False)
        self.assertEqual(resp.status, 303)

        started = time.time()
        resp = self.pool.request("GET", "/redirect_after")
        self.assertEqual(resp.status, 200)
        self.assertGreaterEqual(time.time() - started, 1)

        started = time.time()
        timestamp = started + 2
        resp = self.pool.request("GET", "/redirect_after?date=" + str(timestamp))
        self.assertEqual(resp.status, 200)
        self.assertGreaterEqual(time.time() - started, 1)

        # Retry-After is past
        started = time.time()
        timestamp = started - 1
        resp = self.pool.request("GET", "/redirect_after?date=" + str(timestamp))
        elapsed = time.time() - started
        self.assertEqual(resp.status, 200)
        self.assertLess(elapsed, 1)
# Bulk-download script: fetches stem samples from geo-samples.beatport.com
# over a shared urllib3 connection pool.
from multiprocessing.dummy import Pool
from urllib3 import HTTPConnectionPool
from tqdm import tqdm
import itertools
import os
import errno

# Worker/thread count for the download pool.
n_connections = 32
domain = 'geo-samples.beatport.com'
# One shared pool; urllib3 handles per-thread connection reuse internally.
http_pool = HTTPConnectionPool(domain)
total_tracks = 4106
stems_per_track = 5
# All (track_id, stem_index) pairs: tracks are 1-based, stems 0-based.
identifiers = list(
    itertools.product(range(1, total_tracks + 1), range(0, stems_per_track)))


def mkdir_p(path):
    """Create *path* like `mkdir -p`: succeed silently if it already exists."""
    try:
        os.makedirs(path)
    except OSError as exc:
        # EEXIST on an existing directory is fine; anything else is a real error.
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise


def download(url, fn):
    """Fetch *url* into local file *fn*, skipping files that already exist.

    NOTE(review): the body is truncated in this chunk — the write into the
    opened file is not visible here; confirm against the full source.
    """
    if not os.path.isfile(fn):
        r = http_pool.urlopen('GET', url)
        with open(fn, 'wb') as f:
def test_read_retries_unsuccessful(self):
    """When the retry budget runs out, the final failing response is
    returned rather than an exception."""
    headers = {"test-name": "test_read_retries_unsuccessful"}
    with HTTPConnectionPool(self.host, self.port) as pool:
        response = pool.request("GET", "/successful_retry", headers=headers, retries=1)
        assert response.status == 418
def test_mixed_case_hostname(self):
    """A hostname with mixed casing must still resolve and connect.

    Fix: the pool was never closed, leaking its socket; it is now
    registered with addCleanup, matching the other variants of this test.
    """
    pool = HTTPConnectionPool("LoCaLhOsT", self.port)
    self.addCleanup(pool.close)
    response = pool.request('GET', "http://LoCaLhOsT:%d/" % self.port)
    self.assertEqual(response.status, 200)
def test_keepalive_close(self):
    """A Connection:close response tears down the socket; Connection:
    keep-alive preserves it for reuse.

    Fixes: the pool was never closed (now registered with addCleanup),
    and assertEqual/assertNotEqual against None are replaced with
    assertIsNone/assertIsNotNone for identity semantics and clearer
    failure messages — consistent with the newer variant of this test.
    """
    pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)
    self.addCleanup(pool.close)

    r = pool.request('GET', '/keepalive?close=1', retries=0, headers={
        "Connection": "close",
    })
    self.assertEqual(pool.num_connections, 1)

    # The dummyserver will have responded with Connection:close,
    # and httplib will properly cleanup the socket.

    # We grab the HTTPConnection object straight from the Queue,
    # because _get_conn() is where the check & reset occurs
    # pylint: disable-msg=W0212
    conn = pool.pool.get()
    self.assertIsNone(conn.sock)
    pool._put_conn(conn)

    # Now with keep-alive
    r = pool.request('GET', '/keepalive?close=0', retries=0, headers={
        "Connection": "keep-alive",
    })

    # The dummyserver responded with Connection:keep-alive, the connection
    # persists.
    conn = pool.pool.get()
    self.assertIsNotNone(conn.sock)
    pool._put_conn(conn)

    # Another request asking the server to close the connection. This one
    # should get cleaned up for the next request.
    r = pool.request('GET', '/keepalive?close=1', retries=0, headers={
        "Connection": "close",
    })
    self.assertEqual(r.status, 200)

    conn = pool.pool.get()
    self.assertIsNone(conn.sock)
    pool._put_conn(conn)

    # Next request
    r = pool.request('GET', '/keepalive?close=0')