def test_recovery_when_server_closes_connection(self):
    # Does the pool work seamlessly if an open connection in the
    # connection pool gets hung up on by the server, then reaches
    # the front of the queue again?

    done_closing = Event()

    def server(listener):
        for i in 0, 1:
            sock = listener.accept()[0]
            read_request(sock)

            body = 'Response %d' % i
            # Encode before sending: socket.send() requires bytes on
            # Python 3 (a bare str here raises TypeError).
            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n'
                       '%s' % (len(body), body)).encode('utf-8'))

            sock.close()  # simulate a server timing out, closing socket
            done_closing.set()  # let the test know it can proceed

    done_closing = Event()
    host, port = start_server(server)
    pool = HTTPConnectionPool(host, port)

    response = pool.request('GET', '/', retries=0)
    self.assertEqual(response.status, 200)
    # response.data is bytes; bytes literals also compare equal to str
    # on Python 2, so this stays backward compatible.
    self.assertEqual(response.data, b'Response 0')

    done_closing.wait()  # wait until the socket in our pool gets closed

    response = pool.request('GET', '/', retries=0)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b'Response 1')
def test_pool_size(self):
    # A blocking pool of size 1 must return its connection to the pool
    # after every flavour of request failure.
    POOL_SIZE = 1
    pool = HTTPConnectionPool(host='localhost', maxsize=POOL_SIZE, block=True)

    def _fail_with(exc_cls):
        # Build a _make_request replacement that always raises exc_cls.
        def _request(*args, **kwargs):
            raise exc_cls()
        return _request

    # make sure that all of the exceptions return the connection to the pool
    cases = [
        (Empty, TimeoutError),
        (SocketTimeout, TimeoutError),
        (BaseSSLError, SSLError),
        (CertificateError, SSLError),
    ]
    for raised, expected in cases:
        pool._make_request = _fail_with(raised)
        with self.assertRaises(expected):
            pool.request('GET', '/')
        self.assertEqual(pool.pool.qsize(), POOL_SIZE)

    # The pool should never be empty, and with these two exceptions being raised,
    # a retry will be triggered, but that retry will fail, eventually raising
    # MaxRetryError, not EmptyPoolError
    # See: https://github.com/shazow/urllib3/issues/76
    pool._make_request = _fail_with(HTTPException)
    with self.assertRaises(MaxRetryError):
        pool.request('GET', '/', retries=1, pool_timeout=0.01)
    self.assertEqual(pool.pool.qsize(), POOL_SIZE)
def test_pool_size(self):
    # A blocking pool of size 1: after every kind of request failure,
    # the connection must be returned so the pool never drains.
    POOL_SIZE = 1
    pool = HTTPConnectionPool(host='localhost', maxsize=POOL_SIZE, block=True)

    def _raise(ex):
        # Helper usable inside a lambda (raise is a statement).
        raise ex()

    def _test(exception, expect):
        # Force every request to fail with `exception`, assert it
        # surfaces to the caller as `expect`, then verify the
        # connection went back into the pool.
        pool._make_request = lambda *args, **kwargs: _raise(exception)
        with self.assertRaises(expect):
            pool.request('GET', '/')
        self.assertEqual(pool.pool.qsize(), POOL_SIZE)

    # make sure that all of the exceptions return the connection to the pool
    _test(Empty, TimeoutError)
    _test(SocketTimeout, TimeoutError)
    _test(BaseSSLError, SSLError)
    _test(CertificateError, SSLError)

    # The pool should never be empty, and with these two exceptions being raised,
    # a retry will be triggered, but that retry will fail, eventually raising
    # MaxRetryError, not EmptyPoolError
    # See: https://github.com/shazow/urllib3/issues/76
    pool._make_request = lambda *args, **kwargs: _raise(HTTPException)
    with self.assertRaises(MaxRetryError):
        pool.request('GET', '/', retries=1, pool_timeout=0.01)
    self.assertEqual(pool.pool.qsize(), POOL_SIZE)
def test_retry_when_server_closes_connection_with_no_data(self):
    # Test that the retry mechanism works when the server drops the
    # connection prematurely with no data (issue #104).
    done_closing = Event()

    def socket_handler(listener):
        for i in 0, 1, 2:
            sock = listener.accept()[0]

            # Only interact with the client on the second connection;
            # the others close immediately, simulating a server that
            # hangs up without sending anything.
            if i == 1:
                buf = b""
                while not buf.endswith(b"\r\n\r\n"):
                    # Accumulate (+=): a request split across several
                    # recv() calls would otherwise never terminate
                    # this loop.
                    buf += sock.recv(65536)

                body = "Response %d" % i
                sock.send(
                    (
                        "HTTP/1.1 200 OK\r\n"
                        "Content-Type: text/plain\r\n"
                        "Content-Length: %d\r\n"
                        "\r\n"
                        "%s" % (len(body), body)
                    ).encode("utf-8")
                )

            sock.close()  # simulate a server timing out, closing socket
            done_closing.set()  # let the test know it can proceed

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)

    # Give the handler a moment to start accepting connections.
    import time
    time.sleep(0.1)

    # The first connection is dropped with no data; the retry must
    # succeed against the second, responding connection.
    response = pool.request("GET", "/", retries=1)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b"Response 1")

    done_closing.wait()  # wait until the socket in our pool gets closed

    # With retries disabled, the dropped third connection must surface
    # as MaxRetryError rather than being silently retried.
    with self.assertRaises(MaxRetryError):
        pool.request("GET", "/", retries=0)

    done_closing.wait()  # wait until the socket in our pool gets closed
def test_retry_when_server_closes_connection_with_no_data(self):
    # Test that the retry mechanism works when the server drops the
    # connection prematurely with no data (issue #104).
    done_closing = Event()

    def socket_handler(listener):
        for i in 0, 1, 2:
            sock = listener.accept()[0]

            # Only interact with the client on the second connection;
            # the others close immediately, simulating a server that
            # hangs up without sending anything.
            if i == 1:
                buf = b''
                while not buf.endswith(b'\r\n\r\n'):
                    # Accumulate (+=): a request split across several
                    # recv() calls would otherwise never terminate
                    # this loop.
                    buf += sock.recv(65536)

                body = 'Response %d' % i
                sock.send(('HTTP/1.1 200 OK\r\n'
                           'Content-Type: text/plain\r\n'
                           'Content-Length: %d\r\n'
                           '\r\n'
                           '%s' % (len(body), body)).encode('utf-8'))

            sock.close()  # simulate a server timing out, closing socket
            done_closing.set()  # let the test know it can proceed

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)

    # Give the handler a moment to start accepting connections.
    import time
    time.sleep(0.1)

    # The first connection is dropped with no data; the retry must
    # succeed against the second, responding connection.
    response = pool.request('GET', '/', retries=1)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b'Response 1')

    done_closing.wait()  # wait until the socket in our pool gets closed

    # With retries disabled, the dropped third connection must surface
    # as MaxRetryError rather than being silently retried.
    with self.assertRaises(MaxRetryError):
        pool.request('GET', '/', retries=0)

    done_closing.wait()  # wait until the socket in our pool gets closed
def test_max_connections(self):
    # A blocking pool of size 1 must refuse to hand out a second
    # connection while the first is checked out.
    pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)

    # Take the only connection out of the pool.
    pool._get_conn(timeout=0.01)

    # A second direct checkout must time out empty.
    with self.assertRaises(EmptyPoolError):
        pool._get_conn(timeout=0.01)

    # A request must hit the same limit via pool_timeout.
    with self.assertRaises(EmptyPoolError):
        pool.request('GET', '/', pool_timeout=0.01)

    # Only one connection was ever created.
    self.assertEqual(pool.num_connections, 1)
def test_recovery_when_server_closes_connection(self):
    # Does the pool work seamlessly if an open connection in the
    # connection pool gets hung up on by the server, then reaches
    # the front of the queue again?

    done_closing = Event()

    def socket_handler(listener):
        for i in 0, 1:
            sock = listener.accept()[0]

            buf = b""
            while not buf.endswith(b"\r\n\r\n"):
                # Accumulate (+=): overwriting buf would only inspect
                # the latest chunk, missing a terminator split across
                # recv() calls.
                buf += sock.recv(65536)

            body = "Response %d" % i
            sock.send(
                (
                    "HTTP/1.1 200 OK\r\n"
                    "Content-Type: text/plain\r\n"
                    "Content-Length: %d\r\n"
                    "\r\n"
                    "%s" % (len(body), body)
                ).encode("utf-8")
            )

            sock.close()  # simulate a server timing out, closing socket
            done_closing.set()  # let the test know it can proceed

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)

    response = pool.request("GET", "/", retries=0)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b"Response 0")

    done_closing.wait()  # wait until the socket in our pool gets closed

    # The pooled (now dead) connection must be replaced transparently.
    response = pool.request("GET", "/", retries=0)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b"Response 1")

    done_closing.wait()  # wait until the socket in our pool gets closed
def test_recovery_when_server_closes_connection(self):
    # Does the pool work seamlessly if an open connection in the
    # connection pool gets hung up on by the server, then reaches
    # the front of the queue again?

    done_closing = Event()

    def socket_handler(listener):
        for i in 0, 1:
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                # Accumulate (+=): overwriting buf would only inspect
                # the latest chunk, missing a terminator split across
                # recv() calls.
                buf += sock.recv(65536)

            body = 'Response %d' % i
            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n'
                       '%s' % (len(body), body)).encode('utf-8'))

            sock.close()  # simulate a server timing out, closing socket
            done_closing.set()  # let the test know it can proceed

    self._start_server(socket_handler)
    pool = HTTPConnectionPool(self.host, self.port)

    response = pool.request('GET', '/', retries=0)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b'Response 0')

    done_closing.wait()  # wait until the socket in our pool gets closed

    # The pooled (now dead) connection must be replaced transparently.
    response = pool.request('GET', '/', retries=0)
    self.assertEqual(response.status, 200)
    self.assertEqual(response.data, b'Response 1')

    done_closing.wait()  # wait until the socket in our pool gets closed
def test_multi_setcookie(self):
    # Multiple Set-Cookie headers must be folded into one
    # comma-joined header value.
    def multicookie_response_handler(listener):
        sock = listener.accept()[0]

        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += sock.recv(65536)

        sock.send(b"HTTP/1.1 200 OK\r\n"
                  b"Set-Cookie: foo=1\r\n"
                  b"Set-Cookie: bar=1\r\n"
                  b"\r\n")

    self._start_server(multicookie_response_handler)
    pool = HTTPConnectionPool(self.host, self.port)
    r = pool.request("GET", "/", retries=0)
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(r.headers, {"set-cookie": "foo=1, bar=1"})
def test_multi_setcookie(self):
    # Multiple Set-Cookie headers must be folded into one
    # comma-joined header value.
    def multicookie_response_handler(listener):
        sock = listener.accept()[0]

        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        sock.send(b'HTTP/1.1 200 OK\r\n'
                  b'Set-Cookie: foo=1\r\n'
                  b'Set-Cookie: bar=1\r\n'
                  b'\r\n')

    self._start_server(multicookie_response_handler)
    pool = HTTPConnectionPool(self.host, self.port)
    r = pool.request('GET', '/', retries=0)
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(r.headers, {'set-cookie': 'foo=1, bar=1'})
def test_mixed_case_url(self):
    # A pool built for a mixed-case host must serve a request whose
    # URL uses the same mixed-case spelling.
    pool = HTTPConnectionPool('Example.com')
    resp = pool.request('GET', 'http://Example.com')
    self.assertEqual(resp.status, 200)