def test_pool_timeouts(self):
    pool = HTTPConnectionPool(host='localhost')
    conn = pool._new_conn()
    self.assertEqual(conn.__class__, HTTPConnection)
    self.assertEqual(pool.timeout.__class__, Timeout)
    self.assertEqual(pool.timeout._read, Timeout.DEFAULT_TIMEOUT)
    self.assertEqual(pool.timeout._connect, Timeout.DEFAULT_TIMEOUT)
    self.assertEqual(pool.timeout.total, None)

    pool = HTTPConnectionPool(host='localhost', timeout=3)
    self.assertEqual(pool.timeout._read, 3)
    self.assertEqual(pool.timeout._connect, 3)
    self.assertEqual(pool.timeout.total, None)

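# Companion sketch, not part of the original suite: assuming the same
# HTTPConnectionPool/Timeout imports used above, a Timeout object with distinct
# connect/read values should be carried onto the pool unchanged. The method
# name is hypothetical.
def test_pool_timeout_object_sketch(self):
    timeout = Timeout(connect=1.5, read=3)
    pool = HTTPConnectionPool(host='localhost', timeout=timeout)
    self.assertEqual(pool.timeout._connect, 1.5)
    self.assertEqual(pool.timeout._read, 3)
    self.assertEqual(pool.timeout.total, None)
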
def test_retry_exception_str(self):
    self.assertEqual(
        str(MaxRetryError(
            HTTPConnectionPool(host='localhost'), "Test.", None)),
        "HTTPConnectionPool(host='localhost', port=None): "
        "Max retries exceeded with url: Test. (Caused by redirect)")

    err = SocketError("Test")

    # using err.__class__ here, as socket.error is an alias for OSError
    # since Py3.3 and gets printed as this
    self.assertEqual(
        str(MaxRetryError(
            HTTPConnectionPool(host='localhost'), "Test.", err)),
        "HTTPConnectionPool(host='localhost', port=None): "
        "Max retries exceeded with url: Test. "
        "(Caused by %r)" % err)

def test_exceptions_with_objects(self):
    assert self.verify_pickling(HTTPError('foo'))
    assert self.verify_pickling(HTTPError('foo', IOError('foo')))
    assert self.verify_pickling(
        MaxRetryError(HTTPConnectionPool('localhost'), '/', None))
    assert self.verify_pickling(LocationParseError('fake location'))
    assert self.verify_pickling(
        ClosedPoolError(HTTPConnectionPool('localhost'), None))
    assert self.verify_pickling(
        EmptyPoolError(HTTPConnectionPool('localhost'), None))
    assert self.verify_pickling(
        HostChangedError(HTTPConnectionPool('localhost'), '/', None))
    assert self.verify_pickling(
        ReadTimeoutError(HTTPConnectionPool('localhost'), '/', None))

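# Hypothetical sketch of the verify_pickling helper referenced above (the real
# helper is defined elsewhere in this suite): round-trip the exception through
# pickle and report whether the reconstructed object has the expected type.
def verify_pickling_sketch(self, exc):
    import pickle
    return isinstance(pickle.loads(pickle.dumps(exc)), exc.__class__)
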
def test_max_connections(self):
    pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)

    try:
        yield From(pool._get_conn(timeout=0.1))

        try:
            yield From(pool._get_conn(timeout=0.1))
            self.fail("Managed to get a connection without EmptyPoolError")
        except EmptyPoolError:
            pass

        try:
            yield From(pool.request('GET', '/', pool_timeout=0.1))
            self.fail("Managed to get a connection without EmptyPoolError")
        except EmptyPoolError:
            pass
    except Exception as e:
        self.fail('Unexpected exception raised: %r' % e)

    self.assertEqual(pool.num_connections, 1)

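# Companion sketch, not part of the original suite: once the only connection is
# returned with _put_conn, a blocked maxsize=1 pool should hand it out again
# without raising EmptyPoolError. The method name is hypothetical.
def test_max_connections_reuse_sketch(self):
    pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)
    conn = yield From(pool._get_conn(timeout=0.1))
    pool._put_conn(conn)
    self.assertEqual(conn, (yield From(pool._get_conn(timeout=0.1))))
    self.assertEqual(pool.num_connections, 1)
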
def test_pool_edgecases(self):
    pool = HTTPConnectionPool(host='localhost', maxsize=1, block=False)
    conn1 = yield From(pool._get_conn(timeout=0.1))
    conn2 = yield From(pool._get_conn(timeout=0.1))  # New because block=False

    pool._put_conn(conn1)
    pool._put_conn(conn2)  # Should be discarded

    self.assertEqual(conn1, (yield From(pool._get_conn())))
    self.assertNotEqual(conn2, (yield From(pool._get_conn(timeout=0.1))))

    self.assertEqual(pool.num_connections, 3)

def test_pool_size(self):
    POOL_SIZE = 1
    pool = HTTPConnectionPool(host='localhost', maxsize=POOL_SIZE, block=True)

    def _raise(ex):
        raise ex()

    @asyncio.coroutine
    def _test(exception, expect):
        pool._make_request = lambda *args, **kwargs: _raise(exception)
        try:
            yield From(pool.request('GET', '/'))
        except expect:
            pass
        else:
            self.fail('Expected exception %s not raised' % expect)

        self.assertEqual(pool.pool.qsize(), POOL_SIZE)

    # Make sure that all of the exceptions return the connection to the pool
    yield From(_test(Empty, EmptyPoolError))
    yield From(_test(BaseSSLError, SSLError))
    yield From(_test(CertificateError, SSLError))

    # The pool should never be empty, and with these two exceptions being
    # raised, a retry will be triggered, but that retry will fail, eventually
    # raising MaxRetryError, not EmptyPoolError
    # See: https://github.com/shazow/urllib3/issues/76
    pool._make_request = lambda *args, **kwargs: _raise(HTTPException)
    try:
        yield From(pool.request('GET', '/', retries=1, pool_timeout=0.01))
    except MaxRetryError:
        pass
    else:
        self.fail('MaxRetryError not raised')

    self.assertEqual(pool.pool.qsize(), POOL_SIZE)

def test_queue_overload(self):
    http = HTTPConnectionPool('httpbin.org', maxsize=3, block=True, timeout=3)
    testLoop = asyncio.get_event_loop()
    testLoop.set_debug(True)

    global test_queue_overload_count
    test_queue_overload_count = 0

    @asyncio.coroutine
    def get_page():
        global test_queue_overload_count
        try:
            resp = yield From(
                http.request('GET', '/delay/1', pool_timeout=3))
            pg = yield From(resp.data)
            self.assertTrue(b'Connection' in pg, pg)
        except EmptyPoolError:
            pass
        except Exception as e:
            raise
        else:
            test_queue_overload_count += 1

    pageGetters = [
        get_page(), get_page(), get_page(), get_page(), get_page()
    ]
    testLoop.run_until_complete(
        asyncio.wait(pageGetters, return_when=asyncio.ALL_COMPLETED))

    self.assertGreater(test_queue_overload_count, 4,
                       'not all page_getters ran')

def test_exception_str(self):
    self.assertEqual(
        str(EmptyPoolError(HTTPConnectionPool(host='localhost'), "Test.")),
        "HTTPConnectionPool(host='localhost', port=None): Test.")