    def test_connect_timeout(self):
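        """Connect timeouts against TARPIT_HOST should raise ConnectTimeoutError,
        or MaxRetryError once connect retries are exhausted."""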
        url = '/sleep?seconds=0.005'
        timeout = Timeout(connect=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        yield From(self.aioAssertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET', url))

        # Retries
        retries = Retry(connect=0)
        yield From(self.aioAssertRaises(MaxRetryError, pool.request, 'GET', url,
                          retries=retries))

        # Request-specific connection timeouts
        big_timeout = Timeout(read=0.2, connect=0.2)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port,
                                  timeout=big_timeout, retries=False)
        conn = pool._get_conn()
        yield From(self.aioAssertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url, timeout=timeout))

        pool._put_conn(conn)
        yield From(self.aioAssertRaises(ConnectTimeoutError, pool.request, 'GET', url,
                          timeout=timeout))
    def test_conn_closed(self):
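        """After a read timeout, the pooled connection's socket should be closed
        and no longer usable."""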
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001, retries=False)
        conn = pool._get_conn()
        pool._put_conn(conn)
        try:
            url = '/sleep?seconds=0.005'
            yield From(pool.urlopen('GET', url))
            self.fail("The request should fail with a timeout error.")
        except ReadTimeoutError:
            if conn.sock:
                self.assertRaises(socket.error, conn.sock.recv, 1024)
        finally:
            pool._put_conn(conn)
    def test_keepalive_close(self):
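        """A 'Connection: close' response should drop the socket; a keep-alive
        response should leave it open for reuse."""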
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = yield From(pool.request('GET', '/keepalive?close=1', retries=0,
                                     headers={
                                         "Connection": "close",
                                     }))

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly clean up the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = yield From(pool.request('GET', '/keepalive?close=0', retries=0,
                                     headers={
                                         "Connection": "keep-alive",
                                     }))

        # The dummyserver responded with Connection:keep-alive, so the
        # connection persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = yield From(pool.request('GET', '/keepalive?close=1', retries=0,
                                     headers={
                                         "Connection": "close",
                                     }))

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = yield From(pool.request('GET', '/keepalive?close=0'))
    def test_timeout(self):
        """ Requests should time out when expected """
        url = '/sleep?seconds=0.002'
        timeout = Timeout(read=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout, retries=False)

        conn = pool._get_conn()
        yield From(self.aioAssertRaises(ReadTimeoutError, pool._make_request,
                          conn, 'GET', url))
        pool._put_conn(conn)

        time.sleep(0.02)  # Wait for server to start receiving again. :(

        yield From(self.aioAssertRaises(ReadTimeoutError, pool.request, 'GET', url))

        # Request-specific timeouts should raise errors
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.1, retries=False)

        conn = pool._get_conn()
        yield From(self.aioAssertRaises(ReadTimeoutError, pool._make_request,
                          conn, 'GET', url, timeout=timeout))
        pool._put_conn(conn)

        time.sleep(0.02)  # Wait for server to start receiving again. :(

        yield From(self.aioAssertRaises(ReadTimeoutError, pool.request,
                          'GET', url, timeout=timeout))

        # Timeout int/float passed directly to request and _make_request should
        # raise a request timeout
        yield From(self.aioAssertRaises(ReadTimeoutError, pool.request,
                          'GET', url, timeout=0.001))
        conn = pool._new_conn()
        yield From(self.aioAssertRaises(ReadTimeoutError, pool._make_request, conn,
                          'GET', url, timeout=0.001))
        pool._put_conn(conn)

        # Timeout int/float passed directly to _make_request should not raise a
        # request timeout if it's a high value
        yield From(pool.request('GET', url, timeout=1))