Example No. 1
    def test_recovery_when_server_closes_connection(self):
        # Does the pool work seamlessly if an open connection in the
        # connection pool gets hung up on by the server, then reaches
        # the front of the queue again?

        def server(listener):
            for i in 0, 1:
                sock = listener.accept()[0]
                read_request(sock)
                body = 'Response %d' % i
                sock.send('HTTP/1.1 200 OK\r\n'
                          'Content-Type: text/plain\r\n'
                          'Content-Length: %d\r\n'
                          '\r\n'
                          '%s' % (len(body), body))
                sock.close()  # simulate a server timing out, closing socket
                done_closing.set()  # let the test know it can proceed

        done_closing = Event()
        host, port = start_server(server)
        pool = HTTPConnectionPool(host, port)

        response = pool.request('GET', '/', retries=0)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, 'Response 0')

        done_closing.wait()  # wait until the socket in our pool gets closed

        response = pool.request('GET', '/', retries=0)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, 'Response 1')
Example No. 2
    def __init__(
        self,
        schema: TableSchema,
        host,
        port,
        encoder: Callable[[WriterTableRow], bytes],
        options=None,
        table_name=None,
        chunk_size: int = 1,
    ):
        """
        Builds a writer to send a batch to Clickhouse.

        :param schema: The dataset schema to take the table name from
        :param host: Clickhouse host
        :param port: Clickhouse port
        :param encoder: A function that will be applied to each row to turn it into bytes
        :param options: options passed to Clickhouse
        :param table_name: Overrides the table coming from the schema (generally used for uploading
            to temporary tables)
        :param chunk_size: The chunk size (in rows).
            We send data to the server with Transfer-Encoding: chunked. If 0 we send the entire
            content in one chunk.
        """
        self.__pool = HTTPConnectionPool(host, port)
        self.__options = options if options is not None else {}
        self.__table_name = table_name or schema.get_table_name()
        self.__chunk_size = chunk_size
        self.__encoder = encoder
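The docstring above spells out the chunking contract, but the snippet stops at the constructor. The following is a minimal, hypothetical sketch (not taken from the original project) of how a batch of rows could be pushed through the pool with Transfer-Encoding: chunked; the send_rows helper, the url argument, and the plain-dict row type are illustrative assumptions.

# Hypothetical helper, not part of the original writer: groups encoded rows
# into HTTP chunks and sends them through an urllib3 connection pool.
from typing import Callable, Iterable, List

from urllib3 import HTTPConnectionPool


def send_rows(
    pool: HTTPConnectionPool,
    url: str,                          # assumed insert endpoint
    rows: Iterable[dict],
    encoder: Callable[[dict], bytes],
    chunk_size: int = 1,
) -> None:
    encoded: List[bytes] = [encoder(row) for row in rows]
    if chunk_size > 0:
        # One HTTP chunk per `chunk_size` rows.
        body: Iterable[bytes] = (
            b"".join(encoded[i:i + chunk_size])
            for i in range(0, len(encoded), chunk_size)
        )
    else:
        # chunk_size == 0: the whole payload goes out as a single chunk.
        body = iter([b"".join(encoded)])
    # urllib3 sends an iterable body with Transfer-Encoding: chunked when
    # chunked=True is passed to urlopen().
    pool.urlopen("POST", url, body=body, chunked=True)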
Example No. 3
    def __init__(self,
                 host,
                 port,
                 certfile=None,
                 keyfile=None,
                 cacertfile=None,
                 force_ssl=False,
                 *args,
                 **kw):
        super(ConnectionPoolManager, self).__init__(*args, **kw)

        self.logger.debug("Creating ConnectionPoolManager for %s:%s", host,
                          port)

        if certfile or keyfile or force_ssl:
            #https://docs.python.org/2/library/ssl.html#ssl.SSLContext
            from ssl import SSLContext, PROTOCOL_SSLv23
            ssl_context = SSLContext(PROTOCOL_SSLv23)
            ssl_context.load_cert_chain(certfile=certfile, keyfile=keyfile)
            ssl_context.load_verify_locations(cafile=cacertfile)
            #https://docs.python.org/2/library/httplib.html
            self.__pool = HTTPSConnectionPool(host,
                                              port,
                                              maxsize=16,
                                              context=ssl_context)
        else:
            self.__pool = HTTPConnectionPool(host, port, maxsize=16)
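A side note on the example above: PROTOCOL_SSLv23 still works as a catch-all protocol constant but is deprecated in current Python. Below is a minimal sketch of the same pool setup built with ssl.create_default_context; the file names are placeholders, and it assumes urllib3's ssl_context connection keyword, which current releases accept.

# Sketch only: the same client-certificate pool with non-deprecated ssl APIs.
import ssl

from urllib3 import HTTPSConnectionPool

ssl_context = ssl.create_default_context(cafile="ca.pem")  # placeholder CA bundle
ssl_context.load_cert_chain(certfile="client.pem", keyfile="client.key")  # placeholder client cert
pool = HTTPSConnectionPool("example.com", 443, maxsize=16, ssl_context=ssl_context)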
Example No. 4
    def test_pool_size(self):
        POOL_SIZE = 1
        pool = HTTPConnectionPool(host='localhost', maxsize=POOL_SIZE, block=True)

        def _raise(ex):
            raise ex()

        def _test(exception, expect):
            pool._make_request = lambda *args, **kwargs: _raise(exception)
            self.assertRaises(expect, pool.request, 'GET', '/')

            self.assertEqual(pool.pool.qsize(), POOL_SIZE)

        # Make sure that all of the exceptions return the connection to the pool
        _test(Empty, EmptyPoolError)
        _test(BaseSSLError, SSLError)
        _test(CertificateError, SSLError)

        # The pool should never be empty, and with these two exceptions being raised,
        # a retry will be triggered, but that retry will fail, eventually raising
        # MaxRetryError, not EmptyPoolError
        # See: https://github.com/shazow/urllib3/issues/76
        pool._make_request = lambda *args, **kwargs: _raise(HTTPException)
        self.assertRaises(MaxRetryError, pool.request,
                          'GET', '/', retries=1, pool_timeout=0.01)
        self.assertEqual(pool.pool.qsize(), POOL_SIZE)
Example No. 5
    def test_retry_when_server_closes_connection_with_no_data(self):
        # Test that the retry mechanism works when the server drops the connection
        # prematurely

        done_closing = Event()
        ready = Event()

        def socket_handler(listener):
            for i in 0, 1, 2:
                print("Entering", i)
                sock = listener.accept()[0]
                print("Accepting", i)

                # only interact with client the second time
                if i == 1:
                    buf = b""
                    while not buf.endswith(b"\r\n\r\n"):
                        print("Reading...")
                        buf = sock.recv(65536)

                    print("Sending...")
                    body = "Response %d" % i
                    sock.send(
                        (
                            "HTTP/1.1 200 OK\r\n"
                            "Content-Type: text/plain\r\n"
                            "Content-Length: %d\r\n"
                            "\r\n"
                            "%s" % (len(body), body)
                        ).encode("utf-8")
                    )
                    print("Done.")

                sock.close()  # simulate a server timing out, closing socket
                print("Setting done", i)
                done_closing.set()  # let the test know it can proceed

        self._start_server(socket_handler)

        pool = HTTPConnectionPool(self.host, self.port)

        # Should succeed in the second retry
        import time

        time.sleep(0.1)
        response = pool.request("GET", "/", retries=1)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b"Response 1")

        print("(Client) Waiting...")
        done_closing.wait()  # wait until the socket in our pool gets closed

        # Fail with no retries
        with self.assertRaises(MaxRetryError):
            # This is where a failure should occur for issue #104.
            response = pool.request("GET", "/", retries=0)

        print("(Client) Waiting final...")
        done_closing.wait()  # wait until the socket in our pool gets closed
Example No. 6
    def __init__(self, host, port=None, strict=False,
                 timeout=None, maxsize=1, block=False, headers=headers):
        try:
            host = get_host(host)[1]
        except TypeError:  # Already a host-ified host
            pass
        headers = {k.lower(): v for (k, v) in headers.items()}
        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, block, headers)
        self.cookie_session = CookieSession()
Example No. 7
    def test_retry_exception_str(self):
        self.assertEqual(
            str(MaxRetryError(HTTPConnectionPool(host='localhost'), "Test.", None)),
            "HTTPConnectionPool(host='localhost', port=None): Max retries exceeded with url: Test.")

        err = SocketError("Test")
        self.assertEqual(
            str(MaxRetryError(HTTPConnectionPool(host='localhost'), "Test.", err)),
            "HTTPConnectionPool(host='localhost', port=None): Max retries exceeded with url: Test. (caused by <class 'socket.error'>: Test)")
Example No. 8
    def test_retry_when_server_closes_connection_with_no_data(self):
        # Test that the retry mechanism works when the server drops the connection
        # prematurely

        done_closing = Event()
        ready = Event()

        def socket_handler(listener):
            for i in 0, 1, 2:
                print("Entering", i)
                sock = listener.accept()[0]
                print("Accepting", i)

                # only interact with client the second time
                if i == 1:
                    buf = b''
                    while not buf.endswith(b'\r\n\r\n'):
                        print("Reading...")
                        buf = sock.recv(65536)

                    print("Sending...")
                    body = 'Response %d' % i
                    sock.send(('HTTP/1.1 200 OK\r\n'
                               'Content-Type: text/plain\r\n'
                               'Content-Length: %d\r\n'
                               '\r\n'
                               '%s' % (len(body), body)).encode('utf-8'))
                    print("Done.")

                sock.close()  # simulate a server timing out, closing socket
                print("Setting done", i)
                done_closing.set()  # let the test know it can proceed

        self._start_server(socket_handler)

        pool = HTTPConnectionPool(self.host, self.port)

        # Should succeed in the second retry
        import time
        time.sleep(0.1)
        response = pool.request('GET', '/', retries=1)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b'Response 1')

        print("(Client) Waiting...")
        done_closing.wait()  # wait until the socket in our pool gets closed

        # Fail with no retries
        with self.assertRaises(MaxRetryError):
            # This is where a failure should occur for issue #104.
            response = pool.request('GET', '/', retries=0)

        print("(Client) Waiting final...")
        done_closing.wait()  # wait until the socket in our pool gets closed
Example No. 9
    def test_pool_timeouts(self):
        with HTTPConnectionPool(host='localhost') as pool:
            conn = pool._new_conn()
            assert conn.__class__ == HTTP1Connection
            assert pool.timeout.__class__ == Timeout
            assert pool.timeout._read == Timeout.DEFAULT_TIMEOUT
            assert pool.timeout._connect == Timeout.DEFAULT_TIMEOUT
            assert pool.timeout.total is None

        with HTTPConnectionPool(host='localhost', timeout=3) as pool:
            assert pool.timeout._read == 3
            assert pool.timeout._connect == 3
            assert pool.timeout.total is None
Example No. 10
    def test_exceptions_with_objects(self):
        assert self.cycle(HTTPError('foo'))
        assert self.cycle(
            MaxRetryError(HTTPConnectionPool('localhost'), '/', None))
        assert self.cycle(LocationParseError('fake location'))
        assert self.cycle(
            ClosedPoolError(HTTPConnectionPool('localhost'), None))
        assert self.cycle(EmptyPoolError(HTTPConnectionPool('localhost'),
                                         None))
        assert self.cycle(
            HostChangedError(HTTPConnectionPool('localhost'), '/', None))
        assert self.cycle(
            ReadTimeoutError(HTTPConnectionPool('localhost'), '/', None))
Example No. 11
    def test_pool_timeouts(self):
        with HTTPConnectionPool(host="localhost") as pool:
            conn = pool._new_conn()
            assert conn.__class__ == HTTPConnection
            assert pool.timeout.__class__ == Timeout
            assert pool.timeout._read == Timeout.DEFAULT_TIMEOUT
            assert pool.timeout._connect == Timeout.DEFAULT_TIMEOUT
            assert pool.timeout.total is None

            pool = HTTPConnectionPool(host="localhost", timeout=SHORT_TIMEOUT)
            assert pool.timeout._read == SHORT_TIMEOUT
            assert pool.timeout._connect == SHORT_TIMEOUT
            assert pool.timeout.total is None
Example No. 12
    def test_pool_timeouts(self):
        pool = HTTPConnectionPool(host='localhost')
        conn = pool._new_conn()
        self.assertEqual(conn.__class__, HTTPConnection)
        self.assertEqual(pool.timeout.__class__, Timeout)
        self.assertEqual(pool.timeout._read, Timeout.DEFAULT_TIMEOUT)
        self.assertEqual(pool.timeout._connect, Timeout.DEFAULT_TIMEOUT)
        self.assertEqual(pool.timeout.total, None)

        pool = HTTPConnectionPool(host='localhost', timeout=3)
        self.assertEqual(pool.timeout._read, 3)
        self.assertEqual(pool.timeout._connect, 3)
        self.assertEqual(pool.timeout.total, None)
Example No. 13
    def test_multi_setcookie(self):
        def multicookie_response_handler(listener):
            sock = listener.accept()[0]

            buf = b""
            while not buf.endswith(b"\r\n\r\n"):
                buf += sock.recv(65536)

            sock.send(b"HTTP/1.1 200 OK\r\n" b"Set-Cookie: foo=1\r\n" b"Set-Cookie: bar=1\r\n" b"\r\n")

        self._start_server(multicookie_response_handler)
        pool = HTTPConnectionPool(self.host, self.port)
        r = pool.request("GET", "/", retries=0)
        self.assertEqual(r.headers, {"set-cookie": "foo=1, bar=1"})
Example No. 14
    def __init__(self, host, port, user="", passwd=""):
        self.c = HTTPConnectionPool(host, port)
        url = url_prefix + "/metadata/authenticate"
        headers = {
            "content-type": "application/x-www-form-urlencoded",
        }

        params = {"user": user, "passwd": passwd}
        resp = self.c.urlopen("POST",
                              url,
                              body=urlencode(params),
                              headers=headers)

        jdata = json.loads(resp.data.decode("utf-8"))
        # jdata = json.loads(resp.data)
        self.auth_code = jdata["auth_code"]
Example No. 15
    def urlopen(self, method, url, body=None, headers=None, retries=3, redirect=True, assert_same_host=True,
                timeout=None, pool_timeout=None, release_conn=None, **response_kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        if headers is None:
            headers = {k.lower(): v for (k, v) in self.headers.items()}
        headers.setdefault("cookie", "")
        for key, val in self.headers.items():
            headers.setdefault(key, val)
        # Now the updated Cookie string will be stored into the HTTP request.
        # The cookie header may contain duplicate entries (e.g. k=a; k=b;)
        headers["cookie"] = self.headers["cookie"] + headers["cookie"]
        # This will be resolved by putting the header in the SimpleCookie
        self.cookie_session.feed(self.headers)
        self.cookie_session.feed(headers)
        headers["cookie"] = self.cookie_session.extract()
        response = HTTPConnectionPool.urlopen(self, method, url, body, headers, retries, False, assert_same_host, timeout, pool_timeout,
            release_conn, **response_kw)
        self.cookie_session.feed(self.headers)
        self.cookie_session.feed(response.headers)
        self.cookie_session.feed(headers)
        headers["cookie"] = self.cookie_session.extract()
        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response
        if response.status == 303:
            method = "GET"
        return self.urlopen(method, redirect_location, body, headers, retries - 1, redirect, assert_same_host, timeout, pool_timeout,
            release_conn, **response_kw)
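Together with the __init__ in Example No. 6, this override yields a pool that carries cookies across requests and follows redirects itself. A short hypothetical usage sketch follows; CookiePool stands in for the subclass name, which the snippets above do not show.

# Hypothetical usage; CookiePool is an assumed name for the subclass that
# combines the __init__ from Example No. 6 with the urlopen() above.
pool = CookiePool("http://example.com", 80)
login = pool.urlopen("GET", "/login")      # the server may set cookies and redirect
profile = pool.urlopen("GET", "/profile")  # stored cookies are merged into this request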
Example No. 16
    def test_read_timeout_0_does_not_raise_bad_status_line_error(self) -> None:
        with HTTPConnectionPool(host="localhost", maxsize=1) as pool:
            conn = Mock()
            with patch.object(Timeout, "read_timeout", 0):
                timeout = Timeout(1, 1, 1)
                with pytest.raises(ReadTimeoutError):
                    pool._make_request(conn, "", "", timeout)
Example No. 17
    def test_put_conn_when_pool_is_full_nonblocking(
            self, caplog: pytest.LogCaptureFixture) -> None:
        """
        If maxsize = n and we _put_conn n + 1 conns, the n + 1th conn will
        get closed and will not get added to the pool.
        """
        with HTTPConnectionPool(host="localhost", maxsize=1,
                                block=False) as pool:
            conn1 = pool._get_conn()
            # pool.pool is empty because we popped the one None that pool.pool was initialized with
            # but this pool._get_conn call will not raise EmptyPoolError because block is False
            conn2 = pool._get_conn()

            with patch.object(conn1, "close") as conn1_close:
                with patch.object(conn2, "close") as conn2_close:
                    pool._put_conn(conn1)
                    pool._put_conn(conn2)

            assert conn1_close.called is False
            assert conn2_close.called is True

            assert conn1 == pool._get_conn()
            assert conn2 != pool._get_conn()

            assert pool.num_connections == 3
            assert "Connection pool is full, discarding connection" in caplog.text
            assert "Connection pool size: 1" in caplog.text
Example No. 18
    def test_access_token_not_in_exception_traceback(self):
        """Check that access token is replaced within chained request exceptions."""
        backend_name = 'ibmq_qasm_simulator'
        backend = self.provider.get_backend(backend_name)
        circuit = transpile(self.qc1, backend, seed_transpiler=self.seed)
        qobj = assemble(circuit, backend, shots=1)
        client = backend._api_client

        exception_message = 'The access token in this exception ' \
                            'message should be replaced: {}'.format(self.access_token)
        exception_traceback_str = ''
        try:
            with mock.patch.object(HTTPConnectionPool,
                                   'urlopen',
                                   side_effect=MaxRetryError(
                                       HTTPConnectionPool('host'),
                                       'url',
                                       reason=exception_message)):
                _ = client.job_submit(backend.name(), qobj.to_dict())
        except RequestsApiError:
            exception_traceback_str = traceback.format_exc()

        self.assertTrue(exception_traceback_str)
        if self.access_token in exception_traceback_str:
            self.fail(
                'Access token not replaced in request exception traceback.')
Example No. 19
    def test_pool_size(self):
        POOL_SIZE = 1
        with HTTPConnectionPool(host="localhost",
                                maxsize=POOL_SIZE,
                                block=True) as pool:

            def _raise(ex):
                raise ex()

            def _test(exception, expect, reason=None):
                pool._make_request = lambda *args, **kwargs: _raise(exception)
                with pytest.raises(expect) as excinfo:
                    pool.request("GET", "/")
                if reason is not None:
                    assert isinstance(excinfo.value.reason, reason)
                assert pool.pool.qsize() == POOL_SIZE

            # Make sure that all of the exceptions return the connection
            # to the pool
            _test(BaseSSLError, MaxRetryError, SSLError)
            _test(CertificateError, MaxRetryError, SSLError)

            # The pool should never be empty, and with these two exceptions
            # being raised, a retry will be triggered, but that retry will
            # fail, eventually raising MaxRetryError, not EmptyPoolError
            # See: https://github.com/urllib3/urllib3/issues/76
            pool._make_request = lambda *args, **kwargs: _raise(HTTPException)
            with pytest.raises(MaxRetryError):
                pool.request("GET", "/", retries=1, pool_timeout=SHORT_TIMEOUT)
            assert pool.pool.qsize() == POOL_SIZE
Example No. 20
        def _test(exception: Type[BaseException]) -> None:
            with HTTPConnectionPool(host="localhost", maxsize=1,
                                    block=True) as pool:
                # Verify that the request succeeds after two attempts, and that the
                # connection is left on the response object, instead of being
                # released back into the pool.
                with patch.object(
                        pool, "_make_request",
                        _raise_once_make_request_function(exception)):
                    response = pool.urlopen(
                        "GET",
                        "/",
                        retries=1,
                        release_conn=False,
                        preload_content=False,
                        chunked=True,
                    )
                assert pool.pool is not None
                assert pool.pool.qsize() == 0
                assert pool.num_connections == 2
                assert response.connection is not None

                response.release_conn()
                assert pool.pool.qsize() == 1
                assert response.connection is None
Example No. 21
    def test_put_conn_when_pool_is_full_nonblocking(self):
        """
        If maxsize = n and we _put_conn n + 1 conns, the n + 1th conn will
        get closed and will not get added to the pool.
        """
        with HTTPConnectionPool(host="localhost", maxsize=1,
                                block=False) as pool:
            conn1 = pool._get_conn()
            # pool.pool is empty because we popped the one None that pool.pool was initialized with
            # but this pool._get_conn call will not raise EmptyPoolError because block is False
            conn2 = pool._get_conn()

            conn1.close = Mock()
            conn2.close = Mock()

            pool._put_conn(conn1)
            pool._put_conn(conn2)

            assert conn1.close.called is False
            assert conn2.close.called is True

            assert conn1 == pool._get_conn()
            assert conn2 != pool._get_conn()

            assert pool.num_connections == 3
Example No. 22
    def test_retry_exception_str(self):
        assert (str(
            MaxRetryError(
                HTTPConnectionPool(host="localhost"), "Test.",
                None)) == "HTTPConnectionPool(host='localhost', port=None): "
                "Max retries exceeded with url: Test. (Caused by None)")

        err = SocketError("Test")

        # using err.__class__ here, as socket.error is an alias for OSError
        # since Py3.3 and gets printed as this
        assert (str(
            MaxRetryError(
                HTTPConnectionPool(host="localhost"), "Test.",
                err)) == "HTTPConnectionPool(host='localhost', port=None): "
                "Max retries exceeded with url: Test. "
                "(Caused by %r)" % err)
Example No. 23
    def test_retry_exception_str(self):
        self.assertEqual(
            str(MaxRetryError(
                HTTPConnectionPool(host='localhost'), "Test.", None)),
            "HTTPConnectionPool(host='localhost', port=None): "
            "Max retries exceeded with url: Test. (Caused by redirect)")

        err = SocketError("Test")

        # using err.__class__ here, as socket.error is an alias for OSError
        # since Py3.3 and gets printed as this
        self.assertEqual(
            str(MaxRetryError(
                HTTPConnectionPool(host='localhost'), "Test.", err)),
            "HTTPConnectionPool(host='localhost', port=None): "
            "Max retries exceeded with url: Test. "
            "(Caused by {0}: Test)".format(str(err.__class__)))
Example No. 24
        def _test(exception):
            pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)

            # Verify that the request succeeds after two attempts, and that the
            # connection is left on the response object, instead of being
            # released back into the pool.
            pool._make_request = _raise_once_make_request_function(exception)
            response = pool.urlopen('GET', '/', retries=1,
                                    release_conn=False, preload_content=False,
                                    chunked=True)
            self.assertEqual(pool.pool.qsize(), 0)
            self.assertEqual(pool.num_connections, 2)
            self.assertTrue(response.connection is not None)

            response.release_conn()
            self.assertEqual(pool.pool.qsize(), 1)
            self.assertTrue(response.connection is None)
Example No. 25
    def test_multi_setcookie(self):
        def multicookie_response_handler(listener):
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            sock.send(b'HTTP/1.1 200 OK\r\n'
                      b'Set-Cookie: foo=1\r\n'
                      b'Set-Cookie: bar=1\r\n'
                      b'\r\n')

        self._start_server(multicookie_response_handler)
        pool = HTTPConnectionPool(self.host, self.port)
        r = pool.request('GET', '/', retries=0)
        self.assertEqual(r.headers, {'set-cookie': 'foo=1, bar=1'})
Example No. 26
class TestPickle(object):
    @pytest.mark.parametrize('exception', [
        HTTPError(None),
        MaxRetryError(None, None, None),
        LocationParseError(None),
        ConnectTimeoutError(None),
        HTTPError('foo'),
        HTTPError('foo', IOError('foo')),
        MaxRetryError(HTTPConnectionPool('localhost'), '/', None),
        LocationParseError('fake location'),
        ClosedPoolError(HTTPConnectionPool('localhost'), None),
        EmptyPoolError(HTTPConnectionPool('localhost'), None),
        ReadTimeoutError(HTTPConnectionPool('localhost'), '/', None),
    ])
    def test_exceptions(self, exception):
        result = pickle.loads(pickle.dumps(exception))
        assert isinstance(result, type(exception))
Example No. 27
    def test_recovery_when_server_closes_connection(self):
        # Does the pool work seamlessly if an open connection in the
        # connection pool gets hung up on by the server, then reaches
        # the front of the queue again?

        done_closing = Event()

        def socket_handler(listener):
            for i in 0, 1:
                sock = listener.accept()[0]

                buf = b""
                while not buf.endswith(b"\r\n\r\n"):
                    buf = sock.recv(65536)

                body = "Response %d" % i
                sock.send(
                    (
                        "HTTP/1.1 200 OK\r\n"
                        "Content-Type: text/plain\r\n"
                        "Content-Length: %d\r\n"
                        "\r\n"
                        "%s" % (len(body), body)
                    ).encode("utf-8")
                )

                sock.close()  # simulate a server timing out, closing socket
                done_closing.set()  # let the test know it can proceed

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)

        response = pool.request("GET", "/", retries=0)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b"Response 0")

        done_closing.wait()  # wait until the socket in our pool gets closed

        response = pool.request("GET", "/", retries=0)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b"Response 1")

        done_closing.wait()  # wait until the socket in our pool gets closed
Example No. 28
    def test_empty_does_not_put_conn(self):
        """Do not put None back in the pool if the pool was empty"""

        with HTTPConnectionPool(host="localhost", maxsize=1,
                                block=True) as pool:
            pool._get_conn = Mock(
                side_effect=EmptyPoolError(pool, "Pool is empty"))
            pool._put_conn = Mock(
                side_effect=AssertionError("Unexpected _put_conn"))
            with pytest.raises(EmptyPoolError):
                pool.request("GET", "/")
Example No. 29
    def test_max_connections(self):
        with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool:
            pool._get_conn(timeout=0.01)

            with pytest.raises(EmptyPoolError):
                pool._get_conn(timeout=0.01)

            with pytest.raises(EmptyPoolError):
                pool.request("GET", "/", pool_timeout=0.01)

            assert pool.num_connections == 1
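The test above pins down the user-facing contract: maxsize bounds the pooled connections, block=True makes callers wait for a free one, and pool_timeout turns that wait into EmptyPoolError. A small illustrative sketch of the same knobs outside the test harness (the host and timings are placeholders):

# Illustration only; not part of the test suite above.
from urllib3 import HTTPConnectionPool
from urllib3.exceptions import EmptyPoolError

pool = HTTPConnectionPool("example.com", maxsize=2, block=True)
try:
    response = pool.request("GET", "/", pool_timeout=1.0)
except EmptyPoolError:
    # Both pooled connections stayed busy for the full second.
    pass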
Example No. 30
class TestPickle(object):
    @pytest.mark.parametrize(
        "exception",
        [
            HTTPError(None),
            MaxRetryError(None, None, None),
            LocationParseError(None),
            ConnectTimeoutError(None),
            HTTPError("foo"),
            HTTPError("foo", IOError("foo")),
            MaxRetryError(HTTPConnectionPool("localhost"), "/", None),
            LocationParseError("fake location"),
            ClosedPoolError(HTTPConnectionPool("localhost"), None),
            EmptyPoolError(HTTPConnectionPool("localhost"), None),
            ReadTimeoutError(HTTPConnectionPool("localhost"), "/", None),
        ],
    )
    def test_exceptions(self, exception):
        result = pickle.loads(pickle.dumps(exception))
        assert isinstance(result, type(exception))
Example No. 31
    def test_pool_edgecases(self):
        with HTTPConnectionPool(host="localhost", maxsize=1, block=False) as pool:
            conn1 = pool._get_conn()
            conn2 = pool._get_conn()  # New because block=False

            pool._put_conn(conn1)
            pool._put_conn(conn2)  # Should be discarded

            assert conn1 == pool._get_conn()
            assert conn2 != pool._get_conn()

            assert pool.num_connections == 3
Example No. 32
    def test_recovery_when_server_closes_connection(self):
        # Does the pool work seamlessly if an open connection in the
        # connection pool gets hung up on by the server, then reaches
        # the front of the queue again?

        done_closing = Event()

        def socket_handler(listener):
            for i in 0, 1:
                sock = listener.accept()[0]

                buf = b''
                while not buf.endswith(b'\r\n\r\n'):
                    buf = sock.recv(65536)

                body = 'Response %d' % i
                sock.send(('HTTP/1.1 200 OK\r\n'
                           'Content-Type: text/plain\r\n'
                           'Content-Length: %d\r\n'
                           '\r\n'
                           '%s' % (len(body), body)).encode('utf-8'))

                sock.close()  # simulate a server timing out, closing socket
                done_closing.set()  # let the test know it can proceed

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)

        response = pool.request('GET', '/', retries=0)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b'Response 0')

        done_closing.wait()  # wait until the socket in our pool gets closed

        response = pool.request('GET', '/', retries=0)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b'Response 1')

        done_closing.wait()  # wait until the socket in our pool gets closed
Example No. 33
    def test_put_conn_closed_pool(self):
        with HTTPConnectionPool(host="localhost", maxsize=1,
                                block=True) as pool:
            conn1 = pool._get_conn()
            conn1.close = Mock()

            pool.close()
            assert pool.pool is None

            # Accessing pool.pool will raise AttributeError, which will get
            # caught and will close conn1
            pool._put_conn(conn1)

            assert conn1.close.called is True
Example No. 34
    def test_pool_edgecases(self):
        pool = HTTPConnectionPool(host='localhost', maxsize=1, block=False)

        conn1 = pool._get_conn()
        conn2 = pool._get_conn() # New because block=False

        pool._put_conn(conn1)
        pool._put_conn(conn2) # Should be discarded

        self.assertEqual(conn1, pool._get_conn())
        self.assertNotEqual(conn2, pool._get_conn())

        self.assertEqual(pool.num_connections, 3)
Example No. 35
    def test_max_connections(self):
        pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)

        pool._get_conn(timeout=0.01)

        try:
            pool._get_conn(timeout=0.01)
            self.fail("Managed to get a connection without EmptyPoolError")
        except EmptyPoolError:
            pass

        try:
            pool.request('GET', '/', pool_timeout=0.01)
            self.fail("Managed to get a connection without EmptyPoolError")
        except EmptyPoolError:
            pass

        self.assertEqual(pool.num_connections, 1)
Example No. 36
        def _test(exception, *args):
            with HTTPConnectionPool(host="localhost", maxsize=1,
                                    block=True) as pool:
                # Verify that the request succeeds after two attempts, and that the
                # connection is left on the response object, instead of being
                # released back into the pool.
                pool._make_request = _raise_once_make_request_function(
                    exception, *args)
                response = pool.urlopen("GET",
                                        "/",
                                        retries=1,
                                        preload_content=False)
                assert pool.pool.qsize() == 0
                assert pool.num_connections == 2
                assert response.connection is not None

                response.release_conn()
                assert pool.pool.qsize() == 1
                assert response.connection is None
Example No. 37
    def test_same_host_no_port(self):
        # This test was introduced in #801 to deal with the fact that urllib3
        # never initializes ConnectionPool objects with port=None.
        same_host_http = [
            ('google.com', '/'),
            ('google.com', 'http://google.com/'),
            ('google.com', 'http://google.com'),
            ('google.com', 'http://google.com/abra/cadabra'),
            # Test comparison using default ports
            ('google.com', 'http://google.com:80/abracadabra'),
        ]
        same_host_https = [
            ('google.com', '/'),
            ('google.com', 'https://google.com/'),
            ('google.com', 'https://google.com'),
            ('google.com', 'https://google.com/abra/cadabra'),
            # Test comparison using default ports
            ('google.com', 'https://google.com:443/abracadabra'),
        ]

        for a, b in same_host_http:
            c = HTTPConnectionPool(a)
            self.assertTrue(c.is_same_host(b), "%s =? %s" % (a, b))
        for a, b in same_host_https:
            c = HTTPSConnectionPool(a)
            self.assertTrue(c.is_same_host(b), "%s =? %s" % (a, b))

        not_same_host_http = [
            ('google.com', 'https://google.com/'),
            ('yahoo.com', 'http://google.com/'),
            ('google.com', 'https://google.net/'),
        ]
        not_same_host_https = [
            ('google.com', 'http://google.com/'),
            ('yahoo.com', 'https://google.com/'),
            ('google.com', 'https://google.net/'),
        ]

        for a, b in not_same_host_http:
            c = HTTPConnectionPool(a)
            self.assertFalse(c.is_same_host(b), "%s =? %s" % (a, b))
            c = HTTPConnectionPool(b)
            self.assertFalse(c.is_same_host(a), "%s =? %s" % (b, a))
        for a, b in not_same_host_https:
            c = HTTPSConnectionPool(a)
            self.assertFalse(c.is_same_host(b), "%s =? %s" % (a, b))
            c = HTTPSConnectionPool(b)
            self.assertFalse(c.is_same_host(a), "%s =? %s" % (b, a))
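The same comparison can be exercised directly; a quick sketch using the hosts from the test (illustrative, not part of the original file):

# Default ports are normalized before hosts are compared.
from urllib3 import HTTPConnectionPool

pool = HTTPConnectionPool("google.com")
assert pool.is_same_host("http://google.com:80/abracadabra")
assert not pool.is_same_host("https://google.com/")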