Example #1
0
 def test_connection_refused(self):
     """Requests against a port with no listener must raise MaxRetryError."""
     # The socket server is deliberately started only after the request,
     # so the connection attempt is refused.
     refused_pool = HTTPConnectionPool(self.host, self.port)
     self.assertRaises(MaxRetryError, refused_pool.request, 'GET', '/')
     self._start_server(lambda x: None)
 def test_bad_connect(self):
     """A bogus hostname must exhaust retries with a ProtocolError cause."""
     bad_pool = HTTPConnectionPool('badhost.invalid', self.port)
     with self.assertRaises(MaxRetryError) as ctx:
         bad_pool.request('GET', '/', retries=5)
     self.assertTrue(isinstance(ctx.exception.reason, ProtocolError),
                     ctx.exception.reason)
    def test_timeout(self):
        """Short timeouts must raise SocketTimeout/TimeoutError on a slow URL."""
        url = '/sleep?seconds=0.005'
        timeout = 0.001

        # Timeout configured on the pool itself.
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        conn = pool._get_conn()
        self.assertRaises(SocketTimeout, pool._make_request, conn, 'GET', url)
        pool._put_conn(conn)

        self.assertRaises(TimeoutError, pool.request, 'GET', url)

        # Timeout supplied per request overrides the (generous) pool default.
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        self.assertRaises(SocketTimeout, pool._make_request, conn, 'GET', url,
                          timeout=timeout)
        pool._put_conn(conn)

        self.assertRaises(TimeoutError, pool.request, 'GET', url,
                          timeout=timeout)
Example #4
0
 def test_bad_connect(self):
     """Connecting to an invalid host must fail with NewConnectionError."""
     pool = HTTPConnectionPool('badhost.invalid', self.port)
     with self.assertRaises(MaxRetryError) as ctx:
         pool.request('GET', '/', retries=5)
     self.assertEqual(type(ctx.exception.reason), NewConnectionError)
Example #5
0
    def test_headers_are_sent_with_the_original_case(self):
        """Header names given to request() must reach the wire unmodified."""
        headers = {'foo': 'bar', 'bAz': 'quux'}
        parsed_headers = {}

        def socket_handler(listener):
            # Read the full request, then record every header line verbatim.
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            for line in buf.split(b'\r\n')[1:]:
                if not line:
                    continue
                key, value = line.split(b': ')
                parsed_headers[key.decode('ascii')] = value.decode('ascii')

            # Minimal empty response so the client completes cleanly.
            sock.send((
                'HTTP/1.1 204 No Content\r\n'
                'Content-Length: 0\r\n'
                '\r\n').encode('utf-8'))

            sock.close()

        self._start_server(socket_handler)
        expected_headers = {
            'Accept-Encoding': 'identity',
            'Host': '{0}:{1}'.format(self.host, self.port),
        }
        expected_headers.update(headers)

        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        pool.request('GET', '/', headers=HTTPHeaderDict(headers))
        self.assertEqual(expected_headers, parsed_headers)
Example #6
0
    def test_timeout(self):
        """Pool-level, per-request, and numeric read timeouts must all fire."""
        url = "/sleep?seconds=0.005"
        timeout = util.Timeout(read=0.001)

        # Read timeout baked into the pool.
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        conn = pool._get_conn()
        with self.assertRaises(ReadTimeoutError):
            pool._make_request(conn, "GET", url)
        pool._put_conn(conn)

        with self.assertRaises(ReadTimeoutError):
            pool.request("GET", url)

        # Per-request timeout beats the pool's (larger) default.
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        with self.assertRaises(ReadTimeoutError):
            pool._make_request(conn, "GET", url, timeout=timeout)
        pool._put_conn(conn)

        with self.assertRaises(ReadTimeoutError):
            pool.request("GET", url, timeout=timeout)

        # A bare int/float timeout is also honored by request/_make_request.
        with self.assertRaises(ReadTimeoutError):
            pool.request("GET", url, timeout=0.001)
        conn = pool._new_conn()
        with self.assertRaises(ReadTimeoutError):
            pool._make_request(conn, "GET", url, timeout=0.001)
        pool._put_conn(conn)

        # A comfortably large numeric timeout must not raise.
        pool.request("GET", url, timeout=5)
Example #7
0
    def test_headers_are_sent_with_the_original_case(self):
        """The exact header casing passed by the caller must hit the wire."""
        headers = {"foo": "bar", "bAz": "quux"}
        parsed_headers = {}

        def socket_handler(listener):
            sock = listener.accept()[0]

            # Drain the request until the blank line ending the headers.
            buf = b""
            while not buf.endswith(b"\r\n\r\n"):
                buf += sock.recv(65536)

            for raw in buf.split(b"\r\n")[1:]:
                if raw:
                    key, value = raw.split(b": ")
                    parsed_headers[key.decode()] = value.decode()

            # Empty 204 reply so the request completes.
            sock.send(("HTTP/1.1 204 No Content\r\n" "Content-Length: 0\r\n" "\r\n").encode("utf-8"))

            sock.close()

        self._start_server(socket_handler)
        expected_headers = {"Accept-Encoding": "identity", "Host": "{0}:{1}".format(self.host, self.port)}
        expected_headers.update(headers)

        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        pool.request("GET", "/", headers=HTTPHeaderDict(headers))
        self.assertEqual(expected_headers, parsed_headers)
Example #8
0
    def test_release_conn_parameter(self):
        """release_conn=False must leave the connection checked out."""
        pool_size = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=pool_size)
        assert pool.pool.qsize() == pool_size

        # The request holds on to its connection instead of returning it.
        pool.request("GET", "/", release_conn=False, preload_content=False)
        assert pool.pool.qsize() == pool_size - 1
Example #9
0
 def test_timeout(self):
     """A request outlasting the pool timeout must raise TimeoutError."""
     pool = HTTPConnectionPool(self.host, self.port, timeout=0.01)
     with self.assertRaises(TimeoutError):
         # Server sleeps 0.02s — twice the configured timeout.
         pool.request('GET', '/sleep',
                      fields={'seconds': '0.02'})
 def test_source_address_error(self):
     """Binding to any invalid source address must fail to connect."""
     for bad_addr in INVALID_SOURCE_ADDRESSES:
         pool = HTTPConnectionPool(self.host, self.port,
                                   source_address=bad_addr, retries=False)
         with pytest.raises(NewConnectionError):
             pool.request("GET", "/source_address?{0}".format(bad_addr))
Example #11
0
    def test_release_conn_parameter(self):
        """A request made with release_conn=False must not refill the pool."""
        maxsize = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=maxsize)
        self.assertEqual(pool.pool.qsize(), maxsize)

        # The connection is kept out of the pool until explicitly released.
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), maxsize - 1)
Example #12
0
    def test_release_conn_parameter(self):
        """Holding a connection (release_conn=False) shrinks the idle pool."""
        size = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=size)
        self.assertEqual(pool.pool.qsize(), size)

        # Unreleased connection: one fewer idle slot afterwards.
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), size - 1)
Example #13
0
    def test_keepalive_close(self):
        """A ``Connection: close`` response must leave the pooled connection's
        socket torn down, while ``Connection: keep-alive`` must preserve it
        for reuse on the next request."""
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)
        self.addCleanup(pool.close)

        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn._sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET',
                         '/keepalive?close=0',
                         retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn._sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn._sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')
Example #14
0
 def test_source_address_error(self):
     """Invalid source addresses must produce NewConnectionError."""
     # FIXME: This assert flakes sometimes. Not sure why.
     for addr in INVALID_SOURCE_ADDRESSES:
         pool = HTTPConnectionPool(self.host, self.port,
                                   source_address=addr, retries=False)
         with pytest.raises(NewConnectionError):
             pool.request("GET", "/source_address?{0}".format(addr))
Example #15
0
    def test_connection_count_bigpool(self):
        """Sequential requests reuse one connection even in a big pool."""
        big_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        for _ in range(3):
            big_pool.request('GET', '/')

        # One socket served all three requests despite maxsize=16.
        self.assertEqual(big_pool.num_connections, 1)
        self.assertEqual(big_pool.num_requests, 3)
Example #16
0
 def test_connection_error_retries(self):
     """ECONNREFUSED with retries enabled must surface NewConnectionError."""
     port = find_unused_port()
     pool = HTTPConnectionPool(self.host, port)
     with self.assertRaises(MaxRetryError) as ctx:
         pool.request('GET', '/', retries=Retry(connect=3))
     self.assertEqual(type(ctx.exception.reason), NewConnectionError)
Example #17
0
    def test_connection_count(self):
        """Three sequential requests over a maxsize=1 pool share one socket."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        for _ in range(3):
            pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)
Example #18
0
    def test_connection_count(self):
        """The pool keeps reusing its single connection across requests."""
        single_pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        single_pool.request('GET', '/')
        single_pool.request('GET', '/')
        single_pool.request('GET', '/')

        # One connection object handled every request.
        self.assertEqual(single_pool.num_connections, 1)
        self.assertEqual(single_pool.num_requests, 3)
Example #19
0
 def test_connection_error_retries(self):
     """A refused connection, retried 3 times, still ends in NewConnectionError."""
     unused_port = find_unused_port()
     pool = HTTPConnectionPool(self.host, unused_port)
     with self.assertRaises(MaxRetryError) as ctx:
         pool.request('GET', '/', retries=Retry(connect=3))
     self.assertEqual(type(ctx.exception.reason), NewConnectionError)
Example #20
0
    def test_connection_count_bigpool(self):
        """A large pool still funnels serial requests through one socket."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        for _ in range(3):
            http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)
 def test_connection_error_retries(self):
     """ECONNREFUSED should bubble up as a ProtocolError after retries."""
     port = find_unused_port()
     pool = HTTPConnectionPool(self.host, port)
     with self.assertRaises(MaxRetryError) as ctx:
         pool.request('GET', '/', retries=Retry(connect=3))
     reason = ctx.exception.reason
     self.assertTrue(isinstance(reason, ProtocolError))
     self.assertEqual(reason.args[1].errno, errno.ECONNREFUSED)
Example #22
0
 def test_connection_error_retries(self):
     """A refused port must raise ProtocolError carrying ECONNREFUSED."""
     closed_port = find_unused_port()
     pool = HTTPConnectionPool(self.host, closed_port)
     try:
         pool.request('GET', '/', retries=Retry(connect=3))
     except MaxRetryError as e:
         self.assertTrue(isinstance(e.reason, ProtocolError))
         self.assertEqual(e.reason.args[1].errno, errno.ECONNREFUSED)
     else:
         self.fail("Should have failed with a connection error.")
    def test_connection_count_bigpool(self):
        """maxsize=16 does not create extra connections for serial traffic."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)
        self.addCleanup(http_pool.close)

        for _ in range(3):
            http_pool.request("GET", "/")

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)
    def test_connection_count(self):
        """Serial requests on a maxsize=1 pool reuse the same connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)

        for _ in range(3):
            pool.request("GET", "/")

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)
Example #25
0
    def test_lazy_load_twice(self):
        """Exercise two overlapping lazily-read responses on a maxsize=1
        blocking pool: the second request is expected to hit EmptyPoolError
        (pool_timeout=0.001 while request #1 still holds the connection),
        after which request #1 must still be able to finish its read."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = "foo"

        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request(
            "POST",
            "/echo",
            fields=req_data,
            multipart_boundary=boundary,
            preload_content=False,
        )

        assert r1.read(first_chunk) == resp_data[:first_chunk]

        try:
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            assert r2.read(first_chunk) == resp2_data[:first_chunk]

            assert r1.read() == resp_data[first_chunk:]
            assert r2.read() == resp2_data[first_chunk:]
            assert pool.num_requests == 2

        except EmptyPoolError:
            assert r1.read() == resp_data[first_chunk:]
            assert pool.num_requests == 1

        assert pool.num_connections == 1
Example #26
0
    def test_lazy_load_twice(self):
        """Exercise two overlapping lazily-read responses on a maxsize=1
        blocking pool: the second request is expected to hit EmptyPoolError
        (pool_timeout=0.001 while request #1 still holds the connection),
        after which request #1 must still be able to finish its read."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST',
                          '/echo',
                          fields=req_data,
                          multipart_boundary=boundary,
                          preload_content=False)

        # read() may return fewer than first_chunk bytes; compare the prefix.
        first_data = r1.read(first_chunk)
        self.assertGreater(len(first_data), 0)
        self.assertEqual(first_data, resp_data[:len(first_data)])

        try:
            r2 = pool.request('POST',
                              '/echo',
                              fields=req2_data,
                              multipart_boundary=boundary,
                              preload_content=False,
                              pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            second_data = r2.read(first_chunk)
            self.assertGreater(len(second_data), 0)
            self.assertEqual(second_data, resp2_data[:len(second_data)])

            self.assertEqual(r1.read(), resp_data[len(first_data):])
            self.assertEqual(r2.read(), resp2_data[len(second_data):])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[len(first_data):])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)
Example #27
0
    def test_timeout_float(self):
        """A float pool timeout raises ReadTimeoutError while the server stalls."""
        block_event = Event()
        ready_event = self.start_basic_handler(block_send=block_event, num=2)

        # First request: handler is blocked, so the read must time out.
        pool = HTTPConnectionPool(self.host, self.port, timeout=SHORT_TIMEOUT, retries=False)
        with self.assertRaises(ReadTimeoutError):
            pool.request('GET', '/')
        block_event.set()  # Release block

        # Second request: handler pre-released, so no timeout occurs.
        ready_event.wait()
        block_event.set()  # Pre-release block
        pool.request('GET', '/')
Example #28
0
    def test_timeout(self):
        """Pool-wide, per-request, and numeric read timeouts must all be honored."""
        url = '/sleep?seconds=0.005'
        timeout = util.Timeout(read=0.001)

        # Read timeout configured on the pool.
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        conn = pool._get_conn()
        with self.assertRaises(ReadTimeoutError):
            pool._make_request(conn, 'GET', url)
        pool._put_conn(conn)

        with self.assertRaises(ReadTimeoutError):
            pool.request('GET', url)

        # Per-request timeout overrides the (larger) pool default.
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        with self.assertRaises(ReadTimeoutError):
            pool._make_request(conn, 'GET', url, timeout=timeout)
        pool._put_conn(conn)

        with self.assertRaises(ReadTimeoutError):
            pool.request('GET', url, timeout=timeout)

        # Bare int/float timeouts passed to request/_make_request also fire.
        with self.assertRaises(ReadTimeoutError):
            pool.request('GET', url, timeout=0.001)
        conn = pool._new_conn()
        with self.assertRaises(ReadTimeoutError):
            pool._make_request(conn, 'GET', url, timeout=0.001)
        pool._put_conn(conn)

        # A comfortably large numeric timeout must not raise.
        pool.request('GET', url, timeout=5)
Example #29
0
    def test_keepalive_close(self):
        """A ``Connection: close`` response must leave the pooled connection's
        socket torn down, while ``Connection: keep-alive`` must preserve it
        for reuse on the next request."""
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')
Example #30
0
    def test_keepalive_close(self):
        """Verify socket lifecycle across keep-alive and close responses:
        ``Connection: close`` drops the socket of the pooled connection,
        ``Connection: keep-alive`` keeps it open for reuse."""
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')
Example #31
0
    def test_connection_timeout(self):
        """A connect that is never accepted must raise TimeoutError."""
        timed_out = Event()

        def socket_handler(listener):
            # Refuse to accept until the client has already given up.
            timed_out.wait()
            sock = listener.accept()[0]
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001)

        self.assertRaises(TimeoutError, pool.request, 'GET', '/', retries=0)

        timed_out.set()
Example #32
0
    def test_keepalive_close(self):
        """Check that ``Connection: close`` responses leave the pooled
        connection without a socket, and that ``Connection: keep-alive``
        responses keep the socket alive for reuse."""
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)
        self.addCleanup(pool.close)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertIsNone(conn.sock)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertIsNotNone(conn.sock)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertIsNone(conn.sock)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')
Example #33
0
    def test_source_address_ignored(self):
        """On old Pythons, source_address warns when given, stays silent otherwise."""
        # Omitting source_address must not trigger a PythonVersionWarning.
        with warnings.catch_warnings(record=True) as w:
            p = HTTPConnectionPool(self.host, self.port)
            assert p.request('GET', '/source_address').status == 200
            assert (
                not w or not issubclass(w[-1].category, PythonVersionWarning))

        # Supplying one on Python 2.6 and older is ignored with a warning.
        with warnings.catch_warnings(record=True) as w:
            for addr in INVALID_SOURCE_ADDRESSES:
                p = HTTPConnectionPool(
                    self.host, self.port, source_address=addr)
                assert p.request('GET', '/source_address').status == 200
            assert issubclass(w[-1].category, PythonVersionWarning)
Example #34
0
    def test_stream_none_unchunked_response_does_not_hang(self):
        """stream(None) on a Content-Length response must read it all and stop."""
        done_event = Event()

        def socket_handler(listener):
            sock = listener.accept()[0]

            # Consume the request headers.
            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            # Fixed-length (unchunked) body, then hold the socket open so a
            # hanging reader would block instead of seeing EOF.
            sock.send(b'HTTP/1.1 200 OK\r\n'
                      b'Content-Length: 12\r\n'
                      b'Content-type: text/plain\r\n'
                      b'\r\n'
                      b'hello, world')
            done_event.wait(5)
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        response = pool.request('GET', '/', timeout=1, preload_content=False)

        # The stream must terminate after the declared body length.
        self.assertEqual([b'hello, world'], list(response.stream(None)))

        done_event.set()
Example #35
0
    def _test_broken_header_parsing(self, headers):
        """Issue a request whose response headers are broken and assert that
        a 'Failed to parse headers' warning is logged for the request URL."""
        self.start_response_handler(
            (b"HTTP/1.1 200 OK\r\n" b"Content-Length: 0\r\n" b"Content-type: text/plain\r\n")
            + b"\r\n".join(headers)
            + b"\r\n"
        )

        pool = HTTPConnectionPool(self.host, self.port, retries=False)

        with LogRecorder() as logs:
            pool.request("GET", "/")

        # One matching record is enough; otherwise the test fails.
        if not any(
            "Failed to parse headers" in record.msg
            and pool._absolute_url("/") == record.args[0]
            for record in logs
        ):
            self.fail("Missing log about unparsed headers")
Example #36
0
    def test_response_headers_are_returned_in_the_original_order(self):
        """Response headers must come back in wire order, not sorted."""
        # NOTE: Probability this test gives a false negative is 1/(K!)
        K = 16
        # Headers are emitted in reversed order so any internal sorting
        # by the implementation would be detected by the final compare.
        expected_response_headers = [('X-Header-%d' % i, str(i))
                                     for i in reversed(range(K))]

        def socket_handler(listener):
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            header_lines = [k.encode('utf8') + b': ' + v.encode('utf8')
                            for (k, v) in expected_response_headers]
            sock.send(b'HTTP/1.1 200 OK\r\n' +
                      b'\r\n'.join(header_lines) +
                      b'\r\n')
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)
        r = pool.request('GET', '/', retries=0)
        actual_response_headers = [(k, v) for (k, v) in r.headers.items()
                                   if k.startswith('X-Header-')]
        self.assertEqual(expected_response_headers, actual_response_headers)
Example #37
0
    def test_response_headers_are_returned_in_the_original_order(self):
        """Header order sent by the server must survive parsing unchanged."""
        # NOTE: Probability this test gives a false negative is 1/(K!)
        K = 16
        # Feed the headers in reversed order so any internal sort is detected.
        expected_response_headers = [
            ('X-Header-%d' % i, str(i)) for i in reversed(range(K))
        ]

        def socket_handler(listener):
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            payload = b'HTTP/1.1 200 OK\r\n'
            payload += b'\r\n'.join(
                k.encode('utf8') + b': ' + v.encode('utf8')
                for (k, v) in expected_response_headers
            )
            payload += b'\r\n'
            sock.send(payload)
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)
        r = pool.request('GET', '/', retries=0)
        actual_response_headers = [
            (k, v) for (k, v) in r.headers.items()
            if k.startswith('X-Header-')
        ]
        self.assertEqual(expected_response_headers, actual_response_headers)
Example #38
0
def main():
    """Scrape CrossFit Games leaderboard pages into the configured Mongo
    collection, iterating every (year, competition, gender) combination."""
    results_url = None  # remembered so the final debug print cannot NameError
    client = None
    try:
        client = pymongo.MongoClient()
        db = client[database]
        col = db[collection]

        # A single keep-alive connection to the Games site is enough.
        pool = HTTPConnectionPool('games.crossfit.com', maxsize=1)

        start_time = time.time()

        for year in YEARS:
            for comp in COMPS:
                for gender in GENDERS:
                    results_url = (
                        "/scores/leaderboard.php?"
                        "competition={}&stage=0&division={}&region=0&"
                        "numberperpage=100&year={}&showtoggles=1&"
                        "hidedropdowns=1").format(COMPS[comp],
                                                  GENDERS[gender], year)

                    results_page = pool.request('GET', results_url,
                                                preload_content=False)
                    # NOTE(review): no parser argument given; BeautifulSoup
                    # falls back to its default parser -- confirm acceptable.
                    soup = BeautifulSoup(results_page)

                    for athlete in soup.findAll('td', 'name'):
                        scrape_games_athlete(col, athlete, year, comp)

                    print_time_update(year, comp, gender, start_time)
    finally:
        # BUGFIX: `print results_url` was Python-2-only syntax, and
        # results_url was unbound if a failure occurred before the first
        # URL was built (masking the real exception with a NameError).
        print(results_url)
        if client is not None:
            client.close()
Example #39
0
    def test_incomplete_response(self):
        """A response body shorter than its Content-Length must raise
        ProtocolError when read, not silently return truncated data."""
        body = 'Response'
        partial_body = body[:2]

        def socket_handler(listener):
            sock = listener.accept()[0]

            # Consume request.
            # BUGFIX: accumulate with += -- the original `buf = sock.recv(...)`
            # discarded earlier chunks, so a request whose terminator arrived
            # split across recv() calls would spin in this loop forever.
            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            # Send partial response and close socket.
            sock.send((
                'HTTP/1.1 200 OK\r\n'
                'Content-Type: text/plain\r\n'
                'Content-Length: %d\r\n'
                '\r\n'
                '%s' % (len(body), partial_body)).encode('utf-8')
            )
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)

        response = pool.request('GET', '/', retries=0, preload_content=False)
        self.assertRaises(ProtocolError, response.read)
Example #40
0
 def test_source_address_ignored(self):
     # source_address is ignored in Python 2.6 and older.
     for invalid_addr in INVALID_SOURCE_ADDRESSES:
         pool = HTTPConnectionPool(self.host, self.port,
                                   source_address=invalid_addr)
         response = pool.request('GET', '/source_address')
         assert response.status == 200
Example #41
0
 def test_source_address_ignored(self):
     # source_address is ignored in Python 2.6 and older.
     for bad_addr in INVALID_SOURCE_ADDRESSES:
         # The pool must still connect successfully despite the bogus
         # source address.
         pool = HTTPConnectionPool(
             host=self.host, port=self.port, source_address=bad_addr)
         resp = pool.request('GET', '/source_address')
         assert resp.status == 200
Example #42
0
    def test_incomplete_response(self):
        """Reading a body that is cut short of its Content-Length must
        raise ProtocolError."""
        body = 'Response'
        partial_body = body[:2]

        def socket_handler(listener):
            sock = listener.accept()[0]

            # Consume request.
            # BUGFIX: use += so chunks are accumulated; `buf = sock.recv(...)`
            # overwrote prior data and could loop forever when the blank-line
            # terminator straddled two recv() calls.
            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            # Send partial response and close socket.
            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n'
                       '%s' % (len(body), partial_body)).encode('utf-8'))
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)

        response = pool.request('GET', '/', retries=0, preload_content=False)
        self.assertRaises(ProtocolError, response.read)
Example #43
0
    def test_stream_none_unchunked_response_does_not_hang(self):
        # stream(None) on a fixed-length (non-chunked) body must return
        # the whole body and terminate even though the socket stays open.
        done_event = Event()

        def socket_handler(listener):
            sock = listener.accept()[0]

            # Drain the request headers.
            request = b''
            while not request.endswith(b'\r\n\r\n'):
                request += sock.recv(65536)

            sock.send(b'HTTP/1.1 200 OK\r\n'
                      b'Content-Length: 12\r\n'
                      b'Content-type: text/plain\r\n'
                      b'\r\n'
                      b'hello, world')
            # Keep the connection open until the client has finished.
            done_event.wait(5)
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        self.addCleanup(pool.close)
        response = pool.request('GET', '/', timeout=1, preload_content=False)

        # Stream should read to the end.
        chunks = list(response.stream(None))
        self.assertEqual([b'hello, world'], chunks)

        done_event.set()
Example #44
0
    def test_enforce_content_length_no_body(self):
        # A HEAD response advertises Content-Length but carries no body;
        # streaming must yield nothing rather than wait for 22 bytes.
        done_event = Event()

        def socket_handler(listener):
            sock = listener.accept()[0]

            request = b''
            while not request.endswith(b'\r\n\r\n'):
                request += sock.recv(65536)

            sock.send(b'HTTP/1.1 200 OK\r\n'
                      b'Content-Length: 22\r\n'
                      b'Content-type: text/plain\r\n'
                      b'\r\n')
            done_event.wait(1)
            sock.close()

        self._start_server(socket_handler)
        conn = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(conn.close)

        # Test stream on 0 length body
        head_response = conn.request(
            'HEAD', url='/', preload_content=False,
            enforce_content_length=True)
        data = list(head_response.stream(1))
        self.assertEqual(len(data), 0)

        done_event.set()
Example #45
0
    def test_enforce_content_length_get(self):
        """Streaming a body shorter than its Content-Length with
        enforce_content_length=True must raise ProtocolError."""
        done_event = Event()

        def socket_handler(listener):
            sock = listener.accept()[0]

            # Consume the request headers.
            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            # Claim 22 bytes but send only 12, then close mid-body.
            sock.send(b'HTTP/1.1 200 OK\r\n'
                      b'Content-Length: 22\r\n'
                      b'Content-type: text/plain\r\n'
                      b'\r\n'
                      b'hello, world')
            done_event.wait(1)
            sock.close()

        self._start_server(socket_handler)
        conn = HTTPConnectionPool(self.host, self.port, maxsize=1)
        # FIX: close the pool on teardown -- the sibling
        # test_enforce_content_length_no_body registers this cleanup, but it
        # was missing here, leaking the connection pool.
        self.addCleanup(conn.close)

        # Test stream read when content length less than headers claim
        get_response = conn.request('GET',
                                    url='/',
                                    preload_content=False,
                                    enforce_content_length=True)
        data = get_response.stream(100)
        # Read "good" data before we try to read again.
        # This won't trigger till generator is exhausted.
        next(data)
        self.assertRaises(ProtocolError, next, data)

        done_event.set()
Example #46
0
    def _test_broken_header_parsing(self, headers):
        """Shared helper: serve a response containing the given malformed
        header lines and assert that a header-parsing warning is logged."""
        preamble = (b'HTTP/1.1 200 OK\r\n'
                    b'Content-Length: 0\r\n'
                    b'Content-type: text/plain\r\n')
        self.start_response_handler(
            preamble + b'\r\n'.join(headers) + b'\r\n')

        pool = HTTPConnectionPool(self.host, self.port, retries=False)

        with LogRecorder() as logs:
            pool.request('GET', '/')

        target_url = pool._absolute_url('/')
        for record in logs:
            if 'Failed to parse headers' in record.msg \
                    and record.args[0] == target_url:
                return
        self.fail('Missing log about unparsed headers')
Example #47
0
class API(object):
    """Thin client for the InsightePy HTTP API.

    Holds the caller's credentials and a single connection pool; every
    request is sent with the credentials merged into the query fields.
    """

    def __init__(self, client_id: str, client_secret: str, auth_token: str) -> None:
        self.print_hello_message()
        self._cid = client_id
        self._csecret = client_secret
        self._auth_token = auth_token
        # NOTE(review): assumes HOST_ADDR/HOST_PORT are module-level strings;
        # the 2-second timeout applies to every request made on this pool.
        self.pool = HTTPConnectionPool(HOST_ADDR + ':' + HOST_PORT, timeout=2)

    @staticmethod
    def print_hello_message():
        logger.info('--------------------------------')
        logger.info('----- Hello from InsightePy ----')
        logger.info('--------------------------------')

    def make_request(self, method: str, url: str, fields: Dict[str, str]) -> Response:
        """Send *method* to ROUTE_PREFIX + *url* with credentials merged in.

        Returns a parsed ``Response`` on success; returns ``None`` when the
        server replies non-200 or the payload cannot be parsed (callers must
        handle that case).
        """
        # BUGFIX: merge credentials into a copy so the caller's dict is not
        # silently mutated between calls.
        fields = dict(fields)
        fields['cid'] = self._cid
        fields['csecret'] = self._csecret
        fields['authtoken'] = self._auth_token
        logger.debug('Making request with: {}'.format(fields))
        r = self.pool.request(method, ROUTE_PREFIX + url, fields=fields)
        if r.status != 200:
            # BUGFIX: no exception is in flight on this path, so
            # traceback.format_exc() only logged "NoneType: None";
            # the status line alone is the useful information.
            logger.error('Got a NOT OK Response Code: status={}'.format(r.status))
            return None
        logger.debug('Got raw response:  {}'.format(r.data))
        try:
            response = Response(json.loads(r.data))
            logger.debug('Built Response: {}'.format(repr(response)))
            return response
        except Exception as e:
            logger.error('Encountered error while parsing response: {}'.format(str(e)))
            logger.error(traceback.format_exc())
            return None

    def say_hello(self):
        """Ping the /hello endpoint; returns the parsed Response or None."""
        logger.info('Running Say Hello')
        return self.make_request(
            'GET',
            '/hello',
            dict()
        )

    def single_extract(self, lang: str, verbatim: str, extractors: List[Extractor] = None) -> Response:
        """
        Extract insight for a single verbatim
        :param lang: language of the sentence {en/fr/de}
        :param verbatim: Unicode sentence
        :param extractors: list of feature extractors
        :return: dict response from Compute Engine
        """
        logger.info('Running Single Extract on: {}'.format(verbatim))
        return self.make_request(
            'GET',
            '/extract',
            dict(
                verbatim=verbatim,
                lang=lang,
                extractors=json.dumps([_.to_dict() for _ in extractors] if extractors else [])
            )
        )
Example #48
0
    def test_enforce_content_length_no_body(self):
        # HEAD promises 22 bytes it never sends; the stream must finish
        # immediately instead of blocking on a body that will not arrive.
        done_event = Event()

        def socket_handler(listener):
            sock = listener.accept()[0]

            received = b''
            while not received.endswith(b'\r\n\r\n'):
                received += sock.recv(65536)

            response = (b'HTTP/1.1 200 OK\r\n'
                        b'Content-Length: 22\r\n'
                        b'Content-type: text/plain\r\n'
                        b'\r\n')
            sock.send(response)
            done_event.wait(1)
            sock.close()

        self._start_server(socket_handler)
        conn = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(conn.close)

        # Test stream on 0 length body
        head_response = conn.request('HEAD', url='/', preload_content=False,
                                     enforce_content_length=True)
        data = list(head_response.stream(1))
        self.assertEqual(len(data), 0)

        done_event.set()
Example #49
0
 def test_source_address(self):
     # The /source_address endpoint echoes the connecting IP; it should
     # match the address we asked the pool to bind to.
     for source_addr in VALID_SOURCE_ADDRESSES:
         pool = HTTPConnectionPool(
             self.host, self.port,
             source_address=source_addr, retries=False)
         response = pool.request('GET', '/source_address')
         assert response.data == b(source_addr[0])
Example #50
0
 def test_httplib_headers_case_insensitive(self):
     # Response headers should compare equal regardless of the casing
     # httplib hands back.
     self.start_response_handler(
         b"HTTP/1.1 200 OK\r\n"
         b"Content-Length: 0\r\n"
         b"Content-type: text/plain\r\n"
         b"\r\n"
     )
     pool = HTTPConnectionPool(self.host, self.port, retries=False)
     HEADERS = {"Content-Length": "0", "Content-type": "text/plain"}
     response = pool.request("GET", "/")
     self.assertEqual(HEADERS, dict(response.headers.items()))  # to preserve case sensitivity
Example #51
0
 def test_source_address(self):
     # Bind each valid source address and verify the server saw it.
     for source_addr in VALID_SOURCE_ADDRESSES:
         pool = HTTPConnectionPool(self.host, self.port,
                                   source_address=source_addr,
                                   retries=False)
         response = pool.request('GET', '/source_address')
         expected = b(source_addr[0])
         assert response.data == expected, (
             "expected the response to contain the source address {addr}, "
             "but was {data}".format(data=response.data, addr=expected))
Example #52
0
 def test_source_address(self):
     # For every bindable source address, the echoed body must equal the
     # address the pool was configured with.
     for bound in VALID_SOURCE_ADDRESSES:
         pool = HTTPConnectionPool(
             self.host, self.port, source_address=bound, retries=False)
         reply = pool.request('GET', '/source_address')
         want = b(bound[0])
         assert reply.data == want, (
             "expected the response to contain the source address {addr}, "
             "but was {data}".format(data=reply.data, addr=want))
    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.
        #
        # Setup: block=True and maxsize=1 means the pool holds exactly one
        # connection and a second request must wait for it (or time out).

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = "foo"

        # Two distinct multipart payloads; /echo is expected to send each
        # request body back verbatim, so the encoded form doubles as the
        # expected response body.
        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        # preload_content=False: r1 keeps the pool's only connection checked
        # out while its body is only partially read.
        r1 = pool.request("POST", "/echo", fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # With the single connection still busy, this should exhaust the
            # pool almost immediately (pool_timeout=0.001) and raise
            # EmptyPoolError below.
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            # Expected path: only the first request went through; its
            # remaining bytes are still readable.
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        # Either way, only one physical connection was ever created.
        self.assertEqual(pool.num_connections, 1)
Example #54
0
    def test_keepalive(self):
        # Two requests on a blocking single-connection pool must reuse the
        # same socket (keep-alive), not open a second connection.
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        for _ in range(2):
            r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)
Example #55
0
    def test_empty_head_response_does_not_hang(self):
        # HEAD advertises a 256-byte body it never sends; streaming must
        # terminate instead of blocking on the missing body.
        self.start_response_handler(
            b"HTTP/1.1 200 OK\r\n"
            b"Content-Length: 256\r\n"
            b"Content-type: text/plain\r\n"
            b"\r\n"
        )
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        response = pool.request("HEAD", "/", timeout=1, preload_content=False)

        # stream will use the read method here.
        self.assertEqual(list(response.stream()), [])
    def test_timeout_float(self):
        block_event = Event()
        ready_event = self.start_basic_handler(block_send=block_event, num=2)

        # Pool-global timeout
        pool = HTTPConnectionPool(
            self.host, self.port, timeout=SHORT_TIMEOUT, retries=False)
        self.addCleanup(pool.close)

        # The handler is blocked, so the first read must time out.
        wait_for_socket(ready_event)
        with self.assertRaises(ReadTimeoutError):
            pool.request("GET", "/")
        block_event.set()  # Release block

        # Shouldn't raise this time
        wait_for_socket(ready_event)
        block_event.set()  # Pre-release block
        pool.request("GET", "/")
Example #57
0
    def _test_broken_header_parsing(self, headers):
        """Serve a 200 response whose header block includes the given
        malformed lines; pass iff urllib3 logs a header-parse warning
        naming the requested URL."""
        status_line = (b'HTTP/1.1 200 OK\r\n'
                       b'Content-Length: 0\r\n'
                       b'Content-type: text/plain\r\n')
        self.start_response_handler(
            status_line + b'\r\n'.join(headers) + b'\r\n')

        pool = HTTPConnectionPool(self.host, self.port, retries=False)

        with LogRecorder() as logs:
            pool.request('GET', '/')

        expected_url = pool._absolute_url('/')
        for record in logs:
            matches_msg = 'Failed to parse headers' in record.msg
            if matches_msg and record.args[0] == expected_url:
                return
        self.fail('Missing log about unparsed headers')
Example #58
0
    def test_keepalive(self):
        # With maxsize=1 and block=True, consecutive keep-alive requests
        # should travel over one reused connection.
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        pool.request('GET', '/keepalive?close=0')
        r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)