Example 1
    def test_mock_httpresponse_stream(self):
        # Mock out an HTTP request that does enough to make it through urllib3's
        # read() and close() calls, and also exhausts an underlying file
        # object.
        class MockHTTPRequest(object):
            fp = None  # class-level default; each instance gets a real fake fp below

            @asyncio.coroutine
            def read(self, amt=None):
                data = yield from self.fp.read(amt)
                if not data:
                    self.fp = None

                return data

            def close(self):
                self.fp = None

        #bio = BytesIO(b'foo')
        bio = self._fake_fp(b'foo')
        fp = MockHTTPRequest()
        fp.fp = bio
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2)

        self.assertEqual(next(stream), b'fo')
        self.assertEqual(next(stream), b'o')
        self.assertRaises(StopIteration, next, stream)
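Several tests in this listing call a self._fake_fp helper that is not shown here. Purely as an assumption about what such a helper might do, a minimal sketch is to expose the bytes behind a coroutine-based read(), the same way the chunked-decoding tests below feed an asyncio.StreamReader by hand:

    import asyncio

    def _fake_fp(data):
        # Hypothetical stand-in for the tests' self._fake_fp helper (not shown
        # in this listing): wrap raw bytes in an asyncio.StreamReader so that
        # read() is a coroutine and EOF is already signalled.
        reader = asyncio.StreamReader()
        reader.feed_data(data)
        reader.feed_eof()
        return reader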
Example 2
    def tst_io_bufferedreader(self):

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)
        br = BufferedReader(resp)

        self.assertEqual(br.read(), b'foo')

        br.close()
        self.assertEqual(resp.closed, True)

        b = b'fooandahalf'
        fp = self._fake_fp(b)
        #fp = BytesIO(b)
        resp = HTTPResponse(fp, preload_content=False)
        br = BufferedReader(resp, 5)

        br.read(1)  # sets up the buffer, reading 5
        self.assertEqual(len(fp.read()), len(b) - 5)

        # This is necessary to make sure the "no bytes left" part of `readinto`
        # gets tested.
        while not br.closed:
            br.read(5)
Example 3
    def tst_io(self):
        import socket
        from yieldfrom.http.client import HTTPResponse as OldHTTPResponse

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)

        self.assertEqual(resp.closed, False)
        self.assertEqual(resp.readable(), True)
        self.assertEqual(resp.writable(), False)
        self.assertRaises(IOError, resp.fileno)

        resp.close()
        self.assertEqual(resp.closed, True)

        # Try closing with an `httplib.HTTPResponse`, because it has an
        # `isclosed` method.
        hlr = OldHTTPResponse(socket.socket())
        resp2 = HTTPResponse(hlr, preload_content=False)
        self.assertEqual(resp2.closed, False)
        resp2.close()
        self.assertEqual(resp2.closed, True)

        #also try when only data is present.
        resp3 = HTTPResponse('foodata')
        self.assertRaises(IOError, resp3.fileno)

        resp3._fp = 2
        # A corner case where _fp is present but doesn't have `closed`,
        # `isclosed`, or `fileno`.  Unlikely, but possible.
        self.assertEqual(resp3.closed, True)
        self.assertRaises(IOError, resp3.fileno)
Example 4
    def test_mock_httpresponse_stream(self):
        # Mock out an HTTP request that does enough to make it through urllib3's
        # read() and close() calls, and also exhausts an underlying file
        # object.
        class MockHTTPRequest(object):
            fp = None  # class-level default; each instance gets a real fake fp below

            @asyncio.coroutine
            def read(self, amt=None):
                data = yield from self.fp.read(amt)
                if not data:
                    self.fp = None

                return data

            def close(self):
                self.fp = None

        #bio = BytesIO(b'foo')
        bio = self._fake_fp(b'foo')
        fp = MockHTTPRequest()
        fp.fp = bio
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2)

        self.assertEqual(next(stream), b'fo')
        self.assertEqual(next(stream), b'o')
        self.assertRaises(StopIteration, next, stream)
Example 5
    def test_empty_stream(self):

        fp = self._fake_fp(b'')
        #fp = BytesIO(b'')
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2, decode_content=False)

        self.assertRaises(StopIteration, next, stream)
Example 6
    def test_streaming(self):
        fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2, decode_content=False)

        self.assertEqual(next(stream), b'fo')
        self.assertEqual(next(stream), b'o')
        self.assertRaises(StopIteration, next, stream)
Example 7
    def test_streaming(self):
        fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2, decode_content=False)

        self.assertEqual(next(stream), b'fo')
        self.assertEqual(next(stream), b'o')
        self.assertRaises(StopIteration, next, stream)
Example 8
    def test_empty_stream(self):

        fp = self._fake_fp(b'')
        #fp = BytesIO(b'')
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2, decode_content=False)

        self.assertRaises(StopIteration, next, stream)
Example 9
    def test_deflate_streaming(self):
        import zlib
        data = zlib.compress(b'foo')

        fp = self._fake_fp(data)
        resp = HTTPResponse(fp, headers={'content-encoding': 'deflate'},
                         preload_content=False)
        stream = resp.stream(2)

        self.assertEqual(next(stream), b'f')
        self.assertEqual(next(stream), b'oo')
        self.assertRaises(StopIteration, next, stream)
Example 10
    def test_deflate_streaming(self):
        import zlib
        data = zlib.compress(b'foo')

        fp = self._fake_fp(data)
        resp = HTTPResponse(fp,
                            headers={'content-encoding': 'deflate'},
                            preload_content=False)
        stream = resp.stream(2)

        self.assertEqual(next(stream), b'f')
        self.assertEqual(next(stream), b'oo')
        self.assertRaises(StopIteration, next, stream)
Example 11
    def test_deflate2_streaming(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
        data = compress.compress(b'foo')
        data += compress.flush()

        fp = self._fake_fp(data)
        resp = HTTPResponse(fp, headers={'content-encoding': 'deflate'},
                         preload_content=False)
        stream = resp.stream(2)

        self.assertEqual(next(stream), b'f')
        self.assertEqual(next(stream), b'oo')
        self.assertRaises(StopIteration, next, stream)
Example 12
    def test_deflate2_streaming(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
        data = compress.compress(b'foo')
        data += compress.flush()

        fp = self._fake_fp(data)
        resp = HTTPResponse(fp,
                            headers={'content-encoding': 'deflate'},
                            preload_content=False)
        stream = resp.stream(2)

        self.assertEqual(next(stream), b'f')
        self.assertEqual(next(stream), b'oo')
        self.assertRaises(StopIteration, next, stream)
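The two deflate variants above exist because servers label both zlib-wrapped data (what zlib.compress produces) and raw DEFLATE with no zlib header as 'deflate'. A standalone zlib sketch of that distinction, independent of the HTTPResponse decoder:

    import zlib

    # zlib-wrapped "deflate" (RFC 1950): what zlib.compress() emits.
    wrapped = zlib.compress(b'foo')
    assert zlib.decompress(wrapped) == b'foo'

    # Raw "deflate" (RFC 1951): negative wbits, no zlib header or checksum.
    comp = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
    raw = comp.compress(b'foo') + comp.flush()
    assert zlib.decompress(raw, -zlib.MAX_WBITS) == b'foo'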
Example 13
    def test_chunked_decoding_deflate(self):
        import zlib
        data = zlib.compress(b'foo')

        fp = asyncio.StreamReader()
        fp.feed_data(data)
        r = HTTPResponse(fp, headers={'content-encoding': 'deflate'},
                         preload_content=False)
        #yield from r.init()
        _d1 = yield from r.read(3)
        _d2 = yield from r.read(1)
        _d3 = yield from r.read(2)
        self.assertEqual(_d1, b'')
        self.assertEqual(_d2, b'f')
        self.assertEqual(_d3, b'oo')
Example 14
    def test_gzipped_streaming(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        data = compress.compress(b'foo')
        data += compress.flush()

        #fp = BytesIO(data)
        fp = self._fake_fp(data)
        resp = HTTPResponse(fp, headers={'content-encoding': 'gzip'},
                         preload_content=False)
        stream = yield from resp.stream(2)

        self.assertEqual(next(stream), b'fo')
        self.assertEqual(next(stream), b'o')
        self.assertRaises(StopIteration, next, stream)
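The gzip tests build their payload with 16 + zlib.MAX_WBITS, which makes zlib wrap the DEFLATE stream in a gzip header and trailer. A standalone sketch of that framing, separate from the response decoder:

    import zlib

    # 16 + MAX_WBITS selects the gzip wrapper (header plus CRC32/size trailer).
    comp = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    gz = comp.compress(b'foo') + comp.flush()
    assert gz[:2] == b'\x1f\x8b'  # gzip magic bytes
    assert zlib.decompress(gz, 16 + zlib.MAX_WBITS) == b'foo'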
Example 15
    def test_preload(self):
        fp = self._fake_fp(b'foo')

        r = HTTPResponse(fp, preload_content=True)

        #self.assertEqual(fp.tell(), len(b'foo'))
        self.assertEqual((yield from r.data), b'foo')
Example 16
    def test_gzipped_streaming(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        data = compress.compress(b'foo')
        data += compress.flush()

        #fp = BytesIO(data)
        fp = self._fake_fp(data)
        resp = HTTPResponse(fp,
                            headers={'content-encoding': 'gzip'},
                            preload_content=False)
        stream = yield from resp.stream(2)

        self.assertEqual(next(stream), b'fo')
        self.assertEqual(next(stream), b'o')
        self.assertRaises(StopIteration, next, stream)
Example 17
    def test_chunked_decoding_deflate(self):
        import zlib
        data = zlib.compress(b'foo')

        fp = asyncio.StreamReader()
        fp.feed_data(data)
        r = HTTPResponse(fp,
                         headers={'content-encoding': 'deflate'},
                         preload_content=False)
        #yield from r.init()
        _d1 = yield from r.read(3)
        _d2 = yield from r.read(1)
        _d3 = yield from r.read(2)
        self.assertEqual(_d1, b'')
        self.assertEqual(_d2, b'f')
        self.assertEqual(_d3, b'oo')
Example 18
    def test_no_preload(self):
        fp = self._fake_fp(b'foo')

        r = HTTPResponse(fp, preload_content=False)

        #self.assertEqual(fp.tell(), 0)
        _d = yield from r.data
        self.assertEqual(_d, b'foo')
Example 19
    def test_decode_deflate_case_insensitve(self):
        import zlib
        data = zlib.compress(b'foo')

        fp = self._fake_fp(data)
        r = HTTPResponse(fp, headers={'content-encoding': 'DeFlAtE'})

        self.assertEqual((yield from r.data), b'foo')
Example 20
    def test_streaming_tell(self):

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2, decode_content=False)

        position = 0

        position += len(next(stream))
        self.assertEqual(2, position)
        self.assertEqual(position, resp.tell())

        position += len(next(stream))
        self.assertEqual(3, position)
        self.assertEqual(position, resp.tell())

        self.assertRaises(StopIteration, next, stream)
Example 21
    def test_streaming_tell(self):

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)
        stream = yield from resp.stream(2, decode_content=False)

        position = 0

        position += len(next(stream))
        self.assertEqual(2, position)
        self.assertEqual(position, resp.tell())

        position += len(next(stream))
        self.assertEqual(3, position)
        self.assertEqual(position, resp.tell())

        self.assertRaises(StopIteration, next, stream)
Example 22
    def test_chunked_decoding_gzip(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        data = compress.compress(b'foo')
        data += compress.flush()

        fp = asyncio.StreamReader()
        fp.feed_data(data)
        r = HTTPResponse(fp, headers={'content-encoding': 'gzip'},
                         preload_content=False)

        yield from r.init()
        _d1 = yield from r.read(10)
        self.assertEqual(_d1, b'')
        _d2 = yield from r.read(5)
        self.assertEqual(_d2, b'foo')
        _d3 = yield from r.read(2)
        self.assertEqual(_d3, b'')
Example 23
    def test_io_readinto(self):
        # This test is necessary because in py2.6, `readinto` doesn't get called
        # in `test_io_bufferedreader` like it does for all the other python
        # versions.  Probably this is because the `io` module in py2.6 is an
        # old version that has a different underlying implementation.

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)

        barr = bytearray(3)
        amtRead = yield from resp.readinto(barr)
        assert amtRead == 3
        assert b'foo' == barr

        # The reader should already be empty, so this should read nothing.
        amtRead = yield from resp.readinto(barr)
        assert amtRead == 0
        assert b'foo' == barr
Example 24
    def test_io_readinto(self):
        # This test is necessary because in py2.6, `readinto` doesn't get called
        # in `test_io_bufferedreader` like it does for all the other python
        # versions.  Probably this is because the `io` module in py2.6 is an
        # old version that has a different underlying implementation.

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)

        barr = bytearray(3)
        amtRead = yield from resp.readinto(barr)
        assert amtRead == 3
        assert b'foo' == barr

        # The reader should already be empty, so this should read nothing.
        amtRead = yield from resp.readinto(barr)
        assert amtRead == 0
        assert b'foo' == barr
Example 25
    def test_gzipped_streaming_tell(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        uncompressed_data = b'foo'
        data = compress.compress(uncompressed_data)
        data += compress.flush()

        #fp = BytesIO(data)
        fp = self._fake_fp(data)
        resp = HTTPResponse(fp, headers={'content-encoding': 'gzip'},
                         preload_content=False)
        stream = yield from resp.stream()

        # Read everything
        payload = next(stream)
        self.assertEqual(payload, uncompressed_data)

        self.assertEqual(len(data), resp.tell())

        self.assertRaises(StopIteration, next, stream)
Example 26
    def test_gzipped_streaming_tell(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        uncompressed_data = b'foo'
        data = compress.compress(uncompressed_data)
        data += compress.flush()

        #fp = BytesIO(data)
        fp = self._fake_fp(data)
        resp = HTTPResponse(fp,
                            headers={'content-encoding': 'gzip'},
                            preload_content=False)
        stream = yield from resp.stream()

        # Read everything
        payload = next(stream)
        self.assertEqual(payload, uncompressed_data)

        self.assertEqual(len(data), resp.tell())

        self.assertRaises(StopIteration, next, stream)
Example 27
    def test_chunked_decoding_gzip(self):
        import zlib
        compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        data = compress.compress(b'foo')
        data += compress.flush()

        fp = asyncio.StreamReader()
        fp.feed_data(data)
        r = HTTPResponse(fp,
                         headers={'content-encoding': 'gzip'},
                         preload_content=False)

        yield from r.init()
        _d1 = yield from r.read(10)
        self.assertEqual(_d1, b'')
        _d2 = yield from r.read(5)
        self.assertEqual(_d2, b'foo')
        _d3 = yield from r.read(2)
        self.assertEqual(_d3, b'')
Example 28
    def tst_deflate_streaming_tell_intermediate_point(self):

        # test not relevant any longer, now that 'stream' is just a cached
        #  set of blocks

        # Ensure that ``tell()`` returns the correct number of bytes when
        # part-way through streaming compressed content.
        import zlib

        NUMBER_OF_READS = 10

        class MockCompressedDataReading(BytesIO):
            """
            A BytesIO-like reader returning ``payload`` in ``NUMBER_OF_READS``
            calls to ``read``.
            """

            def __init__(self, payload, payload_part_size):
                self.payloads = [
                    payload[i*payload_part_size:(i+1)*payload_part_size]
                             for i in range(NUMBER_OF_READS+1)]

                assert b"".join(self.payloads) == payload

            def read(self, _=None):
                # Amount is unused.
                yield None

                return b''.join(self.payloads)

                #if len(self.payloads) > 0:
                #    return self.payloads.pop(0)
                #return b""


        uncompressed_data = zlib.decompress(ZLIB_PAYLOAD)

        payload_part_size = len(ZLIB_PAYLOAD) // NUMBER_OF_READS
        fp = MockCompressedDataReading(ZLIB_PAYLOAD, payload_part_size)
        resp = HTTPResponse(fp, headers={'content-encoding': 'deflate'},
                            preload_content=False)
        stream = yield from resp.stream(payload_part_size)

        parts_positions = []
        for part in stream:
            _t = resp.tell()
            parts_positions.append((part, _t))
        end_of_stream = resp.tell()

        self.assertRaises(StopIteration, next, stream)

        parts, positions = zip(*parts_positions)

        # Check that the payload is equal to the uncompressed data
        payload = b"".join(parts)
        self.assertEqual(uncompressed_data, payload)

        # Check that the positions in the stream are correct
        expected = [(i+1)*payload_part_size for i in range(NUMBER_OF_READS)]
        self.assertEqual(expected, list(positions))

        # Check that the end of the stream is in the correct place
        self.assertEqual(len(ZLIB_PAYLOAD), end_of_stream)
Example 29
    def tst_deflate_streaming_tell_intermediate_point(self):

        # test not relevant any longer, now that 'stream' is just a cached
        #  set of blocks

        # Ensure that ``tell()`` returns the correct number of bytes when
        # part-way through streaming compressed content.
        import zlib

        NUMBER_OF_READS = 10

        class MockCompressedDataReading(BytesIO):
            """
            A BytesIO-like reader returning ``payload`` in ``NUMBER_OF_READS``
            calls to ``read``.
            """
            def __init__(self, payload, payload_part_size):
                self.payloads = [
                    payload[i * payload_part_size:(i + 1) * payload_part_size]
                    for i in range(NUMBER_OF_READS + 1)
                ]

                assert b"".join(self.payloads) == payload

            def read(self, _=None):
                # Amount is unused.
                yield None

                return b''.join(self.payloads)

                #if len(self.payloads) > 0:
                #    return self.payloads.pop(0)
                #return b""

        uncompressed_data = zlib.decompress(ZLIB_PAYLOAD)

        payload_part_size = len(ZLIB_PAYLOAD) // NUMBER_OF_READS
        fp = MockCompressedDataReading(ZLIB_PAYLOAD, payload_part_size)
        resp = HTTPResponse(fp,
                            headers={'content-encoding': 'deflate'},
                            preload_content=False)
        stream = yield from resp.stream(payload_part_size)

        parts_positions = []
        for part in stream:
            _t = resp.tell()
            parts_positions.append((part, _t))
        end_of_stream = resp.tell()

        self.assertRaises(StopIteration, next, stream)

        parts, positions = zip(*parts_positions)

        # Check that the payload is equal to the uncompressed data
        payload = b"".join(parts)
        self.assertEqual(uncompressed_data, payload)

        # Check that the positions in the stream are correct
        expected = [(i + 1) * payload_part_size
                    for i in range(NUMBER_OF_READS)]
        self.assertEqual(expected, list(positions))

        # Check that the end of the stream is in the correct place
        self.assertEqual(len(ZLIB_PAYLOAD), end_of_stream)
Example 30
    def tst_io(self):
        import socket
        from yieldfrom.http.client import HTTPResponse as OldHTTPResponse

        fp = self._fake_fp(b'foo')
        #fp = BytesIO(b'foo')
        resp = HTTPResponse(fp, preload_content=False)

        self.assertEqual(resp.closed, False)
        self.assertEqual(resp.readable(), True)
        self.assertEqual(resp.writable(), False)
        self.assertRaises(IOError, resp.fileno)

        resp.close()
        self.assertEqual(resp.closed, True)

        # Try closing with an `httplib.HTTPResponse`, because it has an
        # `isclosed` method.
        hlr = OldHTTPResponse(socket.socket())
        resp2 = HTTPResponse(hlr, preload_content=False)
        self.assertEqual(resp2.closed, False)
        resp2.close()
        self.assertEqual(resp2.closed, True)

        #also try when only data is present.
        resp3 = HTTPResponse('foodata')
        self.assertRaises(IOError, resp3.fileno)

        resp3._fp = 2
        # A corner case where _fp is present but doesn't have `closed`,
        # `isclosed`, or `fileno`.  Unlikely, but possible.
        self.assertEqual(resp3.closed, True)
        self.assertRaises(IOError, resp3.fileno)
Example 31
    def test_body_blob(self):
        resp = HTTPResponse(b'foo')
        _d = yield from resp.data
        self.assertEqual(_d, b'foo')
        self.assertTrue(resp.closed)
Example 32
    def test_cache_content(self):
        r = HTTPResponse('foo')
        _d = yield from r.data
        self.assertEqual(_d, 'foo')
        self.assertEqual(r._body, 'foo')
Example 33
    def test_default(self):
        r = HTTPResponse()
        self.assertEqual((yield from r.data), None)
Example 34
    def test_none(self):
        r = HTTPResponse(None)
        _d = yield from r.data
        self.assertEqual(_d, None)
Example 35
    def test_decode_bad_data(self):
        fp = asyncio.StreamReader()
        fp.feed_data(b'\x00' * 10)
        fp.feed_eof()
        t = HTTPResponse(fp, headers={'content-encoding': 'deflate'})
        yield from self.aioAssertRaises(DecodeError, t.init)
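Ten zero bytes are not a valid zlib stream, so plain zlib rejects them outright; the test above expects the response layer to surface that failure as DecodeError. A standalone illustration of the underlying zlib error:

    import zlib

    # b'\x00' * 10 has no valid zlib header, so decompression fails outright;
    # the test above expects init() to re-raise this as DecodeError.
    try:
        zlib.decompress(b'\x00' * 10)
    except zlib.error as exc:
        print('zlib rejected the payload:', exc)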
Example 36
    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a (`connect timeout, read
            timeout <user/advanced.html#timeouts>`_) tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = yield from conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=Retry(self.max_retries, read=False),
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=timeout)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    r = low_conn.getresponse()
                    resp = yield from HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
                else:
                    # All is well, return the connection to the pool.
                    conn._put_conn(low_conn)

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                raise ConnectTimeout(e, request=request)

            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
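The chunked branch of send frames each body part as its length in hex, CRLF, the bytes, CRLF, and ends the stream with a zero-length chunk. A minimal standalone sketch of that wire format (frame_chunks is a hypothetical helper, not part of the adapter):

    def frame_chunks(body_parts):
        # Mirror the wire format produced by the low_conn.send() calls above.
        framed = b''
        for part in body_parts:
            framed += hex(len(part))[2:].encode('utf-8') + b'\r\n' + part + b'\r\n'
        return framed + b'0\r\n\r\n'

    assert frame_chunks([b'foo', b'barbaz']) == b'3\r\nfoo\r\n6\r\nbarbaz\r\n0\r\n\r\n'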
Example 37
    def test_get_case_insensitive_headers(self):
        headers = {'host': 'example.com'}
        r = HTTPResponse(headers=headers)
        self.assertEqual(r.headers.get('host'), 'example.com')
        self.assertEqual(r.headers.get('Host'), 'example.com')
Example 38
    def test_getheader(self):
        headers = {'host': 'example.com'}
        r = HTTPResponse(headers=headers)
        self.assertEqual(r.getheader('host'), 'example.com')
Example 39
    def test_getheader(self):
        headers = {'host': 'example.com'}
        r = HTTPResponse(headers=headers)
        self.assertEqual(r.getheader('host'), 'example.com')
Example 40
    def send(self,
             request,
             stream=False,
             timeout=None,
             verify=True,
             cert=None,
             proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a (`connect timeout, read
            timeout <user/advanced.html#timeouts>`_) tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        chunked = not (request.body is None
                       or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = yield from conn.urlopen(method=request.method,
                                               url=url,
                                               body=request.body,
                                               headers=request.headers,
                                               redirect=False,
                                               assert_same_host=False,
                                               preload_content=False,
                                               decode_content=False,
                                               retries=Retry(self.max_retries,
                                                             read=False),
                                               timeout=timeout)

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=timeout)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    r = low_conn.getresponse()
                    resp = yield from HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False)
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
                else:
                    # All is well, return the connection to the pool.
                    conn._put_conn(low_conn)

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                raise ConnectTimeout(e, request=request)

            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)