    def test_proxy_verified(self):
        http = proxy_from_url(self.proxy_url,
                              cert_reqs='REQUIRED',
                              ca_certs=DEFAULT_CA_BAD)
        # https_pool = http._new_pool('https', self.https_host,
        #                             self.https_port)
        # try:
        #     yield From(https_pool.request('GET', '/'))
        #     self.fail("Didn't raise SSL error with wrong CA")
        # except SSLError as e:
        #     self.assertTrue('certificate verify failed' in str(e),
        #                     "Expected 'certificate verify failed',"
        #                     "instead got: %r" % e)

        http = proxy_from_url(self.proxy_url,
                              cert_reqs='REQUIRED',
                              ca_certs=DEFAULT_CA)
        # https_pool = http._new_pool('https', self.https_host,
        #                             self.https_port)

        # conn = https_pool._new_conn()
        # self.assertEqual(conn.__class__, VerifiedHTTPSConnection)
        # yield From(https_pool.request('GET', '/'))  # Should succeed without exceptions.

        http = proxy_from_url(self.proxy_url,
                              cert_reqs='REQUIRED',
                              ca_certs=DEFAULT_CA)
        https_fail_pool = http._new_pool('https', '127.0.0.1', self.https_port)
    def test_simple(self):
        def echo_socket_handler(listener):
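            # Minimal proxy stand-in: read a single request and echo the raw
            # bytes back in the response body, so the test can check exactly
            # which request line and headers the proxy pool sent.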
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n'
                       '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
            sock.close()

        self._start_server(echo_socket_handler)
        base_url = 'http://%s:%d' % (self.host, self.port)
        proxy = proxy_from_url(base_url)

        r = yield From(proxy.request('GET', 'http://google.com/'))

        self.assertEqual(r.status, 200)
        # FIXME: The order of the headers is not predictable right now. We
        # should fix that someday (maybe when we migrate to
        # OrderedDict/MultiDict).
        self.assertEqual(
            sorted((yield From(r.data)).split(b'\r\n')),
            sorted([
                b'GET http://google.com/ HTTP/1.1',
                b'Host: google.com',
                b'Accept-Encoding: identity',
                b'Accept: */*',
                b'',
                b'',
            ]))
    def test_headers(self):
        def echo_socket_handler(listener):
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n'
                       '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
            sock.close()

        self._start_server(echo_socket_handler)
        base_url = 'http://%s:%d' % (self.host, self.port)

        # Define some proxy headers.
        proxy_headers = {'For The Proxy': 'YEAH!'}
        proxy = proxy_from_url(base_url, proxy_headers=proxy_headers)

        conn = proxy.connection_from_url('http://www.google.com/')

        r = yield From(
            conn.urlopen('GET',
                         'http://www.google.com/',
                         assert_same_host=False))

        self.assertEqual(r.status, 200)
        # FIXME: The order of the headers is not predictable right now. We
        # should fix that someday (maybe when we migrate to
        # OrderedDict/MultiDict).
        self.assertTrue(b'For The Proxy: YEAH!\r\n' in (yield From(r.data)))
    def test_proxy_pooling_ext(self):
        http = proxy_from_url(self.proxy_url)
        hc1 = http.connection_from_url(self.http_url)
        hc2 = http.connection_from_host(self.http_host, self.http_port)
        hc3 = http.connection_from_url(self.http_url_alt)
        hc4 = http.connection_from_host(self.http_host_alt, self.http_port)
        self.assertEqual(hc1, hc2)
        self.assertEqual(hc2, hc3)
        self.assertEqual(hc3, hc4)
    def test_proxy_pooling(self):
        http = proxy_from_url(self.proxy_url)
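        # All plain-HTTP requests go through the proxy, so hitting two
        # different target URLs should still leave a single pool behind.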

        for x in range(2):
            r = yield From(http.urlopen('GET', self.http_url))
        self.assertEqual(len(http.pools), 1)

        for x in range(2):
            r = yield From(http.urlopen('GET', self.http_url_alt))
        self.assertEqual(len(http.pools), 1)
    def test_proxy_conn_fail(self):
        host, port = get_unreachable_address()
        http = proxy_from_url('http://%s:%s/' % (host, port), retries=1)
        # self.aioAssertRaises(MaxRetryError, http.request, 'GET', '%s/' % self.https_url)
        self.aioAssertRaises(MaxRetryError, http.request, 'GET',
                             '%s/' % self.http_url)

        try:
            yield From(http.request('GET', '%s/' % self.http_url))
            self.fail("Failed to raise retry error.")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), ProxyError)
    def tst_connect_reconn(self):
        def proxy_ssl_one(listener):
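            # One-shot CONNECT proxy: validate the CONNECT request line,
            # acknowledge the tunnel, then wrap the socket in TLS and serve
            # a single small response before closing.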
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)
            s = buf.decode('utf-8')
            if not s.startswith('CONNECT '):
                sock.send(('HTTP/1.1 405 Method not allowed\r\n'
                           'Allow: CONNECT\r\n\r\n').encode('utf-8'))
                sock.close()
                return

            if not s.startswith('CONNECT %s:443' % (self.host,)):
                sock.send(('HTTP/1.1 403 Forbidden\r\n\r\n').encode('utf-8'))
                sock.close()
                return

            sock.send(('HTTP/1.1 200 Connection Established\r\n\r\n').encode('utf-8'))
            ssl_sock = ssl.wrap_socket(sock,
                                       server_side=True,
                                       keyfile=DEFAULT_CERTS['keyfile'],
                                       certfile=DEFAULT_CERTS['certfile'],
                                       ca_certs=DEFAULT_CA)

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += ssl_sock.recv(65536)

            ssl_sock.send(('HTTP/1.1 200 OK\r\n'
                           'Content-Type: text/plain\r\n'
                           'Content-Length: 2\r\n'
                           'Connection: close\r\n'
                           '\r\n'
                           'Hi').encode('utf-8'))
            ssl_sock.close()

        def echo_socket_handler(listener):
            proxy_ssl_one(listener)
            proxy_ssl_one(listener)

        self._start_server(echo_socket_handler)
        base_url = 'http://%s:%d' % (self.host, self.port)

        proxy = proxy_from_url(base_url)

        url = 'https://{0}'.format(self.host)
        conn = proxy.connection_from_url(url)
        r = yield From(conn.urlopen('GET', url, retries=0))
        self.assertEqual(r.status, 200)
        r = yield From(conn.urlopen('GET', url, retries=0))
        self.assertEqual(r.status, 200)
    def test_nagle_proxy(self):
        """ Test that proxy connections do not have TCP_NODELAY turned on """
        http = proxy_from_url(self.proxy_url)
        hc2 = http.connection_from_host(self.http_host, self.http_port)
        conn = yield From(hc2._get_conn())
        yield From(hc2._make_request(conn, 'GET', '/'))
        sock = conn.notSock.socket()
        tcp_nodelay_setting = sock.getsockopt(socket.IPPROTO_TCP,
                                              socket.TCP_NODELAY)
        self.assertEqual(tcp_nodelay_setting, 0,
                         ("Expected TCP_NODELAY for proxies to be set "
                          "to zero, instead was %s" % tcp_nodelay_setting))
    def test_redirect(self):
        http = proxy_from_url(self.proxy_url)

        r = yield From(
            http.request('GET',
                         '%s/redirect' % self.http_url,
                         fields={'target': '%s/' % self.http_url},
                         redirect=False))

        self.assertEqual(r.status, 303)

        r = yield From(
            http.request('GET',
                         '%s/redirect' % self.http_url,
                         fields={'target': '%s/' % self.http_url}))

        self.assertEqual(r.status, 200)
        self.assertEqual((yield From(r.data)), b'Dummy server!')
    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        """
        if proxy not in self.proxy_manager:
            proxy_headers = self.proxy_headers(proxy)
            self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return self.proxy_manager[proxy]
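
    # A minimal usage sketch (names below are illustrative, not part of this
    # class): a subclass or caller asks for the cached manager and then pulls
    # a pool from it.
    #
    #     manager = adapter.proxy_manager_for('http://127.0.0.1:3128')
    #     pool = manager.connection_from_url('http://example.com/')
    #
    # Repeated calls with the same proxy URL return the same ProxyManager
    # instance from self.proxy_manager, so connection pools are reused.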
    def test_retries(self):
        def echo_socket_handler(listener):
            sock = listener.accept()[0]
            # First request, which should fail
            sock.close()

            # Second request
            sock = listener.accept()[0]

            buf = b''
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)

            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n'
                       '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
            sock.close()

        self._start_server(echo_socket_handler)
        base_url = 'http://%s:%d' % (self.host, self.port)

        proxy = proxy_from_url(base_url)
        conn = proxy.connection_from_url('http://www.google.com')

        r = yield From(
            conn.urlopen('GET',
                         'http://www.google.com',
                         assert_same_host=False,
                         retries=1))
        self.assertEqual(r.status, 200)

        self.aioAssertRaises(ProxyError,
                             conn.urlopen,
                             'GET',
                             'http://www.google.com',
                             assert_same_host=False,
                             retries=False)
    def tst_cross_protocol_redirect(self):
        http = proxy_from_url(self.proxy_url)

        try:
            yield From(
                http.request(
                    'GET',
                    '%s/redirect' % self.http_url,
                    fields={'target': ('%s/echo?a=b' % self.https_url)},
                    timeout=0.1,
                    retries=0))
            self.fail("We don't want to follow redirects here.")

        except MaxRetryError:
            pass

        r = yield From(
            http.request('GET',
                         '%s/redirect' % self.http_url,
                         fields={'target': ('%s/echo?a=b' % self.https_url)},
                         timeout=0.5,
                         retries=1))
        self.assertEqual(r._pool.host, self.https_host)
    def test_cross_host_redirect(self):

        http = proxy_from_url(self.proxy_url)

        cross_host_location = '%s/echo?a=b' % self.http_url_alt
        try:
            yield From(
                http.request('GET',
                             '%s/redirect' % self.http_url,
                             fields={'target': cross_host_location},
                             timeout=0.1,
                             retries=0))
            self.fail("We don't want to follow redirects here.")

        except MaxRetryError:
            pass

        r = yield From(
            http.request('GET',
                         '%s/redirect' % self.http_url,
                         fields={'target': '%s/echo?a=b' % self.http_url_alt},
                         timeout=0.1,
                         retries=1))
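        # Pools are keyed to the proxy, so following the cross-host redirect
        # should not produce a pool bound to the alternate host.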
        self.assertNotEqual(r._pool.host, self.http_host_alt)
    def test_headers(self):
        http = proxy_from_url(self.proxy_url,
                              headers={'Foo': 'bar'},
                              proxy_headers={'Hickory': 'dickory'})
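        # 'headers' are added to every request sent through the manager;
        # 'proxy_headers' are the extra headers meant for the proxy itself,
        # so for plain HTTP both should show up at the /headers endpoint.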

        r = yield From(
            http.request_encode_url('GET', '%s/headers' % self.http_url))
        returned_headers = json.loads((yield From(r.data)).decode())
        self.assertEqual(returned_headers.get('Foo'), 'bar')
        self.assertEqual(returned_headers.get('Hickory'), 'dickory')
        self.assertEqual(returned_headers.get('Host'),
                         '%s:%s' % (self.http_host, self.http_port))

        r = yield From(
            http.request_encode_url('GET', '%s/headers' % self.http_url_alt))
        returned_headers = json.loads((yield From(r.data)).decode())
        self.assertEqual(returned_headers.get('Foo'), 'bar')
        self.assertEqual(returned_headers.get('Hickory'), 'dickory')
        self.assertEqual(returned_headers.get('Host'),
                         '%s:%s' % (self.http_host_alt, self.http_port))

        # r = yield From(http.request_encode_url('GET', '%s/headers' % self.https_url))
        # returned_headers = json.loads((yield From(r.data).decode()))
        # self.assertEqual(returned_headers.get('Foo'), 'bar')
        # self.assertEqual(returned_headers.get('Hickory'), None)
        # self.assertEqual(returned_headers.get('Host'),
        #         '%s:%s'%(self.https_host,self.https_port))
        #
        # r = yield From(http.request_encode_url('GET', '%s/headers' % self.https_url_alt))
        # returned_headers = json.loads((yield From(r.data).decode()))
        # self.assertEqual(returned_headers.get('Foo'), 'bar')
        # self.assertEqual(returned_headers.get('Hickory'), None)
        # self.assertEqual(returned_headers.get('Host'),
        #         '%s:%s'%(self.https_host_alt,self.https_port))

        r = yield From(
            http.request_encode_body('POST', '%s/headers' % self.http_url))
        returned_headers = json.loads((yield From(r.data)).decode())
        self.assertEqual(returned_headers.get('Foo'), 'bar')
        self.assertEqual(returned_headers.get('Hickory'), 'dickory')
        self.assertEqual(returned_headers.get('Host'),
                         '%s:%s' % (self.http_host, self.http_port))

        r = yield From(
            http.request_encode_url('GET',
                                    '%s/headers' % self.http_url,
                                    headers={'Baz': 'quux'}))
        returned_headers = json.loads((yield From(r.data)).decode())
        self.assertEqual(returned_headers.get('Foo'), None)
        self.assertEqual(returned_headers.get('Baz'), 'quux')
        self.assertEqual(returned_headers.get('Hickory'), 'dickory')
        self.assertEqual(returned_headers.get('Host'),
                         '%s:%s' % (self.http_host, self.http_port))

        # r = yield From(http.request_encode_url('GET', '%s/headers' % self.https_url, headers={'Baz': 'quux'}))
        # returned_headers = json.loads((yield From(r.data).decode()))
        # self.assertEqual(returned_headers.get('Foo'), None)
        # self.assertEqual(returned_headers.get('Baz'), 'quux')
        # self.assertEqual(returned_headers.get('Hickory'), None)
        # self.assertEqual(returned_headers.get('Host'),
        #         '%s:%s'%(self.https_host,self.https_port))
        #
        r = yield From(
            http.request_encode_body('GET',
                                     '%s/headers' % self.http_url,
                                     headers={'Baz': 'quux'}))
        returned_headers = json.loads((yield From(r.data)).decode())
        self.assertEqual(returned_headers.get('Foo'), None)
        self.assertEqual(returned_headers.get('Baz'), 'quux')
        self.assertEqual(returned_headers.get('Hickory'), 'dickory')
        self.assertEqual(returned_headers.get('Host'),
                         '%s:%s' % (self.http_host, self.http_port))
    def test_basic_proxy(self):
        http = proxy_from_url(self.proxy_url)

        r = yield From(http.request('GET', '%s/' % self.http_url))
        self.assertEqual(r.status, 200)