def test_proxy_verified(self):
    """HTTPS-over-proxy must validate certificates.

    Three scenarios: a bad CA bundle raises SSLError, a good CA bundle
    succeeds via VerifiedHTTPSConnection, and a hostname mismatch
    (connecting by IP) raises SSLError.
    """
    # Wrong CA bundle: certificate verification must fail.
    http = proxy_from_url(self.proxy_url, cert_reqs='REQUIRED',
                          ca_certs=DEFAULT_CA_BAD)
    https_pool = http._new_pool('https', self.https_host, self.https_port)
    try:
        https_pool.request('GET', '/')
        self.fail("Didn't raise SSL error with wrong CA")
    except SSLError as e:
        # Note: message parts are joined with a space (the original
        # concatenation ran "failed',instead" together).
        self.assertTrue('certificate verify failed' in str(e),
                        "Expected 'certificate verify failed', "
                        "instead got: %r" % e)

    # Correct CA bundle: verified connection class, request succeeds.
    http = proxy_from_url(self.proxy_url, cert_reqs='REQUIRED',
                          ca_certs=DEFAULT_CA)
    https_pool = http._new_pool('https', self.https_host, self.https_port)

    conn = https_pool._new_conn()
    self.assertEqual(conn.__class__, VerifiedHTTPSConnection)
    https_pool.request('GET', '/')  # Should succeed without exceptions.

    # Hostname mismatch: the cert is not issued for 127.0.0.1.
    http = proxy_from_url(self.proxy_url, cert_reqs='REQUIRED',
                          ca_certs=DEFAULT_CA)
    https_fail_pool = http._new_pool('https', '127.0.0.1', self.https_port)
    try:
        https_fail_pool.request('GET', '/')
        self.fail("Didn't raise SSL invalid common name")
    except SSLError as e:
        self.assertTrue("doesn't match" in str(e))
def test_proxy_conn_fail(self):
    """A proxy at an unreachable address raises ProxyError for both schemes."""
    unreachable_host, unreachable_port = get_unreachable_address()
    http = proxy_from_url('http://%s:%s/' % (unreachable_host,
                                             unreachable_port))
    for target in (self.https_url, self.http_url):
        self.assertRaises(ProxyError, http.request, 'GET', '%s/' % target)
def test_simple(self):
    # Smoke test: a raw echo server returns the request bytes as the
    # response body, so we can inspect exactly what the proxy client
    # sent on the wire.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]

        # Read until the end of the request headers.
        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        # Echo the raw request back as the response body.
        sock.send(('HTTP/1.1 200 OK\r\n'
                   'Content-Type: text/plain\r\n'
                   'Content-Length: %d\r\n'
                   '\r\n'
                   '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
        sock.close()

    self._start_server(echo_socket_handler)
    base_url = 'http://%s:%d' % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    r = proxy.request('GET', 'http://google.com/')
    self.assertEqual(r.status, 200)

    # FIXME: The order of the headers is not predictable right now. We
    # should fix that someday (maybe when we migrate to
    # OrderedDict/MultiDict).
    self.assertEqual(sorted(r.data.split(b'\r\n')),
                     sorted([
                         b'GET http://google.com/ HTTP/1.1',
                         b'Host: google.com',
                         b'Accept-Encoding: identity',
                         b'Accept: */*',
                         b'',
                         b'',
                     ]))
def test_simple(self):
    # NOTE(review): the proxy pool is constructed before the echo
    # server starts; self.port is presumably set by the fixture —
    # confirm against the test base class.
    base_url = "http://%s:%d" % (self.host, self.port)
    proxy = proxy_from_url(base_url)

    # Echo server: replies with the raw request bytes as the body.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]

        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += sock.recv(65536)

        sock.send(
            (
                "HTTP/1.1 200 OK\r\n"
                "Content-Type: text/plain\r\n"
                "Content-Length: %d\r\n"
                "\r\n"
                "%s" % (len(buf), buf.decode("utf-8"))
            ).encode("utf-8")
        )

    self._start_server(echo_socket_handler)

    r = proxy.request("GET", "http://google.com/")
    self.assertEqual(r.status, 200)
    # Proxied plain-HTTP requests use the absolute-URI request line.
    self.assertEqual(
        r.data,
        b"GET http://google.com/ HTTP/1.1\r\n"
        b"Host: google.com\r\n"
        b"Accept-Encoding: identity\r\n"
        b"Accept: */*\r\n"
        b"\r\n",
    )
def test_retries(self):
    # The handler drops the first connection outright to force a retry,
    # then echoes the second request back successfully.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]

        # First request, which should fail
        sock.close()

        # Second request
        sock = listener.accept()[0]
        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        sock.send(('HTTP/1.1 200 OK\r\n'
                   'Content-Type: text/plain\r\n'
                   'Content-Length: %d\r\n'
                   '\r\n'
                   '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
        sock.close()

    self._start_server(echo_socket_handler)
    base_url = 'http://%s:%d' % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    conn = proxy.connection_from_url('http://www.google.com')

    # retries=1 absorbs the deliberately dropped first connection.
    r = conn.urlopen('GET', 'http://www.google.com',
                     assert_same_host=False, retries=1)
    self.assertEqual(r.status, 200)

    # With retries disabled, the now-finished server yields a ProxyError.
    self.assertRaises(ProxyError, conn.urlopen, 'GET',
                      'http://www.google.com',
                      assert_same_host=False, retries=False)
def test_simple(self):
    base_url = 'http://%s:%d' % (self.host, self.port)
    proxy = proxy_from_url(base_url)

    # Echo server: replies with the raw request bytes as the body.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]

        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        sock.send(('HTTP/1.1 200 OK\r\n'
                   'Content-Type: text/plain\r\n'
                   'Content-Length: %d\r\n'
                   '\r\n'
                   '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))

    self._start_server(echo_socket_handler)

    r = proxy.request('GET', 'http://google.com/')
    self.assertEqual(r.status, 200)
    # Proxied plain-HTTP requests use the absolute-URI request line.
    self.assertEqual(r.data,
                     b'GET http://google.com/ HTTP/1.1\r\n'
                     b'Host: google.com\r\n'
                     b'Accept-Encoding: identity\r\n'
                     b'Accept: */*\r\n'
                     b'\r\n')
def test_https_proxy_timeout(self):
    """A tarpit HTTPS proxy with a tiny timeout surfaces ConnectTimeoutError."""
    https = proxy_from_url('https://{host}'.format(host=TARPIT_HOST))
    try:
        https.request('GET', self.http_url, timeout=0.001)
    except MaxRetryError as e:
        self.assertEqual(type(e.reason), ConnectTimeoutError)
    else:
        self.fail("Failed to raise retry error.")
def test_headers(self):
    # Echo server: the response body is the raw request, so any proxy
    # headers injected by the ProxyManager are visible in r.data.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]
        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        sock.send(('HTTP/1.1 200 OK\r\n'
                   'Content-Type: text/plain\r\n'
                   'Content-Length: %d\r\n'
                   '\r\n'
                   '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
        sock.close()

    self._start_server(echo_socket_handler)
    base_url = 'http://%s:%d' % (self.host, self.port)

    # Define some proxy headers.
    proxy_headers = HTTPHeaderDict({'For The Proxy': 'YEAH!'})
    proxy = proxy_from_url(base_url, proxy_headers=proxy_headers)

    conn = proxy.connection_from_url('http://www.google.com/')

    r = conn.urlopen('GET', 'http://www.google.com/', assert_same_host=False)
    self.assertEqual(r.status, 200)
    # FIXME: The order of the headers is not predictable right now. We
    # should fix that someday (maybe when we migrate to
    # OrderedDict/MultiDict).
    self.assertTrue(b'For The Proxy: YEAH!\r\n' in r.data)
def get_connection(self, url, proxies=None):
    """Returns a urllib3 connection for the given URL. This should not be
    called from user code, and is only exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param url: The URL to connect to.
    :param proxies: (optional) A Requests-style dictionary of proxies used
        on this request.
    """
    proxies = proxies or {}
    # Lower-casing the whole URL is safe here: only the scheme is read.
    proxy = proxies.get(urlparse(url.lower()).scheme)

    if proxy:
        except_on_missing_scheme(proxy)
        proxy_headers = self.proxy_headers(proxy)

        # PEP 8 membership test (was `not proxy in ...`); one manager is
        # cached per proxy URL.
        if proxy not in self.proxy_manager:
            self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block)

        conn = self.proxy_manager[proxy].connection_from_url(url)
    else:
        # Only scheme should be lower case
        parsed = urlparse(url)
        url = parsed.geturl()
        conn = self.poolmanager.connection_from_url(url)

    return conn
def test_https_proxy_pool_timeout(self):
    """A pool-level timeout against a tarpit proxy yields ConnectTimeoutError."""
    https = proxy_from_url('https://{host}'.format(host=TARPIT_HOST),
                           timeout=0.001)
    try:
        https.request('GET', self.http_url)
    except MaxRetryError as e:
        assert isinstance(e.reason, ConnectTimeoutError)
    else:
        self.fail("Failed to raise retry error.")
def test_basic_ipv6_proxy(self):
    """Both plain and TLS targets are reachable through the IPv6 proxy."""
    http = proxy_from_url(self.proxy_url)
    for base in (self.http_url, self.https_url):
        response = http.request('GET', '%s/' % base)
        self.assertEqual(response.status, 200)
def test_connect_reconn(self):
    # Serve exactly one CONNECT-tunneled HTTPS exchange per call.
    def proxy_ssl_one(listener):
        sock = listener.accept()[0]
        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += sock.recv(65536)
        s = buf.decode("utf-8")
        if not s.startswith("CONNECT "):
            sock.send(("HTTP/1.1 405 Method not allowed\r\n"
                       "Allow: CONNECT\r\n\r\n").encode("utf-8"))
            sock.close()
            return

        if not s.startswith("CONNECT %s:443" % (self.host,)):
            sock.send(("HTTP/1.1 403 Forbidden\r\n\r\n").encode("utf-8"))
            sock.close()
            return

        # Tunnel established; upgrade the socket to TLS and serve one
        # short response that closes the connection.
        sock.send(("HTTP/1.1 200 Connection Established\r\n\r\n").encode("utf-8"))
        ssl_sock = ssl.wrap_socket(
            sock,
            server_side=True,
            keyfile=DEFAULT_CERTS["keyfile"],
            certfile=DEFAULT_CERTS["certfile"],
            ca_certs=DEFAULT_CA,
        )

        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += ssl_sock.recv(65536)

        ssl_sock.send(
            (
                "HTTP/1.1 200 OK\r\n"
                "Content-Type: text/plain\r\n"
                "Content-Length: 2\r\n"
                "Connection: close\r\n"
                "\r\n"
                "Hi"
            ).encode("utf-8")
        )
        ssl_sock.close()

    def echo_socket_handler(listener):
        # Two tunnels: the client must reconnect after Connection: close.
        proxy_ssl_one(listener)
        proxy_ssl_one(listener)

    self._start_server(echo_socket_handler)
    base_url = "http://%s:%d" % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    url = "https://{0}".format(self.host)
    conn = proxy.connection_from_url(url)
    r = conn.urlopen("GET", url, retries=0)
    self.assertEqual(r.status, 200)
    # Second request must transparently re-establish the tunnel.
    r = conn.urlopen("GET", url, retries=0)
    self.assertEqual(r.status, 200)
def test_basic_ipv6_proxy(self):
    """Both plain and TLS targets work through the IPv6 proxy (with CA verify)."""
    http = proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA)
    self.addCleanup(http.clear)
    for base in (self.http_url, self.https_url):
        resp = http.request('GET', '%s/' % base)
        self.assertEqual(resp.status, 200)
def test_scheme_host_case_insensitive(self):
    """Assert that upper-case schemes and hosts are normalized."""
    http = proxy_from_url(self.proxy_url.upper())
    for target in (self.http_url, self.https_url):
        r = http.request('GET', '%s/' % target.upper())
        self.assertEqual(r.status, 200)
def test_nagle_proxy(self):
    """ Test that proxy connections do not have TCP_NODELAY turned on """
    http = proxy_from_url(self.proxy_url)
    pool = http.connection_from_host(self.http_host, self.http_port)
    conn = pool._get_conn()

    pool._make_request(conn, 'GET', '/')
    nodelay = conn.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
    self.assertEqual(nodelay, 0,
                     ("Expected TCP_NODELAY for proxies to be set "
                      "to zero, instead was %s" % nodelay))
def test_connect_reconn(self):
    # Serve exactly one CONNECT-tunneled HTTPS exchange per call.
    def proxy_ssl_one(listener):
        sock = listener.accept()[0]
        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)
        s = buf.decode('utf-8')
        if not s.startswith('CONNECT '):
            sock.send(('HTTP/1.1 405 Method not allowed\r\n'
                       'Allow: CONNECT\r\n\r\n').encode('utf-8'))
            sock.close()
            return

        if not s.startswith('CONNECT %s:443' % (self.host,)):
            sock.send(('HTTP/1.1 403 Forbidden\r\n\r\n').encode('utf-8'))
            sock.close()
            return

        # Tunnel established; upgrade the socket to TLS and serve one
        # short response that closes the connection.
        sock.send(('HTTP/1.1 200 Connection Established\r\n\r\n').encode('utf-8'))
        ssl_sock = ssl.wrap_socket(sock,
                                   server_side=True,
                                   keyfile=DEFAULT_CERTS['keyfile'],
                                   certfile=DEFAULT_CERTS['certfile'],
                                   ca_certs=DEFAULT_CA)

        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += ssl_sock.recv(65536)

        ssl_sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: 2\r\n'
                       'Connection: close\r\n'
                       '\r\n'
                       'Hi').encode('utf-8'))
        ssl_sock.close()

    def echo_socket_handler(listener):
        # Two tunnels: the client must reconnect after Connection: close.
        proxy_ssl_one(listener)
        proxy_ssl_one(listener)

    self._start_server(echo_socket_handler)
    base_url = 'http://%s:%d' % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    self.addCleanup(proxy.clear)
    url = 'https://{0}'.format(self.host)
    conn = proxy.connection_from_url(url)
    r = conn.urlopen('GET', url, retries=0)
    self.assertEqual(r.status, 200)
    # Second request must transparently re-establish the tunnel.
    r = conn.urlopen('GET', url, retries=0)
    self.assertEqual(r.status, 200)
def test_proxy_conn_fail(self):
    """Connecting through a proxy on a closed port raises ProxyError."""
    # Grab an ephemeral port, then close the socket so nothing is
    # listening there; connecting to it will be refused.
    # NOTE(review): another process could grab the port between close()
    # and the request — inherently a small race.
    probe = socket.socket()
    probe.bind(('127.0.0.1', 0))
    free_port = probe.getsockname()[1]
    probe.close()

    http = proxy_from_url('http://127.0.0.1:%s/' % free_port)
    for target in (self.https_url, self.http_url):
        self.assertRaises(ProxyError, http.request, 'GET', '%s/' % target)
def get_connection(self, url, proxies=None):
    """Returns a connection for the given URL."""
    scheme = urlparse(url).scheme
    proxy = (proxies or {}).get(scheme)

    if not proxy:
        return self.poolmanager.connection_from_url(url)

    proxy = prepend_scheme_if_needed(proxy, scheme)
    return proxy_from_url(proxy)
def test_proxy_conn_fail(self):
    """An unreachable proxy exhausts retries; the cause is a ProxyError."""
    host, port = get_unreachable_address()
    http = proxy_from_url('http://%s:%s/' % (host, port),
                          retries=1, timeout=0.05)

    for target in (self.https_url, self.http_url):
        self.assertRaises(MaxRetryError, http.request, 'GET',
                          '%s/' % target)

    try:
        http.request('GET', '%s/' % self.http_url)
    except MaxRetryError as e:
        self.assertEqual(type(e.reason), ProxyError)
    else:
        self.fail("Failed to raise retry error.")
def test_redirect(self):
    """With redirect=False the 303 is reported; by default it is followed."""
    http = proxy_from_url(self.proxy_url)

    redirect_url = '%s/redirect' % self.http_url
    fields = {'target': '%s/' % self.http_url}

    r = http.request('GET', redirect_url, fields=fields, redirect=False)
    self.assertEqual(r.status, 303)

    r = http.request('GET', redirect_url, fields=fields)
    self.assertEqual(r.status, 200)
    self.assertEqual(r.data, b'Dummy server!')
def test_headerdict(self):
    """HTTPHeaderDict works for default, proxy and per-request headers."""
    base_headers = HTTPHeaderDict(a='b')
    via_proxy = HTTPHeaderDict()
    via_proxy.add('foo', 'bar')

    http = proxy_from_url(
        self.proxy_url,
        headers=base_headers,
        proxy_headers=via_proxy)

    per_request = HTTPHeaderDict(baz='quux')
    r = http.request('GET', '%s/headers' % self.http_url,
                     headers=per_request)
    echoed = json.loads(r.data.decode())
    self.assertEqual(echoed.get('Foo'), 'bar')
    self.assertEqual(echoed.get('Baz'), 'quux')
def test_cross_protocol_redirect(self):
    """http->https redirects are only followed when retries allow it."""
    http = proxy_from_url(self.proxy_url)

    cross_protocol_location = '%s/echo?a=b' % self.https_url
    try:
        http.request('GET', '%s/redirect' % self.http_url,
                     fields={'target': cross_protocol_location},
                     timeout=1, retries=0)
    except MaxRetryError:
        pass
    else:
        self.fail("We don't want to follow redirects here.")

    r = http.request('GET', '%s/redirect' % self.http_url,
                     fields={'target': '%s/echo?a=b' % self.https_url},
                     timeout=1, retries=1)
    self.assertEqual(r._pool.host, self.https_host)
def test_proxy_pooling(self):
    """Plain-HTTP targets share one proxy pool; each HTTPS host gets its own.

    (The unused ``r =`` bindings were dropped; only the pool counts are
    asserted, matching the sibling variant of this test.)
    """
    http = proxy_from_url(self.proxy_url)

    # All http:// requests go through the same proxy pool.
    for x in range(2):
        http.urlopen('GET', self.http_url)
    self.assertEqual(len(http.pools), 1)

    for x in range(2):
        http.urlopen('GET', self.http_url_alt)
    self.assertEqual(len(http.pools), 1)

    # https:// requests get one pool per target host.
    for x in range(2):
        http.urlopen('GET', self.https_url)
    self.assertEqual(len(http.pools), 2)

    for x in range(2):
        http.urlopen('GET', self.https_url_alt)
    self.assertEqual(len(http.pools), 3)
def test_proxy_pooling_ext(self):
    """HTTP targets share one pool; HTTPS pools are keyed per host."""
    http = proxy_from_url(self.proxy_url)

    pool_a = http.connection_from_url(self.http_url)
    pool_b = http.connection_from_host(self.http_host, self.http_port)
    pool_c = http.connection_from_url(self.http_url_alt)
    pool_d = http.connection_from_host(self.http_host_alt, self.http_port)
    self.assertEqual(pool_a, pool_b)
    self.assertEqual(pool_b, pool_c)
    self.assertEqual(pool_c, pool_d)

    tls_a = http.connection_from_url(self.https_url)
    tls_b = http.connection_from_host(self.https_host,
                                      self.https_port, scheme='https')
    tls_c = http.connection_from_url(self.https_url_alt)
    tls_d = http.connection_from_host(self.https_host_alt,
                                      self.https_port, scheme='https')
    self.assertEqual(tls_a, tls_b)
    self.assertNotEqual(tls_b, tls_c)
    self.assertEqual(tls_c, tls_d)
def test_proxy_pooling(self):
    """Pool count stays at 1 for http targets and grows per https host."""
    http = proxy_from_url(self.proxy_url, cert_reqs='NONE')
    self.addCleanup(http.clear)

    expectations = [
        (self.http_url, 1),
        (self.http_url_alt, 1),
        (self.https_url, 2),
        (self.https_url_alt, 3),
    ]
    for url, expected_pools in expectations:
        for _ in range(2):
            http.urlopen('GET', url)
        self.assertEqual(len(http.pools), expected_pools)
def test_simple(self):
    # Echo server: replies with the raw request bytes as the body so
    # the test can inspect exactly what the proxy client sent.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]
        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += sock.recv(65536)
        sock.send(
            (
                "HTTP/1.1 200 OK\r\n"
                "Content-Type: text/plain\r\n"
                "Content-Length: %d\r\n"
                "\r\n"
                "%s" % (len(buf), buf.decode("utf-8"))
            ).encode("utf-8")
        )
        sock.close()

    self._start_server(echo_socket_handler)
    base_url = "http://%s:%d" % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    r = proxy.request("GET", "http://google.com/")
    self.assertEqual(r.status, 200)
    # FIXME: The order of the headers is not predictable right now. We
    # should fix that someday (maybe when we migrate to
    # OrderedDict/MultiDict).
    self.assertEqual(
        sorted(r.data.split(b"\r\n")),
        sorted(
            [
                b"GET http://google.com/ HTTP/1.1",
                b"Host: google.com",
                b"Accept-Encoding: identity",
                b"Accept: */*",
                b"",
                b"",
            ]
        ),
    )
def test_cross_host_redirect(self):
    """Cross-host redirects are refused at retries=0 and followed at 1."""
    http = proxy_from_url(self.proxy_url)

    cross_host_location = '%s/echo?a=b' % self.http_url_alt
    try:
        http.request('GET', '%s/redirect' % self.http_url,
                     fields={'target': cross_host_location},
                     timeout=0.1, retries=0)
    except MaxRetryError:
        pass
    else:
        self.fail("We don't want to follow redirects here.")

    r = http.request('GET', '%s/redirect' % self.http_url,
                     fields={'target': '%s/echo?a=b' % self.http_url_alt},
                     timeout=0.1, retries=1)
    self.assertNotEqual(r._pool.host, self.http_host_alt)
def test_proxy_pooling_ext(self):
    """HTTP targets share a pool; HTTPS pools are keyed per host."""
    with proxy_from_url(self.proxy_url) as http:
        plain = [
            http.connection_from_url(self.http_url),
            http.connection_from_host(self.http_host, self.http_port),
            http.connection_from_url(self.http_url_alt),
            http.connection_from_host(self.http_host_alt, self.http_port),
        ]
        assert plain[0] == plain[1]
        assert plain[1] == plain[2]
        assert plain[2] == plain[3]

        secure = [
            http.connection_from_url(self.https_url),
            http.connection_from_host(
                self.https_host, self.https_port, scheme="https"),
            http.connection_from_url(self.https_url_alt),
            http.connection_from_host(
                self.https_host_alt, self.https_port, scheme="https"),
        ]
        assert secure[0] == secure[1]
        assert secure[1] != secure[2]
        assert secure[2] == secure[3]
def test_redirect(self):
    """With redirect=False the 303 is reported; by default it is followed."""
    http = proxy_from_url(self.proxy_url)
    self.addCleanup(http.clear)

    redirect_url = "%s/redirect" % self.http_url
    fields = {"target": "%s/" % self.http_url}

    r = http.request("GET", redirect_url, fields=fields, redirect=False)
    self.assertEqual(r.status, 303)

    r = http.request("GET", redirect_url, fields=fields)
    self.assertEqual(r.status, 200)
    self.assertEqual(r.data, b"Dummy server!")
def test_proxy_pooling_ext(self):
    """HTTP targets share one pool; HTTPS pools are keyed per host."""
    http = proxy_from_url(self.proxy_url)
    self.addCleanup(http.clear)

    pool_a = http.connection_from_url(self.http_url)
    pool_b = http.connection_from_host(self.http_host, self.http_port)
    pool_c = http.connection_from_url(self.http_url_alt)
    pool_d = http.connection_from_host(self.http_host_alt, self.http_port)
    self.assertEqual(pool_a, pool_b)
    self.assertEqual(pool_b, pool_c)
    self.assertEqual(pool_c, pool_d)

    tls_a = http.connection_from_url(self.https_url)
    tls_b = http.connection_from_host(self.https_host,
                                      self.https_port, scheme='https')
    tls_c = http.connection_from_url(self.https_url_alt)
    tls_d = http.connection_from_host(self.https_host_alt,
                                      self.https_port, scheme='https')
    self.assertEqual(tls_a, tls_b)
    self.assertNotEqual(tls_b, tls_c)
    self.assertEqual(tls_c, tls_d)
def test_https_headers(self):
    # Over an HTTPS proxy: plain-http targets receive both `headers`
    # and `proxy_headers`; an https target is rejected outright with
    # ProxySchemeUnsupported (TLS-in-TLS not supported here).
    with proxy_from_url(
        self.https_proxy_url,
        headers={"Foo": "bar"},
        proxy_headers={"Hickory": "dickory"},
        ca_certs=DEFAULT_CA,
    ) as http:
        r = http.request_encode_url("GET", "%s/headers" % self.http_url)
        returned_headers = json.loads(r.data.decode())
        assert returned_headers.get("Foo") == "bar"
        assert returned_headers.get("Hickory") == "dickory"
        assert returned_headers.get("Host") == "%s:%s" % (
            self.http_host,
            self.http_port,
        )

        r = http.request_encode_url("GET", "%s/headers" % self.http_url_alt)
        returned_headers = json.loads(r.data.decode())
        assert returned_headers.get("Foo") == "bar"
        assert returned_headers.get("Hickory") == "dickory"
        assert returned_headers.get("Host") == "%s:%s" % (
            self.http_host_alt,
            self.http_port,
        )

        # https target over an https proxy is not supported.
        with pytest.raises(ProxySchemeUnsupported):
            http.request_encode_url("GET", "%s/headers" % self.https_url)

        # Per-request headers replace the pool-level defaults
        # (Foo disappears, Baz appears); proxy_headers still apply.
        r = http.request_encode_url(
            "GET", "%s/headers" % self.http_url, headers={"Baz": "quux"}
        )
        returned_headers = json.loads(r.data.decode())
        assert returned_headers.get("Foo") is None
        assert returned_headers.get("Baz") == "quux"
        assert returned_headers.get("Hickory") == "dickory"
        assert returned_headers.get("Host") == "%s:%s" % (
            self.http_host,
            self.http_port,
        )
def test_retries(self):
    # Set once the handler has served its single successful response,
    # so the main thread can wait before issuing the final request.
    close_event = Event()

    def echo_socket_handler(listener):
        sock = listener.accept()[0]
        # First request, which should fail
        sock.close()

        # Second request
        sock = listener.accept()[0]
        buf = b""
        while not buf.endswith(b"\r\n\r\n"):
            buf += sock.recv(65536)
        sock.send(
            (
                "HTTP/1.1 200 OK\r\n"
                "Content-Type: text/plain\r\n"
                "Content-Length: %d\r\n"
                "\r\n"
                "%s" % (len(buf), buf.decode("utf-8"))
            ).encode("utf-8")
        )
        sock.close()
        close_event.set()

    self._start_server(echo_socket_handler)
    base_url = "http://%s:%d" % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    conn = proxy.connection_from_url("http://www.google.com")

    # retries=1 absorbs the deliberately dropped first connection.
    r = conn.urlopen("GET", "http://www.google.com",
                     assert_same_host=False, retries=1)
    self.assertEqual(r.status, 200)

    # Wait for the handler to finish; with retries disabled the next
    # attempt against the finished server raises ProxyError.
    close_event.wait(timeout=1)
    self.assertRaises(
        ProxyError,
        conn.urlopen,
        "GET",
        "http://www.google.com",
        assert_same_host=False,
        retries=False,
    )
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Return (creating and caching on first use) a urllib3 ProxyManager
    for the given proxy URL.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <rkrizzle.adapters.HTTPAdapter>`.

    :param proxy: The proxy to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
    :returns: ProxyManager
    """
    manager = self.proxy_manager.get(proxy)
    if manager is None:
        manager = proxy_from_url(
            proxy,
            proxy_headers=self.proxy_headers(proxy),
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs)
        self.proxy_manager[proxy] = manager
    return manager
def test_cross_protocol_redirect(self):
    """http->https redirects are only followed when retries allow it."""
    http = proxy_from_url(self.proxy_url)
    self.addCleanup(http.clear)

    cross_protocol_location = '%s/echo?a=b' % self.https_url
    try:
        http.request('GET', '%s/redirect' % self.http_url,
                     fields={'target': cross_protocol_location},
                     timeout=1, retries=0)
    except MaxRetryError:
        pass
    else:
        self.fail("We don't want to follow redirects here.")

    r = http.request('GET', '%s/redirect' % self.http_url,
                     fields={'target': '%s/echo?a=b' % self.https_url},
                     timeout=1, retries=1)
    self.assertEqual(r._pool.host, self.https_host)
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Return urllib3 ProxyManager for the given proxy.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param proxy: The proxy to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
    :returns: ProxyManager
    """
    # PEP 8 membership test (was `not proxy in ...`); one manager is
    # cached per proxy URL.
    if proxy not in self.proxy_manager:
        proxy_headers = self.proxy_headers(proxy)
        self.proxy_manager[proxy] = proxy_from_url(
            proxy,
            proxy_headers=proxy_headers,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs)

    return self.proxy_manager[proxy]
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Return urllib3 ProxyManager for the given proxy, caching one
    manager per proxy URL.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param proxy: The proxy to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
    :returns: ProxyManager
    :rtype: urllib3.ProxyManager
    """
    try:
        return self.proxy_manager[proxy]
    except KeyError:
        pass

    if proxy.lower().startswith("socks"):
        # SOCKS proxies carry their credentials in the URL itself.
        username, password = get_auth_from_url(proxy)
        manager = SOCKSProxyManager(
            proxy,
            username=username,
            password=password,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs
        )
    else:
        manager = proxy_from_url(
            proxy,
            proxy_headers=self.proxy_headers(proxy),
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs
        )

    self.proxy_manager[proxy] = manager
    return manager
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Return urllib3 ProxyManager for the given proxy.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <client_requests.adapters.HTTPAdapter>`.

    :param proxy: The proxy to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
    :returns: ProxyManager
    :rtype: urllib3.ProxyManager
    """
    # A leftover developer debug print() that fired on every call was
    # removed here.
    if proxy in self.proxy_manager:
        manager = self.proxy_manager[proxy]
    elif proxy.lower().startswith('socks'):
        # SOCKS proxies carry their credentials in the URL itself.
        username, password = get_auth_from_url(proxy)
        manager = self.proxy_manager[proxy] = SOCKSProxyManager(
            proxy,
            username=username,
            password=password,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs
        )
    else:
        proxy_headers = self.proxy_headers(proxy)
        manager = self.proxy_manager[proxy] = proxy_from_url(
            proxy,
            proxy_headers=proxy_headers,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs)

    return manager
def test_simple(self):
    # Echo server: replies with the raw request bytes as the body so
    # the test can inspect exactly what the proxy client sent.
    def echo_socket_handler(listener):
        sock = listener.accept()[0]

        buf = b''
        while not buf.endswith(b'\r\n\r\n'):
            buf += sock.recv(65536)

        sock.send(('HTTP/1.1 200 OK\r\n'
                   'Content-Type: text/plain\r\n'
                   'Content-Length: %d\r\n'
                   '\r\n'
                   '%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
        sock.close()

    self._start_server(echo_socket_handler)
    base_url = 'http://%s:%d' % (self.host, self.port)

    proxy = proxy_from_url(base_url)
    self.addCleanup(proxy.clear)
    r = proxy.request('GET', 'http://google.com/')
    self.assertEqual(r.status, 200)
    # FIXME: The order of the headers is not predictable right now. We
    # should fix that someday (maybe when we migrate to
    # OrderedDict/MultiDict).
    self.assertEqual(
        sorted(r.data.split(b'\r\n')),
        sorted([
            b'GET http://google.com/ HTTP/1.1',
            b'Host: google.com',
            b'Accept-Encoding: identity',
            b'Accept: */*',
            b'',
            b'',
        ]))
def test_cross_protocol_redirect(self):
    """http->https redirects are only followed when retries allow it."""
    with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http:
        cross_protocol_location = "%s/echo?a=b" % self.https_url
        try:
            http.request(
                "GET",
                "%s/redirect" % self.http_url,
                fields={"target": cross_protocol_location},
                timeout=1,
                retries=0,
            )
        except MaxRetryError:
            pass
        else:
            self.fail("We don't want to follow redirects here.")

        r = http.request(
            "GET",
            "%s/redirect" % self.http_url,
            fields={"target": "%s/echo?a=b" % self.https_url},
            timeout=1,
            retries=1,
        )
        assert r._pool.host == self.https_host
def test_https_proxy_timeout(self):
    """A tarpit HTTPS proxy with a short timeout yields ConnectTimeoutError."""
    tarpit_proxy = "https://{host}".format(host=TARPIT_HOST)
    with proxy_from_url(tarpit_proxy) as https:
        with pytest.raises(MaxRetryError) as e:
            https.request("GET", self.http_url, timeout=SHORT_TIMEOUT)
        assert type(e.value.reason) == ConnectTimeoutError
def test_headers(self):
    # End-to-end check that `headers` reach the origin while
    # `proxy_headers` only appear on plain-HTTP requests (not on
    # CONNECT-tunneled https), across GET/POST and per-request
    # header overrides.
    http = proxy_from_url(
        self.proxy_url,
        headers={"Foo": "bar"},
        proxy_headers={"Hickory": "dickory"},
        ca_certs=DEFAULT_CA,
    )
    self.addCleanup(http.clear)

    r = http.request_encode_url("GET", "%s/headers" % self.http_url)
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") == "bar"
    assert returned_headers.get("Hickory") == "dickory"
    assert returned_headers.get("Host") == "%s:%s" % (
        self.http_host,
        self.http_port,
    )

    r = http.request_encode_url("GET", "%s/headers" % self.http_url_alt)
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") == "bar"
    assert returned_headers.get("Hickory") == "dickory"
    assert returned_headers.get("Host") == "%s:%s" % (
        self.http_host_alt,
        self.http_port,
    )

    # https is tunneled, so proxy_headers must NOT reach the origin.
    r = http.request_encode_url("GET", "%s/headers" % self.https_url)
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") == "bar"
    assert returned_headers.get("Hickory") is None
    assert returned_headers.get("Host") == "%s:%s" % (
        self.https_host,
        self.https_port,
    )

    r = http.request_encode_body("POST", "%s/headers" % self.http_url)
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") == "bar"
    assert returned_headers.get("Hickory") == "dickory"
    assert returned_headers.get("Host") == "%s:%s" % (
        self.http_host,
        self.http_port,
    )

    # Per-request headers replace the pool-level defaults
    # (Foo disappears, Baz appears); proxy_headers still apply.
    r = http.request_encode_url(
        "GET", "%s/headers" % self.http_url, headers={"Baz": "quux"}
    )
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") is None
    assert returned_headers.get("Baz") == "quux"
    assert returned_headers.get("Hickory") == "dickory"
    assert returned_headers.get("Host") == "%s:%s" % (
        self.http_host,
        self.http_port,
    )

    r = http.request_encode_url(
        "GET", "%s/headers" % self.https_url, headers={"Baz": "quux"}
    )
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") is None
    assert returned_headers.get("Baz") == "quux"
    assert returned_headers.get("Hickory") is None
    assert returned_headers.get("Host") == "%s:%s" % (
        self.https_host,
        self.https_port,
    )

    r = http.request_encode_body(
        "GET", "%s/headers" % self.http_url, headers={"Baz": "quux"}
    )
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") is None
    assert returned_headers.get("Baz") == "quux"
    assert returned_headers.get("Hickory") == "dickory"
    assert returned_headers.get("Host") == "%s:%s" % (
        self.http_host,
        self.http_port,
    )

    r = http.request_encode_body(
        "GET", "%s/headers" % self.https_url, headers={"Baz": "quux"}
    )
    returned_headers = json.loads(r.data.decode())
    assert returned_headers.get("Foo") is None
    assert returned_headers.get("Baz") == "quux"
    assert returned_headers.get("Hickory") is None
    assert returned_headers.get("Host") == "%s:%s" % (
        self.https_host,
        self.https_port,
    )
def test_headers(self):
    # Verify `headers` reach the origin while `proxy_headers` only
    # appear on plain-HTTP requests (not on CONNECT-tunneled https).
    http = proxy_from_url(self.proxy_url,
                          headers={'Foo': 'bar'},
                          proxy_headers={'Hickory': 'dickory'})

    r = http.request_encode_url('GET', '%s/headers' % self.http_url)
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), 'bar')
    self.assertEqual(returned_headers.get('Hickory'), 'dickory')
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.http_host, self.http_port))

    r = http.request_encode_url('GET', '%s/headers' % self.http_url_alt)
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), 'bar')
    self.assertEqual(returned_headers.get('Hickory'), 'dickory')
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.http_host_alt, self.http_port))

    # https is tunneled, so proxy_headers must NOT reach the origin.
    r = http.request_encode_url('GET', '%s/headers' % self.https_url)
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), 'bar')
    self.assertEqual(returned_headers.get('Hickory'), None)
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.https_host, self.https_port))

    r = http.request_encode_url('GET', '%s/headers' % self.https_url_alt)
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), 'bar')
    self.assertEqual(returned_headers.get('Hickory'), None)
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.https_host_alt, self.https_port))

    r = http.request_encode_body('POST', '%s/headers' % self.http_url)
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), 'bar')
    self.assertEqual(returned_headers.get('Hickory'), 'dickory')
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.http_host, self.http_port))

    # Per-request headers replace the pool-level defaults
    # (Foo disappears, Baz appears); proxy_headers still apply.
    r = http.request_encode_url('GET', '%s/headers' % self.http_url,
                                headers={'Baz': 'quux'})
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), None)
    self.assertEqual(returned_headers.get('Baz'), 'quux')
    self.assertEqual(returned_headers.get('Hickory'), 'dickory')
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.http_host, self.http_port))

    r = http.request_encode_url('GET', '%s/headers' % self.https_url,
                                headers={'Baz': 'quux'})
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), None)
    self.assertEqual(returned_headers.get('Baz'), 'quux')
    self.assertEqual(returned_headers.get('Hickory'), None)
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.https_host, self.https_port))

    r = http.request_encode_body('GET', '%s/headers' % self.http_url,
                                 headers={'Baz': 'quux'})
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), None)
    self.assertEqual(returned_headers.get('Baz'), 'quux')
    self.assertEqual(returned_headers.get('Hickory'), 'dickory')
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.http_host, self.http_port))

    r = http.request_encode_body('GET', '%s/headers' % self.https_url,
                                 headers={'Baz': 'quux'})
    returned_headers = json.loads(r.data.decode())
    self.assertEqual(returned_headers.get('Foo'), None)
    self.assertEqual(returned_headers.get('Baz'), 'quux')
    self.assertEqual(returned_headers.get('Hickory'), None)
    self.assertEqual(returned_headers.get('Host'),
                     '%s:%s' % (self.https_host, self.https_port))
def test_headers(self):
    """Pool headers are sent to every target; proxy headers are added for
    plain-HTTP targets only and are not sent to HTTPS targets."""
    with proxy_from_url(
        self.proxy_url,
        headers={"Foo": "bar"},
        proxy_headers={"Hickory": "dickory"},
        ca_certs=DEFAULT_CA,
    ) as http:

        def echoed(request, method, url, **kw):
            # The /headers endpoint echoes back the headers it received.
            return json.loads(request(method, url, **kw).data.decode())

        got = echoed(http.request_encode_url, "GET",
                     f"{self.http_url}/headers")
        assert got.get("Foo") == "bar"
        assert got.get("Hickory") == "dickory"
        assert got.get("Host") == f"{self.http_host}:{self.http_port}"

        got = echoed(http.request_encode_url, "GET",
                     f"{self.http_url_alt}/headers")
        assert got.get("Foo") == "bar"
        assert got.get("Hickory") == "dickory"
        assert got.get("Host") == f"{self.http_host_alt}:{self.http_port}"

        got = echoed(http.request_encode_url, "GET",
                     f"{self.https_url}/headers")
        assert got.get("Foo") == "bar"
        assert got.get("Hickory") is None
        assert got.get("Host") == f"{self.https_host}:{self.https_port}"

        got = echoed(http.request_encode_body, "POST",
                     f"{self.http_url}/headers")
        assert got.get("Foo") == "bar"
        assert got.get("Hickory") == "dickory"
        assert got.get("Host") == f"{self.http_host}:{self.http_port}"

        # Per-request headers replace pool headers but not proxy headers.
        got = echoed(http.request_encode_url, "GET",
                     f"{self.http_url}/headers", headers={"Baz": "quux"})
        assert got.get("Foo") is None
        assert got.get("Baz") == "quux"
        assert got.get("Hickory") == "dickory"
        assert got.get("Host") == f"{self.http_host}:{self.http_port}"

        got = echoed(http.request_encode_url, "GET",
                     f"{self.https_url}/headers", headers={"Baz": "quux"})
        assert got.get("Foo") is None
        assert got.get("Baz") == "quux"
        assert got.get("Hickory") is None
        assert got.get("Host") == f"{self.https_host}:{self.https_port}"

        got = echoed(http.request_encode_body, "GET",
                     f"{self.http_url}/headers", headers={"Baz": "quux"})
        assert got.get("Foo") is None
        assert got.get("Baz") == "quux"
        assert got.get("Hickory") == "dickory"
        assert got.get("Host") == f"{self.http_host}:{self.http_port}"

        got = echoed(http.request_encode_body, "GET",
                     f"{self.https_url}/headers", headers={"Baz": "quux"})
        assert got.get("Foo") is None
        assert got.get("Baz") == "quux"
        assert got.get("Hickory") is None
        assert got.get("Host") == f"{self.https_host}:{self.https_port}"
def test_https_proxy_pool_timeout(self):
    """Connecting to an unroutable HTTPS proxy surfaces a
    ConnectTimeoutError wrapped in MaxRetryError."""
    with proxy_from_url(f"https://{TARPIT_HOST}",
                        timeout=SHORT_TIMEOUT) as https:
        with pytest.raises(MaxRetryError) as e:
            https.request("GET", self.http_url)
        # isinstance instead of exact-type comparison (`type(x) == T`,
        # flake8 E721): subclasses of ConnectTimeoutError also qualify.
        assert isinstance(e.value.reason, ConnectTimeoutError)
def test_invalid_schema(self, url, error_msg):
    """Building a proxy from a URL with an unsupported scheme must raise
    ProxySchemeUnknown with the expected message."""
    raises_unknown_scheme = pytest.raises(ProxySchemeUnknown, match=error_msg)
    with raises_unknown_scheme:
        proxy_from_url(url)
def test_headers(self):
    """Table-driven check that pool headers reach every target while proxy
    headers are forwarded for plain HTTP only, and that per-request headers
    replace pool headers but not proxy headers."""
    http = proxy_from_url(self.proxy_url, headers={'Foo': 'bar'},
                          proxy_headers={'Hickory': 'dickory'})

    http_host_hdr = '%s:%s' % (self.http_host, self.http_port)
    http_alt_host_hdr = '%s:%s' % (self.http_host_alt, self.http_port)
    https_host_hdr = '%s:%s' % (self.https_host, self.https_port)
    https_alt_host_hdr = '%s:%s' % (self.https_host_alt, self.https_port)

    # (requester, method, url, per-request headers, expected echoed subset)
    cases = [
        (http.request_encode_url, 'GET', '%s/headers' % self.http_url, None,
         {'Foo': 'bar', 'Hickory': 'dickory', 'Host': http_host_hdr}),
        (http.request_encode_url, 'GET', '%s/headers' % self.http_url_alt,
         None,
         {'Foo': 'bar', 'Hickory': 'dickory', 'Host': http_alt_host_hdr}),
        (http.request_encode_url, 'GET', '%s/headers' % self.https_url, None,
         {'Foo': 'bar', 'Hickory': None, 'Host': https_host_hdr}),
        (http.request_encode_url, 'GET', '%s/headers' % self.https_url_alt,
         None,
         {'Foo': 'bar', 'Hickory': None, 'Host': https_alt_host_hdr}),
        (http.request_encode_body, 'POST', '%s/headers' % self.http_url, None,
         {'Foo': 'bar', 'Hickory': 'dickory', 'Host': http_host_hdr}),
        (http.request_encode_url, 'GET', '%s/headers' % self.http_url,
         {'Baz': 'quux'},
         {'Foo': None, 'Baz': 'quux', 'Hickory': 'dickory',
          'Host': http_host_hdr}),
        (http.request_encode_url, 'GET', '%s/headers' % self.https_url,
         {'Baz': 'quux'},
         {'Foo': None, 'Baz': 'quux', 'Hickory': None,
          'Host': https_host_hdr}),
        (http.request_encode_body, 'GET', '%s/headers' % self.http_url,
         {'Baz': 'quux'},
         {'Foo': None, 'Baz': 'quux', 'Hickory': 'dickory',
          'Host': http_host_hdr}),
        (http.request_encode_body, 'GET', '%s/headers' % self.https_url,
         {'Baz': 'quux'},
         {'Foo': None, 'Baz': 'quux', 'Hickory': None,
          'Host': https_host_hdr}),
    ]

    for request, method, url, extra, expected in cases:
        if extra is None:
            r = request(method, url)
        else:
            r = request(method, url, headers=extra)
        got = json.loads(r.data.decode())
        for name, want in expected.items():
            self.assertEqual(got.get(name), want)