def test_make_headers(self):
    """Each make_headers() keyword should map to exactly one well-known header."""
    cases = [
        (dict(accept_encoding=True), {'accept-encoding': 'gzip,deflate'}),
        (dict(accept_encoding='foo,bar'), {'accept-encoding': 'foo,bar'}),
        (dict(accept_encoding=['foo', 'bar']), {'accept-encoding': 'foo,bar'}),
        (dict(accept_encoding=True, user_agent='banana'),
         {'accept-encoding': 'gzip,deflate', 'user-agent': 'banana'}),
        (dict(user_agent='banana'), {'user-agent': 'banana'}),
        (dict(keep_alive=True), {'connection': 'keep-alive'}),
        (dict(basic_auth='foo:bar'), {'authorization': 'Basic Zm9vOmJhcg=='}),
        (dict(proxy_basic_auth='foo:bar'),
         {'proxy-authorization': 'Basic Zm9vOmJhcg=='}),
        (dict(disable_cache=True), {'cache-control': 'no-cache'}),
    ]
    for kwargs, expected in cases:
        self.assertEqual(make_headers(**kwargs), expected)
def download_sig(opts, sig, version=None):
    """Download a ClamAV signature file from ``opts.hostname``.

    Fetches ``<sig>.cvd`` when *version* is truthy, otherwise ``<sig>.cdiff``,
    and writes it into ``opts.workdir``.

    :param opts: options object providing ``hostname`` and ``workdir``
    :param sig: signature database base name (e.g. ``daily``)
    :param version: when truthy, download the full ``.cvd`` instead of a diff
    :returns: tuple ``(downloaded, code)`` — whether the file landed on disk,
        and the HTTP status code (``None`` if the request never completed)
    """
    code = None
    downloaded = False
    user_agent = 'ClamAV/0.101.1 (OS: linux-gnu, ARCH: x86_64, CPU: x86_64)'
    # NOTE(review): TLS options (cert_reqs/ca_certs) are configured here, but
    # the request below goes over plain http:// — the verification settings
    # are inert unless the URL scheme is https. Confirm the intended scheme.
    manager = PoolManager(headers=make_headers(user_agent=user_agent),
                          cert_reqs='CERT_REQUIRED',
                          ca_certs=certifi.where(),
                          timeout=Timeout(connect=10.0, read=60.0))
    if version:
        path = '/%s.cvd' % sig
        filename = os.path.join(opts.workdir, '%s.cvd' % sig)
    else:
        path = '/%s.cdiff' % sig
        filename = os.path.join(opts.workdir, '%s.cdiff' % sig)
    try:
        req = manager.request('GET', 'http://%s%s' % (opts.hostname, path))
    except BaseException as msg:
        error("Request error: %s" % msg)
        # BUGFIX: the original fell through here and dereferenced the unbound
        # name `req` (NameError) whenever error() did not terminate the
        # process. Report failure explicitly instead.
        return downloaded, code
    code = req.status
    if req.status == 200:
        with open(filename, 'wb') as handle:
            handle.write(req.data)
        downloaded = os.path.exists(filename)
    return downloaded, code
def _make_proxy_pool(
    ca_certs,
    cert_reqs,
    proxy_type,
    proxy_rdns,
    proxy_host,
    proxy_port,
    proxy_user,
    proxy_pass,
    **kwargs
):
    """Build a urllib3 pool manager that routes traffic through a proxy.

    :param ca_certs: CA bundle path for TLS verification (HTTP proxy only)
    :param cert_reqs: certificate requirement policy (HTTP proxy only)
    :param proxy_type: one of PROXY_TYPE_HTTP / PROXY_TYPE_SOCKS4 / PROXY_TYPE_SOCKS5
    :param proxy_rdns: resolve DNS on the proxy side (selects socks4a/socks5h)
    :param proxy_host: proxy hostname or IP
    :param proxy_port: proxy port
    :param proxy_user: optional proxy auth username
    :param proxy_pass: optional proxy auth password
    :raises NotImplementedError: for any unrecognized proxy_type
    """
    if proxy_type == PROXY_TYPE_HTTP:
        proxy_url = 'http://{}:{}/'.format(proxy_host, proxy_port)
        # IDIOM FIX: was `True if proxy_user or proxy_pass else False`;
        # bool() expresses the same truthiness test directly.
        has_auth = bool(proxy_user or proxy_pass)
        proxy_headers = make_headers(
            proxy_basic_auth='{}:{}'.format(proxy_user, proxy_pass)
        ) if has_auth else {}
        return urllib3.ProxyManager(
            proxy_url=proxy_url,
            proxy_headers=proxy_headers,
            ca_certs=ca_certs,
            cert_reqs=cert_reqs,
        )
    elif proxy_type == PROXY_TYPE_SOCKS4:
        # socks4a delegates DNS resolution to the proxy.
        scheme = 'socks4a' if proxy_rdns else 'socks4'
        proxy_url = '{}://{}:{}/'.format(scheme, proxy_host, proxy_port)
        return SOCKSProxyManager(proxy_url, proxy_user, proxy_pass)
    elif proxy_type == PROXY_TYPE_SOCKS5:
        # socks5h delegates DNS resolution to the proxy.
        scheme = 'socks5h' if proxy_rdns else 'socks5'
        proxy_url = '{}://{}:{}/'.format(scheme, proxy_host, proxy_port)
        return SOCKSProxyManager(proxy_url, proxy_user, proxy_pass)
    raise NotImplementedError('Unsupported proxy protocol.')
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT,
             connection=None):
    """Initialize the JSON service proxy.

    :param service_url: full service URL, expected to embed credentials
        (``scheme://user:pass@host``)
    :param service_name: optional method-name prefix for chained calls
    :param timeout: total request timeout in seconds
    :param connection: accepted for interface compatibility (unused here)
    """
    self.__service_url = service_url
    self.__service_name = service_name
    self.__url = urlparse.urlparse(service_url)
    # NOTE(review): if the URL carries no credentials, username/password are
    # None and this produces the literal 'None:None' — confirm callers always
    # embed credentials in service_url.
    self.__auth = '%s:%s' % (self.__url.username, self.__url.password)
    self.__headers = make_headers(keep_alive=True, user_agent=USER_AGENT,
                                  basic_auth=self.__auth)
    # BUGFIX: the header key was 'Content-Type:' (stray trailing colon),
    # which would emit the malformed header "Content-Type:: application/json".
    self.__headers.update({'Content-Type': 'application/json'})
    self.__conn = urllib3.PoolManager(retries=False, timeout=Timeout(timeout))
def test_reuse_conn(self):
    # Test pool.request() connection reusal:
    # 1. Create a new connection.
    # 2. Perform a request which will succeed.
    # 3. Reuse the connection - delay the response with original pool settings.
    # 4. Reuse same connection for a successful read request with delay in response.
    # 5. Reuse same connection for a timeout read request.
    #
    # NOTE: this test depends on a cooperating test server whose response
    # delay is controlled via self.set_block_response(); timings below are
    # tuned against the pool timeouts and must be kept in sync.

    # Create the connection pool with default timeouts long enough to connect and read.
    self.set_block_response(None)
    timeout = Timeout(connect=1., read=4)
    headers = make_headers(keep_alive=True)
    pool = HTTPSConnectionPool(self.host, self.port, timeout=timeout,
                               headers=headers, retries=False)
    # First request - direct with pool._make_request() with delay=conn_timeout+read_timeout-0.5
    delay = timeout.connect_timeout + timeout.read_timeout - 0.5
    self.set_block_response(delay)
    conn = pool._get_conn()
    # A freshly created connection has no socket yet.
    self.assertIsNone(conn.sock)
    pool._make_request(conn, 'GET', '/')
    self.set_block_response(None)
    # Make a request - it must succeed
    pool.request('GET', '/')
    # Reuse the connection - successful read request with delayed response.
    # * Ensure that new connection is not created by using a short connect timeout with
    #   pool._make_request.
    # * Use a read timeout which will be larger than the pool's connect timeout but shorter
    #   than the pool's read timeout
    timeout = Timeout(connect=SHORT_TIMEOUT, read=2.5)
    delay = 1.1
    # Check that the timeouts are as intended
    self.assertLess(timeout.connect_timeout, pool.timeout.connect_timeout)
    self.assertLess(pool.timeout.connect_timeout, delay)
    self.assertLess(delay, timeout.read_timeout)
    self.assertLess(timeout.read_timeout, pool.timeout.read_timeout)
    # Make the request
    self.set_block_response(delay)
    pool.request('GET', '/', timeout=timeout)
    # Reuse the connection - timeout read request
    delay = timeout.read_timeout + 1
    self.set_block_response(delay)
    now = time.time()
    with self.assertRaises(ReadTimeoutError) as cmgr:
        pool.request('GET', '/', timeout=timeout)
    delta = time.time() - now
    # The error message is expected to end with "... timeout=<read_timeout>)".
    self.assertEqual(cmgr.exception.args[0].split()[-1],
                     'timeout=%s)' % timeout.read_timeout)
    # The request should have failed close to the per-request read timeout,
    # not the pool default.
    self.assertAlmostEqual(delta, timeout.read_timeout, places=1)
    print('delta={}'.format(delta))
def test_make_headers(self):
    """make_headers() should translate every supported keyword into its header."""
    expectations = (
        ({"accept_encoding": True}, {"accept-encoding": "gzip,deflate"}),
        ({"accept_encoding": "foo,bar"}, {"accept-encoding": "foo,bar"}),
        ({"accept_encoding": ["foo", "bar"]}, {"accept-encoding": "foo,bar"}),
        (
            {"accept_encoding": True, "user_agent": "banana"},
            {"accept-encoding": "gzip,deflate", "user-agent": "banana"},
        ),
        ({"user_agent": "banana"}, {"user-agent": "banana"}),
        ({"keep_alive": True}, {"connection": "keep-alive"}),
        ({"basic_auth": "foo:bar"}, {"authorization": "Basic Zm9vOmJhcg=="}),
        (
            {"proxy_basic_auth": "foo:bar"},
            {"proxy-authorization": "Basic Zm9vOmJhcg=="},
        ),
        ({"disable_cache": True}, {"cache-control": "no-cache"}),
    )
    for params, want in expectations:
        self.assertEqual(make_headers(**params), want)
def test_make_headers(self):
    """Exercise every make_headers() option and compare against known dicts."""
    results = [
        (make_headers(accept_encoding=True),
         {'accept-encoding': 'gzip,deflate'}),
        (make_headers(accept_encoding='foo,bar'),
         {'accept-encoding': 'foo,bar'}),
        (make_headers(accept_encoding=['foo', 'bar']),
         {'accept-encoding': 'foo,bar'}),
        (make_headers(accept_encoding=True, user_agent='banana'),
         {'accept-encoding': 'gzip,deflate', 'user-agent': 'banana'}),
        (make_headers(user_agent='banana'),
         {'user-agent': 'banana'}),
        (make_headers(keep_alive=True),
         {'connection': 'keep-alive'}),
        (make_headers(basic_auth='foo:bar'),
         {'authorization': 'Basic Zm9vOmJhcg=='}),
        (make_headers(proxy_basic_auth='foo:bar'),
         {'proxy-authorization': 'Basic Zm9vOmJhcg=='}),
        (make_headers(disable_cache=True),
         {'cache-control': 'no-cache'}),
    ]
    for got, want in results:
        self.assertEqual(got, want)
def __init__(self, max_reusable_connections=8, mock_urlopen=None):
    """
    Parameters
        max_reusable_connections
            max connections to keep alive in the pool
        mock_urlopen
            an optional alternate urlopen function for testing

    This class uses ``urllib3`` to maintain a pool of connections. We attempt
    to grab an existing idle connection from the pool, otherwise we spin
    up a new connection. Once a connection is closed, it is reinserted
    into the pool (unless the pool is full).

    SSL settings:
    - Certificates validated using Dropbox-approved trusted root certs
    - TLS v1.0 (newer TLS versions are not supported by urllib3)
    - Default ciphersuites. Choosing ciphersuites is not supported by urllib3
    - Hostname verification is provided by urllib3
    """
    self.mock_urlopen = mock_urlopen
    # NOTE(review): environment selection by hardcoded hostname is fragile —
    # prefer a config flag or environment variable.
    if socket.gethostname()=='NIGSA291604':
        # SECURITY: proxy credentials are hardcoded in source (and the "\p"
        # in the username is an invalid escape sequence, kept literally by
        # Python) — these must be moved to secure configuration.
        # SECURITY: cert_reqs is commented out on this branch, so server
        # certificates are NOT validated when going through the proxy,
        # despite ca_certs being supplied.
        self.pool_manager = urllib3.ProxyManager(
            num_pools=4,  # only a handful of hosts. api.dropbox.com, api-content.dropbox.com
            maxsize=max_reusable_connections,
            block=False,
            timeout=60.0,  # long enough so datastores await doesn't get interrupted
            #cert_reqs=ssl.CERT_REQUIRED,
            ca_certs=TRUSTED_CERT_FILE,
            ssl_version=ssl.PROTOCOL_TLSv1,
            proxy_headers=make_headers(keep_alive=True,
                                       user_agent="Mozilla/5.0",
                                       proxy_basic_auth="aur\p729465:Jul4132!"),
            proxy_url='http://10.7.33.71:8080',
        )
    else:
        self.pool_manager = urllib3.PoolManager(
            num_pools=4,  # only a handful of hosts. api.dropbox.com, api-content.dropbox.com
            maxsize=max_reusable_connections,
            block=False,
            timeout=60.0,  # long enough so datastores await doesn't get interrupted
            cert_reqs=ssl.CERT_REQUIRED,
            ca_certs=TRUSTED_CERT_FILE,
            ssl_version=ssl.PROTOCOL_TLSv1,
        )
def test_make_headers(self, kwargs, expected):
    """Parametrized: make_headers(**kwargs) must equal the expected dict."""
    produced = make_headers(**kwargs)
    assert produced == expected
def test_make_headers(self, kwargs: Dict[str, Union[bool, str]], expected: Dict[str, str]) -> None:
    """Parametrized: building headers from kwargs yields exactly `expected`."""
    built = make_headers(**kwargs)  # type: ignore[arg-type]
    assert built == expected