class HttpClient(BaseClient):
    CONNECTION_CLS = urllib3.HTTPSConnectionPool
    PREFIX_SCHEME = "https://"
    BASE_HEADERS = urllib3.make_headers(
        keep_alive=True, accept_encoding=True, user_agent=USER_AGENT
    )

    def __init__(
        self,
        host,
        port=443,
        proxy_scheme=None,
        proxy_host=None,
        proxy_port=None,
        proxy_user=None,
        proxy_pass=None,
        timeout=None,
        ca_bundle_path=None,
        disable_certificate_validation=False,
        compression_threshold=64 * 1024,
        compression_level=None,
        compression_method="gzip",
        max_payload_size_in_bytes=1000000,
        audit_log_fp=None,
    ):
        self._host = host
        port = self._port = port
        self._compression_threshold = compression_threshold
        self._compression_level = compression_level
        self._compression_method = compression_method
        self._max_payload_size_in_bytes = max_payload_size_in_bytes
        self._audit_log_fp = audit_log_fp

        self._prefix = ""

        self._headers = dict(self.BASE_HEADERS)
        self._connection_kwargs = connection_kwargs = {
            "timeout": timeout,
        }
        self._urlopen_kwargs = urlopen_kwargs = {}

        if self.CONNECTION_CLS.scheme == "https":
            if not ca_bundle_path:
                verify_path = get_default_verify_paths()

                # If there is no resolved cafile, assume the bundled certs are
                # required and report this condition as a supportability metric.
                if not verify_path.cafile:
                    ca_bundle_path = certs.where()
                    internal_metric(
                        "Supportability/Python/Certificate/BundleRequired", 1
                    )

            if ca_bundle_path:
                if os.path.isdir(ca_bundle_path):
                    connection_kwargs["ca_cert_dir"] = ca_bundle_path
                else:
                    connection_kwargs["ca_certs"] = ca_bundle_path

            if disable_certificate_validation:
                connection_kwargs["cert_reqs"] = "NONE"

        proxy = self._parse_proxy(
            proxy_scheme,
            proxy_host,
            proxy_port,
            proxy_user,
            proxy_pass,
        )
        proxy_headers = (
            proxy
            and proxy.auth
            and urllib3.make_headers(proxy_basic_auth=proxy.auth)
        )

        if proxy:
            if self.CONNECTION_CLS.scheme == "https" and proxy.scheme != "https":
                connection_kwargs["_proxy"] = proxy
                connection_kwargs["_proxy_headers"] = proxy_headers
            else:
                self._host = proxy.host
                self._port = proxy.port or 443
                self._prefix = self.PREFIX_SCHEME + host + ":" + str(port)
                urlopen_kwargs["assert_same_host"] = False
                if proxy_headers:
                    self._headers.update(proxy_headers)

        # Logging
        self._proxy = proxy

        self._connection_attr = None

    @staticmethod
    def _parse_proxy(scheme, host, port, username, password):
        # Users may specify a full URL for the host.
        # In this case, the URL is used as a starting point to build up the URL.
        components = urllib3.util.parse_url(host)

        scheme = components.scheme or scheme or None
        host = components.host or host or None
        port = components.port or port or None

        if components.auth:
            auth = components.auth
        else:
            auth = username
            if auth and password is not None:
                auth = auth + ":" + password

        # Host must be defined
        if not host:
            return

        # At least one of (scheme, port) must be defined
        if not scheme and not port:
            return

        return urllib3.util.Url(scheme=scheme, auth=auth, host=host, port=port)

    def __enter__(self):
        self._connection.__enter__()
        return self

    def __exit__(self, exc, value, tb):
        if self._connection_attr:
            self._connection_attr.__exit__(exc, value, tb)
            self._connection_attr = None

    @property
    def _connection(self):
        if self._connection_attr:
            return self._connection_attr

        retries = urllib3.Retry(
            total=False, connect=None, read=None, redirect=0, status=None
        )
        self._connection_attr = self.CONNECTION_CLS(
            self._host,
            self._port,
            strict=True,
            retries=retries,
            **self._connection_kwargs
        )
        return self._connection_attr

    def close_connection(self):
        if self._connection_attr:
            self._connection_attr.close()
        self._connection_attr = None

    def log_request(
        self,
        fp,
        method,
        url,
        params,
        payload,
        headers,
        body=None,
        compression_time=None,
    ):
        if not self._prefix:
            url = self.CONNECTION_CLS.scheme + "://" + self._host + url

        return super(HttpClient, self).log_request(
            fp, method, url, params, payload, headers, body, compression_time
        )

    @staticmethod
    def _compress(data, method="gzip", level=None):
        compression_start = time.time()
        level = level or zlib.Z_DEFAULT_COMPRESSION
        # wbits=31 selects a gzip container; wbits=15 selects a zlib (deflate)
        # container.
        wbits = 31 if method == "gzip" else 15
        compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
        data = compressor.compress(data)
        data += compressor.flush()
        compression_time = time.time() - compression_start

        return data, compression_time

    def send_request(
        self,
        method="POST",
        path="/agent_listener/invoke_raw_method",
        params=None,
        headers=None,
        payload=None,
    ):
        if self._proxy:
            proxy_scheme = self._proxy.scheme or "http"
            connection = proxy_scheme + "-proxy"
        else:
            connection = "direct"

        merged_headers = dict(self._headers)
        if headers:
            merged_headers.update(headers)

        path = self._prefix + path
        body = payload
        compression_time = None
        if payload is not None:
            # Only compress payloads that exceed the configured threshold;
            # smaller payloads are sent as-is.
            if len(payload) > self._compression_threshold:
                body, compression_time = self._compress(
                    payload,
                    method=self._compression_method,
                    level=self._compression_level,
                )
                content_encoding = self._compression_method
            else:
                content_encoding = "Identity"

            merged_headers["Content-Encoding"] = content_encoding

        request_id = self.log_request(
            self._audit_log_fp,
            "POST",
            path,
            params,
            payload,
            merged_headers,
            body,
            compression_time,
        )

        if body and len(body) > self._max_payload_size_in_bytes:
            return 413, b""

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            try:
                response = self._connection.request_encode_url(
                    method,
                    path,
                    fields=params,
                    body=body,
                    headers=merged_headers,
                    **self._urlopen_kwargs
                )
            except urllib3.exceptions.HTTPError as e:
                self.log_response(
                    self._audit_log_fp,
                    request_id,
                    0,
                    None,
                    None,
                    connection,
                )
                # All urllib3 HTTP errors should be treated as a network
                # interface exception.
                raise NetworkInterfaceException(e)

        self.log_response(
            self._audit_log_fp,
            request_id,
            response.status,
            response.headers,
            response.data,
            connection,
        )

        return response.status, response.data
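
if __name__ == "__main__":
    # Illustrative sketch only, not part of the original module: it exercises
    # the pure compression helper and shows how a caller might construct the
    # client. The host below is a hypothetical placeholder and no request is
    # actually sent; a real exchange also depends on this module's other
    # definitions (BaseClient, USER_AGENT, certs, internal_metric,
    # NetworkInterfaceException, get_default_verify_paths).
    payload = b"x" * (128 * 1024)  # larger than the 64 KiB default threshold
    compressed, elapsed = HttpClient._compress(payload, method="gzip")
    print(
        "compressed %d bytes to %d bytes in %.6fs"
        % (len(payload), len(compressed), elapsed)
    )

    # Constructing the client does not open a connection; the underlying
    # urllib3 pool is created lazily on first use of self._connection.
    client = HttpClient("collector.example.com", port=443, timeout=30.0)

    # A real request would look like the following, and requires a reachable
    # endpoint, so it is left commented out here:
    #
    #   with client:
    #       status, data = client.send_request(
    #           method="POST",
    #           path="/agent_listener/invoke_raw_method",
    #           params={"method": "preconnect"},
    #           payload=b"[]",
    #       )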