def __init__(self, swift_config):
    """Initialize the OpenStack Swift storage client.

    Reuses a previously cached token/endpoint from *swift_config* when
    available; otherwise authenticates once and writes the fresh
    credentials back into the config dict so later clients can reuse them.
    """
    logger.debug("Creating OpenStack Swift client")

    self.auth_url = swift_config['swift_auth_url']
    self.user_id = swift_config['swift_user_id']
    self.project_id = swift_config['swift_project_id']
    self.password = swift_config['swift_password']
    self.region = swift_config['swift_region']
    self.endpoint = None

    if 'token' not in swift_config:
        # First use: authenticate, then cache token/endpoint for reuse.
        # NOTE(review): generate_swift_token presumably also sets
        # self.endpoint — confirm in its implementation.
        self.token = self.generate_swift_token()
        swift_config['token'] = self.token
        swift_config['endpoint'] = self.endpoint
    else:
        self.token = swift_config['token']
        self.endpoint = swift_config['endpoint']

    self.session = requests.session()
    self.session.headers.update({'X-Auth-Token': self.token})

    # Shared HTTP adapter: bigger pool for parallel requests, retries on
    # transient failures, mounted for both schemes.
    http_adapter = requests.adapters.HTTPAdapter(pool_maxsize=64,
                                                 max_retries=3)
    for scheme in ('http://', 'https://'):
        self.session.mount(scheme, http_adapter)

    msg = STORAGE_CLI_MSG.format('OpenStack Swift')
    logger.info("{} - Region: {}".format(msg, self.region))
def __init__(self, ceph_config):
    """Initialize the Ceph (S3-compatible) storage client.

    Builds an ibm_boto3 S3 client against the configured Ceph endpoint
    using static access/secret key credentials.
    """
    logger.debug("Creating Ceph client")
    self.ceph_config = ceph_config
    user_agent = ceph_config['user_agent']
    service_endpoint = ceph_config.get('endpoint')
    # FIX: corrected typo "Seting" -> "Setting" in the debug message
    logger.debug("Setting Ceph endpoint to {}".format(service_endpoint))

    logger.debug("Using access_key and secret_key")
    access_key = ceph_config.get('access_key')
    secret_key = ceph_config.get('secret_key')

    # Large pool + explicit timeouts/retries for highly parallel workloads
    client_config = ibm_botocore.client.Config(
        max_pool_connections=128,
        user_agent_extra=user_agent,
        connect_timeout=CONN_READ_TIMEOUT,
        read_timeout=CONN_READ_TIMEOUT,
        retries={'max_attempts': OBJ_REQ_RETRIES})

    self.cos_client = ibm_boto3.client('s3',
                                       aws_access_key_id=access_key,
                                       aws_secret_access_key=secret_key,
                                       config=client_config,
                                       endpoint_url=service_endpoint)

    msg = STORAGE_CLI_MSG.format('Ceph')
    logger.info("{} - Endpoint: {}".format(msg, service_endpoint))
def __init__(self, s3_config):
    """Initialize a generic S3-compatible storage client.

    Creates a boto3 S3 client from static credentials against the
    configured endpoint/region, warning when the endpoint is plain HTTP.
    """
    logger.debug("Creating S3 client")
    self.s3_config = s3_config
    self.user_agent = s3_config['user_agent']
    self.service_endpoint = s3_config['endpoint']
    self.region = s3_config['region_name']

    if 'http:' in self.service_endpoint:
        # Plain-HTTP endpoints transmit credentials and data unencrypted
        logger.warning('Endpoint {} is insecure - it is recommended '
                       'to change this to https://'.format(
                           self.service_endpoint))

    # Large pool + explicit timeouts/retries for highly parallel workloads
    botocore_cfg = botocore.client.Config(
        max_pool_connections=128,
        user_agent_extra=self.user_agent,
        connect_timeout=CONN_READ_TIMEOUT,
        read_timeout=CONN_READ_TIMEOUT,
        retries={'max_attempts': OBJ_REQ_RETRIES})

    self.s3_client = boto3.client(
        's3',
        aws_access_key_id=s3_config['access_key_id'],
        aws_secret_access_key=s3_config['secret_access_key'],
        config=botocore_cfg,
        endpoint_url=self.service_endpoint)

    msg = STORAGE_CLI_MSG.format('S3')
    logger.info("{} - Region: {}".format(msg, self.region))
def __init__(self, infinispan_config):
    """Initialize the Infinispan Hotrod storage client.

    Connects to the configured server (default port 11222), authenticates
    via DIGEST-MD5 SASL, and ensures all configured caches exist.
    """
    logger.debug("Creating Infinispan Hotrod storage client")
    self.infinispan_config = infinispan_config

    # FIX: store the endpoint on the instance. Previously it was only
    # split into a local variable, so the final logger.info() raised
    # AttributeError when reading the missing `self.endpoint`.
    self.endpoint = infinispan_config.get('endpoint')

    conf = Infinispan.Configuration()
    host_port = self.endpoint.split(":")
    # Fall back to the default Hotrod port when none is specified
    conf.addServer(host_port[0],
                   int(host_port[1]) if len(host_port) > 1 else 11222)
    conf.setProtocol("2.8")
    conf.setSasl("DIGEST-MD5", "node0",
                 infinispan_config.get('username'),
                 infinispan_config.get('password'))
    self.conf = conf

    self.cacheManager = Infinispan.RemoteCacheManager(conf)
    self.cacheManager.start()
    self.cacheManagerAdmin = Infinispan.RemoteCacheManagerAdmin(
        self.cacheManager)

    self.basicAuth = HTTPDigestAuth(infinispan_config.get('username'),
                                    infinispan_config.get('password'))
    self.cache_names = infinispan_config.get('cache_names', ['storage'])
    self.cache_type = infinispan_config.get('cache_type',
                                            'org.infinispan.DIST_SYNC')
    self.infinispan_client = requests.session()

    # Make sure every configured cache exists before first use
    self.caches = {}
    for cache_name in self.cache_names:
        self.__create_cache(cache_name, self.cache_type)

    msg = STORAGE_CLI_MSG.format('Infinispan_hotrod')
    logger.info("{} - Endpoint: {}".format(msg, self.endpoint))
def __init__(self, config):
    """Initialize the Redis storage client.

    Forwards the whole config dict to redis.StrictRedis after removing
    the Lithops-specific 'user_agent' key.
    """
    logger.debug("Creating Redis storage client")
    # 'user_agent' is not a redis-py option; drop it before forwarding
    config.pop('user_agent', None)
    self.config = config
    self.host = config['host']
    self._client = redis.StrictRedis(**self.config)
    logger.info("{} - Host: {}".format(STORAGE_CLI_MSG.format('Redis'),
                                       self.host))
def __init__(self, gcp_storage_config):
    """Initialize the GCP Storage client.

    Prefers explicit service-account credentials from the configured JSON
    file; on any failure falls back to the ambient credentials of the GCP
    runtime environment.
    """
    logger.debug("Creating GCP Storage client")
    self.credentials_path = gcp_storage_config['credentials_path']
    try:
        # Explicit service-account credentials from the JSON file
        self.client = storage.Client.from_service_account_json(
            self.credentials_path)
    except Exception:
        # Running inside a GCP function: use the environment's credentials
        self.client = storage.Client()
    msg = STORAGE_CLI_MSG.format('GCP')
    logger.info("{}".format(msg))
def __init__(self, azure_blob_config):
    """Initialize the Azure Blob Storage client.

    Derives the blob service URL from the storage account name and
    authenticates with the configured account key.
    """
    logger.debug("Creating Azure Blob Storage client")
    account_name = azure_blob_config['storage_account_name']
    self.storage_account_name = account_name
    self.blob_service_url = 'https://{}.blob.core.windows.net'.format(
        account_name)
    self.blob_client = BlobServiceClient(
        account_url=self.blob_service_url,
        credential=azure_blob_config['storage_account_key'])
    msg = STORAGE_CLI_MSG.format('Azure Blob')
    logger.info("{}".format(msg))
def __init__(self, config):
    """Initialize the Alibaba (Aliyun) Object Storage client.

    Picks the internal endpoint when running inside a Lithops worker and
    the public endpoint otherwise.
    """
    logger.debug("Creating Alibaba Object Storage client")
    self.config = config
    self.auth = oss2.Auth(config['access_key_id'],
                          config['access_key_secret'])

    # Workers reach OSS over the provider's internal network; everything
    # else must go through the public endpoint.
    endpoint_key = ('internal_endpoint' if is_lithops_worker()
                    else 'public_endpoint')
    self.endpoint = self.config[endpoint_key]

    # Connection pool size in aliyun_oss must be updated to avoid
    # "connection pool is full" type errors.
    oss2.defaults.connection_pool_size = CONNECTION_POOL_SIZE

    msg = STORAGE_CLI_MSG.format('Alibaba Object Storage')
    logger.info("{} - Endpoint: {}".format(msg, self.endpoint))
def __init__(self, s3_config):
    """Initialize the AWS S3 storage client.

    Upgrades the configured endpoint to HTTPS and authenticates with the
    static access/secret key pair.
    """
    logger.debug("Creating S3 client")
    # Force TLS even when the configured endpoint is plain http
    service_endpoint = s3_config.get('endpoint').replace('http:', 'https:')
    logger.debug('AWS S3 using access_key_id and secret_access_key')

    boto_cfg = botocore.client.Config(max_pool_connections=128,
                                      user_agent_extra='cloudbutton',
                                      connect_timeout=1)
    self.s3_client = boto3.client(
        's3',
        aws_access_key_id=s3_config['access_key_id'],
        aws_secret_access_key=s3_config['secret_access_key'],
        config=boto_cfg,
        endpoint_url=service_endpoint)

    msg = STORAGE_CLI_MSG.format('S3')
    logger.info("{} - Endpoint: {}".format(msg, service_endpoint))
def __init__(self, infinispan_config):
    """Initialize the Infinispan REST storage client.

    Verifies the server version, ensures the configured cache exists,
    and prepares the HTTP session/headers for binary key-value access.
    """
    logger.debug("Creating Infinispan storage client")
    self.infinispan_config = infinispan_config

    username = infinispan_config.get('username')
    password = infinispan_config.get('password')
    self.basicAuth = HTTPBasicAuth(username, password)

    self.endpoint = infinispan_config.get('endpoint')
    self.cache_name = infinispan_config.get('cache_name', 'default')
    self.cache_type = infinispan_config.get('cache_type',
                                            'org.infinispan.DIST_SYNC')
    self.infinispan_client = requests.session()

    # Fail fast on unsupported servers, then make sure the target cache
    # exists before any operation hits it.
    self.__is_server_version_supported()
    self.__create_cache(self.cache_name, self.cache_type)

    self.headers = {
        "Content-Type": "application/octet-stream",
        "Key-Content-Type": "application/octet-stream;encoding=base64"
    }

    msg = STORAGE_CLI_MSG.format('Infinispan')
    logger.info("{} - Endpoint: {}".format(msg, self.endpoint))
def __init__(self, minio_config):
    """Initialize the MinIO storage client.

    Creates a boto3 S3 client against the configured MinIO endpoint with
    static access/secret key credentials.
    """
    logger.debug("Creating MinIO client")
    self.minio_config = minio_config
    agent = minio_config['user_agent']
    endpoint_url = minio_config['endpoint']
    logger.debug("Setting MinIO endpoint to {}".format(endpoint_url))

    # Large pool + explicit timeouts/retries for highly parallel workloads
    boto_cfg = botocore.client.Config(
        max_pool_connections=128,
        user_agent_extra=agent,
        connect_timeout=CONN_READ_TIMEOUT,
        read_timeout=CONN_READ_TIMEOUT,
        retries={'max_attempts': OBJ_REQ_RETRIES})

    self.s3_client = boto3.client(
        's3',
        aws_access_key_id=minio_config['access_key_id'],
        aws_secret_access_key=minio_config['secret_access_key'],
        config=boto_cfg,
        endpoint_url=endpoint_url)

    msg = STORAGE_CLI_MSG.format('MinIO')
    logger.info("{} - Endpoint: {}".format(msg, endpoint_url))
def __init__(self, localhost_config):
    """Initialize the Localhost storage client.

    No remote connection is needed; only the config is stored.
    """
    logger.debug("Creating Localhost storage client")
    self.localhost_config = localhost_config
    logger.info("{}".format(STORAGE_CLI_MSG.format('Localhost')))
def __init__(self, gcfs_config):
    """Initialize the gcsfs storage client for the configured project."""
    logger.debug("Creating gcsfs storage client")
    self.config = gcfs_config
    project = gcfs_config["project_id"]
    self.fs = gcsfs.GCSFileSystem(project=project)
    logger.info("{}".format(STORAGE_CLI_MSG.format('gcfs')))
def __init__(self, ibm_cos_config):
    """Initialize the IBM COS storage client.

    Supports two authentication modes:
      * static HMAC credentials ('access_key' + 'secret_key'), or
      * an API key ('api_key' for COS keys, 'iam_api_key' for IAM keys)
        exchanged for an OAuth token via IBMTokenManager.
    The obtained token and its expiry are cached back into the config
    dict so workers can reuse them without re-authenticating.
    """
    logger.debug("Creating IBM COS client")
    self.ibm_cos_config = ibm_cos_config
    self.region = self.ibm_cos_config['region']
    self.is_lithops_worker = is_lithops_worker()
    user_agent = self.ibm_cos_config['user_agent']

    # Detect which kind of API key (if any) is configured; 'api_key'
    # takes precedence over 'iam_api_key'.
    api_key = None
    if 'api_key' in self.ibm_cos_config:
        api_key = self.ibm_cos_config.get('api_key')
        api_key_type = 'COS'
    elif 'iam_api_key' in self.ibm_cos_config:
        api_key = self.ibm_cos_config.get('iam_api_key')
        api_key_type = 'IAM'

    # Force TLS on the public endpoint
    service_endpoint = self.ibm_cos_config.get('endpoint').replace(
        'http:', 'https:')
    if self.is_lithops_worker and 'private_endpoint' in self.ibm_cos_config:
        # Workers prefer the private endpoint; it is only upgraded to
        # https when API-key (OAuth) auth is in use.
        service_endpoint = self.ibm_cos_config.get('private_endpoint')
        if api_key:
            service_endpoint = service_endpoint.replace('http:', 'https:')
    logger.debug("Set IBM COS Endpoint to {}".format(service_endpoint))

    if {'secret_key', 'access_key'} <= set(self.ibm_cos_config):
        # Static HMAC credentials
        logger.debug("Using access_key and secret_key")
        access_key = self.ibm_cos_config.get('access_key')
        secret_key = self.ibm_cos_config.get('secret_key')
        client_config = ibm_botocore.client.Config(
            max_pool_connections=128,
            user_agent_extra=user_agent,
            connect_timeout=CONN_READ_TIMEOUT,
            read_timeout=CONN_READ_TIMEOUT,
            retries={'max_attempts': OBJ_REQ_RETRIES})

        self.cos_client = ibm_boto3.client(
            's3',
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            config=client_config,
            endpoint_url=service_endpoint)

    elif api_key is not None:
        # OAuth token flow via the IAM/COS token manager
        client_config = ibm_botocore.client.Config(
            signature_version='oauth',
            max_pool_connections=128,
            user_agent_extra=user_agent,
            connect_timeout=CONN_READ_TIMEOUT,
            read_timeout=CONN_READ_TIMEOUT,
            retries={'max_attempts': OBJ_REQ_RETRIES})

        # Reuse a previously cached token when present so workers avoid
        # an extra round-trip to the token service.
        token = self.ibm_cos_config.get('token', None)
        token_expiry_time = self.ibm_cos_config.get(
            'token_expiry_time', None)

        iam_token_manager = IBMTokenManager(api_key, api_key_type,
                                            token, token_expiry_time)
        token, token_expiry_time = iam_token_manager.get_token()
        # Cache the (possibly refreshed) token back into the config
        self.ibm_cos_config['token'] = token
        self.ibm_cos_config['token_expiry_time'] = token_expiry_time

        # NOTE(review): if neither HMAC keys nor an api_key are present,
        # self.cos_client is never assigned — confirm upstream config
        # validation guarantees one of the two modes.
        self.cos_client = ibm_boto3.client(
            's3', token_manager=iam_token_manager._token_manager,
            config=client_config,
            endpoint_url=service_endpoint)

    msg = STORAGE_CLI_MSG.format('IBM COS')
    logger.info("{} - Region: {}".format(msg, self.region))