def test_client_passes_through_arguments(self):
    """Verify ibm_boto3.client forwards every argument to the session's client()."""
    session = self.Session()
    ibm_boto3.DEFAULT_SESSION = session
    ibm_boto3.client('s3', region_name='us-west-2', verify=False)
    session.client.assert_called_with(
        's3', region_name='us-west-2', verify=False)
def __init__(self, ibm_cos_config, **kwargs):
    """Create an IBM COS client from *ibm_cos_config*.

    Authentication is selected from the config keys: HMAC credentials
    ('access_key' + 'secret_key') take priority; otherwise a COS
    ('api_key') or IAM ('iam_api_key') API key is used via oauth tokens.
    """
    logger.debug("Creating IBM COS client")
    self.ibm_cos_config = ibm_cos_config
    self.is_lithops_worker = is_lithops_worker()
    user_agent = self.ibm_cos_config['user_agent']

    # Pick the API key and remember which token flavour it authenticates.
    api_key = None
    if 'api_key' in self.ibm_cos_config:
        api_key = self.ibm_cos_config.get('api_key')
        api_key_type = 'COS'
    elif 'iam_api_key' in self.ibm_cos_config:
        api_key = self.ibm_cos_config.get('iam_api_key')
        api_key_type = 'IAM'

    # Force https on the public endpoint; inside a worker prefer the
    # private endpoint when one is configured.
    service_endpoint = self.ibm_cos_config.get('endpoint').replace('http:', 'https:')
    if self.is_lithops_worker and 'private_endpoint' in self.ibm_cos_config:
        service_endpoint = self.ibm_cos_config.get('private_endpoint')
        if api_key:
            # assumes token auth requires https on the private endpoint
            # as well -- TODO confirm
            service_endpoint = service_endpoint.replace('http:', 'https:')
    logger.debug("Set IBM COS Endpoint to {}".format(service_endpoint))

    if {'secret_key', 'access_key'} <= set(self.ibm_cos_config):
        # HMAC-style authentication.
        logger.debug("Using access_key and secret_key")
        access_key = self.ibm_cos_config.get('access_key')
        secret_key = self.ibm_cos_config.get('secret_key')
        client_config = ibm_botocore.client.Config(max_pool_connections=128,
                                                   user_agent_extra=user_agent,
                                                   connect_timeout=CONN_READ_TIMEOUT,
                                                   read_timeout=CONN_READ_TIMEOUT,
                                                   retries={'max_attempts': OBJ_REQ_RETRIES})
        self.cos_client = ibm_boto3.client('s3',
                                           aws_access_key_id=access_key,
                                           aws_secret_access_key=secret_key,
                                           config=client_config,
                                           endpoint_url=service_endpoint)
    elif api_key is not None:
        # Token (oauth) authentication via IBMTokenManager; the obtained
        # token and its expiry are cached back into the config so other
        # processes/workers can reuse them without re-authenticating.
        client_config = ibm_botocore.client.Config(signature_version='oauth',
                                                   max_pool_connections=128,
                                                   user_agent_extra=user_agent,
                                                   connect_timeout=CONN_READ_TIMEOUT,
                                                   read_timeout=CONN_READ_TIMEOUT,
                                                   retries={'max_attempts': OBJ_REQ_RETRIES})
        token = self.ibm_cos_config.get('token', None)
        token_expiry_time = self.ibm_cos_config.get('token_expiry_time', None)
        iam_token_manager = IBMTokenManager(api_key, api_key_type,
                                            token, token_expiry_time)
        token, token_expiry_time = iam_token_manager.get_token()
        self.ibm_cos_config['token'] = token
        self.ibm_cos_config['token_expiry_time'] = token_expiry_time
        self.cos_client = ibm_boto3.client('s3',
                                           token_manager=iam_token_manager._token_manager,
                                           config=client_config,
                                           endpoint_url=service_endpoint)
    # NOTE(review): if none of the credential keys are present,
    # self.cos_client is never assigned -- presumably the config is
    # validated upstream; verify against the caller.
    logger.info("IBM COS client created successfully")
def test_client_uses_existing_session(self, setup_session):
    """An existing DEFAULT_SESSION must be reused, not recreated."""
    session = self.Session()
    ibm_boto3.DEFAULT_SESSION = session
    ibm_boto3.client('sqs')
    assert not setup_session.called
    assert session.client.called
def __init__(self, cos_config):
    """Create an IBM COS client from *cos_config*.

    Supports oauth authentication via an 'api_key' (with an optional
    pre-fetched 'token'/'token_expiry_time' pair cached in the config),
    or HMAC authentication via 'access_key' + 'secret_key'.
    """
    # The endpoint is always forced to https.
    service_endpoint = cos_config.get('endpoint').replace('http:', 'https:')
    if 'api_key' in cos_config:
        client_config = ibm_botocore.client.Config(signature_version='oauth',
                                                   max_pool_connections=200,
                                                   user_agent_extra='pywren-ibm-cloud')
        api_key = cos_config.get('api_key')
        token_manager = DefaultTokenManager(api_key_id=api_key)
        if 'token' in cos_config:
            # Reuse a previously obtained token; the expiry string format
            # must match the strftime format used below when caching it.
            token_manager._token = cos_config['token']
            expiry_time = cos_config['token_expiry_time']
            token_manager._expiry_time = datetime.strptime(expiry_time,
                                                           '%Y-%m-%d %H:%M:%S.%f%z')
        self.cos_client = ibm_boto3.client('s3',
                                           token_manager=token_manager,
                                           config=client_config,
                                           endpoint_url=service_endpoint)
        if 'token' not in cos_config:
            # First use: fetch a token and cache it (plus its expiry) back
            # into the config for subsequent clients.
            cos_config['token'] = token_manager.get_token()
            cos_config['token_expiry_time'] = token_manager._expiry_time.strftime('%Y-%m-%d %H:%M:%S.%f%z')
    elif {'secret_key', 'access_key'} <= set(cos_config):
        # HMAC-style authentication.
        secret_key = cos_config.get('secret_key')
        access_key = cos_config.get('access_key')
        client_config = ibm_botocore.client.Config(max_pool_connections=200,
                                                   user_agent_extra='pywren-ibm-cloud')
        self.cos_client = ibm_boto3.client('s3',
                                           aws_access_key_id=access_key,
                                           aws_secret_access_key=secret_key,
                                           config=client_config,
                                           endpoint_url=service_endpoint)
def test_client_creates_default_session(self, setup_session):
    """client() must set up a default session when none exists yet."""
    ibm_boto3.DEFAULT_SESSION = None
    ibm_boto3.client('sqs')
    session = ibm_boto3.DEFAULT_SESSION
    assert setup_session.called
    assert session.client.called
def test_client_uses_existing_session(self, setup_session):
    """An existing DEFAULT_SESSION must be reused, not recreated."""
    session = self.Session()
    ibm_boto3.DEFAULT_SESSION = session
    ibm_boto3.client('s3')
    self.assertFalse(setup_session.called,
                     'setup_default_session should not have been called')
    self.assertTrue(session.client.called,
                    'Default session client method not called')
def __init__(self, cos_config):
    """Create an IBM COS S3 client from *cos_config*.

    The service endpoint is taken directly from 'cos_endpoint', or
    resolved from the published endpoints document ('cos_endpoints' URL
    + 'cos_region').  Authentication uses 'cos_api_key' (oauth, token
    cached back into the config) or HMAC 'cos_access_key'/'cos_secret_key'.
    """
    if 'cos_endpoint' in cos_config:
        service_endpoint = cos_config.get('cos_endpoint').replace(
            'http:', 'https:')
    elif {'cos_endpoints', 'cos_region'} <= set(cos_config):
        endpoints = requests.get(cos_config.get('cos_endpoints')).json()
        region = cos_config.get('cos_region')
        cross_region = endpoints['service-endpoints']['cross-region']
        # Resolve the host for the configured region; cross-region
        # (us/eu) entries are checked before regional ones.
        # Fixed: the original had the 'regional' elif duplicated verbatim,
        # so the second copy was dead code -- removed.
        if region in cross_region['us']['public']:
            cos_host = cross_region['us']['public'][region]
        elif region in cross_region['eu']['public']:
            cos_host = cross_region['eu']['public'][region]
        elif region in endpoints['service-endpoints']['regional']:
            cos_host = endpoints['service-endpoints']['regional'][region][
                'public'][region]
        # NOTE(review): if the region matches none of the branches above,
        # cos_host is unbound and the next line raises NameError --
        # presumably regions are validated upstream; confirm.
        service_endpoint = 'https://' + cos_host
    if 'cos_api_key' in cos_config:
        client_config = ibm_botocore.client.Config(
            signature_version='oauth', max_pool_connections=200)
        api_key = cos_config.get('cos_api_key')
        token_manager = DefaultTokenManager(api_key_id=api_key)
        if 'cos_token' in cos_config:
            # Reuse a previously fetched token.
            token_manager._token = cos_config.get('cos_token')
        self.cos_client = ibm_boto3.client('s3',
                                           token_manager=token_manager,
                                           config=client_config,
                                           endpoint_url=service_endpoint)
        # Cache the (possibly refreshed) token back into the config.
        cos_config['cos_token'] = token_manager.get_token()
    elif {'cos_secret_key', 'cos_access_key'} <= set(cos_config):
        secret_key = cos_config.get('cos_secret_key')
        access_key = cos_config.get('cos_access_key')
        client_config = ibm_botocore.client.Config(
            max_pool_connections=200)
        self.cos_client = ibm_boto3.client(
            's3',
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            config=client_config,
            endpoint_url=service_endpoint)
def test_client_creates_default_session(self, setup_session):
    """client() must set up a default session when none exists yet."""
    ibm_boto3.DEFAULT_SESSION = None
    ibm_boto3.client('s3')
    self.assertTrue(setup_session.called,
                    'setup_default_session not called')
    session = ibm_boto3.DEFAULT_SESSION
    self.assertTrue(session.client.called,
                    'Default session client method not called')
def getParamsCOS(args):
    """Build an IBM COS oauth client and request params from action *args*.

    Credentials fall back from explicit args to the bound
    '__bx_creds'/'cloud-object-storage' service credentials.
    Returns a dict with the 'cos' client and 'params' ({'bucket': ...}).
    """
    endpoint = args.get('endpoint',
                        'https://s3-api.us-geo.objectstorage.softlayer.net')
    bound_creds = args.get('__bx_creds', {}).get('cloud-object-storage', {})
    api_key_id = args.get('apikey',
                          args.get('apiKeyId', bound_creds.get('apikey', '')))
    service_instance_id = args.get(
        'resource_instance_id',
        args.get('serviceInstanceId',
                 bound_creds.get('resource_instance_id', '')))
    ibm_auth_endpoint = args.get('ibmAuthEndpoint',
                                 'https://iam.ng.bluemix.net/oidc/token')
    cos = ibm_boto3.client('s3',
                           ibm_api_key_id=api_key_id,
                           ibm_service_instance_id=service_instance_id,
                           ibm_auth_endpoint=ibm_auth_endpoint,
                           config=Config(signature_version='oauth'),
                           endpoint_url=endpoint)
    return {'cos': cos, 'params': {'bucket': args['bucket']}}
def test2(container, filename):
    """Download a capacitance CSV from COS and render interactive plots.

    The CSV's first column is time in ms, the remaining one or two columns
    are capacitance channels.  For each channel a value plot and a
    numeric-derivative (rate of change) plot are produced with bokeh and
    rendered into 'plot.html'.

    Fixed: hover tooltip label was misspelled "Capcitance" (4 places).
    """
    cos = ibm_boto3.client('s3',
                           ibm_api_key_id=cos_credentials['apikey'],
                           ibm_service_instance_id=cos_credentials['resource_instance_id'],
                           ibm_auth_endpoint=auth_endpoint,
                           config=Config(signature_version='oauth'),
                           endpoint_url=service_endpoint)
    obj = cos.get_object(Bucket=container, Key=filename)['Body']
    b = io.BytesIO(obj.read())
    df = pd.read_csv(b)
    x = df.iloc[:, 0].values / 1000  # ms -> s
    y = df.iloc[:, 1].values
    # Finite-difference derivative; a leading 0 keeps it aligned with x.
    z = np.diff(y)
    z1 = np.divide(z, np.diff(x))
    z = np.insert(z1, 0, 0, axis=0)
    tools_to_show = "wheel_zoom,pan,box_zoom,reset,save"
    plot1 = figure(title="Total Capacitance (Ch1)", x_axis_label='Time [s]',
                   y_axis_label='Capacitance [pF]', tools=tools_to_show)
    plot1.line(x, y, line_width=1.5)
    plot1.add_tools(HoverTool(tooltips=[("Time", "$x"), ("Capacitance", "$y"), ]))
    plot2 = figure(title="Capacitance Change (Ch1)", x_axis_label='Time [s]',
                   y_axis_label='Cap. Change [pF/s]', tools=tools_to_show)
    plot2.line(x, z1, line_width=1)
    plot2.add_tools(HoverTool(tooltips=[("Time", "$x"), ("Capacitance", "$y"), ]))
    if len(df.columns) == 2:
        # Single channel: 1x2 grid.
        p = gridplot([[plot1, plot2]], plot_width=800, plot_height=400,
                     sizing_mode="scale_width")
    else:
        # Second channel present: repeat the pair, 2x2 grid.
        y2 = df.iloc[:, 2].values
        z2 = np.diff(y2)
        z3 = np.divide(z2, np.diff(x))
        z2 = np.insert(z3, 0, 0, axis=0)
        plot3 = figure(title="Total Capacitance (Ch2)", x_axis_label='Time [s]',
                       y_axis_label='Capacitance [pF]', tools=tools_to_show)
        plot3.line(x, y2, line_width=1.5)
        plot3.add_tools(HoverTool(tooltips=[("Time", "$x"), ("Capacitance", "$y"), ]))
        plot4 = figure(title="Capacitance Change (Ch2)", x_axis_label='Time [s]',
                       y_axis_label='Cap. Change [pF/s]', tools=tools_to_show)
        plot4.line(x, z2, line_width=1)
        plot4.add_tools(HoverTool(tooltips=[("Time", "$x"), ("Capacitance", "$y"), ]))
        p = gridplot([[plot1, plot2], [plot3, plot4]], plot_width=800,
                     plot_height=400, sizing_mode="scale_width")
    script, div = components(p)
    return render_template('plot.html', script=script, div=div)
def GetObjStoContainerInfo(container):
    """List the objects of *container*, attach download URLs, render table.html."""
    cos = ibm_boto3.client('s3',
                           ibm_api_key_id=cos_credentials['apikey'],
                           ibm_service_instance_id=cos_credentials['resource_instance_id'],
                           ibm_auth_endpoint=auth_endpoint,
                           config=Config(signature_version='oauth'),
                           endpoint_url=service_endpoint)
    entries = []
    for entry in cos.list_objects(Bucket=container)['Contents']:
        entry['downloadURL'] = thehost + "/" + container + "/" + entry['Key']
        entries.append(entry)
    return render_template('table.html', objs=entries, container=container)
def __init__(self, *args, **kwargs):
    """Bind identifiers (positionally or by keyword) and attach a client.

    Raises ValueError for unknown keyword arguments and for identifiers
    that remain unset after construction.
    """
    # Work on a copy of meta so sibling instances of the same subclass
    # are not affected by mutations here.
    self.meta = self.meta.copy()

    # Use the caller-supplied client, or build a default one.
    supplied = kwargs.get('client')
    if supplied is not None:
        self.meta.client = supplied
    else:
        self.meta.client = ibm_boto3.client(self.meta.service_name)

    # Positional args map onto identifiers in ResourceJSON order.
    for idx, value in enumerate(args):
        setattr(self, '_' + self.meta.identifiers[idx], value)

    # Keyword args: 'client' was consumed above; anything else must be
    # a known identifier.
    for name, value in kwargs.items():
        if name == 'client':
            continue
        if name in self.meta.identifiers:
            setattr(self, '_' + name, value)
        else:
            raise ValueError(f'Unknown keyword argument: {name}')

    # Every identifier must now have a value.
    for identifier in self.meta.identifiers:
        if getattr(self, identifier) is None:
            raise ValueError(f'Required parameter {identifier} not set')
def get_image_size(cloud_id, region, bucket_name, image_name, image_format):
    """
    Get the size of image to convert. This task will get the size using object's HEAD data using S3 APIs
    :param cloud_id: <string> cloud ID for which the image is being converted (for credentials)
    :param region: <string> region in which the COS bucket resides
    :param bucket_name: <string> bucket name in which the image resides
    :param image_name: <string> Name of the image
    :param image_format: <string> file extension of the image object
    :return: <int> Image size in MBs, or None if the cloud or the
        content-length header cannot be found
    """
    cloud = IBMCloud.query.filter_by(id=cloud_id).first()
    if not cloud:
        return

    client = ibm_boto3.client(
        service_name='s3',
        ibm_api_key_id=decrypt_api_key(cloud.api_key),
        ibm_service_instance_id=cloud.service_credentials.resource_instance_id,
        ibm_auth_endpoint="https://iam.cloud.ibm.com/identity/token",
        config=Config(signature_version="oauth"),
        endpoint_url="https://s3.{region}.cloud-object-storage.appdomain.cloud"
        .format(region=region))

    response = client.head_object(
        Bucket=bucket_name,
        Key="{image_name}.{image_format}".format(image_name=image_name,
                                                 image_format=image_format))
    # Guarded chain of .get() calls replaces the original triple-negated
    # condition; behavior is identical (missing/empty -> None).
    headers = response.get("ResponseMetadata", {}).get("HTTPHeaders", {})
    content_length = headers.get("content-length")
    if not content_length:
        return
    # Decimal MB (10^6 bytes).
    return int(int(content_length) / 1000000)
def getParamsCOS(args):
    """Build an HMAC-signed COS client plus presign parameters from *args*.

    Normalises the operation name (e.g. 'getobject' -> 'get_object') and
    pulls credentials from explicit args or the bound
    '__bx_creds'/'cloud-object-storage' service credentials.
    """
    operation = args.get('operation').lower()
    if '_' not in operation:
        # Insert the underscore before 'object' (e.g. 'putobject' -> 'put_object').
        split_at = operation.find('object')
        operation = operation[:split_at] + '_' + operation[split_at:]
    expires = args.get('expires', 60 * 15)
    endpoint = args.get('endpoint',
                        'https://s3-api.us-geo.objectstorage.softlayer.net')

    bound_creds = args.get('__bx_creds', {}).get('cloud-object-storage', {})
    hmac_keys = bound_creds.get('cos_hmac_keys', {})
    access_key_id = args.get('access_key_id',
                             hmac_keys.get('access_key_id', ''))
    secret_access_key = args.get('secret_access_key',
                                 hmac_keys.get('secret_access_key', ''))
    api_key_id = args.get('apikey',
                          args.get('apiKeyId', bound_creds.get('apikey', '')))
    service_instance_id = args.get(
        'resource_instance_id',
        args.get('serviceInstanceId',
                 bound_creds.get('resource_instance_id', '')))
    ibm_auth_endpoint = args.get('ibmAuthEndpoint',
                                 'https://iam.ng.bluemix.net/oidc/token')

    cos = ibm_boto3.client('s3',
                           aws_access_key_id=access_key_id,
                           aws_secret_access_key=secret_access_key,
                           region_name='us-standard',
                           ibm_auth_endpoint=ibm_auth_endpoint,
                           config=Config(signature_version='s3v4'),
                           endpoint_url=endpoint)
    params = {
        'bucket': args['bucket'],
        'key': args['key'],
        'operation': operation,
        'expires': expires,
    }
    return {'cos': cos, 'params': params}
def main(dict):
    """Download './sf.xls' from COS and analyse it.

    :param dict: action parameters — COS credentials ('apikey',
        'resource_instance_id', 'auth_endpoint', 'service_endpoint') and
        the object location ('bucket_name', 'object_name').  The name
        shadows the builtin ``dict`` but is kept for interface
        compatibility with existing callers.
    :return: result of analyse_file() on success, {"status": "error"} on
        any failure (the exception is printed).
    """
    client = ibm_boto3.client(
        's3',
        ibm_api_key_id=dict['apikey'],
        ibm_service_instance_id=dict['resource_instance_id'],
        ibm_auth_endpoint=dict['auth_endpoint'],
        config=Config(signature_version='oauth'),
        endpoint_url=dict['service_endpoint'])
    download_file_path = './sf.xls'
    # Fixed: the original wrapped the download in an inner
    # `try/except Exception: raise` which is a no-op — flattened into a
    # single try with identical behavior.
    try:
        client.download_file(dict['bucket_name'], dict['object_name'],
                             download_file_path)
        print('File Downloaded')
        return analyse_file(download_file_path)
    except Exception as e:
        print(e)
        return {"status": "error"}
def __init__(self, ceph_config, **kwargs):
    """Create a Ceph S3 client using HMAC credentials from *ceph_config*.

    The endpoint is forced to https; timeouts and retry counts come from
    module-level constants.
    """
    logger.debug("Creating Ceph client")
    self.ceph_config = ceph_config
    self.is_cloudbutton_function = is_cloudbutton_function()
    user_agent = ceph_config['user_agent']

    service_endpoint = ceph_config.get('endpoint').replace(
        'http:', 'https:')
    # Fixed log-message typo ("Seting" -> "Setting").
    logger.debug("Setting Ceph endpoint to {}".format(service_endpoint))

    logger.debug("Using access_key and secret_key")
    access_key = ceph_config.get('access_key')
    secret_key = ceph_config.get('secret_key')
    client_config = ibm_botocore.client.Config(
        max_pool_connections=128,
        user_agent_extra=user_agent,
        connect_timeout=CONN_READ_TIMEOUT,
        read_timeout=CONN_READ_TIMEOUT,
        retries={'max_attempts': OBJ_REQ_RETRIES})
    self.cos_client = ibm_boto3.client('s3',
                                       aws_access_key_id=access_key,
                                       aws_secret_access_key=secret_key,
                                       config=client_config,
                                       endpoint_url=service_endpoint)
    logger.debug("Ceph client created successfully")
def transcription(data):
    """Download a FLAC object named *data* from COS and transcribe it.

    The object is fetched from the 'buckettestestandard' bucket into a
    local file of the same name, then posted to the Watson
    Speech-to-Text /v1/recognize endpoint.  Returns the first
    transcript alternative as a string.
    """
    cos = ibm_boto3.client("s3",
                           ibm_api_key_id=cfg.COS_API_KEY_ID,
                           ibm_service_instance_id=cfg.COS_RESOURCE_CRN,
                           ibm_auth_endpoint=cfg.COS_AUTH_ENDPOINT,
                           config=Config(signature_version="oauth"),
                           endpoint_url=cfg.COS_ENDPOINT)
    headers = {
        'Content-Type': 'audio/flac',
    }
    params = (
        ('word_alternatives_threshold', '0.9'),
        ('keywords', 'colorado,tornado,tornadoes'),
        ('keywords_threshold', '0.5'),
    )
    with open(data, 'wb') as audio_file:
        cos.download_fileobj('buckettestestandard', data, audio_file)
    # Fixed: the original read via open(data, 'rb').read(), leaking the
    # file handle; a context manager closes it deterministically.
    with open(data, 'rb') as audio_file:
        audio_bytes = audio_file.read()
    url = cfg.STTURL + '/v1/recognize'
    response = requests.post(url, headers=headers, params=params,
                             data=audio_bytes,
                             auth=('apikey', cfg.STTAPIKEY))
    resp = json.loads(response.text)
    return str(resp['results'][0]['alternatives'][0]['transcript'])
def data_retrieval(comp):
    """
    the given file will be retrived or downloaded in the relative path
    from the cloud object storage.

    Args:
        comp: filename to be retrieved.

    Returns:
        does not have a return value.
    """
    print('downloading data...(227MB)')
    # NOTE(review): live credentials are hard-coded here and committed to
    # source — they should be revoked and loaded from the environment or a
    # secrets store instead.
    credentials = {"api_key": "QouAvUnBFzdt8WgugSwEdk3cvFvGWFtaLVSqOOmdID6B",
                   "bucket": "moviesdataset",
                   "endpoint_url": "https://s3.eu-de.cloud-object-storage.appdomain.cloud",
                   "service_id": "crn:v1:bluemix:public:cloud-object-storage:global:a/7952d450a1c747d6a4fd528ca95a95\
2c:b8d2f94f-1bca-442a-ae31-0ba42b0ce6ec::"}
    # assumes the service_id CRN continuation above carries no embedded
    # whitespace -- TODO confirm against the original file layout
    cos = ibm_boto3.client("s3",
                           ibm_api_key_id=credentials['api_key'],
                           ibm_service_instance_id=credentials['service_id'],
                           config=Config(signature_version='oauth'),
                           endpoint_url=credentials['endpoint_url'],
                           region_name='ap-standard')
    print('it may take a few minutes depending upon the connection speed')
    # Bucket name equals *comp*; the object '<comp>.zip' is saved locally.
    cos.download_file(comp, f'{comp}.zip', f'./{comp}.zip')
    print('downloaded.')
def __init__(self):
    """Initialize the instance using global configuration.

    Reads the 'cos' section of the cephci config and builds both an S3
    client and an S3 resource against the configured endpoint.
    """
    cos_conf = get_cephci_config()["cos"]
    self._conf = cos_conf
    self._api_key = cos_conf["api-key"]
    self._resource_id = cos_conf["resource-id"]
    self.endpoint = cos_conf["endpoint"]
    self.location_constraint = {
        "LocationConstraint": cos_conf["location-constraint"]
    }
    self.client = ibm_boto3.client(
        "s3",
        ibm_api_key_id=self._api_key,
        ibm_service_instance_id=self._resource_id,
        config=Config(signature_version="oauth"),
        endpoint_url=self.endpoint,
    )
    self.resource = ibm_boto3.resource(
        "s3",
        ibm_api_key_id=self._api_key,
        ibm_service_instance_id=self._resource_id,
        config=Config(signature_version="oauth"),
        endpoint_url=self.endpoint,
    )
def cres3cos():
    """Return an IBM COS S3 client for the us-geo endpoint.

    NOTE: credentials are hard-coded below; they belong in configuration
    or a secrets store rather than source.
    """
    cos_credentials = {
        "apikey": "SRoE84GJgY37s35b8_Ombfk5H2h6c7eVRXyC_hvntgND",
        "endpoints": "https://cos-service.bluemix.net/endpoints",
        "iam_apikey_description": "Auto generated apikey during resource-key operation for Instance - crn:v1:bluemix:public:cloud-object-storage:global:a/11bef123053fda60aec82db537d2b9b4:0c7aec40-1644-4e12-bf8b-3df410c20c5a::",
        "iam_apikey_name": "auto-generated-apikey-593c7907-e82c-428a-8f60-a419498464f0",
        "iam_role_crn": "crn:v1:bluemix:public:iam::::serviceRole:Writer",
        "iam_serviceid_crn": "crn:v1:bluemix:public:iam-identity::a/11bef123053fda60aec82db537d2b9b4::serviceid:ServiceId-1dc7c415-853a-47a8-a5b3-8da9df9fc793",
        "resource_instance_id": "crn:v1:bluemix:public:cloud-object-storage:global:a/11bef123053fda60aec82db537d2b9b4:0c7aec40-1644-4e12-bf8b-3df410c20c5a::"
    }
    auth_endpoint = 'https://iam.bluemix.net/oidc/token'
    service_endpoint = 'https://s3-api.us-geo.objectstorage.softlayer.net'
    return ibm_boto3.client(
        's3',
        ibm_api_key_id=cos_credentials['apikey'],
        ibm_service_instance_id=cos_credentials['resource_instance_id'],
        ibm_auth_endpoint=auth_endpoint,
        config=Config(signature_version='oauth'),
        endpoint_url=service_endpoint)
def __init__(self, settings):
    """
    Initializes the BotoStorageHandler with it's special connection string
    :param settings: settings from `mlapp > config.py` depending on handler type name.
    """
    super(IBMBoto3StorageHandler, self).__init__()
    conf = settings
    if not conf:
        logging.error(
            'Configuration should be added to the file. Key should be "boto"'
        )
    try:
        # Build the COS client; a missing config key surfaces as KeyError.
        self.botoClient = ibm_boto3.client(
            "s3",
            ibm_api_key_id=conf["api_key_id"],
            ibm_service_instance_id=conf['service_crn'],
            config=Config(signature_version="oauth"),
            endpoint_url=conf['endpoint'])
    except KeyError as e:
        logging.error("Missing parameter in file storage config %s" % str(e))
def __init__(self, config):
    """Create an HMAC-authenticated S3 client from *config*.

    Expects 'access_key', 'secret_key' and 'endpoint' keys.
    """
    self.cos_client = ibm_boto3.client(
        's3',
        aws_access_key_id=config["access_key"],
        aws_secret_access_key=config["secret_key"],
        config=ibm_botocore.client.Config(max_pool_connections=200),
        endpoint_url=config["endpoint"])
def createFile(tweets):
    """Collect tweet fields into a CSV and upload it to COS.

    NOTE(review): the <"..."> tokens below are fill-in placeholders, not
    valid Python — this function is a template and will not parse until
    they are replaced with real credential/bucket values.
    """
    outtweets = []
    for tweet in tweets:
        outtweets.append([tweet.created_at.hour, tweet.text,
                          tweet.retweet_count, tweet.favorite_count])
    client = ibm_boto3.client(service_name='s3',
                              ibm_api_key_id=<"COS_API_KEY">,
                              ibm_service_instance_id= <"COS_SERVICE_ID">,
                              config=Config(signature_version='oauth'),
                              endpoint_url= "https://" + <"COS_ENDPOINT_URL">)
    cols = ['hour', 'text', 'retweets', 'favorites']
    table = pd.DataFrame(columns=cols)
    # NOTE(review): DataFrame.append is deprecated (removed in pandas 2.x);
    # building a list of dicts and constructing the frame once would be
    # the modern equivalent.
    for i in outtweets:
        table = table.append({'hour': i[0], 'text': i[1],
                              'retweets': i[2], 'favorites': i[3]},
                             ignore_index=True)
    table.to_csv('tweets_data.csv', index=False)
    try:
        res = client.upload_file(Filename="tweets_data.csv",
                                 Bucket=<'BUCKET_NAME'>, Key='tweets.csv')
    except Exception as e:
        print(Exception, e)
    else:
        print('File Uploaded')
def __init__(self):
    """
    Description : Constructor to class FeedbackLoop, initializes the
                  connection to COS and loads the per-region datasets.
    Parameters : 1. self
    """
    constants = Constants()
    self.client = ibm_boto3.client(service_name='s3',
                                   ibm_api_key_id=constants.API_KEY,
                                   ibm_auth_endpoint=constants.AUTH_ENDPOINT,
                                   config=Config(signature_version='oauth'),
                                   endpoint_url=constants.COS_API_ENDPOINT)
    column_names = ['Timestamp', 'CPU', 'Instances']
    util = COSUtils()
    self.bucketName = 'getcsvdata'
    self.data = {}
    self.score = {}
    self.modelSelector = {}
    self.response = {}
    for region in ['Dallas', 'London', 'Tokyo']:
        # NOTE(review): initialDataset/fetchedDataset are overwritten each
        # iteration, so after the loop they only reflect the last region.
        self.initialDataset = 'initial_' + region + '.csv'
        self.fetchedDataset = 'data_' + region + '.csv'
        self.data[region] = util.get_dataframe(column_names,
                                               self.bucketName,
                                               self.initialDataset)
        self.data[region].columns = column_names
        self.data[region] = self.convertTimestampToFloat(self.data[region])
        self.modelSelector[region] = RegressionSelector()
        # getScore presumably trains/evaluates models and returns a dict
        # with a 'Best Score' entry -- confirm against its definition.
        self.score[region] = self.getScore(self.data[region], region)['Best Score']
        logging.info("Score:" + str(self.score[region]))
    logging.debug("Response:")
    logging.debug(self.response)
def __init__(self, cos, bucket=None):
    '''
    cos: if cos is dict then funcs assume IBM cloud object storage api

    Issue: add some test that cos works???
    Issue: separate args for creds and cos, maybe even creds as json file path
    '''
    if isinstance(cos, dict):
        # A credentials dict: validate the required keys, then build the
        # oauth-signed client from them.
        keys = [
            'endpoint_url', 'ibm_api_key_id', 'ibm_auth_endpoint',
            'ibm_service_instance_id'
        ]
        msg = 'creds dict must have valid keys: %s' % (keys)
        assert all(k in cos for k in keys), msg
        cos = ibm_boto3.client('s3',
                               config=Config(signature_version="oauth"),
                               **cos)
    elif isinstance(cos, ibm_boto3.resources.factory.s3.ServiceResource):
        # A service resource: unwrap its low-level client.
        # consider if NOT instance client if all other types have meta...
        cos = cos.meta.client
    else:
        raise TypeError('cos is not right')
    self.cos = cos
    self.bucket = bucket
def _get_client(self):
    """Return an oauth-signed S3 client pointed at the fixture endpoint."""
    signed = Config(signature_version='oauth')
    return client('s3',
                  ibm_api_key_id='MYAPIKEYID',
                  ibm_service_instance_id='MYAPIKEYID',
                  ibm_auth_endpoint='IBMAUTHENDPOINT',
                  endpoint_url='https://192.168.0.1:443',
                  config=signed)
def get_client():
    """Create an IBM COS S3 client from the COS_* environment variables."""
    api_key = os.getenv('COS_API_KEY_ID')
    instance_crn = os.getenv('COS_INSTANCE_CRN')
    endpoint = os.getenv('COS_ENDPOINT')
    return ibm_boto3.client(
        "s3",
        ibm_api_key_id=api_key,
        ibm_service_instance_id=instance_crn,
        config=Config(signature_version="oauth"),
        endpoint_url=endpoint)
def GetObjStoreInfo():
    """List all buckets, attach access URLs, and render main.html."""
    cos = ibm_boto3.client('s3',
                           ibm_api_key_id=cos_credentials['apikey'],
                           ibm_service_instance_id=cos_credentials['resource_instance_id'],
                           ibm_auth_endpoint=auth_endpoint,
                           config=Config(signature_version='oauth'),
                           endpoint_url=service_endpoint)
    buckets = []
    for entry in cos.list_buckets()['Buckets']:
        entry['accessURL'] = thehost + "/" + entry['Name']
        buckets.append(entry)
    return render_template('main.html', cons=buckets)
def getParamsCOS(args):
    """Build an IBM COS oauth client and request params from action *args*.

    Falls back from explicit args to the bound
    '__bx_creds'/'cloud-object-storage' credentials and finally the
    __OW_IAM_NAMESPACE_API_KEY environment variable; returns
    {'cos': None, ...} when no API key can be found.
    """
    endpoint = args.get('endpoint',
                        'https://s3.us.cloud-object-storage.appdomain.cloud')
    if not endpoint.startswith(("https://", "http://")):
        endpoint = "https://" + endpoint

    bound_creds = args.get('__bx_creds', {}).get('cloud-object-storage', {})
    api_key_id = args.get(
        'apikey',
        args.get('apiKeyId',
                 bound_creds.get(
                     'apikey',
                     os.environ.get('__OW_IAM_NAMESPACE_API_KEY') or '')))
    service_instance_id = args.get(
        'resource_instance_id',
        args.get('serviceInstanceId',
                 bound_creds.get('resource_instance_id', '')))
    ibm_auth_endpoint = args.get('ibmAuthEndpoint',
                                 'https://iam.cloud.ibm.com/identity/token')

    params = {'bucket': args.get('bucket')}
    if not api_key_id:
        return {'cos': None, 'params': params}
    cos = ibm_boto3.client('s3',
                           ibm_api_key_id=api_key_id,
                           ibm_service_instance_id=service_instance_id,
                           ibm_auth_endpoint=ibm_auth_endpoint,
                           config=Config(signature_version='oauth'),
                           endpoint_url=endpoint)
    return {'cos': cos, 'params': params}
def __init__(self, ceph_config):
    """Create a Ceph S3 client using HMAC credentials from *ceph_config*.

    The endpoint is used as configured (no scheme rewriting); timeouts
    and retry counts come from module-level constants.
    """
    logger.debug("Creating Ceph client")
    self.ceph_config = ceph_config
    user_agent = ceph_config['user_agent']

    service_endpoint = ceph_config.get('endpoint')
    # Fixed log-message typo ("Seting" -> "Setting").
    logger.debug("Setting Ceph endpoint to {}".format(service_endpoint))

    logger.debug("Using access_key and secret_key")
    access_key = ceph_config.get('access_key')
    secret_key = ceph_config.get('secret_key')
    client_config = ibm_botocore.client.Config(
        max_pool_connections=128,
        user_agent_extra=user_agent,
        connect_timeout=CONN_READ_TIMEOUT,
        read_timeout=CONN_READ_TIMEOUT,
        retries={'max_attempts': OBJ_REQ_RETRIES})
    self.cos_client = ibm_boto3.client('s3',
                                       aws_access_key_id=access_key,
                                       aws_secret_access_key=secret_key,
                                       config=client_config,
                                       endpoint_url=service_endpoint)

    msg = STORAGE_CLI_MSG.format('Ceph')
    logger.info("{} - Endpoint: {}".format(msg, service_endpoint))