def create_admin_user(self, username, displayname, cluster_name='ceph'):
    try:
        add_io_info = AddIOInfo()
        cmd = 'radosgw-admin user create --uid=%s --display-name=%s --cluster %s' % (
            username, displayname, cluster_name)
        log.info('cmd: %s' % cmd)
        # check_output raises CalledProcessError on a non-zero exit status,
        # which the handler below relies on (Popen would never raise it).
        out = subprocess.check_output(cmd, shell=True)
        v_as_json = json.loads(out)
        # log.info(v_as_json)
        user_details = {}
        user_details['user_id'] = v_as_json['user_id']
        user_details['display_name'] = v_as_json['display_name']
        user_details['access_key'] = v_as_json['keys'][0]['access_key']
        user_details['secret_key'] = v_as_json['keys'][0]['secret_key']
        add_io_info.add_user_info(
            **{
                'user_id': user_details['user_id'],
                'access_key': user_details['access_key'],
                'secret_key': user_details['secret_key']
            })
        return user_details
    except subprocess.CalledProcessError as e:
        error = '%s %s' % (e.output, e.returncode)
        log.error(error)
        return False
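def example_create_qa_user(admin_ops):
    """Hedged usage sketch for create_admin_user above; `admin_ops` is assumed
    to be an instance of the class that defines it (the class body is not
    shown in this section). The parser only touches these fields of the
    radosgw-admin JSON output:
        {"user_id": ..., "display_name": ...,
         "keys": [{"access_key": ..., "secret_key": ...}]}
    """
    user = admin_ops.create_admin_user('qa_user1', 'qa user one')
    if user:
        log.info('created user with access_key: %s' % user['access_key'])
    return user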
def delete(self, bucket_name): log.debug("function: %s" % self.delete.__name__) log.info("in delete bucket") """ :param bucket_name: string :rtype: dict :return: delete_bucket_stack: args: status: True, if bucket is deleted or False if not deleted msgs: error messages """ try: self.connection.delete_bucket(bucket_name) delete_bucket_stack = {"status": True} except exception.S3ResponseError as e: log.error(e) delete_bucket_stack = {"status": False, "msgs": e} return delete_bucket_stack
def delete_keys(self, delete_bucket=True):
    log.info('deleting buckets with keys')
    for bucket_name in self.bucket_names:
        log.info('ops on bucket name: %s' % bucket_name)
        bucket = self.bucket_ops.get(bucket_name)
        all_keys_in_bucket = bucket['bucket'].list()
        if all_keys_in_bucket:
            log.info('got all keys in bucket: %s' % all_keys_in_bucket)
            key_op = KeyOp(bucket['bucket'])
            log.info('deleting all keys')
            keys_deleted = key_op.multidelete_keys(all_keys_in_bucket)
            if keys_deleted is None:
                log.error('keys not deleted')
                raise AssertionError('keys not deleted')
            log.info('all keys deleted')
def download_keys(self):
    download_dir = os.path.join(os.getcwd(), "Download")
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)
    for bucket_name in self.bucket_names:
        log.info('ops on bucket name: %s' % bucket_name)
        bucket_dir = os.path.join(download_dir, bucket_name)
        if not os.path.exists(bucket_dir):
            os.makedirs(bucket_dir)
        bucket = self.bucket_ops.get(bucket_name, self.json_file_download)
        all_keys_in_bucket = bucket['bucket'].list()
        for each_key in all_keys_in_bucket:
            get_contents = PutContentsFromFile(each_key,
                                               self.json_file_download)
            filename = os.path.join(bucket_dir, each_key.key)
            download = get_contents.get(filename)
            if not download['status']:
                log.error(download['msgs'])
                raise AssertionError('download failed')
            else:
                log.info('download complete')
                log.info('after download, deleting local file: %s' % filename)
                os.unlink(filename)
def test_exec(config): test_info = AddTestInfo("multipart Upload with cancel and download") add_io_info = AddIOInfo() add_io_info.initialize() try: # test case starts test_info.started_info() all_user_details = rgw_lib.create_users(config.user_count) log.info("multipart upload enabled") for each_user in all_user_details: config.objects_count = 1 rgw = ObjectOps(config, each_user) buckets = rgw.create_bucket() rgw.break_upload_at_part_no = config.break_at_part_no rgw.multipart_upload(buckets) log.info("starting at part no: %s" % config.break_at_part_no) log.info("--------------------------------------------------") rgw.break_upload_at_part_no = 0 rgw.multipart_upload(buckets) rgw.download_keys() test_info.success_status("test completed") sys.exit(0) except AssertionError as e: log.error(e) test_info.failed_status("test failed: %s" % e) sys.exit(1)
def test_exec(config):
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    test_info = AddTestInfo('create m buckets, n keys and download')
    try:
        # test case starts
        test_info.started_info()
        with open('user_details') as fin:
            all_user_details = simplejson.load(fin)
        for each_user in all_user_details:
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.upload(buckets)
            rgw.download_keys()
        test_info.success_status('test completed')
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec(config): test_info = AddTestInfo("create users") add_io_info = AddIOInfo() add_io_info.initialize() try: test_info.started_info() all_user_details = rgw_lib.create_users(config.user_count, config.cluster_name) # dump the list of users into a file with open("user_details", "w") as fout: json.dump(all_user_details, fout) test_info.success_status("user creation completed") sys.exit(0) except AssertionError as e: log.error(e) test_info.failed_status("user creation failed: %s" % e) sys.exit(1)
def check_contents(self):
    """
    can also be used for getting the contents, i.e. download
    :return: string_exists_status (dictionary):
        status: True or False
        contents: contents of the key as a string
        msgs: error messages
    """
    log.debug('function: %s' % self.check_contents.__name__)
    log.info('checking contents or getting the string val')
    try:
        string_contents = self.key.get_contents_as_string()
        string_exists_status = {
            'status': True,
            'contents': string_contents
        }
    except exception.BotoClientError as e:
        log.error(e)
        string_exists_status = {'status': False, 'msgs': e}
    return string_exists_status
def test_exec_read(config):
    grants = {'permission': 'READ', 'user_id': None, 'recursive': True}
    test_info = AddTestInfo(
        'Test with read permission on buckets for all users')
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        user1 = all_user_details[0]
        log.info('user1: %s' % user1)
        all_user_details.pop(0)
        u1 = ObjectOps(config, user1)
        for each_user in all_user_details:
            u2 = ObjectOps(config, each_user)
            u2_canonical_id = u2.canonical_id
            log.info('canonical id of u2: %s' % u2_canonical_id)
            grants['user_id'] = u2_canonical_id
            # create buckets with no grants and let u2 read them
            u1.grants = None
            u1.create_bucket()
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created
            u2.grants = None
            u2.set_bucket_properties()
            # set permissions and read
            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created
            u2.grants = None
            u2.set_bucket_properties()
        test_info.success_status('test completed')
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec(config):
    test_info = AddTestInfo('multipart upload')
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        log.info('multipart upload enabled')
        for each_user in all_user_details:
            config.objects_count = 1
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.multipart_upload(buckets)
        test_info.success_status('test completed')
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def get(self, bucket_name, json_file=None):
    """
    :param bucket_name: string
    :rtype: dict
    :return: get_bucket_stack:
        status: True if the bucket was fetched, False otherwise
        bucket: bucket object
        msgs: error messages
    """
    log.debug("function: %s" % self.get.__name__)
    log.info("in get bucket")
    try:
        bucket = self.connection.get_bucket(bucket_name)
        if json_file is not None:
            add_bucket_to_json = JBucket(json_file)
            add_bucket_to_json.add(bucket_name)
        get_bucket_stack = {"status": True, "bucket": bucket}
    except (exception.S3ResponseError, exception.AWSConnectionError) as e:
        log.error(e)
        get_bucket_stack = {"status": False, "msgs": e}
    return get_bucket_stack
def test_exec(config):
    test_info = AddTestInfo(
        'enable versioning on a bucket and upload keys and their versions')
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        for each_user in all_user_details:
            rgw = ObjectOps(config, each_user)
            rgw.enable_versioning = True
            rgw.version_count = config.version_count
            buckets = rgw.create_bucket()
            rgw.set_bucket_properties()
            rgw.upload(buckets)
        test_info.success_status('test completed')
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def check_if_bucket_empty(bucket):
    """
    :param bucket: bucket object
    :rtype: dict
    :return: check_for_empty_stack:
        contents: empty list ( [] ) or the list of keys in the bucket
        msgs: error messages
    """
    log.debug("function: %s" % check_if_bucket_empty.__name__)
    log.info("checking if bucket is empty")
    try:
        bucket_contents = bucket.list()
        check_for_empty_stack = {"contents": bucket_contents}
    except (exception.S3ResponseError, exception.AWSConnectionError) as e:
        log.error(e)
        check_for_empty_stack = {"contents": [], "msgs": e}
    return check_for_empty_stack
def list_all_buckets(connection):
    """
    :param connection: AWS authentication connection
    :rtype: dict
    :return: list_buckets_stack:
        all_buckets: list of all buckets or None
        msgs: error messages
    """
    log.debug("function: %s" % list_all_buckets.__name__)
    log.info("listing all buckets")
    try:
        all_buckets = connection.get_all_buckets()
        list_buckets_stack = {"all_buckets": all_buckets}
    except (exception.S3ResponseError, exception.AWSConnectionError) as e:
        log.error(e)
        list_buckets_stack = {"all_buckets": None, "msgs": e}
    return list_buckets_stack
def test_exec(config):
    test_info = AddTestInfo('create m buckets')
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        for each_user in all_user_details:
            rgw = ObjectOps(config, each_user)
            assert rgw.create_bucket()
        test_info.success_status('test completed')
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def set_acls(self, bucket, acls):
    """
    :param bucket: bucket object
    :param acls: canned acls: private, public-read, public-read-write,
                 authenticated-read
    :return: True on success, False on error, None if no acls were given
    """
    if acls is not None:
        try:
            log.info("got acl: %s" % acls)
            bucket.set_acl(acls)
            acp = bucket.get_acl()
            for grant in acp.acl.grants:
                log.info("canned acls set: %s on %s" %
                         (grant.permission, grant.id))
            return True
        except (exception.S3ResponseError, exception.BotoClientError) as e:
            log.error(e)
            return False
    else:
        log.info("not setting any acls")
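def example_apply_canned_acl(ops, buckets):
    """Hedged usage sketch for set_acls above: `ops` is assumed to be an
    instance of the defining class and `buckets` a list of boto bucket
    objects; both names are illustrative, not the repo's API."""
    for bucket in buckets:
        # fail fast if the ACL call is rejected
        assert ops.set_acls(bucket, 'public-read'), 'failed to set canned acl'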
def set_user_grant(self, bucket, grants):
    """
    :param bucket: bucket object
    :param grants: grants in the form {'permission': <permission type>,
                   'user_id': canonical_user_id, 'recursive': bool}
                   permission type: READ, WRITE, READ_ACP, WRITE_ACP,
                   FULL_CONTROL
    :return: True on success, False on error, None if no grants were given
    """
    if grants is not None:
        try:
            log.debug('setting grants %s' % grants)
            bucket.add_user_grant(permission=grants['permission'],
                                  user_id=grants['user_id'],
                                  recursive=grants['recursive'])
            acp = bucket.get_acl()
            for grant in acp.acl.grants:
                log.info('grants set: %s on %s' %
                         (grant.permission, grant.id))
            return True
        except (exception.S3ResponseError, exception.BotoClientError) as e:
            log.error(e)
            return False
    else:
        log.info('not setting any grants')
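def example_grant_read(ops, buckets, other_canonical_id):
    """Hedged usage sketch for set_user_grant above: grant another user READ
    on each bucket. All parameter names are assumptions for illustration."""
    grants = {'permission': 'READ',
              'user_id': other_canonical_id,
              'recursive': True}
    for bucket in buckets:
        assert ops.set_user_grant(bucket, grants), 'failed to set grant'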
def create_admin_user(self, username, displayname, cluster_name="ceph"): try: add_io_info = AddIOInfo() cmd = ( 'radosgw-admin user create --uid="%s" --display-name="%s" --cluster %s' % (username, displayname, cluster_name)) log.info("cmd: %s" % cmd) variable = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) v = variable.stdout.read() v_as_json = json.loads(v) # log.info(v_as_json) user_details = {} user_details["user_id"] = v_as_json["user_id"] user_details["display_name"] = v_as_json["display_name"] user_details["access_key"] = v_as_json["keys"][0]["access_key"] user_details["secret_key"] = v_as_json["keys"][0]["secret_key"] add_io_info.add_user_info( **{ "user_id": user_details["user_id"], "access_key": user_details["access_key"], "secret_key": user_details["secret_key"], }) return user_details except subprocess.CalledProcessError as e: error = e.output + str(e.returncode) log.error(error) return False
def delete(self, key_name, version_id=None):
    """
    :param key_name: string
    :param version_id: string, optional; deletes that specific version
    :return: deleted key object or None
        when no version_id is given on a versioned bucket, check that a
        delete marker was created for this delete.
    """
    log.debug('function: %s' % self.delete.__name__)
    log.debug('in delete key: %s' % key_name)
    try:
        key_deleted = self.bucket.delete_key(key_name, version_id=version_id)
        log.info('key_name: %s' % key_name)
        log.info('version_id: %s' % version_id)
        return key_deleted
    except (exception.BotoClientError, exception.S3ResponseError) as e:
        log.error(e)
        return None
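def example_versioned_delete(key_op, key_name):
    """Hedged sketch of the delete-marker check hinted at in the docstring
    above: on a version-enabled bucket a delete without a version_id only
    adds a delete marker, which boto2 exposes on the returned key object.
    `key_op` is assumed to be an instance of the class defining delete()."""
    deleted = key_op.delete(key_name)
    if deleted is not None and deleted.delete_marker:
        log.info('delete marker created for: %s' % key_name)
    return deleted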
def do_auth(self):
    log.debug('function: %s' % self.do_auth.__name__)
    try:
        log.info('got the credentials')
        # conn = S3Connection(self.ak, self.sk)
        self.dump_to_json_upload()
        self.dump_to_json_download()
        conn = boto.connect_s3(
            aws_access_key_id=self.access_key,
            aws_secret_access_key=self.secret_key,
            host=self.hostname,
            port=self.port,
            is_secure=self.is_secure,
            calling_format=boto.s3.connection.OrdinaryCallingFormat())
        log.info('access_key %s\nsecret_key %s' %
                 (self.access_key, self.secret_key))
        auth_stack = {
            'status': True,
            'conn': conn,
            'upload_json_file': self.json_file_upload,
            'download_json_file': self.json_file_download
        }
    except (boto.s3.connection.HostRequiredError,
            exception.AWSConnectionError, Exception) as e:
        log.error('connection failed')
        log.error(e)
        auth_stack = {'status': False, 'msgs': e}
    return auth_stack
def verify_io(self):
    data = self.file_op.get_data()
    users = data['users']
    try:
        for each_user in users:
            log.info('verifying data for the user:')
            log.info('user_id: %s' % each_user['user_id'])
            log.info('access_key: %s' % each_user['access_key'])
            log.info('secret_key: %s' % each_user['secret_key'])
            conn = boto.connect_s3(
                aws_access_key_id=each_user['access_key'],
                aws_secret_access_key=each_user['secret_key'],
                host=socket.gethostname(),
                port=int(utils.get_radosgw_port_no()),
                is_secure=False,
                calling_format=boto.s3.connection.OrdinaryCallingFormat())
            for each_bucket in each_user['bucket']:
                log.info('verifying data for bucket: %s' % each_bucket['name'])
                if each_bucket['test_op_code'] == 'delete':
                    bucket_from_s3 = conn.lookup(each_bucket['name'])
                    if bucket_from_s3 is None:
                        log.info('bucket deleted')
                        log.info('cannot verify objects: the bucket does not '
                                 'exist, so its objects are gone with it')
                    else:
                        log.info('bucket exists')
                        raise Exception('bucket exists but should have been deleted')
                else:
                    bucket_from_s3 = conn.get_bucket(each_bucket['name'])
                    if not each_bucket['keys']:
                        log.info('keys are not created')
                    else:
                        for each_key in each_bucket['keys']:
                            log.info('verifying data for key: %s' % each_key['name'])
                            if each_key['test_op_code'] == 'create':
                                key_from_s3 = bucket_from_s3.get_key(each_key['name'])
                                log.info('verifying size')
                                log.info('size from yaml: %s' % each_key['size'])
                                log.info('size from s3: %s' % key_from_s3.size)
                                if int(each_key['size']) != int(key_from_s3.size):
                                    raise Exception('size did not match')
                                log.info('verifying md5')
                                log.info('md5_on_s3 from yaml: %s' % each_key['md5_on_s3'])
                                log.info('md5_on_s3: %s' % key_from_s3.etag.replace('"', ''))
                                if each_key['md5_on_s3'] != key_from_s3.etag.replace('"', ''):
                                    raise Exception('md5 did not match')
                                log.info('verification complete for the key: %s' % key_from_s3.name)
                            if each_key['test_op_code'] == 'delete':
                                key_from_s3 = bucket_from_s3.get_key(each_key['name'])
                                if key_from_s3 is None:
                                    log.info('key deleted')
                                else:
                                    log.info('key exists')
                                    raise Exception('key was not deleted')
        log.info('verification of data completed, data intact')
    except Exception as e:
        log.error(e)
        log.error('verification failed')
        exit(1)
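# The structure verify_io expects from self.file_op.get_data(), inferred from
# the lookups above. Field names come straight from the code; the values here
# are purely illustrative:
EXAMPLE_IO_INFO = {
    'users': [{
        'user_id': 'qa_user1',
        'access_key': 'XXXXXXXX',
        'secret_key': 'YYYYYYYY',
        'bucket': [{
            'name': 'qa_bucket1',
            'test_op_code': 'create',   # or 'delete'
            'keys': [{
                'name': 'qa_key1',
                'test_op_code': 'create',   # or 'delete'
                'size': 1024,
                'md5_on_s3': 'd41d8cd98f00b204e9800998ecf8427e',
            }],
        }],
    }],
}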
def test_exec_write(config):
    test_info = AddTestInfo(
        "test with write permission on objects and buckets for all users")
    try:
        # test case starts
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        user1 = all_user_details[0]
        u1 = ObjectOps(config, user1)
        u1.create_bucket()
        log.info("user1: %s" % user1)
        all_user_details.pop(0)
        for each_user in all_user_details:
            print("iter ------------------>")
            log.info("user2: %s" % each_user)
            u2 = ObjectOps(config, each_user)
            u2_canonical_id = u2.canonical_id
            log.info("canonical id of u2: %s" % u2_canonical_id)
            log.info("setting only read permission")
            grants = {"permission": "READ", "user_id": None, "recursive": True}
            log.info("write permissions are not set")
            grants["user_id"] = u2_canonical_id
            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created
            u2.json_file_upload = u1.json_file_upload
            u2.json_file_download = u1.json_file_download
            u2.grants = None
            buckets = u2.set_bucket_properties()
            key_created = u2.upload(buckets)
            if not key_created:
                log.info("no write permission set, so object creation "
                         "failed as expected")
            else:
                raise AssertionError("object created even with no permission")
            log.info("setting permission to write also")
            grants = {
                "permission": "WRITE",
                "user_id": u2_canonical_id,
                "recursive": True,
            }
            u1.grants = grants
            u1.set_bucket_properties()
            u2.bucket_names = u1.bucket_names
            u2.buckets_created = u1.buckets_created
            u2.grants = None
            buckets = u2.set_bucket_properties()
            key_created = u2.upload(buckets,
                                    object_base_name=str(u2.canonical_id) + ".key")
            if key_created:
                log.info("object created after permission set")
        test_info.success_status("test completed")
    except AssertionError as e:
        log.error(e)
        test_info.failed_status("test failed: %s" % e)
        sys.exit(1)
def test_exec(config):
    test_info = AddTestInfo('create m buckets, n objects and delete')
    try:
        test_info.started_info()
        add_io_info = AddIOInfo()
        read_io_info = ReadIOInfo()
        add_io_info.initialize()
        rgw_service = RGWService()
        quota_mgmt = QuotaMgmt()
        test_config = AddToCephConf()
        if config.shards:
            test_config.set_to_ceph_conf(
                'global', ConfigOpts.rgw_override_bucket_index_max_shards,
                config.shards)
            rgw_service.restart()
            no_of_shards_for_each_bucket = int(config.shards) * int(
                config.bucket_count)
        with open('user_details') as fin:
            all_user_details = simplejson.load(fin)
        for each_user in all_user_details:
            add_io_info.add_user_info(
                **{
                    'user_id': each_user['user_id'],
                    'access_key': each_user['access_key'],
                    'secret_key': each_user['secret_key']
                })
        for each_user in all_user_details:
            if config.max_objects:
                quota_mgmt.set_bucket_quota(each_user['user_id'],
                                            config.max_objects)
                quota_mgmt.enable_bucket_quota(each_user['user_id'])
            rgw = ObjectOps(config, each_user)
            buckets = rgw.create_bucket()
            rgw.upload(buckets)
        # read_io_info.verify_io()
        test_info.success_status('test completed')
        sys.exit(0)
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def get(self, filename):
    """
    :param filename: the local filename the contents are downloaded to;
                     can differ from the original key name
    :return: dictionary:
        status: True for a successful download, False for a failed one
        msgs: error messages
    """
    log.debug('function: %s' % self.get.__name__)
    log.info('getting the contents of key: %s' % self.key)
    log.info('download or get the file to filename: %s' % filename)
    try:
        self.key.get_contents_to_filename(filename)
        md5_on_s3 = self.key.etag.replace('"', '')
        md5_local = utils.get_md5(filename)
        if md5_on_s3 == md5_local:
            md5_match = "match"
        else:
            md5_match = "no match"
        key_details = {
            'key_name': os.path.basename(filename),
            'key_name_os_s3': self.key.name,
            'size': os.stat(filename).st_size,
            'md5_local': md5_local,
            'md5_on_s3': md5_on_s3,
            'md5_match': md5_match,
            'opcode': {
                "edit": {"new_md5": None},
                "move": {"new_name": None},
                "delete": {"deleted": None}
            }
        }
        self.jkey.add(self.key.bucket.name, **key_details)
        download_status = {'status': True}
    except (exception.BotoClientError, exception.S3ResponseError,
            Exception) as e:
        log.error(e)
        download_status = {'status': False, 'msgs': e}
    return download_status
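# utils.get_md5 is not shown in this section; a minimal equivalent is sketched
# below. For non-multipart uploads the S3 ETag is the plain MD5 hex digest,
# which is why get() above can compare the two directly.
import hashlib  # may duplicate the module's own imports


def get_md5_of_file(filename, chunk_size=8192):
    """Compute the hex MD5 of a local file, reading it in chunks."""
    md5 = hashlib.md5()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()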
def test_exec_read(config):
    test_info = AddTestInfo('Test with read permission on buckets')
    try:
        # test case starts
        add_io_info = AddIOInfo()
        add_io_info.initialize()
        test_info.started_info()
        all_user_details = rgw_lib.create_users(config.user_count)
        user1 = all_user_details[0]
        log.info('user1: %s' % user1)
        user2 = all_user_details[1]
        log.info('user2: %s' % user2)
        u1 = ObjectOps(config, user1)
        u2 = ObjectOps(config, user2)
        u1_grants = {
            'permission': 'READ',
            'user_id': u2.canonical_id,
            'recursive': True
        }
        u2_grants = {
            'permission': 'FULL_CONTROL',
            'user_id': u1.canonical_id,
            'recursive': True
        }
        u1.grants = u1_grants
        u1.create_bucket()
        u1_buckets = u1.set_bucket_properties()
        u1.upload(u1_buckets, object_base_name=u1.canonical_id + '.key')
        all_keys = u1_buckets[0].get_all_keys()
        for key in all_keys:
            log.info('all keys from user 1--------------')
            log.info('name: %s' % key.name)
        u2.grants = u2_grants
        u2.create_bucket()
        u2_buckets = u2.set_bucket_properties()
        bu2 = u1.connection['conn'].get_bucket(u2_buckets[0].name)
        log.info('copying the objects from u1 to u2')
        for each in all_keys:
            bu2.copy_key(each.key, u1_buckets[0].name, each.key)
        all_keys2 = bu2.get_all_keys()
        for key in all_keys2:
            log.info('all keys from user 2--------------')
            log.info('name: %s' % key.name)
        log.info('verifying copied objects--------')
        u2.grants = None
        u2_buckets = u2.set_bucket_properties()
        all_keys3 = u2_buckets[0].get_all_keys()
        for key in all_keys3:
            log.info('all keys from user 2--------------')
            log.info('name: %s' % key.name)
        test_info.success_status('test completed')
    except AssertionError as e:
        log.error(e)
        test_info.failed_status('test failed: %s' % e)
        sys.exit(1)
def test_exec_read(config): test_info = AddTestInfo("Test with read permission on buckets") try: # test case starts add_io_info = AddIOInfo() add_io_info.initialize() test_info.started_info() all_user_details = rgw_lib.create_users(config.user_count) user1 = all_user_details[0] log.info("user1: %s" % user1) user2 = all_user_details[1] log.info("user2: %s" % user2) u1 = ObjectOps(config, user1) u2 = ObjectOps(config, user2) u1_grants = { "permission": "READ", "user_id": u2.canonical_id, "recursive": True, } u2_grants = { "permission": "FULL_CONTROL", "user_id": u1.canonical_id, "recursive": True, } u1.grants = u1_grants u1.create_bucket() u1_buckets = u1.set_bucket_properties() u1.upload(u1_buckets, object_base_name=u1.canonical_id + ".key") all_keys = u1_buckets[0].get_all_keys() for key in all_keys: log.info("all keys from user 1--------------") log.info("name: %s" % key.name) u2.grants = u2_grants u2.create_bucket() u2_buckets = u2.set_bucket_properties() bu2 = u1.connection["conn"].get_bucket(u2_buckets[0].name) log.info("copying the objects from u1 to u2") for each in all_keys: bu2.copy_key(each.key, u1_buckets[0].name, each.key) all_keys2 = bu2.get_all_keys() for key in all_keys2: log.info("all keys from user 2--------------") log.info("name: %s" % key.name) log.info("verifying copied objects--------") u2.grants = None u2_buckets = u2.set_bucket_properties() all_keys3 = u2_buckets[0].get_all_keys() for key in all_keys3: log.info("all keys from user 2--------------") log.info("name: %s" % key.name) test_info.success_status("test completed") except AssertionError as e: log.error(e) test_info.failed_status("test failed: %s" % e) sys.exit(1)
def put(self, filename, test_op_code='create'):
    """
    :param filename: filename, i.e. along with location
    :return: dictionary:
        status: True for a successful upload, False for a failed one
        msgs: error messages
    """
    log.debug('function: %s' % self.put.__name__)
    log.info('upload of file: %s' % filename)
    try:
        self.key.set_contents_from_filename(filename)
        md5_on_s3 = self.key.etag.replace('"', '')
        key_details = {
            'key_name': self.key.key,
            'size': os.stat(filename).st_size,
            'md5_local': utils.get_md5(filename),
            'md5_on_s3': md5_on_s3,
            'opcode': {
                "edit": {"new_md5": None},
                "move": {"new_name": None},
                "delete": {"deleted": None}
            }
        }
        self.jkey.add(self.key.bucket.name, **key_details)
        self.add_io_info.add_keys_info(
            self.key.bucket.connection.access_key, self.key.bucket.name,
            **{
                'key_name': self.key.key,
                'size': os.stat(filename).st_size,
                'md5_on_s3': md5_on_s3,
                'upload_type': 'normal',
                'test_op_code': test_op_code
            })
        upload_status = {'status': True}
    except (exception.BotoClientError, exception.S3ResponseError) as e:
        log.error(e)
        upload_status = {'status': False, 'msgs': e}
    return upload_status
def set_metadata(self, **metadata):
    log.debug('function: %s' % self.set_metadata.__name__)
    log.info('setting metadata %s' % metadata)
    metadata_name = list(metadata.keys())[0]
    metadata_value = list(metadata.values())[0]
    try:
        self.key.set_metadata(metadata_name, metadata_value)
        return True
    except (exception.BotoClientError, exception.S3ResponseError) as e:
        log.error(e)
        return False
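def example_set_metadata(key_op):
    """Hedged usage sketch for set_metadata above: `key_op` is assumed to be
    an instance of the defining class. boto2 sends the pair as an
    x-amz-meta-<name> header, so it must be set before the contents upload."""
    assert key_op.set_metadata(owner='qa')
    key_op.key.set_contents_from_string('hello world')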
def set_metadata(self, **metadata): log.debug("function: %s" % self.set_metadata.__name__) log.info("setting metadata %s" % metadata) metadata_name = list(metadata.keys())[0] metadata_value = list(metadata.values())[0] try: self.key.set_metadata(metadata_name, metadata_value) return True except (exception.BotoClientError, exception.S3ResponseError) as e: log.error(e) return False