def list_all_buckets(connection):
    """Fetch every bucket visible on the given AWS connection.

    :param connection: authenticated AWS/S3 connection object
    :rtype: dict
    :return: {'all_buckets': <bucket listing>} on success,
             {'all_buckets': None, 'msgs': <error>} on failure
    """
    log.debug("function: %s" % list_all_buckets.__name__)
    log.info("listing all buckets")
    try:
        outcome = {"all_buckets": connection.get_all_buckets()}
    except (exception.S3ResponseError, exception.AWSConnectionError) as err:
        log.error(err)
        outcome = {"all_buckets": None, "msgs": err}
    return outcome
def create_update_json_file(self):
    """Build the JSON payload and write/refresh it on disk."""
    log.debug('creating_updating json file')
    self.add_data(self.create_json_data())
def get(self, bucket_name, json_file=None):
    """Retrieve an existing bucket, optionally recording it in a JSON file.

    :param bucket_name: name of the bucket to fetch
    :param json_file: optional JSON file path to record the bucket name in
    :rtype: dict
    :return: {'status': True, 'bucket': <bucket object>} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug("function: %s" % self.get.__name__)
    log.info("in get bucket")
    try:
        fetched = self.connection.get_bucket(bucket_name)
        if json_file is not None:
            JBucket(json_file).add(bucket_name)
        outcome = {"status": True, "bucket": fetched}
    except (exception.S3ResponseError, exception.AWSConnectionError) as err:
        log.error(err)
        outcome = {"status": False, "msgs": err}
    return outcome
def create_bucket(self):
    """Create the configured number of buckets for this user.

    Bucket names follow '<user_id>.bucky.<n>'.  Each created bucket object
    is appended to self.buckets_created and recorded via self.bucket_ops.

    :return: list of created bucket objects
    :raises AssertionError: if any bucket creation reports failure
    """
    log.info('no of buckets to create: %s' % self.bucket_create_nos)
    log.info('buckets_creating......')
    for bucket_no in range(self.bucket_create_nos):
        log.debug('iter: %s' % bucket_no)
        bucket_name = self.user_id + "." + str('bucky') + "." + str(
            bucket_no)
        self.bucket_names.append(bucket_name)
        log.info('bucket_name: %s' % bucket_name)
        bucket_created = self.bucket_ops.create(bucket_name,
                                                self.json_file_upload)
        if not bucket_created['status']:
            # FIX: 'raise AssertionError, msg' is Python-2-only syntax and a
            # SyntaxError on py3; use call form (matches sibling create_bucket)
            raise AssertionError(bucket_created['msgs'])
        # FIX: print statements -> print() function (py3 compatible)
        print('created bucket')
        print(bucket_created)
        self.buckets_created.append(bucket_created['bucket'])
        log.info('bucket created')
    return self.buckets_created
def do_auth(self):
    """Authenticate to S3/RGW via boto and dump the IO-tracking JSON files.

    :rtype: dict
    :return: {'status': True, 'conn': <connection>,
              'upload_json_file': ..., 'download_json_file': ...} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug('function: %s' % self.do_auth.__name__)
    try:
        log.info('got the credentials')
        # conn = S3Connection(self.ak, self.sk)
        self.dump_to_json_upload()
        self.dump_to_json_download()
        conn = boto.connect_s3(
            aws_access_key_id=self.access_key,
            aws_secret_access_key=self.secret_key,
            host=self.hostname,
            port=self.port,
            is_secure=self.is_secure,
            calling_format=boto.s3.connection.OrdinaryCallingFormat())
        log.info('acess_key %s\nsecret_key %s' %
                 (self.access_key, self.secret_key))
        auth_stack = {
            'status': True,
            'conn': conn,
            'upload_json_file': self.json_file_upload,
            'download_json_file': self.json_file_download
        }
    # FIX: 'except ..., e' is Python-2-only syntax; use 'as e'
    except (boto.s3.connection.HostRequiredError,
            exception.AWSConnectionError, Exception) as e:
        log.error('connection failed')
        log.error(e)
        auth_stack = {'status': False, 'msgs': e}
    # BUGFIX: auth_stack was built but never returned; callers do
    # 'self.connection = auth.do_auth()' and index into the result
    return auth_stack
def __init__(self, connection):
    """Bind an S3 connection and an IO-info recorder; op code defaults to 'create'."""
    log.debug("class: %s" % self.__class__.__name__)
    self.connection = connection
    self.add_io_info = AddIOInfo()
    self.test_op_code = "create"
def multipart_upload(self, buckets_created):
    """Multipart-upload self.objects_count files into each given bucket.

    File sizes are drawn from self.objects_size_range; a missing local file
    is created on the fly.  Raises AssertionError on a failed upload.
    """
    size_range = self.objects_size_range
    lower, upper = size_range['min'], size_range['max']
    for bucket in buckets_created:
        for object_count in range(self.objects_count):
            key_name = bucket.name + "." + str(object_count) + ".key" + ".mpFile"
            if os.path.exists(key_name):
                log.info('file exists')
                filename = os.path.abspath(key_name)
            else:
                size = utils.get_file_size(lower, upper)
                log.info('size of the file to create %s' % size)
                log.info('file does not exists, so creating the file')
                filename = utils.create_file(key_name, size)
            md5 = utils.get_md5(filename)
            log.info('got filename %s' % filename)
            log.debug('got file dirname %s' % os.path.dirname(filename))
            json_file = os.path.join(os.path.dirname(filename),
                                     os.path.basename(filename) + ".json")
            log.info('json_file_name %s' % json_file)
            uploader = MultipartPut(bucket, filename)
            uploader.break_at_part_no = self.break_upload_at_part_no
            # NOTE: 'cancel_multpart' / 'iniate_multipart' are the attribute
            # names MultipartPut actually defines (typos preserved upstream)
            uploader.cancel_multpart = self.set_cancel_multipart
            uploader.iniate_multipart(json_file)
            put = uploader.put()
            print(put['status'])
            if not put['status']:
                raise AssertionError(put['msgs'])
def delete(self, bucket_name):
    """Delete the named bucket.

    :param bucket_name: name of the bucket to remove
    :rtype: dict
    :return: {'status': True} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug("function: %s" % self.delete.__name__)
    log.info("in delete bucket")
    try:
        self.connection.delete_bucket(bucket_name)
        outcome = {"status": True}
    except exception.S3ResponseError as err:
        log.error(err)
        outcome = {"status": False, "msgs": err}
    return outcome
def check_contents(self):
    """Download the key's contents as a string.

    Can also be used for getting the contents, i.e. a download.

    :rtype: dict
    :return: {'status': True, 'contents': <string>} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug('function: %s' % self.check_contents.__name__)
    log.info('checking contents or getting the string val')
    try:
        string_contents = self.key.get_contents_as_string()
        string_exists_status = {
            'status': True,
            'contents': string_contents
        }
    # FIX: 'except ..., e' is Python-2-only syntax; use 'as e'
    except exception.BotoClientError as e:
        log.error(e)
        string_exists_status = {'status': False, 'msgs': e}
    # BUGFIX: result dict was built but never returned
    # (sibling implementation of check_contents does return it)
    return string_exists_status
def delete(self, key_name, version_id=None):
    """Delete a key (optionally a specific version) from the bucket.

    For versioned buckets a delete marker may be created; callers that care
    should verify that themselves.

    :param key_name: name of the key to delete
    :param version_id: optional version id to delete
    :return: the deleted key object, or None on error
    """
    log.debug('function: %s' % self.delete.__name__)
    log.debug('in delete key %s:' % key_name)
    try:
        key_deleted = self.bucket.delete_key(key_name, version_id=version_id)
        log.info('key_name: %s' % key_name)
        log.info('version_id: %s' % version_id)
        return key_deleted
    # FIX: 'except ..., e' is Python-2-only syntax; use 'as e'
    except (exception.BotoClientError, exception.S3ResponseError) as e:
        log.error(e)
        return None
def check_if_bucket_empty(bucket):
    """List a bucket's contents so the caller can see whether it is empty.

    :param bucket: bucket object
    :rtype: dict
    :return: {'contents': <listing>} normally;
             {'contents': [], 'msgs': <error>} on failure
    """
    log.debug("function: %s" % check_if_bucket_empty.__name__)
    log.info("checking if bucket is empty")
    try:
        outcome = {"contents": bucket.list()}
    except (exception.S3ResponseError, exception.AWSConnectionError) as err:
        log.error(err)
        outcome = {"contents": [], "msgs": err}
    return outcome
def delete(self, bucket_name):
    """Delete the named bucket.

    :param bucket_name: string
    :rtype: dict
    :return: {'status': True} if deleted,
             {'status': False, 'msgs': <error>} otherwise
    """
    log.debug('function: %s' % self.delete.__name__)
    log.info('in delete bucket')
    try:
        self.connection.delete_bucket(bucket_name)
        delete_bucket_stack = {'status': True}
    # FIX: 'except ..., e' is Python-2-only syntax; use 'as e'
    except exception.S3ResponseError as e:
        log.error(e)
        delete_bucket_stack = {'status': False, 'msgs': e}
    # BUGFIX: status dict was built but never returned
    # (sibling delete implementation does return it)
    return delete_bucket_stack
def set_user_grant(self, bucket, grants):
    """Apply a user grant (ACL) to a bucket, recursively if requested.

    FIX: the docstring previously documented a nonexistent 'acls' parameter
    (and had typos); the actual parameter is 'grants'.

    :param bucket: bucket object
    :param grants: dict of the form
        {'permission': <permission type>,
         'user_id': <canonical_user_id>,
         'recursive': bool}
        permission type: READ, WRITE, READ_ACP, WRITE_ACP or FULL_CONTROL
    :return: True if the grant was applied, False on error,
             None when grants is None (nothing to do)
    """
    if grants is not None:
        try:
            log.debug('setting grants %s' % grants)
            bucket.add_user_grant(permission=grants['permission'],
                                  user_id=grants['user_id'],
                                  recursive=grants['recursive'])
            acp = bucket.get_acl()
            for grant in acp.acl.grants:
                log.info('grants set: %s on %s' %
                         (grant.permission, grant.id))
            return True
        except (exception.S3ResponseError, exception.BotoClientError) as e:
            log.error(e)
            return False
    else:
        log.info('not setting any acls')
def __init__(self, name):
    """Remember the process name; the process handle starts unset."""
    log.debug('class: %s' % self.__class__.__name__)
    self.name = name
    log.info('process_name: %s' % self.name)
    self.process = None
def __init__(self, key, json_file):
    """Wrap an S3 key together with its JSON bookkeeping helpers."""
    log.debug('class: %s' % self.__class__.__name__)
    self.key = key
    self.json_file = json_file
    self.jkey = JKeys(self.json_file)
    self.add_io_info = AddIOInfo()
def get(self, filename):
    """Download the key's contents into *filename* and record key details.

    The local filename may differ from the key's original name.  The md5 of
    the downloaded file is compared against the key's etag and the outcome
    is recorded in the per-bucket JSON file.

    :param filename: local path to download the contents into
    :rtype: dict
    :return: {'status': True} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug('function: %s' % self.get.__name__)
    log.info('getting the contents of file %s:' % self.key)
    log.info('download or get the file to filename: %s' % filename)
    try:
        self.key.get_contents_to_filename(filename)
        md5_on_s3 = self.key.etag.replace('"', '')
        md5_local = utils.get_md5(filename)
        if md5_on_s3 == md5_local:
            md5_match = "match"
        else:
            md5_match = "no match"
        key_details = {
            'key_name': os.path.basename(filename),
            'key_name_os_s3': self.key.name,
            'size': os.stat(filename).st_size,
            'md5_local': md5_local,
            'md5_on_s3': md5_on_s3,
            'md5_match': md5_match,
            'opcode': {
                "edit": {
                    "new_md5": None
                },
                "move": {
                    "new_name": None
                },
                "delete": {
                    "deleted": None
                }
            }
        }
        self.jkey.add(self.key.bucket.name, **key_details)
        download_status = {'status': True}
    # FIX: 'except ..., e' is Python-2-only syntax; use 'as e'
    except (exception.BotoClientError, exception.S3ResponseError,
            Exception) as e:
        log.error(e)
        download_status = {'status': False, 'msgs': e}
    # BUGFIX: status dict was built but never returned to the caller
    return download_status
def put(self, filename, test_op_code='create'):
    """Upload a local file to the key and record details in JSON/IO-info.

    :param filename: path (with location) of the local file to upload
    :param test_op_code: operation tag recorded with the IO info
    :rtype: dict
    :return: {'status': True} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug('function: %s' % self.put.__name__)
    log.info('upload of file: %s' % filename)
    try:
        self.key.set_contents_from_filename(filename)
        md5_on_s3 = self.key.etag.replace('"', '')
        key_details = {
            'key_name': self.key.key,
            'size': os.stat(filename).st_size,
            'md5_local': utils.get_md5(filename),
            'md5_on_s3': md5_on_s3,
            'opcode': {
                "edit": {
                    "new_md5": None
                },
                "move": {
                    "new_name": None
                },
                "delete": {
                    "deleted": None
                }
            }
        }
        self.jkey.add(self.key.bucket.name, **key_details)
        self.add_io_info.add_keys_info(
            self.key.bucket.connection.access_key,
            self.key.bucket.name,
            **{
                'key_name': self.key.key,
                'size': os.stat(filename).st_size,
                'md5_on_s3': md5_on_s3,
                'upload_type': 'normal',
                'test_op_code': test_op_code
            })
        upload_status = {'status': True}
    # FIX: 'except ..., e' is Python-2-only syntax; use 'as e'
    except (exception.BotoClientError, exception.S3ResponseError) as e:
        log.error(e)
        upload_status = {'status': False, 'msgs': e}
    # BUGFIX: status dict was built but never returned to the caller
    return upload_status
def set_metadata(self, **metadata):
    """Set metadata entries on the key.

    FIX/generalization: previously only the first keyword pair was applied
    and any further pairs were silently dropped; now every passed pair is
    set.  Backward compatible for single-entry calls.

    :param metadata: metadata name/value pairs to set
    :return: True if all entries were set, False on error
    """
    log.debug('function: %s' % self.set_metadata.__name__)
    log.info('setting metadata %s' % metadata)
    try:
        for metadata_name, metadata_value in metadata.items():
            self.key.set_metadata(metadata_name, metadata_value)
        return True
    except exception.BotoClientError as e:
        log.error(e)
        return False
def set_metadata(self, **metadata):
    """Set metadata entries on the key.

    FIX/generalization: previously only the first keyword pair was applied
    and any further pairs were silently dropped; now every passed pair is
    set.  Backward compatible for single-entry calls.

    :param metadata: metadata name/value pairs to set
    :return: True if all entries were set, False on error
    """
    log.debug("function: %s" % self.set_metadata.__name__)
    log.info("setting metadata %s" % metadata)
    try:
        for metadata_name, metadata_value in metadata.items():
            self.key.set_metadata(metadata_name, metadata_value)
        return True
    except (exception.BotoClientError, exception.S3ResponseError) as e:
        log.error(e)
        return False
def __init__(self, bucket, filename):
    """Initialise multipart-upload state for one local file in one bucket."""
    log.debug('class: %s' % self.__class__.__name__)
    self.bucket = bucket
    self.filename = filename
    self.split_files_list = []
    self.json_ops = None
    self.mp = None
    self.md5 = None
    # NOTE: attribute name typo ('multpart') is part of the public interface
    self.cancel_multpart = False
    self.break_at_part_no = 0
    self.add_io_info = AddIOInfo()
def __init__(self, access_key, secret_key, user_id, port=None):
    """Store credentials and connection settings for a user.

    :param access_key: AWS access key
    :param secret_key: AWS secret key
    :param user_id: user id; also used to derive the JSON file names
    :param port: optional RGW port.  FIX: this parameter was accepted but
        ignored; it is now honored.  When None (the default) the port is
        discovered via utils_v2.get_radosgw_port_no(), preserving the old
        behavior.
    """
    log.debug("class: %s" % self.__class__.__name__)
    self.access_key = access_key
    self.secret_key = secret_key
    self.hostname = socket.gethostname()
    if port is not None:
        self.port = int(port)
    else:
        self.port = int(utils_v2.get_radosgw_port_no())
    self.is_secure = False
    self.user_id = user_id
    self.json_file_upload = self.user_id + "." + "upload" + "." + "json"
    self.json_file_download = self.user_id + "." + "download" + "." + "json"
def __init__(self, access_key, secret_key, user_id, port=None):
    """Store credentials and connection settings for a user.

    :param access_key: AWS access key
    :param secret_key: AWS secret key
    :param user_id: user id; also used to derive the JSON file names
    :param port: optional RGW port.  FIX: this parameter was accepted but
        ignored (the port was hardcoded to 8080); it is now honored.  When
        None (the default) 8080 is used, preserving the old behavior.
    """
    log.debug('class: %s' % self.__class__.__name__)
    self.access_key = access_key
    self.secret_key = secret_key
    self.hostname = socket.gethostname()
    self.port = int(port) if port is not None else 8080
    self.is_secure = False
    self.user_id = user_id
    self.json_file_upload = self.user_id + "." + "upload" + "." + "json"
    self.json_file_download = self.user_id + "." + "download" + "." + "json"
def multidelete_keys(self, keys_list):
    """Delete several keys from the bucket in a single call.

    :param keys_list: list of key names
    :return: the multi-delete result instance, or None on error
    """
    log.debug('function: %s' % self.multidelete_keys.__name__)
    log.info('in mutiple keys delete %s' % keys_list)
    try:
        return self.bucket.delete_keys(keys_list)
    except (exception.BotoClientError, exception.S3ResponseError) as err:
        log.error(err)
        return None
def get(self, key_name):
    """Look up a key by name in the bucket.

    :param key_name: string
    :return: the key object, or None on error
    """
    log.debug('function: %s' % self.get.__name__)
    log.info('in get key: %s' % key_name)
    try:
        return self.bucket.get_key(key_name)
    except (exception.BotoClientError, exception.S3ResponseError) as err:
        log.error(err)
        return None
def create(self, key_name):
    """Create a new Key object bound to this bucket (no remote I/O yet).

    :param key_name: string
    :return: the new key object, or None on error
    """
    log.debug('function: %s' % self.create.__name__)
    log.info('creating key %s' % key_name)
    try:
        new_key = Key(self.bucket)
        new_key.key = key_name
        return new_key
    except (exception.BotoClientError, exception.S3ResponseError) as err:
        log.error(err)
        return None
def __init__(self, user_details):
    """Authenticate the given user and prepare bucket operations.

    :param user_details: dict with 'user_id', 'access_key', 'secret_key'
    :raises AssertionError: when authentication fails
    """
    log.debug("class: %s" % self.__class__.__name__)
    self.user_id = user_details["user_id"]
    self.access_key = user_details["access_key"]
    self.secret_key = user_details["secret_key"]
    # self.port = user_details['port']
    auth = Authenticate(self.access_key, self.secret_key, self.user_id)
    self.connection = auth.do_auth()
    assert self.connection["status"], self.connection["msgs"]
    conn = self.connection["conn"]
    self.canonical_id = conn.get_canonical_user_id()
    self.json_file_upload = self.connection["upload_json_file"]
    self.json_file_download = self.connection["download_json_file"]
    self.bucket_ops = Bucket(conn)
def __init__(self, user_details):
    """Authenticate the given user and prepare bucket operations.

    :param user_details: dict with 'user_id', 'access_key', 'secret_key'
    :raises AssertionError: when authentication fails
    """
    log.debug('class: %s' % self.__class__.__name__)
    self.user_id = user_details['user_id']
    self.access_key = user_details['access_key']
    self.secret_key = user_details['secret_key']
    # self.port = user_details['port']
    auth = Authenticate(self.access_key, self.secret_key, self.user_id)
    self.connection = auth.do_auth()
    assert self.connection['status'], self.connection['msgs']
    conn = self.connection['conn']
    self.canonical_id = conn.get_canonical_user_id()
    self.json_file_upload = self.connection['upload_json_file']
    self.json_file_download = self.connection['download_json_file']
    self.bucket_ops = Bucket(conn)
def put(self, string_val):
    """Upload a string as the key's contents.

    :param string_val: string to store
    :rtype: dict
    :return: {'status': True} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug('function: %s' % self.put.__name__)
    log.info('upload of string %s' % string_val)
    try:
        self.key.set_contents_from_string(string_val)
        upload_status = {'status': True}
    except exception.BotoClientError as e:
        # FIX: log the failure — every sibling error handler does; this one
        # silently swallowed the error detail
        log.error(e)
        upload_status = {'status': False, 'msgs': e}
    return upload_status
def check_contents(self):
    """Fetch the key's contents as a string (can be used as a download).

    :rtype: dict
    :return: {'status': True, 'contents': <string>} on success,
             {'status': False, 'msgs': <error>} on failure
    """
    log.debug("function: %s" % self.check_contents.__name__)
    log.info("checking contents or getting the string val")
    try:
        contents = self.key.get_contents_as_string()
        outcome = {"status": True, "contents": contents}
    except exception.BotoClientError as err:
        log.error(err)
        outcome = {"status": False, "msgs": err}
    return outcome
def create_bucket(self):
    """Create self.bucket_create_nos buckets named '<user_id>-bucky-<n>'.

    Hyphens (not dots) are used in the names because of BZ1942136: in
    Pacific, bucket creation with '.' fails with InvalidBucketName.

    :return: list of created bucket objects
    :raises AssertionError: if any bucket creation reports failure
    """
    log.info("no of buckets to create: %s" % self.bucket_create_nos)
    log.info("buckets_creating......")
    for bucket_no in range(self.bucket_create_nos):
        log.debug("iter: %s" % bucket_no)
        # BZ1942136 : In pacific,bucket creation with ( . ) fails with 'InvalidBucketName'
        name = self.user_id + "-" + str("bucky") + "-" + str(bucket_no)
        self.bucket_names.append(name)
        log.info("bucket_name: %s" % name)
        created = self.bucket_ops.create(name, self.json_file_upload)
        if not created["status"]:
            raise AssertionError(created["msgs"])
        print("created bucket")
        print(created)
        self.buckets_created.append(created["bucket"])
        log.info("bucket created")
    return self.buckets_created