def test(yaml_file_path):
    """Delete keys over S3 on an NFS-Ganesha backed RGW setup and verify
    the deletions are reflected on the NFS mount.

    :param yaml_file_path: path to the rgw user-info yaml consumed by
        PrepNFSGanesha.
    :return: dict with verification flags:
        'delete' - the S3 delete operations all succeeded,
        'key'    - no deleted key is still visible on the NFS mount.
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path,
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    log.info('initiating nfs ganesha')
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()
    config = Config()
    config.bucket_count = 2
    config.objects_count = 2
    config.objects_size_range = {'min': 10, 'max': 50}
    log.info('begin IO')
    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)
    buckets = rgw.create_bucket()
    rgw.upload(buckets)
    # give the uploaded objects time to appear on the ganesha mount
    time.sleep(20)
    # NOTE(review): bdir is constructed but never used afterwards; kept in
    # case BaseDir() has side effects -- confirm and drop if it does not.
    bdir = BaseDir(count=None,
                   json_fname=rgw.json_file_upload,
                   mount_point=ganesha_test_config['mount_point'],
                   auth=rgw.connection['conn'])
    subd = SubdirAndObjects(base_dir_list=None,
                            config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'])
    ks_op_status = subd.operation_on_s3(op_code='delete')
    verification = {'delete': True, 'key': True}
    for status in ks_op_status:
        if not status['op_code_status']:
            verification['delete'] = False
            break
    if verification['delete']:
        log.info('verification starts')
        log.info('key verificaion starts')
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config['mount_point'])
        log.info('key verification complete: %s' % kstatus)
        # BUGFIX: the flag used to be overwritten on every iteration, so only
        # the LAST key's state decided the result.  Fail (and stop) as soon
        # as any deleted key is still visible on the NFS mount.
        for ks in kstatus:
            if ks['exists']:
                verification['key'] = False
                break
    return verification
if __name__ == "__main__":
    # Command-line entry point: read cluster name / user count from an
    # optional yaml file (falling back to defaults) and run the test.
    parser = argparse.ArgumentParser(description="RGW Automation")
    parser.add_argument("-c", dest="config", help="RGW Test yaml configuration")
    parser.add_argument("-p", dest="port", default="8080",
                        help="port number where RGW is running")
    args = parser.parse_args()
    yaml_file = args.config
    config = Config()
    if yaml_file is None:
        # defaults used when no yaml configuration is supplied
        config.cluster_name = "ceph"
        config.user_count = 2
    else:
        with open(yaml_file, "r") as f:
            # BUGFIX: yaml.load() without an explicit Loader is deprecated and
            # can execute arbitrary tags; the config needs only plain yaml.
            doc = yaml.safe_load(f)
        config.cluster_name = doc["config"]["cluster_name"]
        config.user_count = doc["config"]["user_count"]
    log.info("user_count:%s\n" % (config.user_count))
    test_exec(config)
test_info.failed_status("test failed: %s" % e) sys.exit(1) if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.bucket_count = 2 config.user_count = 3 config.objects_count = 4 config.objects_size_range = {"min": 10, "max": 50} else: with open(yaml_file, "r") as f: doc = yaml.safe_load(f) config.bucket_count = doc["config"]["bucket_count"] config.user_count = doc["config"]["user_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"],
default='yamls/config.yaml', help='RGW Test yaml configuration') parser.add_argument('-p', dest="port", default='8080', help='port number where RGW is running') args = parser.parse_args() yaml_file = args.config with open(yaml_file, 'r') as f: doc = yaml.load(f) config = Config() config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config']['objects_count'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'], 'max': doc['config']['objects_size_range']['max'] } log.info( 'bucket_count: %s\n' 'objects_count: %s\n' 'objects_size_range: %s\n' % (config.bucket_count, config.objects_count, config.objects_size_range)) test_exec(config)
if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument( "-p", dest="port", default="8080", help="port number where RGW is running" ) args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {"min": 10, "max": 50} config.version_count = 5 else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], }
def test(yaml_file_path):
    """Edit objects over S3 on an NFS-Ganesha backed RGW setup and verify
    the edited content through the NFS mount.

    :param yaml_file_path: path to the rgw user-info yaml consumed by
        PrepNFSGanesha.
    :return: per-key verification status list from verify_nfs().
    """
    test_conf = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path,
    }
    log.info('ganesha_test_config :%s\n' % test_conf)
    log.info('initiating nfs ganesha')
    io_info = AddIOInfo()
    io_info.initialize()
    ganesha = PrepNFSGanesha(mount_point=test_conf['mount_point'],
                             yaml_fname=test_conf['rgw_user_info'])
    ganesha.initialize()
    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {'min': 10, 'max': 50}
    log.info('begin IO')
    rgw = ObjectOps(config, ganesha.read_config())
    rgw.upload(rgw.create_bucket())
    # allow the uploads to show up on the mount before operating on them
    time.sleep(20)
    bdir = BaseDir(count=None,
                   json_fname=rgw.json_file_upload,
                   mount_point=test_conf['mount_point'],
                   auth=rgw.connection['conn'])
    subd = SubdirAndObjects(base_dir_list=None,
                            config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'],
                            download_json_fname=rgw.json_file_download)
    # abort immediately if any S3-side edit failed
    for op in subd.operation_on_s3(op_code='edit'):
        if not op['op_code_status']:
            log.error('operation failed')
            exit(1)
    log.info('verification starts')
    log.info('key verificaion starts')
    kstatus = subd.verify_nfs(mount_point=test_conf['mount_point'],
                              op_type='edit')
    log.info('key verification complete: %s' % kstatus)
    return kstatus
def test(yaml_file_path):
    """Upload objects over S3 and verify both buckets and keys are visible
    and intact (existence, md5, size) on the NFS-Ganesha mount.

    :param yaml_file_path: path to the rgw user-info yaml consumed by
        PrepNFSGanesha.
    :return: dict with 'bucket' and 'key' boolean verification flags.
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    log.info("initiating nfs ganesha")
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    config = Config()
    config.bucket_count = 5
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}
    log.info("begin IO")
    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)
    buckets = rgw.create_bucket()
    rgw.upload(buckets)
    # give the uploads time to land before inspecting the mount
    time.sleep(20)
    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection,
    )
    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection,
    )
    time.sleep(15)
    log.info("verification starts")
    log.info("bucket verification starts")
    bstatus = bdir.verify_nfs()
    log.info("bucket verification complete:%s" % bstatus)
    log.info("key verificaion starts")
    kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
    log.info("key verification complete: %s" % kstatus)
    verification = {"bucket": bool(bstatus), "key": True}
    # BUGFIX: the original kept iterating after a missing key and then read
    # ks["md5_matched"]/ks["size_matched"] on it (inconsistent with the other
    # checks, which break; and risky if those fields are absent for a missing
    # key).  Fail fast on the first discrepancy instead.
    for ks in kstatus:
        if not ks["exists"]:
            verification["key"] = False
            break
        if not ks["md5_matched"] or not ks["size_matched"]:
            verification["key"] = False
            break
    return verification
parser = argparse.ArgumentParser(description='RGW Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') parser.add_argument('-p', dest="port", default='8080', help='port number where RGW is running') args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 0 config.objects_size_range = 0 else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.user_count = doc['config']['user_count'] config.bucket_count = doc['config']['bucket_count'] config.objects_size_range = 0 config.objects_count = 0 log.info('user_count:%s\n'
test_info.failed_status('test failed: %s' % e) sys.exit(1) if __name__ == '__main__': parser = argparse.ArgumentParser(description='RGW Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() config.shards = None config.max_objects = None if yaml_file is None: config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {'min': 10, 'max': 50} config.shards = 32 config.max_objects = 2 else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config']['objects_count'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'],
if __name__ == '__main__':
    # Command-line entry point: read cluster name / user count from an
    # optional yaml file (falling back to defaults) and run the test.
    parser = argparse.ArgumentParser(description='RGW Automation')
    parser.add_argument('-c', dest="config", help='RGW Test yaml configuration')
    parser.add_argument('-p', dest="port", default='8080',
                        help='port number where RGW is running')
    args = parser.parse_args()
    yaml_file = args.config
    config = Config()
    if yaml_file is None:
        # defaults used when no yaml configuration is supplied
        config.cluster_name = 'ceph'
        config.user_count = 2
    else:
        with open(yaml_file, 'r') as f:
            # BUGFIX: yaml.load() without an explicit Loader is deprecated and
            # can execute arbitrary tags; the config needs only plain yaml.
            doc = yaml.safe_load(f)
        config.cluster_name = doc['config']['cluster_name']
        config.user_count = doc['config']['user_count']
    log.info('user_count:%s\n' % (config.user_count))
    test_exec(config)
test_info.failed_status("test failed: %s" % e) sys.exit(1) if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() config.shards = None config.max_objects = None if yaml_file is None: config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {"min": 10, "max": 50} config.shards = 32 config.max_objects = 2 else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"],
test_info.success_status('test completed') sys.exit(0) except AssertionError as e: log.error(e) test_info.failed_status('test failed: %s' % e) sys.exit(1) if __name__ == '__main__': parser = argparse.ArgumentParser(description='RGW Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() config.shards = None config.max_objects = None if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {'min': 10, 'max': 50} config.shards = 32 config.max_objects = 2 else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.user_count = doc['config']['user_count'] config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config']['objects_count']
def test(yaml_file_path):
    """Edit objects over S3 on an NFS-Ganesha backed RGW setup and confirm
    the edits are visible through the NFS mount point.

    :param yaml_file_path: path to the rgw user-info yaml consumed by
        PrepNFSGanesha.
    :return: per-key verification status list from verify_nfs().
    """
    ganesha_test_config = {'mount_point': 'ganesha-mount',
                           'rgw_user_info': yaml_file_path}
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    log.info('initiating nfs ganesha')
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    mnt = ganesha_test_config['mount_point']
    nfs_ganesha = PrepNFSGanesha(
        mount_point=mnt,
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()
    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {'min': 10, 'max': 50}
    log.info('begin IO')
    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)
    rgw.upload(rgw.create_bucket())
    # let the uploads propagate to the mount before editing
    time.sleep(20)
    conn = rgw.connection['conn']
    bdir = BaseDir(count=None, json_fname=rgw.json_file_upload,
                   mount_point=mnt, auth=conn)
    subd = SubdirAndObjects(base_dir_list=None, config=None,
                            json_fname=rgw.json_file_upload, auth=conn,
                            download_json_fname=rgw.json_file_download)
    op_status = subd.operation_on_s3(op_code='edit')
    # all() short-circuits on the first failed operation
    if not all(op['op_code_status'] for op in op_status):
        log.error('operation failed')
        exit(1)
    log.info('verification starts')
    log.info('key verificaion starts')
    kstatus = subd.verify_nfs(mount_point=mnt, op_type='edit')
    log.info('key verification complete: %s' % kstatus)
    return kstatus
parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.bucket_count = 10 config.objects_count = 0 config.objects_size_range = 0 else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.bucket_count = doc["config"]["bucket_count"] config.objects_size_range = 0 config.objects_count = 0 log.info("bucket_count: %s\n" "port: %s\n" % (config.bucket_count, config.port))
parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument( "-c", dest="config", default="yamls/config.yaml", help="RGW Test yaml configuration", ) parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config with open(yaml_file, "r") as f: doc = yaml.load(f) config = Config() config.user_count = doc["config"]["user_count"] config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], } log.info("user_count:%s\n" "bucket_count: %s\n" "objects_count: %s\n" "objects_size_range: %s\n" % ( config.user_count, config.bucket_count, config.objects_count, config.objects_size_range,
sys.exit(1) if __name__ == '__main__': parser = argparse.ArgumentParser(description='RGW Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') parser.add_argument('-p', dest="port", default='8080', help='port number where RGW is running') args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.bucket_count = 10 config.objects_size_range = {'min': 300, 'max': 500} config.break_at_part_no = 19 else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.bucket_count = doc['config']['bucket_count'] config.objects_size_range = {'min': doc['config']['objects_size_range']['min'], 'max': doc['config']['objects_size_range']['max']} config.break_at_part_no = doc['config']['break_at_part_no'] log.info('bucket_count: %s\n' 'object_min_size: %s\n'
def test(yaml_file_path):
    """Move keys over S3 on an NFS-Ganesha backed RGW setup and verify the
    moved keys are visible on the NFS mount.

    :param yaml_file_path: path to the rgw user-info yaml consumed by
        PrepNFSGanesha.
    :return: dict with verification flags:
        'delete' - the S3 move operations all succeeded,
        'key'    - every moved key is visible on the NFS mount.
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info("initiating nfs ganesha")
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    config = Config()
    config.bucket_count = 1
    config.objects_count = 2
    config.objects_size_range = {"min": 10, "max": 50}
    log.info("begin IO")
    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)
    buckets = rgw.create_bucket()
    rgw.upload(buckets)
    # give the uploads time to appear on the ganesha mount
    time.sleep(20)
    # NOTE(review): bdir is constructed but never used afterwards; kept in
    # case BaseDir() has side effects -- confirm and drop if it does not.
    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )
    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
    )
    ks_op_status = subd.operation_on_s3(op_code="move")
    time.sleep(300)  # wait for 5 mins
    # after move, verify on nfs for the changes
    verification = {"delete": True, "key": True}
    for status in ks_op_status:
        if not status["op_code_status"]:
            verification["delete"] = False
            break
    if verification["delete"]:
        log.info("verification starts")
        log.info("key verificaion starts")
        kstatus = subd.verify_nfs(mount_point=ganesha_test_config["mount_point"])
        log.info("key verification complete: %s" % kstatus)
        # BUGFIX: the flag used to be rewritten on every iteration, so a
        # missing key followed by an existing one reported success.  Fail
        # (and stop) as soon as any moved key is absent from the mount.
        for ks in kstatus:
            if not ks["exists"]:
                verification["key"] = False
                break
    return verification
test_info.failed_status("test failed: %s" % e) sys.exit(1) if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_size_range = {"min": 300, "max": 500} config.break_at_part_no = 19 else: with open(yaml_file, "r") as f: doc = yaml.safe_load(f) config.user_count = doc["config"]["user_count"] config.bucket_count = doc["config"]["bucket_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], }
if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument( "-p", dest="port", default="8080", help="port number where RGW is running" ) args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.bucket_count = 10 config.objects_size_range = {"min": 300, "max": 500} else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.bucket_count = doc["config"]["bucket_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], } log.info( "bucket_count: %s\n"
parser = argparse.ArgumentParser(description='RGW Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') parser.add_argument('-p', dest="port", default='8080', help='port number where RGW is running') args = parser.parse_args() yaml_file = args.config config = Config() config.user_count = 2 config.port = args.port if yaml_file is None: config.bucket_count = 2 config.objects_count = 10 config.objects_size_range = {'min': 10, 'max': 50} else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config']['objects_count'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'],
test_info.success_status("test completed") sys.exit(0) except AssertionError as e: log.error(e) test_info.failed_status("test failed: %s" % e) sys.exit(1) if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() config.shards = None config.max_objects = None if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {"min": 10, "max": 50} config.shards = 32 config.max_objects = 2 else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.user_count = doc["config"]["user_count"] config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"]