def test(yaml_file_path):
    """Verify that objects edited via S3 are reflected on the NFS-Ganesha mount.

    Prepares an NFS-Ganesha export for an RGW user (user info read from
    *yaml_file_path*), uploads a small set of objects over S3, performs an
    "edit" operation on them via S3, then verifies the edits through the
    NFS mount.

    :param yaml_file_path: path to the rgw_user_info yaml consumed by
        PrepNFSGanesha.
    :return: the per-key verification status returned by
        SubdirAndObjects.verify_nfs().
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    log.info("initiating nfs ganesha")
    # Record IO bookkeeping before any RGW traffic starts.
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    config = Config()
    config.bucket_count = 1
    config.objects_count = 1
    config.objects_size_range = {"min": 10, "max": 50}
    log.info("begin IO")
    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)
    buckets = rgw.create_bucket()
    rgw.upload(buckets)
    # Give ganesha time to surface the uploaded objects on the mount.
    time.sleep(20)
    # NOTE(review): bdir is never used after construction; kept because
    # BaseDir.__init__ may have side effects — confirm before removing.
    bdir = BaseDir(
        count=None,
        json_fname=rgw.json_file_upload,
        mount_point=ganesha_test_config["mount_point"],
        auth=rgw.connection["conn"],
    )
    subd = SubdirAndObjects(
        base_dir_list=None,
        config=None,
        json_fname=rgw.json_file_upload,
        auth=rgw.connection["conn"],
        download_json_fname=rgw.json_file_download,
    )
    op_status = subd.operation_on_s3(op_code="edit")
    # Abort the whole run on the first failed edit operation.
    for op in op_status:
        if not op["op_code_status"]:
            log.error("operation failed")
            exit(1)
    log.info("verification starts")
    # fixed typo in log message: "verificaion" -> "verification"
    log.info("key verification starts")
    kstatus = subd.verify_nfs(
        mount_point=ganesha_test_config["mount_point"], op_type="edit"
    )
    log.info("key verification complete: %s" % kstatus)
    return kstatus
# NOTE(review): this fragment is cut mid-statement at BOTH ends — the opening
# parser.add_argument('-c', ... call and the trailing log.info(...) arguments
# are outside this view; code left byte-identical.
# NOTE(review): yaml.load(f) without an explicit Loader is deprecated and
# unsafe on untrusted input — prefer yaml.safe_load(f); confirm with owners.
dest="config", help='RGW Test yaml configuration') parser.add_argument('-p', dest="port", default='8080', help='port number where RGW is running') args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_size_range = {'min': 300, 'max': 500} else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.user_count = doc['config']['user_count'] config.bucket_count = doc['config']['bucket_count'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'], 'max': doc['config']['objects_size_range']['max'] } log.info( 'user_count:%s\n' 'bucket_count: %s\n' 'object_min_size: %s\n' %
def test(yaml_file_path):
    """Verify that objects deleted via S3 disappear from the NFS-Ganesha mount.

    Prepares an NFS-Ganesha export for an RGW user (user info read from
    *yaml_file_path*), uploads a small set of objects over S3, deletes them
    via S3, then checks through the NFS mount that no deleted key is still
    visible.

    :param yaml_file_path: path to the rgw_user_info yaml consumed by
        PrepNFSGanesha.
    :return: dict with 'delete' (every S3 delete op succeeded) and 'key'
        (no deleted key remains visible on the NFS mount). Note: if the
        delete phase fails, 'key' keeps its initial True value — the key
        check simply never ran.
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path,
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    log.info('initiating nfs ganesha')
    # Record IO bookkeeping before any RGW traffic starts.
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()
    config = Config()
    config.bucket_count = 2
    config.objects_count = 2
    config.objects_size_range = {'min': 10, 'max': 50}
    log.info('begin IO')
    rgw_user = nfs_ganesha.read_config()
    rgw = ObjectOps(config, rgw_user)
    buckets = rgw.create_bucket()
    rgw.upload(buckets)
    # Give ganesha time to surface the uploaded objects on the mount.
    time.sleep(20)
    # NOTE(review): bdir is never used after construction; kept because
    # BaseDir.__init__ may have side effects — confirm before removing.
    bdir = BaseDir(count=None,
                   json_fname=rgw.json_file_upload,
                   mount_point=ganesha_test_config['mount_point'],
                   auth=rgw.connection['conn'])
    subd = SubdirAndObjects(base_dir_list=None,
                            config=None,
                            json_fname=rgw.json_file_upload,
                            auth=rgw.connection['conn'])
    ks_op_status = subd.operation_on_s3(op_code='delete')
    verification = {'delete': True, 'key': True}
    # Any single failed delete op fails the delete phase.
    for status in ks_op_status:
        if not status['op_code_status']:
            verification['delete'] = False
            break
    if verification['delete']:
        log.info('verification starts')
        # fixed typo in log message: "verificaion" -> "verification"
        log.info('key verification starts')
        kstatus = subd.verify_nfs(
            mount_point=ganesha_test_config['mount_point'])
        log.info('key verification complete: %s' % kstatus)
        # BUG FIX: the original loop overwrote verification['key'] on every
        # iteration, so only the LAST entry decided the result and a key
        # that still existed earlier in the list was silently ignored.
        # Deletion verification passes only if NO key still exists.
        verification['key'] = all(not ks['exists'] for ks in kstatus)
    return verification
# NOTE(review): this fragment is cut at the start — the leading ')' closes a
# parser.add_argument('-c', ...) call whose beginning is outside this view;
# code left byte-identical.
# NOTE(review): yaml.load(f) without an explicit Loader is deprecated and
# unsafe on untrusted input — prefer yaml.safe_load(f); confirm with owners.
# NOTE(review): args.port is parsed but never assigned onto config in this
# fragment — possibly a dead '-p' option; verify against test_exec().
) parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config with open(yaml_file, "r") as f: doc = yaml.load(f) config = Config() config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], } log.info( "bucket_count: %s\n" "objects_count: %s\n" "objects_size_range: %s\n" % (config.bucket_count, config.objects_count, config.objects_size_range)) test_exec(config)
# NOTE(review): this fragment is cut at the end — the log.info( call is left
# open and whatever follows (its arguments, the test invocation) is outside
# this view; code left byte-identical.
# NOTE(review): yaml.load(f) without an explicit Loader is deprecated and
# unsafe on untrusted input — prefer yaml.safe_load(f); confirm with owners.
# NOTE(review): in the yaml branch, user_count and bucket_count are still
# hard-coded (2 and 1) instead of being read from the yaml doc like
# objects_count — confirm whether that is intentional.
if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config config = Config() config.port = args.port if yaml_file is None: config.bucket_count = 1 config.user_count = 2 config.objects_count = 4 config.objects_size_range = {"min": 10, "max": 50} else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.user_count = 2 config.bucket_count = 1 config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], } log.info("user_count:%s\n"