def test(yaml_file_path):
    """Basic NFS-Ganesha RGW IO test with a fresh RGW user.

    Resets the rgw_user_info yaml (so a new RGW user/config is created),
    mounts NFS-Ganesha, creates base dirs plus sub-dirs/files through the
    mount, then verifies the buckets and keys on S3.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        dict with 'bucket' and 'key' booleans reporting verification status.
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    verification = {"bucket": False, "key": False}
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")
    log.info(
        "resetting rgw_user_info yaml file with null values to that new rgw user will be created and with new_config"
    )
    rgw_user_config_ops = RGWUserConfigOps(
        yaml_fname=ganesha_test_config["rgw_user_info"]
    )
    rgw_user_config_ops.update_config()
    log.info("will take new config and start the basic IO test")
    log.info("--------------------------------------------------")
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()
    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    subdir.create()
    log.info("verification starts")
    # give RGW time to pick up the writes done through the NFS mount
    time.sleep(15)
    bstatus = bdir.verify_s3()
    log.info("bucket verification complete")
    kstatus = subdir.verify_s3()
    log.info("key verification complete")
    for bs in bstatus:
        if not bs["exists"]:
            verification["bucket"] = False
            break
    else:
        # for/else: runs only when no missing bucket was found
        verification["bucket"] = True
    for ks in kstatus:
        if not ks["exists"]:
            # BUG FIX: break immediately on a missing key. Previously there
            # was no break here, so if the loop later completed normally the
            # for/else clause overwrote this failure with True.
            verification["key"] = False
            break
        if ks["type"] == "file":
            if not ks["md5_matched"]:
                verification["key"] = False
                break
            if not ks["size_matched"]:
                verification["key"] = False
                break
    else:
        # runs only when every key existed and every file matched md5/size
        verification["key"] = True
    return verification
def test(yaml_file_path):
    """NFS-Ganesha move-operation test.

    Mounts NFS-Ganesha, creates base dirs and sub-dirs/files through the
    mount, performs a 'move' operation on the NFS side, then verifies the
    moved keys on S3.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        dict with 'key' and 'move' booleans reporting verification status.
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()
    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    subdir.create()
    op_status = subdir.operation_on_nfs(
        ganesha_test_config["mount_point"], op_code="move"
    )
    verification = {"key": True, "move": True}
    for ops in op_status:
        if not ops["op_code_status"]:
            verification["move"] = False
            break
    else:
        # for/else: only verify keys when every move op succeeded
        log.info("verification starts")
        log.info("key verifcation starts on s3")
        kstatus = subdir.verify_s3()
        log.info("key verificaion complete: \n%s" % kstatus)
        for ks in kstatus:
            if ks["type"] == "file":
                if not ks["exists"]:
                    # BUG FIX: stop at the first missing key. Previously each
                    # file key overwrote verification["key"], so a later
                    # existing key masked an earlier missing one.
                    verification["key"] = False
                    break
    return verification
def test(yaml_file_path):
    """Basic NFS-Ganesha RGW IO test.

    Mounts NFS-Ganesha, creates base dirs plus sub-dirs/files through the
    mount, then verifies the buckets and keys on S3.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        dict with 'bucket' and 'key' booleans reporting verification status.
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    io_config = {
        'base_dir_count': 2,
        'sub_dir_count': 2,
        'Files': {
            'files_in_dir': 2,
            'size': 10
        }
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info('io_config: %s\n' % io_config)
    log.info('initiating nfs ganesha')
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()
    log.info('authenticating rgw user')
    rgw_auth = Authenticate(user_id=nfs_ganesha.user_id,
                            access_key=nfs_ganesha.access_key,
                            secret_key=nfs_ganesha.secret_key)
    auth = rgw_auth.do_auth()
    log.info('begin IO')
    bdir = BaseDir(int(io_config['base_dir_count']),
                   rgw_auth.json_file_upload,
                   ganesha_test_config['mount_point'], auth['conn'])
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth['conn'])
    subdir.create()
    log.info('verification starts')
    # give RGW time to pick up the writes done through the NFS mount
    time.sleep(15)
    bstatus = bdir.verify_s3()
    log.info('bucket verification complete')
    kstatus = subdir.verify_s3()
    log.info('key verification complete')
    verification = {}
    for bs in bstatus:
        if not bs['exists']:
            verification['bucket'] = False
            break
    else:
        # for/else: runs only when no missing bucket was found
        verification['bucket'] = True
    for ks in kstatus:
        if not ks['exists']:
            # BUG FIX: break immediately on a missing key. Previously there
            # was no break here, so if the loop later completed normally the
            # for/else clause overwrote this failure with True.
            verification['key'] = False
            break
        if ks['type'] == 'file':
            if not ks['md5_matched']:
                verification['key'] = False
                break
            if not ks['size_matched']:
                verification['key'] = False
                break
    else:
        # runs only when every key existed and every file matched md5/size
        verification['key'] = True
    return verification
def test(yaml_file_path):
    """NFS-Ganesha edit-operation test.

    Mounts NFS-Ganesha, creates a text file through the mount, performs an
    'edit' operation on the NFS side, then verifies the edited key on S3.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        the key-verification status list from verify_s3(op_type='edit').

    Raises:
        SystemExit: if any NFS edit operation fails.
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    io_config = {
        'base_dir_count': 1,
        'sub_dir_count': 1,
        'Files': {
            'files_in_dir': 1,
            'size': 10
        }
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info('io_config: %s\n' % io_config)
    log.info('initiating nfs ganesha')
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()
    log.info('authenticating rgw user')
    rgw_auth = Authenticate(user_id=nfs_ganesha.user_id,
                            access_key=nfs_ganesha.access_key,
                            secret_key=nfs_ganesha.secret_key)
    auth = rgw_auth.do_auth()
    log.info('begin IO')
    bdir = BaseDir(int(io_config['base_dir_count']),
                   rgw_auth.json_file_upload,
                   ganesha_test_config['mount_point'], auth['conn'])
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth['conn'])
    subdir.create(file_type='text')
    log.info('operation starting: %s' % 'edit')
    op_status = subdir.operation_on_nfs(ganesha_test_config['mount_point'],
                                        op_code='edit')
    for op in op_status:
        if not op['op_code_status']:
            log.error('operation failed')
            # FIX: raise SystemExit directly instead of the site-module
            # exit() builtin, which is not guaranteed to exist in
            # non-interactive runs (e.g. python -S); behavior is identical.
            raise SystemExit(1)
    log.info('verification starts')
    kstatus = subdir.verify_s3(op_type='edit')
    log.info('key verificaion complete: \n%s' % kstatus)
    return kstatus
def test(yaml_file_path):
    """NFS-Ganesha move-operation test.

    Mounts NFS-Ganesha, creates base dirs and sub-dirs/files through the
    mount, performs a 'move' operation on the NFS side, then verifies the
    moved keys on S3.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        dict with 'key' and 'move' booleans reporting verification status.
    """
    ganesha_test_config = {
        'mount_point': 'ganesha-mount',
        'rgw_user_info': yaml_file_path
    }
    log.info('ganesha_test_config :%s\n' % ganesha_test_config)
    io_config = {
        'base_dir_count': 2,
        'sub_dir_count': 2,
        'Files': {
            'files_in_dir': 2,
            'size': 10
        }
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info('io_config: %s\n' % io_config)
    log.info('initiating nfs ganesha')
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config['mount_point'],
        yaml_fname=ganesha_test_config['rgw_user_info'])
    nfs_ganesha.initialize()
    log.info('authenticating rgw user')
    rgw_auth = Authenticate(user_id=nfs_ganesha.user_id,
                            access_key=nfs_ganesha.access_key,
                            secret_key=nfs_ganesha.secret_key)
    auth = rgw_auth.do_auth()
    log.info('begin IO')
    bdir = BaseDir(int(io_config['base_dir_count']),
                   rgw_auth.json_file_upload,
                   ganesha_test_config['mount_point'], auth['conn'])
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload,
                              auth['conn'])
    subdir.create()
    op_status = subdir.operation_on_nfs(ganesha_test_config['mount_point'],
                                        op_code='move')
    verification = {'key': True, 'move': True}
    for ops in op_status:
        if not ops['op_code_status']:
            verification['move'] = False
            break
    else:
        # for/else: only verify keys when every move op succeeded
        log.info('verification starts')
        log.info('key verifcation starts on s3')
        kstatus = subdir.verify_s3()
        log.info('key verificaion complete: \n%s' % kstatus)
        for ks in kstatus:
            if ks['type'] == 'file':
                if not ks['exists']:
                    # BUG FIX: stop at the first missing key. Previously each
                    # file key overwrote verification['key'], so a later
                    # existing key masked an earlier missing one.
                    verification['key'] = False
                    break
    return verification
def test(yaml_file_path):
    """NFS-Ganesha edit-operation test.

    Mounts NFS-Ganesha, creates a text file through the mount, performs an
    'edit' operation on the NFS side, then verifies the edited key on S3.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        the key-verification status list from verify_s3(op_type="edit").

    Raises:
        SystemExit: if any NFS edit operation fails.
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    io_config = {
        "base_dir_count": 1,
        "sub_dir_count": 1,
        "Files": {"files_in_dir": 1, "size": 10},
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()
    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    subdir.create(file_type="text")
    log.info("operation starting: %s" % "edit")
    op_status = subdir.operation_on_nfs(
        ganesha_test_config["mount_point"], op_code="edit"
    )
    for op in op_status:
        if not op["op_code_status"]:
            log.error("operation failed")
            # FIX: raise SystemExit directly instead of the site-module
            # exit() builtin, which is not guaranteed to exist in
            # non-interactive runs (e.g. python -S); behavior is identical.
            raise SystemExit(1)
    log.info("verification starts")
    kstatus = subdir.verify_s3(op_type="edit")
    log.info("key verificaion complete: \n%s" % kstatus)
    return kstatus
def test(yaml_file_path):
    """NFS-Ganesha IO test with an RGW process kill mid-IO.

    Mounts NFS-Ganesha, starts sub-dir/file creation in a background
    thread, kills the radosgw process while the IO runs, then verifies
    the buckets and keys on S3 after the thread finishes.

    Args:
        yaml_file_path: path to the rgw_user_info yaml config file.

    Returns:
        dict with 'bucket' and 'key' booleans reporting verification status.
    """
    ganesha_test_config = {
        "mount_point": "ganesha-mount",
        "rgw_user_info": yaml_file_path,
    }
    verification = {"bucket": False, "key": False}
    log.info("ganesha_test_config :%s\n" % ganesha_test_config)
    io_config = {
        "base_dir_count": 2,
        "sub_dir_count": 2,
        "Files": {"files_in_dir": 2, "size": 10},
    }
    add_io_info = AddIOInfo()
    add_io_info.initialize()
    log.info("io_config: %s\n" % io_config)
    log.info("initiating nfs ganesha")
    nfs_ganesha = PrepNFSGanesha(
        mount_point=ganesha_test_config["mount_point"],
        yaml_fname=ganesha_test_config["rgw_user_info"],
    )
    nfs_ganesha.initialize()
    log.info("authenticating rgw user")
    rgw_auth = Authenticate(
        user_id=nfs_ganesha.user_id,
        access_key=nfs_ganesha.access_key,
        secret_key=nfs_ganesha.secret_key,
    )
    auth = rgw_auth.do_auth()
    log.info("begin IO")
    bdir = BaseDir(
        int(io_config["base_dir_count"]),
        rgw_auth.json_file_upload,
        ganesha_test_config["mount_point"],
        auth["conn"],
    )
    bdirs = bdir.create(uname=str(rgw_auth.user_id))
    subdir = SubdirAndObjects(bdirs, io_config, rgw_auth.json_file_upload, auth["conn"])
    # run the sub-dir/file creation in a thread so the RGW process can be
    # killed while the IO is still in flight
    sub_dir_creation = threading.Thread(target=subdir.create)
    sub_dir_creation.start()
    # kill RGW process
    log.info("killing rgw process")
    p = Process(name="radosgw")
    p.find()
    if p.process is None:
        log.info("process not running")
    else:
        log.info("killing the process")
        p.process.kill()
    sub_dir_creation.join()
    log.info("verification starts")
    # give RGW time to pick up the writes done through the NFS mount
    time.sleep(15)
    bstatus = bdir.verify_s3()
    log.info("bucket verification complete")
    kstatus = subdir.verify_s3()
    log.info("key verification complete")
    verification = {}
    for bs in bstatus:
        if not bs["exists"]:
            verification["bucket"] = False
            break
    else:
        # for/else: runs only when no missing bucket was found
        verification["bucket"] = True
    for ks in kstatus:
        if not ks["exists"]:
            # BUG FIX: break immediately on a missing key. Previously there
            # was no break here, so if the loop later completed normally the
            # for/else clause overwrote this failure with True.
            verification["key"] = False
            break
        if ks["type"] == "file":
            if not ks["md5_matched"]:
                verification["key"] = False
                break
            if not ks["size_matched"]:
                verification["key"] = False
                break
    else:
        # runs only when every key existed and every file matched md5/size
        verification["key"] = True
    return verification