except Exception as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status('test failed') sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status('test failed') sys.exit(1) if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration', default=None) args = parser.parse_args() config = Config() config.user_count = 2 config.bucket_count = 2 test_exec(config)
# NOTE(review): whitespace-mangled fragment of a script __main__ block; the
# trailing log.info format string is cut off mid-expression by the chunking,
# so the code is left byte-identical.
# NOTE(review): yaml.load(f) without an explicit Loader is deprecated and
# unsafe on untrusted input — prefer yaml.safe_load(f).
log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() with open(yaml_file, 'r') as f: doc = yaml.load(f) config.container_count = doc['config']['container_count'] config.objects_count = doc['config']['objects_count'] config.cluster_name = doc['config']['cluster_name'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'], 'max': doc['config']['objects_size_range']['max'] } log.info('bucket_count: %s\n' 'objects_count: %s\n' 'objects_size_range: %s\n' %
sys.exit(1) if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() if yaml_file is None: config.bucket_count = 10 else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.bucket_count = doc['config']['bucket_count'] config.cluster_name = doc['config']['cluster_name'] config.test_ops = doc['config']['test_ops'] log.info('bucket_count: %s\n' 'cluster_name: %s' % (config.bucket_count, config.cluster_name)) log.info('test_ops: %s' % config.test_ops) test_exec(config)
# NOTE(review): whitespace-mangled fragment; the `try:` matching the
# `except (RGWBaseException, Exception)` clause lies above this chunk, so
# the code is left byte-identical.
# NOTE(review): listing Exception alongside RGWBaseException is redundant
# (Exception already matches it) but harmless.
# NOTE(review): running without -c leaves yaml_file None and crashes in
# os.path.splitext — presumably -c is mandatory here; confirm.
if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument( "-log_level", dest="log_level", help="Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]", default="info", ) args = parser.parse_args() yaml_file = args.config log_f_name = os.path.basename(os.path.splitext(yaml_file)[0]) configure_logging(f_name=log_f_name, set_level=args.log_level.upper()) config = Config(yaml_file) config.read() test_exec(config) test_info.success_status("test passed") sys.exit(0) except (RGWBaseException, Exception) as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status("test failed") sys.exit(1)
if __name__ == '__main__':
    # Resolve the project root and ensure the shared test-data dir exists.
    project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
    test_data_dir = 'test_data'
    TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
    log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH)
    if not os.path.exists(TEST_DATA_PATH):
        log.info('test data dir not exists, creating.. ')
        os.makedirs(TEST_DATA_PATH)
    parser = argparse.ArgumentParser(description='RGW S3 Automation')
    parser.add_argument('-c', dest="config", help='RGW Test yaml configuration')
    args = parser.parse_args()
    yaml_file = args.config
    config = Config()
    config.shards = None
    config.max_objects = None
    if yaml_file is None:
        # Built-in default when no yaml config is supplied.
        config.objects_size_range = {'min': 10, 'max': 50}
    else:
        with open(yaml_file, 'r') as f:
            # BUG FIX: yaml.load() without a Loader is deprecated/unsafe;
            # safe_load is sufficient for plain config documents.
            doc = yaml.safe_load(f)
        # BUG FIX: the original assigned the entire 'config' mapping to
        # user_count; sibling scripts read doc['config']['user_count'].
        config.user_count = doc['config']['user_count']
        config.cluster_name = doc['config']['cluster_name']
        config.objects_size_range = {
            'min': doc['config']['objects_size_range']['min'],
            'max': doc['config']['objects_size_range']['max'],
        }
    test_exec(config)
# NOTE(review): whitespace-mangled fragment; it ends mid `except ... as e:`
# header (handler body cut off by chunking), so the code is left
# byte-identical.
# NOTE(review): test_info.name reads config.rgw_client BEFORE
# config.rgw_client is assigned from doc['rgw_client'] a few statements
# later — the name is formatted from whatever default Config provides;
# looks like an ordering bug, confirm against the original script.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
test_info = AddTestInfo('storage_policy test') try: project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config(yaml_file) with open(yaml_file, 'r') as f: doc = yaml.load(f) test_info.name = 'storage_policy for %s' % config.rgw_client test_info.started_info() config.objects_size_range = doc['config']['objects_size_range'] config.rgw_client = doc['rgw_client'] test_exec(config) test_info.success_status('test passed') sys.exit(0) except (RGWBaseException, Exception) as e:
# NOTE(review): whitespace-mangled fragment; the objects_size_range dict
# literal is cut off mid-expression by the chunking, so the code is left
# byte-identical.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config(yaml_file) config.shards = None config.max_objects = None if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {'min': 10, 'max': 50} else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.user_count = doc['config']['user_count'] config.cluster_name = doc['config']['cluster_name'] config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config']['objects_count'] config.objects_size_range = {
test_info.failed_status('test failed') sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status('test failed') sys.exit(1) if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration', default=None) args = parser.parse_args() config = Config() yaml_file = args.config with open(yaml_file, 'r') as f: doc = yaml.load(f) config.user_count = doc['config']['user_count'] config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config'].get('objects_count', None) config.objects_size_range = doc['config'].get('objects_size_range', None) test_exec(config)
# NOTE(review): whitespace-mangled fragment; the objects_size_range dict
# literal is cut off mid-expression by the chunking, so the code is left
# byte-identical.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() config.shards = None config.max_objects = None if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {'min': 10, 'max': 50} else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.user_count = doc['config']['user_count'] config.bucket_count = doc['config']['bucket_count'] config.objects_count = doc['config']['objects_count'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'],
# Entry point: create RGW users per the yaml config and run the test.
test_info = AddTestInfo("user create test")
test_info.started_info()
project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
test_data_dir = "test_data"
TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH)
if not os.path.exists(TEST_DATA_PATH):
    log.info("test data dir not exists, creating.. ")
    os.makedirs(TEST_DATA_PATH)
parser = argparse.ArgumentParser(description="RGW S3 Automation")
parser.add_argument("-c", dest="config", help="RGW Test yaml configuration")
parser.add_argument(
    "-log_level",
    dest="log_level",
    help="Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]",
    default="info",
)
args = parser.parse_args()
yaml_file = args.config
# The log file is named after the yaml config file.
log_f_name = os.path.basename(os.path.splitext(yaml_file)[0])
configure_logging(f_name=log_f_name, set_level=args.log_level.upper())
config = Config(yaml_file)
config.read()
with open(yaml_file, "r") as f:
    # BUG FIX: yaml.load() without a Loader is deprecated/unsafe — safe_load.
    doc = yaml.safe_load(f)
config.user_count = doc["config"]["user_count"]
log.info("user_count:%s\n" % (config.user_count))
test_exec(config)
sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status("user creation failed") sys.exit(1) if __name__ == "__main__": parser = argparse.ArgumentParser(description="RGW Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument("-p", dest="port", default="8080", help="port number where RGW is running") args = parser.parse_args() yaml_file = args.config config = Config() if yaml_file is None: config.cluster_name = "ceph" config.user_count = 2 else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.cluster_name = doc["config"]["cluster_name"] config.user_count = doc["config"]["user_count"] log.info("user_count:%s\n" % (config.user_count)) test_exec(config)
# NOTE(review): whitespace-mangled fragment; it ends mid `except ... as e:`
# header (handler body cut off by chunking), so the code is left
# byte-identical.
# NOTE(review): test_info.name reads config.rgw_client BEFORE
# config.rgw_client is assigned from doc['rgw_client'] — looks like an
# ordering bug; confirm against the original script.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") parser.add_argument( "-log_level", dest="log_level", help="Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]", default="info", ) args = parser.parse_args() yaml_file = args.config log_f_name = os.path.basename(os.path.splitext(yaml_file)[0]) configure_logging(f_name=log_f_name, set_level=args.log_level.upper()) config = Config(yaml_file) with open(yaml_file, "r") as f: doc = yaml.load(f) test_info.name = "storage_policy for %s" % config.rgw_client test_info.started_info() config.objects_size_range = doc["config"]["objects_size_range"] config.rgw_client = doc["rgw_client"] test_exec(config) test_info.success_status("test passed") sys.exit(0) except (RGWBaseException, Exception) as e:
if __name__ == "__main__":
    # Resolve project root and ensure the shared test-data directory exists.
    project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
    test_data_dir = "test_data"
    TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
    log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH)
    if not os.path.exists(TEST_DATA_PATH):
        log.info("test data dir not exists, creating.. ")
        os.makedirs(TEST_DATA_PATH)
    parser = argparse.ArgumentParser(description="RGW S3 Automation")
    parser.add_argument("-c", dest="config", help="RGW Test yaml configuration")
    args = parser.parse_args()
    yaml_file = args.config
    config = Config()
    with open(yaml_file, "r") as f:
        # BUG FIX: yaml.load() without a Loader is deprecated/unsafe.
        doc = yaml.safe_load(f)
    config.objects_count = doc["config"]["objects_count"]
    config.objects_size_range = {
        "min": doc["config"]["objects_size_range"]["min"],
        "max": doc["config"]["objects_size_range"]["max"],
    }
    config.sharding_type = doc["config"]["sharding_type"]
    log.info(
        "objects_count: %s\n"
        "objects_size_range: %s\n"
        "sharding_type: %s\n"
        % (config.objects_count, config.objects_size_range, config.sharding_type)
    )
    test_exec(config)
# NOTE(review): whitespace-mangled fragment; the objects_size_range dict
# literal is cut off mid-expression by the chunking, so the code is left
# byte-identical.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() config.shards = None config.max_objects = None if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {"min": 10, "max": 50} else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.user_count = doc["config"]["user_count"] config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"],
sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="NFS-Ganesha-RGW Automation") parser.add_argument("-r", dest="rgw_user_info", help="RGW user info") parser.add_argument("-c", dest="test_config", help="Test Configuration") parser.add_argument( "-log_level", dest="log_level", help="Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]", default="info", ) args = parser.parse_args() rgw_user_info_yaml = args.rgw_user_info test_config_yaml = args.test_config log_f_name = os.path.basename(os.path.splitext(test_config_yaml)[0]) configure_logging(f_name=log_f_name, set_level=args.log_level.upper()) config = Config(test_config_yaml) config.read() test_exec(rgw_user_info_yaml, config)
except Exception as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status("test failed") sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status("test failed") sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config(yaml_file) with open(yaml_file, "r") as f: doc = yaml.load(f) config.bucket_policy_op = doc["config"]["bucket_policy_op"] config.cluster_name = doc["config"]["cluster_name"] test_exec(config)
except Exception as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status('test failed') sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status('test failed') sys.exit(1) if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() with open(yaml_file, 'r') as f: doc = yaml.load(f) config.rgw_client = doc['rgw_client'] test_exec(config)
# NOTE(review): fragment starts mid-script — the `if __name__` guard (if
# any) lies above this chunk.
project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
test_data_dir = 'test_data'
TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH)
if not os.path.exists(TEST_DATA_PATH):
    log.info('test data dir not exists, creating.. ')
    os.makedirs(TEST_DATA_PATH)
parser = argparse.ArgumentParser(description='RGW S3 Automation')
parser.add_argument('-c', dest="config", help='RGW Test yaml configuration')
args = parser.parse_args()
yaml_file = args.config
config = Config()
with open(yaml_file, 'r') as f:
    # BUG FIX: yaml.load() without a Loader is deprecated/unsafe.
    doc = yaml.safe_load(f)
config.bucket_policy_op = doc['config']['bucket_policy_op']
config.cluster_name = doc['config']['cluster_name']
test_exec(config)
test_info.failed_status('test failed') sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status('test failed') sys.exit(1) if __name__ == '__main__': project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = 'test_data' TEST_DATA_PATH = (os.path.join(project_dir, test_data_dir)) log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config(yaml_file) config.read() config.objects_count = 2 if config.mapped_sizes is None: config.mapped_sizes = utils.make_mapped_sizes(config) test_exec(config)
# NOTE(review): whitespace-mangled fragment; the trailing log.info argument
# tuple is cut off mid-expression by the chunking, so the code is left
# byte-identical.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info('test data dir not exists, creating.. ') os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description='RGW S3 Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') args = parser.parse_args() yaml_file = args.config config = Config() with open(yaml_file, 'r') as f: doc = yaml.load(f) config.objects_count = doc['config']['objects_count'] config.objects_size_range = { 'min': doc['config']['objects_size_range']['min'], 'max': doc['config']['objects_size_range']['max'] } config.sharding_type = doc['config']['sharding_type'] log.info('objects_count: %s\n' 'objects_size_range: %s\n' 'sharding_type: %s\n' % (config.objects_count, config.objects_size_range,
# NOTE(review): whitespace-mangled fragment; the objects_size_range dict
# literal is cut off mid-expression by the chunking, so the code is left
# byte-identical.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config(yaml_file) config.shards = None config.max_objects = None if yaml_file is None: config.user_count = 2 config.bucket_count = 10 config.objects_count = 2 config.objects_size_range = {"min": 10, "max": 50} else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.user_count = doc["config"]["user_count"] config.cluster_name = doc["config"]["cluster_name"] config.bucket_count = doc["config"]["bucket_count"] config.objects_count = doc["config"]["objects_count"] config.objects_size_range = {
# NOTE(review): fragment — `test_info` is created above this chunk.
test_info.started_info()
project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
test_data_dir = 'test_data'
TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH)
if not os.path.exists(TEST_DATA_PATH):
    log.info('test data dir not exists, creating.. ')
    os.makedirs(TEST_DATA_PATH)
parser = argparse.ArgumentParser(description='RGW S3 Automation')
parser.add_argument('-c', dest="config", help='RGW Test yaml configuration')
parser.add_argument(
    '-log_level',
    dest='log_level',
    help='Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]',
    default='info')
args = parser.parse_args()
yaml_file = args.config
# The log file is named after the yaml config file.
log_f_name = os.path.basename(os.path.splitext(yaml_file)[0])
configure_logging(f_name=log_f_name, set_level=args.log_level.upper())
config = Config(yaml_file)
config.read()
with open(yaml_file, 'r') as f:
    # BUG FIX: safe_load — bare yaml.load without a Loader is
    # deprecated and unsafe.
    doc = yaml.safe_load(f)
config.user_count = doc['config']['user_count']
log.info('user_count:%s\n' % (config.user_count))
test_exec(config)
sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() with open(yaml_file, "r") as f: doc = yaml.load(f) config.container_count = doc["config"]["container_count"] config.objects_count = doc["config"]["objects_count"] config.cluster_name = doc["config"]["cluster_name"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], } log.info( "bucket_count: %s\n" "objects_count: %s\n" "objects_size_range: %s\n" % (config.container_count, config.objects_count, config.objects_size_range) )
sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() if yaml_file is None: config.bucket_count = 10 else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.bucket_count = doc["config"]["bucket_count"] config.cluster_name = doc["config"]["cluster_name"] config.test_ops = doc["config"]["test_ops"] log.info("bucket_count: %s\n" "cluster_name: %s" % (config.bucket_count, config.cluster_name)) log.info("test_ops: %s" % config.test_ops) test_exec(config)
if __name__ == '__main__':
    # Resolve project root and ensure the shared test-data directory exists.
    project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
    test_data_dir = 'test_data'
    TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
    log.info('TEST_DATA_PATH: %s' % TEST_DATA_PATH)
    if not os.path.exists(TEST_DATA_PATH):
        log.info('test data dir not exists, creating.. ')
        os.makedirs(TEST_DATA_PATH)
    parser = argparse.ArgumentParser(description='RGW S3 Automation')
    parser.add_argument('-c', dest="config", help='RGW Test yaml configuration')
    args = parser.parse_args()
    yaml_file = args.config
    config = Config()
    config.max_objects = None
    if yaml_file is None:
        # Built-in defaults when no yaml config is supplied.
        config.objects_count = 2
        config.objects_size_range = {'min': 10, 'max': 50}
    else:
        with open(yaml_file, 'r') as f:
            # BUG FIX: safe_load — bare yaml.load without a Loader is
            # deprecated and unsafe.
            doc = yaml.safe_load(f)
        config.objects_count = doc['config']['objects_count']
        config.objects_size_range = {
            'min': doc['config']['objects_size_range']['min'],
            'max': doc['config']['objects_size_range']['max'],
        }
        config.test_ops = doc['config']['test_ops']
    log.info('objects_count: %s\n'
             'objects_size_range: %s\n'
             % (config.objects_count, config.objects_size_range))
    # NOTE(review): when run without -c, config.test_ops is never set and
    # this log line raises AttributeError — TODO confirm/guard upstream.
    log.info('test_ops: %s' % config.test_ops)
log.info(traceback.format_exc()) test_info.failed_status('user creation failed') sys.exit(1) if __name__ == '__main__': parser = argparse.ArgumentParser(description='RGW Automation') parser.add_argument('-c', dest="config", help='RGW Test yaml configuration') parser.add_argument('-p', dest="port", default='8080', help='port number where RGW is running') args = parser.parse_args() yaml_file = args.config config = Config() if yaml_file is None: config.cluster_name = 'ceph' config.user_count = 2 else: with open(yaml_file, 'r') as f: doc = yaml.load(f) config.cluster_name = doc['config']['cluster_name'] config.user_count = doc['config']['user_count'] log.info('user_count:%s\n' % ( config.user_count)) test_exec(config)
if response["ContentLength"] != config.obj_size * 1024 * 1024: TestExecError("Content Lenght not matched") log.info("testing for one positive and one negative range") response = rgw_conn2.get_object( Bucket=bucket.name, Key=s3_object_name, Range="-1-3" ) log.info("response: %s\n" % response) log.info("Content-Length: %s" % response["ContentLength"]) log.info("s3_object_size: %s" % (config.obj_size * 1024 * 1024)) if response["ContentLength"] != config.obj_size * 1024 * 1024: TestExecError("Content Lenght not matched") if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config(yaml_file) config.read() if config.mapped_sizes is None: config.mapped_sizes = utils.make_mapped_sizes(config) test_exec(config)
sys.exit(0) except Exception as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status("test failed") sys.exit(1) except TestExecError as e: log.info(e) log.info(traceback.format_exc()) test_info.failed_status("test failed") sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() with open(yaml_file, "r") as f: doc = yaml.load(f) config.rgw_client = doc["rgw_client"] test_exec(config)
# NOTE(review): fragment starts mid-script; the surrounding guard/setup is
# above this chunk.
if not os.path.exists(TEST_DATA_PATH):
    log.info('test data dir not exists, creating.. ')
    os.makedirs(TEST_DATA_PATH)
parser = argparse.ArgumentParser(description='NFS-Ganesha-RGW Automation')
parser.add_argument('-r', dest="rgw_user_info", help='RGW user info')
parser.add_argument('-c', dest="test_config", help='Test Configuration')
args = parser.parse_args()
rgw_user_info_yaml = args.rgw_user_info
test_config_yaml = args.test_config
test_config = Config()
# test config
with open(test_config_yaml, 'r') as f:
    # BUG FIX: safe_load — bare yaml.load without a Loader is
    # deprecated and unsafe.
    doc = yaml.safe_load(f)
test_config.bucket_count = doc['config']['bucket_count']
test_config.objects_count = doc['config']['objects_count']
test_config.objects_size_range = {
    'min': doc['config']['objects_size_range']['min'],
    'max': doc['config']['objects_size_range']['max'],
}
test_config.io_op_config = doc['io_op_config']
test_exec(rgw_user_info_yaml, test_config)
# NOTE(review): whitespace-mangled fragment; the trailing log.info call is
# cut off mid-expression by the chunking, so the code is left
# byte-identical.
# NOTE(review): yaml.load(f) without a Loader is deprecated/unsafe —
# prefer yaml.safe_load(f).
sys.exit(1) if __name__ == "__main__": project_dir = os.path.abspath(os.path.join(__file__, "../../..")) test_data_dir = "test_data" TEST_DATA_PATH = os.path.join(project_dir, test_data_dir) log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH) if not os.path.exists(TEST_DATA_PATH): log.info("test data dir not exists, creating.. ") os.makedirs(TEST_DATA_PATH) parser = argparse.ArgumentParser(description="RGW S3 Automation") parser.add_argument("-c", dest="config", help="RGW Test yaml configuration") args = parser.parse_args() yaml_file = args.config config = Config() config.max_objects = None if yaml_file is None: config.objects_count = 2 config.objects_size_range = {"min": 10, "max": 50} else: with open(yaml_file, "r") as f: doc = yaml.load(f) config.objects_count = doc["config"]["objects_count"] config.objects_size_range = { "min": doc["config"]["objects_size_range"]["min"], "max": doc["config"]["objects_size_range"]["max"], } config.test_ops = doc["config"]["test_ops"] log.info( "objects_count: %s\n"