def _CreateBenchmarkSpec(self, benchmark_config_yaml):
  """Builds a BenchmarkSpec for the ping benchmark from a YAML config string."""
  loaded = configs.LoadConfig(benchmark_config_yaml, {}, NAME)
  spec_config = benchmark_config_spec.BenchmarkConfigSpec(
      NAME, flag_values=FLAGS, **loaded)
  return benchmark_spec.BenchmarkSpec(ping_benchmark, spec_config, UID)
def GetConfig(user_config):
  """Loads the benchmark config, resizing the default group if --ycsb_client_vms is set."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  if FLAGS['ycsb_client_vms'].present:
    default_group = benchmark_config['vm_groups']['default']
    default_group['vm_count'] = FLAGS.ycsb_client_vms
  return benchmark_config
def GetConfig(user_config: Dict[str, Any]) -> Dict[str, Any]:
  """Returns the benchmark config merged with the user's overrides."""
  loaded = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  return loaded
def GetConfig(user_config):
  """Returns the configuration of a benchmark."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  # Only override the group size when the flag was given on the command line.
  if FLAGS['num_vms'].present:
    benchmark_config['vm_groups']['default']['vm_count'] = FLAGS.num_vms
  return benchmark_config
def testLoadConfigDoesMerge(self):
  """LoadConfig should yield the same result as an explicit MergeConfigs."""
  base = yaml.safe_load(CONFIG_A)
  override = yaml.safe_load(CONFIG_B)
  expected = configs.MergeConfigs(base, override)
  actual = configs.LoadConfig(CONFIG_A, override['a'], CONFIG_NAME)
  self.assertEqual(expected['a'], actual)
def testBadParameter(self):
  """An unrecognized VM option should raise during VM construction."""
  bad_config = configs.LoadConfig(BAD_VM_PARAMETER_CONFIG, {}, NAME)
  spec = benchmark_spec.BenchmarkSpec(bad_config, NAME, UID)
  with self.assertRaises(errors.Config.UnrecognizedOption):
    spec.ConstructVirtualMachines()
def GetConfig(user_config):
  """Loads the benchmark config, resizing the 'slaves' group if --num_vms is set."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  if FLAGS['num_vms'].present:
    benchmark_config['vm_groups']['slaves']['vm_count'] = FLAGS.num_vms
  return benchmark_config
def GetConfig(user_config):
  """Loads the benchmark config, sizing the default group from --num_vms.

  The benchmark needs at least 2 VMs, so the requested count is clamped
  to a minimum of 2.
  """
  config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  # The original wrote vm_count twice (flag value, then a fixed minimum of 2
  # when the flag was below 2); a single clamped assignment is equivalent.
  config['vm_groups']['default']['vm_count'] = max(FLAGS.num_vms, 2)
  return config
def makeSpec(self, yaml_benchmark_config=ping_benchmark.BENCHMARK_CONFIG):
  """Creates a BenchmarkSpec with its virtual machines constructed."""
  loaded = configs.LoadConfig(yaml_benchmark_config, {}, NAME)
  benchmark = benchmark_spec.BenchmarkSpec(loaded, NAME, UID)
  benchmark.ConstructVirtualMachines()
  return benchmark
def GetConfig(user_config):
  """Loads the benchmark config, sizing the default group to 1 + --redis_clients VMs."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  vm_total = FLAGS.redis_clients + 1
  benchmark_config['vm_groups']['default']['vm_count'] = vm_total
  return benchmark_config
def GetConfig(user_config):
  """Decide number of vms needed to run oldisim."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  # Leaves are flag-controlled; drivers and roots are fixed module constants.
  total_vms = FLAGS.oldisim_num_leaves + NUM_DRIVERS + NUM_ROOTS
  benchmark_config['vm_groups']['default']['vm_count'] = total_vms
  return benchmark_config
def _CreateBenchmarkSpec(self, benchmark_config_yaml):
  """Builds a BenchmarkSpec using the test's mocked flag values."""
  loaded = configs.LoadConfig(benchmark_config_yaml, {}, NAME)
  spec_config = benchmark_config_spec.BenchmarkConfigSpec(
      NAME, flag_values=self._mocked_flags, **loaded)
  return benchmark_spec.BenchmarkSpec(spec_config, NAME, UID)
def GetConfig(user_config):
  """Returns the configuration of a benchmark."""
  loaded = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  return loaded
def GetConfig(user_config):
  """Loads the benchmark config, sizing the clients group from --cs_websearch_num_clients."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  clients_group = benchmark_config['vm_groups']['clients']
  clients_group['vm_count'] = FLAGS.cs_websearch_num_clients
  return benchmark_config
def _CreateBenchmarkSpecFromYaml(self, yaml_string, benchmark_name=NAME):
  """Parses a YAML config string and builds a BenchmarkSpec from it."""
  loaded = configs.LoadConfig(yaml_string, {}, benchmark_name)
  return self._CreateBenchmarkSpecFromConfigDict(loaded, benchmark_name)
def testBackgroundWorkloadConfigBadIp(self):
  """A background-network config with an invalid ip type should be rejected."""
  bad_config = configs.LoadConfig(
      CONFIG_WITH_BACKGROUND_NETWORK_BAD_IPFLAG, {}, NAME)
  with self.assertRaises(errors.Config.InvalidValue):
    benchmark_spec.BenchmarkSpec(bad_config, NAME, UID)
def GetConfig(user_config):
  """Returns the benchmark config merged with the user's overrides."""
  loaded = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  return loaded
def GetConfig(user_config: Dict[str, Any]) -> Dict[str, Any]:
  """Loads the benchmark config, sizing the default group to 1 + client count."""
  benchmark_config = configs.LoadConfig(
      BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
  benchmark_config['vm_groups']['default']['vm_count'] = (
      _NUM_CLIENTS.value + 1)
  return benchmark_config