def get_params(self, seed=None):
    if seed is None:
        seed = random.randint(0, 100000)
    arguments = unflatten(
        dict(zip(self.space.keys(), self.space.sample(seed=seed)[0])))
    logger.debug('Sampling:\n{}'.format(pprint.pformat(arguments)))
    return arguments

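# Hedged illustration (standalone, not part of the class above): the
# dict(zip(...)) + unflatten pattern pairs each dimension name of the space
# with the corresponding sampled value, then nests the dotted names.
# `unflatten` is assumed to split keys on '.', as in this module's helpers;
# the keys and values below are made up for the example.
keys = ["optimizer.lr", "optimizer.momentum", "model.width"]   # space.keys()
sample = [0.01, 0.9, 128]                                      # one sampled point
flat = dict(zip(keys, sample))
print(flat)  # {'optimizer.lr': 0.01, 'optimizer.momentum': 0.9, 'model.width': 128}
# unflatten(flat) would then yield:
# {'optimizer': {'lr': 0.01, 'momentum': 0.9}, 'model': {'width': 128}}
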
def get_params(self, seed=None):
    if seed is None:
        seed = random.randint(0, 100000)
    # Seed both the sampler's rng and its internal random sampler so that
    # suggestions are reproducible for a given seed.
    self.primary.algorithm.study.sampler.rng.seed(seed)
    self.primary.algorithm.study.sampler.random_sampler.rng.seed(seed)
    params = unflatten(
        dict(zip(self.space.keys(), self.primary.suggest()[0])))
    logger.debug('Sampling:\n{}'.format(pprint.pformat(params)))
    return params

def get_params(self, seed=None):
    if seed is None:
        seed = random.randint(0, 100000)
    self.primary.algorithm._init_optimizer()
    optimizer = self.primary.algorithm.optimizer
    optimizer.rng.seed(seed)
    # Do not reuse `seed` directly here: optimizer.rng and the base
    # estimator's random state would be synchronized and sample the same
    # values, so derive a fresh seed from optimizer.rng instead.
    optimizer.base_estimator_.random_state = optimizer.rng.randint(0, 100000)
    params = unflatten(
        dict(zip(self.space.keys(), self.primary.suggest()[0])))
    logger.debug('Sampling:\n{}'.format(pprint.pformat(params)))
    return params

def get_params(self, seed=None):
    # `seed` is unused here: the parameters were precomputed and are indexed
    # by the number of trials recorded so far.
    params = self.linear_space.reverse(self.params[len(self.trials)])
    arguments = unflatten(dict(zip(self.space.keys(), params)))
    logger.debug('Sampling:\n{}'.format(pprint.pformat(arguments)))
    return arguments

def update(config, arguments):
    # Each argument is a `key=value` override; split on the first '=' only so
    # that values may themselves contain '='.
    pairs = [argument.split("=", 1) for argument in arguments]
    kwargs = unflatten(dict((pair[0], eval(pair[1])) for pair in pairs))
    return merge_configs(config, kwargs)

def merge(config, subconfig):
    flattened_config = copy.deepcopy(flatten(config))
    flattened_config.update(flatten(subconfig))
    return unflatten(flattened_config)

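# Hedged sketch of the deep-merge semantics behind merge()/update(): flatten
# both configs to dotted keys, let the subconfig override only the leaves it
# sets, then unflatten back.  `_flatten`/`_unflatten` below are minimal
# stand-ins for the helpers imported by this module, assumed to join/split
# nested keys on '.'; the configs are made up for the example.
def _flatten(nested, prefix=""):
    flat = {}
    for key, value in nested.items():
        if isinstance(value, dict):
            flat.update(_flatten(value, prefix + key + "."))
        else:
            flat[prefix + key] = value
    return flat

def _unflatten(flat):
    nested = {}
    for dotted_key, value in flat.items():
        node = nested
        *parents, leaf = dotted_key.split(".")
        for part in parents:
            node = node.setdefault(part, {})
        node[leaf] = value
    return nested

config = {"optimizer": {"lr": 0.1, "momentum": 0.9}, "epochs": 10}
subconfig = {"optimizer": {"lr": 0.01}}
flat = _flatten(config)
flat.update(_flatten(subconfig))          # only optimizer.lr is overridden
print(_unflatten(flat))
# {'optimizer': {'lr': 0.01, 'momentum': 0.9}, 'epochs': 10}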