def __init__(self, cal_directory_uris=None, mode="local_disk"):
    """Build the calibration service's index of available files.

    :param cal_directory_uris: directories to walk for ``.fits``
        calibration files; defaults to the current directory.
        (Uses a ``None`` sentinel instead of a mutable default list.)
    :param mode: storage mode label; not used inside this constructor.
    """
    if cal_directory_uris is None:
        cal_directory_uris = ["."]
    print("Loading Calibration Service Available Directories")
    # calList will contain absolute paths/filenames
    self.calList = []
    for path in cal_directory_uris:
        for cpath in ConfigSpace.general_walk(path, [".fits"]):
            self.calList.append(cpath)
def OBSOLETE_update_xml_index(self):
    """Rebuild ``self.xmlIndex``, mapping xml filename -> absolute path.

    Could be useful if this becomes long running and there are changes
    to the xml files, etc.

    :raises RuntimeError: when the xmlcalibrations tree cannot be walked.
    """
    self.xmlIndex = {}
    try:
        for dpath, dnames, files in ConfigSpace.config_walk(
                "xmlcalibrations"):
            for fname in files:
                self.xmlIndex[fname] = os.path.join(str(dpath), fname)
    except Exception as err:
        # BUG FIX: the original `raise "Could not load XML Index."` raised
        # a string, which is a TypeError in Python 3. Raise a real
        # exception and keep the original cause chained.
        raise RuntimeError("Could not load XML Index.") from err
def test_to_ndarray_name_last_pos():
    """Check that HyperparameterRanges_CS.to_ndarray agrees with the
    HyperparameterRanges_Impl encoding when ``name_last_pos`` moves one
    hyperparameter to the end of the internal ordering."""
    np.random.seed(123456)
    random_state = np.random.RandomState(123456)
    # ConfigSpace definition: float, int, categorical, int, categorical
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CSH.UniformFloatHyperparameter('a', lower=0., upper=1.),
        CSH.UniformIntegerHyperparameter('b', lower=2, upper=3),
        CSH.CategoricalHyperparameter('c', choices=('1', '2', '3')),
        CSH.UniformIntegerHyperparameter('d', lower=2, upper=3),
        CSH.CategoricalHyperparameter('e', choices=('1', '2'))
    ])
    # Reference ranges mirroring the ConfigSpace definitions above
    hp_a = HyperparameterRangeContinuous(
        'a', lower_bound=0., upper_bound=1., scaling=LinearScaling())
    hp_b = HyperparameterRangeInteger(
        'b', lower_bound=2, upper_bound=3, scaling=LinearScaling())
    hp_c = HyperparameterRangeCategorical('c', choices=('1', '2', '3'))
    hp_d = HyperparameterRangeInteger(
        'd', lower_bound=2, upper_bound=3, scaling=LinearScaling())
    hp_e = HyperparameterRangeCategorical('e', choices=('1', '2'))
    for name_last_pos in ['a', 'c', 'd', 'e']:
        hp_ranges_cs = HyperparameterRanges_CS(
            config_space, name_last_pos=name_last_pos)
        # Reorder the reference ranges so `name_last_pos` comes last
        if name_last_pos == 'a':
            lst = [hp_b, hp_c, hp_d, hp_e, hp_a]
        elif name_last_pos == 'c':
            lst = [hp_a, hp_b, hp_d, hp_e, hp_c]
        elif name_last_pos == 'd':
            lst = [hp_a, hp_b, hp_c, hp_e, hp_d]
        else:
            lst = [hp_a, hp_b, hp_c, hp_d, hp_e]
        hp_ranges = HyperparameterRanges_Impl(*lst)
        names = [hp.name for hp in hp_ranges.hp_ranges]
        # Sample one candidate and encode it with both implementations
        config_cs = hp_ranges_cs.random_candidate(random_state)
        _config = config_cs.get_dictionary()
        config = (_config[name] for name in names)
        ndarr_cs = hp_ranges_cs.to_ndarray(config_cs)
        ndarr = hp_ranges.to_ndarray(config)
        # Both encodings must agree up to float tolerance
        assert_allclose(ndarr_cs, ndarr, rtol=1e-4)
def _convert_hyper_parameters_to_cs(self):
    # type: () -> CS.ConfigurationSpace
    """Translate this optimizer's hyper-parameter definitions into a
    ConfigSpace.ConfigurationSpace seeded with ``self._seed``.

    :raises ValueError: for parameter types OptimizerBOHB cannot handle.
    """
    space = CS.ConfigurationSpace(seed=self._seed)
    for param in self._hyper_parameters:
        # Check order matters: integer ranges may subclass the float range
        if isinstance(param, UniformParameterRange):
            converted = CSH.UniformFloatHyperparameter(
                param.name, lower=param.min_value, upper=param.max_value,
                log=False, q=param.step_size)
        elif isinstance(param, UniformIntegerParameterRange):
            converted = CSH.UniformIntegerHyperparameter(
                param.name, lower=param.min_value, upper=param.max_value,
                log=False, q=param.step_size)
        elif isinstance(param, DiscreteParameterRange):
            converted = CSH.CategoricalHyperparameter(
                param.name, choices=param.values)
        else:
            raise ValueError(
                "HyperParameter type {} not supported yet with OptimizerBOHB".format(
                    type(param)))
        space.add_hyperparameter(converted)
    return space
def wrapper(self, configuration, **kwargs):
    """Validate ``configuration`` against ``self.configuration_space``
    before delegating to the wrapped function ``foo``.

    Accepts either a ConfigSpace.Configuration or an indexable object
    ordered like the configuration space's hyperparameters.

    :raises Exception: if conversion to a Configuration fails, chained
        from the original error.
    """
    if not isinstance(configuration, ConfigSpace.Configuration):
        try:
            values = {
                name: configuration[i]
                for (i, name) in enumerate(self.configuration_space)
            }
            candidate = ConfigSpace.Configuration(
                self.configuration_space, values)
        except Exception as e:
            # BUG FIX: original message read "...of the provided into a..."
            # — the word "configuration" was missing.
            raise Exception(
                'Error during the conversion of the provided configuration '
                'into a ConfigSpace.Configuration object') from e
    else:
        candidate = configuration
    # Raises if the configuration violates the space's constraints
    self.configuration_space.check_configuration(candidate)
    # NOTE(review): `foo` is called with the caller's original
    # `configuration`, not the validated/converted copy — this mirrors the
    # original behavior; confirm it is intentional.
    return (foo(self, configuration, **kwargs))
def new_result(self, job, *args, **kwargs):
    """Record a finished job as a (budget, loss) observation keyed by its
    configuration, after letting the parent class update its model."""
    force_update = (
        self.warmstarted_model.choose_similarity_budget_strategy == "current")
    super().new_result(job, update_model=True,
                       force_model_update=force_update)
    budget = job.kwargs["budget"]
    config = ConfigSpace.Configuration(
        configuration_space=self.configspace, values=job.kwargs["config"])
    # Missing/absent results count as an infinitely bad loss
    if job.result is not None and "loss" in job.result:
        loss = job.result["loss"]
    else:
        loss = float("inf")
    self.observations.setdefault(config, []).append((budget, loss))
def openmlsetup_to_configuration(openmlsetup, config_space):
    """Convert an OpenML setup's parameter settings into a
    ConfigSpace.Configuration over ``config_space``.

    Values are cast according to each hyperparameter's type: ints for
    integer hyperparameters, floats for other numerical ones; anything
    else is JSON-decoded, with booleans stringified.
    """
    values = {}
    # Hoisted out of the loop: the set of known names does not change
    known_names = config_space.get_hyperparameter_names()
    for _, param in openmlsetup.parameters.items():
        name = param.parameter_name
        if name not in known_names:
            continue
        hp = config_space._hyperparameters[name]
        if isinstance(hp,
                      ConfigSpace.hyperparameters.UniformIntegerHyperparameter):
            values[name] = int(param.value)
        elif isinstance(hp,
                        ConfigSpace.hyperparameters.NumericalHyperparameter):
            values[name] = float(param.value)
        else:
            decoded = json.loads(param.value)
            values[name] = str(decoded) if isinstance(decoded, bool) else decoded
    return ConfigSpace.Configuration(config_space, values)
def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, str]] = None) -> ConfigurationSpace:
    """Search space with an augmenter on/off switch and its probability ``p``."""
    cs = ConfigurationSpace()
    use_augmenter = CategoricalHyperparameter(
        'use_augmenter', choices=[True, False], default_value=True)
    p = UniformFloatHyperparameter('p', lower=0.2, upper=1, default_value=0.5)
    cs.add_hyperparameters([p, use_augmenter])
    # `p` is only meaningful while the augmenter is enabled
    cs.add_condition(CS.EqualsCondition(p, use_augmenter, True))
    return cs
def get_hyperparameter_search_space(dataset_properties: Optional[Dict] = None,
                                    min_num_gropus: int = 1,
                                    max_num_groups: int = 15,
                                    min_num_units: int = 10,
                                    max_num_units: int = 1024,
                                    ) -> ConfigurationSpace:
    """Search space for a shaped-MLP backbone: group count, overall shape,
    activation, and unit bounds, plus optional dropout.

    NOTE(review): ``min_num_gropus`` is a typo for ``min_num_groups`` but
    is part of the public signature, so it is kept for compatibility.
    """
    cs = ConfigurationSpace()
    # num_groups: how many times the N-resblock group is repeated
    num_groups = UniformIntegerHyperparameter(
        "num_groups", lower=min_num_gropus, upper=max_num_groups,
        default_value=5)
    mlp_shape = CategoricalHyperparameter('mlp_shape', choices=[
        'funnel', 'long_funnel', 'diamond', 'hexagon',
        'brick', 'triangle', 'stairs'])
    activation = CategoricalHyperparameter(
        "activation", choices=list(_activations.keys()))
    max_units = UniformIntegerHyperparameter(
        "max_units", lower=min_num_units, upper=max_num_units)
    output_dim = UniformIntegerHyperparameter(
        "output_dim", lower=min_num_units, upper=max_num_units)
    cs.add_hyperparameters(
        [num_groups, activation, mlp_shape, max_units, output_dim])
    # Optional dropout for better generalization; the rate hyperparameter
    # is active only when dropout is switched on
    use_dropout = CategoricalHyperparameter(
        "use_dropout", choices=[True, False])
    max_dropout = UniformFloatHyperparameter(
        "max_dropout", lower=0.0, upper=1.0)
    cs.add_hyperparameters([use_dropout, max_dropout])
    cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))
    return cs
def setUp(self):
    """Build a conditional config space: one 'parent' categorical whose
    value activates exactly one of three typed children."""
    self.configspace = CS.ConfigurationSpace()
    self.HPs = [
        CS.CategoricalHyperparameter('parent', [1, 2, 3]),
        CS.CategoricalHyperparameter('child1_x1', ['foo', 'bar']),
        CS.UniformFloatHyperparameter('child2_x1', lower=-1, upper=1),
        CS.UniformIntegerHyperparameter('child3_x1', lower=-2, upper=5),
    ]
    self.configspace.add_hyperparameters(self.HPs)
    # child i is active only when parent == i
    self.conditions = [
        CS.EqualsCondition(self.HPs[child], self.HPs[0], value)
        for child, value in ((1, 1), (2, 2), (3, 3))
    ]
    for cond in self.conditions:
        self.configspace.add_condition(cond)
def get_configspace():
    """Build the ConfigurationSpace of random-forest hyperparameters.

    :return: ConfigurationsSpace-Object with n_estimators, criterion,
        max_depth and min_sample_split.
    """
    cs = CS.ConfigurationSpace()
    n_estimators = CSH.UniformIntegerHyperparameter(
        'n_estimators', lower=1, upper=500, default_value=100, log=True)
    criterion = CSH.CategoricalHyperparameter(
        'criterion', ['gini', 'entropy'])
    # default_value=None lets ConfigSpace compute its own default
    max_depth = CSH.UniformIntegerHyperparameter(
        'max_depth', lower=100, upper=1000, default_value=None, log=True)
    min_sample_split = CSH.UniformIntegerHyperparameter(
        'min_sample_split', lower=2, upper=30, default_value=2, log=True)
    cs.add_hyperparameters(
        [n_estimators, criterion, max_depth, min_sample_split])
    return cs
def get_configuration_space(self):
    """Config space for a small MLP: nonlinearity, depth (1-4),
    conditionally-active per-layer widths, and learning rate."""
    cs = CS.ConfigurationSpace()
    nonlintype = CSH.CategoricalHyperparameter(
        "nonlintype", choices=["relu", "tanh", "sigmoid", "selu"],
        default_value="relu")
    n_hidden_layers = CSH.CategoricalHyperparameter(
        "n_hidden_layers", choices=["1", "2", "3", "4"], default_value="2")
    hidden_sizes = [
        CSH.UniformIntegerHyperparameter(
            "hidden_size_%d" % k, lower=16, upper=256, default_value=128)
        for k in (1, 2, 3, 4)
    ]
    # hidden_size_k only matters when the net has at least k layers
    conditions = [
        CSC.InCondition(child=hidden_sizes[k - 1], parent=n_hidden_layers,
                        values=[str(d) for d in range(k, 5)])
        for k in (2, 3, 4)
    ]
    lr = CSH.UniformFloatHyperparameter(
        "lr", lower=1e-5, upper=1, default_value=1e-3, log=True)
    cs.add_hyperparameters([nonlintype, n_hidden_layers] + hidden_sizes + [lr])
    cs.add_conditions(conditions)
    return cs
def get_configspace():
    """Build the configuration space of hyperparameters to tune.

    Covers the optimizer learning rate (log scale) and the model's
    history length, layer count, and ordinal channel count.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()
    lr = CSH.UniformFloatHyperparameter(
        'optimizer:lr', lower=0.001, upper=0.1, default_value=0.04,
        log=True)
    history = CSH.UniformIntegerHyperparameter(
        'model:history', lower=1, upper=12, default_value=12)
    n_layers = CSH.UniformIntegerHyperparameter(
        'model:n_layers', lower=2, upper=8, default_value=3)
    n_channels = CSH.OrdinalHyperparameter(
        'model:n_channels', sequence=[2, 4, 8, 16, 32, 64], default_value=8)
    cs.add_hyperparameters([lr, history, n_layers, n_channels])
    return cs
def get_configspace():
    """Define all the hyperparameters that need to be optimised and
    return them as a configuration space.

    Fixes over the previous version: several numeric bounds/defaults were
    given as strings (``'1e-2'``, ``'0'``, ``'0.4'``) and the optimizer
    choices were a ``dict_keys`` view instead of a list.
    """
    cs = CS.ConfigurationSpace()
    # BUG FIX: default_value was the string '1e-2'
    initial_lr = CSH.UniformFloatHyperparameter(
        'initial_lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)
    # BUG FIX: ConfigSpace expects a sequence of choices, not a dict view
    optimizer = CSH.CategoricalHyperparameter(
        'optimizer', list(settings.opti_dict.keys()))
    batch_size = CSH.UniformIntegerHyperparameter(
        'batch_size', lower=16, upper=32, default_value=24)
    cs.add_hyperparameters([initial_lr, optimizer, batch_size])
    lr_scheduler = CSH.CategoricalHyperparameter(
        'scheduler', ['Exponential', 'Cosine', 'Plateau'])
    weight_decay = CSH.UniformFloatHyperparameter(
        'weight_decay', lower=1e-5, upper=1e-3, default_value=3e-4,
        log=True)
    drop_path_rate = CSH.UniformFloatHyperparameter(
        'max_droppath_rate', lower=0, upper=0.4, default_value=0.3,
        log=False)
    # BUG FIX: lower/upper were the strings '0' and '0.4'
    weight_auxiliary = CSH.UniformFloatHyperparameter(
        'weight_auxiliary', lower=0, upper=0.4, log=False)
    grad_clip_value = CSH.UniformIntegerHyperparameter(
        'grad_clip_value', lower=4, upper=8, default_value=5)
    cs.add_hyperparameters([
        lr_scheduler, drop_path_rate, weight_auxiliary, weight_decay,
        grad_clip_value
    ])
    return cs
def get_range_creation_config(op_name, dtypes):
    """Sample a random config for a range-creation op: start/stop bounds,
    a random dtype drawn from ``dtypes``, plus extra numeric parameters
    when the op is ``linspace``."""
    config_dict = random_range_cs.sample_configuration().get_dictionary()
    # Derive `stop` from the sampled start + interval
    config_dict['stop'] = config_dict['start'] + config_dict.pop('interval')
    # Draw a random dtype from the supplied choices
    dtype_space = cs.ConfigurationSpace()
    dtype_space.add_hyperparameter(
        csh.CategoricalHyperparameter('dtype', choices=dtypes))
    config_dict['dtype'] = dtype_space.sample_configuration().get('dtype')
    if op_name == 'linspace':
        config_dict.update(
            random_num_cs.sample_configuration().get_dictionary())
    return config_dict
def get_fidelity_space(
        seed: Union[int, None] = None) -> CS.ConfigurationSpace:
    """
    Creates an empty ConfigSpace.ConfigurationSpace for traditional
    models, as no fidelities are used.

    Parameters
    ----------
    seed : int, None
        Fixing the seed for the ConfigSpace.ConfigurationSpace

    Returns
    -------
    ConfigSpace.ConfigurationSpace
    """
    if seed is None:
        # Pick a random seed when the caller does not fix one
        seed = np.random.randint(1, 100000)
    return CS.ConfigurationSpace(seed=seed)
def get_hyperparameter_search_space_small(seed):
    """
    Small version of the svm config space, featuring important
    hyperparameters based on https://arxiv.org/abs/1710.04725

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.svm.SVC', seed)
    C = ConfigSpace.UniformFloatHyperparameter(
        name='svc__C', lower=0.03125, upper=32768, log=True,
        default_value=1.0)
    kernel = ConfigSpace.CategoricalHyperparameter(
        name='svc__kernel', choices=['rbf', 'poly', 'sigmoid'],
        default_value='rbf')
    degree = ConfigSpace.UniformIntegerHyperparameter(
        name='svc__degree', lower=1, upper=5, default_value=3)
    gamma = ConfigSpace.UniformFloatHyperparameter(
        name='svc__gamma', lower=3.0517578125e-05, upper=8, log=True,
        default_value=0.1)
    coef0 = ConfigSpace.UniformFloatHyperparameter(
        name='svc__coef0', lower=-1, upper=1, default_value=0)
    cs.add_hyperparameters([C, kernel, degree, gamma, coef0])
    # degree applies only to the polynomial kernel; coef0 to poly/sigmoid
    cs.add_condition(ConfigSpace.EqualsCondition(degree, kernel, 'poly'))
    cs.add_condition(
        ConfigSpace.InCondition(coef0, kernel, ['poly', 'sigmoid']))
    return cs
def get_config_space():
    """Two-parameter search space: weight decay and cutout probability."""
    config_space = CS.ConfigurationSpace()
    weight_decay = CSH.UniformFloatHyperparameter(
        'weight_decay', lower=1e-5, upper=1e-2, log=False)
    cutout_prob = CSH.UniformFloatHyperparameter(
        'cutout_prob', lower=0, upper=1, log=False)
    for hp in (weight_decay, cutout_prob):
        config_space.add_hyperparameter(hp)
    return config_space
def get_warmstart_configspace():
    """Fixed (all-constant) random-forest configuration used for warm
    starting the optimizer."""
    ws_cs = CS.ConfigurationSpace()
    # Every hyperparameter is pinned to a single value
    fixed_values = {
        'n_estimators': 100,
        'max_depth': 40,
        'min_samples_leaf': 30,
        'min_samples_split': 20,
        'max_features': 'auto',
    }
    ws_cs.add_hyperparameters(
        [CSH.Constant(name, value=val) for name, val in fixed_values.items()])
    return ws_cs
def query_nasbench(self, nasbench, sample, search_space=None):
    """Query NAS-Bench-101 for the architecture encoded by ``sample`` and
    store its accuracies and training time on this object.

    :param nasbench: NAS-Bench-101 API object
    :param sample: vector encoding of a configuration in the search space
    :param search_space: search space used to decode ``sample``
    """
    config = ConfigSpace.Configuration(
        search_space.get_configuration_space(), vector=sample)
    adjacency_matrix, node_list = \
        search_space.convert_config_to_nasbench_format(config)
    # SearchSpace3 node lists already contain the 1x1 conv; the exact
    # type check (not isinstance) is kept deliberately.
    if type(search_space) == SearchSpace3:
        node_list = [INPUT, *node_list, OUTPUT]
    else:
        node_list = [INPUT, *node_list, CONV1X1, OUTPUT]
    # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the documented replacement.
    adjacency_list = adjacency_matrix.astype(int).tolist()
    model_spec = api.ModelSpec(matrix=adjacency_list, ops=node_list)
    nasbench_data = nasbench.query(model_spec)
    self.arch = Architecture(adjacency_matrix=adjacency_matrix,
                             node_list=node_list)
    self.validation_accuracy = nasbench_data['validation_accuracy']
    self.test_accuracy = nasbench_data['test_accuracy']
    self.training_time = nasbench_data['training_time']
def reformat_for_tuning(self):
    """Convert ``self.space`` from a dict of CSH objects into a proper
    ConfigurationSpace accepted by HpBandSter (stored back on
    ``self.space``).

    :raises Exception: when the search-space dict is still empty.
    """
    # Guard clause: nothing to convert yet
    if len(self.space) == 0:
        raise Exception(
            'Search space has not been modified yet, no tuning can be done.'
        )
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameters(list(self.space.values()))
    self.space = cs
def get_configuration_space(system, task, model):
    """Controller tuning space: horizon length, kappa temperature, and
    number of sampled trajectories.

    ``system``, ``task`` and ``model`` are accepted for interface
    compatibility but not used here.
    """
    cs = CS.ConfigurationSpace()
    for hp in (
            CSH.UniformIntegerHyperparameter(
                name="horizon", lower=10, upper=100, default_value=10),
            CSH.UniformFloatHyperparameter(
                name='kappa', lower=0.1, upper=1.0, default_value=1.0),
            CSH.UniformIntegerHyperparameter(
                name='num_traj', lower=100, upper=1000, default_value=200),
    ):
        cs.add_hyperparameter(hp)
    return cs
def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, str]] = None,
        min_num_layers: int = 2,
        max_num_layers: int = 5,
        min_init_filters: int = 16,
        max_init_filters: int = 64,
        min_kernel_size: int = 2,
        max_kernel_size: int = 5,
        min_stride: int = 1,
        max_stride: int = 3,
        min_padding: int = 2,
        max_padding: int = 3,
        min_pool_size: int = 2,
        max_pool_size: int = 3) -> ConfigurationSpace:
    """ConvNet search space: depth, activation, and convolution/pooling
    geometry, each bounded by the corresponding min/max argument."""
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter(
            'num_layers', lower=min_num_layers, upper=max_num_layers))
    cs.add_hyperparameter(
        CategoricalHyperparameter(
            'activation', choices=list(_activations.keys())))
    # Remaining knobs are all bounded integer ranges
    for name, lo, hi in (
            ('conv_init_filters', min_init_filters, max_init_filters),
            ('conv_kernel_size', min_kernel_size, max_kernel_size),
            ('conv_kernel_stride', min_stride, max_stride),
            ('conv_kernel_padding', min_padding, max_padding),
            ('pool_size', min_pool_size, max_pool_size)):
        cs.add_hyperparameter(
            UniformIntegerHyperparameter(name, lower=lo, upper=hi))
    return cs
def get_config(self, budget):
    """
    function to sample a new configuration

    This function is called inside Hyperband to query a new configuration

    Parameters:
    -----------
    budget: float
        the budget for which this configuration is scheduled

    returns: (dict, dict)
        the configuration as a dictionary, and an (empty) info dict
    """
    self.lock.acquire()
    try:
        if not self.is_trained:
            # No model yet: fall back to random sampling
            c = self.config_space.sample_configuration().get_array()
        else:
            candidates = np.array([
                self.config_space.sample_configuration().get_array()
                for _ in range(self.n_candidates)
            ])
            # We are only interested in the asymptotic value, so append
            # a budget column fixed at 1
            projected_candidates = np.concatenate(
                (candidates, np.ones([self.n_candidates, 1])), axis=1)
            # Compute the upper confidence bound at the asymptote
            m, v = self.model.predict(projected_candidates)
            ucb_values = m + self.delta * np.sqrt(v)
            # (debug print of ucb_values removed)
            # Sample a configuration proportionally to its ucb value
            p = np.ones(self.n_candidates) * (ucb_values / np.sum(ucb_values))
            idx = np.random.choice(self.n_candidates, 1, False, p)
            c = candidates[idx][0]
        config = ConfigSpace.Configuration(self.config_space, vector=c)
    finally:
        # BUG FIX: release in `finally` so the lock is not leaked when
        # sampling or model prediction raises.
        self.lock.release()
    return config.get_dictionary(), {}
def get_nas101_configuration_space():
    """NAS-Bench-101 cell space: one categorical op per internal node and
    one binary indicator per upper-triangular adjacency entry."""
    nas101_cs = ConfigSpace.ConfigurationSpace()
    # Five internal nodes, each choosing one operation from OPS
    for node in range(5):
        nas101_cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("op_node_%d" % node, OPS))
    # One on/off edge per entry of the upper triangle of the adjacency
    for i in range(VERTICES * (VERTICES - 1) // 2):
        nas101_cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("edge_%d" % i, [0, 1]))
    return nas101_cs
def get_config_space(classifier):
    """Build a ConfigSpace for ``classifier``.

    For anything except 'neural_network', the space is derived from
    auto-sklearn's configuration space: constants are stripped, only
    classifier/imputation hyperparameters are kept, and the random
    forest's max_features bounds are tightened to [0.1, 0.9]. For
    'neural_network' a hand-rolled space is returned.
    """
    # BUG FIX: the original used `is not 'neural_network'`, an identity
    # comparison against a string literal (unreliable, and a
    # SyntaxWarning on modern Python); use `!=`.
    if classifier != 'neural_network':
        autosklearn_config_space = get_configuration_space(
            info={'task': autosklearn.constants.MULTICLASS_CLASSIFICATION,
                  'is_sparse': 0},
            include_estimators=[classifier],
            include_preprocessors=['no_preprocessing'])
        configuration_space = ConfigSpace.ConfigurationSpace()
        for name, hyperparameter in \
                autosklearn_config_space._hyperparameters.items():
            # Constants carry no tuning information
            if isinstance(hyperparameter,
                          ConfigSpace.hyperparameters.Constant):
                continue
            if hyperparameter.name.startswith('classifier') or \
                    hyperparameter.name.startswith('imputation'):
                configuration_space.add_hyperparameter(hyperparameter)
        if classifier == 'random_forest':
            # Tighten max_features to [0.1, 0.9] and pin its default
            hyperparameter = configuration_space.get_hyperparameter(
                'classifier:random_forest:max_features')
            hyperparameter.lower = 0.1
            hyperparameter.lower_hard = 0.1
            hyperparameter.upper = 0.9
            hyperparameter.upper_hard = 0.9
            hyperparameter.default = 0.1
        return configuration_space

    # Hand-rolled space for the neural network classifier
    config_space = ConfigSpace.ConfigurationSpace()
    config_space.add_hyperparameter(ConfigSpace.CategoricalHyperparameter(
        'imputation:strategy', ['mean', 'median', 'most_frequent']))
    config_space.add_hyperparameter(ConfigSpace.CategoricalHyperparameter(
        'classifier:__choice__', [classifier]))
    config_space.add_hyperparameter(ConfigSpace.UniformIntegerHyperparameter(
        'classifier:neural_network:hidden_layer_sizes', 32, 1024))
    config_space.add_hyperparameter(ConfigSpace.UniformIntegerHyperparameter(
        'classifier:neural_network:num_hidden_layers', 1, 5))
    config_space.add_hyperparameter(ConfigSpace.UniformFloatHyperparameter(
        'classifier:neural_network:learning_rate_init', 0.00001, 1, log=True))
    config_space.add_hyperparameter(ConfigSpace.UniformFloatHyperparameter(
        'classifier:neural_network:alpha', 0.0000001, 0.0001, log=True))
    config_space.add_hyperparameter(ConfigSpace.UniformFloatHyperparameter(
        'classifier:neural_network:momentum', 0.1, 0.9))
    return config_space
def from_ndarray(self, cand_ndarray: np.ndarray) -> Candidate:
    """Decode an internal [0, 1] feature vector back into a configuration.

    Numerical entries are copied through the numer_src/numer_trg index
    maps; each categorical is recovered via argmax over its one-hot span.

    :param cand_ndarray: encoded vector of size ``self._ndarray_size``
        with all entries in [0, 1]
    :return: the corresponding CS.Configuration
    """
    assert cand_ndarray.size == self._ndarray_size, \
        "Internal vector [{}] must have size {}".format(
            cand_ndarray, self._ndarray_size)
    cand_ndarray = cand_ndarray.reshape((-1, ))
    assert cand_ndarray.min() >= 0. and cand_ndarray.max() <= 1., \
        "Internal vector [{}] must have entries in [0, 1]".format(
            cand_ndarray)
    # Deal with categoricals by using argmax
    srcvec = np.zeros(self.__len__(), dtype=cand_ndarray.dtype)
    # Copy numerical entries via the index maps (mode='clip' guards
    # against out-of-range indices)
    srcvec.put(self.numer_src,
               cand_ndarray.take(self.numer_trg, mode='clip'),
               mode='clip')
    # For each categorical: pick the strongest choice within its one-hot
    # block of length `card`, storing the choice index at `srcpos`
    for srcpos, trgpos, card in zip(self.categ_src, self.categ_trg,
                                    self.categ_card):
        maxpos = cand_ndarray[trgpos:(trgpos + card)].argmax()
        srcvec[srcpos] = maxpos
    # Rest is dealt with by CS.Configuration
    return CS.Configuration(self.config_space, vector=srcvec)
def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
        use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_augmenter",
            value_range=(True, False),
            default_value=True,
        ),
        sigma_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="sigma_offset",
            value_range=(0.0, 3.0),
            default_value=0.3,
        ),
) -> ConfigurationSpace:
    """Search space with an augmenter on/off switch and its sigma offset."""
    cs = ConfigurationSpace()
    switch_hp = get_hyperparameter(use_augmenter, CategoricalHyperparameter)
    sigma_hp = get_hyperparameter(sigma_offset, UniformFloatHyperparameter)
    cs.add_hyperparameters([switch_hp, sigma_hp])
    # sigma_offset is only relevant while the augmenter is enabled
    cs.add_condition(CS.EqualsCondition(sigma_hp, switch_hp, True))
    return cs
def create_ch():
    """Build the BOHB search space: learning rate (log scale), momentum,
    two dropout rates, and layer sizes (all modeled as floats)."""
    import ConfigSpace as CS  # BOHB uses ConfigSpace for their hyperparameter search space
    config_space = CS.ConfigurationSpace()
    # (name, lower, upper, log)
    specs = [
        ("lr", 0.001, 1, True),
        ("momentum", 0.1, 0.9, False),
        ("dropout_1", 0.01, 0.1, False),
        ("dropout_2", 0.05, 0.2, False),
        ("conv_1", 32, 128, False),
        ("conv_2", 64, 256, False),
        ("dense_1", 128, 1024, False),
    ]
    for name, low, high, log in specs:
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter(
                name, lower=low, upper=high, log=log))
    return config_space
def remap_resource(self, config_ext: CS.Configuration, resource: int,
                   as_dict: bool = False) -> Union[CS.Configuration, dict]:
    """
    Re-assigns resource value for extended config.

    :param config_ext: Extended config
    :param resource: New resource value
    :param as_dict: Return as dict?
    :return: Copy of ``config_ext`` with the resource attribute replaced
    """
    values = copy.copy(config_ext.get_dictionary())
    values[self.resource_attr_name] = resource
    if as_dict:
        return values
    return CS.Configuration(self.hp_ranges_ext.config_space, values=values)
def setUp(self):
    """Draw train/test samples from a seeded config space and fit both
    the statsmodels reference KDE and the package's MultivariateKDE
    variants on the training data."""
    self.configspace = CS.ConfigurationSpace(42)
    self.add_hyperparameters()

    def sample_array(count):
        # Sample `count` configurations and stack their vector forms
        confs = [self.configspace.sample_configuration()
                 for _ in range(count)]
        return np.array([conf.get_array() for conf in confs])

    self.x_train = sample_array(self.n_train)
    self.x_test = sample_array(self.n_test)
    self.sm_x_train = self.sm_transform_data(self.x_train)
    self.sm_x_test = self.sm_transform_data(self.x_test)
    # Reference KDE: cross-validated ML bandwidth from statsmodels
    self.sm_kde = sm.nonparametric.KDEMultivariate(
        data=self.sm_x_train, var_type=self.var_types, bw='cv_ml')
    # Package KDEs: fully-dimensional vs factorized, no boundary fix
    self.hp_kde_full = MultivariateKDE(
        self.configspace, fully_dimensional=True, fix_boundary=False)
    self.hp_kde_factor = MultivariateKDE(
        self.configspace, fully_dimensional=False, fix_boundary=False)
    self.hp_kde_full.fit(self.x_train, bw_estimator='mlcv')
    self.hp_kde_factor.fit(self.x_train, bw_estimator='mlcv')
def get_configspace(hp_space: dict):
    """Translate a skopt hyperparameter space into an HpBandSter
    ConfigurationSpace.

    Integer dimensions map to UniformIntegerHyperparameter, Categorical
    to CategoricalHyperparameter, and Real to UniformFloatHyperparameter
    (log-scaled for base-10 log-uniform priors).

    :raises Exception: for any other skopt dimension type.
    """
    cs = CS.ConfigurationSpace()
    converted = []
    # Index access kept: hp_space may be a list or an int-keyed dict
    for i in range(len(hp_space)):
        dim = hp_space[i]
        if type(dim) == skopt.space.space.Integer:
            converted.append(
                CSH.UniformIntegerHyperparameter(
                    name=dim.name, lower=dim.low, upper=dim.high))
        elif type(dim) == skopt.space.space.Categorical:
            converted.append(
                CSH.CategoricalHyperparameter(
                    dim.name, choices=list(dim.categories)))
        elif type(dim) == skopt.space.space.Real:
            # Sample in the log domain only for base-10 log-uniform priors
            use_log = (dim.prior == 'log-uniform' and dim.base == 10)
            converted.append(
                CSH.UniformFloatHyperparameter(
                    dim.name, lower=dim.low, upper=dim.high, log=use_log))
        else:
            raise Exception(
                'The skopt HP-space could not be converted correctly!')
    cs.add_hyperparameters(converted)
    return cs
def compose_multi_table(lookaddr, *lookups, **args):
    """
    Returns a dictionary keyed by the lookups name of a composed
    combination of tables distributed throughout loaded kits.

    NOTE(review): this is Python-2-era code — `file(...)` and passing a
    file object to `exec` do not work on Python 3; confirm the intended
    runtime before porting.
    """
    retdict = {}
    # Optional keyword args: `context` (defaults to the current default
    # context) and `just_one` (stop after the first contributing module)
    context = None
    if "context" in args:
        context = args["context"]
    if context == None:
        context = ConfigSpace.get_current_default_context()
    just_one = False
    if "just_one" in args and args["just_one"] == True:
        just_one = True
    paths = ConfigSpace.lookup_multi_paths(lookaddr, context=context)
    if len(paths) == 0:
        return None
    paths.reverse()  # so early files override latter, as it's in ADCONFIG path order
    for modname in paths:
        # print "L185: modname = %s" % modname
        # Execute the module file; its top-level names land in `l`
        f = file(modname)
        g = {}
        l = {}
        exec(f, g, l)
        # print "L191:",l.keys(),l
        f.close()
        i = 0
        contributed = False
        for lookup in lookups:
            # print "L197: #%d - lookup = %s" % (i,lookup); i+= 1
            if lookup in l:
                lval = l[lookup]
                valtype = type(lval)
                # Merge strategy depends on the value's type:
                # dicts merge by update, lists prepend the new entries,
                # scalars are inserted at the front of a list
                if valtype == dict:
                    if not lookup in retdict:
                        retdict[lookup] = {}
                    curval = retdict[lookup]
                    curval.update(l[lookup])
                elif valtype == list:
                    if not lookup in retdict:
                        retdict[lookup] = []
                    curval = retdict[lookup]
                    lval.extend(curval)
                    # print "L211 lval = %s" % lval
                    curval = lval
                else:
                    if not lookup in retdict:
                        retdict[lookup] = []
                    curval = retdict[lookup]
                    curval.insert(0, lval)
                retdict[lookup] = curval
                contributed = True
        # Track which module files contributed values
        if not ("_contributors" in retdict):
            retdict["_contributors"] = []
        contribs = retdict["_contributors"]
        if contributed:
            contribs.append(modname)
        if just_one:
            break
    return retdict
def get_lookup_table(modname, *lookup, **args):
    """
    get_lookup_table() is used to get lookup-table style sets of variables
    from a common facility, allowing storage in common (global) space so
    that multiple scripts can refer to one lookup table without having to
    manage where this table is stored. E.g. the Calculator (see
    L{Descriptors}) for NIRI data requires a NIRI lookup table that other
    parts of the package, unrelated to Descriptors, also need to access.
    This facility saves these separate components from knowing where the
    configuration is actually stored, or even that other parts of the
    system are relying on it, and ensures that changes will affect every
    part of the system.

    NOTE(review): this is Python-2-era code — `file(...)`, `exec` of a
    file object, and `raise "<string>"` do not work on Python 3; confirm
    the intended runtime before porting.

    @param modname: namespace specifier for the table... in the default
        case this is the directory and file name of the module in which
        the lookup table is stored, and the file is pure python. However,
        the Lookups module can redirect this, using the modname, for
        example, as a key to find the lookup table in a database or
        elsewhere. Nothing like the latter is done at this time, and what
        are loaded are pure python files (e.g. a dict definition) from
        disk.
    @type modname: string
    @param lookup: name of the lookup table to load
    @type lookup: string
    """
    retval = None
    # Resolve the context (defaults to the current default context)
    context = None
    if "context" in args:
        context = args["context"]
    if context == None:
        context = ConfigSpace.get_current_default_context()
    # if not context:
    #     modname = ConfigSpace.lookup_path(modname)
    # else:
    modname = ConfigSpace.lookup_context_path(modname, context=context)
    if not modname or (not os.path.exists(modname)):
        return None
    if ".py" in modname:
        # Pure-python table: execute the file and pull names out of its
        # local namespace `l`
        f = file(modname)
        g = {}
        l = {}
        exec(f, g, l)
        # print "L38:",l.keys(),l
        f.close()
        if len(lookup) == 1:
            # Single name requested: return its value, or [None] if absent
            if lookup[0] in l:
                retval = l[lookup[0]]
            else:
                retval = [None]
        elif len(lookup) == 0:
            # No names requested: return all values from the module
            retval = []
            for key in l:
                retval.append(l[key])
        else:
            # Several names: return values in request order, None for
            # names the module does not define
            retval = []
            for item in lookup:
                if item in l:
                    retval.append(l[item])
                else:
                    retval.append(None)
    elif ".fits" in modname:
        # in this case lookup will have extension ids
        table = pyfits.open(modname)
        if len(lookup) == 1:
            retval = table[lookup[0]]
        else:
            retval = []
            for item in lookup:
                retval.append(table[item])
    else:
        raise "this should never happen, tell someone"
    return retval