Example #1
    def prepare(self, pipeline_elements: list, maximize_metric: bool):

        self.hyperparameter_list = []
        # build space
        self.space = ConfigurationSpace()

        for element in pipeline_elements:
            # check if Switch object

            if isinstance(element, Switch):
                algorithm_options = {
                }  # mapping algorithm name with their child hyper params

                for algo in element.elements:
                    algo_params = []  # hyper params corresponding to "algo"
                    for name, value in algo.hyperparameters.items():
                        smac_param = self._convert_PHOTON_to_smac_space(
                            value,
                            (element.name + "__" +
                             name))  # or element.name__algo.name__name ???
                        algo_params.append(smac_param)
                    algorithm_options[(element.name + "__" +
                                       algo.name)] = algo_params

                algos = CategoricalHyperparameter(
                    name=element.name + "__algos",
                    choices=algorithm_options.keys())
                self.space.add_hyperparameter(algos)
                for algo, params in algorithm_options.items():
                    for param in params:
                        cond = InCondition(child=param,
                                           parent=algos,
                                           values=[algo])
                        self.space.add_hyperparameter(param)
                        self.space.add_condition(cond)

            else:
                for name, value in element.hyperparameters.items():
                    smac_param = self._convert_PHOTON_to_smac_space(
                        value, name)
                    if smac_param is not None:
                        self.space.add_hyperparameter(smac_param)

        self.scenario = Scenario({
            "run_obj": self.run_obj,
            "cutoff_time": self.cutoff_time,
            "runcount_limit": self.runcount_limit,
            "tuner-timeout": self.tuner_timeout,
            "wallclock_limit": self.wallclock_limit,
            "cs": self.space,
            "deterministic": "true"
        })
        self.smac = SMAC4BO(scenario=self.scenario,
                            rng=np.random.RandomState(42))
        self.optimizer = self.smac.solver
        self.optimizer.stats.start_timing()
        self.optimizer.incumbent = self.get_default_incumbent()

        self.flag = False  # False: compute performance of challenger, True: compute performance of incumbent
        self.ask = self.ask_generator()
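The Switch branch above is essentially the standard ConfigSpace pattern of a categorical parent plus InCondition-guarded children. A minimal, self-contained sketch of that pattern; the estimator and parameter names here are invented, not PHOTONAI's:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.conditions import InCondition
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter,
                                         UniformIntegerHyperparameter)

space = ConfigurationSpace()

# Parent choice between two hypothetical algorithms.
algos = CategoricalHyperparameter("estimator__algos",
                                  choices=["SVC", "RandomForest"])
space.add_hyperparameter(algos)

# Child hyperparameters, each active only while its algorithm is selected.
svc_c = UniformFloatHyperparameter("estimator__SVC__C", 1e-3, 1e3)
rf_n = UniformIntegerHyperparameter("estimator__RandomForest__n_estimators",
                                    10, 500)
for child, algo in [(svc_c, "SVC"), (rf_n, "RandomForest")]:
    space.add_hyperparameter(child)
    space.add_condition(InCondition(child=child, parent=algos, values=[algo]))

print(space.sample_configuration())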
Example #2
    def setUp(self):
        self.cs = ConfigurationSpace()
        self.cs.add_hyperparameter(
            UniformFloatHyperparameter("x", lower=MIN_V, upper=MAX_V))

        # Scenario object
        self.scenario = Scenario({
            "run_obj":
            "quality",  # we optimize quality (alternatively runtime)
            "runcount-limit": 50,  # max. number of function evaluations
            "cs": self.cs,  # configuration space
            "deterministic": True,
            "multi_objectives": "metric1, metric2",
            "limit_resources": False,
        })

        self.facade_kwargs = {
            "scenario": self.scenario,
            "rng": np.random.RandomState(5),
            "tae_runner": tae,
        }

        self.parego_facade_kwargs = {
            "scenario": self.scenario,
            "rng": np.random.RandomState(5),
            "tae_runner": tae,
            "multi_objective_algorithm": ParEGO,
            "multi_objective_kwargs": {
                "rho": 0.05
            },
        }
Example #3
    def _get_cfg(self):
        cs = ConfigurationSpace()
        max_depth = UniformIntegerHyperparameter("max_depth",
                                                 3,
                                                 16,
                                                 default_value=3)
        min_child_weight = UniformIntegerHyperparameter("min_child_weight",
                                                        1,
                                                        15,
                                                        default_value=1)
        gamma = UniformFloatHyperparameter("gamma",
                                           0.0,
                                           0.4,
                                           default_value=0.0)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.6,
                                               0.9,
                                               default_value=0.6)
        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.6,
                                                      0.9,
                                                      default_value=0.6)

        cs.add_hyperparameters(
            [max_depth, min_child_weight, gamma, subsample, colsample_bytree])
        return cs
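A space like the one returned by _get_cfg() is normally handed to a SMAC facade together with a Scenario and a target function, as several of the later examples do. A minimal sketch assuming the SMAC 1.x import paths; the two hyperparameters and the dummy objective stand in for the real training run:

import numpy as np
from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import (UniformFloatHyperparameter,
                                         UniformIntegerHyperparameter)
from smac.facade.smac_hpo_facade import SMAC4HPO
from smac.scenario.scenario import Scenario

cs = ConfigurationSpace()
cs.add_hyperparameters([
    UniformIntegerHyperparameter("max_depth", 3, 16, default_value=3),
    UniformFloatHyperparameter("subsample", 0.6, 0.9, default_value=0.6),
])

def evaluate(cfg):
    # Hypothetical objective: in practice, train a model with `cfg` and
    # return a validation loss; a dummy value keeps the sketch runnable.
    return cfg["max_depth"] / 16.0 + (0.9 - cfg["subsample"])

scenario = Scenario({
    "run_obj": "quality",      # optimize solution quality, not runtime
    "runcount-limit": 20,      # at most 20 target function evaluations
    "cs": cs,
    "deterministic": "true",
})
smac = SMAC4HPO(scenario=scenario,
                rng=np.random.RandomState(42),
                tae_runner=evaluate)
incumbent = smac.optimize()
print(incumbent)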
    def getPCS(self):
        '''
        maxDepth: [3,12], tree depth, default 5
        minInstancesPerNode: [1,100], default 10
        minInfoGain: [0,0.1], default 0
        impurity: ["gini", "entropy"], default "gini"
        '''
        # Build Configuration Space which defines all parameters and their
        # ranges
        cs = ConfigurationSpace()
        maxDepth = UniformIntegerHyperparameter("maxDepth",
                                                3,
                                                12,
                                                default_value=5)
        minInstancesPerNode = UniformIntegerHyperparameter(
            "minInstancesPerNode", 1, 100, default_value=10)
        minInfoGain = UniformFloatHyperparameter("minInfoGain",
                                                 1e-06,
                                                 1e-01,
                                                 default_value=1e-06)
        impurity = CategoricalHyperparameter("impurity", ["gini", "entropy"],
                                             default_value="gini")
        cs.add_hyperparameters(
            [maxDepth, minInstancesPerNode, minInfoGain, impurity])
        return cs
    def smac_em(self):
        clu_cs = ConfigurationSpace()
        n_init = UniformIntegerHyperparameter("n_init", 1, 15)
        n_com = UniformIntegerHyperparameter("n_components", 2, 10)
        reg_c = UniformFloatHyperparameter("min_covar", 1e-6, 0.1)
        max_iter = UniformIntegerHyperparameter("n_iters", 10, 1000)
        tr = UniformFloatHyperparameter("threshold", 1e-6, 0.1)
        clu_cs.add_hyperparameters([n_init, tr, reg_c, n_com, max_iter])

        clu_scenario = Scenario({
            "run_obj":
            "quality",  # we optimize quality (alternatively runtime)
            # "runcount-limit": Constants.num_eval,  # maximum function evaluations
            "cs": clu_cs,  # configuration space
            "deterministic": "true",
            "tuner-timeout": constants.em_timeout,
            "wallclock_limit": constants.em_timeout,
            "cutoff_time": constants.em_timeout,
            "runcount-limit": 1
        })

        print('Run EM SMAC ' + self.name)
        smac = SMAC(scenario=clu_scenario, tae_runner=self.run_em)
        parameters = smac.optimize()
        value = smac.get_runhistory().get_cost(parameters)
        return value, parameters
class TestMultiInitialDesign(unittest.TestCase):

    def setUp(self):
        self.cs = ConfigurationSpace()
        self.cs.add_hyperparameter(UniformFloatHyperparameter(
            name="x1", lower=1, upper=10, default_value=2)
        )
        self.scenario = Scenario({'cs': self.cs, 'run_obj': 'quality',
                                  'output_dir': ''})
        self.ta = ExecuteTAFuncDict(lambda x: x["x1"]**2)

    def test_multi_config_design(self):
        stats = Stats(scenario=self.scenario)
        stats.start_timing()
        self.ta.stats = stats
        tj = TrajLogger(output_dir=None, stats=stats)
        rh = RunHistory(aggregate_func=average_cost)
        self.ta.runhistory = rh
        rng = np.random.RandomState(seed=12345)

        intensifier = Intensifier(tae_runner=self.ta, stats=stats, traj_logger=tj, rng=rng, instances=[None],
                                  run_obj_time=False)

        configs = [Configuration(configuration_space=self.cs, values={"x1":4}),
                   Configuration(configuration_space=self.cs, values={"x1":2})]
        dc = MultiConfigInitialDesign(tae_runner=self.ta, scenario=self.scenario, stats=stats,
                             traj_logger=tj, runhistory=rh, rng=rng, configs=configs,
                             intensifier=intensifier, aggregate_func=average_cost)

        inc = dc.run()
        self.assertTrue(stats.ta_runs==2)
        self.assertTrue(len(rh.data)==2)
        self.assertTrue(rh.get_cost(inc) == 4)
    def get_depth(self, cs: ConfigurationSpace, param: str):
        """
        Get the depth of a given parameter name in the configuration space,
        using breadth-first search until a leaf is reached for the first time.

        Parameters
        ----------
        cs: ConfigurationSpace
            ConfigurationSpace to get parents of a parameter
        param: str
            name of parameter to inspect
        """
        parents = cs.get_parents_of(param)
        if not parents:
            return 1
        new_parents = parents
        d = 1
        while new_parents:
            d += 1
            old_parents = new_parents
            new_parents = []
            for p in old_parents:
                pp = cs.get_parents_of(p)
                if pp:
                    new_parents.extend(pp)
                else:
                    return d
    def getPCS(self):
        '''
        maxDepth: [3,12], default 5
        minInstancesPerNode: [1,100], default 10
        minInfoGain: [0,0.1], default 0
        subsamplingRate: [0.01,1], default 1
        numTrees: [1,100], default 50

        '''
        # Build Configuration Space which defines all parameters and their
        # ranges
        cs = ConfigurationSpace()
        maxDepth = UniformIntegerHyperparameter("maxDepth",
                                                3,
                                                12,
                                                default_value=5)
        minInstancesPerNode = UniformIntegerHyperparameter(
            "minInstancesPerNode", 1, 100, default_value=10)
        minInfoGain = UniformFloatHyperparameter("minInfoGain",
                                                 1e-06,
                                                 1e-01,
                                                 default_value=1e-06)
        numTrees = UniformIntegerHyperparameter("numTrees",
                                                1,
                                                100,
                                                default_value=50)
        cs.add_hyperparameters(
            [maxDepth, minInstancesPerNode, minInfoGain, numTrees])
        return cs
class SchafferTest(unittest.TestCase):
    def setUp(self):
        self.cs = ConfigurationSpace()
        self.cs.add_hyperparameter(
            UniformFloatHyperparameter("x", lower=MIN_V, upper=MAX_V))

        # Scenario object
        self.scenario = Scenario({
            "run_obj":
            "quality",  # we optimize quality (alternatively runtime)
            "runcount-limit": 50,  # max. number of function evaluations
            "cs": self.cs,  # configuration space
            "deterministic": True,
            "multi_objectives": "metric1, metric2",
            "limit_resources": False,
        })

        self.facade_kwargs = {
            "scenario": self.scenario,
            "rng": np.random.RandomState(0),
            "tae_runner": tae,
        }

    def test_AC(self):
        smac = SMAC4AC(**self.facade_kwargs)
        incumbent = smac.optimize()

        f1_inc, f2_inc = schaffer(incumbent["x"])
        f1_opt, f2_opt = get_optimum()

        f2_inc = f2_inc / UPSCALING_FACTOR

        self.assertAlmostEqual(f1_inc + f2_inc, f1_opt + f2_opt, places=1)

        return smac
Example #10
    def getPCS(self):
        '''
        maxDepth: [3,12], default 5
        minInstancesPerNode: [1,100], default 10
        minInfoGain: [0,0.1], default 0
        maxIter: [1,50], default 20
        stepSize: [0.001,1], default 0.1

        '''
        # Build Configuration Space which defines all parameters and their
        # ranges
        cs = ConfigurationSpace()
        maxDepth = UniformIntegerHyperparameter("maxDepth",
                                                3,
                                                12,
                                                default_value=5)
        maxIter = UniformIntegerHyperparameter("maxIter",
                                               1,
                                               50,
                                               default_value=20)
        minInstancesPerNode = UniformIntegerHyperparameter(
            "minInstancesPerNode", 1, 100, default_value=10)
        minInfoGain = UniformFloatHyperparameter("minInfoGain",
                                                 1e-06,
                                                 1e-01,
                                                 default_value=1e-06)
        stepSize = UniformFloatHyperparameter("stepSize",
                                              0.001,
                                              1,
                                              default_value=0.1)
        cs.add_hyperparameters(
            [maxDepth, maxIter, minInstancesPerNode, minInfoGain, stepSize])
        return cs
Example #11
        def test_facade(self):
            config_space = ConfigurationSpace()
            n_components = UniformIntegerHyperparameter(
                "PCA__n_components", 5, 30)
            config_space.add_hyperparameter(n_components)
            scenario_dict = {
                "run_obj": "quality",
                "deterministic": "true",
                "cs": config_space,
                "wallclock_limit": 60
            }

            with self.assertRaises(ValueError):
                SMACOptimizer(facade="SMAC4BOO", scenario_dict=scenario_dict)

            with self.assertRaises(ValueError):
                facade = SMAC4BO(scenario=Scenario(scenario_dict))
                SMACOptimizer(facade=facade, scenario_dict=scenario_dict)

            facades = [
                "SMAC4BO", SMAC4BO, "SMAC4AC", SMAC4AC, "SMAC4HPO", SMAC4HPO,
                "BOHB4HPO", BOHB4HPO
            ]
            for facade in facades:
                SMACOptimizer(facade=facade, scenario_dict=scenario_dict)
Example #12
def rf_reg_configs():
    def run(dataset, seed, cfg):
        cfg = {k: cfg[k] for k in cfg}
        cfg["criterion"] = criterion[cfg["criterion"]]
        clf = RandomForestRegressor(random_state=seed, **cfg)
        scores = cross_val_score(clf,
                                 dataset.data,
                                 dataset.target,
                                 cv=5,
                                 scoring="r2")
        cfg = {k: cfg[k] for k in cfg}
        return 1 - np.mean(scores)

    criterion = ["mse", "mae"]
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformIntegerHyperparameter("n_estimators", 1, 200),
        UniformIntegerHyperparameter("criterion", 0, 1),
        UniformIntegerHyperparameter("max_depth", 10, 1000),
        UniformIntegerHyperparameter("min_samples_leaf", 1, 200),
        UniformIntegerHyperparameter("min_samples_split", 2, 100),
        UniformFloatHyperparameter("min_weight_fraction_leaf", 0.0, 0.5),
        UniformFloatHyperparameter("min_impurity_decrease", 0.0, 0.9),
    ])
    return (cs, run, "Random Forest")
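The (cs, run, name) tuple returned above can be wired into a facade by binding the dataset and seed into the target function. A hedged sketch; load_diabetes, the seed and the run count are arbitrary choices, and the "mse"/"mae" criterion names assume an older scikit-learn:

import numpy as np
from sklearn.datasets import load_diabetes
from smac.facade.smac_hpo_facade import SMAC4HPO
from smac.scenario.scenario import Scenario

cs, run, name = rf_reg_configs()
dataset, seed = load_diabetes(), 0

scenario = Scenario({
    "run_obj": "quality",
    "runcount-limit": 30,
    "cs": cs,
    "deterministic": "true",
})
# Bind dataset and seed so SMAC only has to pass the sampled configuration.
smac = SMAC4HPO(scenario=scenario,
                rng=np.random.RandomState(seed),
                tae_runner=lambda cfg: run(dataset, seed, cfg))
incumbent = smac.optimize()
print(name, incumbent.get_dictionary())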
Example #13
def mlp_configs():
    def run(dataset, seed, cfg):
        cfg = {k: cfg[k] for k in cfg}
        cfg["activation"] = activation[cfg["activation"]]
        cfg["solver"] = solver[cfg["solver"]]
        clf = MLPRegressor(random_state=seed, **cfg)
        scores = cross_val_score(clf,
                                 dataset.data,
                                 dataset.target,
                                 cv=5,
                                 scoring="r2")
        cfg = {k: cfg[k] for k in cfg}
        return 1 - np.mean(scores)

    activation = ["identity", "logistic", "tanh", "relu"]
    solver = ["lbfgs", "sgd", "adam"]
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformIntegerHyperparameter("activation", 0, 3),
        UniformIntegerHyperparameter("solver", 0, 2),
        UniformFloatHyperparameter("tol", 1e-7, 1e-1),
        UniformFloatHyperparameter("alpha", 1e-7, 1e-1),
        UniformIntegerHyperparameter("max_iter", 10, 1000),
    ])
    return (cs, run, "MLP")
    def _set_param_space(self, param_space):
        self.param_space = param_space

        from ConfigSpace.hyperparameters import UniformFloatHyperparameter
        from smac.configspace import ConfigurationSpace
        from smac.optimizer.objective import average_cost
        from smac.runhistory.runhistory import RunHistory
        from smac.scenario.scenario import Scenario
        from smac.stats.stats import Stats
        from smac.utils.io.traj_logging import TrajLogger

        self.cs = ConfigurationSpace()
        for param in param_space:
            if param.type == 'continuous':
                var = UniformFloatHyperparameter(param.name, param.low,
                                                 param.high)
                self.cs.add_hyperparameter(var)
        self.runhistory = RunHistory(aggregate_func=average_cost)
        self.scenario = Scenario({
            'run_obj': 'quality',
            'runcount-limit': self.budget,
            'cs': self.cs
        })
        self.stats = Stats(self.scenario)
        self.traj_logger = TrajLogger(output_dir=__scratch__, stats=self.stats)
    def setUp(self):
        self.cs = ConfigurationSpace()
        self.cs.add_hyperparameter(
            UniformFloatHyperparameter(name="x1", lower=1, upper=10,
                                       default_value=2))
        self.scenario = Scenario({'cs': self.cs, 'run_obj': 'quality'})
        self.ta = ExecuteTAFuncDict(lambda x: x["x1"]**2)
Example #16
def impute_default_values(configuration_space: ConfigurationSpace,
                          configs_array: np.ndarray) -> np.ndarray:
    """Impute inactive hyperparameters in configuration array with their default.

    Necessary to apply an EPM to the data.

    Parameters
    ----------
    configuration_space : ConfigurationSpace
    
    configs_array : np.ndarray
        Array of configurations.

    Returns
    -------
    np.ndarray
        Array with configuration hyperparameters. Inactive values are imputed
        with their default value.
    """
    for hp in configuration_space.get_hyperparameters():
        default = hp.normalized_default_value
        idx = configuration_space.get_idx_by_hyperparameter_name(hp.name)
        nonfinite_mask = ~np.isfinite(configs_array[:, idx])
        configs_array[nonfinite_mask, idx] = default

    return configs_array
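A hedged usage sketch: inactive hyperparameters appear as NaN in a configuration's array form, which is exactly what the mask above targets. The conditional space below is invented for illustration:

import numpy as np
from ConfigSpace import ConfigurationSpace
from ConfigSpace.conditions import EqualsCondition
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)

cs = ConfigurationSpace()
kind = CategoricalHyperparameter("kind", ["a", "b"])
width = UniformFloatHyperparameter("width", 0.0, 1.0)
cs.add_hyperparameters([kind, width])
# "width" is only active while kind == "a", so it can be inactive (NaN).
cs.add_condition(EqualsCondition(width, kind, "a"))

configs = cs.sample_configuration(5)
X = np.array([c.get_array() for c in configs])  # inactive entries are NaN
X = impute_default_values(cs, X)                # NaNs replaced by defaults
print(X)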
Example #17
def smac():
    # Build Configuration Space which defines all parameters and their ranges
    configuration_space = ConfigurationSpace()

    #Define initial ranges
    num_of_hidden_nodes = UniformIntegerHyperparameter("num_of_hidden_nodes",
                                                       1,
                                                       max_hidden_nodes,
                                                       default_value=1)
    decay = UniformFloatHyperparameter("decay", 0, 0.1, default_value=0)

    configuration_space.add_hyperparameters([num_of_hidden_nodes, decay])

    # creating the scenario object
    scenario = Scenario({
        "run_obj": "quality",
        "runcount-limit": hyperparameter_tuning_configs.SMAC_RUNCOUNT_LIMIT,
        "cs": configuration_space,
        "deterministic": "true",
        "abort_on_first_run_crash": "false"
    })

    # optimize using an SMAC object
    smac = SMAC(scenario=scenario,
                rng=np.random.RandomState(1),
                tae_runner=train_model)

    incumbent = smac.optimize()
    return incumbent.get_dictionary()
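train_model, max_hidden_nodes and hyperparameter_tuning_configs are defined elsewhere in that project. A hedged sketch of the shape such a target function usually takes; the body is a placeholder, not the original implementation:

def train_model(cfg):
    # SMAC passes a Configuration; treat it like a read-only dict.
    num_of_hidden_nodes = cfg["num_of_hidden_nodes"]
    decay = cfg["decay"]
    # ... build, train and validate the model here ...
    validation_error = 1.0  # placeholder so the sketch stands alone
    return validation_error  # SMAC minimizes the returned value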
Example #18
    def test_save_load_configspace(self):
        """Check if inputreader can load different config-spaces"""
        cs = ConfigurationSpace()
        hyp = UniformFloatHyperparameter('A', 0.0, 1.0, default_value=0.5)
        cs.add_hyperparameters([hyp])

        output_writer = OutputWriter()
        input_reader = InputReader()

        # pcs_new
        output_writer.save_configspace(cs, self.pcs_fn, 'pcs_new')
        restored_cs = input_reader.read_pcs_file(self.pcs_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.pcs_fn, self.logger)
        self.assertEqual(cs, restored_cs)

        # json
        output_writer.save_configspace(cs, self.json_fn, 'json')
        restored_cs = input_reader.read_pcs_file(self.json_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.json_fn, self.logger)
        self.assertEqual(cs, restored_cs)

        # pcs
        with open(self.pcs_fn, 'w') as fh:
            fh.write(pcs.write(cs))
        restored_cs = input_reader.read_pcs_file(self.pcs_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.pcs_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.pcs_fn, self.logger)
        self.assertEqual(cs, restored_cs)
Example #19
class TestSingleInitialDesign(unittest.TestCase):
    def setUp(self):
        self.cs = ConfigurationSpace()
        self.cs.add_hyperparameter(
            UniformFloatHyperparameter(name="x1", lower=1, upper=10,
                                       default_value=2))
        self.scenario = Scenario({
            'cs': self.cs,
            'run_obj': 'quality',
            'output_dir': ''
        })
        self.ta = ExecuteTAFuncDict(lambda x: x["x1"]**2)

    def test_single_default_config_design(self):
        stats = Stats(scenario=self.scenario)
        stats.start_timing()
        self.ta.stats = stats
        tj = TrajLogger(output_dir=None, stats=stats)
        rh = RunHistory(aggregate_func=average_cost)

        dc = DefaultConfiguration(tae_runner=self.ta,
                                  scenario=self.scenario,
                                  stats=stats,
                                  traj_logger=tj,
                                  rng=np.random.RandomState(seed=12345))

        inc = dc.run()
        self.assertTrue(stats.ta_runs == 1)
        self.assertTrue(len(rh.data) == 0)
    def smac_gm(self):
        clu_cs = ConfigurationSpace()
        cov_t = CategoricalHyperparameter(
            "covariance_type", ["full", "tied", "diag", "spherical"])
        tol = UniformFloatHyperparameter("tol", 1e-6, 0.1)
        reg_c = UniformFloatHyperparameter("reg_covar", 1e-10, 0.1)
        n_com = UniformIntegerHyperparameter("n_components", 2, 10)
        max_iter = UniformIntegerHyperparameter("max_iter", 10, 1000)
        clu_cs.add_hyperparameters([cov_t, tol, reg_c, n_com, max_iter])

        clu_scenario = Scenario({
            "run_obj":
            "quality",  # we optimize quality (alternatively runtime)
            # "runcount-limit": Constants.num_eval,  # maximum function evaluations
            "cs": clu_cs,  # configuration space
            "deterministic": "true",
            "tuner-timeout": constants.timeout,
            "wallclock_limit": constants.timeout,
            "cutoff_time": constants.timeout
        })
        print('Run GM SMAC ' + self.name)
        smac = SMAC(scenario=clu_scenario, tae_runner=self.run_gm)
        parameters = smac.optimize()
        value = smac.get_runhistory().get_cost(parameters)
        return value, parameters
Example #21
    def _get_cfg(self):
        n_features = self.X_train.shape[1]

        cs = ConfigurationSpace()
        max_features = UniformIntegerHyperparameter("max_features",
                                                    1,
                                                    n_features,
                                                    default_value=1)
        min_samples_split = UniformIntegerHyperparameter("min_samples_split",
                                                         2,
                                                         50,
                                                         default_value=2)
        min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf",
                                                        1,
                                                        50,
                                                        default_value=1)
        min_weight_fraction_leaf = UniformFloatHyperparameter(
            "min_weight_fraction_leaf", 0.0, 0.5, default_value=0.0)
        max_leaf_nodes = UniformIntegerHyperparameter("max_leaf_nodes",
                                                      10,
                                                      1000,
                                                      default_value=100)

        cs.add_hyperparameters([
            max_features, min_samples_split, min_samples_leaf,
            min_weight_fraction_leaf, max_leaf_nodes
        ])
        return cs
    def getPCS(self):
        '''
        maxIter: [1,100], maximum number of iterations, default 50
        regParam: [0,0.2], regularization parameter, default 0
        tol: [1e-6,1e-1], convergence tolerance of the iterative algorithm, default 1e-6


        '''
        # Build Configuration Space which defines all parameters and their
        # ranges
        cs = ConfigurationSpace()
        maxIter = UniformIntegerHyperparameter("maxIter",
                                               1,
                                               100,
                                               default_value=50)
        regParam = UniformFloatHyperparameter("regParam",
                                              0,
                                              0.4,
                                              default_value=1e-04)
        tol = UniformFloatHyperparameter("tol",
                                         1e-06,
                                         1e-01,
                                         default_value=1e-06)
        cs.add_hyperparameters([maxIter, regParam, tol])
        return cs
Example #23
def addSearchSpaceGrid(
    hp: SearchSpaceGrid, disc: int, parent_disc: Hyperparameter, cs: ConfigurationSpace
) -> None:
    smac = SearchSpaceGridtoSMAC(hp, disc)
    for hyp in smac:
        cs.add_hyperparameter(hyp)
        cs.add_condition(EqualsCondition(child=hyp, parent=parent_disc, value=disc))
def get_smac_func_and_space(func, cp_domain):
    """ Returns a function to be passed to SMAC. """
    from smac.configspace import ConfigurationSpace
    from ConfigSpace.hyperparameters import CategoricalHyperparameter, \
         UniformFloatHyperparameter, UniformIntegerHyperparameter
    smac_configs = []
    num_dims_per_domain = []
    disc_spaces = []
    cp_dom_types = []
    # Iterate through each domain
    counter = 0
    for dom in cp_domain.list_of_domains:
        dom_type = dom.get_type()
        cp_dom_types.append(dom_type)
        if dom_type == 'euclidean':
            num_dims_per_domain.append(dom.get_dim())
            for bds in dom.bounds:
                disc_spaces.append(None)
                counter, var_label = _get_var_label_with_counter(counter)
                smac_configs.append(
                    UniformFloatHyperparameter(
                        var_label,
                        bds[0],
                        bds[1],
                        default_value=(bds[0] + bds[1]) / 2))
        elif dom_type == 'integral':
            num_dims_per_domain.append(dom.get_dim())
            for bds in dom.bounds:
                disc_spaces.append(None)
                counter, var_label = _get_var_label_with_counter(counter)
                smac_configs.append(
                    UniformIntegerHyperparameter(
                        var_label,
                        bds[0],
                        bds[1],
                        default_value=(bds[0] + bds[1]) // 2))
        elif dom_type in ['prod_discrete', 'prod_discrete_numeric']:
            num_dims_per_domain.append(dom.get_dim())
            for lois in dom.list_of_list_of_items:
                curr_disc_to_idx_converter = DiscItemsToIndexConverter(lois)
                disc_spaces.append(curr_disc_to_idx_converter)
                counter, var_label = _get_var_label_with_counter(counter)
                smac_configs.append(
                    CategoricalHyperparameter(
                        var_label,
                        curr_disc_to_idx_converter.indices,
                        default_value=curr_disc_to_idx_converter.indices[0]))
    smac_space = ConfigurationSpace()
    smac_space.add_hyperparameters(smac_configs)
    # The convert back function
    smac_convert_pt_back = lambda x: _convert_alphabetical_dict_repr_to_cp_domain_repr(
        x, num_dims_per_domain, cp_dom_types, disc_spaces)
    # Then the function
    smac_func_to_min = lambda x: -func(smac_convert_pt_back(x))[0]
    return smac_func_to_min, smac_space, smac_convert_pt_back
Example #25
    def _create_config_space(self):
        self.cs = ConfigurationSpace()
        for var_index, var_dict in enumerate(self.param_dict['variables']):
            variable = var_dict[self.var_names[var_index]]
            if variable['type'] == 'float':
                param = UniformFloatHyperparameter(
                    'x%d' % var_index, variable['low'], variable['high']
                )  #, default = np.random.uniform(low = variable['low'], high = variable['high'], size = variable['size']))
            else:
                raise NotImplementedError()
            self.cs.add_hyperparameter(param)
Example #26
    def _get_cfg(self):
        n_features = self.X_train.shape[1]

        cs = ConfigurationSpace()
        max_depth = UniformIntegerHyperparameter("max_depth", 5, 16, default_value=5)
        min_samples_split = UniformIntegerHyperparameter("min_samples_split", 200, 1000, default_value=200)
        min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf", 30, 70, default_value=30)
        max_features = UniformIntegerHyperparameter("max_features", 1, n_features, default_value=1)
        subsample = UniformFloatHyperparameter("subsample", 0.6, 0.9, default_value=0.6)

        cs.add_hyperparameters([max_depth, min_samples_split, min_samples_leaf, max_features, subsample])
        return cs
Example #27
def get_gp(n_dimensions,
           rs,
           noise=1e-3,
           normalize_y=True,
           average_samples=False,
           n_iter=50):
    from smac.epm.gp_kernels import ConstantKernel, Matern, WhiteKernel

    cov_amp = ConstantKernel(
        2.0,
        constant_value_bounds=(1e-10, 2),
        prior=LognormalPrior(mean=0.0, sigma=1.0, rng=rs),
    )
    exp_kernel = Matern(
        np.ones([n_dimensions]),
        [(np.exp(-10), np.exp(2)) for _ in range(n_dimensions)],
        nu=2.5,
        prior=None,
    )
    noise_kernel = WhiteKernel(
        noise_level=noise,
        noise_level_bounds=(1e-10, 2),
        prior=HorseshoePrior(scale=0.1, rng=rs),
    )
    kernel = cov_amp * exp_kernel + noise_kernel

    n_mcmc_walkers = 3 * len(kernel.theta)
    if n_mcmc_walkers % 2 == 1:
        n_mcmc_walkers += 1

    bounds = [(0., 1.) for _ in range(n_dimensions)]
    types = np.zeros(n_dimensions)

    configspace = ConfigurationSpace()
    for i in range(n_dimensions):
        configspace.add_hyperparameter(
            UniformFloatHyperparameter('x%d' % i, 0, 1))

    model = GaussianProcessMCMC(
        configspace=configspace,
        types=types,
        bounds=bounds,
        kernel=kernel,
        n_mcmc_walkers=n_mcmc_walkers,
        chain_length=n_iter,
        burnin_steps=n_iter,
        normalize_y=normalize_y,
        seed=rs.randint(low=1, high=10000),
        mcmc_sampler='emcee',
        average_samples=average_samples,
    )
    return model
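A hedged usage sketch for the helper above; it assumes the emcee sampler is installed, that inputs are already scaled to the unit cube, and that targets are passed as a column vector:

rs = np.random.RandomState(1)
model = get_gp(n_dimensions=2, rs=rs, n_iter=20)

X = rs.rand(10, 2)                        # inputs already scaled to [0, 1]
y = np.sin(X).sum(axis=1, keepdims=True)  # toy targets
model.train(X, y)

mean, var = model.predict(rs.rand(5, 2))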
    def test_initializations(self):
        cs = ConfigurationSpace()
        for i in range(40):
            cs.add_hyperparameter(UniformFloatHyperparameter('x%d' % (i + 1), 0, 1))
        scenario = Scenario({'cs': cs, 'run_obj': 'quality'})
        hb_kwargs = {'initial_budget': 1, 'max_budget': 3}
        facade = HB4AC(scenario=scenario, intensifier_kwargs=hb_kwargs)

        self.assertIsInstance(facade.solver.initial_design, RandomConfigurations)
        self.assertIsInstance(facade.solver.epm_chooser.model, RandomEPM)
        self.assertIsInstance(facade.solver.intensifier, Hyperband)
        self.assertEqual(facade.solver.intensifier.min_chall, 1)
        self.output_dirs.append(scenario.output_dir)
Example #29
def _create_config_space(dict_hyperparams):
    """Create the hyperparameters hyperspace."""
    config_space = ConfigurationSpace()

    if not isinstance(dict_hyperparams, dict):
        raise TypeError('Hyperparams must be a dictionary.')

    for name, hyperparam in dict_hyperparams.items():
        hp_type = hyperparam['type']

        if hp_type == 'int':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            hp_default = hyperparam.get('default') or hp_min
            config_space.add_hyperparameter(
                hp.UniformIntegerHyperparameter(name,
                                                hp_min,
                                                hp_max,
                                                default_value=hp_default))

        elif hp_type == 'float':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            hp_default = hyperparam.get('default') or hp_min
            config_space.add_hyperparameter(
                hp.UniformFloatHyperparameter(name,
                                              hp_min,
                                              hp_max,
                                              default_value=hp_default))

        elif hp_type == 'bool':
            hp_default = bool(hyperparam.get('default'))
            # Map the boolean default onto the string choices.
            hp_default = 'true' if hp_default else 'false'
            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, ['true', 'false'],
                                             default_value=hp_default))

        elif hp_type == 'str':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_range = [_NONE if hp is None else hp for hp in hp_range]
            hp_default = hyperparam.get('default') or hp_range[0]
            hp_default = _NONE if hp_default is None else hp_default

            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name,
                                             hp_range,
                                             default_value=hp_default))

    return config_space
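A hedged usage sketch showing the input shape the branches above expect; the parameter names, ranges and defaults are invented:

dict_hyperparams = {
    'n_estimators': {'type': 'int', 'range': [10, 500], 'default': 100},
    'learning_rate': {'type': 'float', 'range': [1e-4, 1e-1]},
    'bootstrap': {'type': 'bool', 'default': True},
    'criterion': {'type': 'str', 'values': ['gini', 'entropy'],
                  'default': 'gini'},
}
config_space = _create_config_space(dict_hyperparams)
print(config_space)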
    def getPCS(self):
        '''
        smoothing: [0.01,100], default 1

        '''
        # Build Configuration Space which defines all parameters and their
        # ranges
        cs = ConfigurationSpace()
        smoothing = UniformFloatHyperparameter(
            "smoothing", 0.01, 100, default_value=1)

        cs.add_hyperparameters(
            [smoothing])
        return cs
Example #31
import numpy as np

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter
from fanova import fANOVA
import fanova.visualizer

import os
path = os.path.dirname(os.path.realpath(__file__))

# get sample data from online lda
X = np.loadtxt(path + '/example_data/online_lda/online_lda_features.csv', delimiter=",")
Y = np.loadtxt(path + '/example_data/online_lda/online_lda_responses.csv', delimiter=",")

# setting up config space:
param_file = path + '/example_data/online_lda/param-file.txt'
f = open(param_file, 'rb')

cs = ConfigurationSpace()
for row in f:
    cs.add_hyperparameter(
        UniformFloatHyperparameter("%s" % row[0:4].decode('utf-8'),
                                   float(row[6:9]),
                                   float(row[10:13]),
                                   float(row[18:21])))
param = cs.get_hyperparameters()


# create an instance of fanova with data for the random forest and the configSpace
f = fANOVA(X = X, Y = Y, config_space = cs)

# marginal for first parameter
p_list = (0, )
res = f.quantify_importance(p_list)
print(res)

p2_list = ('Col1', 'Col2')
res2 = f.quantify_importance(p2_list)