Exemplo n.º 1
0
def rf_reg_configs():
    """Build a SMAC search setup for tuning a RandomForestRegressor.

    Returns:
        tuple: (ConfigurationSpace, objective function, display name).
        The objective runs 5-fold cross-validation and returns
        ``1 - mean(r2)`` so a minimizer maximizes R^2.
    """
    def run(dataset, seed, cfg):
        # Copy so mutating below does not alter the caller's Configuration.
        cfg = {k: cfg[k] for k in cfg}
        # The integer-encoded criterion index is mapped back to its name.
        cfg["criterion"] = criterion[cfg["criterion"]]
        clf = RandomForestRegressor(random_state=seed, **cfg)
        scores = cross_val_score(clf,
                                 dataset.data,
                                 dataset.target,
                                 cv=5,
                                 scoring="r2")
        # NOTE: a redundant re-copy of cfg after scoring was removed here;
        # it had no effect on the returned loss.
        return 1 - np.mean(scores)

    criterion = ["mse", "mae"]
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformIntegerHyperparameter("n_estimators", 1, 200),
        UniformIntegerHyperparameter("criterion", 0, 1),
        UniformIntegerHyperparameter("max_depth", 10, 1000),
        UniformIntegerHyperparameter("min_samples_leaf", 1, 200),
        UniformIntegerHyperparameter("min_samples_split", 2, 100),
        UniformFloatHyperparameter("min_weight_fraction_leaf", 0.0, 0.5),
        UniformFloatHyperparameter("min_impurity_decrease", 0.0, 0.9),
    ])
    return (cs, run, "Random Forest")
Exemplo n.º 2
0
def tree_configs(scale=1.0):
    """Build a SMAC search setup for a DecisionTreeClassifier.

    Args:
        scale: Multiplier shrinking/expanding the numeric ranges; at or
            below 0.4 the criterion/splitter choices are dropped from the
            space entirely (and therefore not remapped in ``run``).

    Returns:
        tuple: (ConfigurationSpace, objective function, display name).
    """
    def run(dataset, seed, cfg):
        # Copy so the mapping below does not mutate the caller's config.
        cfg = {k: cfg[k] for k in cfg}
        if scale > 0.4:
            # Integer-encoded indices are translated back to names.
            cfg["criterion"] = criterion[cfg["criterion"]]
            cfg["splitter"] = splitter[cfg["splitter"]]
        clf = DecisionTreeClassifier(random_state=seed, **cfg)
        scores = cross_val_score(clf, dataset.data, dataset.target, cv=5)
        return 1 - np.mean(scores)

    # Repeated entries keep every index in [0, 5] valid while giving each
    # underlying choice equal weight.
    criterion = ["gini", "entropy", "gini", "entropy", "gini", "entropy"]
    splitter = ["best", "random", "best", "random", "best", "random"]
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformIntegerHyperparameter("max_depth", 10, 1500 * scale),
        UniformIntegerHyperparameter("min_samples_leaf", 1, 400 * scale),
        UniformIntegerHyperparameter("min_samples_split", 2, 300 * scale),
        UniformFloatHyperparameter("min_weight_fraction_leaf", 0.0,
                                   0.5 * scale),
        UniformFloatHyperparameter("min_impurity_decrease", 0.0, 1.0 * scale),
    ])
    if scale > 0.4:
        cs.add_hyperparameters([
            UniformIntegerHyperparameter("criterion", 0, 5),
            UniformIntegerHyperparameter("splitter", 0, 5),
        ])
    # Typo fix in the display name: "Desicion" -> "Decision".
    return (cs, run, "Decision Tree, %.2lf" % scale)
Exemplo n.º 3
0
def mlp_configs():
    """Build a SMAC search setup for tuning an MLPRegressor.

    Returns:
        tuple: (ConfigurationSpace, objective function, display name).
        The objective runs 5-fold CV and returns ``1 - mean(r2)``.
    """
    def run(dataset, seed, cfg):
        # Copy so mutating below does not alter the caller's Configuration.
        cfg = {k: cfg[k] for k in cfg}
        # Integer-encoded choices are mapped back to their string values.
        cfg["activation"] = activation[cfg["activation"]]
        cfg["solver"] = solver[cfg["solver"]]
        clf = MLPRegressor(random_state=seed, **cfg)
        scores = cross_val_score(clf,
                                 dataset.data,
                                 dataset.target,
                                 cv=5,
                                 scoring="r2")
        # NOTE: a redundant re-copy of cfg after scoring was removed here;
        # it had no effect on the returned loss.
        return 1 - np.mean(scores)

    activation = ["identity", "logistic", "tanh", "relu"]
    solver = ["lbfgs", "sgd", "adam"]
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformIntegerHyperparameter("activation", 0, 3),
        UniformIntegerHyperparameter("solver", 0, 2),
        UniformFloatHyperparameter("tol", 1e-7, 1e-1),
        UniformFloatHyperparameter("alpha", 1e-7, 1e-1),
        UniformIntegerHyperparameter("max_iter", 10, 1000),
    ])
    return (cs, run, "MLP")
Exemplo n.º 4
0
    def config_space(self):
        """Return the decision-tree hyperparameter search space."""

        config = ConfigurationSpace()
        config.seed(self.random_state)
        # NOTE: ConfigSpace rejects None as a default, so the literal
        # string 'none' is used here and translated to None in the base
        # class.
        config.add_hyperparameters(
            (
                CategoricalHyperparameter(
                    'criterion', ['gini', 'entropy'], default_value='gini'
                ),
                CategoricalHyperparameter(
                    'max_depth', [5, 10, 20, 'none'], default_value='none'
                ),
                CategoricalHyperparameter(
                    'max_features', ['auto', 'sqrt', 'log2', 'none'],
                    default_value='none'
                ),
                UniformIntegerHyperparameter(
                    'min_samples_leaf', lower=2, upper=5, default_value=3
                ),
            )
        )
        return config
Exemplo n.º 5
0
    def test_save_load_configspace(self):
        """Round-trip a config space through pcs_new, json and legacy pcs."""
        cs = ConfigurationSpace()
        hyp = UniformFloatHyperparameter('A', 0.0, 1.0, default_value=0.5)
        cs.add_hyperparameters([hyp])

        output_writer = OutputWriter()
        input_reader = InputReader()

        # pcs_new: read back both with and without an explicit logger.
        output_writer.save_configspace(cs, self.pcs_fn, 'pcs_new')
        restored_cs = input_reader.read_pcs_file(self.pcs_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.pcs_fn, self.logger)
        self.assertEqual(cs, restored_cs)

        # json
        output_writer.save_configspace(cs, self.json_fn, 'json')
        restored_cs = input_reader.read_pcs_file(self.json_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.json_fn, self.logger)
        self.assertEqual(cs, restored_cs)

        # pcs (legacy format, written directly via the pcs module).
        # NOTE: a duplicated read/assert pair was removed here (copy-paste).
        with open(self.pcs_fn, 'w') as fh:
            fh.write(pcs.write(cs))
        restored_cs = input_reader.read_pcs_file(self.pcs_fn)
        self.assertEqual(cs, restored_cs)
        restored_cs = input_reader.read_pcs_file(self.pcs_fn, self.logger)
        self.assertEqual(cs, restored_cs)
Exemplo n.º 6
0
    def getPCS(self):
        """Build the hyperparameter search space for this model.

        Coded ranges (original notes translated from Chinese):
            maxIter: [1, 100], maximum iterations, default 50.
            regParam: [0, 0.4], regularization parameter, default 1e-4.
            tol: [1e-6, 1e-1], convergence tolerance, default 1e-6.
        """
        space = ConfigurationSpace()
        space.add_hyperparameters([
            UniformIntegerHyperparameter("maxIter", 1, 100,
                                         default_value=50),
            UniformFloatHyperparameter("regParam", 0, 0.4,
                                       default_value=1e-04),
            UniformFloatHyperparameter("tol", 1e-06, 1e-01,
                                       default_value=1e-06),
        ])
        return space
Exemplo n.º 7
0
    def _get_cfg(self):
        """Assemble the boosting-style hyperparameter search space."""
        space = ConfigurationSpace()
        space.add_hyperparameters([
            UniformIntegerHyperparameter("max_depth", 3, 16,
                                         default_value=3),
            UniformIntegerHyperparameter("min_child_weight", 1, 15,
                                         default_value=1),
            UniformFloatHyperparameter("gamma", 0.0, 0.4,
                                       default_value=0.0),
            UniformFloatHyperparameter("subsample", 0.6, 0.9,
                                       default_value=0.6),
            UniformFloatHyperparameter("colsample_bytree", 0.6, 0.9,
                                       default_value=0.6),
        ])
        return space
Exemplo n.º 8
0
    def smac_em(self):
        """Run a single-evaluation SMAC search for EM clustering.

        Returns:
            tuple: (cost of the incumbent configuration, incumbent).
        """
        space = ConfigurationSpace()
        # Addition order matches the original: n_init, threshold,
        # min_covar, n_components, n_iters.
        space.add_hyperparameters([
            UniformIntegerHyperparameter("n_init", 1, 15),
            UniformFloatHyperparameter("threshold", 1e-6, 0.1),
            UniformFloatHyperparameter("min_covar", 1e-6, 0.1),
            UniformIntegerHyperparameter("n_components", 2, 10),
            UniformIntegerHyperparameter("n_iters", 10, 1000),
        ])

        scenario = Scenario({
            "run_obj":
            "quality",  # we optimize quality (alternatively runtime)
            "cs": space,  # configuration space
            "deterministic": "true",
            "tuner-timeout": constants.em_timeout,
            "wallclock_limit": constants.em_timeout,
            "cutoff_time": constants.em_timeout,
            "runcount-limit": 1
        })

        print('Run EM SMAC ' + self.name)
        optimizer = SMAC(scenario=scenario, tae_runner=self.run_em)
        incumbent = optimizer.optimize()
        cost = optimizer.get_runhistory().get_cost(incumbent)
        return cost, incumbent
Exemplo n.º 9
0
    def smac_gm(self):
        """Run a SMAC search for Gaussian-mixture clustering.

        Returns:
            tuple: (cost of the incumbent configuration, incumbent).
        """
        space = ConfigurationSpace()
        space.add_hyperparameters([
            CategoricalHyperparameter(
                "covariance_type", ["full", "tied", "diag", "spherical"]),
            UniformFloatHyperparameter("tol", 1e-6, 0.1),
            UniformFloatHyperparameter("reg_covar", 1e-10, 0.1),
            UniformIntegerHyperparameter("n_components", 2, 10),
            UniformIntegerHyperparameter("max_iter", 10, 1000),
        ])

        scenario = Scenario({
            "run_obj":
            "quality",  # we optimize quality (alternatively runtime)
            "cs": space,  # configuration space
            "deterministic": "true",
            "tuner-timeout": constants.timeout,
            "wallclock_limit": constants.timeout,
            "cutoff_time": constants.timeout
        })
        print('Run GM SMAC ' + self.name)
        optimizer = SMAC(scenario=scenario, tae_runner=self.run_gm)
        incumbent = optimizer.optimize()
        cost = optimizer.get_runhistory().get_cost(incumbent)
        return cost, incumbent
Exemplo n.º 10
0
    def getPCS(self):
        """Build the hyperparameter search space for this model.

        Coded ranges (original notes translated from Chinese):
            maxDepth: [3, 12], default 5.
            maxIter: [1, 50], default 20.
            minInstancesPerNode: [1, 100], default 10.
            minInfoGain: coded as [1e-6, 1e-1], default 1e-6.
            stepSize: [0.001, 1], default 0.1.
        """
        space = ConfigurationSpace()
        space.add_hyperparameters([
            UniformIntegerHyperparameter("maxDepth", 3, 12,
                                         default_value=5),
            UniformIntegerHyperparameter("maxIter", 1, 50,
                                         default_value=20),
            UniformIntegerHyperparameter("minInstancesPerNode", 1, 100,
                                         default_value=10),
            UniformFloatHyperparameter("minInfoGain", 1e-06, 1e-01,
                                       default_value=1e-06),
            UniformFloatHyperparameter("stepSize", 0.001, 1,
                                       default_value=0.1),
        ])
        return space
Exemplo n.º 11
0
    def config_space(self):
        """Return the hyperparameter search space for this estimator.

        NOTE(review): the original docstring said "Decision tree", but
        n_estimators suggests an ensemble — confirm against the caller.
        """
        config = ConfigurationSpace()
        config.seed(self.random_state)
        # 'none' is a string placeholder; ConfigSpace rejects None
        # defaults and the base class maps it back to None.
        config.add_hyperparameters(
            (
                UniformIntegerHyperparameter(
                    'n_estimators', lower=10, upper=200, default_value=100
                ),
                CategoricalHyperparameter(
                    'criterion', ['gini', 'entropy'], default_value='gini'
                ),
                CategoricalHyperparameter(
                    'max_depth', [5, 10, 20, 'none'], default_value='none'
                ),
                CategoricalHyperparameter(
                    'max_features', ['auto', 'sqrt', 'log2', 'none'],
                    default_value='none'
                ),
            )
        )
        return config
 def getPCS(self):
     '''
     Build the hyperparameter search space (original notes translated
     from Chinese):

     maxDepth: [3, 12] tree depth, default 5
     minInstancesPerNode: [1, 100], default 10
     minInfoGain: coded as [1e-6, 1e-1], default 1e-6
     impurity: ["gini", "entropy"], default "gini"
     '''
     # Build Configuration Space which defines all parameters and their
     # ranges
     cs = ConfigurationSpace()
     maxDepth = UniformIntegerHyperparameter("maxDepth",
                                             3,
                                             12,
                                             default_value=5)
     minInstancesPerNode = UniformIntegerHyperparameter(
         "minInstancesPerNode", 1, 100, default_value=10)
     minInfoGain = UniformFloatHyperparameter("minInfoGain",
                                              1e-06,
                                              1e-01,
                                              default_value=1e-06)
     impurity = CategoricalHyperparameter("impurity", ["gini", "entropy"],
                                          default_value="gini")
     cs.add_hyperparameters(
         [maxDepth, minInstancesPerNode, minInfoGain, impurity])
     return cs
    def getPCS(self):
        """Build the hyperparameter search space for this model.

        Coded ranges (original notes translated from Chinese):
            maxDepth: [3, 12], default 5.
            minInstancesPerNode: [1, 100], default 10.
            minInfoGain: coded as [1e-6, 1e-1], default 1e-6.
            numTrees: [1, 100], default 50.

        NOTE(review): the original notes also mention
        subsamplingRate=[0.01, 1], which is not part of the coded space.
        """
        space = ConfigurationSpace()
        space.add_hyperparameters([
            UniformIntegerHyperparameter("maxDepth", 3, 12,
                                         default_value=5),
            UniformIntegerHyperparameter("minInstancesPerNode", 1, 100,
                                         default_value=10),
            UniformFloatHyperparameter("minInfoGain", 1e-06, 1e-01,
                                       default_value=1e-06),
            UniformIntegerHyperparameter("numTrees", 1, 100,
                                         default_value=50),
        ])
        return space
Exemplo n.º 14
0
    def _get_cfg(self):
        """Build the tree search space; max_features is capped at the
        number of columns in the training data."""
        n_features = self.X_train.shape[1]

        cs = ConfigurationSpace()
        cs.add_hyperparameters([
            UniformIntegerHyperparameter("max_features", 1, n_features,
                                         default_value=1),
            UniformIntegerHyperparameter("min_samples_split", 2, 50,
                                         default_value=2),
            UniformIntegerHyperparameter("min_samples_leaf", 1, 50,
                                         default_value=1),
            UniformFloatHyperparameter("min_weight_fraction_leaf", 0.0,
                                       0.5, default_value=0.0),
            UniformIntegerHyperparameter("max_leaf_nodes", 10, 1000,
                                         default_value=100),
        ])
        return cs
Exemplo n.º 15
0
def smac():
    """Tune hidden-node count and decay with SMAC; return the best
    configuration as a plain dictionary."""
    configuration_space = ConfigurationSpace()

    # Initial ranges for the two tunables.
    hidden_nodes = UniformIntegerHyperparameter("num_of_hidden_nodes",
                                                1,
                                                max_hidden_nodes,
                                                default_value=1)
    weight_decay = UniformFloatHyperparameter("decay", 0, 0.1,
                                              default_value=0)
    configuration_space.add_hyperparameters([hidden_nodes, weight_decay])

    # creating the scenario object
    scenario = Scenario({
        "run_obj": "quality",
        "runcount-limit": hyperparameter_tuning_configs.SMAC_RUNCOUNT_LIMIT,
        "cs": configuration_space,
        "deterministic": "true",
        "abort_on_first_run_crash": "false"
    })

    # optimize using an SMAC object (renamed locally to avoid shadowing
    # this function's own name)
    optimizer = SMAC(scenario=scenario,
                     rng=np.random.RandomState(1),
                     tae_runner=train_model)

    incumbent = optimizer.optimize()
    return incumbent.get_dictionary()
def get_smac_func_and_space(func, cp_domain):
    """Translate a Cartesian-product domain into a SMAC search space.

    Args:
        func: Objective whose first returned element is the value to
            maximize.
        cp_domain: Object with a ``list_of_domains`` attribute; each
            sub-domain reports its kind via ``get_type()``.

    Returns:
        tuple: (function for SMAC to minimize, ConfigurationSpace,
        converter mapping a SMAC configuration back to the domain's
        native point representation).
    """
    from smac.configspace import ConfigurationSpace
    from ConfigSpace.hyperparameters import CategoricalHyperparameter, \
         UniformFloatHyperparameter, UniformIntegerHyperparameter
    smac_configs = []
    num_dims_per_domain = []
    disc_spaces = []
    cp_dom_types = []
    # Iterate through each domain, emitting one hyperparameter per
    # dimension; counter keeps the variable labels globally unique.
    counter = 0
    for dom in cp_domain.list_of_domains:
        dom_type = dom.get_type()
        cp_dom_types.append(dom_type)
        if dom_type == 'euclidean':
            num_dims_per_domain.append(dom.get_dim())
            for bds in dom.bounds:
                disc_spaces.append(None)
                counter, var_label = _get_var_label_with_counter(counter)
                smac_configs.append(
                    UniformFloatHyperparameter(
                        var_label,
                        bds[0],
                        bds[1],
                        default_value=(bds[0] + bds[1]) / 2))
        elif dom_type == 'integral':
            num_dims_per_domain.append(dom.get_dim())
            for bds in dom.bounds:
                disc_spaces.append(None)
                counter, var_label = _get_var_label_with_counter(counter)
                smac_configs.append(
                    UniformIntegerHyperparameter(
                        var_label,
                        bds[0],
                        bds[1],
                        default_value=(bds[0] + bds[1]) // 2))
        elif dom_type in ['prod_discrete', 'prod_discrete_numeric']:
            num_dims_per_domain.append(dom.get_dim())
            for lois in dom.list_of_list_of_items:
                # Discrete items are indexed; the converter restores them.
                curr_disc_to_idx_converter = DiscItemsToIndexConverter(lois)
                disc_spaces.append(curr_disc_to_idx_converter)
                counter, var_label = _get_var_label_with_counter(counter)
                smac_configs.append(
                    CategoricalHyperparameter(
                        var_label,
                        curr_disc_to_idx_converter.indices,
                        default_value=curr_disc_to_idx_converter.indices[0]))
    smac_space = ConfigurationSpace()
    smac_space.add_hyperparameters(smac_configs)

    # PEP 8 (E731): named functions instead of lambda assignments.
    def smac_convert_pt_back(x):
        """Map a SMAC configuration back to the cp_domain representation."""
        return _convert_alphabetical_dict_repr_to_cp_domain_repr(
            x, num_dims_per_domain, cp_dom_types, disc_spaces)

    def smac_func_to_min(x):
        """Negate func so SMAC's minimization maximizes it."""
        return -func(smac_convert_pt_back(x))[0]

    return smac_func_to_min, smac_space, smac_convert_pt_back
Exemplo n.º 17
0
    def _get_cfg(self):
        """Build the search space; max_features is capped at the number
        of columns in the training data."""
        n_features = self.X_train.shape[1]

        cs = ConfigurationSpace()
        cs.add_hyperparameters([
            UniformIntegerHyperparameter("max_depth", 5, 16,
                                         default_value=5),
            UniformIntegerHyperparameter("min_samples_split", 200, 1000,
                                         default_value=200),
            UniformIntegerHyperparameter("min_samples_leaf", 30, 70,
                                         default_value=30),
            UniformIntegerHyperparameter("max_features", 1, n_features,
                                         default_value=1),
            UniformFloatHyperparameter("subsample", 0.6, 0.9,
                                       default_value=0.6),
        ])
        return cs
Exemplo n.º 18
0
def main_loop(problem):
    """Run a SMAC search over a KNeighbors-style configuration space.

    Args:
        problem: Identifier used only to name the SMAC output directory.

    Returns:
        The incumbent (best found) configuration.
    """
    logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

    cs = ConfigurationSpace()

    n_neighbors = UniformIntegerHyperparameter("n_neighbors", 2,10, default_value=5)
    cs.add_hyperparameter(n_neighbors)

    weights = CategoricalHyperparameter("weights", ["uniform","distance"], default_value="uniform")
    algorithm = CategoricalHyperparameter("algorithm", ["ball_tree", "kd_tree","brute","auto"], default_value="auto")
    cs.add_hyperparameters([weights, algorithm])

    # leaf_size is only active for the tree-based algorithms, hence the
    # InCondition below.
    leaf_size = UniformIntegerHyperparameter("leaf_size", 1, 100, default_value=50)
    cs.add_hyperparameter(leaf_size)
    use_leaf_size= InCondition(child=leaf_size, parent=algorithm, values=["ball_tree","kd_tree"])
    cs.add_condition(use_leaf_size)

    p = UniformIntegerHyperparameter("p", 1,3, default_value=2)
    cs.add_hyperparameter(p)

    # Scenario object
    max_eval=100000
    scenario = Scenario({"run_obj": "quality",   # we optimize quality (alternatively runtime)
                         "runcount-limit": max_eval,  # maximum function evaluations
                         "cs": cs,                        # configuration space
                         "shared_model": True,
                         "output_dir": "/home/naamah/Documents/CatES/result_All/smac/KNN/run_{}_{}_{}".format(max_eval,
                                                                                                           datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d_%H:%M:%S'),
                                                                                                              problem)

                         # "output_dir": "/home/naamah/Documents/CatES/result_All/smac/KNN/{}/run_{}_{}".format(problem,max_eval, datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d_%H:%M:%S_%f')),
                         # "input_psmac_dirs":"/home/naamah/Documents/CatES/result_All/",
                         # "deterministic": "true"
                         })

    # Example call of the function
    # It returns: Status, Cost, Runtime, Additional Infos
    # NOTE(review): the evaluator is named svm_from_cfg although this
    # space configures KNN — confirm the callable actually builds KNN.
    def_value = svm_from_cfg(cs.get_default_configuration())
    print("Default Value: %.2f" % (def_value))

    # Optimize, using a SMAC-object
    print("Optimizing! Depending on your machine, this might take a few minutes.")
    smac = SMAC(scenario=scenario,tae_runner=svm_from_cfg)

    incumbent = smac.optimize()

    inc_value = svm_from_cfg(incumbent)
    print("Optimized Value: %.2f" % (inc_value))

    return (incumbent)


# main_loop()
Exemplo n.º 19
0
    def getPCS(self):
        """Build the search space (original note translated from Chinese):

        smoothing: [0.01, 100], default 1.
        """
        space = ConfigurationSpace()
        space.add_hyperparameters([
            UniformFloatHyperparameter("smoothing", 0.01, 100,
                                       default_value=1),
        ])
        return space
Exemplo n.º 20
0
    def config_space(self):
        """Return the logistic-regression hyperparameter search space."""

        config = ConfigurationSpace()
        config.seed(self.random_state)
        config.add_hyperparameters((
            UniformFloatHyperparameter(
                'C', lower=1e-8, upper=1000.0, default_value=1.0
            ),
            CategoricalHyperparameter(
                'penalty', ['l1', 'l2'], default_value='l1'
            ),
        ))

        return config
Exemplo n.º 21
0
    def config_space(self):
        """Return the MI hyperparameter configuration space."""

        config = ConfigurationSpace()
        config.seed(self.random_state)
        config.add_hyperparameters((
            UniformIntegerHyperparameter('num_neighbors', lower=10,
                                         upper=100, default_value=20),
            UniformIntegerHyperparameter('num_features', lower=2,
                                         upper=50, default_value=20),
        ))

        return config
Exemplo n.º 22
0
def create_configspace():
    """Build the boosting-style search space; ``q`` gives each numeric
    hyperparameter a fixed quantization step."""
    params = [
        UniformFloatHyperparameter("n_estimators", 100, 600,
                                   default_value=200, q=50),
        UniformFloatHyperparameter("eta", 0.025, 0.3, default_value=0.3,
                                   q=0.025),
        UniformIntegerHyperparameter("min_child_weight", 1, 10,
                                     default_value=1),
        UniformIntegerHyperparameter("max_depth", 1, 14, default_value=6),
        UniformFloatHyperparameter("subsample", 0.5, 1, default_value=1,
                                   q=0.05),
        UniformFloatHyperparameter("gamma", 0, 1, default_value=0, q=0.1),
        UniformFloatHyperparameter("colsample_bytree", 0.5, 1,
                                   default_value=1., q=0.05),
        UniformFloatHyperparameter("alpha", 0, 10, default_value=0., q=1.),
        # Named "lambda" in the space; a leading underscore avoided the
        # Python keyword in the original.
        UniformFloatHyperparameter("lambda", 1, 2, default_value=1, q=0.1),
        CategoricalHyperparameter("scale_pos_weight",
                                  [0.01, 0.1, 1., 10, 100],
                                  default_value=1.),
    ]
    cs = ConfigurationSpace()
    cs.add_hyperparameters(params)
    return cs
Exemplo n.º 23
0
 def _get_acm_cs(self):
     """
         Returns a configuration space designed for querying
         ~smac.optimizer.smbo._component_builder: a surrogate-model
         choice plus an acquisition-function choice, each with
         conditional sub-parameters.

         Returns
         -------
             ConfigurationSpace
     """

     cs = ConfigurationSpace()
     cs.seed(self.rng.randint(0,2**20))

     # Surrogate model choice: random forest ("RF") or Gaussian process ("GP").
     model = CategoricalHyperparameter("model", choices=("RF", "GP"))

     num_trees = Constant("num_trees", value=10)
     bootstrap = CategoricalHyperparameter("do_bootstrapping", choices=(True, False), default_value=True)
     ratio_features = CategoricalHyperparameter("ratio_features", choices=(3 / 6, 4 / 6, 5 / 6, 1), default_value=1)
     min_split = UniformIntegerHyperparameter("min_samples_to_split", lower=1, upper=10, default_value=2)
     min_leaves = UniformIntegerHyperparameter("min_samples_in_leaf", lower=1, upper=10, default_value=1)

     cs.add_hyperparameters([model, num_trees, bootstrap, ratio_features, min_split, min_leaves])

     # Forest-specific parameters are only active when model == "RF".
     inc_num_trees = InCondition(num_trees, model, ["RF"])
     inc_bootstrap = InCondition(bootstrap, model, ["RF"])
     inc_ratio_features = InCondition(ratio_features, model, ["RF"])
     inc_min_split = InCondition(min_split, model, ["RF"])
     inc_min_leavs = InCondition(min_leaves, model, ["RF"])

     cs.add_conditions([inc_num_trees, inc_bootstrap, inc_ratio_features, inc_min_split, inc_min_leavs])

     acq  = CategoricalHyperparameter("acq_func", choices=("EI", "LCB", "PI", "LogEI"))
     par_ei = UniformFloatHyperparameter("par_ei", lower=-10, upper=10)
     par_pi = UniformFloatHyperparameter("par_pi", lower=-10, upper=10)
     par_logei = UniformFloatHyperparameter("par_logei", lower=0.001, upper=100, log=True)
     par_lcb = UniformFloatHyperparameter("par_lcb", lower=0.0001, upper=0.9999)

     cs.add_hyperparameters([acq, par_ei, par_pi, par_logei, par_lcb])

     # Each acquisition's trade-off parameter is only active for its own choice.
     inc_par_ei = InCondition(par_ei, acq, ["EI"])
     inc_par_pi = InCondition(par_pi, acq, ["PI"])
     inc_par_logei = InCondition(par_logei, acq, ["LogEI"])
     inc_par_lcb = InCondition(par_lcb, acq, ["LCB"])

     cs.add_conditions([inc_par_ei, inc_par_pi, inc_par_logei, inc_par_lcb])

     return cs
Exemplo n.º 24
0
    def getPCS(self):
        """Build the GLM-style search space (original notes translated
        from Chinese).

        Coded hyperparameters:
            maxIter: [1, 100], maximum iterations, default 50.
            regParam: [0, 0.4], regularization parameter, default 1e-4.
            tol: [1e-6, 1e-1], convergence tolerance, default 1e-6.

        Family/link correspondence from the original notes:
            gaussian -> identity, log, inverse
            binomial -> logit, probit, cloglog
            poisson  -> log, identity, sqrt
            gamma    -> inverse, identity, log
            tweedie  -> power link via linkPower; default link power is
                        1 - variancePower.
        Only gaussian and poisson are coded below; each link choice is
        conditional on its family being selected.
        """
        space = ConfigurationSpace()
        maxIter = UniformIntegerHyperparameter("maxIter", 1, 100,
                                               default_value=50)
        regParam = UniformFloatHyperparameter("regParam", 0, 0.4,
                                              default_value=1e-04)
        tol = UniformFloatHyperparameter("tol", 1e-06, 1e-01,
                                         default_value=1e-06)
        family = CategoricalHyperparameter("family", ["gaussian", "poisson"],
                                           default_value="gaussian")
        gaussianLink = CategoricalHyperparameter(
            "gaussianLink", ["identity", "log", "inverse"],
            default_value="identity")
        poissonLink = CategoricalHyperparameter("poissonLink",
                                                ["log", "identity", "sqrt"],
                                                default_value="log")
        space.add_hyperparameters(
            [maxIter, regParam, tol, family, gaussianLink, poissonLink])
        # Link choices only apply when their family is selected.
        space.add_condition(
            InCondition(child=gaussianLink, parent=family,
                        values=["gaussian"]))
        space.add_condition(
            InCondition(child=poissonLink, parent=family,
                        values=["poisson"]))
        return space
Exemplo n.º 25
0
def best_hyperparams_smac():
    """Tune SVD hyperparameters with SMAC4HPO and return the incumbent.

    Builds a configuration space from the module-level ``SVD_SMAC_SPACE``
    (a mapping of name -> Hyperparameter), runs up to 100 evaluations of
    the ``_hyperopt`` target function, and returns the best configuration
    found.

    Returns:
        The incumbent Configuration selected by SMAC.
    """
    cs = ConfigurationSpace()
    # Only the Hyperparameter objects (the dict values) go into the space.
    cs.add_hyperparameters(list(SVD_SMAC_SPACE.values()))
    scenario = Scenario({
        "run_obj": "quality",   # we optimize quality (alternatively runtime)
        "runcount-limit": 100,  # max. number of function evaluations
        "cs": cs,               # configuration space
        "deterministic": "true"
    })
    smac = SMAC4HPO(
        scenario=scenario,
        rng=np.random.RandomState(42),
        tae_runner=_hyperopt,
    )
    # Return the incumbent so callers can use the best configuration
    # (previously the result was silently discarded; the unused
    # `iteration` local has also been removed).
    return smac.optimize()
Exemplo n.º 26
0
def main():
    """Build a (mostly fixed) RNN hyperparameter space and tune it with SMAC."""
    space = ConfigurationSpace()

    # Most choices are pinned via single-value categoricals; only the
    # learning-rate schedule and network type are really searched.
    hyperparameters = [
        CategoricalHyperparameter("cell_size", [128],
                                  default_value=128),  # kick up to 256
        CategoricalHyperparameter("n_cell", [2], default_value=2),
        CategoricalHyperparameter("dropout", [0.5], default_value=0.5),
        CategoricalHyperparameter("nn_type", ['RNN', 'LSTM', 'GRU'],
                                  default_value='LSTM'),
        CategoricalHyperparameter("activation", ['sigmoid'],
                                  default_value='sigmoid'),
        CategoricalHyperparameter("optimizer", ['adam'],
                                  default_value='adam'),
        CategoricalHyperparameter("optimizer_lr",
                                  [.001, .003, .006, .01, 0.03, 0.1],
                                  default_value=.01),
        UniformFloatHyperparameter("learning_decay_rate", 0, 0.9,
                                   default_value=.6),
        CategoricalHyperparameter("epochs", [10], default_value=10),
    ]
    space.add_hyperparameters(hyperparameters)

    scenario = Scenario({
        "run_obj": "quality",
        "runcount-limit": 32,
        "cs": space,
        "deterministic": "true"
    })
    # Force all SMAC output into a fixed location.
    scenario.output_dir_for_this_run = "C:\\NNwork\\HFSF\\SMAC3out"
    scenario.output_dir = "C:\\NNwork\\HFSF\\SMAC3out"

    optimizer_obj = SMAC(scenario=scenario,
                         rng=np.random.RandomState(23),
                         tae_runner=rnn_from_cfg)
    incumbent = optimizer_obj.optimize()

    print_incumb(incumbent)
    np.save("C:\\NNwork\\HFSF\\SMAC3out\\best.cfg", incumbent)
Exemplo n.º 27
0
def smac_opt():
    """Maximize a 5-parameter JVM trading-strategy score via SMAC4HPO."""
    # Import ConfigSpace and different types of parameters
    from smac.configspace import ConfigurationSpace
    from ConfigSpace.hyperparameters import UniformFloatHyperparameter
    # Import SMAC-utilities
    from smac.scenario.scenario import Scenario
    from smac.facade.smac_hpo_facade import SMAC4HPO

    n_params = 5

    def objective(x):
        # Bridge into the JVM: RunStrategy scores one parameter vector.
        from jnius import autoclass
        RunStrategy = autoclass('RunStrategy')

        params = [x[f'x{i}'] for i in range(n_params)]
        print(f'params:{params}')
        # Negate the score: SMAC minimizes, the strategy returns a reward.
        ret = -RunStrategy.runStrategyWithConfiguration(params, 20)
        print(ret)
        return ret

    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformFloatHyperparameter(f"x{i}", 0, 100, default_value=1)
        for i in range(n_params)
    ])

    # Scenario object
    scenario = Scenario({
        "run_obj": "quality",       # we optimize quality (alternatively runtime)
        "runcount-limit": 999999,   # effectively unlimited evaluations
        "cs": cs,                   # configuration space
        "deterministic": "false"
    })

    # Optimize, using a SMAC-object
    smac = SMAC4HPO(scenario=scenario,
                    rng=np.random.RandomState(42),
                    tae_runner=objective)

    smac.optimize()
Exemplo n.º 28
0
 def bayesian_optimize(self):
     """Conduct Bayesian optimization on the hyperparameters, starting at current values.

     Builds a SMAC search space from ``util.<algorithm>_range`` around the
     current hyperparameters, runs a small evaluation budget, and stores the
     incumbent configuration and its error back on ``self``. Returns ``self``.
     """
     # GNB and Perceptron have no tunable hyperparameters here — nothing to do.
     if self.algorithm in ['GNB','Perceptron']:
         return self
     else:
         cs = ConfigurationSpace()
         # util.<algorithm>_range(current) returns a dict of name -> Hyperparameter;
         # only the Hyperparameter objects go into the configuration space.
         cs.add_hyperparameters(list(getattr(util, self.algorithm + '_range')(self.hyperparameters).values()))
         # Evaluation budget: kNN with k == 1 gets 3 runs, other kNN 5, all else 10.
         if self.algorithm == 'kNN':
             if self.hyperparameters['k'] == 1: num = 3
             else: num = 5
         else: num = 10
         scenario = Scenario({'run_obj': 'quality', 'runcount-limit': num, 'cs': cs, 'deterministic': 'true', 'memory_limit': None})
         smac = SMAC(scenario=scenario, rng=np.random.RandomState(100), tae_runner=self.error_function)
         # NOTE(review): the finally-branch always overwrites `incumbent` with the
         # solver's current incumbent, so optimize()'s return value is never used;
         # this looks deliberate — it recovers a result even if optimize() raises.
         try:
             incumbent = smac.optimize()
         finally:
             incumbent = smac.solver.incumbent
         # Re-run the incumbent once to record its error; index [1] is presumably
         # the cost in the runner's result tuple — TODO confirm against SMAC docs.
         self.error = smac.get_tae_runner().run(incumbent, 1)[1]
         self.hyperparameters = incumbent.get_dictionary()
         self.bayesian_optimized = True
         return self
Exemplo n.º 29
0
    def config_space(self):
        """Return the SVC hyperparameter search space.

        Kernel-specific parameters are conditional: ``degree`` is active only
        for the poly kernel, ``coef0`` for poly and sigmoid.
        """
        penalty = UniformFloatHyperparameter(
            'C', lower=1e-8, upper=100.0, default_value=1.0
        )
        use_shrinking = CategoricalHyperparameter(
            'shrinking', [True, False], default_value=True
        )
        kernel_fn = CategoricalHyperparameter(
            'kernel', ['linear', 'rbf', 'poly', 'sigmoid'],
        )
        poly_degree = UniformIntegerHyperparameter(
            'degree', lower=1, upper=5, default_value=2
        )
        kernel_coef = UniformFloatHyperparameter(
            'coef0', lower=0.0, upper=10.0, default_value=0.0
        )

        space = ConfigurationSpace()
        # Seed the space so sampling is reproducible.
        space.seed(self.random_state)
        space.add_hyperparameters([
            penalty,
            use_shrinking,
            kernel_fn,
            poly_degree,
            kernel_coef,
        ])
        # Conditionals on hyperparameters specific to kernels.
        space.add_conditions([
            InCondition(child=poly_degree, parent=kernel_fn, values=['poly']),
            InCondition(child=kernel_coef, parent=kernel_fn,
                        values=['poly', 'sigmoid']),
        ])
        return space
Exemplo n.º 30
0
    def config_space(self):
        """XGBoost hyperparameter space."""

        # The number of boosted trees in the ensemble.
        num_trees = UniformIntegerHyperparameter(
            'n_estimators', lower=10, upper=200, default_value=100
        )
        # Maximum depth per tree. Boosting is usually configured with weak
        # learners (shallow trees); this range allows deep trees as well.
        tree_depth = UniformIntegerHyperparameter(
            'max_depth', lower=5, upper=500, default_value=100
        )
        # L1 regularization term on weights.
        l1_term = UniformFloatHyperparameter(
            'reg_alpha', lower=1e-8, upper=100, default_value=1e-3
        )
        # L2 regularization term on weights.
        l2_term = UniformFloatHyperparameter(
            'reg_lambda', lower=1e-8, upper=100, default_value=1e-3
        )
        step_size = UniformFloatHyperparameter(
            'learning_rate', lower=1e-8, upper=50, default_value=0.01
        )
        # NOTE(review): `min_data_in_leaf` is LightGBM's parameter name
        # (XGBoost calls the analogue `min_child_weight`) — verify the
        # downstream estimator accepts it.
        leaf_min_rows = UniformIntegerHyperparameter(
            'min_data_in_leaf', lower=2, upper=5, default_value=3
        )

        space = ConfigurationSpace()
        # Seed the space so sampling is reproducible.
        space.seed(self.random_state)
        space.add_hyperparameters([
            num_trees,
            leaf_min_rows,
            tree_depth,
            l1_term,
            l2_term,
            step_size,
        ])
        return space