Beispiel #1
0
def get_random_forest_default_search_space():
    """Build the default ConfigSpace search space for a random-forest pipeline.

    Returns
    -------
    ConfigSpace.ConfigurationSpace
        Space covering the imputation strategy plus the main
        RandomForestClassifier hyperparameters.
    """
    space = ConfigSpace.ConfigurationSpace()

    space.add_hyperparameters([
        ConfigSpace.CategoricalHyperparameter(
            'imputation__strategy', ['mean', 'median', 'most_frequent']),
        ConfigSpace.Constant("classifier__n_estimators", 100),
        ConfigSpace.CategoricalHyperparameter(
            "classifier__criterion", ["gini", "entropy"],
            default_value="gini"),
        # The effective number of features used in the forest is m^max_features,
        # where m is the total feature count. The default of 0.5 yields sqrt(m)
        # features, which corresponds with Geurts' heuristic.
        ConfigSpace.UniformFloatHyperparameter(
            "classifier__max_features", 0., 1., default_value=0.5),
        ConfigSpace.UniformIntegerHyperparameter(
            "classifier__min_samples_split", 2, 20, default_value=2),
        ConfigSpace.UniformIntegerHyperparameter(
            "classifier__min_samples_leaf", 1, 20, default_value=1),
        ConfigSpace.UnParametrizedHyperparameter(
            "classifier__min_weight_fraction_leaf", 0.),
        ConfigSpace.CategoricalHyperparameter(
            "classifier__bootstrap", ["True", "False"], default_value="True"),
    ])

    return space
    def __init__(self, development_stage=-1):
        """Initialize the benchmark search space.

        Parameters
        ----------
        development_stage : int
            Stage selector; stage 0 uses a budget of 50 and additionally
            exposes a ``batch_size`` hyperparameter, any other value uses
            a budget of 100.
        """
        super().__init__()

        self.development_stage = development_stage

        # Integer-coded architecture / optimizer choices.
        for name, upper in (("dropout_1", 2), ("dropout_2", 2), ("init_lr", 5)):
            self.add_hyperparameter(
                CS.UniformIntegerHyperparameter(name, lower=0, upper=upper)
            )
        for name in ("activation_fn_1", "activation_fn_2"):
            self.add_hyperparameter(
                CS.CategoricalHyperparameter(name, choices=["relu", "tanh"])
            )

        if development_stage == 0:
            self.budget = 50
            self.add_hyperparameter(
                CS.UniformIntegerHyperparameter("batch_size", lower=0, upper=3)
            )
        else:
            self.budget = 100
    def get_configuration_space():
        """Build the configuration space for the two-layer MLP benchmark."""
        cs = ConfigSpace.ConfigurationSpace()

        unit_choices = [16, 32, 64, 128, 256, 512]
        dropout_choices = [0.0, 0.3, 0.6]
        activations = ["tanh", "relu"]

        cs.add_hyperparameter(
            ConfigSpace.OrdinalHyperparameter("n_units_1", unit_choices))
        cs.add_hyperparameter(
            ConfigSpace.OrdinalHyperparameter("n_units_2", unit_choices))
        cs.add_hyperparameter(
            ConfigSpace.OrdinalHyperparameter("dropout_1", dropout_choices))
        cs.add_hyperparameter(
            ConfigSpace.OrdinalHyperparameter("dropout_2", dropout_choices))
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("activation_fn_1",
                                                  activations))
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("activation_fn_2",
                                                  activations))
        # Hand-picked, roughly log-spaced grid of initial learning rates.
        cs.add_hyperparameter(
            ConfigSpace.OrdinalHyperparameter(
                "init_lr", [5 * 1e-4, 1e-3, 5 * 1e-3, 1e-2, 5 * 1e-2, 1e-1]))
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("lr_schedule",
                                                  ["cosine", "const"]))
        cs.add_hyperparameter(
            ConfigSpace.OrdinalHyperparameter("batch_size", [8, 16, 32, 64]))
        return cs
Beispiel #4
0
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The SVM configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/libsvm_svc.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.svm.SVC', seed)

    C = ConfigSpace.UniformFloatHyperparameter(
        name='C', lower=0.03125, upper=32768, log=True, default_value=1.0)
    kernel = ConfigSpace.CategoricalHyperparameter(
        name='kernel', choices=['rbf', 'poly', 'sigmoid'], default_value='rbf')
    degree = ConfigSpace.UniformIntegerHyperparameter(
        name='degree', lower=1, upper=5, default_value=3)
    gamma = ConfigSpace.UniformFloatHyperparameter(
        name='gamma', lower=3.0517578125e-05, upper=8, log=True,
        default_value=0.1)
    coef0 = ConfigSpace.UniformFloatHyperparameter(
        name='coef0', lower=-1, upper=1, default_value=0)
    shrinking = ConfigSpace.CategoricalHyperparameter(
        name='shrinking', choices=[True, False], default_value=True)
    tol = ConfigSpace.UniformFloatHyperparameter(
        name='tol', lower=1e-5, upper=1e-1, default_value=1e-3, log=True)
    max_iter = ConfigSpace.UnParametrizedHyperparameter('max_iter', -1)

    hyperparameters = [
        C, kernel, degree, gamma, coef0, shrinking, tol, max_iter
    ]

    # 'degree' is only meaningful for the polynomial kernel; 'coef0' only for
    # the polynomial and sigmoid kernels.
    conditions = [
        ConfigSpace.EqualsCondition(degree, kernel, 'poly'),
        ConfigSpace.InCondition(coef0, kernel, ['poly', 'sigmoid']),
    ]

    return ConfigSpaceWrapper(cs, hyperparameters, conditions)
Beispiel #5
0
def get_hyperparameter_search_space(seed=None):
    """
    Neural Network search space based on a best effort using the scikit-learn
    implementation. Note that for state of the art performance, other packages
    could be preferred.

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('ResNet18_classifier', seed)

    cs.add_hyperparameters([
        ConfigSpace.CategoricalHyperparameter(
            name='batch_size',
            choices=[1, 2, 4, 8, 16, 32, 64, 128, 256, 512],
            default_value=128),
        ConfigSpace.UniformFloatHyperparameter(
            name='learning_rate', lower=1e-6, upper=1e-1, log=True,
            default_value=1e-2),
        ConfigSpace.UniformIntegerHyperparameter(
            name='epochs', lower=1, upper=50, default_value=20),
        ConfigSpace.CategoricalHyperparameter(
            name='shuffle', choices=[True, False], default_value=True),
        ConfigSpace.UniformFloatHyperparameter(
            name='momentum', lower=0, upper=1, default_value=0.9),
        ConfigSpace.UniformFloatHyperparameter(
            name='weight_decay', lower=1e-6, upper=1e-2, log=True,
            default_value=5e-4),
    ])

    return cs
Beispiel #6
0
def get_hyperparameter_search_space(seed=None):
    """
    Neural Network search space based on a best effort using the scikit-learn
    implementation. Note that for state of the art performance, other packages
    could be preferred.

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('ResNet18_classifier', seed)

    def _switch(name, default):
        # Helper for the many boolean on/off hyperparameters in this space.
        return ConfigSpace.CategoricalHyperparameter(
            name=name, choices=[True, False], default_value=default)

    cs.add_hyperparameters([
        ConfigSpace.CategoricalHyperparameter(
            name='batch_size', choices=[32, 64, 128, 256, 512],
            default_value=128),
        ConfigSpace.UniformFloatHyperparameter(
            name='learning_rate_init', lower=1e-6, upper=1e-1, log=True,
            default_value=1e-2),
        ConfigSpace.UniformIntegerHyperparameter(
            name='epochs', lower=1, upper=50, default_value=20),
        ConfigSpace.UniformFloatHyperparameter(
            name='momentum', lower=0, upper=1, default_value=0.9),
        ConfigSpace.UniformFloatHyperparameter(
            name='weight_decay', lower=1e-6, upper=1e-2, log=True,
            default_value=5e-4),
        ConfigSpace.UniformIntegerHyperparameter(
            name='learning_rate_decay', lower=2, upper=1000, log=True,
            default_value=10),
        ConfigSpace.UniformIntegerHyperparameter(
            name='patience', lower=2, upper=200, log=False, default_value=10),
        ConfigSpace.UniformFloatHyperparameter(
            name='tolerance', lower=1e-5, upper=1e-2, log=True,
            default_value=1e-4),
        # Data-augmentation switches.
        _switch('resize_crop', False),
        _switch('horizontal_flip', False),
        _switch('vertical_flip', False),
        _switch('shuffle', True),
    ])

    return cs
Beispiel #7
0
def get_config_space():
    """Search space for a small two-block CNN: optimizer/schedule settings
    plus architecture choices.

    Returns
    -------
    CS.ConfigurationSpace
        The assembled configuration space.
    """
    config_space = CS.ConfigurationSpace()

    hyperparameters = [
        # Optimizer and learning-rate schedule.
        CS.UniformFloatHyperparameter(
            'momentum', lower=0, upper=1, default_value=0.9),
        CS.UniformFloatHyperparameter(
            'initial_lr', lower=1e-3, upper=1, default_value=1e-1, log=True),
        CS.UniformIntegerHyperparameter(
            'lr_step_length', lower=1, upper=8, default_value=4),
        CS.UniformFloatHyperparameter(
            'lr_decay_factor', lower=1e-4, upper=1, default_value=1e-1,
            log=True),
        CS.UniformFloatHyperparameter(
            'dropout_rate', lower=0, upper=0.5, default_value=0.5, log=False),
        # Architecture.
        CS.CategoricalHyperparameter(
            'num_filters_1', choices=[4, 8, 16, 32], default_value=8),
        CS.CategoricalHyperparameter(
            'num_filters_2', choices=[4, 8, 16, 32], default_value=8),
        CS.CategoricalHyperparameter(
            'kernel_size', choices=[3, 5, 7], default_value=3),
        CS.UniformIntegerHyperparameter(
            'stride', lower=1, upper=5, default_value=1),
        CS.UniformIntegerHyperparameter(
            'max_pool_size', lower=2, upper=4, default_value=2),
        CS.CategoricalHyperparameter('num_fc_units', choices=[8, 16, 32, 64]),
    ]
    for hp in hyperparameters:
        config_space.add_hyperparameter(hp)

    return config_space
Beispiel #8
0
def main(args):
    """Run a BOHB hyperparameter search for the re-ID training objective.

    Builds the ConfigSpace search space, couples a HyperBand-for-BOHB
    scheduler with a TuneBOHB searcher, launches ``tune.run`` over
    ``train_reid_tune`` and logs the best trial's config and metrics.
    """
    cfg = setup(args)

    search_space = CS.ConfigurationSpace()
    search_space.add_hyperparameters([
        CS.UniformFloatHyperparameter(name="lr", lower=1e-6, upper=1e-3),
        CS.UniformFloatHyperparameter(name="wd", lower=0, upper=1e-3),
        CS.UniformFloatHyperparameter(name="wd_bias", lower=0, upper=1e-3),
        CS.CategoricalHyperparameter(name="bsz",
                                     choices=[64, 96, 128, 160, 224, 256]),
        CS.CategoricalHyperparameter(name="num_inst",
                                     choices=[2, 4, 8, 16, 32]),
        CS.UniformIntegerHyperparameter(name="delay_iters", lower=20,
                                        upper=60),
        CS.UniformFloatHyperparameter(name="ce_scale", lower=0.1, upper=1.0),
        CS.UniformIntegerHyperparameter(name="circle_scale", lower=8,
                                        upper=256),
        CS.UniformFloatHyperparameter(name="circle_margin", lower=0.1,
                                      upper=0.5),
        CS.CategoricalHyperparameter(name="autoaug_enabled",
                                     choices=[True, False]),
        CS.CategoricalHyperparameter(name="cj_enabled", choices=[True, False]),
    ])

    # Scheduler and searcher must optimize the same metric/mode.
    metric_kwargs = dict(metric="score", mode="max")
    scheduler = HyperBandForBOHB(time_attr="training_iteration",
                                 max_t=7,
                                 **metric_kwargs)
    searcher = TuneBOHB(search_space, max_concurrent=4, **metric_kwargs)

    progress = CLIReporter(parameter_columns=["bsz", "num_inst", "lr"],
                           metric_columns=["r1", "map", "training_iteration"])

    analysis = tune.run(partial(train_reid_tune, cfg),
                        resources_per_trial={"cpu": 10, "gpu": 1},
                        search_alg=searcher,
                        num_samples=args.num_samples,
                        scheduler=scheduler,
                        progress_reporter=progress,
                        local_dir=cfg.OUTPUT_DIR,
                        keep_checkpoints_num=4,
                        name="bohb")

    best = analysis.get_best_trial("map", "max", "last")
    logger.info("Best trial config: {}".format(best.config))
    logger.info("Best trial final validation mAP: {}, Rank-1: {}".format(
        best.last_result["map"], best.last_result["r1"]))
Beispiel #9
0
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The random forest configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/random_forest.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace(
        'sklearn.ensemble.RandomForestClassifier', seed)

    hyperparameters = [
        # TODO: parameterize the number of estimators?
        ConfigSpace.Constant(name='n_estimators', value=100),
        ConfigSpace.CategoricalHyperparameter(
            name='criterion', choices=['gini', 'entropy'],
            default_value='gini'),
        ConfigSpace.UniformFloatHyperparameter(
            name='max_features', lower=0., upper=1., default_value=0.5),
        ConfigSpace.UniformIntegerHyperparameter(
            name='min_samples_split', lower=2, upper=20, default_value=2),
        ConfigSpace.UniformIntegerHyperparameter(
            name='min_samples_leaf', lower=1, upper=20, default_value=1),
        ConfigSpace.UnParametrizedHyperparameter(
            name='min_weight_fraction_leaf', value=0.),
        ConfigSpace.CategoricalHyperparameter(
            name='bootstrap', choices=[True, False], default_value=True),
    ]

    # No conditions: every hyperparameter is always active.
    return ConfigSpaceWrapper(cs, hyperparameters, None)
    def get_configuration_space(self):
        """Build the cell search space: one op choice per interior node and
        one parent-combination choice per non-input choice block."""
        cs = ConfigSpace.ConfigurationSpace()

        # Every node except the first (input) and last (output) picks an op.
        interior_nodes = list(self.num_parents_per_node.keys())[1:-1]
        for node in interior_nodes:
            cs.add_hyperparameter(ConfigSpace.CategoricalHyperparameter(
                "choice_block_{}_op".format(node),
                [CONV1X1, CONV3X3, MAXPOOL3X3]))

        # Every node except the input selects which parents feed into it.
        for block_index, num_parents in list(
                self.num_parents_per_node.items())[1:]:
            cs.add_hyperparameter(ConfigSpace.CategoricalHyperparameter(
                "choice_block_{}_parents".format(block_index),
                parent_combinations(node=block_index,
                                    num_parents=num_parents)))
        return cs
Beispiel #11
0
    def get_configuration_space():
        """NAS-Bench-101 style space: an op per node, an edge budget, and one
        continuous score per possible edge of the DAG."""
        cs = ConfigSpace.ConfigurationSpace()

        ops_choices = ['conv1x1-bn-relu', 'conv3x3-bn-relu', 'maxpool3x3']
        for node in range(5):
            cs.add_hyperparameter(
                ConfigSpace.CategoricalHyperparameter("op_node_%d" % node,
                                                      ops_choices))

        cs.add_hyperparameter(
            ConfigSpace.UniformIntegerHyperparameter("num_edges", 0,
                                                     MAX_EDGES))

        # One score per entry of the upper-triangular adjacency matrix.
        num_possible_edges = VERTICES * (VERTICES - 1) // 2
        for edge in range(num_possible_edges):
            cs.add_hyperparameter(
                ConfigSpace.UniformFloatHyperparameter("edge_%d" % edge, 0, 1))
        return cs
Beispiel #12
0
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The Bernoulli NB configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/bernoulli_nb.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.naive_bayes.BernoulliNB',
                                        seed)

    hyperparameters = [
        # The smoothing parameter is a non-negative float; the upper bound and
        # log scale here are a best-effort guess rather than a known-good
        # range -- adjust if a proper range is known.
        ConfigSpace.UniformFloatHyperparameter(
            name='alpha', lower=1e-2, upper=100, default_value=1, log=True),
        ConfigSpace.CategoricalHyperparameter(
            name='fit_prior', choices=[True, False], default_value=True),
    ]

    return ConfigSpaceWrapper(cs, hyperparameters, None)
Beispiel #13
0
	def add_hyperparameters(self):
		"""Register one hyperparameter of each basic ConfigSpace type."""
		hps = [
			CS.CategoricalHyperparameter('cat1', choices=['foo', 'bar', 'baz']),
			CS.UniformFloatHyperparameter('cont1', lower=0, upper=1),
			CS.UniformIntegerHyperparameter('int1', lower=-2, upper=2),
			CS.OrdinalHyperparameter('ord1', ['cold', 'mild', 'warm', 'hot']),
		]
		self.configspace.add_hyperparameters(hps)
Beispiel #14
0
    def get_configuration_space(
            seed: Union[int, None] = None) -> CS.ConfigurationSpace:
        """
        Return the CS representation of the search space.
        From https://github.com/D-X-Y/AutoDL-Projects/blob/master/exps/algos/BOHB.py
        Author: https://github.com/D-X-Y [[email protected]]

        Parameters
        ----------
        seed : int, None
            Random seed for the configuration space; a random one is drawn
            when None is given.

        Returns
        -------
        CS.ConfigurationSpace -
            Containing the benchmark's hyperparameter
        """
        if seed is None:
            seed = np.random.randint(1, 100000)
        cs = CS.ConfigurationSpace(seed=seed)

        operations = NasBench201BaseBenchmark.get_search_spaces(
            'cell', 'nas-bench-201')
        # One categorical op choice per directed edge j -> i of the cell DAG.
        edge_hps = []
        for i in range(1, MAX_NODES):
            for j in range(i):
                edge_hps.append(
                    CS.CategoricalHyperparameter(f'{i}<-{j}', operations))
        cs.add_hyperparameters(edge_hps)
        return cs
Beispiel #15
0
        def resolve_value(
                par: str,
                domain: Domain) -> ConfigSpace.hyperparameters.Hyperparameter:
            """Translate a Tune ``Domain`` into a ConfigSpace hyperparameter.

            Raises
            ------
            ValueError
                If the domain/sampler combination has no ConfigSpace
                counterpart.
            """
            quantize = None

            # A Quantized sampler wraps the real sampler; unwrap it and keep
            # the quantization step.
            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                quantize = sampler.q
                sampler = sampler.sampler

            def bounds():
                # Snap the domain bounds inward onto the quantization grid
                # (no-op when no quantization is requested).
                if quantize:
                    return (math.ceil(domain.lower / quantize) * quantize,
                            math.floor(domain.upper / quantize) * quantize)
                return domain.lower, domain.upper

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    lower, upper = bounds()
                    return ConfigSpace.UniformFloatHyperparameter(
                        par, lower=lower, upper=upper, q=quantize, log=True)
                elif isinstance(sampler, Uniform):
                    lower, upper = bounds()
                    return ConfigSpace.UniformFloatHyperparameter(
                        par, lower=lower, upper=upper, q=quantize, log=False)
                elif isinstance(sampler, Normal):
                    return ConfigSpace.NormalFloatHyperparameter(
                        par,
                        mu=sampler.mean,
                        sigma=sampler.sd,
                        q=quantize,
                        log=False)

            elif isinstance(domain, Integer):
                if isinstance(sampler, Uniform):
                    lower, upper = bounds()
                    return ConfigSpace.UniformIntegerHyperparameter(
                        par, lower=lower, upper=upper, q=quantize, log=False)

            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return ConfigSpace.CategoricalHyperparameter(
                        par, choices=domain.categories)

            raise ValueError("TuneBOHB does not support parameters of type "
                             "`{}` with samplers of type `{}`".format(
                                 type(domain).__name__,
                                 type(domain.sampler).__name__))
Beispiel #16
0
def get_size_config_space(search_space):
    """One categorical width choice per layer of the size search space.

    Parameters
    ----------
    search_space: dict
        Expects 'numbers' (layer count) and 'candidates' (width choices).
    """
    cs = ConfigSpace.ConfigurationSpace()
    candidates = search_space['candidates']
    for layer_index in range(search_space['numbers']):
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter(
                'layer-{:}'.format(layer_index), candidates))
    return cs
Beispiel #17
0
def get_configuration_space(max_nodes, search_space):
    """One categorical op choice per directed edge '{i}<-{j}' of a densely
    connected cell with ``max_nodes`` nodes."""
    cs = ConfigSpace.ConfigurationSpace()
    edges = ((i, j) for i in range(1, max_nodes) for j in range(i))
    for dst, src in edges:
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter(
                '{:}<-{:}'.format(dst, src), search_space))
    return cs
Beispiel #18
0
    def get_configuration_space():
        """Search space for the unpruned graph: one op choice per edge."""
        cs = ConfigSpace.ConfigurationSpace()

        ops_choices = ['nor_conv_3x3', 'nor_conv_1x1', 'avg_pool_3x3', 'skip_connect', 'none']
        # The cell has six edges in total.
        for edge_index in range(6):
            cs.add_hyperparameter(
                ConfigSpace.CategoricalHyperparameter("edge_%d" % edge_index,
                                                      ops_choices))
        return cs
Beispiel #19
0
def get_topology_config_space(search_space, max_nodes=4):
    """Topology search space: a categorical op per directed edge '{i}<-{j}'
    of a densely connected cell with ``max_nodes`` nodes."""
    cs = ConfigSpace.ConfigurationSpace()
    for dst in range(1, max_nodes):
        for src in range(dst):
            cs.add_hyperparameter(
                ConfigSpace.CategoricalHyperparameter(
                    '{:}<-{:}'.format(dst, src), search_space))
    return cs
Beispiel #20
0
    def testConvertBOHB(self):
        """Check that TuneBOHB.convert_search_space builds a ConfigSpace
        equivalent to a hand-built one, and that plain-dict and native
        ConfigSpace inputs both work end to end."""
        from ray.tune.suggest.bohb import TuneBOHB
        import ConfigSpace

        # Tune-native space; "b/y" is a plain constant (4), not a sampled
        # hyperparameter.
        config = {
            "a": tune.sample.Categorical([2, 3, 4]).uniform(),
            "b": {
                "x": tune.sample.Integer(0, 5).quantized(2),
                "y": 4,
                "z": tune.sample.Float(1e-4, 1e-2).loguniform()
            }
        }
        converted_config = TuneBOHB.convert_search_space(config)
        # Hand-built reference: nested keys are flattened with "/".
        bohb_config = ConfigSpace.ConfigurationSpace()
        bohb_config.add_hyperparameters([
            ConfigSpace.CategoricalHyperparameter("a", [2, 3, 4]),
            ConfigSpace.UniformIntegerHyperparameter("b/x",
                                                     lower=0,
                                                     upper=4,
                                                     q=2),
            ConfigSpace.UniformFloatHyperparameter("b/z",
                                                   lower=1e-4,
                                                   upper=1e-2,
                                                   log=True)
        ])

        # Seed both spaces identically so equivalent spaces suggest the
        # exact same first configuration.
        converted_config.seed(1234)
        bohb_config.seed(1234)

        searcher1 = TuneBOHB(space=converted_config)
        searcher2 = TuneBOHB(space=bohb_config)

        config1 = searcher1.suggest("0")
        config2 = searcher2.suggest("0")

        self.assertEqual(config1, config2)
        self.assertIn(config1["a"], [2, 3, 4])
        self.assertIn(config1["b"]["x"], list(range(5)))
        self.assertLess(1e-4, config1["b"]["z"])
        self.assertLess(config1["b"]["z"], 1e-2)

        # Without an explicit space the searcher resolves it from the
        # `config` handed to tune.run; the constant "b/y" must pass through.
        searcher = TuneBOHB(metric="a", mode="max")
        analysis = tune.run(_mock_objective,
                            config=config,
                            search_alg=searcher,
                            num_samples=1)
        trial = analysis.trials[0]
        self.assertIn(trial.config["a"], [2, 3, 4])
        self.assertEqual(trial.config["b"]["y"], 4)

        mixed_config = {
            "a": tune.uniform(5, 6),
            "b": tune.uniform(8, 9)  # Cannot mix ConfigSpace and Dict
        }
        searcher = TuneBOHB(space=mixed_config, metric="a", mode="max")
        config = searcher.suggest("0")
        self.assertTrue(5 <= config["a"] <= 6)
        self.assertTrue(8 <= config["b"] <= 9)
Beispiel #21
0
def get_neural_network_default_search_space():
    """Build the default ConfigSpace search space for a neural-network classifier.

    Covers the imputation strategy, the (fixed) classifier choice, and the
    network's architecture/training hyperparameters. Learning rate and alpha
    are searched on a log scale.

    Returns:
        ConfigSpace.ConfigurationSpace with all hyperparameters added.
    """
    config_space = ConfigSpace.ConfigurationSpace()
    hyperparameters = [
        ConfigSpace.CategoricalHyperparameter(
            'imputation:strategy', ['mean', 'median', 'most_frequent']),
        ConfigSpace.CategoricalHyperparameter(
            'classifier:__choice__', ['neural_network']),
        ConfigSpace.UniformIntegerHyperparameter(
            'classifier:neural_network:hidden_layer_sizes', 32, 1024),
        ConfigSpace.UniformIntegerHyperparameter(
            'classifier:neural_network:num_hidden_layers', 1, 5),
        ConfigSpace.UniformFloatHyperparameter(
            'classifier:neural_network:learning_rate_init', 0.00001, 1, log=True),
        ConfigSpace.UniformFloatHyperparameter(
            'classifier:neural_network:alpha', 0.0000001, 0.0001, log=True),
        ConfigSpace.UniformFloatHyperparameter(
            'classifier:neural_network:momentum', 0.1, 0.9),
    ]
    # Added one at a time, matching the original call pattern.
    for hyperparameter in hyperparameters:
        config_space.add_hyperparameter(hyperparameter)
    return config_space
Beispiel #22
0
    def setUp(self):
        """Enable INFO-level logging and build a seeded configuration space
        for a GradientBoostingClassifier with one categorical hyperparameter."""
        logging.getLogger().setLevel(logging.INFO)

        self.cs = ConfigSpace.ConfigurationSpace(
            'sklearn.ensemble.GradientBoostingClassifier', 0)
        algorithm = ConfigSpace.CategoricalHyperparameter(
            name='algorithm', choices=['a1', 'a2', 'a3'])
        self.cs.add_hyperparameter(algorithm)
	def setUp(self):
		"""Build a conditional configuration space: a categorical 'parent'
		and three typed children, each active for one parent value."""
		self.configspace = CS.ConfigurationSpace()

		self.HPs = []
		self.HPs.append(CS.CategoricalHyperparameter('parent', [1, 2, 3]))
		self.HPs.append(CS.CategoricalHyperparameter('child1_x1', ['foo', 'bar']))
		self.HPs.append(CS.UniformFloatHyperparameter('child2_x1', lower=-1, upper=1))
		self.HPs.append(CS.UniformIntegerHyperparameter('child3_x1', lower=-2, upper=5))
		self.configspace.add_hyperparameters(self.HPs)

		# Child i is only active when parent == i.
		self.conditions = []
		self.conditions += [CS.EqualsCondition(self.HPs[1], self.HPs[0], 1)]
		self.conditions += [CS.EqualsCondition(self.HPs[2], self.HPs[0], 2)]
		self.conditions += [CS.EqualsCondition(self.HPs[3], self.HPs[0], 3)]
		# Fixed: a list comprehension was used purely for its side effect,
		# allocating a throwaway list of Nones; a plain loop is the idiom.
		for cond in self.conditions:
			self.configspace.add_condition(cond)
Beispiel #24
0
def get_libsvm_svc_default_search_space():
    """Default search space for an SVC pipeline (imputation + libsvm SVC).

    `degree` is only active for the poly kernel; `coef0` only for the
    poly and sigmoid kernels — both enforced via conditions.

    Returns:
        ConfigSpace.ConfigurationSpace with hyperparameters and conditions set.
    """
    search_space = ConfigSpace.ConfigurationSpace()

    imputation = ConfigSpace.CategoricalHyperparameter(
        'imputation__strategy', ['mean', 'median', 'most_frequent'])
    C = ConfigSpace.UniformFloatHyperparameter(
        "classifier__C", 0.03125, 32768, log=True, default_value=1.0)
    kernel = ConfigSpace.CategoricalHyperparameter(
        name="classifier__kernel", choices=["rbf", "poly", "sigmoid"],
        default_value="rbf")
    degree = ConfigSpace.UniformIntegerHyperparameter(
        "classifier__degree", 1, 5, default_value=3)
    gamma = ConfigSpace.UniformFloatHyperparameter(
        "classifier__gamma", 3.0517578125e-05, 8, log=True, default_value=0.1)
    coef0 = ConfigSpace.UniformFloatHyperparameter(
        "classifier__coef0", -1, 1, default_value=0)
    shrinking = ConfigSpace.CategoricalHyperparameter(
        "classifier__shrinking", ["True", "False"], default_value="True")
    tol = ConfigSpace.UniformFloatHyperparameter(
        "classifier__tol", 1e-5, 1e-1, default_value=1e-3, log=True)
    max_iter = ConfigSpace.UnParametrizedHyperparameter(
        "classifier__max_iter", -1)

    search_space.add_hyperparameters(
        [imputation, C, kernel, degree, gamma, coef0, shrinking, tol, max_iter])
    search_space.add_condition(
        ConfigSpace.EqualsCondition(degree, kernel, "poly"))
    search_space.add_condition(
        ConfigSpace.InCondition(coef0, kernel, ["poly", "sigmoid"]))

    return search_space
Beispiel #25
0
    def get_configuration_space(self):
        """Configuration space for a cell-based architecture search:
        one categorical op choice per (cell, node, predecessor) edge,
        drawn from all PRIMITIVES except the last."""
        ops = PRIMITIVES[0:-1]
        cs = ConfigSpace.ConfigurationSpace()
        for cell_type in ('normal', 'reduce'):
            # Nodes 2..5; each connects to every earlier node.
            for node_idx in range(2, 6):
                for prev_idx in range(node_idx):
                    edge_name = "{}_{}_{}".format(cell_type, node_idx, prev_idx)
                    cs.add_hyperparameter(
                        ConfigSpace.CategoricalHyperparameter(edge_name, ops))

        return cs
Beispiel #26
0
 def get_config_space(self):
     """Benchmark configuration space: integer-encoded architecture and
     training knobs plus categorical activation / LR-schedule choices."""
     cs = CS.ConfigurationSpace()
     # Declared in the same order as before; added one at a time.
     hyperparameters = [
         CS.UniformIntegerHyperparameter("n_units_1", lower=0, upper=5),
         CS.UniformIntegerHyperparameter("n_units_2", lower=0, upper=5),
         CS.UniformIntegerHyperparameter("dropout_1", lower=0, upper=2),
         CS.UniformIntegerHyperparameter("dropout_2", lower=0, upper=2),
         CS.CategoricalHyperparameter("activation_fn_1", ["tanh", "relu"]),
         CS.CategoricalHyperparameter("activation_fn_2", ["tanh", "relu"]),
         CS.UniformIntegerHyperparameter("init_lr", lower=0, upper=5),
         CS.CategoricalHyperparameter("lr_schedule", ["cosine", "const"]),
         CS.UniformIntegerHyperparameter("batch_size", lower=0, upper=3),
     ]
     for hp in hyperparameters:
         cs.add_hyperparameter(hp)
     return cs
Beispiel #27
0
	def setUp(self):
		"""Build a one-categorical, seeded configuration space and sample
		train/test design matrices from it.

		Requires self.n_train and self.n_test to be set elsewhere on the
		test class — presumably class attributes; confirm in the fixture.
		"""
		self.configspace = CS.ConfigurationSpace(43)  # fixed seed for reproducibility

		HPs = []
		HPs.append(CS.CategoricalHyperparameter('cat1', choices=['foo', 'bar', 'baz']))
		self.configspace.add_hyperparameters(HPs)

		x_train_confs = [self.configspace.sample_configuration() for _ in range(self.n_train)]
		self.x_train = np.array([c.get_array() for c in x_train_confs]).squeeze()

		x_test_confs = [self.configspace.sample_configuration() for _ in range(self.n_test)]
		# BUG FIX: x_test was previously built from x_train_confs, making it a
		# duplicate of x_train (and of size n_train) while x_test_confs went unused.
		self.x_test = np.array([c.get_array() for c in x_test_confs]).squeeze()
Beispiel #28
0
    def setUp(self):
        """Load toy feature/response data from CSV and build a two-feature
        configuration space (one uniform float, one categorical)."""
        self.X = np.loadtxt('toy_data_set_features.csv', delimiter=',')
        self.y = np.loadtxt('toy_data_set_responses.csv', delimiter=',')

        self.cfs = cfs.ConfigurationSpace()
        # x1: continuous in [0, 100]; x2: categorical with three levels.
        for hyperparameter in (cfs.UniformFloatHyperparameter('x1', 0, 100),
                               cfs.CategoricalHyperparameter('x2', [0, 1, 2])):
            self.cfs.add_hyperparameter(hyperparameter)
    def get_configspace(self):
        """Build a ConfigSpace search space mirroring self.tp's parameter
        space, wrapped so that sampling rejects configurations violating
        the problem's constraints.

        Returns:
            A MyConstrainedConfigurationSpace whose sample_configuration()
            only yields constraint-satisfying configurations.
        """

        class MyConstrainedConfigurationSpace(ConfigSpace.ConfigurationSpace):
            """ConfigurationSpace subclass that rejection-samples against
            computer.evaluate_constraints for the tuning problem tp."""

            def __init__(self, tp, computer, t):

                super(MyConstrainedConfigurationSpace, self).__init__()
                self.t = t
                self.tp          = tp
                self.computer          = computer

            def sample_configuration(self, size=1):
                """Rejection sampling: resample the whole batch until every
                configuration in it passes the constraint check."""

                cond = False
                cpt = 0
                while (not cond):

                    cpt += 1
                    # Normalize to a list: the parent class returns a single
                    # Configuration when size == 1, a list otherwise.
                    if (size == 1):
                        accepted_configurations = [super(MyConstrainedConfigurationSpace, self).sample_configuration(size=size)]
                    else:
                        accepted_configurations = super(MyConstrainedConfigurationSpace, self).sample_configuration(size=size)
                    for config in accepted_configurations:
                        t = self.t
                        # print(config,type(config))
                        # Map sampled values onto the problem's parameter names,
                        # then merge in t's values over the input space
                        # (presumably the fixed task inputs — confirm with caller).
                        x = [config[p] for p in self.tp.parameter_space.dimension_names]
                        kwargs = {d.name: x[i] for (i, d) in enumerate(self.tp.parameter_space)}
                        kwargs2 = {d.name: t[i] for (i, d) in enumerate(self.tp.input_space)}
                        kwargs2.update(kwargs)
                        check_constraints = functools.partial(self.computer.evaluate_constraints, self.tp, inputs_only = False, kwargs = kwargs)
                        cond = check_constraints(kwargs2)
                        # NOTE(review): one failing config rejects the entire
                        # batch and triggers a full resample of `size` configs.
                        if (not cond):
                            break

                if (size == 1):
                    return accepted_configurations[0]
                else:
                    return accepted_configurations

        #config_space = ConfigSpace.ConfigurationSpace()
        config_space = MyConstrainedConfigurationSpace(self.tp, self.computer, self.t)

        # Mirror each dimension of the problem's parameter space as a
        # ConfigSpace hyperparameter of the matching type.
        for n,p in zip(self.tp.parameter_space.dimension_names,self.tp.parameter_space.dimensions):
            if (isinstance(p, Real)):
                config_space.add_hyperparameter(ConfigSpace.UniformFloatHyperparameter(n, lower = p.bounds[0], upper = p.bounds[1]))
            elif (isinstance(p, Integer)):
                config_space.add_hyperparameter(ConfigSpace.UniformIntegerHyperparameter(n, lower = p.bounds[0], upper = p.bounds[1]))
            elif (isinstance(p, Categorical)):
                config_space.add_hyperparameter(ConfigSpace.CategoricalHyperparameter(n, choices = list(p.bounds)))
            else:
                raise Exception("Unknown parameter type")
        return(config_space)
Beispiel #30
0
    def get_configspace():
        """Builds the config space as described in the header docstring.

        Defines lr (log-scale), an optimizer choice with conditional
        momentum (sgd) / epsilon (adam), batch size, two layer widths,
        and three leaky-activation flags.
        """
        cs = CS.ConfigurationSpace()

        lr = CS.UniformFloatHyperparameter(
            'lr', lower=1e-5, upper=1e-2, default_value=1e-4, log=True)
        optimizer = CS.CategoricalHyperparameter('optimizer', ['adam', 'sgd'])
        # Optimizer-specific parameters, gated by the conditions below.
        momentum = CS.UniformFloatHyperparameter(
            'momentum', lower=0., upper=1.00, default_value=0.9)
        epsilon = CS.UniformFloatHyperparameter(
            'epsilon', lower=1e-2, upper=1., default_value=0.1)
        bs = CS.UniformIntegerHyperparameter('bs', lower=4, upper=256)

        first_layer = CS.UniformIntegerHyperparameter(
            'first_layer', lower=16, upper=64)
        second_layer = CS.UniformIntegerHyperparameter(
            'second_layer', lower=8, upper=64)

        leaky_flags = [CS.CategoricalHyperparameter(name, [True, False])
                       for name in ('leaky1', 'leaky2', 'leaky3')]

        cs.add_hyperparameters(
            [lr, optimizer, momentum, epsilon, bs, first_layer, second_layer]
            + leaky_flags)
        # momentum only applies to sgd; epsilon only to adam.
        cs.add_condition(CS.EqualsCondition(momentum, optimizer, 'sgd'))
        cs.add_condition(CS.EqualsCondition(epsilon, optimizer, 'adam'))

        return cs