Example #1
def objective(
    num_steps: int,
    episode_length: int,
    trial: optuna.trial.Trial,
    policy_bounds: List[float],
    baseline_bounds: List[float],
    random_seed: int,
):
    alpha_policy = trial.suggest_loguniform("alpha_policy", policy_bounds[0],
                                            policy_bounds[1])
    alpha_baseline = trial.suggest_loguniform("alpha_baseline",
                                              baseline_bounds[0],
                                              baseline_bounds[1])
    print(
        f"Initialising trial {trial.number}, Alpha policy = {alpha_policy}, Alpha baseline = {alpha_baseline}\n"
    )
    return train_policy(
        alpha_baseline=alpha_baseline,
        alpha_policy=alpha_policy,
        num_steps=num_steps,
        trial=trial,
        alpha_decay=0.9999,
        discount_factor=0.999,
        episode_length=episode_length,
        random_seed=random_seed,
    )
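A minimal launch sketch for an objective with extra arguments like this one: everything except `trial` is bound up front with `functools.partial`. The bound values and the `maximize` direction below are illustrative assumptions, not taken from the source.

import functools

import optuna

# Hypothetical binding of the non-trial arguments; adjust to your experiment.
objective_fn = functools.partial(
    objective,
    num_steps=10_000,
    episode_length=200,
    policy_bounds=[1e-5, 1e-1],
    baseline_bounds=[1e-5, 1e-1],
    random_seed=42,
)

study = optuna.create_study(direction="maximize")
study.optimize(lambda trial: objective_fn(trial=trial), n_trials=50)
print(study.best_params)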
Example #2
    def _preprocess(self, trial: optuna.trial.Trial) -> None:
        if self.pbar is not None:
            self.pbar.set_description(self.pbar_fmt.format(self.step_name, self.best_score))

        if "lambda_l1" in self.target_param_names:
            self.lgbm_params["lambda_l1"] = trial.suggest_loguniform("lambda_l1", 1e-8, 10.0)
        if "lambda_l2" in self.target_param_names:
            self.lgbm_params["lambda_l2"] = trial.suggest_loguniform("lambda_l2", 1e-8, 10.0)
        if "num_leaves" in self.target_param_names:
            tree_depth = self.lgbm_params.get("max_depth", _DEFAULT_TUNER_TREE_DEPTH)
            max_num_leaves = 2 ** tree_depth if tree_depth > 0 else 2 ** _DEFAULT_TUNER_TREE_DEPTH
            self.lgbm_params["num_leaves"] = trial.suggest_int("num_leaves", 2, max_num_leaves)
        if "feature_fraction" in self.target_param_names:
            # `GridSampler` is used for sampling the feature_fraction value.
            # The value 1.0 must be sampled, so the range is extended by `_EPS`
            # and the suggested value is clamped back to 1.0.
            param_value = min(trial.suggest_uniform("feature_fraction", 0.4, 1.0 + _EPS), 1.0)
            self.lgbm_params["feature_fraction"] = param_value
        if "bagging_fraction" in self.target_param_names:
            # `TPESampler` is used for sampling the bagging_fraction value.
            # The value 1.0 for the hyperparameter might be sampled.
            param_value = min(trial.suggest_uniform("bagging_fraction", 0.4, 1.0 + _EPS), 1.0)
            self.lgbm_params["bagging_fraction"] = param_value
        if "bagging_freq" in self.target_param_names:
            self.lgbm_params["bagging_freq"] = trial.suggest_int("bagging_freq", 1, 7)
        if "min_child_samples" in self.target_param_names:
            # `GridSampler` is used for sampling the min_child_samples value.
            # The range is extended by `_EPS` so the upper value 100 can be sampled.
            param_value = int(trial.suggest_uniform("min_child_samples", 5, 100 + _EPS))
            self.lgbm_params["min_child_samples"] = param_value
Example #3
def feedforward(config: BaseConfig,
                trial: optuna.trial.Trial) -> pl.LightningModule:
    """Returns a tunable PyTorch lightning feedforward module.

    Args:
        config (BaseConfig): the hard-coded configuration.
        trial (optuna.Trial): optuna trial.

    Returns:
        pl.LightningModule: a lightning module.
    """

    training_config = get_training_config(
        lr=trial.suggest_loguniform('lr', 1e-3, 1e-0),
        weight_decay=trial.suggest_loguniform('weight_decay', 1e-5, 1e-1),
        max_epochs=config.MAX_EPOCHS)

    # Mirrors the tcn() factory below: the training config is passed to the
    # model constructor together with the architecture hyperparameters.
    model = FeedForward(training_config=training_config,
                        num_inputs=config.NUM_INPUTS,
                        num_outputs=config.NUM_OUTPUTS,
                        num_hidden=trial.suggest_int('num_hidden', 1, 4),
                        num_layers=trial.suggest_int('num_layers', 1, 2),
                        dropout=trial.suggest_float('dropout', 0.0, 0.5),
                        activation=trial.suggest_categorical(
                            'activation', ['relu', 'none']))

    return model
Example #4
def _xgbclassifier_default(trial: optuna.trial.Trial):
    param = {
        'silent': 1,
        'objective': 'binary:logistic',
        'booster': trial.suggest_categorical('booster',
                                             ['gbtree', 'gblinear', 'dart']),
        'lambda': trial.suggest_loguniform('lambda', 1e-8, 1.0),
        'alpha': trial.suggest_loguniform('alpha', 1e-8, 1.0)
    }

    if param['booster'] in ('gbtree', 'dart'):
        param['max_depth'] = trial.suggest_int('max_depth', 1, 9)
        param['eta'] = trial.suggest_loguniform('eta', 1e-8, 1.0)
        param['gamma'] = trial.suggest_loguniform('gamma', 1e-8, 1.0)
        param['grow_policy'] = trial.suggest_categorical(
            'grow_policy', ['depthwise', 'lossguide'])
    if param['booster'] == 'dart':
        param['sample_type'] = trial.suggest_categorical(
            'sample_type', ['uniform', 'weighted'])
        param['normalize_type'] = trial.suggest_categorical(
            'normalize_type', ['tree', 'forest'])
        param['rate_drop'] = trial.suggest_loguniform('rate_drop', 1e-8, 1.0)
        param['skip_drop'] = trial.suggest_loguniform('skip_drop', 1e-8, 1.0)

    return param
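A hedged sketch of an objective that consumes `_xgbclassifier_default`, in the spirit of Optuna's stock XGBoost example; `X_train`, `y_train`, `X_valid`, and `y_valid` are assumed to exist in the enclosing scope:

import numpy as np
import xgboost as xgb
from sklearn.metrics import accuracy_score

def objective(trial: optuna.trial.Trial) -> float:
    param = _xgbclassifier_default(trial)
    dtrain = xgb.DMatrix(X_train, label=y_train)  # data assumed in scope
    dvalid = xgb.DMatrix(X_valid, label=y_valid)
    bst = xgb.train(param, dtrain)
    preds = bst.predict(dvalid)  # probabilities under binary:logistic
    return accuracy_score(y_valid, np.rint(preds))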
Example #5
def tcn(config: BaseConfig, trial: optuna.trial.Trial) -> pl.LightningModule:
    """Returns a tunable PyTorch lightning tcn module.

    Args:
        config (BaseConfig): the hard-coded configuration.
        trial (optuna.Trial): optuna trial.

    Returns:
        pl.LightningModule: a lightning module.
    """

    training_config = get_training_config(
        lr=trial.suggest_loguniform('lr', 1e-3, 1e-0),
        weight_decay=trial.suggest_loguniform('weight_decay', 1e-5, 1e-1),
        max_epochs=config.MAX_EPOCHS)

    tcn = TemporalConvNet(training_config=training_config,
                          num_inputs=config.NUM_INPUTS,
                          num_outputs=config.NUM_OUTPUTS,
                          num_hidden=trial.suggest_int('num_hidden', 1, 4),
                          kernel_size=trial.suggest_int('kernel_size', 2, 4),
                          num_layers=trial.suggest_int('num_layers', 1, 2),
                          dropout=trial.suggest_float('dropout', 0.1, 0.3))

    return tcn
Example #6
def _catboostclassifier_default(trial: optuna.trial.Trial):
    params = {
        'iterations': trial.suggest_int('iterations', 50, 300),
        'depth': trial.suggest_int('depth', 4, 10),
        'learning_rate': trial.suggest_loguniform('learning_rate', 0.01, 0.3),
        'random_strength': trial.suggest_int('random_strength', 0, 100),
        'bagging_temperature': trial.suggest_loguniform('bagging_temperature', 0.01, 100.00),
        'od_type': trial.suggest_categorical('od_type', ['IncToDec', 'Iter']),
        'od_wait': trial.suggest_int('od_wait', 10, 50)
    }

    return params
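A usage sketch, assuming sklearn-style train/validation arrays are in scope; `verbose=0` is added only to silence per-iteration logging:

from catboost import CatBoostClassifier

def objective(trial: optuna.trial.Trial) -> float:
    params = _catboostclassifier_default(trial)
    clf = CatBoostClassifier(**params, verbose=0)
    clf.fit(X_train, y_train, eval_set=(X_valid, y_valid))
    return clf.score(X_valid, y_valid)  # mean accuracy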
Example #7
def _objective(trial: optuna.trial.Trial) -> float:

    p0 = trial.suggest_uniform("p0", -3.3, 5.2)
    p1 = trial.suggest_uniform("p1", 2.0, 2.0)  # low == high: fixed at 2.0
    p2 = trial.suggest_loguniform("p2", 0.0001, 0.3)
    p3 = trial.suggest_loguniform("p3", 1.1, 1.1)  # low == high: fixed at 1.1
    p4 = trial.suggest_int("p4", -100, 8)
    p5 = trial.suggest_int("p5", -20, -20)  # low == high: fixed at -20
    p6 = trial.suggest_discrete_uniform("p6", 10, 20, 2)
    p7 = trial.suggest_discrete_uniform("p7", 0.1, 1.0, 0.1)
    p8 = trial.suggest_discrete_uniform("p8", 2.2, 2.2, 0.5)  # low == high: fixed at 2.2
    p9 = trial.suggest_categorical("p9", ["9", "3", "0", "8"])
    assert isinstance(p9, str)

    return p0 + p1 + p2 + p3 + p4 + p5 + p6 + p7 + p8 + int(p9)
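Note that `suggest_uniform`, `suggest_loguniform`, and `suggest_discrete_uniform` are deprecated in newer Optuna releases in favour of `suggest_float`; the equivalents for a few of the parameters above would look roughly like this:

p0 = trial.suggest_float("p0", -3.3, 5.2)              # was suggest_uniform
p2 = trial.suggest_float("p2", 0.0001, 0.3, log=True)  # was suggest_loguniform
p6 = trial.suggest_float("p6", 10, 20, step=2)         # was suggest_discrete_uniform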
Example #8
def _objective_func(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_uniform("x", -1.0, 1.0)
    y = trial.suggest_loguniform("y", 20.0, 30.0)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    trial.set_user_attr("my_user_attr", "my_user_attr_value")
    return (x - 2) ** 2 + (y - 25) ** 2 + z
Example #9
def func(trial: optuna.trial.Trial, x_max: float = 1.0) -> float:

    x = trial.suggest_uniform("x", -x_max, x_max)
    y = trial.suggest_loguniform("y", 20, 30)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    return (x - 2)**2 + (y - 25)**2 + z
Example #10
    def objective2(trial: optuna.trial.Trial) -> float:

        p1 = trial.suggest_loguniform("p1", 50, 100)  # The range has been changed.
        p3 = trial.suggest_discrete_uniform("p3", 0, 9, 3)
        p5 = trial.suggest_uniform("p5", 0, 1)

        return p1 + p3 + p5
Example #11
def objective(trial: optuna.trial.Trial):
    settings = Settings(
        learning_rate=trial.suggest_loguniform('learning_rate', 1e-5, 1e-2),
        hidden1=trial.suggest_int('hidden1', 50, 200),
        hidden2=trial.suggest_int('hidden2', 10, 50),
    )
    val_err = run_training(settings)
    return val_err
Example #12
    def objective0(trial: optuna.trial.Trial) -> float:

        p0 = trial.suggest_uniform("p0", 0, 10)
        p1 = trial.suggest_loguniform("p1", 1, 10)
        p2 = trial.suggest_int("p2", 0, 10)
        p3 = trial.suggest_discrete_uniform("p3", 0, 9, 3)
        p4 = trial.suggest_categorical("p4", ["10", "20", "30"])
        assert isinstance(p4, str)
        return p0 + p1 + p2 + p3 + int(p4)
Example #13
def _objective_func_long_user_attr(trial: optuna.trial.Trial) -> float:

    x = trial.suggest_uniform("x", -1.0, 1.0)
    y = trial.suggest_loguniform("y", 20, 30)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    long_str = str(list(range(5000)))
    trial.set_user_attr("my_user_attr", long_str)
    return (x - 2)**2 + (y - 25)**2 + z
Example #14
def _lgbmclassifier_default(trial: optuna.trial.Trial):
    # TODO: using LightGBMTuner
    params = {
        'boosting_type': trial.suggest_categorical('boosting', ['gbdt', 'dart', 'goss']),
        'objective': 'binary',
        'metric': ['binary', 'binary_error', 'auc'],
        'num_leaves': trial.suggest_int("num_leaves", 10, 500),
        'learning_rate': trial.suggest_loguniform("learning_rate", 1e-5, 1),
        # LightGBM requires feature_fraction > 0.0, so avoid a lower bound of 0.0.
        'feature_fraction': trial.suggest_uniform("feature_fraction", 0.1, 1.0),
    }
    if params['boosting_type'] == 'dart':
        params['drop_rate'] = trial.suggest_loguniform('drop_rate', 1e-8, 1.0)
        params['skip_drop'] = trial.suggest_loguniform('skip_drop', 1e-8, 1.0)
    if params['boosting_type'] == 'goss':
        params['top_rate'] = trial.suggest_uniform('top_rate', 0.0, 1.0)
        params['other_rate'] = trial.suggest_uniform('other_rate', 0.0, 1.0 - params['top_rate'])

    return params
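A hedged training sketch around these parameters (the data names are assumptions, and no early stopping is wired in):

import lightgbm as lgb
from sklearn.metrics import roc_auc_score

def objective(trial: optuna.trial.Trial) -> float:
    params = _lgbmclassifier_default(trial)
    dtrain = lgb.Dataset(X_train, label=y_train)  # data assumed in scope
    dvalid = lgb.Dataset(X_valid, label=y_valid, reference=dtrain)
    booster = lgb.train(params, dtrain, valid_sets=[dvalid])
    return roc_auc_score(y_valid, booster.predict(X_valid))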
Example #15
    def objective1(trial: optuna.trial.Trial) -> float:

        # p0, p2 and p4 are deleted.
        p1 = trial.suggest_loguniform("p1", 1, 10)
        p3 = trial.suggest_discrete_uniform("p3", 0, 9, 3)

        # p5 is added.
        p5 = trial.suggest_uniform("p5", 0, 1)

        return p1 + p3 + p5
Example #16
    def modelCatBoostClassifier(self, trial: optuna.trial.Trial):
        opt_params = dict(
            num_leaves=trial.suggest_int("num_leaves", 2, 2**8),
            learning_rate=trial.suggest_discrete_uniform(
                'learning_rate', 0.001, 1, 0.001),
            n_estimators=trial.suggest_int("n_estimators", 2, 2**10, log=True),
            min_child_samples=trial.suggest_int('min_child_samples', 2, 2**8),
            min_child_weight=trial.suggest_loguniform('min_child_weight', 1e-8, 1),
            min_split_gain=trial.suggest_loguniform('min_split_gain', 1e-8, 1),
            subsample=trial.suggest_uniform('subsample', 0.4, 1),
            subsample_freq=trial.suggest_int("subsample_freq", 0, 2**4),
            colsample_bytree=trial.suggest_uniform('colsample_bytree', 0.4, 1),
            reg_alpha=trial.suggest_loguniform('reg_alpha', 1e-8, 10),
            reg_lambda=trial.suggest_loguniform('reg_lambda', 1e-8, 10),
        )
        clf = CatBoostClassifier()
        clf.set_params(**{**opt_params, **self.params})
        return clf
Example #17
def _objective_func(trial: optuna.trial.Trial) -> float:
    u = trial.suggest_int("u", 0, 10, step=2)
    v = trial.suggest_int("v", 1, 10, log=True)
    w = trial.suggest_float("w", -1.0, 1.0, step=0.1)
    x = trial.suggest_uniform("x", -1.0, 1.0)
    y = trial.suggest_loguniform("y", 20.0, 30.0)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    trial.set_user_attr("my_user_attr", "my_user_attr_value")
    return u + v + w + (x - 2)**2 + (y - 25)**2 + z
Example #18
def get_optimizer(trial: optuna.trial.Trial,
                  model: nn.Module) -> optim.Optimizer:
    def adam(model: nn.Module, lr: float,
             weight_decay: float) -> optim.Optimizer:
        return optim.Adam(model.parameters(), lr=lr, weight_decay=weight_decay)

    def momentum(model: nn.Module, lr: float,
                 weight_decay: float) -> optim.Optimizer:
        return optim.SGD(model.parameters(),
                         lr=lr,
                         momentum=0.9,
                         weight_decay=weight_decay)

    optimizer_name = trial.suggest_categorical('optimizer',
                                               ['adam', 'momentum'])
    # Look up the nested factory defined above via the local symbol table.
    optimizer: Callable[[nn.Module, float, float],
                        optim.Optimizer] = locals()[optimizer_name]
    lr = trial.suggest_loguniform('lr', 1e-5, 1e-1)
    weight_decay = trial.suggest_loguniform('weight_decay', 1e-10, 1e-3)
    return optimizer(model, lr, weight_decay)
Example #19
def objective(trial: optuna.trial.Trial) -> Tuple[float, float]:
    p0 = trial.suggest_float("p0", -10, 10)
    p1 = trial.suggest_uniform("p1", 3, 5)
    p2 = trial.suggest_loguniform("p2", 0.00001, 0.1)
    p3 = trial.suggest_discrete_uniform("p3", 100, 200, q=5)
    p4 = trial.suggest_int("p4", -20, -15)
    p5 = cast(int, trial.suggest_categorical("p5", [7, 1, 100]))
    p6 = trial.suggest_float("p6", -10, 10, step=1.0)
    p7 = trial.suggest_int("p7", 1, 7, log=True)
    return (
        p0 + p1 + p2,
        p3 + p4 + p5 + p6 + p7,
    )
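Because this objective returns a tuple, it needs a multi-objective study; a minimal launch sketch (the two `minimize` directions are an arbitrary choice for illustration):

study = optuna.create_study(directions=["minimize", "minimize"])
study.optimize(objective, n_trials=100)
for best in study.best_trials:  # the Pareto-optimal trials
    print(best.values, best.params)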
Example #20
    def modelLGBMClassifier(self, trial: optuna.trial.Trial):
        opt_params = dict(
            num_leaves=trial.suggest_int("num_leaves", 2, 2**8),
            learning_rate=trial.suggest_discrete_uniform(
                'learning_rate', 0.001, 1, 0.001),
            n_estimators=trial.suggest_int("n_estimators", 2, 2**10, log=True),
            min_child_samples=trial.suggest_int('min_child_samples', 2, 2**8),
            min_child_weight=trial.suggest_loguniform('min_child_weight', 1e-8, 1),
            min_split_gain=trial.suggest_loguniform('min_split_gain', 1e-8, 1),
            subsample=trial.suggest_uniform('subsample', 0.4, 1),
            subsample_freq=trial.suggest_int("subsample_freq", 0, 2**4),
            colsample_bytree=trial.suggest_uniform('colsample_bytree', 0.4, 1),
            reg_alpha=trial.suggest_loguniform('reg_alpha', 1e-8, 10),
            reg_lambda=trial.suggest_loguniform('reg_lambda', 1e-8, 10),
        )
        clf = LGBMClassifier(boosting_type='gbdt',
                             num_leaves=31,
                             max_depth=-1,
                             learning_rate=0.1,
                             n_estimators=100,
                             subsample_for_bin=200000,
                             objective=None,
                             class_weight=None,
                             min_split_gain=0.,
                             min_child_weight=1e-3,
                             min_child_samples=20,
                             subsample=1.,
                             subsample_freq=0,
                             colsample_bytree=1.,
                             reg_alpha=0.,
                             reg_lambda=0.,
                             random_state=None,
                             n_jobs=-1,
                             silent=True,
                             importance_type='split')
        clf.set_params(**{**opt_params, **self.params})
        return clf
Example #21
    def modelXGBClassifier(self, trial: optuna.trial.Trial):
        opt_params = dict(
            max_depth=trial.suggest_int("max_depth", 2, 2**4),
            learning_rate=trial.suggest_discrete_uniform(
                'learning_rate', 0.001, 1, 0.001),
            n_estimators=trial.suggest_int("n_estimators", 2, 2**10, log=True),
            gamma=trial.suggest_loguniform('gamma', 1e-8, 1),
            min_child_weight=trial.suggest_loguniform('min_child_weight', 1e-8, 2**10),
            subsample=trial.suggest_uniform('subsample', 0.1, 1),
            colsample_bytree=trial.suggest_uniform('colsample_bytree', 0.1, 1),
            colsample_bylevel=trial.suggest_uniform('colsample_bylevel', 0.1, 1),
            reg_alpha=trial.suggest_loguniform('reg_alpha', 1e-8, 10),
            reg_lambda=trial.suggest_loguniform('reg_lambda', 1e-8, 10),
        )
        clf = XGBClassifier(max_depth=3,
                            learning_rate=0.1,
                            n_estimators=100,
                            silent=True,
                            objective="binary:logistic",
                            booster='gbtree',
                            n_jobs=1,
                            gamma=0,
                            min_child_weight=1,
                            max_delta_step=0,
                            subsample=1,
                            colsample_bytree=1,
                            colsample_bylevel=1,
                            reg_alpha=0,
                            reg_lambda=1,
                            scale_pos_weight=1,
                            base_score=0.5,
                            random_state=0,
                            missing=None)
        clf.set_params(**{**opt_params, **self.params})
        return clf
Example #22
def objective_for_binary_unet(args, trial: optuna.trial.Trial):
    args.lr = trial.suggest_loguniform("lr", low=1e-5, high=1e-2)
    args.edge_weight = trial.suggest_uniform("edge_weight", low=1, high=5)
    args.wf = trial.suggest_int("wf", low=2, high=4)
    args.depth = trial.suggest_int("depth", low=4, high=6)

    pl_pruning_callback = PyTorchLightningPruningCallback(
        trial, "val/f1_score")
    ckpt_callback = train_binary_unet_model(args,
                                            callbacks=[pl_pruning_callback])

    best_f1_score = ckpt_callback.best_model_score.detach().cpu().numpy().item()
    trial.set_user_attr("best_val_f1", best_f1_score)
    trial.set_user_attr("best_model_path", ckpt_callback.best_model_path)

    return best_f1_score
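A sketch of wiring this objective into a pruning-enabled study, which is what lets the `PyTorchLightningPruningCallback` above actually stop unpromising trials; the pruner choice and trial budget are illustrative, and the `args` namespace is assumed to be in scope:

import functools

study = optuna.create_study(
    direction="maximize",
    pruner=optuna.pruners.MedianPruner(n_warmup_steps=5),
)
study.optimize(functools.partial(objective_for_binary_unet, args), n_trials=40)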
Example #23
def _objective_func(trial: optuna.trial.Trial) -> float:

    x = trial.suggest_uniform("x", low=-10, high=10)
    y = trial.suggest_loguniform("y", low=1, high=10)
    return (x - 2)**2 + (y - 25)**2