Example #1
0
def test_sentinels_optimization(env_0):
    """Smoke-test that dataset sentinels (from `get_all_sentinels`) are accepted
    inside `model_extra_params` and resolved during a short optimization run.

    Parameters
    ----------
    env_0: Environment
        Active HyperparameterHunter `Environment` fixture whose sentinels are
        passed through to XGBoost's `eval_set` at fit-time
    """
    # Only 2 iterations -- this is a smoke test, not a real search
    optimizer = GBRT(iterations=2)
    # `forge_experiment` is the current name of the deprecated
    # `set_experiment_guidelines` -- matches the usage in the other example
    optimizer.forge_experiment(
        model_initializer=XGBClassifier,
        # NOTE(review): "reg:linear" is deprecated in XGBoost (superseded by
        # "reg:squarederror"); kept as-is because the behavior under test is
        # sentinel resolution, not the objective -- confirm before updating
        model_init_params=dict(objective="reg:linear",
                               max_depth=Integer(2, 20),
                               subsample=0.5),
        model_extra_params=dict(fit=dict(
            # Sentinels stand in for the concrete (train, validation) sets,
            # which are substituted per cross-validation fold at fit-time
            eval_set=get_all_sentinels(env_0),
            early_stopping_rounds=5,
            eval_metric=Categorical(["auc", "mae"]),
        )),
    )
    optimizer.go()
Example #2
0
# With an active `Environment` in place, HyperparameterHunter offers two workflows:
#################### 1. Perform Experiments ####################
# A single cross-validated run with fixed hyperparameters
experiment = CVExperiment(
    model_initializer=XGBRegressor,
    model_init_params={"max_depth": 4, "n_estimators": 400, "subsample": 0.5},
    model_extra_params={"fit": {"eval_metric": "mae"}},
)

# And/or...
#################### 2. Hyperparameter Optimization ####################
# A guided search over ranges/choices instead of fixed values
optimizer = GBRT(iterations=20, random_state=32)
optimizer.forge_experiment(
    model_initializer=XGBRegressor,
    model_init_params={
        "max_depth": Integer(2, 20),
        "n_estimators": Integer(100, 900),
        "learning_rate": Real(0.0001, 0.5),
        "subsample": 0.5,
        "booster": Categorical(["gbtree", "gblinear"]),
    },
    model_extra_params={"fit": {"eval_metric": Categorical(["rmse", "mae"])}},
)
optimizer.go()

# Note that the hyperparameters used by `experiment` above fall inside the
# search space given to `optimizer` via `forge_experiment`.

# Because of that, `optimizer.go()` automatically picks up and learns from
# `experiment`'s saved results -- no extra work required on our part!