# Example #1
# Imports assumed by the examples in this section (they were not present in the
# original snippets; the hyperparameter_hunter paths follow its public API)
from hyperparameter_hunter import Environment, CVExperiment
from hyperparameter_hunter import ExtraTreesOptimization, Real, Integer, Categorical
from hyperparameter_hunter.utils.learning_utils import get_toy_classification_data
from lightgbm import LGBMRegressor
from rgf.sklearn import RGFClassifier, RGFRegressor
from sklearn.metrics import r2_score
from sklearn.model_selection import RepeatedKFold, RepeatedStratifiedKFold
from sklearn.tree import DecisionTreeClassifier


def _execute():
    # Define how Experiments (and optimization rounds) will be conducted
    env = Environment(
        train_dataset=get_toy_classification_data(target='diagnosis'),
        root_results_path='HyperparameterHunterAssets',
        target_column='diagnosis',
        metrics_map=['roc_auc_score'],
        cross_validation_type=RepeatedStratifiedKFold,
        cross_validation_params=dict(n_splits=5, n_repeats=2, random_state=32),
    )

    # `ExtraTreesOptimization` drives the search with an extra-trees surrogate model
    optimizer = ExtraTreesOptimization(
        iterations=10,
        read_experiments=True,
        random_state=None,
    )

    # Declare the model to optimize, plus concrete values and search dimensions
    optimizer.set_experiment_guidelines(
        model_initializer=RGFClassifier,
        model_init_params=dict(max_leaf=1000,
                               algorithm=Categorical(
                                   ['RGF', 'RGF_Opt', 'RGF_Sib']),
                               l2=Real(0.01, 0.3),
                               normalize=Categorical([True, False]),
                               learning_rate=Real(0.3, 0.7),
                               loss=Categorical(['LS', 'Expo', 'Log', 'Abs'])),
    )

    optimizer.go()
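
# Usage sketch (assumed, not part of the original snippet): the example above is
# written to be run as a script via a standard entry-point guard
if __name__ == "__main__":
    _execute()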


# Example #2
def opt_dtc_0():
    # Short two-iteration search over `DecisionTreeClassifier` hyperparameters
    optimizer = ExtraTreesOptimization(iterations=2, random_state=1337)
    optimizer.set_experiment_guidelines(
        model_initializer=DecisionTreeClassifier,
        model_init_params=dict(
            criterion="gini",
            min_samples_split=Integer(2, 5),
            splitter=Categorical(["best", "random"]),
            min_weight_fraction_leaf=Real(0.0, 0.1),
        ),
    )
    optimizer.go()
    # Yield (rather than return) the finished optimizer so a caller can consume it
    yield optimizer
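
# A hedged usage sketch (assumed, not from the original): a yield-style generator like
# `opt_dtc_0` has the shape pytest expects from a fixture, so it can be wrapped and
# consumed in a test; `best_score` is assumed from the optimization protocol's attributes
import pytest

@pytest.fixture()
def dtc_optimizer():
    yield from opt_dtc_0()

def test_dtc_optimization(dtc_optimizer):
    assert dtc_optimizer.best_score is not None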


# Example #3
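# This example needs a training DataFrame with a "median_value" target column. The
# loader below is an assumption (the original dataset lines were truncated), in the
# style of the Boston-housing helper used in HyperparameterHunter's docs
import pandas as pd
from sklearn.datasets import load_boston  # removed in scikit-learn >= 1.2

def get_boston_data():
    data = load_boston()
    df = pd.DataFrame(data=data.data, columns=data.feature_names)
    df["median_value"] = data.target  # MEDV, renamed to match `target_column` below
    return df
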
    target_column="median_value",
    metrics=dict(r2=r2_score),
    cv_type=RepeatedKFold,
    cv_params=dict(n_repeats=2, n_splits=5, random_state=42),
)

# Now that HyperparameterHunter has an active `Environment`, we can do two things:
#################### 1. Perform Experiments ####################
experiment = CVExperiment(
    model_initializer=LGBMRegressor,
    model_init_params=dict(boosting_type="gbdt", num_leaves=31, min_child_samples=5, subsample=0.5),
)

# And/or...
#################### 2. Hyperparameter Optimization ####################
optimizer = ExtraTreesOptimization(iterations=12, random_state=1337)
optimizer.set_experiment_guidelines(
    model_initializer=LGBMRegressor,
    model_init_params=dict(
        boosting_type=Categorical(["gbdt", "dart"]),
        num_leaves=Integer(10, 40),
        max_depth=-1,
        min_child_samples=5,
        subsample=Real(0.3, 0.7),
    ),
)
optimizer.go()

# Notice that `optimizer` recognizes that our earlier `experiment`'s hyperparameters fit
# inside the search space/guidelines set for `optimizer`, so its saved result is read in
# and used to jump-start the optimization rounds.
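
# A hedged follow-up sketch (attribute names assumed from the library's optimization
# protocols, not shown in the original): after `go()`, the best evaluation found -
# including any matched saved experiments - can be inspected directly
print(optimizer.best_score)       # best out-of-fold score observed
print(optimizer.best_experiment)  # ID of the experiment that produced it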
    cross_validation_type="KFold",
    cross_validation_params=dict(n_splits=10, random_state=42),
    runs=3,
)

# Now that HyperparameterHunter has an active `Environment`, we can do two things:
#################### 1. Perform Experiments ####################
experiment = CVExperiment(
    model_initializer=RGFRegressor,
    model_init_params=dict(max_leaf=2000, algorithm="RGF", min_samples_leaf=10),
)

# And/or...
#################### 2. Hyperparameter Optimization ####################
optimizer = ExtraTreesOptimization(iterations=30, random_state=42)
optimizer.set_experiment_guidelines(
    model_initializer=RGFRegressor,
    model_init_params=dict(
        max_leaf=2000,
        algorithm=Categorical(["RGF", "RGF_Opt", "RGF_Sib"]),
        l2=Real(0.01, 0.3),
        normalize=Categorical([True, False]),
        learning_rate=Real(0.3, 0.7),
        loss=Categorical(["LS", "Expo", "Log"]),
    ),
)
optimizer.go()

# Notice that, here too, `optimizer` recognizes that the earlier `experiment`'s
# hyperparameters fit inside the search space/guidelines set for `optimizer`, so its
# saved result is read in to inform the search.
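
# A hedged closing sketch (directory layout illustrative, not from the original): the
# result reuse above works because each Experiment is saved under the `Environment`'s
# results directory, which optimization protocols scan for compatible saved experiments
import os

for root, _dirs, files in os.walk("HyperparameterHunterAssets"):
    for name in files:
        print(os.path.join(root, name))  # per-experiment descriptions, predictions, etc.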