Example #1
0
def test_all_methods():
    """Smoke test: every public method of RandomSearchOptimizer runs end to end."""
    from hyperactive import RandomSearchOptimizer

    optimizer = RandomSearchOptimizer(search_config, n_iter=10, verbosity=0)
    optimizer.fit(X_train, y_train)
    optimizer.predict(X_test)
    optimizer.score(X_test, y_test)
    optimizer.export("test")
Example #2
0
def test_warm_start():
    """Smoke test: fitting with a warm start must not raise."""
    from hyperactive import RandomSearchOptimizer

    optimizer = RandomSearchOptimizer(
        search_config, 1, n_jobs=1, warm_start=warm_start
    )
    optimizer.fit(X, y)
Example #3
0
def test_keras_losses():
    """Check that the configured metric survives fit/predict/score for keras losses."""
    from hyperactive import RandomSearchOptimizer

    ml_losses = [
        "mean_squared_error",
        "mean_absolute_error",
        "mean_absolute_percentage_error",
        "mean_squared_logarithmic_error",
        "squared_hinge",
        "hinge",
        # "categorical_hinge",
        "logcosh",
        "categorical_crossentropy",
        # "sparse_categorical_crossentropy",
        "binary_crossentropy",
        "kullback_leibler_divergence",
        "poisson",
        "cosine_proximity",
    ]

    for loss in ml_losses:
        optimizer = RandomSearchOptimizer(search_config, 1, metric=loss)
        assert optimizer._config_.metric == loss
        # The metric must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.metric == loss
Example #4
0
def test_lightgbm_classification():
    """Check that each classification metric survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    ml_scores = [
        "accuracy_score",
        "balanced_accuracy_score",
        "average_precision_score",
        "brier_score_loss",
        "f1_score",
        "log_loss",
        "precision_score",
        "recall_score",
        "jaccard_score",
        "roc_auc_score",
    ]

    for score in ml_scores:
        optimizer = RandomSearchOptimizer(search_config, 1, metric=score)
        assert optimizer._config_.metric == score
        # The metric must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.metric == score
Example #5
0
def test_RandomSearchOptimizer():
    """Run two random searches with different iteration budgets.

    With the same random state and warm start, the search with the larger
    budget must end with a best score at least as good as the smaller one.
    """
    from hyperactive import RandomSearchOptimizer

    opt0 = RandomSearchOptimizer(
        search_config,
        n_iter_0,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=1,
        warm_start=warm_start,
    )
    opt0.fit(X, y)

    opt1 = RandomSearchOptimizer(
        search_config,
        n_iter_1,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=n_jobs,
        warm_start=warm_start,
    )
    opt1.fit(X, y)

    # A strict `<` was flaky: more iterations only guarantee an
    # equal-or-better best score — the optimum may be found early.
    assert opt0.score_best <= opt1.score_best
Example #6
0
def test_lightgbm():
    """Smoke test: a one-iteration search runs fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    optimizer = RandomSearchOptimizer(search_config, 1)
    optimizer.fit(X, y)
    optimizer.predict(X)
    optimizer.score(X, y)
Example #7
0
def test_memory():
    """Smoke test: fitting works with memory enabled and disabled."""
    from hyperactive import RandomSearchOptimizer

    for memory in (True, False):
        optimizer = RandomSearchOptimizer(search_config, 1, memory=memory)
        optimizer.fit(X, y)
Example #8
0
def test_verbosity():
    """Smoke test: fitting works with silent and verbose output."""
    from hyperactive import RandomSearchOptimizer

    for verbosity in (0, 1):
        optimizer = RandomSearchOptimizer(search_config, 1, verbosity=verbosity)
        optimizer.fit(X, y)
Example #9
0
def test_import_and_inits():
    """Smoke test: import every optimizer class and construct each once."""
    from hyperactive import Hydra, Insight, Iota

    _ = Hydra()
    # _ = Insight()
    # _ = Iota()

    from hyperactive import (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )

    optimizer_classes = (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )
    # Construct each optimizer once; any __init__ error fails the test.
    for optimizer_class in optimizer_classes:
        _ = optimizer_class(search_config, 1)
Example #10
0
def test_lightgbm_memory():
    """Smoke test: both memory settings survive fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for memory in (False, True):
        _run(RandomSearchOptimizer(search_config, 1, memory=memory))
Example #11
0
def test_lightgbm_n_iter():
    """Smoke test: several iteration budgets survive fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for n_iter in (0, 1, 3, 10):
        _run(RandomSearchOptimizer(search_config, n_iter))
Example #12
0
def test_lightgbm_random_state():
    """Smoke test: several random states survive fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for random_state in (None, 0, 1, 2):
        _run(RandomSearchOptimizer(search_config, 1, random_state=random_state))
Example #13
0
def test_lightgbm_verbosity():
    """Smoke test: all verbosity levels survive fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for verbosity in (0, 1, 2):
        _run(RandomSearchOptimizer(search_config, 1, verbosity=verbosity))
Example #14
0
def test_lightgbm_scatter_init():
    """Smoke test: several scatter_init settings survive fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for scatter_init in (False, 2, 3, 4):
        _run(RandomSearchOptimizer(search_config, 1, scatter_init=scatter_init))
Example #15
0
def test_lightgbm_cv():
    """Smoke test: fractional and integer cv settings survive fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for cv in (0.1, 0.5, 0.9, 2, 4):
        _run(RandomSearchOptimizer(search_config, 1, cv=cv))
Example #16
0
def test_xgboost_warm_start():
    """Smoke test: fitting with and without an explicit warm start."""
    from hyperactive import RandomSearchOptimizer

    warm_start = {"sklearn.tree.DecisionTreeClassifier": {"max_depth": [1]}}

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for start in (None, warm_start):
        _run(RandomSearchOptimizer(search_config, 1, warm_start=start))
Example #17
0
def test_sklearn_n_iter():
    """Check that the n_iter setting survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    for n_iter in (0, 1, 3, 10):
        optimizer = RandomSearchOptimizer(search_config, n_iter)
        assert optimizer._config_.n_iter == n_iter
        # n_iter must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.n_iter == n_iter
Example #18
0
def test_sklearn_memory():
    """Check that the memory setting survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    for memory in (False, True):
        optimizer = RandomSearchOptimizer(search_config, 1, memory=memory)
        assert optimizer._config_.memory == memory
        # memory must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.memory == memory
Example #19
0
def test_sklearn_cv():
    """Check that the cv setting survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    for cv in (0.1, 0.5, 0.9, 2, 4):
        optimizer = RandomSearchOptimizer(search_config, 1, cv=cv)
        assert optimizer._config_.cv == cv
        # cv must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.cv == cv
Example #20
0
def test_scatter_init_and_warm_start():
    """Smoke test: scatter_init combined with warm_start for 1 and 2 iterations."""
    from hyperactive import RandomSearchOptimizer

    for n_iter in (1, 2):
        optimizer = RandomSearchOptimizer(
            search_config, n_iter, warm_start=warm_start, scatter_init=10
        )
        optimizer.fit(X, y)
Example #21
0
def test_sklearn_random_state():
    """Check that the random_state setting survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    for random_state in (None, 0, 1, 2):
        optimizer = RandomSearchOptimizer(
            search_config, 1, random_state=random_state
        )
        assert optimizer._config_.random_state == random_state
        # random_state must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.random_state == random_state
Example #22
0
def test_sklearn_scatter_init():
    """Check that the scatter_init setting survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    for scatter_init in (False, 2, 3, 4):
        optimizer = RandomSearchOptimizer(
            search_config, 1, scatter_init=scatter_init
        )
        assert optimizer._config_.scatter_init == scatter_init
        # scatter_init must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.scatter_init == scatter_init
Example #23
0
def test_lightgbm_warm_start():
    """Smoke test: fitting with and without a lightgbm warm start."""
    from hyperactive import RandomSearchOptimizer

    warm_start = {
        "lightgbm.LGBMClassifier": {
            "boosting_type": ["gbdt"],
            "num_leaves": [5],
            "learning_rate": [0.05],
        }
    }

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for start in (None, warm_start):
        _run(RandomSearchOptimizer(search_config, 1, warm_start=start))
Example #24
0
def test_catboost_warm_start():
    """Smoke test: fitting with and without a catboost warm start."""
    from hyperactive import RandomSearchOptimizer

    warm_start = {
        "catboost.CatBoostClassifier": {
            "iterations": [3],
            "learning_rate": [1],
            "depth": [3],
            "verbose": [0],
        }
    }

    def _run(optimizer):
        # Exercise the full public API once.
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)

    for start in (None, warm_start):
        _run(RandomSearchOptimizer(search_config, 1, warm_start=start))
Example #25
0
def test_keras_scores():
    """Check that each keras accuracy metric survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    ml_scores = [
        "accuracy",
        "binary_accuracy",
        "categorical_accuracy",
        "sparse_categorical_accuracy",
        "top_k_categorical_accuracy",
        "sparse_top_k_categorical_accuracy",
    ]

    for score in ml_scores:
        optimizer = RandomSearchOptimizer(search_config, 1, metric=score)
        assert optimizer._config_.metric == score
        # The metric must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.metric == score
Example #26
0
def test_lightgbm_regression():
    """Check that each regression metric survives fit/predict/score."""
    from hyperactive import RandomSearchOptimizer

    ml_losses = [
        "explained_variance_score",
        "max_error",
        "mean_absolute_error",
        "mean_squared_error",
        "mean_squared_log_error",
        "median_absolute_error",
        "r2_score",
    ]

    for loss in ml_losses:
        optimizer = RandomSearchOptimizer(search_config, 1, metric=loss)
        assert optimizer._config_.metric == loss
        # The metric must stay unchanged after every public API call.
        for step, args in (
            (optimizer.fit, (X, y)),
            (optimizer.predict, (X,)),
            (optimizer.score, (X, y)),
        ):
            step(*args)
            assert optimizer._config_.metric == loss
Example #27
0
# Load the breast-cancer dataset used as demo data.
cancer_data = load_breast_cancer()
X = cancer_data.data
y = cancer_data.target

# Hold out 20% of the samples for the final evaluation below.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

# this defines the model and hyperparameter search space
search_config = {
    "lightgbm.LGBMClassifier": {
        "boosting_type": ["gbdt"],
        "num_leaves": range(2, 20),
        "learning_rate": np.arange(0.01, 0.1, 0.01),
        "feature_fraction": np.arange(0.1, 0.95, 0.1),
        "bagging_fraction": np.arange(0.1, 0.95, 0.1),
        "bagging_freq": range(2, 10, 1),
    }
}

# 10 random-search iterations, 4 parallel jobs, 3-fold cross-validation.
opt = RandomSearchOptimizer(search_config, n_iter=10, n_jobs=4, cv=3)

# search best hyperparameter for given data
opt.fit(X, y)

# predict from test data
prediction = opt.predict(X_test)

# calculate score
score = opt.score(X_test, y_test)

print("\ntest score of best model:", score)
Example #28
0
from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import cross_val_score

from hyperactive import Hyperactive, RandomSearchOptimizer

# Load the iris dataset used as demo data.
data = load_iris()
X, y = data.data, data.target


def model(opt):
    """Objective function: mean 5-fold CV accuracy of a KNN classifier.

    ``opt`` is the parameter dict sampled by the optimizer; only
    ``opt["n_neighbors"]`` is used here.
    """
    estimator = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
    cv_scores = cross_val_score(estimator, X, y, cv=5)
    return cv_scores.mean()


# Search space: try every neighbor count from 1 to 99.
search_space = {
    "n_neighbors": list(range(1, 100)),
}

optimizer = RandomSearchOptimizer()

# Register the objective with Hyperactive and run 100 iterations.
hyper = Hyperactive()
hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
hyper.run()
Example #29
0
from sklearn.datasets import load_iris

from hyperactive import RandomSearchOptimizer

# Load the iris dataset used as demo data.
# (Unresolved git merge-conflict markers removed: both sides loaded the
# same data; the HEAD variant of each hunk is kept.)
iris_data = load_iris()
X, y = iris_data.data, iris_data.target

# Search space: random forest with 10..90 estimators.
search_config = {
    "sklearn.ensemble.RandomForestClassifier": {"n_estimators": range(10, 100, 10)}
}

opt = RandomSearchOptimizer(search_config, n_iter=10)
opt.fit(X, y)
Example #30
0
        "kernel_size": [3],
        "activation": ["relu"],
    },
    "keras.layers.MaxPooling2D.4": {
        "pool_size": [(2, 2)]
    },
    "keras.layers.Flatten.5": {},
    "keras.layers.Dense.6": {
        "units": range(10, 200, 10),
        "activation": ["softmax"]
    },
    "keras.layers.Dropout.7": {
        "rate": np.arange(0.2, 0.8, 0.1)
    },
    "keras.layers.Dense.8": {
        "units": [10],
        "activation": ["softmax"]
    },
}

# 10 random-search iterations over the keras search space defined above,
# optimizing classification accuracy.
Optimizer = RandomSearchOptimizer(search_config, n_iter=10, metric="accuracy")

# search best hyperparameter for given data
Optimizer.fit(X_train, y_train)

# predict from test data
prediction = Optimizer.predict(X_test)

# calculate score
score = Optimizer.score(X_test, y_test)