Example #1
0
def test_keras_warm_start():
    """Smoke-test HillClimbingOptimizer on a keras search, with and without warm_start."""
    from hyperactive import HillClimbingOptimizer

    # Warm-start dict keyed by "<module>.<call>.<position>" as hyperactive expects.
    warm_start = {
        "keras.compile.0": {"loss": ["binary_crossentropy"], "optimizer": ["adam"]},
        "keras.fit.0": {"epochs": [1], "batch_size": [300], "verbose": [0]},
        "keras.layers.Dense.1": {"units": [1], "activation": ["softmax"]},
    }

    # Exercise both the default (None) and an explicit warm-start configuration.
    for ws in (None, warm_start):
        optimizer = HillClimbingOptimizer(search_config, 1, warm_start=ws)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #2
0
def test_HillClimbingOptimizer():
    """Fit two optimizers with different iteration budgets; more iterations should score better."""
    from hyperactive import HillClimbingOptimizer

    # Keyword arguments shared by both optimizer instances.
    shared_kwargs = dict(
        random_state=random_state,
        verbosity=1,
        cv=cv,
        warm_start=warm_start,
    )

    opt0 = HillClimbingOptimizer(search_config, n_iter_0, n_jobs=1, **shared_kwargs)
    opt0.fit(X, y)

    opt1 = HillClimbingOptimizer(search_config, n_iter_1, n_jobs=n_jobs, **shared_kwargs)
    opt1.fit(X, y)

    # The larger iteration budget is expected to find a strictly better score.
    assert opt0.score_best < opt1.score_best
Example #3
0
def test_keras():
    """Basic one-iteration fit/predict/score smoke test."""
    from hyperactive import HillClimbingOptimizer

    optimizer = HillClimbingOptimizer(search_config, 1)
    optimizer.fit(X, y)
    optimizer.predict(X)
    optimizer.score(X, y)
Example #4
0
def test_max_score_0():
    """A run with max_score set should stop before exhausting its huge n_iter budget."""

    def objective_function(para):
        # Concave objective with its maximum (0) at x1 == 0.
        return -para["x1"] * para["x1"]

    search_space = {"x1": np.arange(0, 100, 0.1)}

    max_score = -9999

    opt = HillClimbingOptimizer(epsilon=0.01, rand_rest_p=0)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=opt,
        n_iter=100000,
        # Start far from the optimum so the search must climb past max_score.
        initialize={"warm_start": [{"x1": 99}]},
        max_score=max_score,
    )
    hyper.run()

    print("\n Results head \n", hyper.results(objective_function).head())
    print("\n Results tail \n", hyper.results(objective_function).tail())

    print("\nN iter:", len(hyper.results(objective_function)))

    # Best score must lie between the stop threshold and a value still far below 0.
    assert -100 > hyper.best_score(objective_function) > max_score
Example #5
0
def test_import_and_inits():
    """Every public optimizer class should import and construct without error."""

    from hyperactive import Hydra, Insight, Iota

    _ = Hydra()
    # _ = Insight()
    # _ = Iota()

    from hyperactive import (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )

    # Instantiate each optimizer with the same minimal configuration.
    optimizer_classes = (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )
    for optimizer_class in optimizer_classes:
        _ = optimizer_class(search_config, 1)
Example #6
0
def test_keras_n_jobs():
    """Smoke-test serial and parallel n_jobs settings."""
    from hyperactive import HillClimbingOptimizer

    for n_jobs in (1, 2):
        optimizer = HillClimbingOptimizer(search_config, 1, n_jobs=n_jobs)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #7
0
def test_keras_memory():
    """Smoke-test both memory settings (disabled and enabled)."""
    from hyperactive import HillClimbingOptimizer

    for memory in (False, True):
        optimizer = HillClimbingOptimizer(search_config, 1, memory=memory)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #8
0
def test_keras_verbosity():
    """Smoke-test silent and verbose output modes."""
    from hyperactive import HillClimbingOptimizer

    for verbosity in (0, 1):
        optimizer = HillClimbingOptimizer(search_config, 1, verbosity=verbosity)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #9
0
def test_keras_cv():
    """Smoke-test fractional holdout splits and k-fold cv settings."""
    from hyperactive import HillClimbingOptimizer

    for cv in (0.1, 0.5, 0.9, 2):
        optimizer = HillClimbingOptimizer(search_config, 1, cv=cv)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #10
0
def test_keras_n_iter():
    """Smoke-test iteration budgets of 0, 1 and 2."""
    from hyperactive import HillClimbingOptimizer

    for n_iter in (0, 1, 2):
        optimizer = HillClimbingOptimizer(search_config, n_iter)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #11
0
def test_keras_scatter_init():
    """Smoke-test with scatter initialization disabled and enabled (2 samples)."""
    from hyperactive import HillClimbingOptimizer

    for scatter_init in (False, 2):
        optimizer = HillClimbingOptimizer(search_config, 1, scatter_init=scatter_init)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #12
0
def test_keras_random_state():
    """Smoke-test unset and fixed random seeds."""
    from hyperactive import HillClimbingOptimizer

    for random_state in (None, 0, 1):
        optimizer = HillClimbingOptimizer(search_config, 1, random_state=random_state)
        optimizer.fit(X, y)
        optimizer.predict(X)
        optimizer.score(X, y)
Example #13
0
def test_data():
    """Fitting should accept both numpy arrays and pandas data structures."""
    from hyperactive import HillClimbingOptimizer

    for features, target in ((X_np, y_np), (X_pd, y_pd)):
        optimizer = HillClimbingOptimizer(
            search_config,
            n_iter_0,
            random_state=random_state,
            verbosity=0,
            cv=cv,
            n_jobs=1,
        )
        optimizer.fit(features, target)
def test_max_score_1():
    """An early stop via max_score should keep total runtime well under a second."""

    def objective_function(optimizer):
        # Concave objective; each evaluation is artificially slowed by 10 ms.
        x1 = optimizer.suggested_params["x1"]
        score = -x1 * x1
        time.sleep(0.01)
        return score

    search_space = {"x1": np.arange(0, 100, 0.1)}

    max_score = -9999

    start_time = time.time()
    opt = HillClimbingOptimizer()

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=opt,
        n_iter=100000,
        # Start far from the optimum so the search must climb past max_score.
        initialize={"warm_start": [{"x1": 99}]},
        max_score=max_score,
    )
    hyper.run()

    elapsed = time.time() - start_time

    print("\n Results head \n", hyper.results(objective_function).head())
    print("\n Results tail \n", hyper.results(objective_function).tail())

    print("\nN iter:", len(hyper.results(objective_function)))

    # 100000 iterations at 10 ms each would take ~17 min; early stop keeps it < 1 s.
    assert elapsed < 1
Example #15
0
from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import cross_val_score

from hyperactive import Hyperactive, HillClimbingOptimizer

# Load the iris dataset once at module level; `model` closes over X and y.
data = load_iris()
X, y = data.data, data.target


def model(opt):
    """Objective: mean 5-fold cross-validation accuracy of a KNN classifier.

    `opt` behaves like a mapping providing the suggested "n_neighbors" value.
    """
    estimator = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
    return cross_val_score(estimator, X, y, cv=5).mean()


# Search n_neighbors over 1..99.
search_space = {"n_neighbors": list(range(1, 100))}

# Hill climbing with a Laplace step distribution and occasional random restarts.
optimizer = HillClimbingOptimizer(
    epsilon=0.1,
    distribution="laplace",
    n_neighbours=4,
    rand_rest_p=0.1,
)

hyper = Hyperactive()
hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
hyper.run()
Example #16
0
def test_HillClimbingOptimizer_args():
    """Pass an optimizer-specific keyword argument (eps) through the constructor."""
    from hyperactive import HillClimbingOptimizer

    optimizer = HillClimbingOptimizer(search_config, 3, eps=2)
    optimizer.fit(X, y)