Example 1
def test_ParticleSwarmOptimizer():
    """Smoke-test ParticleSwarmOptimizer: a longer search should beat a shorter one.

    Relies on module-level fixtures (search_config, n_iter_0, n_iter_1,
    random_state, cv, n_jobs, warm_start, X, y) defined elsewhere in the file.
    """
    from hyperactive import ParticleSwarmOptimizer

    # Options shared by both runs; only n_iter and n_jobs differ.
    common = dict(
        random_state=random_state,
        verbosity=0,
        cv=cv,
        warm_start=warm_start,
    )

    opt_short = ParticleSwarmOptimizer(search_config, n_iter_0, n_jobs=1, **common)
    opt_short.fit(X, y)

    opt_long = ParticleSwarmOptimizer(search_config, n_iter_1, n_jobs=n_jobs, **common)
    opt_long.fit(X, y)

    # More iterations are expected to find a strictly better best score.
    assert opt_short.score_best < opt_long.score_best
Example 2
def test_import_and_inits():
    """Smoke-test that every public hyperactive class imports and constructs."""
    from hyperactive import Hydra, Insight, Iota

    _ = Hydra()
    # _ = Insight()
    # _ = Iota()

    from hyperactive import (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )

    # Instantiate each optimizer once with the shared search_config fixture
    # and a single iteration; construction failures surface as exceptions.
    optimizer_classes = (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )
    for optimizer_class in optimizer_classes:
        _ = optimizer_class(search_config, 1)
Example 3
    # Classifier head: a tunable-width dense layer (searched via "layer.0"),
    # ReLU, dropout, then a 10-way softmax output — presumably a 10-class
    # dataset such as MNIST/CIFAR-10; confirm against the hidden function head.
    nn.add(Dense(opt["layer.0"]))
    nn.add(Activation("relu"))
    nn.add(Dropout(0.5))
    nn.add(Dense(10))
    nn.add(Activation("softmax"))

    nn.compile(optimizer="adam",
               loss="categorical_crossentropy",
               metrics=["accuracy"])
    nn.fit(X_train, y_train, epochs=10, batch_size=128)

    # Keras evaluate returns (loss, metrics...); keep only the accuracy metric.
    _, score = nn.evaluate(x=X_test, y=y_test)

    # Objective value returned to the hyperparameter optimizer.
    return score


# Hyperparameter search space for the CNN objective function `cnn`.
search_space = {
    "filter.0": [16, 32, 64, 128],                          # conv filter counts
    "layer.0": [units for units in range(100, 1000, 100)],  # dense-layer widths
}

# Particle-swarm optimizer with explicitly tuned swarm dynamics.
opt_pso = ParticleSwarmOptimizer(
    inertia=0.4,
    cognitive_weight=0.7,
    social_weight=0.7,
    temp_weight=0.3,
    rand_rest_p=0.05,
)

# Register the search and run it for 10 iterations.
hyper = Hyperactive()
hyper.add_search(cnn, search_space, optimizer=opt_pso, n_iter=10)
hyper.run()
Example 4
    # CNN model
    # Search space in hyperactive's legacy "keras.<layer>.<index>" format:
    # each key names a model-building step, each value maps hyperparameter
    # names to candidate value lists/ranges.
    search_config = {
        "keras.compile.0": {"loss": ["categorical_crossentropy"], "optimizer": ["adam", "SGD"]},
        "keras.fit.0": {"epochs": [25], "batch_size": range(10, 51), "verbose": [1]},
        # First conv block — input images appear to be 77x77 RGB (input_shape).
        "keras.layers.Conv2D.1": {
            "filters": range(4, 101),
            "kernel_size": [2, 3, 4, 5, 6],
            "activation": ["relu"],
            "input_shape": [(77, 77, 3)],
        },
        "keras.layers.MaxPooling2D.2": {"pool_size": [(2, 2)]},
        # Second conv block (no input_shape — inferred from previous layer).
        "keras.layers.Conv2D.3": {
            "filters": range(4, 101),
            "kernel_size": [2, 3, 4, 5, 6],
            "activation": ["relu"],
        },
        "keras.layers.MaxPooling2D.4": {"pool_size": [(2, 2)]},
        "keras.layers.Flatten.5": {},
        #"keras.layers.Dense.6": {"units": range(512, 4097, 512), "activation": ["relu"]},
        "keras.layers.Dense.6": {"units": range(4, 1025), "activation": ["relu"]},
        "keras.layers.Dropout.7": {"rate": np.arange(0.1, 1.0, 0.1)},
        # Two-way softmax output — presumably a binary classification task.
        "keras.layers.Dense.8": {"units": [2], "activation": ["softmax"]},
    }

    # Time the search; cv=0.8 looks like a train/validation split ratio
    # rather than k-fold count — TODO confirm against the library version used.
    start = time.time()
    #Optimizer = RandomSearchOptimizer(search_config, n_iter=1, cv=5, metric="accuracy")
    Optimizer = ParticleSwarmOptimizer(search_config, n_iter=30, cv=0.8, metric="accuracy", n_part=10, w=0.9, c_k=2.0, c_s=2.0)
    Optimizer.fit(X_train, y_train)
    end = time.time()

    print("time: {}".format(end-start) + "[sec]")
Example 5
from sklearn.datasets import load_iris
from hyperactive import ParticleSwarmOptimizer

# Load the iris dataset into feature matrix X and target vector y.
iris_data = load_iris()
X, y = iris_data.data, iris_data.target

# Model + hyperparameter search space: tune a liblinear logistic regression.
search_config = {
    "sklearn.linear_model.LogisticRegression": {
        "penalty": ["l1", "l2"],
        "C": [1e-4, 1e-3, 1e-2, 1e-1, 0.5, 1.0, 5.0, 10.0, 15.0, 20.0, 25.0],
        "dual": [False],
        "solver": ["liblinear"],
        "multi_class": ["auto", "ovr"],
        "max_iter": range(300, 1000, 10),
    }
}

# Particle-swarm search: 100 iterations, two parallel jobs, 5-fold CV.
opt = ParticleSwarmOptimizer(search_config, n_iter=100, n_jobs=2, cv=5)

# Search for the best hyperparameters on the full dataset.
opt.fit(X, y)
        "n_estimators": [30],
        "max_depth": [6],
        "criterion": ["entropy"],
        "min_samples_split": [12],
        "min_samples_leaf": [16],
    }
}

# Optimizer = SimulatedAnnealingOptimizer(search_config, n_iter=100, n_jobs=4)
Optimizer = ParticleSwarmOptimizer(
    search_config,
    n_iter=10,               # number of iterations to perform
    metric="accuracy",
    n_jobs=1,
    cv=3,
    verbosity=1,
    random_state=None,
    warm_start=start_point,  # hyperparameter configuration to start from
    memory=True,             # cache explored evaluations to save compute time
    scatter_init=False,      # pick better initial positions via small-sample pretraining
    n_part=10,               # number of particles
    w=0.5,                   # inertia factor
    c_k=0.5,                 # cognitive factor
    c_s=0.9,                 # social factor
)

# Time the hyperparameter search on the training data.
t1 = time.time()
Optimizer.fit(X_train, y_train)
t2 = time.time()
print("time: {}".format(t2 - t1))

# Predict on held-out test data using the best configuration found.
prediction = Optimizer.predict(X_test)