Code example #1
def test_lightgbm():
    from hyperactive import RandomSearchOptimizer

    opt = RandomSearchOptimizer(search_config, 1)
    opt.fit(X, y)
    opt.predict(X)
    opt.score(X, y)
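None of the test snippets on this page define search_config, X, y or the train/test split themselves; they rely on module-level fixtures from the surrounding test file. A rough sketch of such a setup is shown below (the dataset, the estimator and the parameter ranges are illustrative assumptions, not taken from the original tests; the search_config dict follows the same import-path key convention visible in code example #21):

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

# toy data shared by the tests (assumed; the original fixture is not shown)
iris_data = load_iris()
X, y = iris_data.data, iris_data.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

# model and hyperparameter search space in Hyperactive's dict format,
# keyed by the import path of the estimator
search_config = {
    "sklearn.tree.DecisionTreeClassifier": {
        "max_depth": range(1, 11),
        "min_samples_split": range(2, 11),
    }
}

With these names in scope, the test_* functions on this page can be run as written; the keras-specific tests would additionally need a keras-style search_config of the kind shown in code example #22.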
Code example #2
def test_keras_losses():
    from hyperactive import RandomSearchOptimizer

    ml_losses = [
        "mean_squared_error",
        "mean_absolute_error",
        "mean_absolute_percentage_error",
        "mean_squared_logarithmic_error",
        "squared_hinge",
        "hinge",
        # "categorical_hinge",
        "logcosh",
        "categorical_crossentropy",
        # "sparse_categorical_crossentropy",
        "binary_crossentropy",
        "kullback_leibler_divergence",
        "poisson",
        "cosine_proximity",
    ]

    for loss in ml_losses:
        opt = RandomSearchOptimizer(search_config, 1, metric=loss)
        assert opt._config_.metric == loss
        opt.fit(X, y)
        assert opt._config_.metric == loss
        opt.predict(X)
        assert opt._config_.metric == loss
        opt.score(X, y)
        assert opt._config_.metric == loss
Code example #3
def test_lightgbm_classification():
    from hyperactive import RandomSearchOptimizer

    ml_scores = [
        "accuracy_score",
        "balanced_accuracy_score",
        "average_precision_score",
        "brier_score_loss",
        "f1_score",
        "log_loss",
        "precision_score",
        "recall_score",
        "jaccard_score",
        "roc_auc_score",
    ]

    for score in ml_scores:
        opt = RandomSearchOptimizer(search_config, 1, metric=score)
        assert opt._config_.metric == score
        opt.fit(X, y)
        assert opt._config_.metric == score
        opt.predict(X)
        assert opt._config_.metric == score
        opt.score(X, y)
        assert opt._config_.metric == score
Code example #4
def test_all_methods():
    from hyperactive import RandomSearchOptimizer

    Optimizer = RandomSearchOptimizer(search_config, n_iter=10, verbosity=0)
    Optimizer.fit(X_train, y_train)
    Optimizer.predict(X_test)
    Optimizer.score(X_test, y_test)
    Optimizer.export("test")
Code example #5
def test_lightgbm_verbosity():
    from hyperactive import RandomSearchOptimizer

    verbosity_list = [0, 1, 2]
    for verbosity in verbosity_list:
        opt = RandomSearchOptimizer(search_config, 1, verbosity=verbosity)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #6
def test_lightgbm_random_state():
    from hyperactive import RandomSearchOptimizer

    random_state_list = [None, 0, 1, 2]
    for random_state in random_state_list:
        opt = RandomSearchOptimizer(search_config, 1, random_state=random_state)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #7
def test_lightgbm_memory():
    from hyperactive import RandomSearchOptimizer

    memory_list = [False, True]
    for memory in memory_list:
        opt = RandomSearchOptimizer(search_config, 1, memory=memory)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #8
def test_lightgbm_scatter_init():
    from hyperactive import RandomSearchOptimizer

    scatter_init_list = [False, 2, 3, 4]
    for scatter_init in scatter_init_list:
        opt = RandomSearchOptimizer(search_config, 1, scatter_init=scatter_init)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #9
def test_lightgbm_n_iter():
    from hyperactive import RandomSearchOptimizer

    n_iter_list = [0, 1, 3, 10]
    for n_iter in n_iter_list:
        opt = RandomSearchOptimizer(search_config, n_iter)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #10
def test_lightgbm_cv():
    from hyperactive import RandomSearchOptimizer

    cv_list = [0.1, 0.5, 0.9, 2, 4]
    for cv in cv_list:
        opt = RandomSearchOptimizer(search_config, 1, cv=cv)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #11
def test_xgboost_warm_start():
    from hyperactive import RandomSearchOptimizer

    warm_start = {"sklearn.tree.DecisionTreeClassifier": {"max_depth": [1]}}

    warm_start_list = [None, warm_start]
    for warm_start in warm_start_list:
        opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #12
def test_sklearn_cv():
    from hyperactive import RandomSearchOptimizer

    cv_list = [0.1, 0.5, 0.9, 2, 4]
    for cv in cv_list:
        opt = RandomSearchOptimizer(search_config, 1, cv=cv)
        assert opt._config_.cv == cv
        opt.fit(X, y)
        assert opt._config_.cv == cv
        opt.predict(X)
        assert opt._config_.cv == cv
        opt.score(X, y)
        assert opt._config_.cv == cv
Code example #13
def test_sklearn_n_iter():
    from hyperactive import RandomSearchOptimizer

    n_iter_list = [0, 1, 3, 10]
    for n_iter in n_iter_list:
        opt = RandomSearchOptimizer(search_config, n_iter)
        assert opt._config_.n_iter == n_iter
        opt.fit(X, y)
        assert opt._config_.n_iter == n_iter
        opt.predict(X)
        assert opt._config_.n_iter == n_iter
        opt.score(X, y)
        assert opt._config_.n_iter == n_iter
Code example #14
def test_sklearn_memory():
    from hyperactive import RandomSearchOptimizer

    memory_list = [False, True]
    for memory in memory_list:
        opt = RandomSearchOptimizer(search_config, 1, memory=memory)
        assert opt._config_.memory == memory
        opt.fit(X, y)
        assert opt._config_.memory == memory
        opt.predict(X)
        assert opt._config_.memory == memory
        opt.score(X, y)
        assert opt._config_.memory == memory
Code example #15
def test_sklearn_random_state():
    from hyperactive import RandomSearchOptimizer

    random_state_list = [None, 0, 1, 2]
    for random_state in random_state_list:
        opt = RandomSearchOptimizer(search_config,
                                    1,
                                    random_state=random_state)
        assert opt._config_.random_state == random_state
        opt.fit(X, y)
        assert opt._config_.random_state == random_state
        opt.predict(X)
        assert opt._config_.random_state == random_state
        opt.score(X, y)
        assert opt._config_.random_state == random_state
Code example #16
def test_sklearn_scatter_init():
    from hyperactive import RandomSearchOptimizer

    scatter_init_list = [False, 2, 3, 4]
    for scatter_init in scatter_init_list:
        opt = RandomSearchOptimizer(search_config,
                                    1,
                                    scatter_init=scatter_init)
        assert opt._config_.scatter_init == scatter_init
        opt.fit(X, y)
        assert opt._config_.scatter_init == scatter_init
        opt.predict(X)
        assert opt._config_.scatter_init == scatter_init
        opt.score(X, y)
        assert opt._config_.scatter_init == scatter_init
Code example #17
def test_lightgbm_warm_start():
    from hyperactive import RandomSearchOptimizer

    warm_start = {
        "lightgbm.LGBMClassifier": {
            "boosting_type": ["gbdt"],
            "num_leaves": [5],
            "learning_rate": [0.05],
        }
    }

    warm_start_list = [None, warm_start]
    for warm_start in warm_start_list:
        opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #18
def test_catboost_warm_start():
    from hyperactive import RandomSearchOptimizer

    warm_start = {
        "catboost.CatBoostClassifier": {
            "iterations": [3],
            "learning_rate": [1],
            "depth": [3],
            "verbose": [0],
        }
    }

    warm_start_list = [None, warm_start]
    for warm_start in warm_start_list:
        opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
Code example #19
def test_keras_scores():
    from hyperactive import RandomSearchOptimizer

    ml_scores = [
        "accuracy",
        "binary_accuracy",
        "categorical_accuracy",
        "sparse_categorical_accuracy",
        "top_k_categorical_accuracy",
        "sparse_top_k_categorical_accuracy",
    ]

    for score in ml_scores:
        opt = RandomSearchOptimizer(search_config, 1, metric=score)
        assert opt._config_.metric == score
        opt.fit(X, y)
        assert opt._config_.metric == score
        opt.predict(X)
        assert opt._config_.metric == score
        opt.score(X, y)
        assert opt._config_.metric == score
Code example #20
def test_lightgbm_regression():
    from hyperactive import RandomSearchOptimizer

    ml_losses = [
        "explained_variance_score",
        "max_error",
        "mean_absolute_error",
        "mean_squared_error",
        "mean_squared_log_error",
        "median_absolute_error",
        "r2_score",
    ]

    for loss in ml_losses:
        opt = RandomSearchOptimizer(search_config, 1, metric=loss)
        assert opt._config_.metric == loss
        opt.fit(X, y)
        assert opt._config_.metric == loss
        opt.predict(X)
        assert opt._config_.metric == loss
        opt.score(X, y)
        assert opt._config_.metric == loss
Code example #21
File: lightgbm_.py  Project: weihong1021/LDWPSO-CNN
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from hyperactive import RandomSearchOptimizer

# load the breast-cancer dataset
cancer_data = load_breast_cancer()
X = cancer_data.data
y = cancer_data.target

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

# this defines the model and hyperparameter search space
search_config = {
    "lightgbm.LGBMClassifier": {
        "boosting_type": ["gbdt"],
        "num_leaves": range(2, 20),
        "learning_rate": np.arange(0.01, 0.1, 0.01),
        "feature_fraction": np.arange(0.1, 0.95, 0.1),
        "bagging_fraction": np.arange(0.1, 0.95, 0.1),
        "bagging_freq": range(2, 10, 1),
    }
}

opt = RandomSearchOptimizer(search_config, n_iter=10, n_jobs=4, cv=3)

# search for the best hyperparameters on the training data
opt.fit(X_train, y_train)

# predict from test data
prediction = opt.predict(X_test)

# calculate score
score = opt.score(X_test, y_test)

print("\ntest score of best model:", score)
Code example #22
        "kernel_size": [3],
        "activation": ["relu"],
    },
    "keras.layers.MaxPooling2D.4": {
        "pool_size": [(2, 2)]
    },
    "keras.layers.Flatten.5": {},
    "keras.layers.Dense.6": {
        "units": range(10, 200, 10),
        "activation": ["softmax"]
    },
    "keras.layers.Dropout.7": {
        "rate": np.arange(0.2, 0.8, 0.1)
    },
    "keras.layers.Dense.8": {
        "units": [10],
        "activation": ["softmax"]
    },
}

Optimizer = RandomSearchOptimizer(search_config, n_iter=10, metric="accuracy")

# search best hyperparameter for given data
Optimizer.fit(X_train, y_train)

# predict from test data
prediction = Optimizer.predict(X_test)

# calculate score
score = Optimizer.score(X_test, y_test)
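Code example #22 starts partway through its search_config, so the imports and data preparation it relies on are not part of the excerpt. Based on the Conv2D/MaxPooling2D layers and the final 10-unit softmax output, one plausible stand-in preamble is an MNIST-style setup; this is an assumption for illustration, not the original project's code:

import numpy as np  # needed for the np.arange calls inside search_config
from keras.datasets import mnist
from keras.utils import to_categorical
from hyperactive import RandomSearchOptimizer

# assumed data preparation: 28x28 grayscale images, 10 classes
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(-1, 28, 28, 1).astype("float32") / 255
X_test = X_test.reshape(-1, 28, 28, 1).astype("float32") / 255
y_train = to_categorical(y_train, 10)
y_test = to_categorical(y_test, 10)

The search_config entries omitted from the excerpt would cover the earlier layers of the network, following the same "keras.layers.<LayerName>.<position>" key convention visible in the shown portion.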