def test_lightgbm_classification():
    from hyperactive import RandomSearchOptimizer

    ml_scores = [
        "accuracy_score",
        "balanced_accuracy_score",
        "average_precision_score",
        "brier_score_loss",
        "f1_score",
        "log_loss",
        "precision_score",
        "recall_score",
        "jaccard_score",
        "roc_auc_score",
    ]

    for score in ml_scores:
        opt = RandomSearchOptimizer(search_config, 1, metric=score)
        assert opt._config_.metric == score
        opt.fit(X, y)
        assert opt._config_.metric == score
        opt.predict(X)
        assert opt._config_.metric == score
        opt.score(X, y)
        assert opt._config_.metric == score
def test_keras_losses():
    from hyperactive import RandomSearchOptimizer

    ml_losses = [
        "mean_squared_error",
        "mean_absolute_error",
        "mean_absolute_percentage_error",
        "mean_squared_logarithmic_error",
        "squared_hinge",
        "hinge",
        # "categorical_hinge",
        "logcosh",
        "categorical_crossentropy",
        # "sparse_categorical_crossentropy",
        "binary_crossentropy",
        "kullback_leibler_divergence",
        "poisson",
        "cosine_proximity",
    ]

    for loss in ml_losses:
        opt = RandomSearchOptimizer(search_config, 1, metric=loss)
        assert opt._config_.metric == loss
        opt.fit(X, y)
        assert opt._config_.metric == loss
        opt.predict(X)
        assert opt._config_.metric == loss
        opt.score(X, y)
        assert opt._config_.metric == loss
def test_RandomSearchOptimizer():
    from hyperactive import RandomSearchOptimizer

    opt0 = RandomSearchOptimizer(
        search_config,
        n_iter_0,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=1,
        warm_start=warm_start,
    )
    opt0.fit(X, y)

    opt1 = RandomSearchOptimizer(
        search_config,
        n_iter_1,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=n_jobs,
        warm_start=warm_start,
    )
    opt1.fit(X, y)

    assert opt0.score_best < opt1.score_best
def test_lightgbm():
    from hyperactive import RandomSearchOptimizer

    opt = RandomSearchOptimizer(search_config, 1)
    opt.fit(X, y)
    opt.predict(X)
    opt.score(X, y)
def test_warm_start():
    from hyperactive import RandomSearchOptimizer

    opt = RandomSearchOptimizer(search_config, 1, n_jobs=1, warm_start=warm_start)
    opt.fit(X, y)
def test_verbosity():
    from hyperactive import RandomSearchOptimizer

    opt0 = RandomSearchOptimizer(search_config, 1, verbosity=0)
    opt0.fit(X, y)

    opt1 = RandomSearchOptimizer(search_config, 1, verbosity=1)
    opt1.fit(X, y)
def test_all_methods():
    from hyperactive import RandomSearchOptimizer

    opt = RandomSearchOptimizer(search_config, n_iter=10, verbosity=0)
    opt.fit(X_train, y_train)
    opt.predict(X_test)
    opt.score(X_test, y_test)
    opt.export("test")
def test_memory():
    from hyperactive import RandomSearchOptimizer

    opt0 = RandomSearchOptimizer(search_config, 1, memory=True)
    opt0.fit(X, y)

    opt1 = RandomSearchOptimizer(search_config, 1, memory=False)
    opt1.fit(X, y)
def test_lightgbm_random_state():
    from hyperactive import RandomSearchOptimizer

    random_state_list = [None, 0, 1, 2]

    for random_state in random_state_list:
        opt = RandomSearchOptimizer(search_config, 1, random_state=random_state)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_lightgbm_memory():
    from hyperactive import RandomSearchOptimizer

    memory_list = [False, True]

    for memory in memory_list:
        opt = RandomSearchOptimizer(search_config, 1, memory=memory)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_lightgbm_scatter_init():
    from hyperactive import RandomSearchOptimizer

    scatter_init_list = [False, 2, 3, 4]

    for scatter_init in scatter_init_list:
        opt = RandomSearchOptimizer(search_config, 1, scatter_init=scatter_init)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_lightgbm_n_iter():
    from hyperactive import RandomSearchOptimizer

    n_iter_list = [0, 1, 3, 10]

    for n_iter in n_iter_list:
        opt = RandomSearchOptimizer(search_config, n_iter)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_lightgbm_cv():
    from hyperactive import RandomSearchOptimizer

    cv_list = [0.1, 0.5, 0.9, 2, 4]

    for cv in cv_list:
        opt = RandomSearchOptimizer(search_config, 1, cv=cv)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_lightgbm_verbosity():
    from hyperactive import RandomSearchOptimizer

    verbosity_list = [0, 1, 2]

    for verbosity in verbosity_list:
        opt = RandomSearchOptimizer(search_config, 1, verbosity=verbosity)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_sklearn_warm_start():
    from hyperactive import RandomSearchOptimizer

    warm_start = {"sklearn.tree.DecisionTreeClassifier": {"max_depth": [1]}}
    warm_start_list = [None, warm_start]

    for warm_start in warm_start_list:
        opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_sklearn_n_iter():
    from hyperactive import RandomSearchOptimizer

    n_iter_list = [0, 1, 3, 10]

    for n_iter in n_iter_list:
        opt = RandomSearchOptimizer(search_config, n_iter)
        assert opt._config_.n_iter == n_iter
        opt.fit(X, y)
        assert opt._config_.n_iter == n_iter
        opt.predict(X)
        assert opt._config_.n_iter == n_iter
        opt.score(X, y)
        assert opt._config_.n_iter == n_iter
def test_sklearn_cv():
    from hyperactive import RandomSearchOptimizer

    cv_list = [0.1, 0.5, 0.9, 2, 4]

    for cv in cv_list:
        opt = RandomSearchOptimizer(search_config, 1, cv=cv)
        assert opt._config_.cv == cv
        opt.fit(X, y)
        assert opt._config_.cv == cv
        opt.predict(X)
        assert opt._config_.cv == cv
        opt.score(X, y)
        assert opt._config_.cv == cv
def test_sklearn_memory():
    from hyperactive import RandomSearchOptimizer

    memory_list = [False, True]

    for memory in memory_list:
        opt = RandomSearchOptimizer(search_config, 1, memory=memory)
        assert opt._config_.memory == memory
        opt.fit(X, y)
        assert opt._config_.memory == memory
        opt.predict(X)
        assert opt._config_.memory == memory
        opt.score(X, y)
        assert opt._config_.memory == memory
def test_scatter_init_and_warm_start():
    from hyperactive import RandomSearchOptimizer

    opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start, scatter_init=10)
    opt.fit(X, y)

    opt = RandomSearchOptimizer(search_config, 2, warm_start=warm_start, scatter_init=10)
    opt.fit(X, y)
def test_sklearn_random_state():
    from hyperactive import RandomSearchOptimizer

    random_state_list = [None, 0, 1, 2]

    for random_state in random_state_list:
        opt = RandomSearchOptimizer(search_config, 1, random_state=random_state)
        assert opt._config_.random_state == random_state
        opt.fit(X, y)
        assert opt._config_.random_state == random_state
        opt.predict(X)
        assert opt._config_.random_state == random_state
        opt.score(X, y)
        assert opt._config_.random_state == random_state
def test_sklearn_scatter_init():
    from hyperactive import RandomSearchOptimizer

    scatter_init_list = [False, 2, 3, 4]

    for scatter_init in scatter_init_list:
        opt = RandomSearchOptimizer(search_config, 1, scatter_init=scatter_init)
        assert opt._config_.scatter_init == scatter_init
        opt.fit(X, y)
        assert opt._config_.scatter_init == scatter_init
        opt.predict(X)
        assert opt._config_.scatter_init == scatter_init
        opt.score(X, y)
        assert opt._config_.scatter_init == scatter_init
def test_lightgbm_warm_start():
    from hyperactive import RandomSearchOptimizer

    warm_start = {
        "lightgbm.LGBMClassifier": {
            "boosting_type": ["gbdt"],
            "num_leaves": [5],
            "learning_rate": [0.05],
        }
    }
    warm_start_list = [None, warm_start]

    for warm_start in warm_start_list:
        opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_catboost_warm_start():
    from hyperactive import RandomSearchOptimizer

    warm_start = {
        "catboost.CatBoostClassifier": {
            "iterations": [3],
            "learning_rate": [1],
            "depth": [3],
            "verbose": [0],
        }
    }
    warm_start_list = [None, warm_start]

    for warm_start in warm_start_list:
        opt = RandomSearchOptimizer(search_config, 1, warm_start=warm_start)
        opt.fit(X, y)
        opt.predict(X)
        opt.score(X, y)
def test_keras_scores():
    from hyperactive import RandomSearchOptimizer

    ml_scores = [
        "accuracy",
        "binary_accuracy",
        "categorical_accuracy",
        "sparse_categorical_accuracy",
        "top_k_categorical_accuracy",
        "sparse_top_k_categorical_accuracy",
    ]

    for score in ml_scores:
        opt = RandomSearchOptimizer(search_config, 1, metric=score)
        assert opt._config_.metric == score
        opt.fit(X, y)
        assert opt._config_.metric == score
        opt.predict(X)
        assert opt._config_.metric == score
        opt.score(X, y)
        assert opt._config_.metric == score
def test_lightgbm_regression():
    from hyperactive import RandomSearchOptimizer

    ml_losses = [
        "explained_variance_score",
        "max_error",
        "mean_absolute_error",
        "mean_squared_error",
        "mean_squared_log_error",
        "median_absolute_error",
        "r2_score",
    ]

    for loss in ml_losses:
        opt = RandomSearchOptimizer(search_config, 1, metric=loss)
        assert opt._config_.metric == loss
        opt.fit(X, y)
        assert opt._config_.metric == loss
        opt.predict(X)
        assert opt._config_.metric == loss
        opt.score(X, y)
        assert opt._config_.metric == loss
def test_random_state():
    from hyperactive import RandomSearchOptimizer

    opt0 = RandomSearchOptimizer(search_config, 1, random_state=False)
    opt0.fit(X, y)

    opt1 = RandomSearchOptimizer(search_config, 1, random_state=0)
    opt1.fit(X, y)

    opt2 = RandomSearchOptimizer(search_config, 1, random_state=1)
    opt2.fit(X, y)
def test_positional_args():
    from hyperactive import RandomSearchOptimizer

    opt0 = RandomSearchOptimizer(search_config, 1, random_state=False)
    opt0.fit(X, y)

    opt1 = RandomSearchOptimizer(search_config, n_iter=1, random_state=1)
    opt1.fit(X, y)

    opt2 = RandomSearchOptimizer(search_config=search_config, n_iter=1, random_state=1)
    opt2.fit(X, y)
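# ---------------------------------------------------------------------------
# The test functions above reference module-level names (X, y, X_train, X_test,
# y_train, y_test, search_config, warm_start, cv, n_jobs, n_iter_0, n_iter_1,
# random_state) whose definitions are not part of this excerpt. Below is a
# minimal sketch of what that shared setup could look like; the actual test
# modules may define these differently for each library under test.

import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

data = load_breast_cancer()
X, y = data.data, data.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

search_config = {
    "sklearn.tree.DecisionTreeClassifier": {
        "max_depth": range(1, 21),
        "min_samples_split": range(2, 21),
    }
}
warm_start = {"sklearn.tree.DecisionTreeClassifier": {"max_depth": [1]}}

n_iter_0, n_iter_1 = 3, 10
random_state = 0
cv = 2
n_jobs = 1
# ---------------------------------------------------------------------------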
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from hyperactive import RandomSearchOptimizer

cancer_data = load_breast_cancer()
X = cancer_data.data
y = cancer_data.target

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

# this defines the model and hyperparameter search space
search_config = {
    "lightgbm.LGBMClassifier": {
        "boosting_type": ["gbdt"],
        "num_leaves": range(2, 20),
        "learning_rate": np.arange(0.01, 0.1, 0.01),
        "feature_fraction": np.arange(0.1, 0.95, 0.1),
        "bagging_fraction": np.arange(0.1, 0.95, 0.1),
        "bagging_freq": range(2, 10, 1),
    }
}

opt = RandomSearchOptimizer(search_config, n_iter=10, n_jobs=4, cv=3)

# search best hyperparameters on the training data
opt.fit(X_train, y_train)

# predict from test data
prediction = opt.predict(X_test)

# calculate score
score = opt.score(X_test, y_test)
print("\ntest score of best model:", score)
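# The fitted optimizer can also be saved to disk, as exercised by
# test_all_methods above; the filename here is only an example.
opt.export("lightgbm_breast_cancer")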
from sklearn.datasets import load_iris
from hyperactive import RandomSearchOptimizer

iris_data = load_iris()
X, y = iris_data.data, iris_data.target

search_config = {
    "sklearn.ensemble.RandomForestClassifier": {"n_estimators": range(10, 100, 10)}
}

opt = RandomSearchOptimizer(search_config, n_iter=10)
opt.fit(X, y)
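# As in the other examples, the fitted optimizer also exposes predict and
# score; the training data is reused here purely for illustration.
prediction = opt.predict(X)
score = opt.score(X, y)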
"kernel_size": [3], "activation": ["relu"], }, "keras.layers.MaxPooling2D.4": { "pool_size": [(2, 2)] }, "keras.layers.Flatten.5": {}, "keras.layers.Dense.6": { "units": range(10, 200, 10), "activation": ["softmax"] }, "keras.layers.Dropout.7": { "rate": np.arange(0.2, 0.8, 0.1) }, "keras.layers.Dense.8": { "units": [10], "activation": ["softmax"] }, } Optimizer = RandomSearchOptimizer(search_config, n_iter=10, metric="accuracy") # search best hyperparameter for given data Optimizer.fit(X_train, y_train) # predict from test data prediction = Optimizer.predict(X_test) # calculate score score = Optimizer.score(X_test, y_test)