def test_gridsearch_metrics_threads(n_threads=3):
    """Smoke-test GridOptimalSearchCV with several metrics on a thread pool.

    Cycles through the available parameter optimizers so that each metric
    is paired with a different optimizer; results are printed, not asserted.
    """
    data, labels, _weights = generate_classification_data(n_classes=2, distance=0.7)
    param_grid = OrderedDict({'reg_param': numpy.linspace(0, 1, 20)})

    from itertools import cycle

    # One optimizer per metric, reused round-robin.
    optimizer_pool = cycle([
        RegressionParameterOptimizer(param_grid=param_grid, n_evaluations=4, start_evaluations=2),
        SubgridParameterOptimizer(param_grid=param_grid, n_evaluations=4),
        RandomParameterOptimizer(param_grid=param_grid, n_evaluations=4),
    ])

    for metric in [RocAuc(), OptimalAMS(), OptimalSignificance(), log_loss]:
        folding_scorer = FoldingScorer(metric)
        classifier = SklearnClassifier(QDA())
        search = GridOptimalSearchCV(
            estimator=classifier,
            params_generator=next(optimizer_pool),
            scorer=folding_scorer,
            parallel_profile='threads-{}'.format(n_threads))
        search.fit(data, labels)
        print(search.params_generator.best_score_)
        print(search.params_generator.best_params_)
        search.params_generator.print_results()
def test_gridsearch_threads(n_threads=3):
    """Smoke-test threaded grid search with an AdaBoost base classifier.

    The scoring metric is chosen at random between OptimalAMS and RocAuc;
    the test only checks that fitting completes without error.
    """
    chosen_metric = numpy.random.choice([OptimalAMS(), RocAuc()])
    folding_scorer = FoldingScorer(chosen_metric)

    grid_param = OrderedDict({
        "n_estimators": [10, 20],
        "learning_rate": [0.1, 0.05],
        'features': [['column0', 'column1'], ['column0', 'column1', 'column2']],
    })
    param_generator = RegressionParameterOptimizer(grid_param, n_evaluations=4)

    base_classifier = SklearnClassifier(clf=AdaBoostClassifier())
    search = GridOptimalSearchCV(base_classifier, param_generator, folding_scorer,
                                 parallel_profile='threads-{}'.format(n_threads))

    data, labels, weights = generate_classification_data()
    search.fit(data, labels, sample_weight=weights)
def test_gridsearch_threads(n_threads=3):
    """Smoke-test threaded grid search over AdaBoost hyperparameters.

    NOTE(review): this duplicates an earlier definition of the same name in
    this file; only the last definition is effective at import time.
    """
    # Metric is picked at random; either choice should fit cleanly.
    scorer = FoldingScorer(numpy.random.choice([OptimalAMS(), RocAuc()]))

    search_space = OrderedDict({
        "n_estimators": [10, 20],
        "learning_rate": [0.1, 0.05],
        'features': [['column0', 'column1'], ['column0', 'column1', 'column2']],
    })

    optimizer = RegressionParameterOptimizer(search_space, n_evaluations=4)
    estimator = SklearnClassifier(clf=AdaBoostClassifier())
    profile = 'threads-{}'.format(n_threads)
    grid = GridOptimalSearchCV(estimator, optimizer, scorer, parallel_profile=profile)

    X, y, sample_weight = generate_classification_data()
    grid.fit(X, y, sample_weight=sample_weight)
def test_gridsearch_metrics_threads(n_threads=3):
    """Smoke-test GridOptimalSearchCV across metrics using threads.

    NOTE(review): this duplicates an earlier definition of the same name in
    this file; only the last definition is effective at import time.
    """
    X, y, sample_weight = generate_classification_data(n_classes=2, distance=0.7)
    param_grid = OrderedDict({'reg_param': numpy.linspace(0, 1, 20)})

    from itertools import cycle

    # Cycle so each metric below is paired with a different optimizer.
    optimizers = cycle([
        RegressionParameterOptimizer(param_grid=param_grid, n_evaluations=4, start_evaluations=2),
        SubgridParameterOptimizer(param_grid=param_grid, n_evaluations=4),
        RandomParameterOptimizer(param_grid=param_grid, n_evaluations=4),
    ])

    metrics = [RocAuc(), OptimalAMS(), OptimalSignificance(), log_loss]
    for metric in metrics:
        grid = GridOptimalSearchCV(
            estimator=SklearnClassifier(QuadraticDiscriminantAnalysis()),
            params_generator=next(optimizers),
            scorer=FoldingScorer(metric),
            parallel_profile='threads-{}'.format(n_threads))
        grid.fit(X, y)
        generator = grid.params_generator
        print(generator.best_score_)
        print(generator.best_params_)
        generator.print_results()
# Grid search over GradientBoostingClassifier hyperparameters on the MAGIC dataset.
# Fixes: Python-2 `print` statements converted to print() calls (the rest of the
# file uses the function form); imports hoisted above their first use; dead
# commented-out code removed.
import numpy
import numexpr
import pandas

from rep import utils
from sklearn.ensemble import GradientBoostingClassifier
from rep.report.metrics import RocAuc
from rep.metaml import GridOptimalSearchCV, FoldingScorer, RandomParameterOptimizer
from rep.estimators import SklearnClassifier, TMVAClassifier, XGBoostRegressor

print("Downloaded magic04.data")

# Load the dataset; `columns` must be defined earlier in the file — TODO confirm.
# The 'g' column is the class label: 1 for gamma ('g'), 0 otherwise.
data = pandas.read_csv('toy_datasets/magic04.data', names=columns)
labels = numpy.array(data['g'] == 'g', dtype=int)
data = data.drop('g', axis=1)

# define grid parameters
grid_param = {}
grid_param['learning_rate'] = [0.2, 0.1, 0.05, 0.02, 0.01]
grid_param['max_depth'] = [2, 3, 4, 5]

# use random hyperparameter optimization algorithm
generator = RandomParameterOptimizer(grid_param)

# define folding scorer: 3 folds, all 3 checked per candidate
scorer = FoldingScorer(RocAuc(), folds=3, fold_checks=3)

estimator = SklearnClassifier(GradientBoostingClassifier(n_estimators=30))

grid_finder = GridOptimalSearchCV(estimator, generator, scorer, parallel_profile="default")
print("start grid search")
grid_finder.fit(data, labels)
grid_finder.params_generator.print_results()

# RandomParameterOptimizer performs 10 evaluations by default.
assert 10 == grid_finder.params_generator.n_evaluations, "oops"
# Grid search over GradientBoostingClassifier hyperparameters (near-duplicate of
# the preceding cell). Fixes: Python-2 `print` statements converted to print()
# calls; imports hoisted above their first use; dead commented-out code removed.
import numpy
import numexpr
import pandas

from rep import utils
from sklearn.ensemble import GradientBoostingClassifier
from rep.report.metrics import RocAuc
from rep.metaml import GridOptimalSearchCV, FoldingScorer, RandomParameterOptimizer
from rep.estimators import SklearnClassifier, TMVAClassifier, XGBoostRegressor

# `data` and `labels` come from the preceding cell — TODO confirm ordering.
data = data.drop('g', axis=1)

# define grid parameters
grid_param = {}
grid_param['learning_rate'] = [0.2, 0.1, 0.05, 0.02, 0.01]
grid_param['max_depth'] = [2, 3, 4, 5]

# use random hyperparameter optimization algorithm
generator = RandomParameterOptimizer(grid_param)

# define folding scorer: 3 folds, all 3 checked per candidate
scorer = FoldingScorer(RocAuc(), folds=3, fold_checks=3)

estimator = SklearnClassifier(GradientBoostingClassifier(n_estimators=30))

grid_finder = GridOptimalSearchCV(estimator, generator, scorer, parallel_profile="default")
print("start grid search")
grid_finder.fit(data, labels)
grid_finder.params_generator.print_results()

# RandomParameterOptimizer performs 10 evaluations by default.
assert 10 == grid_finder.params_generator.n_evaluations, "oops"