def test_pybrain_SoftMax_Tanh():
    """Check PyBrain estimators with mixed Tanh/Softmax hidden layers."""
    clf = PyBrainClassifier(epochs=10, layers=[5, 2],
                            hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                            use_rprop=True)
    check_classifier(clf, **classifier_params)
    reg = PyBrainRegressor(epochs=2, layers=[10, 5, 2],
                           hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer'])
    check_regression(reg, **regressor_params)
def test_theanets_regression():
    """Check theanets regression with an impatient rmsprop trainer, plain and scaled."""
    check_regression(
        TheanetsRegressor(layers=[3], trainers=[dict(algo='rmsprop', **impatient)]),
        **regressor_params)
    # same trainer, but with explicit feature scaling in front of the net
    check_regression(
        TheanetsRegressor(scaler=StandardScaler(),
                          trainers=[dict(algo='rmsprop', **impatient)]),
        **regressor_params)
def test_sklearn_regression():
    """Check the sklearn wrapper over two ensemble regressors."""
    # AdaBoost path is checked with sample weights
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    # GradientBoosting path is checked without weights
    gb = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(gb, supports_weight=False)
def test_tmva():
    """Check TMVA classifier variants and the TMVA regressor.

    The regressor is run twice to detect memory leaks between fits.
    """
    # Common flags: TMVA wrappers expose neither staged predictions
    # nor feature importances.
    checks = dict(check_instance=True, has_staged_pp=False, has_importances=False)
    check_classifier(TMVAClassifier(), **checks)
    cl = TMVAClassifier(method='kSVM', Gamma=0.25, Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl, **checks)
    cl = TMVAClassifier(method='kCuts', FitMethod='GA', EffMethod='EffSel',
                        sigmoid_function='sig_eff=0.9')
    check_classifier(cl, **checks)
    # check regressor; run twice to check for a memory leak.
    for _ in range(2):  # idiomatic: loop index was unused
        check_regression(TMVARegressor(), check_instance=True,
                         has_staged_predictions=False, has_importances=False)
def test_models():
    """Check cache wrappers over SGD models; repeated runs exercise the cache."""
    for _ in range(3):
        cached_clf = CacheClassifier('clf', SGDClassifier(loss='log'))
        check_classifier(cached_clf, has_staged_pp=False, has_importances=False)
        cached_reg = CacheRegressor('reg', SGDRegressor())
        check_regression(cached_reg, has_staged_predictions=False,
                         has_importances=False)
    # drop everything written by the cache wrappers
    cache_helper.clear_cache()
def test_theanets_multi_regression():
    """Check theanets regression with several target columns."""
    trainer = {'algo': 'rmsprop', 'min_improvement': 0.1}
    check_regression(TheanetsRegressor(layers=[13], trainers=[trainer]),
                     n_targets=3, **regressor_params)
def test_theanets_regression():
    """Check theanets regression: custom rmsprop trainer, then default net with scaling."""
    trainer = {'algo': 'rmsprop', 'learning_rate': 0.1}
    check_regression(TheanetsRegressor(layers=[3], trainers=[trainer]),
                     **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()), **regressor_params)
def test_theanets_regression():
    """Check theanets regression: tuned rmsprop trainer, then default net with scaling."""
    trainer = dict(optimize='rmsprop', min_improvement=0.1)
    check_regression(TheanetsRegressor(layers=[20], trainers=[trainer]),
                     **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()), **regressor_params)
def test_theanets_regression():
    """Check theanets regression using an impatient rmsprop trainer."""
    # small net, no scaling
    reg = TheanetsRegressor(layers=[3], trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(reg, **regressor_params)
    # default layers, with StandardScaler preprocessing
    scaled = TheanetsRegressor(scaler=StandardScaler(),
                               trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(scaled, **regressor_params)
def test_pybrain_Tanh():
    """Check PyBrain estimators with a single Tanh hidden layer."""
    # neither importances nor sample weights are supported by the wrappers
    flags = dict(has_importances=False, supports_weight=False)
    check_classifier(PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer']),
                     has_staged_pp=False, **flags)
    check_regression(PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer']),
                     has_staged_predictions=False, **flags)
def test_pybrain_Linear_MDLSTM():
    """Check PyBrain estimators mixing Linear and MDLSTM hidden layers."""
    clf = PyBrainClassifier(epochs=2, layers=[10, 2],
                            hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_classifier(clf, **classifier_params)
    reg = PyBrainRegressor(epochs=3, layers=[10, 2],
                           hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_regression(reg, **regressor_params)
def test_theanets_regression():
    """Check theanets regression; weights, staged predictions and importances unsupported."""
    flags = dict(supports_weight=False, has_staged_predictions=False,
                 has_importances=False)
    check_regression(
        TheanetsRegressor(layers=[20], trainers=[{'optimize': 'rmsprop'}]), **flags)
    check_regression(TheanetsRegressor(), **flags)
def test_pybrain_SoftMax_Tanh():
    """Check PyBrain nets mixing Tanh and Softmax layers (rprop for the classifier)."""
    check_classifier(
        PyBrainClassifier(epochs=10, layers=[5, 2], use_rprop=True,
                          hiddenclass=['TanhLayer', 'SoftmaxLayer']),
        **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=2, layers=[10, 5, 2],
                         hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer']),
        **regressor_params)
def test_neurolab_regression():
    """Check neurolab regressors: default net with rprop, then a single-layer net."""
    flags = dict(supports_weight=False, has_importances=False,
                 has_staged_predictions=False)
    reg = NeurolabRegressor(layers=[], show=0, epochs=N_EPOCHS_REGR,
                            trainf=nl.train.train_rprop)
    check_regression(reg, **flags)
    reg = NeurolabRegressor(net_type='single-layer', cn='auto', show=0,
                            epochs=N_EPOCHS_REGR, trainf=nl.train.train_delta)
    check_regression(reg, **flags)
def test_tmva():
    """Check TMVA classifier variants and the TMVA regressor.

    The regressor is run twice to detect memory leaks between fits.
    """
    # keep TMVA quiet during the tests
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    # Common flags: TMVA wrappers expose neither staged predictions
    # nor feature importances.
    checks = dict(check_instance=True, has_staged_pp=False, has_importances=False)
    cl = TMVAClassifier(factory_options=factory_options, method='kBDT', NTrees=10)
    check_classifier(cl, **checks)
    cl = TMVAClassifier(factory_options=factory_options, method='kSVM',
                        Gamma=0.25, Tol=0.001, sigmoid_function='identity')
    check_classifier(cl, **checks)
    cl = TMVAClassifier(factory_options=factory_options, method='kCuts',
                        FitMethod='GA', EffMethod='EffSel',
                        sigmoid_function='sig_eff=0.9')
    check_classifier(cl, **checks)
    # check regressor; run twice to check for a memory leak.
    for _ in range(2):  # idiomatic: loop index was unused
        check_regression(
            TMVARegressor(factory_options=factory_options, method='kBDT', NTrees=10),
            check_instance=True, has_staged_predictions=False, has_importances=False)
def test_xgboost():
    """Check XGBoost wrappers on 2- and 4-class problems, plus regression."""
    for n_classes in (2, 4):
        check_classifier(XGBoostClassifier(n_estimators=20), n_classes=n_classes)
    check_regression(XGBoostRegressor(n_estimators=20))
def test_theanets_regression():
    """Check theanets regression with and without an explicit rmsprop trainer."""
    common = dict(supports_weight=False, has_staged_predictions=False,
                  has_importances=False)
    tuned = TheanetsRegressor(layers=[20], trainers=[dict(optimize='rmsprop')])
    check_regression(tuned, **common)
    check_regression(TheanetsRegressor(), **common)
def test_sklearn_regression():
    """Check wrapped sklearn regressors; GradientBoosting is run without sample weights."""
    # AdaBoost path exercises sample-weight support
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    gb = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(gb, supports_weight=False)
def test_folding_regressor_with_check_model():
    """Run the generic regression checks on a 2-fold FoldingRegressor."""
    base = SklearnRegressor(GradientBoostingRegressor(n_estimators=4))
    folding_regressor = FoldingRegressor(base, n_folds=2)
    check_regression(folding_regressor, True, True, True)
def test_neurolab_regression():
    """Check the neurolab regressor with a single hidden unit."""
    reg = NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR)
    check_regression(reg, **regressor_params)
def test_mn_regression():
    """Check MatrixNet regression and the feature-importance columns it reports."""
    reg = MatrixNetRegressor()
    check_regression(reg)
    importance_columns = set(reg.get_feature_importances().columns)
    assert importance_columns == {'effect', 'information', 'efficiency'}
def test_neurolab_multi_regression():
    """Check the neurolab regressor on a problem with three targets."""
    reg = NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR)
    check_regression(reg, n_targets=3, **regressor_params)
def test_pybrain_regression():
    """Check the default PyBrain regressor; weights, staged predictions and importances unsupported."""
    reg = PyBrainRegressor()
    check_regression(reg, has_staged_predictions=False, has_importances=False,
                     supports_weight=False)
def test_xgboost():
    """Check default XGBoost wrappers on 2- and 4-class problems, plus regression."""
    for n_classes in (2, 4):
        check_classifier(XGBoostClassifier(), n_classes=n_classes)
    check_regression(XGBoostRegressor())
def test_theanets_multi_regression():
    """Check theanets regression with three target columns."""
    trainer = dict(optimize='rmsprop', min_improvement=0.1)
    check_regression(TheanetsRegressor(layers=[20], trainers=[trainer]),
                     n_targets=3, **regressor_params)
def test_sklearn_regression():
    """Run the generic regression checks on two wrapped sklearn ensemble regressors."""
    for base in (AdaBoostRegressor(n_estimators=50),
                 GradientBoostingRegressor(n_estimators=50)):
        check_regression(SklearnRegressor(clf=base))
def test_pybrain_Linear_MDLSTM():
    """Check PyBrain estimators with Linear + MDLSTM hidden layers."""
    check_classifier(
        PyBrainClassifier(epochs=2, layers=[10, 2],
                          hiddenclass=['LinearLayer', 'MDLSTMLayer']),
        **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=3, layers=[10, 2],
                         hiddenclass=['LinearLayer', 'MDLSTMLayer']),
        **regressor_params)
def test_theanets_regression():
    """Check theanets regression: tuned rmsprop net, then default net with scaling."""
    tuned = TheanetsRegressor(
        layers=[20], trainers=[{'optimize': 'rmsprop', 'min_improvement': 0.1}])
    check_regression(tuned, **regressor_params)
    scaled = TheanetsRegressor(scaler=StandardScaler())
    check_regression(scaled, **regressor_params)
def test_pybrain_multi_regression():
    """Check the PyBrain regressor on a 4-target problem."""
    reg = PyBrainRegressor()
    check_regression(reg, n_targets=4, **regressor_params)
def test_theanets_regression():
    """Check theanets regression with a tuned rmsprop trainer and a default scaled net."""
    trainer = dict(algo='rmsprop', learning_rate=0.1)
    check_regression(TheanetsRegressor(layers=[11], trainers=[trainer]),
                     **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()), **regressor_params)
def test_pybrain_regression():
    """Run the generic regression checks on the default PyBrain regressor."""
    reg = PyBrainRegressor()
    check_regression(reg, **regressor_params)