def test_pybrain_SoftMax_Tanh():
    """Exercise PyBrain with mixed Tanh/Softmax hidden layers (rprop training)."""
    check_classifier(
        PyBrainClassifier(epochs=10, layers=[5, 2],
                          hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                          use_rprop=True),
        **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=2, layers=[10, 5, 2],
                         hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer']),
        **regressor_params)
def test_theanets_single_classification():
    """Run the Theanets classifier through several layer/trainer configurations."""
    flags = dict(supports_weight=False, has_staged_pp=False, has_importances=False)
    sgd = {'optimize': 'sgd', 'learning_rate': 0.3}
    check_classifier(TheanetsClassifier(), **flags)
    check_classifier(TheanetsClassifier(layers=[]), **flags)
    check_classifier(TheanetsClassifier(layers=[20], trainers=[dict(sgd)]), **flags)
    # two identical two-hidden-layer runs (kept as in the original test)
    check_classifier(TheanetsClassifier(layers=[5, 5], trainers=[dict(sgd)]), **flags)
    check_classifier(TheanetsClassifier(layers=[5, 5], trainers=[dict(sgd)]), **flags)
def test_theanets_multiclassification():
    """Theanets on a four-class problem with a loose early-stopping trainer."""
    clf = TheanetsClassifier(trainers=[{'min_improvement': 0.1,
                                        'learning_rate': 0.1}])
    check_classifier(clf, n_classes=4, **classifier_params)
def test_complex_stacking_xgboost():
    """AdaBoost on top of a kFold (Folding) wrapper around XGBoost."""
    folded = FoldingClassifier(base_estimator=XGBoostClassifier())
    ada = AdaBoostClassifier(base_estimator=folded, n_estimators=3)
    check_classifier(SklearnClassifier(clf=ada),
                     has_staged_pp=False, has_importances=False)
def test_theanets_simple_stacking():
    """Bagging over a default Theanets classifier, via the sklearn wrapper."""
    tnt = TheanetsClassifier()
    bagger = BaggingClassifier(base_estimator=tnt, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger),
                     supports_weight=False, has_staged_pp=False,
                     has_importances=False)
def test_simple_stacking_pybrain():
    """Bagging over a default PyBrain classifier, via the sklearn wrapper."""
    pb = PyBrainClassifier()
    bagger = BaggingClassifier(base_estimator=pb, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger),
                     has_staged_pp=False, has_importances=False,
                     supports_weight=False)
def test_simple_stacking_nolearn():
    """Bagging over nolearn.

    Note: AdaBoostClassifier cannot be used here because nolearn does not
    support sample_weight.
    """
    nl_clf = NolearnClassifier()
    bagger = BaggingClassifier(base_estimator=nl_clf, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger),
                     has_staged_pp=False, has_importances=False,
                     supports_weight=False)
def test_theanets_multiple_classification():
    """Chain two Theanets trainers (adadelta then nag) in one classifier."""
    trainers = [{'optimize': 'adadelta', 'min_improvement': 0.5},
                {'optimize': 'nag'}]
    check_classifier(TheanetsClassifier(trainers=trainers), **classifier_params)
def test_theanets_configurations():
    """Theanets with/without scaling and with one vs two hidden layers."""
    check_classifier(
        TheanetsClassifier(layers=[20], scaler=False,
                           trainers=[{'optimize': 'nag',
                                      'learning_rate': 0.3,
                                      'min_improvement': 0.5}]),
        **classifier_params)
    check_classifier(
        TheanetsClassifier(layers=[5, 5],
                           trainers=[{'optimize': 'nag',
                                      'learning_rate': 0.3,
                                      'min_improvement': 0.5}]),
        **classifier_params)
def test_complex_stacking_tmva():
    """AdaBoost on top of a kFold (Folding) wrapper around TMVA."""
    folded = FoldingClassifier(base_estimator=TMVAClassifier(), random_state=13)
    ada = AdaBoostClassifier(base_estimator=folded, n_estimators=3)
    check_classifier(SklearnClassifier(clf=ada),
                     has_staged_pp=False, has_importances=False)
def test_models():
    """Check cached SGD estimators; repeated runs exercise cache reuse.

    NOTE(review): the minified original is ambiguous about loop extent; the
    cache is assumed to be cleared once, after all iterations — confirm
    against the project's cache tests.
    """
    for _ in range(3):
        clf = CacheClassifier('clf', SGDClassifier(loss='log'))
        check_classifier(clf, has_staged_pp=False, has_importances=False)
        reg = CacheRegressor('reg', SGDRegressor())
        check_regression(reg, has_staged_predictions=False,
                         has_importances=False)
    cache_helper.clear_cache()
def test_complex_stacking_mn():
    """AdaBoost on top of a kFold (Folding) wrapper around MatrixNet."""
    folded = FoldingClassifier(
        base_estimator=MatrixNetClassifier(iterations=30))
    ada = AdaBoostClassifier(base_estimator=folded, n_estimators=3)
    check_classifier(SklearnClassifier(clf=ada),
                     has_staged_pp=False, has_importances=False)
def test_pybrain_classification():
    """PyBrain classifier with default and two-hidden-layer topologies."""
    flags = dict(has_staged_pp=False, has_importances=False,
                 supports_weight=False)
    check_classifier(PyBrainClassifier(), **flags)
    check_classifier(PyBrainClassifier(layers=[10, 10]), **flags)
def test_pybrain_Tanh():
    """PyBrain classifier and regressor with a single Tanh hidden layer."""
    check_classifier(
        PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer']),
        has_staged_pp=False, has_importances=False, supports_weight=False)
    check_regression(
        PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer']),
        has_staged_predictions=False, has_importances=False,
        supports_weight=False)
def test_theanets_configurations():
    """Theanets with scaler disabled vs minmax scaling, different trainers."""
    check_classifier(
        TheanetsClassifier(layers=[13], scaler=False,
                           trainers=[{'algo': 'nag', 'learning_rate': 0.1}]),
        **classifier_params)
    check_classifier(
        TheanetsClassifier(layers=[5, 5], scaler='minmax',
                           trainers=[{'algo': 'adadelta',
                                      'learning_rate': 0.1}]),
        **classifier_params)
def test_theanets_multiple_classification():
    """Chain two Theanets trainers (adadelta then nag) in one classifier."""
    clf = TheanetsClassifier(trainers=[{'optimize': 'adadelta'},
                                       {'optimize': 'nag'}])
    check_classifier(clf, supports_weight=False, has_staged_pp=False,
                     has_importances=False)
def test_pretrain():
    """Theanets unsupervised pretraining followed by nag fine-tuning."""
    trainers = [{'optimize': 'pretrain', 'patience': 1, 'learning_rate': 0.1},
                {'optimize': 'nag', 'patience': 1}]
    clf = TheanetsClassifier(trainers=trainers)
    check_classifier(clf, **classifier_params)
def test_neurolab_stacking():
    """Bagging over a Neurolab classifier, via the sklearn wrapper."""
    nlab = NeurolabClassifier(show=0, layers=[], epochs=N_EPOCHS2,
                              trainf=nl.train.train_rprop)
    bagger = BaggingClassifier(base_estimator=nlab, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger),
                     supports_weight=False, has_staged_pp=False,
                     has_importances=False)
def test_theanets_single_classification():
    """Theanets with zero-patience trainers, default and minmax-scaled."""
    check_classifier(TheanetsClassifier(trainers=[{'patience': 0}]),
                     **classifier_params)
    check_classifier(
        TheanetsClassifier(layers=[], scaler='minmax',
                           trainers=[{'patience': 0}]),
        **classifier_params)
def test_pretrain():
    """Theanets pretraining then nag fine-tuning on a two-hidden-layer net."""
    trainers = [{'algo': 'pretrain', 'learning_rate': 0.1},
                {'algo': 'nag', 'learning_rate': 0.1}]
    clf = TheanetsClassifier(layers=[5, 5], trainers=trainers)
    check_classifier(clf, **classifier_params)
def test_theanets_configurations():
    """Theanets with nag (impatient settings) and adam trainer variants."""
    check_classifier(
        TheanetsClassifier(layers=[13], scaler=False,
                           trainers=[dict(algo='nag', learning_rate=0.1,
                                          **impatient)]),
        **classifier_params)
    check_classifier(
        TheanetsClassifier(layers=[5, 5],
                           trainers=[dict(algo='adam', learning_rate=0.01,
                                          momentum=0.9)]),
        **classifier_params)
def test_theanets_single_classification():
    """Run the Theanets classifier through several layer/trainer setups."""
    flags = dict(supports_weight=False, has_staged_pp=False,
                 has_importances=False)
    check_classifier(TheanetsClassifier(), **flags)
    check_classifier(TheanetsClassifier(layers=[]), **flags)
    check_classifier(
        TheanetsClassifier(layers=[20],
                           trainers=[{'optimize': 'sgd',
                                      'learning_rate': 0.3}]),
        **flags)
    # two identical two-hidden-layer runs (kept as in the original test)
    check_classifier(
        TheanetsClassifier(layers=[5, 5],
                           trainers=[{'optimize': 'sgd',
                                      'learning_rate': 0.3}]),
        **flags)
    check_classifier(
        TheanetsClassifier(layers=[5, 5],
                           trainers=[{'optimize': 'sgd',
                                      'learning_rate': 0.3}]),
        **flags)
def test_pybrain_Linear_MDLSTM():
    """PyBrain with Linear + MDLSTM hidden layers, classifier and regressor."""
    hidden = ['LinearLayer', 'MDLSTMLayer']
    check_classifier(
        PyBrainClassifier(epochs=2, layers=[10, 2], hiddenclass=hidden),
        **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=3, layers=[10, 2], hiddenclass=hidden),
        **regressor_params)
def test_theanets_configurations():
    """Theanets with nag (impatient settings) and adam trainer variants."""
    nag_clf = TheanetsClassifier(
        layers=[13], scaler=False,
        trainers=[dict(algo='nag', learning_rate=0.1, **impatient)])
    check_classifier(nag_clf, **classifier_params)
    adam_clf = TheanetsClassifier(
        layers=[5, 5],
        trainers=[dict(algo='adam', learning_rate=0.01, momentum=0.9)])
    check_classifier(adam_clf, **classifier_params)
def test_pybrain_SoftMax_Tanh():
    """Exercise PyBrain with mixed Tanh/Softmax hidden layers (rprop training)."""
    clf = PyBrainClassifier(epochs=10, layers=[5, 2],
                            hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                            use_rprop=True)
    check_classifier(clf, **classifier_params)
    reg = PyBrainRegressor(epochs=2, layers=[10, 5, 2],
                           hiddenclass=['TanhLayer', 'SoftmaxLayer',
                                        'TanhLayer'])
    check_regression(reg, **regressor_params)
def test_nolearn_classification():
    """Nolearn classifier with default topology and with no hidden layers."""
    flags = dict(check_instance=True, has_staged_pp=False,
                 has_importances=False, supports_weight=False)
    check_classifier(NolearnClassifier(), **flags)
    check_classifier(NolearnClassifier(layers=[]), **flags)
def test_theanets_configurations():
    """Theanets with/without scaling and with one vs two hidden layers."""
    nag = {'optimize': 'nag', 'learning_rate': 0.3, 'min_improvement': 0.5}
    check_classifier(
        TheanetsClassifier(layers=[20], scaler=False, trainers=[dict(nag)]),
        **classifier_params)
    check_classifier(
        TheanetsClassifier(layers=[5, 5], trainers=[dict(nag)]),
        **classifier_params)
def test_neurolab_single_classification():
    """Neurolab: rprop-trained net with no hidden layer, and single-layer net."""
    flags = dict(supports_weight=False, has_staged_pp=False,
                 has_importances=False)
    check_classifier(
        NeurolabClassifier(show=0, layers=[], epochs=N_EPOCHS2,
                           trainf=nl.train.train_rprop),
        **flags)
    check_classifier(
        NeurolabClassifier(net_type='single-layer', cn='auto', show=0,
                           epochs=N_EPOCHS2, trainf=nl.train.train_delta),
        **flags)
def test_neurolab_single_classification():
    """Neurolab with zero, one and two hidden layers."""
    check_classifier(
        NeurolabClassifier(layers=[], epochs=N_EPOCHS2, trainf=None),
        **classifier_params)
    check_classifier(NeurolabClassifier(layers=[2], epochs=N_EPOCHS2),
                     **classifier_params)
    check_classifier(NeurolabClassifier(layers=[1, 1], epochs=N_EPOCHS2),
                     **classifier_params)
def test_tmva():
    """TMVA wrapper: default BDT, SVM and Cuts classifiers plus the regressor."""
    flags = dict(check_instance=True, has_staged_pp=False,
                 has_importances=False)
    # default method
    check_classifier(TMVAClassifier(), **flags)
    # SVM configuration
    svm = TMVAClassifier(method='kSVM', Gamma=0.25, Tol=0.001,
                         sigmoid_function='identity')
    check_classifier(svm, **flags)
    # rectangular-cuts configuration
    cuts = TMVAClassifier(method='kCuts', FitMethod='GA', EffMethod='EffSel',
                          sigmoid_function='sig_eff=0.9')
    check_classifier(cuts, **flags)
    # regressor is run twice to check for a memory leak
    for i in range(2):
        check_regression(TMVARegressor(), check_instance=True,
                         has_staged_predictions=False, has_importances=False)
def test_neurolab_single_classification():
    """Neurolab with zero, one and two hidden layers."""
    for net in (NeurolabClassifier(layers=[], epochs=N_EPOCHS2, trainf=None),
                NeurolabClassifier(layers=[2], epochs=N_EPOCHS2),
                NeurolabClassifier(layers=[1, 1], epochs=N_EPOCHS2)):
        check_classifier(net, **classifier_params)
def test_sklearn_classification():
    """Sklearn wrapper over AdaBoost (binary and 3-class) and GBDT."""
    check_classifier(
        SklearnClassifier(clf=AdaBoostClassifier(n_estimators=10)))
    check_classifier(
        SklearnClassifier(clf=AdaBoostClassifier(n_estimators=10)),
        n_classes=3)
    check_classifier(
        SklearnClassifier(clf=GradientBoostingClassifier(n_estimators=10)))
def test_pybrain_classification():
    """PyBrain: fixed epochs, convergence-driven training, and a deeper net."""
    check_classifier(PyBrainClassifier(epochs=2), **classifier_params)
    # epochs=-1 trains until convergence, continuing 1 epoch at a time
    check_classifier(
        PyBrainClassifier(epochs=-1, continue_epochs=1, layers=[]),
        **classifier_params)
    check_classifier(PyBrainClassifier(epochs=2, layers=[5, 2]),
                     **classifier_params)
def test_nolearn_classification():
    """Nolearn with default, empty and two-hidden-layer topologies."""
    flags = dict(check_instance=True, has_staged_pp=False,
                 has_importances=False, supports_weight=False)
    for net in (NolearnClassifier(),
                NolearnClassifier(layers=[]),
                NolearnClassifier(layers=[5, 5])):
        check_classifier(net, **flags)
def test_sklearn_classification():
    """Sklearn wrapper over AdaBoost (supports weights) and GBDT (does not)."""
    check_classifier(
        SklearnClassifier(clf=AdaBoostClassifier(n_estimators=10)))
    check_classifier(
        SklearnClassifier(clf=AdaBoostClassifier(n_estimators=10)),
        n_classes=3)
    check_classifier(
        SklearnClassifier(clf=GradientBoostingClassifier(n_estimators=10)),
        supports_weight=False)
def test_tmva():
    """TMVA wrapper with quiet factory options: BDT, SVM, Cuts and regressor."""
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    flags = dict(check_instance=True, has_staged_pp=False,
                 has_importances=False)
    bdt = TMVAClassifier(factory_options=factory_options, method='kBDT',
                         NTrees=10)
    check_classifier(bdt, **flags)
    svm = TMVAClassifier(factory_options=factory_options, method='kSVM',
                         Gamma=0.25, Tol=0.001, sigmoid_function='identity')
    check_classifier(svm, **flags)
    cuts = TMVAClassifier(factory_options=factory_options, method='kCuts',
                          FitMethod='GA', EffMethod='EffSel',
                          sigmoid_function='sig_eff=0.9')
    check_classifier(cuts, **flags)
    # regressor is run twice to check for a memory leak
    for i in range(2):
        check_regression(
            TMVARegressor(factory_options=factory_options, method='kBDT',
                          NTrees=10),
            check_instance=True, has_staged_predictions=False,
            has_importances=False)
def test_simple_stacking_xgboost():
    """AdaBoost over XGBoost, wrapped into the sklearn adapter."""
    xgb = XGBoostClassifier()
    stacked = SklearnClassifier(
        clf=AdaBoostClassifier(base_estimator=xgb, n_estimators=3))
    check_classifier(stacked, has_staged_pp=False)
def test_xgboost():
    """XGBoost classifier on 2- and 4-class problems, plus the regressor."""
    check_classifier(XGBoostClassifier(n_estimators=20), n_classes=2)
    check_classifier(XGBoostClassifier(n_estimators=20), n_classes=4)
    check_regression(XGBoostRegressor(n_estimators=20))
def test_theanets_single_classification():
    """Theanets with zero-patience trainers, default and minmax-scaled."""
    check_classifier(TheanetsClassifier(trainers=[{'patience': 0}]),
                     **classifier_params)
    scaled = TheanetsClassifier(layers=[], scaler='minmax',
                                trainers=[{'patience': 0}])
    check_classifier(scaled, **classifier_params)
def test_neurolab_multiclassification():
    """Neurolab rprop-trained net on a four-class problem."""
    clf = NeurolabClassifier(layers=[10], epochs=N_EPOCHS4,
                             trainf=nl.train.train_rprop)
    check_classifier(clf, n_classes=4, **classifier_params)
def test_neurolab_stacking():
    """Bagging over a Neurolab classifier with a doubled epoch budget."""
    nlab = NeurolabClassifier(layers=[], epochs=N_EPOCHS2 * 2,
                              trainf=nl.train.train_rprop)
    bagger = BaggingClassifier(base_estimator=nlab, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger), **classifier_params)
def test_pretrain():
    """Theanets unsupervised pretraining followed by nag fine-tuning."""
    clf = TheanetsClassifier(
        trainers=[{'optimize': 'pretrain', 'patience': 1,
                   'learning_rate': 0.1},
                  {'optimize': 'nag', 'patience': 1}])
    check_classifier(clf, **classifier_params)
def test_theanets_multiclassification():
    """Theanets on a four-class problem with a zero-patience trainer."""
    clf = TheanetsClassifier(trainers=[{'patience': 0}])
    check_classifier(clf, n_classes=4, **classifier_params)
def test_theanets_simple_stacking():
    """Bagging over a loosely-stopped Theanets classifier."""
    tnt = TheanetsClassifier(trainers=[{'min_improvement': 0.1}])
    bagger = BaggingClassifier(base_estimator=tnt, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger), **classifier_params)
def test_nolearn_multiple_classification():
    """Nolearn classifier on a four-class problem."""
    clf = NolearnClassifier()
    check_classifier(clf, check_instance=True, has_staged_pp=False,
                     has_importances=False, supports_weight=False,
                     n_classes=4)
def test_pybrain_multi_classification():
    """PyBrain classifier on a four-class problem."""
    check_classifier(PyBrainClassifier(), n_classes=4, **classifier_params)
def test_pybrain_classification():
    """PyBrain: fixed epochs, convergence-driven training, and a deeper net."""
    check_classifier(PyBrainClassifier(epochs=2), **classifier_params)
    # epochs=-1 trains until convergence, continuing 1 epoch at a time
    convergent = PyBrainClassifier(epochs=-1, continue_epochs=1, layers=[])
    check_classifier(convergent, **classifier_params)
    check_classifier(PyBrainClassifier(epochs=2, layers=[5, 2]),
                     **classifier_params)
def test_sklearn_classification():
    """Sklearn wrapper over AdaBoost (supports weights) and GBDT (does not)."""
    ada = SklearnClassifier(clf=AdaBoostClassifier(n_estimators=10))
    check_classifier(ada)
    check_classifier(
        SklearnClassifier(clf=AdaBoostClassifier(n_estimators=10)),
        n_classes=3)
    gbdt = SklearnClassifier(clf=GradientBoostingClassifier(n_estimators=10))
    check_classifier(gbdt, supports_weight=False)
def test_simple_stacking_pybrain():
    """Bagging over a short-epoch PyBrain classifier."""
    pb = PyBrainClassifier(epochs=2)
    bagger = BaggingClassifier(base_estimator=pb, n_estimators=3)
    check_classifier(SklearnClassifier(clf=bagger), **classifier_params)
def test_simple_stacking_sklearn():
    """AdaBoost whose base estimator is itself an AdaBoost ensemble."""
    inner = AdaBoostClassifier(n_estimators=30)
    outer = AdaBoostClassifier(base_estimator=inner, n_estimators=3)
    check_classifier(SklearnClassifier(clf=outer))
def test_pybrain_Linear_MDLSTM():
    """PyBrain with Linear + MDLSTM hidden layers, classifier and regressor."""
    check_classifier(
        PyBrainClassifier(epochs=2, layers=[10, 2],
                          hiddenclass=['LinearLayer', 'MDLSTMLayer']),
        **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=3, layers=[10, 2],
                         hiddenclass=['LinearLayer', 'MDLSTMLayer']),
        **regressor_params)
def test_simple_stacking_tmva():
    """Bagging (seeded) over a default TMVA classifier."""
    tmva = TMVAClassifier()
    bagger = BaggingClassifier(base_estimator=tmva, n_estimators=3,
                               random_state=13)
    check_classifier(SklearnClassifier(clf=bagger),
                     has_staged_pp=False, has_importances=False)
def test_complex_stacking_tmva():
    """AdaBoost on top of a kFold (Folding) wrapper around TMVA."""
    folded = FoldingClassifier(base_estimator=TMVAClassifier(),
                               random_state=13)
    stacked = SklearnClassifier(
        clf=AdaBoostClassifier(base_estimator=folded, n_estimators=3))
    check_classifier(stacked, has_staged_pp=False, has_importances=False)
def test_complex_stacking_xgboost():
    """AdaBoost on top of a kFold (Folding) wrapper around XGBoost."""
    folded = FoldingClassifier(base_estimator=XGBoostClassifier())
    stacked = SklearnClassifier(
        clf=AdaBoostClassifier(base_estimator=folded, n_estimators=3))
    check_classifier(stacked, has_staged_pp=False, has_importances=False)
def test_simple_stacking_nolearn():
    """Bagging over nolearn.

    Note: AdaBoostClassifier cannot be used here because nolearn does not
    support sample_weight.
    """
    nl_clf = NolearnClassifier()
    stacked = SklearnClassifier(
        clf=BaggingClassifier(base_estimator=nl_clf, n_estimators=3))
    check_classifier(stacked, has_staged_pp=False, has_importances=False,
                     supports_weight=False)
def test_theanets_multiple_classification():
    """Chain two Theanets trainers (adadelta then nag) in one classifier."""
    clf = TheanetsClassifier(
        trainers=[{'optimize': 'adadelta', 'min_improvement': 0.5},
                  {'optimize': 'nag'}])
    check_classifier(clf, **classifier_params)