Example #1
def test_pybrain_SoftMax_Tanh():
    check_classifier(PyBrainClassifier(epochs=10, layers=[5, 2], hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                                       use_rprop=True),
                     **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=2, layers=[10, 5, 2], hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer']),
        **regressor_params)
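The classifier_params / regressor_params dictionaries come from the surrounding test module and are not shown in these excerpts. Judging from the variants later in this listing that pass the same flags explicitly, they plausibly bundle the capability flags accepted by check_classifier / check_regression; a reconstruction under that assumption:

# Assumed definitions (not part of these excerpts): shared keyword arguments
# telling the checkers which optional capabilities to skip.
classifier_params = dict(has_staged_pp=False,       # no staged predict_proba
                         has_importances=False,     # no feature importances
                         supports_weight=False)     # sample weights unsupported
regressor_params = dict(has_staged_predictions=False,
                        has_importances=False,
                        supports_weight=False)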
Example #2
def test_theanets_regression():
    check_regression(TheanetsRegressor(layers=[3],
                                       trainers=[dict(algo='rmsprop', **impatient)]),
                     **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler(),
                                       trainers=[dict(algo='rmsprop', **impatient)]),
                     **regressor_params)
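impatient is likewise defined outside these excerpts. Since otherwise identical variants below pass min_improvement directly to the rmsprop trainer, it presumably collects early-stopping options that keep theanets training short in tests; a hypothetical definition:

# Hypothetical helper: trainer options that make theanets give up quickly.
impatient = dict(min_improvement=0.1,  # stop once improvement stalls
                 patience=1)           # tolerate a single non-improving step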
Example #3
def test_sklearn_regression():
    # supports weights
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    # doesn't support weights
    check_regression(
        SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50)),
        supports_weight=False)
Example #4
def test_tmva():
    # check classifier
    check_classifier(TMVAClassifier(),
                     check_instance=True,
                     has_staged_pp=False,
                     has_importances=False)

    cl = TMVAClassifier(method='kSVM',
                        Gamma=0.25,
                        Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl,
                     check_instance=True,
                     has_staged_pp=False,
                     has_importances=False)

    cl = TMVAClassifier(method='kCuts',
                        FitMethod='GA',
                        EffMethod='EffSel',
                        sigmoid_function='sig_eff=0.9')
    check_classifier(cl,
                     check_instance=True,
                     has_staged_pp=False,
                     has_importances=False)
    # check regressor; run it twice to check for a memory leak.
    for i in range(2):
        check_regression(TMVARegressor(),
                         check_instance=True,
                         has_staged_predictions=False,
                         has_importances=False)
Example #5
def test_models():
    for _ in range(3):
        clf = CacheClassifier('clf', SGDClassifier(loss='log'))
        check_classifier(clf, has_staged_pp=False, has_importances=False)

        reg = CacheRegressor('reg', SGDRegressor())
        check_regression(reg, has_staged_predictions=False, has_importances=False)
    cache_helper.clear_cache()
Example #6
def test_theanets_multi_regression():
    check_regression(TheanetsRegressor(layers=[13],
                                       trainers=[{
                                           'algo': 'rmsprop',
                                           'min_improvement': 0.1
                                       }]),
                     n_targets=3,
                     **regressor_params)
Example #7
def test_theanets_regression():
    check_regression(
        TheanetsRegressor(layers=[3],
                          trainers=[{
                              'algo': 'rmsprop',
                              'learning_rate': 0.1
                          }]), **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()),
                     **regressor_params)
Example #8
def test_theanets_regression():
    check_regression(
        TheanetsRegressor(layers=[20],
                          trainers=[{
                              'optimize': 'rmsprop',
                              'min_improvement': 0.1
                          }]), **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()),
                     **regressor_params)
Example #9
def test_theanets_regression():
    check_regression(
        TheanetsRegressor(layers=[3],
                          trainers=[dict(algo='rmsprop', **impatient)]),
        **regressor_params)
    check_regression(
        TheanetsRegressor(scaler=StandardScaler(),
                          trainers=[dict(algo='rmsprop', **impatient)]),
        **regressor_params)
Example #10
def test_pybrain_Tanh():
    check_classifier(PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer']),
                     has_staged_pp=False,
                     has_importances=False,
                     supports_weight=False)
    check_regression(PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer']),
                     has_staged_predictions=False,
                     has_importances=False,
                     supports_weight=False)
Example #11
def test_models():
    for _ in range(3):
        clf = CacheClassifier('clf', SGDClassifier(loss='log'))
        check_classifier(clf, has_staged_pp=False, has_importances=False)

        reg = CacheRegressor('reg', SGDRegressor())
        check_regression(reg,
                         has_staged_predictions=False,
                         has_importances=False)
    cache_helper.clear_cache()
Example #12
def test_pybrain_Linear_MDLSTM():
    check_classifier(
        PyBrainClassifier(epochs=2,
                          layers=[10, 2],
                          hiddenclass=['LinearLayer', 'MDLSTMLayer']),
        **classifier_params)
    check_regression(
        PyBrainRegressor(epochs=3,
                         layers=[10, 2],
                         hiddenclass=['LinearLayer', 'MDLSTMLayer']),
        **regressor_params)
Example #13
def test_theanets_regression():
    check_regression(TheanetsRegressor(layers=[20],
                                       trainers=[{
                                           'optimize': 'rmsprop'
                                       }]),
                     supports_weight=False,
                     has_staged_predictions=False,
                     has_importances=False)
    check_regression(TheanetsRegressor(),
                     supports_weight=False,
                     has_staged_predictions=False,
                     has_importances=False)
Example #14
def test_pybrain_SoftMax_Tanh():
    check_classifier(
        PyBrainClassifier(epochs=10,
                          layers=[5, 2],
                          hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                          use_rprop=True), **classifier_params)
    check_regression(
        PyBrainRegressor(
            epochs=2,
            layers=[10, 5, 2],
            hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer']),
        **regressor_params)
Example #15
def test_tmva():
    # check classifier
    check_classifier(TMVAClassifier(), check_instance=True, has_staged_pp=False, has_importances=False)

    cl = TMVAClassifier(method='kSVM', Gamma=0.25, Tol=0.001, sigmoid_function='identity')
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)

    cl = TMVAClassifier(method='kCuts', FitMethod='GA', EffMethod='EffSel', sigmoid_function='sig_eff=0.9')
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)
    # check regressor; run it twice to check for a memory leak.
    for i in range(2):
        check_regression(TMVARegressor(), check_instance=True, has_staged_predictions=False, has_importances=False)
Example #16
def test_neurolab_regression():
    check_regression(NeurolabRegressor(layers=[],
                                       show=0,
                                       epochs=N_EPOCHS_REGR,
                                       trainf=nl.train.train_rprop),
                     supports_weight=False,
                     has_importances=False,
                     has_staged_predictions=False)
    check_regression(NeurolabRegressor(net_type='single-layer',
                                       cn='auto',
                                       show=0,
                                       epochs=N_EPOCHS_REGR,
                                       trainf=nl.train.train_delta),
                     supports_weight=False,
                     has_importances=False,
                     has_staged_predictions=False)
Example #17
def test_tmva():
    # check classifier
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    cl = TMVAClassifier(factory_options=factory_options, method='kBDT', NTrees=10)
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)

    cl = TMVAClassifier(factory_options=factory_options, method='kSVM', Gamma=0.25, Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)

    cl = TMVAClassifier(factory_options=factory_options, method='kCuts',
                        FitMethod='GA', EffMethod='EffSel', sigmoid_function='sig_eff=0.9')
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)
    # check regressor; run it twice to check for a memory leak.
    for i in range(2):
        check_regression(TMVARegressor(factory_options=factory_options, method='kBDT', NTrees=10), check_instance=True,
                         has_staged_predictions=False, has_importances=False)
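The factory_options string follows TMVA's colon-separated Name=Value convention and here simply silences the factory's console output. If the same quiet settings are reused across several methods, the string can be assembled once; a small illustrative snippet (the variable name quiet is not from the original code):

# Illustrative only: build one quiet-mode option string and reuse it.
quiet = ":".join(["Silent=True", "V=False", "DrawProgressBar=False"])
cl = TMVAClassifier(factory_options=quiet, method='kBDT', NTrees=10)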
Example #18
def test_tmva():
    # check classifier
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    cl = TMVAClassifier(factory_options=factory_options,
                        method='kBDT',
                        NTrees=10)
    check_classifier(cl,
                     check_instance=True,
                     has_staged_pp=False,
                     has_importances=False)

    cl = TMVAClassifier(factory_options=factory_options,
                        method='kSVM',
                        Gamma=0.25,
                        Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl,
                     check_instance=True,
                     has_staged_pp=False,
                     has_importances=False)

    cl = TMVAClassifier(factory_options=factory_options,
                        method='kCuts',
                        FitMethod='GA',
                        EffMethod='EffSel',
                        sigmoid_function='sig_eff=0.9')
    check_classifier(cl,
                     check_instance=True,
                     has_staged_pp=False,
                     has_importances=False)
    # check regressor; run it twice to check for a memory leak.
    for i in range(2):
        check_regression(TMVARegressor(factory_options=factory_options,
                                       method='kBDT',
                                       NTrees=10),
                         check_instance=True,
                         has_staged_predictions=False,
                         has_importances=False)
Example #19
def test_xgboost():
    check_classifier(XGBoostClassifier(n_estimators=20), n_classes=2)
    check_classifier(XGBoostClassifier(n_estimators=20), n_classes=4)
    check_regression(XGBoostRegressor(n_estimators=20))
Example #20
def test_theanets_regression():
    check_regression(TheanetsRegressor(layers=[20], trainers=[{'optimize': 'rmsprop'}]),
                     supports_weight=False, has_staged_predictions=False, has_importances=False)
    check_regression(TheanetsRegressor(),
                     supports_weight=False, has_staged_predictions=False, has_importances=False)
Example #21
def test_sklearn_regression():
    # supports weights
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    # doesn't support weights
    check_regression(SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50)),
                     supports_weight=False)
Example #22
def test_folding_regressor_with_check_model():
    base_clf = SklearnRegressor(GradientBoostingRegressor(n_estimators=4))
    folding_str = FoldingRegressor(base_clf, n_folds=2)
    check_regression(folding_str, True, True, True)
Example #23
def test_neurolab_regression():
    check_regression(NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR),
                     **regressor_params)
Example #24
def test_mn_regression():
    clf = MatrixNetRegressor()
    check_regression(clf)
    assert {'effect', 'information', 'efficiency'} == set(clf.get_feature_importances().columns)
Example #25
def test_neurolab_multi_regression():
    check_regression(NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR),
                     n_targets=3, **regressor_params)
Example #26
def test_pybrain_regression():
    check_regression(PyBrainRegressor(), has_staged_predictions=False, has_importances=False, supports_weight=False)
Example #27
def test_xgboost():
    check_classifier(XGBoostClassifier(), n_classes=2)
    check_classifier(XGBoostClassifier(), n_classes=4)
    check_regression(XGBoostRegressor())
Example #28
def test_theanets_multi_regression():
    check_regression(TheanetsRegressor(layers=[20], trainers=[{'optimize': 'rmsprop', 'min_improvement': 0.1}]),
                     n_targets=3, **regressor_params)
Example #29
def test_mn_regression():
    clf = MatrixNetRegressor()
    check_regression(clf)
    assert {'effect', 'information',
            'efficiency'} == set(clf.get_feature_importances().columns)
Example #30
def test_neurolab_multi_regression():
    check_regression(NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR),
                     n_targets=3,
                     **regressor_params)
Example #31
def test_pybrain_regression():
    check_regression(PyBrainRegressor(),
                     has_staged_predictions=False,
                     has_importances=False,
                     supports_weight=False)
Example #32
def test_pybrain_Tanh():
    check_classifier(PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer']),
                     has_staged_pp=False, has_importances=False, supports_weight=False)
    check_regression(PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer']),
                     has_staged_predictions=False, has_importances=False, supports_weight=False)
Example #33
def test_neurolab_regression():
    check_regression(NeurolabRegressor(layers=[], show=0, epochs=N_EPOCHS_REGR, trainf=nl.train.train_rprop),
                     supports_weight=False, has_importances=False, has_staged_predictions=False)
    check_regression(NeurolabRegressor(net_type='single-layer', cn='auto', show=0, epochs=N_EPOCHS_REGR, trainf=nl.train.train_delta),
                     supports_weight=False, has_importances=False, has_staged_predictions=False)
Example #34
def test_sklearn_regression():
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    check_regression(
        SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50)))
Example #35
def test_pybrain_Linear_MDLSTM():
    check_classifier(PyBrainClassifier(epochs=2, layers=[10, 2], hiddenclass=['LinearLayer', 'MDLSTMLayer']),
                     **classifier_params)
    check_regression(PyBrainRegressor(epochs=3, layers=[10, 2], hiddenclass=['LinearLayer', 'MDLSTMLayer']),
                     **regressor_params)
Example #36
def test_theanets_regression():
    check_regression(TheanetsRegressor(layers=[20], trainers=[{'optimize': 'rmsprop', 'min_improvement': 0.1}]),
                     **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()), **regressor_params)
Example #37
def test_pybrain_multi_regression():
    check_regression(PyBrainRegressor(), n_targets=4, **regressor_params)
Example #38
def test_theanets_regression():
    check_regression(TheanetsRegressor(layers=[11],
                                       trainers=[{'algo': 'rmsprop', 'learning_rate': 0.1}]),
                     **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()), **regressor_params)
Example #39
def test_pybrain_regression():
    check_regression(PyBrainRegressor(), **regressor_params)
Example #40
def test_neurolab_regression():
    check_regression(NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR), **regressor_params)
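Every example above delegates to the shared check_regression / check_classifier helpers, whose implementation is not included in this listing. As a rough, simplified illustration of the pattern (not REP's actual helper), such a check fits the wrapped estimator on a small synthetic dataset, verifies the prediction shape, and applies a loose quality bound; the boolean flags seen above would switch the optional sub-checks (staged predictions, feature importances, sample weights) on or off:

import numpy
from sklearn.datasets import make_regression
from sklearn.metrics import mean_squared_error


def check_regression_sketch(regressor, n_targets=1, supports_weight=True):
    # Simplified stand-in for the real helper, for illustration only.
    X, y = make_regression(n_samples=200, n_features=5, n_targets=n_targets,
                           noise=0.1, random_state=42)
    if supports_weight:
        # exercise the sample_weight code path with trivial weights
        regressor.fit(X, y, sample_weight=numpy.ones(len(X)))
    else:
        regressor.fit(X, y)
    predictions = regressor.predict(X)
    assert predictions.shape[0] == len(X)
    assert mean_squared_error(y, predictions) < 1e3  # loose sanity bound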