예제 #1
0
def test_pybrain_SoftMax_Tanh():
    """Check PyBrain estimators built with mixed Tanh/Softmax hidden layers."""
    clf = PyBrainClassifier(epochs=10, layers=[5, 2],
                            hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                            use_rprop=True)
    check_classifier(clf, **classifier_params)
    reg = PyBrainRegressor(epochs=2, layers=[10, 5, 2],
                           hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer'])
    check_regression(reg, **regressor_params)
예제 #2
0
def test_theanets_regression():
    """Check Theanets regression with an explicit layer spec and with scaling."""
    reg = TheanetsRegressor(layers=[3],
                            trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(reg, **regressor_params)
    scaled = TheanetsRegressor(scaler=StandardScaler(),
                               trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(scaled, **regressor_params)
예제 #3
0
파일: test_sklearn.py 프로젝트: spolakh/rep
def test_sklearn_regression():
    """AdaBoost supports sample weights; the GradientBoosting check opts out."""
    ada = SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50))
    check_regression(ada)
    boosted = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(boosted, supports_weight=False)
예제 #4
0
def test_tmva():
    """Smoke-test TMVA default/kSVM/kCuts classifiers and the regressor."""
    flags = dict(check_instance=True, has_staged_pp=False, has_importances=False)

    check_classifier(TMVAClassifier(), **flags)

    svm = TMVAClassifier(method='kSVM', Gamma=0.25, Tol=0.001,
                         sigmoid_function='identity')
    check_classifier(svm, **flags)

    cuts = TMVAClassifier(method='kCuts', FitMethod='GA', EffMethod='EffSel',
                          sigmoid_function='sig_eff=0.9')
    check_classifier(cuts, **flags)

    # The regressor runs twice on purpose: catches memory leaks between runs.
    for _ in range(2):
        check_regression(TMVARegressor(), check_instance=True,
                         has_staged_predictions=False, has_importances=False)
예제 #5
0
def test_models():
    """Run cached classifier/regressor checks three times, then wipe the cache."""
    for _ in range(3):
        check_classifier(CacheClassifier('clf', SGDClassifier(loss='log')),
                         has_staged_pp=False, has_importances=False)
        check_regression(CacheRegressor('reg', SGDRegressor()),
                         has_staged_predictions=False, has_importances=False)
    cache_helper.clear_cache()
예제 #6
0
def test_theanets_multi_regression():
    """Check Theanets regression with three simultaneous targets."""
    reg = TheanetsRegressor(layers=[13],
                            trainers=[{'algo': 'rmsprop', 'min_improvement': 0.1}])
    check_regression(reg, n_targets=3, **regressor_params)
예제 #7
0
def test_theanets_regression():
    """Check Theanets regression with an rmsprop trainer and with scaling."""
    tuned = TheanetsRegressor(layers=[3],
                              trainers=[{'algo': 'rmsprop', 'learning_rate': 0.1}])
    check_regression(tuned, **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()),
                     **regressor_params)
예제 #8
0
def test_theanets_regression():
    """Check Theanets regression with an rmsprop trainer and with scaling."""
    tuned = TheanetsRegressor(layers=[20],
                              trainers=[{'optimize': 'rmsprop',
                                         'min_improvement': 0.1}])
    check_regression(tuned, **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()),
                     **regressor_params)
예제 #9
0
def test_theanets_regression():
    """Check Theanets regression: plain layers, then with a standard scaler."""
    plain = TheanetsRegressor(layers=[3],
                              trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(plain, **regressor_params)
    scaled = TheanetsRegressor(scaler=StandardScaler(),
                               trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(scaled, **regressor_params)
예제 #10
0
파일: test_pybrain.py 프로젝트: spolakh/rep
def test_pybrain_Tanh():
    """PyBrain with one Tanh hidden layer; no staging, importances, or weights."""
    clf = PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer'])
    check_classifier(clf, has_staged_pp=False, has_importances=False,
                     supports_weight=False)
    reg = PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer'])
    check_regression(reg, has_staged_predictions=False, has_importances=False,
                     supports_weight=False)
예제 #11
0
def test_models():
    """Exercise cached estimators three times in a row, then clear the cache."""
    flags = dict(has_importances=False)
    for _ in range(3):
        check_classifier(CacheClassifier('clf', SGDClassifier(loss='log')),
                         has_staged_pp=False, **flags)
        check_regression(CacheRegressor('reg', SGDRegressor()),
                         has_staged_predictions=False, **flags)
    cache_helper.clear_cache()
예제 #12
0
def test_pybrain_Linear_MDLSTM():
    """PyBrain estimators with Linear + MDLSTM hidden layers."""
    clf = PyBrainClassifier(epochs=2, layers=[10, 2],
                            hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_classifier(clf, **classifier_params)
    reg = PyBrainRegressor(epochs=3, layers=[10, 2],
                           hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_regression(reg, **regressor_params)
예제 #13
0
def test_theanets_regression():
    """Theanets regression with rmsprop and with defaults; limited check flags."""
    flags = dict(supports_weight=False, has_staged_predictions=False,
                 has_importances=False)
    tuned = TheanetsRegressor(layers=[20], trainers=[{'optimize': 'rmsprop'}])
    check_regression(tuned, **flags)
    check_regression(TheanetsRegressor(), **flags)
예제 #14
0
def test_pybrain_SoftMax_Tanh():
    """PyBrain nets mixing Tanh and Softmax hidden layers (classifier uses rprop)."""
    classifier = PyBrainClassifier(epochs=10, layers=[5, 2],
                                   hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                                   use_rprop=True)
    check_classifier(classifier, **classifier_params)
    regressor = PyBrainRegressor(epochs=2, layers=[10, 5, 2],
                                 hiddenclass=['TanhLayer', 'SoftmaxLayer',
                                              'TanhLayer'])
    check_regression(regressor, **regressor_params)
예제 #15
0
파일: test_tmva.py 프로젝트: 0x0all/rep
def test_tmva():
    """TMVA default, kSVM and kCuts classifiers; regressor twice (leak check)."""
    flags = dict(check_instance=True, has_staged_pp=False, has_importances=False)
    check_classifier(TMVAClassifier(), **flags)

    svm = TMVAClassifier(method='kSVM', Gamma=0.25, Tol=0.001,
                         sigmoid_function='identity')
    check_classifier(svm, **flags)

    cuts = TMVAClassifier(method='kCuts', FitMethod='GA', EffMethod='EffSel',
                          sigmoid_function='sig_eff=0.9')
    check_classifier(cuts, **flags)

    # Two regressor runs guard against a memory leak between fits.
    for _ in range(2):
        check_regression(TMVARegressor(), check_instance=True,
                         has_staged_predictions=False, has_importances=False)
예제 #16
0
def test_neurolab_regression():
    """Neurolab regression: rprop-trained net, then single-layer delta training."""
    flags = dict(supports_weight=False, has_importances=False,
                 has_staged_predictions=False)
    rprop_net = NeurolabRegressor(layers=[], show=0, epochs=N_EPOCHS_REGR,
                                  trainf=nl.train.train_rprop)
    check_regression(rprop_net, **flags)
    delta_net = NeurolabRegressor(net_type='single-layer', cn='auto', show=0,
                                  epochs=N_EPOCHS_REGR,
                                  trainf=nl.train.train_delta)
    check_regression(delta_net, **flags)
예제 #17
0
def test_tmva():
    """TMVA kBDT/kSVM/kCuts classifiers and the kBDT regressor, all silent."""
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    flags = dict(check_instance=True, has_staged_pp=False, has_importances=False)

    bdt = TMVAClassifier(factory_options=factory_options, method='kBDT',
                         NTrees=10)
    check_classifier(bdt, **flags)

    svm = TMVAClassifier(factory_options=factory_options, method='kSVM',
                         Gamma=0.25, Tol=0.001, sigmoid_function='identity')
    check_classifier(svm, **flags)

    cuts = TMVAClassifier(factory_options=factory_options, method='kCuts',
                          FitMethod='GA', EffMethod='EffSel',
                          sigmoid_function='sig_eff=0.9')
    check_classifier(cuts, **flags)

    # Regressor runs twice to catch memory leaks between runs.
    for _ in range(2):
        check_regression(TMVARegressor(factory_options=factory_options,
                                       method='kBDT', NTrees=10),
                         check_instance=True, has_staged_predictions=False,
                         has_importances=False)
예제 #18
0
def test_tmva():
    """TMVA classifier variants (kBDT, kSVM, kCuts) plus the kBDT regressor."""
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    common = dict(check_instance=True, has_staged_pp=False,
                  has_importances=False)
    # Each entry is one classifier configuration to smoke-test.
    variants = [
        dict(method='kBDT', NTrees=10),
        dict(method='kSVM', Gamma=0.25, Tol=0.001,
             sigmoid_function='identity'),
        dict(method='kCuts', FitMethod='GA', EffMethod='EffSel',
             sigmoid_function='sig_eff=0.9'),
    ]
    for params in variants:
        cl = TMVAClassifier(factory_options=factory_options, **params)
        check_classifier(cl, **common)
    # The regressor is checked twice on purpose: memory-leak regression guard.
    for _ in range(2):
        check_regression(TMVARegressor(factory_options=factory_options,
                                       method='kBDT', NTrees=10),
                         check_instance=True, has_staged_predictions=False,
                         has_importances=False)
예제 #19
0
def test_xgboost():
    """XGBoost binary and four-class classification, plus regression."""
    for n_classes in (2, 4):
        check_classifier(XGBoostClassifier(n_estimators=20),
                         n_classes=n_classes)
    check_regression(XGBoostRegressor(n_estimators=20))
예제 #20
0
def test_theanets_regression():
    """Theanets regression with rmsprop and with defaults, limited check flags."""
    flags = dict(supports_weight=False, has_staged_predictions=False,
                 has_importances=False)
    check_regression(TheanetsRegressor(layers=[20],
                                       trainers=[{'optimize': 'rmsprop'}]),
                     **flags)
    check_regression(TheanetsRegressor(), **flags)
예제 #21
0
def test_sklearn_regression():
    """AdaBoost is checked with weights; GradientBoosting without."""
    weighted = SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50))
    check_regression(weighted)
    unweighted = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(unweighted, supports_weight=False)
예제 #22
0
def test_folding_regressor_with_check_model():
    """A two-fold FoldingRegressor over GB passes the generic regression check."""
    base = SklearnRegressor(GradientBoostingRegressor(n_estimators=4))
    folding = FoldingRegressor(base, n_folds=2)
    check_regression(folding, True, True, True)
예제 #23
0
def test_folding_regressor_with_check_model():
    """FoldingRegressor (2 folds, GB base) passes the regression model check."""
    base_regressor = SklearnRegressor(GradientBoostingRegressor(n_estimators=4))
    folded = FoldingRegressor(base_regressor, n_folds=2)
    check_regression(folded, True, True, True)
예제 #24
0
def test_neurolab_regression():
    """Neurolab regression with a single hidden unit."""
    reg = NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR)
    check_regression(reg, **regressor_params)
예제 #25
0
def test_mn_regression():
    """MatrixNet regression; feature importances expose exactly three columns."""
    reg = MatrixNetRegressor()
    check_regression(reg)
    importance_columns = set(reg.get_feature_importances().columns)
    assert importance_columns == {'effect', 'information', 'efficiency'}
예제 #26
0
def test_neurolab_multi_regression():
    """Neurolab regression with three simultaneous targets."""
    net = NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR)
    check_regression(net, n_targets=3, **regressor_params)
예제 #27
0
def test_pybrain_regression():
    """Default PyBrain regression; no staging, importances, or sample weights."""
    reg = PyBrainRegressor()
    check_regression(reg, has_staged_predictions=False, has_importances=False,
                     supports_weight=False)
예제 #28
0
def test_xgboost():
    """Default XGBoost on binary and four-class problems, plus regression."""
    for n_classes in (2, 4):
        check_classifier(XGBoostClassifier(), n_classes=n_classes)
    check_regression(XGBoostRegressor())
예제 #29
0
def test_theanets_multi_regression():
    """Theanets regression with three simultaneous targets."""
    reg = TheanetsRegressor(layers=[20],
                            trainers=[{'optimize': 'rmsprop',
                                       'min_improvement': 0.1}])
    check_regression(reg, n_targets=3, **regressor_params)
예제 #30
0
def test_xgboost():
    """XGBoost with 20 trees: binary/four-class classification and regression."""
    clf = XGBoostClassifier(n_estimators=20)
    check_classifier(clf, n_classes=2)
    check_classifier(XGBoostClassifier(n_estimators=20), n_classes=4)
    check_regression(XGBoostRegressor(n_estimators=20))
예제 #31
0
def test_mn_regression():
    """MatrixNet regression; importances must have the three known columns."""
    model = MatrixNetRegressor()
    check_regression(model)
    expected = {'effect', 'information', 'efficiency'}
    assert set(model.get_feature_importances().columns) == expected
예제 #32
0
def test_neurolab_multi_regression():
    """Neurolab regression predicting three targets at once."""
    regressor = NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR)
    check_regression(regressor, n_targets=3, **regressor_params)
예제 #33
0
파일: test_pybrain.py 프로젝트: spolakh/rep
def test_pybrain_regression():
    """Default PyBrain regression without staging, importances, or weights."""
    model = PyBrainRegressor()
    check_regression(model, has_staged_predictions=False,
                     has_importances=False, supports_weight=False)
예제 #34
0
def test_pybrain_Tanh():
    """PyBrain estimators with a 10-unit Tanh hidden layer."""
    tanh_clf = PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer'])
    check_classifier(tanh_clf, has_staged_pp=False, has_importances=False,
                     supports_weight=False)
    tanh_reg = PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer'])
    check_regression(tanh_reg, has_staged_predictions=False,
                     has_importances=False, supports_weight=False)
예제 #35
0
def test_neurolab_regression():
    """Neurolab regression via rprop training, then single-layer delta training."""
    flags = dict(supports_weight=False, has_importances=False,
                 has_staged_predictions=False)
    check_regression(NeurolabRegressor(layers=[], show=0, epochs=N_EPOCHS_REGR,
                                       trainf=nl.train.train_rprop),
                     **flags)
    check_regression(NeurolabRegressor(net_type='single-layer', cn='auto',
                                       show=0, epochs=N_EPOCHS_REGR,
                                       trainf=nl.train.train_delta),
                     **flags)
예제 #36
0
def test_sklearn_regression():
    """Both wrapped AdaBoost and GradientBoosting pass the regression check."""
    ada = SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50))
    check_regression(ada)
    gb = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(gb)
예제 #37
0
def test_pybrain_Linear_MDLSTM():
    """PyBrain with Linear + MDLSTM hidden layers for both task types."""
    classifier = PyBrainClassifier(epochs=2, layers=[10, 2],
                                   hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_classifier(classifier, **classifier_params)
    regressor = PyBrainRegressor(epochs=3, layers=[10, 2],
                                 hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_regression(regressor, **regressor_params)
예제 #38
0
def test_theanets_regression():
    """Theanets regression with an rmsprop trainer and with a standard scaler."""
    tuned = TheanetsRegressor(layers=[20],
                              trainers=[{'optimize': 'rmsprop',
                                         'min_improvement': 0.1}])
    check_regression(tuned, **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()),
                     **regressor_params)
예제 #39
0
파일: test_pybrain.py 프로젝트: yhaddad/rep
def test_pybrain_multi_regression():
    """Default PyBrain regression with four simultaneous targets."""
    reg = PyBrainRegressor()
    check_regression(reg, n_targets=4, **regressor_params)
예제 #40
0
def test_theanets_regression():
    """Theanets regression with rmsprop (lr=0.1) and with a standard scaler."""
    tuned = TheanetsRegressor(layers=[11],
                              trainers=[{'algo': 'rmsprop',
                                         'learning_rate': 0.1}])
    check_regression(tuned, **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()),
                     **regressor_params)
예제 #41
0
파일: test_pybrain.py 프로젝트: yhaddad/rep
def test_pybrain_regression():
    """Default PyBrain regression with the shared check parameters."""
    reg = PyBrainRegressor()
    check_regression(reg, **regressor_params)
예제 #42
0
def test_neurolab_regression():
    """Neurolab regression with a single hidden unit, shared check parameters."""
    single_unit = NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR)
    check_regression(single_unit, **regressor_params)