示例#1
0
def test_pybrain_SoftMax_Tanh():
    """Check PyBrain classifier/regressor built with Tanh + Softmax hidden layers."""
    clf = PyBrainClassifier(epochs=10,
                            layers=[5, 2],
                            hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                            use_rprop=True)
    check_classifier(clf, **classifier_params)

    reg = PyBrainRegressor(epochs=2,
                           layers=[10, 5, 2],
                           hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer'])
    check_regression(reg, **regressor_params)
示例#2
0
def test_theanets_regression():
    """Check theanets regression with explicit layers and with a StandardScaler."""
    plain = TheanetsRegressor(layers=[3],
                              trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(plain, **regressor_params)

    scaled = TheanetsRegressor(scaler=StandardScaler(),
                               trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(scaled, **regressor_params)
示例#3
0
def test_sklearn_regression():
    """Check the sklearn regressor wrapper with and without sample-weight support."""
    # AdaBoostRegressor supports sample weights
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    # GradientBoostingRegressor is checked without sample-weight support
    gb_wrapper = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(gb_wrapper, supports_weight=False)
示例#4
0
def test_tmva():
    """Check several TMVA classifier configurations and the TMVA regressor.

    TMVA wrappers are checked without staged predictions or feature
    importances (flags set False throughout).
    """
    # flags shared by every classifier check below
    classifier_flags = dict(check_instance=True,
                            has_staged_pp=False,
                            has_importances=False)

    # default configuration
    check_classifier(TMVAClassifier(), **classifier_flags)

    # SVM method
    cl = TMVAClassifier(method='kSVM',
                        Gamma=0.25,
                        Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl, **classifier_flags)

    # rectangular-cuts method fitted with a genetic algorithm
    cl = TMVAClassifier(method='kCuts',
                        FitMethod='GA',
                        EffMethod='EffSel',
                        sigmoid_function='sig_eff=0.9')
    check_classifier(cl, **classifier_flags)

    # check regressor, need to run twice to check for memory leak.
    # (loop index was unused — use `_` per convention)
    for _ in range(2):
        check_regression(TMVARegressor(),
                         check_instance=True,
                         has_staged_predictions=False,
                         has_importances=False)
示例#5
0
def test_models():
    """Check cache wrappers around SGD models, then clear the cache."""
    # repeated runs exercise the caching path — presumably later iterations
    # reuse stored results (TODO confirm against cache_helper semantics)
    for _ in range(3):
        check_classifier(CacheClassifier('clf', SGDClassifier(loss='log')),
                         has_staged_pp=False, has_importances=False)
        check_regression(CacheRegressor('reg', SGDRegressor()),
                         has_staged_predictions=False, has_importances=False)
    cache_helper.clear_cache()
示例#6
0
def test_theanets_multi_regression():
    """Check theanets regression with three simultaneous targets."""
    regressor = TheanetsRegressor(
        layers=[13],
        trainers=[dict(algo='rmsprop', min_improvement=0.1)])
    check_regression(regressor, n_targets=3, **regressor_params)
示例#7
0
def test_theanets_regression():
    """Check theanets regression with a tuned rmsprop trainer and with scaling."""
    tuned = TheanetsRegressor(layers=[3],
                              trainers=[dict(algo='rmsprop', learning_rate=0.1)])
    check_regression(tuned, **regressor_params)

    scaled = TheanetsRegressor(scaler=StandardScaler())
    check_regression(scaled, **regressor_params)
示例#8
0
def test_theanets_regression():
    """Check theanets regression with an rmsprop trainer and with a scaler."""
    tuned = TheanetsRegressor(
        layers=[20],
        trainers=[dict(optimize='rmsprop', min_improvement=0.1)])
    check_regression(tuned, **regressor_params)

    scaled = TheanetsRegressor(scaler=StandardScaler())
    check_regression(scaled, **regressor_params)
示例#9
0
def test_theanets_regression():
    """Check theanets regression both without and with feature scaling."""
    # rmsprop trainer configured with the shared "impatient" settings
    unscaled = TheanetsRegressor(layers=[3],
                                 trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(unscaled, **regressor_params)

    scaled = TheanetsRegressor(scaler=StandardScaler(),
                               trainers=[dict(algo='rmsprop', **impatient)])
    check_regression(scaled, **regressor_params)
示例#10
0
def test_pybrain_Tanh():
    """Check PyBrain models with a single Tanh hidden layer."""
    classifier = PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer'])
    check_classifier(classifier,
                     has_staged_pp=False,
                     has_importances=False,
                     supports_weight=False)

    regressor = PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer'])
    check_regression(regressor,
                     has_staged_predictions=False,
                     has_importances=False,
                     supports_weight=False)
示例#11
0
def test_models():
    """Check caching wrappers for SGD classifier and regressor, then clear the cache."""
    # three passes — presumably later passes hit the cache; verify against cache_helper
    for _ in range(3):
        classifier = CacheClassifier('clf', SGDClassifier(loss='log'))
        check_classifier(classifier, has_staged_pp=False, has_importances=False)

        regressor = CacheRegressor('reg', SGDRegressor())
        check_regression(regressor, has_staged_predictions=False,
                         has_importances=False)
    cache_helper.clear_cache()
示例#12
0
def test_pybrain_Linear_MDLSTM():
    """Check PyBrain models with Linear and MDLSTM hidden layers."""
    clf = PyBrainClassifier(epochs=2, layers=[10, 2],
                            hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_classifier(clf, **classifier_params)

    reg = PyBrainRegressor(epochs=3, layers=[10, 2],
                           hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_regression(reg, **regressor_params)
示例#13
0
def test_theanets_regression():
    """Check theanets regression with a custom trainer and with pure defaults."""
    # identical flags for both checks
    flags = dict(supports_weight=False,
                 has_staged_predictions=False,
                 has_importances=False)
    custom = TheanetsRegressor(layers=[20],
                               trainers=[dict(optimize='rmsprop')])
    check_regression(custom, **flags)
    check_regression(TheanetsRegressor(), **flags)
示例#14
0
def test_pybrain_SoftMax_Tanh():
    """Check PyBrain models mixing Tanh and Softmax hidden layers (rprop classifier)."""
    classifier = PyBrainClassifier(epochs=10,
                                   layers=[5, 2],
                                   hiddenclass=['TanhLayer', 'SoftmaxLayer'],
                                   use_rprop=True)
    check_classifier(classifier, **classifier_params)

    regressor = PyBrainRegressor(
        epochs=2,
        layers=[10, 5, 2],
        hiddenclass=['TanhLayer', 'SoftmaxLayer', 'TanhLayer'])
    check_regression(regressor, **regressor_params)
示例#15
0
File: test_tmva.py  Project: 0x0all/rep
def test_tmva():
    """Check TMVA classifiers (default, SVM, cuts) and the TMVA regressor."""
    # default configuration
    check_classifier(TMVAClassifier(), check_instance=True, has_staged_pp=False, has_importances=False)

    # SVM method
    cl = TMVAClassifier(method='kSVM', Gamma=0.25, Tol=0.001, sigmoid_function='identity')
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)

    # rectangular-cuts method fitted with a genetic algorithm
    cl = TMVAClassifier(method='kCuts', FitMethod='GA', EffMethod='EffSel', sigmoid_function='sig_eff=0.9')
    check_classifier(cl, check_instance=True, has_staged_pp=False, has_importances=False)
    # check regressor, need to run twice to check for memory leak.
    # (loop index was unused — renamed to `_` per convention)
    for _ in range(2):
        check_regression(TMVARegressor(), check_instance=True, has_staged_predictions=False, has_importances=False)
示例#16
0
def test_neurolab_regression():
    """Check neurolab regression with rprop training and with a single-layer delta net."""
    flags = dict(supports_weight=False,
                 has_importances=False,
                 has_staged_predictions=False)

    rprop_net = NeurolabRegressor(layers=[],
                                  show=0,
                                  epochs=N_EPOCHS_REGR,
                                  trainf=nl.train.train_rprop)
    check_regression(rprop_net, **flags)

    delta_net = NeurolabRegressor(net_type='single-layer',
                                  cn='auto',
                                  show=0,
                                  epochs=N_EPOCHS_REGR,
                                  trainf=nl.train.train_delta)
    check_regression(delta_net, **flags)
示例#17
0
def test_tmva():
    """Check TMVA classifiers (BDT, SVM, cuts) and regressor with quiet factory options."""
    # suppress TMVA console output during the test run
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    # identical flags for every classifier check below
    classifier_flags = dict(check_instance=True, has_staged_pp=False, has_importances=False)

    cl = TMVAClassifier(factory_options=factory_options, method='kBDT', NTrees=10)
    check_classifier(cl, **classifier_flags)

    cl = TMVAClassifier(factory_options=factory_options, method='kSVM', Gamma=0.25, Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl, **classifier_flags)

    cl = TMVAClassifier(factory_options=factory_options, method='kCuts',
                        FitMethod='GA', EffMethod='EffSel', sigmoid_function='sig_eff=0.9')
    check_classifier(cl, **classifier_flags)

    # check regressor, need to run twice to check for memory leak.
    # (loop index was unused — renamed to `_` per convention)
    for _ in range(2):
        check_regression(TMVARegressor(factory_options=factory_options, method='kBDT', NTrees=10),
                         check_instance=True,
                         has_staged_predictions=False, has_importances=False)
示例#18
0
def test_tmva():
    """Check TMVA classifiers (BDT, SVM, cuts) and regressor with quiet factory options.

    All checks disable staged predictions and feature importances, which the
    TMVA wrappers do not provide.
    """
    # suppress TMVA console output during the test run
    factory_options = "Silent=True:V=False:DrawProgressBar=False"
    # shared flags — previously repeated verbatim at every call site
    classifier_flags = dict(check_instance=True,
                            has_staged_pp=False,
                            has_importances=False)

    # boosted decision trees
    cl = TMVAClassifier(factory_options=factory_options,
                        method='kBDT',
                        NTrees=10)
    check_classifier(cl, **classifier_flags)

    # SVM method
    cl = TMVAClassifier(factory_options=factory_options,
                        method='kSVM',
                        Gamma=0.25,
                        Tol=0.001,
                        sigmoid_function='identity')
    check_classifier(cl, **classifier_flags)

    # rectangular-cuts method fitted with a genetic algorithm
    cl = TMVAClassifier(factory_options=factory_options,
                        method='kCuts',
                        FitMethod='GA',
                        EffMethod='EffSel',
                        sigmoid_function='sig_eff=0.9')
    check_classifier(cl, **classifier_flags)

    # check regressor, need to run twice to check for memory leak.
    # (loop index was unused — renamed to `_` per convention)
    for _ in range(2):
        check_regression(TMVARegressor(factory_options=factory_options,
                                       method='kBDT',
                                       NTrees=10),
                         check_instance=True,
                         has_staged_predictions=False,
                         has_importances=False)
示例#19
0
def test_xgboost():
    """Check XGBoost classification (binary and 4-class) and regression."""
    for n_classes in (2, 4):
        check_classifier(XGBoostClassifier(n_estimators=20), n_classes=n_classes)
    check_regression(XGBoostRegressor(n_estimators=20))
示例#20
0
def test_theanets_regression():
    """Check theanets regression with a custom rmsprop trainer and with defaults."""
    custom = TheanetsRegressor(layers=[20], trainers=[dict(optimize='rmsprop')])
    check_regression(custom,
                     supports_weight=False,
                     has_staged_predictions=False,
                     has_importances=False)
    check_regression(TheanetsRegressor(),
                     supports_weight=False,
                     has_staged_predictions=False,
                     has_importances=False)
示例#21
0
def test_sklearn_regression():
    """Check the sklearn regressor wrapper with and without sample-weight support."""
    # AdaBoostRegressor supports sample weights
    check_regression(SklearnRegressor(clf=AdaBoostRegressor(n_estimators=50)))
    # GradientBoostingRegressor is checked without sample-weight support
    wrapper = SklearnRegressor(clf=GradientBoostingRegressor(n_estimators=50))
    check_regression(wrapper, supports_weight=False)
示例#22
0
def test_folding_regressor_with_check_model():
    """Check a 2-fold FoldingRegressor over a gradient-boosting base model."""
    base_regressor = SklearnRegressor(GradientBoostingRegressor(n_estimators=4))
    folding_regressor = FoldingRegressor(base_regressor, n_folds=2)
    # NOTE(review): the three positional True flags map onto check_regression's
    # boolean parameters — confirm against its signature before changing.
    check_regression(folding_regressor, True, True, True)
示例#23
0
def test_folding_regressor_with_check_model():
    """Check FoldingRegressor (2 folds) wrapping a small gradient-boosting model."""
    wrapped_base = SklearnRegressor(GradientBoostingRegressor(n_estimators=4))
    folding = FoldingRegressor(wrapped_base, n_folds=2)
    # NOTE(review): positional True flags correspond to check_regression's
    # boolean parameters — verify their names against its definition.
    check_regression(folding, True, True, True)
示例#24
0
def test_neurolab_regression():
    """Check neurolab regression with one hidden unit."""
    regressor = NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR)
    check_regression(regressor, **regressor_params)
示例#25
0
def test_mn_regression():
    """Check MatrixNet regression and its feature-importance columns."""
    regressor = MatrixNetRegressor()
    check_regression(regressor)
    importance_columns = set(regressor.get_feature_importances().columns)
    assert importance_columns == {'effect', 'information', 'efficiency'}
示例#26
0
def test_neurolab_multi_regression():
    """Check neurolab regression with three simultaneous targets."""
    regressor = NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR)
    check_regression(regressor, n_targets=3, **regressor_params)
示例#27
0
def test_pybrain_regression():
    """Check the default PyBrain regressor (no staged predictions/importances/weights)."""
    regressor = PyBrainRegressor()
    check_regression(regressor,
                     has_staged_predictions=False,
                     has_importances=False,
                     supports_weight=False)
示例#28
0
def test_xgboost():
    """Check default XGBoost models on 2-class, 4-class and regression tasks."""
    for n_classes in (2, 4):
        check_classifier(XGBoostClassifier(), n_classes=n_classes)
    check_regression(XGBoostRegressor())
示例#29
0
def test_theanets_multi_regression():
    """Check theanets regression with three targets and an rmsprop trainer."""
    regressor = TheanetsRegressor(
        layers=[20],
        trainers=[dict(optimize='rmsprop', min_improvement=0.1)])
    check_regression(regressor, n_targets=3, **regressor_params)
示例#30
0
def test_xgboost():
    """Check 20-estimator XGBoost models: binary and 4-class classification, regression."""
    binary = XGBoostClassifier(n_estimators=20)
    check_classifier(binary, n_classes=2)
    multiclass = XGBoostClassifier(n_estimators=20)
    check_classifier(multiclass, n_classes=4)
    check_regression(XGBoostRegressor(n_estimators=20))
示例#31
0
def test_mn_regression():
    """Check MatrixNet regression; importances must report effect/information/efficiency."""
    regressor = MatrixNetRegressor()
    check_regression(regressor)
    expected_columns = {'effect', 'information', 'efficiency'}
    assert set(regressor.get_feature_importances().columns) == expected_columns
示例#32
0
def test_neurolab_multi_regression():
    """Check neurolab multi-target regression (three targets)."""
    net = NeurolabRegressor(layers=[10], epochs=N_EPOCHS_REGR)
    check_regression(net, n_targets=3, **regressor_params)
示例#33
0
def test_pybrain_regression():
    """Check the default PyBrain regressor with all optional capabilities disabled."""
    check_regression(PyBrainRegressor(),
                     has_staged_predictions=False,
                     has_importances=False,
                     supports_weight=False)
示例#34
0
def test_pybrain_Tanh():
    """Check PyBrain classifier and regressor with one Tanh hidden layer."""
    classifier = PyBrainClassifier(layers=[10], hiddenclass=['TanhLayer'])
    check_classifier(classifier, has_staged_pp=False,
                     has_importances=False, supports_weight=False)
    regressor = PyBrainRegressor(layers=[10], hiddenclass=['TanhLayer'])
    check_regression(regressor, has_staged_predictions=False,
                     has_importances=False, supports_weight=False)
示例#35
0
def test_neurolab_regression():
    """Check neurolab regression with rprop and with single-layer delta training."""
    for net in (NeurolabRegressor(layers=[], show=0, epochs=N_EPOCHS_REGR,
                                  trainf=nl.train.train_rprop),
                NeurolabRegressor(net_type='single-layer', cn='auto', show=0,
                                  epochs=N_EPOCHS_REGR, trainf=nl.train.train_delta)):
        check_regression(net, supports_weight=False,
                         has_importances=False, has_staged_predictions=False)
示例#36
0
def test_sklearn_regression():
    """Check the sklearn wrapper on AdaBoost and GradientBoosting regressors."""
    for base_estimator in (AdaBoostRegressor(n_estimators=50),
                           GradientBoostingRegressor(n_estimators=50)):
        check_regression(SklearnRegressor(clf=base_estimator))
示例#37
0
def test_pybrain_Linear_MDLSTM():
    """Check PyBrain classifier and regressor with Linear + MDLSTM hidden layers."""
    classifier = PyBrainClassifier(epochs=2, layers=[10, 2],
                                   hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_classifier(classifier, **classifier_params)
    regressor = PyBrainRegressor(epochs=3, layers=[10, 2],
                                 hiddenclass=['LinearLayer', 'MDLSTMLayer'])
    check_regression(regressor, **regressor_params)
示例#38
0
def test_theanets_regression():
    """Check theanets regression with a tuned trainer and with a StandardScaler."""
    tuned = TheanetsRegressor(
        layers=[20],
        trainers=[dict(optimize='rmsprop', min_improvement=0.1)])
    check_regression(tuned, **regressor_params)
    check_regression(TheanetsRegressor(scaler=StandardScaler()), **regressor_params)
示例#39
0
def test_pybrain_multi_regression():
    """Check the PyBrain regressor with four simultaneous targets."""
    regressor = PyBrainRegressor()
    check_regression(regressor, n_targets=4, **regressor_params)
示例#40
0
def test_theanets_regression():
    """Check theanets regression with a tuned rmsprop trainer and with a scaler."""
    tuned = TheanetsRegressor(
        layers=[11],
        trainers=[dict(algo='rmsprop', learning_rate=0.1)])
    check_regression(tuned, **regressor_params)
    scaled = TheanetsRegressor(scaler=StandardScaler())
    check_regression(scaled, **regressor_params)
示例#41
0
def test_pybrain_regression():
    """Check the default PyBrain regressor with the shared check parameters."""
    regressor = PyBrainRegressor()
    check_regression(regressor, **regressor_params)
示例#42
0
def test_neurolab_regression():
    """Check neurolab regression with one hidden unit and shared check parameters."""
    net = NeurolabRegressor(layers=[1], epochs=N_EPOCHS_REGR)
    check_regression(net, **regressor_params)