Example #1: registering GLMNET, LDA/QDA, kNN, and a feature-selection kNN with the clfswh/regrswh warehouses
##     regrswh += ENET(descr="ENET()")

# glmnet from R via RPy
if externals.exists('glmnet'):
    from mvpa.clfs.glmnet import GLMNET_C, GLMNET_R
    clfswh += GLMNET_C(descr="GLMNET_C()")
    regrswh += GLMNET_R(descr="GLMNET_R()")

# LDA/QDA
clfswh += LDA(descr='LDA()')
clfswh += QDA(descr='QDA()')

if externals.exists('skl'):
    from scikits.learn.lda import LDA as sklLDA
    from mvpa.clfs.skl.base import SKLLearnerAdapter
    clfswh += SKLLearnerAdapter(sklLDA(), tags=['lda', 'linear', 'multiclass', 'binary'],
                                descr='scikits.learn.LDA()_adapter')

# kNN
clfswh += kNN(k=5, descr="kNN(k=5)")
clfswh += kNN(k=5, voting='majority', descr="kNN(k=5, voting='majority')")

clfswh += \
    FeatureSelectionClassifier(
        kNN(),
        SensitivityBasedFeatureSelection(
           SMLRWeights(SMLR(lm=1.0, implementation="C"),
                       postproc=maxofabs_sample()),
           RangeElementSelector(mode='select')),
        descr="kNN on SMLR(lm=1) non-0")
Example #2: the same registrations extended with scikits.learn adapters (LDA, PLS regression, LARS), gated on the installed scikits.learn version
##     regrswh += ENET(descr="ENET()")

# glmnet from R via RPy
if externals.exists('glmnet'):
    from mvpa.clfs.glmnet import GLMNET_C, GLMNET_R
    clfswh += GLMNET_C(descr="GLMNET_C()")
    regrswh += GLMNET_R(descr="GLMNET_R()")

# LDA/QDA
clfswh += LDA(descr='LDA()')
clfswh += QDA(descr='QDA()')

if externals.exists('skl'):
    from scikits.learn.lda import LDA as sklLDA
    from mvpa.clfs.skl.base import SKLLearnerAdapter
    clfswh += SKLLearnerAdapter(sklLDA(),
                                tags=['lda', 'linear', 'multiclass', 'binary'],
                                descr='skl.LDA()')

    if externals.versions['skl'] >= '0.8':
        from scikits.learn.pls import PLSRegression as sklPLSRegression
        # somewhat silly use of PLS, but oh well
        regrswh += SKLLearnerAdapter(sklPLSRegression(n_components=1),
                                     tags=['linear', 'regression'],
                                     enforce_dim=1,
                                     descr='skl.PLSRegression_1d()')

    if externals.versions['skl'] >= '0.6.0':
        from scikits.learn.linear_model import \
             LARS as sklLARS, LassoLARS as sklLassoLARS
        _lars_tags = ['lars', 'linear', 'regression', 'does_feature_selection']
Example #3: the equivalent registrations under the mvpa2 namespace
# glmnet from R via RPy
if externals.exists("glmnet"):
    from mvpa2.clfs.glmnet import GLMNET_C, GLMNET_R

    clfswh += GLMNET_C(descr="GLMNET_C()")
    regrswh += GLMNET_R(descr="GLMNET_R()")

# LDA/QDA
clfswh += LDA(descr="LDA()")
clfswh += QDA(descr="QDA()")

if externals.exists("skl"):
    from scikits.learn.lda import LDA as sklLDA
    from mvpa2.clfs.skl.base import SKLLearnerAdapter

    clfswh += SKLLearnerAdapter(sklLDA(), tags=["lda", "linear", "multiclass", "binary"], descr="skl.LDA()")

    if externals.versions["skl"] >= "0.8":
        from scikits.learn.pls import PLSRegression as sklPLSRegression

        # somewhat silly use of PLS, but oh well
        regrswh += SKLLearnerAdapter(
            sklPLSRegression(n_components=1),
            tags=["linear", "regression"],
            enforce_dim=1,
            descr="skl.PLSRegression_1d()",
        )

    if externals.versions["skl"] >= "0.6.0":
        from scikits.learn.linear_model import LARS as sklLARS, LassoLARS as sklLassoLARS
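The scikits.learn entries in Examples #2 and #3 all go through SKLLearnerAdapter, which wraps an sklearn-style estimator in PyMVPA's Learner interface. Below is a hedged sketch of that wrapping used directly, assuming mvpa2's dataset_wizard helper and the usual train/predict methods (assumptions beyond what the snippets show):

# Hedged sketch: wrap a scikits.learn estimator and use it like a PyMVPA classifier.
# Assumptions: dataset_wizard is importable from mvpa2.datasets, and the adapter
# exposes the standard train()/predict() Learner interface.
import numpy as np
from mvpa2.datasets import dataset_wizard
from mvpa2.clfs.skl.base import SKLLearnerAdapter
from scikits.learn.lda import LDA as sklLDA

ds = dataset_wizard(samples=np.random.randn(20, 4),
                    targets=[0] * 10 + [1] * 10)
clf = SKLLearnerAdapter(sklLDA(), tags=["lda", "linear"], descr="skl.LDA()")
clf.train(ds)                    # train the wrapped estimator on the dataset
print(clf.predict(ds.samples))   # predictions in PyMVPA's usual form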