def test_glmnet_r_sensitivities():
    """A trained GLMNET_R must expose per-feature sensitivities.

    The analyzer is asked NOT to retrain, so sensitivities must be
    available without passing the dataset again.
    """
    ds = datasets['chirp_linear']
    regression = GLMNET_R()
    regression.train(ds)
    # Build the analyzer first, then invoke it -- no dataset argument,
    # no forced retraining.
    analyzer = regression.get_sensitivity_analyzer(force_training=False)
    sens = analyzer()
    # One row of weights, one column per feature.
    assert_equal(sens.shape, (1, ds.nfeatures))
def test_glmnet_state():
    """The 'predictions' conditional attribute must mirror predict()."""
    # NOTE: 'dumb2' would be the natural fixture, but for some reason
    # the R code fails with that data -- use 'chirp_linear' instead.
    ds = datasets['chirp_linear']
    regression = GLMNET_R()
    regression.train(ds)
    regression.ca.enable('predictions')
    predicted = regression.predict(ds.samples)
    # What predict() returned must be what the state variable recorded.
    assert_array_equal(predicted, regression.ca.predictions)
def test_glmnet_r():
    """GLMNET_R should fit the training data almost perfectly.

    Not the perfect dataset with which to test, but it will do for
    now ('dumb2' would be preferred, but the R code fails on it).
    """
    ds = datasets['chirp_linear']
    regression = GLMNET_R()
    regression.train(ds)
    # Judge prediction quality on the training samples via a
    # correlation-based error measure.
    predicted = regression.predict(ds.samples)
    corr_error = CorrErrorFx()(predicted, ds.targets)
    # Guarded as a 'labile' test -- it may be skipped via config.
    if cfg.getboolean('tests', 'labile', default='yes'):
        assert_true(corr_error < .2)
# clfswh += MulticlassClassifier(lars, # descr='Multiclass %s' % lars.descr) ## Still fails unittests battery although overhauled otherwise. ## # enet from R via RPy2 ## if externals.exists('elasticnet'): ## from mvpa.clfs.enet import ENET ## clfswh += RegressionAsClassifier(ENET(), ## descr="RegressionAsClassifier(ENET())") ## regrswh += ENET(descr="ENET()") # glmnet from R via RPy if externals.exists('glmnet'): from mvpa.clfs.glmnet import GLMNET_C, GLMNET_R clfswh += GLMNET_C(descr="GLMNET_C()") regrswh += GLMNET_R(descr="GLMNET_R()") # kNN clfswh += kNN(k=5, descr="kNN(k=5)") clfswh += kNN(k=5, voting='majority', descr="kNN(k=5, voting='majority')") clfswh += \ FeatureSelectionClassifier( kNN(), SensitivityBasedFeatureSelection( SMLRWeights(SMLR(lm=1.0, implementation="C"), postproc=maxofabs_sample()), RangeElementSelector(mode='select')), descr="kNN on SMLR(lm=1) non-0") clfswh += \