Example No. 1
def test_basic(self):
    dataset = data_generators.linear1d_gaussian_noise()
    k = GeneralizedLinearKernel()
    clf = GPR(k)
    clf.train(dataset)
    y = clf.predict(dataset.samples)
    assert_array_equal(y.shape, dataset.targets.shape)
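The snippet is a unit-test method, so its imports live at module level. As a reference, here is a self-contained sketch of the same train/predict flow; the import paths for the kernel and the data generator are assumptions based on the PyMVPA 2.x layout and may differ between versions.

from mvpa2.clfs.gpr import GPR
from mvpa2.kernels.np import GeneralizedLinearKernel   # assumed import path
from mvpa2.misc import data_generators                 # assumed import path

dataset = data_generators.linear1d_gaussian_noise()    # toy 1-D regression data
clf = GPR(GeneralizedLinearKernel())
clf.train(dataset)
predictions = clf.predict(dataset.samples)
assert predictions.shape == dataset.targets.shape      # same check as in the test above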
Example No. 2
    def _test_gpr_model_selection(self):  # pragma: no cover
        """Smoke test for running model selection while getting GPRWeights

        TODO: DISABLED because setting of hyperparameters was not adopted for 0.6 (yet)
        """
        if not externals.exists('openopt'):
            return
        amap = AttributeMap()  # we would need to pass numbers into the GPR
        dataset = datasets['uni2small'].copy()  # data_generators.linear1d_gaussian_noise()
        dataset.targets = amap.to_numeric(dataset.targets).astype(float)
        k = GeneralizedLinearKernel()
        clf = GPR(k, enable_ca=['log_marginal_likelihood'])
        sa = clf.get_sensitivity_analyzer()  # should be regular weights
        sa_ms = clf.get_sensitivity_analyzer(
            flavor='model_select')  # with model selection

        def prints():
            print(clf.ca.log_marginal_likelihood,
                  clf.kernel.Sigma_p, clf.kernel.sigma_0)

        sa(dataset)
        lml = clf.ca.log_marginal_likelihood

        sa_ms(dataset)
        lml_ms = clf.ca.log_marginal_likelihood

        self.assertTrue(lml_ms > lml)
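The model-selection flavor above is disabled and additionally needs openopt, but the quantity it optimizes, the log marginal likelihood, is already available from a plain training run once the conditional attribute is enabled. A minimal sketch, assuming the same import paths as in the previous sketch and that the attribute is filled in by train():

from mvpa2.clfs.gpr import GPR
from mvpa2.kernels.np import GeneralizedLinearKernel   # assumed import path
from mvpa2.misc import data_generators                 # assumed import path

dataset = data_generators.linear1d_gaussian_noise()
clf = GPR(GeneralizedLinearKernel(), enable_ca=['log_marginal_likelihood'])
clf.train(dataset)
# model evidence plus the current kernel hyperparameters
print(clf.ca.log_marginal_likelihood, clf.kernel.Sigma_p, clf.kernel.sigma_0)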
Example No. 3
def test_basic(self):
    skip_if_no_external('scipy')  # needed by GPR code
    dataset = data_generators.linear1d_gaussian_noise()
    k = GeneralizedLinearKernel()
    clf = GPR(k)
    clf.train(dataset)
    y = clf.predict(dataset.samples)
    assert_array_equal(y.shape, dataset.targets.shape)
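skip_if_no_external() is the unit-test flavor of the dependency guard; outside of a test the same check is typically spelled with externals.exists(), as in the classifier-warehouse example below. A short sketch (the externals module path is an assumption):

from mvpa2.base import externals        # assumed import path

if externals.exists('scipy'):           # SciPy is needed by the GPR code
    from mvpa2.clfs.gpr import GPR
    clf = GPR()                         # GPR with its default kernel
else:
    clf = None                          # skip GPR-based analyses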
Example No. 4
clfswh += GNB(descr="GNB()")
clfswh += GNB(common_variance=True, descr="GNB(common_variance=True)")
clfswh += GNB(prior='uniform', descr="GNB(prior='uniform')")
clfswh += \
    FeatureSelectionClassifier(
        GNB(),
        SensitivityBasedFeatureSelection(
            OneWayAnova(),
            FractionTailSelector(0.05, mode='select', tail='upper')),
        descr="GNB on 5%(ANOVA)")

# GPR
if externals.exists('scipy'):
    from mvpa2.clfs.gpr import GPR

    regrswh += GPR(kernel=LinearKernel(), descr="GPR(kernel='linear')")
    regrswh += GPR(kernel=SquaredExponentialKernel(),
                   descr="GPR(kernel='sqexp')")

    # Add wrapped GPR as a classifier
    gprcb = RegressionAsClassifier(GPR(kernel=GeneralizedLinearKernel()),
                                   descr="GPRC(kernel='linear')")
    # let's remove the multiclass label from it
    gprcb.__tags__.pop(gprcb.__tags__.index('multiclass'))
    clfswh += gprcb

    # and create a proper multiclass one
    clfswh += MulticlassClassifier(
        RegressionAsClassifier(GPR(kernel=GeneralizedLinearKernel())),
        descr="GPRCM(kernel='linear')")
Example No. 5
        #ds.fa['parcel']  = msk_data
        ds_all.append(ds)
        verbose(2, "subject %i of %i loaded" % (index, nsubs))

fds = vstack(ds_all)  # stack datasets


# classifier algorithm
if clf_type == 'SVM':
    clf = LinearCSVMC(tube_epsilon=0.01)
elif clf_type == 'SVM-rbf':
    clf = RbfCSVMC(tube_epsilon=0.01)
elif clf_type == 'ridgeReg':
    clf = RidgeReg()
elif clf_type == 'gpr':
    clf = GPR()

# #feature selection
# fsel = SensitivityBasedFeatureSelection(
#             OneWayAnova(),
#             FractionTailSelector(0.05, mode='select', tail='upper'))
# fclf = FeatureSelectionClassifier(clf, fsel)

# cross-validation algorithm
if cv_type == 'split_half':
    cv = CrossValidation(clf,
                         HalfPartitioner(count=2,
                                         selection_strategy='random', attr='subject'),
                         errorfx=mean_match_accuracy)
elif cv_type == 'LOSO':
    cv = CrossValidation(clf,
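The listing breaks off in the middle of the LOSO branch. For completeness, here is a hedged sketch of a full cross-validated run over the stacked dataset using the split-half scheme shown above; it reuses the names fds, clf and the 'subject' sample attribute from this script and assumes numpy is imported as np:

cv = CrossValidation(clf,
                     HalfPartitioner(count=2, selection_strategy='random',
                                     attr='subject'),
                     errorfx=mean_match_accuracy)
cv_results = cv(fds)           # one accuracy value per partition set
print(np.mean(cv_results))     # mean cross-validated accuracy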