Example #1
    def build_vector_based_pm(self):
        # samples: 40 samples in 20d space (40x20; samples x features)
        self.samples = np.random.rand(40, 20)

        # initial prototypes are the samples themselves:
        self.prototypes = self.samples.copy()

        # using just two similarities for now:
        self.similarities = [ExponentialKernel(), SquaredExponentialKernel()]
        # set up prototype mapper with prototypes identical to samples.
        self.pm = PrototypeMapper(similarities=self.similarities,
                                  prototypes=self.prototypes)
        # train the PrototypeMapper on the samples
        self.pm.train(self.samples)
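A minimal sketch of how the trained mapper could then be applied to unseen data; the import paths and the forward() call below are assumptions based on the usual mvpa2 layout, not part of the original test.

import numpy as np
from mvpa2.mappers.prototype import PrototypeMapper  # assumed path
from mvpa2.kernels.np import ExponentialKernel, SquaredExponentialKernel  # assumed path

samples = np.random.rand(40, 20)          # 40 samples x 20 features
pm = PrototypeMapper(similarities=[ExponentialKernel(),
                                   SquaredExponentialKernel()],
                     prototypes=samples.copy())
pm.train(samples)

# forward-map a few new samples into the prototype-similarity space;
# each output row holds similarities to all prototypes, one block per kernel
projected = pm.forward(np.random.rand(5, 20))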
Example #2
    def __init__(self, kernel=None, **kwargs):
        """Initialize a GPR regression analysis.

        Parameters
        ----------
        kernel : Kernel
          a kernel object defining the covariance between instances.
          (Defaults to SquaredExponentialKernel if None is provided.)
        """
        # init base class first
        Classifier.__init__(self, **kwargs)

        # It does not make sense to calculate a confusion matrix for a GPR
        # XXX it does ;) it will be a RegressionStatistics actually ;-)
        # So if someone desires -- let him have it
        # self.ca.enable('training_stats', False)

        # set kernel:
        if kernel is None:
            kernel = SquaredExponentialKernel()
            debug(
                "GPR",
                "No kernel was provided, falling back to default: %s" % kernel)
        self.__kernel = kernel

        # append proper clf_internal depending on the kernel
        # TODO: add "__tags__" to kernels since the check
        #       below does not scale
        if isinstance(kernel, (GeneralizedLinearKernel, LinearKernel)):
            self.__tags__ += ['linear']
        else:
            self.__tags__ += ['non-linear']

        if externals.exists('openopt') \
               and 'has_sensitivity' not in self.__tags__:
            self.__tags__ += ['has_sensitivity']

        # No need to initialize conditional attributes; unless they get set,
        # they would raise an exception.
        # self.predicted_variances = None
        # self.log_marginal_likelihood = None
        self._init_internals()
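A short usage sketch of the kernel fallback and tagging logic above; the import paths are assumptions and the snippet is not part of the original module.

from mvpa2.clfs.gpr import GPR              # assumed path
from mvpa2.kernels.np import LinearKernel   # assumed path

gpr_default = GPR()            # no kernel given: falls back to SquaredExponentialKernel
gpr_linear = GPR(kernel=LinearKernel())

# the constructor tags each instance according to its kernel type
assert 'non-linear' in gpr_default.__tags__
assert 'linear' in gpr_linear.__tags__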
Example #3
clfswh += GNB(common_variance=True, descr="GNB(common_variance=True)")
clfswh += GNB(prior='uniform', descr="GNB(prior='uniform')")
clfswh += \
    FeatureSelectionClassifier(
        GNB(),
        SensitivityBasedFeatureSelection(
           OneWayAnova(),
           FractionTailSelector(0.05, mode='select', tail='upper')),
        descr="GNB on 5%(ANOVA)")

# GPR
if externals.exists('scipy'):
    from mvpa2.clfs.gpr import GPR

    regrswh += GPR(kernel=LinearKernel(), descr="GPR(kernel='linear')")
    regrswh += GPR(kernel=SquaredExponentialKernel(),
                   descr="GPR(kernel='sqexp')")

    # Add wrapped GPR as a classifier
    gprcb = RegressionAsClassifier(GPR(kernel=GeneralizedLinearKernel()),
                                   descr="GPRC(kernel='linear')")
    # let's remove the 'multiclass' tag from it
    gprcb.__tags__.remove('multiclass')
    clfswh += gprcb

    # and create a proper multiclass one
    clfswh += MulticlassClassifier(RegressionAsClassifier(
        GPR(kernel=GeneralizedLinearKernel())),
                                   descr="GPRCM(kernel='linear')")

# BLR