Example #1
    def __init__(self, **kwargs):
        """Cheap initialization.

        Sets up trained-features bookkeeping, retrainability, and the
        summary-statistics class depending on whether this instance is
        configured for regression or classification.
        """
        ClassWithCollections.__init__(self, **kwargs)


        self.__trainednfeatures = None
        """Stores number of features for which classifier was trained.
        If None -- it wasn't trained at all"""

        # Apply the 'retrainable' parameter; force=True presumably applies it
        # even when the value is unchanged -- TODO confirm in _setRetrainable.
        self._setRetrainable(self.params.retrainable, force=True)

        if self.params.regression:
            # States that only make sense for classification are switched off
            # while operating in regression mode.
            for statevar in [ "trained_labels"]: #, "training_confusion" ]:
                if self.states.isEnabled(statevar):
                    if __debug__:
                        debug("CLF",
                              "Disabling state %s since doing regression, " %
                              statevar + "not classification")
                    self.states.disable(statevar)
            self._summaryClass = RegressionStatistics
        else:
            self._summaryClass = ConfusionMatrix
            clf_internals = self._clf_internals
            if 'regression' in clf_internals and not ('binary' in clf_internals):
                # regressions are used as binary classifiers if not
                # asked to perform regression explicitly
                # We need a copy of the list, so we don't override class-wide
                self._clf_internals = clf_internals + ['binary']
Example #2
    def __init__(self, sd=0, distribution='rdist', fpp=None, nbins=400, **kwargs):
        """L2-Norm the values, convert them to p-values of a given distribution.

        Parameters
        ----------
        sd : int
          Samples dimension (if len(x.shape)>1) on which to operate
        distribution : string
          Which distribution to use. Known are: 'rdist' (later normal should
          be there as well)
        fpp : float
          At what p-value (both tails) if not None, to control for false
          positives. It would iteratively prune the tails (tentative real positives)
          until empirical p-value becomes less or equal to numerical.
        nbins : int
          Number of bins for the iterative pruning of positives

        WARNING: Highly experimental/slow/etc: no theoretical grounds have been
        presented in any paper, nor proven
        """
        # scipy provides the distribution machinery; fail early if missing.
        externals.exists('scipy', raise_=True)
        ClassWithCollections.__init__(self, **kwargs)

        self.sd = sd
        # Validate the distribution up front.  Use the call-form raise (valid
        # in both Python 2 and 3) instead of the Python-2-only statement form
        # "raise ValueError, msg", and the idiomatic "not in" test.
        if distribution not in ['rdist']:
            raise ValueError("Actually only rdist supported at the moment"
                             " got %s" % distribution)
        self.distribution = distribution
        self.fpp = fpp
        self.nbins = nbins
Example #3
    def __init__(self,
                 sd=0,
                 distribution='rdist',
                 fpp=None,
                 nbins=400,
                 **kwargs):
        """L2-Norm the values, convert them to p-values of a given distribution.

        Parameters
        ----------
        sd : int
          Samples dimension (if len(x.shape)>1) on which to operate
        distribution : string
          Which distribution to use. Known are: 'rdist' (later normal should
          be there as well)
        fpp : float
          At what p-value (both tails) if not None, to control for false
          positives. It would iteratively prune the tails (tentative real positives)
          until empirical p-value becomes less or equal to numerical.
        nbins : int
          Number of bins for the iterative pruning of positives

        WARNING: Highly experimental/slow/etc: no theoretical grounds have been
        presented in any paper, nor proven
        """
        # scipy provides the distribution machinery; fail early if missing.
        externals.exists('scipy', raise_=True)
        ClassWithCollections.__init__(self, **kwargs)

        self.sd = sd
        # Validate the distribution up front.  Use the call-form raise (valid
        # in both Python 2 and 3) instead of the Python-2-only statement form
        # "raise ValueError, msg", and the idiomatic "not in" test.
        if distribution not in ['rdist']:
            raise ValueError("Actually only rdist supported at the moment"
                             " got %s" % distribution)
        self.distribution = distribution
        self.fpp = fpp
        self.nbins = nbins
Example #4
    def __init__(self, mode='discard', **kwargs):
        """Initialize the element selector.

        :Parameters:
           mode : ['discard', 'select']
              Decides whether to `select` or to `discard` features.
        """
        # Let the collections machinery set itself up first.
        ClassWithCollections.__init__(self, **kwargs)

        # Record whether elements are to be selected or discarded.
        self._setMode(mode)
Example #5
    def __init__(self, mode='discard', **kwargs):
        """Initialize the element selector.

        Parameters
        ----------
        mode : {'discard', 'select'}
           Decides whether to `select` or to `discard` features.
        """
        # Base collections setup first.
        ClassWithCollections.__init__(self, **kwargs)

        # Record whether elements are to be selected or discarded.
        self._set_mode(mode)
Example #6
    def __init__(self, tail='both', **kwargs):
        """Initialize with the distribution tail to report.

        Parameters
        ----------
        tail : {'left', 'right', 'any', 'both'}
          Tail of the distribution to report. With 'any' or 'both' the tail
          is chosen based on the comparison to p=0.5. For 'any' the
          significance is taken as in a one-tailed test.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # Delegate handling of the tail choice to the dedicated setter.
        self._set_tail(tail)
Example #7
    def __init__(self, tail='both', **kwargs):
        """Initialize with the distribution tail to report.

        Parameters
        ----------
        tail : {'left', 'right', 'any', 'both'}
          Tail of the distribution to report. With 'any' or 'both' the tail
          is chosen based on the comparison to p=0.5. For 'any' the
          significance is taken as in a one-tailed test.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # Delegate handling of the tail choice to the dedicated setter.
        self._set_tail(tail)
Example #8
    def __init__(self, **kwargs):
        """Cheap initialization.

        Sets up the literal/numerical label mapping, the trained-features
        counter, and the retrainability machinery.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # XXX
        # the place to map literal to numerical labels (and back)
        # this needs to be in the base class, since some classifiers also
        # have this nasty 'regression' mode, and the code in this class
        # needs to deal with converting the regression output into discrete
        # labels
        # however, preferably the mapping should be kept in the respective
        # low-level implementations that need it
        self._attrmap = AttributeMap()

        self.__trainednfeatures = None
        """Stores number of features for which classifier was trained.
        If None -- it wasn't trained at all"""

        # Apply the 'retrainable' parameter; force=True presumably applies it
        # even on initial setup -- TODO confirm in _set_retrainable.
        self._set_retrainable(self.params.retrainable, force=True)
Example #9
    def __init__(self, **kwargs):
        """Cheap initialization.

        Sets up the literal/numerical label mapping, the trained-features
        counter, and the retrainability machinery.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # XXX
        # the place to map literal to numerical labels (and back)
        # this needs to be in the base class, since some classifiers also
        # have this nasty 'regression' mode, and the code in this class
        # needs to deal with converting the regression output into discrete
        # labels
        # however, preferably the mapping should be kept in the respective
        # low-level implementations that need it
        self._attrmap = AttributeMap()

        self.__trainednfeatures = None
        """Stores number of features for which classifier was trained.
        If None -- it wasn't trained at all"""

        # Apply the 'retrainable' parameter; force=True presumably applies it
        # even on initial setup -- TODO confirm in _set_retrainable.
        self._set_retrainable(self.params.retrainable, force=True)
Example #10
    def __init__(self, transformer=None, null_dist=None, **kwargs):
        """Plain initialization of the measure.

        :Parameters:
          transformer: Functor
            This functor is called in `__call__()` to perform a final
            processing step on the to be returned dataset measure. If None,
            nothing is called
          null_dist: instance of distribution estimator
            The estimated distribution is used to assign a probability for a
            certain value of the computed measure.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # Functor applied to the result in every subclass __call__().
        self.__transformer = transformer

        resolved_dist = autoNullDist(null_dist)
        if __debug__:
            debug('SA', 'Assigning null_dist %s whenever original given was %s'
                  % (resolved_dist, null_dist))
        self.__null_dist = resolved_dist
Example #11
    def __init__(self, postproc=None, null_dist=None, **kwargs):
        """Plain initialization of the measure.

        Parameters
        ----------
        postproc : Mapper instance
          Mapper to perform post-processing of results. This mapper is applied
          in `__call__()` to perform a final processing step on the to be
          returned dataset measure. If None, nothing is done.
        null_dist : instance of distribution estimator
          The estimated distribution is used to assign a probability for a
          certain value of the computed measure.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # Mapper applied to the result in every subclass __call__().
        self.__postproc = postproc

        resolved_dist = auto_null_dist(null_dist)
        if __debug__:
            debug('SA', 'Assigning null_dist %s whenever original given was %s'
                  % (resolved_dist, null_dist))
        self.__null_dist = resolved_dist
Example #12
    def __init__(self, postproc=None, null_dist=None, **kwargs):
        """Plain initialization of the measure.

        Parameters
        ----------
        postproc : Mapper instance
          Mapper to perform post-processing of results. This mapper is applied
          in `__call__()` to perform a final processing step on the to be
          returned dataset measure. If None, nothing is done.
        null_dist : instance of distribution estimator
          The estimated distribution is used to assign a probability for a
          certain value of the computed measure.
        """
        ClassWithCollections.__init__(self, **kwargs)

        # Mapper applied to the result in every subclass __call__().
        self.__postproc = postproc

        resolved_dist = auto_null_dist(null_dist)
        if __debug__:
            debug('SA', 'Assigning null_dist %s whenever original given was %s'
                  % (resolved_dist, null_dist))
        self.__null_dist = resolved_dist
Example #13
 def __init__(self, **kwargs):
     """Cheap initialization -- just delegates to the collections base."""
     # base init first
     ClassWithCollections.__init__(self, **kwargs)
Example #14
 def __init__(self, **kwargs):
     """Cheap initialization -- just delegates to the collections base."""
     # base init first
     ClassWithCollections.__init__(self, **kwargs)
Example #15
 def __init__(self, *args, **kwargs):
     """Initialize the kernel."""
     ClassWithCollections.__init__(self, *args, **kwargs)
     # Implementation-specific version of the kernel.
     self._k = None
Example #16
 def __init__(self, *args, **kwargs):
     """Initialize the base kernel, which itself takes no parameters."""
     ClassWithCollections.__init__(self, *args, **kwargs)
     # Implementation-specific version of the kernel.
     self._k = None
Example #17
 def __init__(self, **kwargs):
     """Cheap initialization -- just delegates to the collections base."""
     ClassWithCollections.__init__(self, **kwargs)
Example #18
 def __init__(self, **kwargs):
     """Initialization which bypasses the immediate parent constructor.

     NOTE(review): the TestClassProper constructor call below is deliberately
     commented out; ClassWithCollections is invoked directly instead.
     """
     # XXX make such example when we actually need to invoke
     # constructor
     # TestClassProper.__init__(self, **kwargs)
     ClassWithCollections.__init__(self, **kwargs)
Example #19
 def __init__(self, **kwargs):
     """Initialization which bypasses the immediate parent constructor.

     NOTE(review): the TestClassProper constructor call below is deliberately
     commented out; ClassWithCollections is invoked directly instead.
     """
     # XXX make such example when we actually need to invoke
     # constructor
     # TestClassProper.__init__(self, **kwargs)
     ClassWithCollections.__init__(self, **kwargs)