Code Example #1
    def __init__(self, **kwargs):
        # force disable auto-train: would make no sense
        Learner.__init__(self, auto_train=False, **kwargs)
        # the smallest probability estimable from n_bootstrap samples is
        # 1 / (n_bootstrap + 1); if even that exceeds the requested threshold,
        # the threshold can never be reached
        if 1. / (self.params.n_bootstrap + 1) > self.params.feature_thresh_prob:
            raise ValueError('number of bootstrap samples is insufficient for'
                             ' the desired threshold probability')
        self.untrain()
Code Example #2
File: base.py Project: jgors/PyMVPA
    def __init__(self, **kwargs):
        """
        Parameters
        ----------
        **kwargs
          All additional arguments are passed to the baseclass.
        """
        Learner.__init__(self, **kwargs)
        # internal settings that influence what should be done to the dataset
        # attributes in the default forward() and reverse() implementations.
        # they are passed to the Dataset.copy() method
        self._sa_filter = None
        self._fa_filter = None
        self._a_filter = None
Code Example #3
File: base.py Project: psederberg/PyMVPA
    def __init__(self, null_dist=None, **kwargs):
        """
        Parameters
        ----------
        null_dist : instance of distribution estimator
          The estimated distribution is used to assign a probability for a
          certain value of the computed measure.
        """
        Learner.__init__(self, **kwargs)

        null_dist_ = auto_null_dist(null_dist)
        if __debug__:
            debug("SA", "Assigning null_dist %s whenever original given was %s" % (null_dist_, null_dist))
        self.__null_dist = null_dist_
Code Example #4
File: base.py Project: neurosbh/PyMVPA
    def __init__(self, null_dist=None, **kwargs):
        """
        Parameters
        ----------
        null_dist : instance of distribution estimator
          The estimated distribution is used to assign a probability for a
          certain value of the computed measure.
        """
        Learner.__init__(self, **kwargs)

        null_dist_ = auto_null_dist(null_dist)
        if __debug__:
            debug('SA', 'Assigning null_dist %s whenever original given was %s'
                  % (null_dist_, null_dist))
        self.__null_dist = null_dist_
Code Example #5
File: base.py Project: neurosbh/PyMVPA
    def __init__(self, space=None, **kwargs):
        # by default we want classifiers to use the 'targets' sample attribute
        # for training/testing
        if space is None:
            space = 'targets'
        Learner.__init__(self, space=space, **kwargs)

        # XXX
        # the place to map literal to numerical labels (and back)
        # this needs to be in the base class, since some classifiers also
        # have this nasty 'regression' mode, and the code in this class
        # needs to deal with converting the regression output into discrete
        # labels
        # however, preferably the mapping should be kept in the respective
        # low-level implementations that need it
        self._attrmap = AttributeMap()

        self.__trainednfeatures = 0
        """Stores number of features for which classifier was trained.
        If 0 -- it wasn't trained at all"""

        self._set_retrainable(self.params.retrainable, force=True)
Code Example #6
File: base.py Project: adamatus/PyMVPA
    def __init__(self, space=None, **kwargs):
        # by default we want classifiers to use the 'targets' sample attribute
        # for training/testing
        if space is None:
            space = 'targets'
        Learner.__init__(self, space=space, **kwargs)

        # XXX
        # the place to map literal to numerical labels (and back)
        # this needs to be in the base class, since some classifiers also
        # have this nasty 'regression' mode, and the code in this class
        # needs to deal with converting the regression output into discrete
        # labels
        # however, preferably the mapping should be kept in the respective
        # low-level implementations that need it
        self._attrmap = AttributeMap()

        self.__trainednfeatures = 0
        """Stores number of features for which classifier was trained.
        If 0 -- it wasn't trained at all"""

        self._set_retrainable(self.params.retrainable, force=True)
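
All of the examples above follow the same pattern: the subclass constructor accepts **kwargs, forwards them to Learner.__init__ (optionally forcing specific options such as auto_train or space), and only then sets up its own state. A minimal sketch of that pattern, assuming PyMVPA 2's mvpa2.base.learner import path and a purely hypothetical subclass name:

    from mvpa2.base.learner import Learner  # import path assumed (PyMVPA 2)

    class MyLearner(Learner):
        """Hypothetical subclass illustrating the constructor delegation pattern;
        not taken from PyMVPA itself."""

        def __init__(self, my_param=None, **kwargs):
            # forward all remaining keyword arguments to the base class
            Learner.__init__(self, **kwargs)
            # subclass-specific state is configured only after the base
            # initializer has run
            self._my_param = my_param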