def __init__(self, **kwargs):
    """
    Parameters
    ----------
    **kwargs
      All additional arguments are passed to the base class.
    """
    Learner.__init__(self, **kwargs)

    # Internal settings that influence what should be done to the dataset
    # attributes in the default forward() and reverse() implementations.
    # They are passed to the Dataset.copy() method.
    self._sa_filter = None
    self._fa_filter = None
    self._a_filter = None
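# Hedged usage note (illustration only, not part of the original source):
# subclasses can narrow what the default forward()/reverse() copy over by
# replacing the filters set above, which end up as the attribute-selection
# arguments of Dataset.copy(), e.g. roughly:
#
#     out = dataset.copy(deep=False,
#                        sa=self._sa_filter,   # None == copy all sample attrs
#                        fa=self._fa_filter,
#                        a=self._a_filter)
#
# The exact copy() call shown here is an assumption about how the filters are
# consumed; only the fact that they are passed to Dataset.copy() is stated in
# the constructor above.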
def __init__(self, null_dist=None, **kwargs):
    """
    Parameters
    ----------
    null_dist : instance of distribution estimator
      The estimated distribution is used to assign a probability for a
      certain value of the computed measure.
    """
    Learner.__init__(self, **kwargs)

    null_dist_ = auto_null_dist(null_dist)
    if __debug__:
        debug('SA', 'Assigning null_dist %s (original given was %s)'
                    % (null_dist_, null_dist))
    self.__null_dist = null_dist_
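# Hedged usage sketch (illustration only): a subclass instance can be handed
# a null-distribution estimator at construction time, which auto_null_dist()
# normalizes before it is stored.  The MCNullDist name, its import path, and
# the SomeMeasureSubclass/permutator placeholders below are assumptions about
# a PyMVPA-style layout, not taken from this source:
#
#     from mvpa2.clfs.stats import MCNullDist
#
#     null = MCNullDist(permutator, tail='right')
#     measure = SomeMeasureSubclass(null_dist=null)
#     # results of measure(dataset) can then be assigned a probability under
#     # the estimated null distribution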
def __init__(self, space=None, **kwargs):
    # by default we want classifiers to use the 'targets' sample attribute
    # for training/testing
    if space is None:
        space = 'targets'
    Learner.__init__(self, space=space, **kwargs)

    # XXX
    # The place to map literal to numerical labels (and back).
    # This needs to be in the base class, since some classifiers also have
    # the nasty 'regression' mode, and the code in this class needs to deal
    # with converting the regression output into discrete labels.
    # However, preferably the mapping should be kept in the respective
    # low-level implementations that need it.
    self._attrmap = AttributeMap()

    self.__trainednfeatures = None
    """Stores the number of features the classifier was trained on.
    If None -- it wasn't trained at all."""

    self._set_retrainable(self.params.retrainable, force=True)
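# Hedged usage sketch (illustration only): because `space` defaults to
# 'targets', a minimal subclass would locate its training labels via the
# configured space.  The class name `Classifier` and the attribute access
# below are placeholders/assumptions; only the constructor above is from the
# original source:
#
#     class MajorityClassifierSketch(Classifier):
#         def _train(self, ds):
#             # the 'targets' sample attribute, unless another space was given
#             targets = ds.sa[self.get_space()].value
#             ...
#
# Literal labels found there would be translated to numerical values through
# self._attrmap whenever a particular backend requires it.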