def __init__(self, num_permutations=200, num_bootstraps=100, **kwargs):
    """Initialize the measure and store its resampling configuration.

    :Parameters:
      num_permutations : int
        Number of permutations to be run by the analysis.
      num_bootstraps : int
        Number of bootstrap resamplings to be run by the analysis.
    """
    # base class setup comes first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # remember the resampling parameters for the later analysis
    self.num_permutations = num_permutations
    self.num_bootstraps = num_bootstraps
def __init__(self, design, voi="pe", **kwargs):
    """
    Parameters
    ----------
    design : array (nsamples x nregressors)
      GLM design matrix.
    voi : {'pe', 'zstat'}
      Variable of interest that should be reported as feature-wise
      measure. 'beta' are the parameter estimates and 'zstat' returns
      standardized parameter estimates.

    Raises
    ------
    ValueError
      If `voi` is not one of the supported variables of interest.
    """
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # store the design matrix as such (no copying if already a matrix)
    self._design = np.asmatrix(design)
    # what should be computed ('variable of interest')
    # FIX: use the parenthesized raise form (valid on both Python 2 and 3;
    # the old `raise ValueError, msg` form is a syntax error on Python 3)
    # and the idiomatic `not in` membership test
    if voi not in ("pe", "zstat"):
        raise ValueError("Unknown variable of interest '%s'" % str(voi))
    self._voi = voi
    # will store the precomputed Moore-Penrose pseudo-inverse of the
    # design matrix (lazy calculation)
    self._inv_design = None
    # also store the inverse of the inner product for beta variance
    # estimation
    self._inv_ip = None
def __init__(self, design, voi='pe', **kwargs):
    """
    Parameters
    ----------
    design : array (nsamples x nregressors)
      GLM design matrix.
    voi : {'pe', 'zstat'}
      Variable of interest that should be reported as feature-wise
      measure. 'beta' are the parameter estimates and 'zstat' returns
      standardized parameter estimates.

    Raises
    ------
    ValueError
      If `voi` is not one of the supported variables of interest.
    """
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # store the design matrix as such (no copying if already a matrix)
    self._design = np.asmatrix(design)
    # what should be computed ('variable of interest')
    # FIX: use the parenthesized raise form (valid on both Python 2 and 3;
    # the old `raise ValueError, msg` form is a syntax error on Python 3)
    # and the idiomatic `not in` membership test
    if voi not in ('pe', 'zstat'):
        raise ValueError("Unknown variable of interest '%s'" % str(voi))
    self._voi = voi
    # will store the precomputed Moore-Penrose pseudo-inverse of the
    # design matrix (lazy calculation)
    self._inv_design = None
    # also store the inverse of the inner product for beta variance
    # estimation
    self._inv_ip = None
def __init__(self, sensana, splitter=NoneSplitter, combiner=FirstAxisMean,
             **kwargs):
    """Cheap initialization.

    :Parameters:
      sensana : FeaturewiseDatasetMeasure
        Sensitivity analyzer that shall be run on the `Dataset` splits.
      splitter : Splitter
        Used to split the `Dataset`. By convention the first dataset in
        the tuple returned by the splitter on each iteration is used to
        compute the sensitivity map.
      combiner
        This functor will be called on an array of sensitivity maps and
        the result will be returned by __call__(). The result of a
        combiner must be an 1d ndarray.
    """
    # init base classes first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # sensitivity analyzer used to compute the per-split sensitivity maps
    self.__sensana = sensana
    # splitter instance used to split the datasets
    self.__splitter = splitter
    # functor that combines the per-split sensitivities into the result
    # returned by __call__()
    self.__combiner = combiner
def __init__(self, num_permutations=200, num_bootstraps=100, **kwargs):
    """Initialize the (unfinished) PLS measure.

    Parameters
    ----------
    num_permutations : int
      Number of permutations to be run by the analysis.
    num_bootstraps : int
      Number of bootstrap resamplings to be run by the analysis.

    Raises
    ------
    NotImplementedError
      Always -- this implementation is not functional yet.
    """
    # BUG FIX: the original `raise NotImplemented, msg` is broken --
    # `NotImplemented` is a plain singleton, not an exception class, so
    # raising it fails with a TypeError. `NotImplementedError` is the
    # proper exception (and the parenthesized form works on Py2 and Py3).
    raise NotImplementedError('PLS was not yet implemented fully')
    # init base classes first (unreachable until implemented)
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # save the args for the analysis
    self.num_permutations = num_permutations
    self.num_bootstraps = num_bootstraps
def __init__(self, targets_attr='targets', **kwargs):
    """
    Parameters
    ----------
    targets_attr : str
      What samples attribute to use as targets (labels).
    """
    # keep the attribute name before delegating to the base class
    # (original ordering preserved)
    self._targets_attr = targets_attr
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
def __init__(self, attr="labels", **kwargs):
    """Initialize

    :Parameters:
      attr : basestring
        Attribute to correlate across chunks.
    """
    # base classes are initialized first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # name of the sample attribute the measure will correlate on
    self.__attr = attr
def __init__(self, attr='targets', **kwargs):
    """Initialize

    Parameters
    ----------
    attr : str
      Attribute to correlate across chunks.
    """
    # base classes are initialized first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # name of the sample attribute the measure will correlate on
    self.__attr = attr
def __init__(self, threshold=1.0e-2, kernel_width=1.0, w_guess=None,
             **kwargs):
    """Constructor of the IRELIEF class.

    Parameters
    ----------
    threshold : float
      Threshold on changes of W between iterations -- the stopping
      criterion for irelief.
    kernel_width : float
      Width of the kernel used by the algorithm.
    w_guess : array or None
      Optional initial guess for the weight vector W.
    """
    # base classes are initialized first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # stopping criterion: threshold on changes of W
    self.threshold = threshold
    # optional initial guess for W
    self.w_guess = w_guess
    # computed weights; filled in by the analysis
    self.w = None
    self.kernel_width = kernel_width
def __init__(self, val=0, combiner=SecondAxisSumOfAbs, **kwargs):
    """Initialize

    :Parameters:
      val : float
        Real-valued number for the null-hypothesis.
      combiner : Functor
        The combiner is only applied if the computed featurewise dataset
        measure is more than one-dimensional. This is different from a
        `transformer`, which is always applied. By default, the sum of
        absolute values along the second axis is computed.
    """
    # the combiner is handed to the base class positionally
    FeaturewiseDatasetMeasure.__init__(self, combiner, **kwargs)
    # null-hypothesis value the measure compares against
    self.__val = val
def __init__(self, pvalue=False, attr='labels', **kwargs):
    """Initialize

    :Parameters:
      pvalue : bool
        Either to report p-value of pearsons correlation coefficient
        instead of pure correlation coefficient
      attr : basestring
        What attribut to correlate with
    """
    # base classes are initialized first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # normalize the flag to an int (0/1), as the original did
    self.__pvalue = int(pvalue)
    # name of the attribute to correlate with
    self.__attr = attr
def __init__(self, datameasure, noise=N.random.normal):
    """Cheap initialization.

    Parameters
    ----------
    datameasure : `Datameasure`
      Used to quantify the effect of noise perturbation.
    noise : callable
      Functor to generate noise. The noise generator has to return an
      1d array of n values when called with the `size=n` keyword
      argument. This is the default interface of the random number
      generators in NumPy's `random` module.
    """
    # init base classes first
    FeaturewiseDatasetMeasure.__init__(self)
    # measure evaluated on the perturbed data
    self.__datameasure = datameasure
    # noise generator (NumPy random-module interface)
    self.__noise = noise
def __init__(self, datameasure, noise=np.random.normal):
    """
    Parameters
    ----------
    datameasure : `DatasetMeasure`
      Used to quantify the effect of noise perturbation.
    noise : callable
      Used to generate noise. The noise generator has to return an 1d
      array of n values when called with the `size=n` keyword argument.
      This is the default interface of the random number generators in
      NumPy's `random` module.
    """
    # init base classes first
    FeaturewiseDatasetMeasure.__init__(self)
    # measure evaluated on the perturbed data
    self.__datameasure = datameasure
    # noise generator (NumPy random-module interface)
    self.__noise = noise
def __init__(self, mult=1, **kwargs):
    """Initialize, storing the scalar multiplier.

    Parameters
    ----------
    mult : int or float
      Multiplier kept for later use by the measure.
    """
    # base classes are initialized first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # stored multiplier; presumably applied by the measure's __call__ --
    # not visible from this block
    self.__mult = mult
def __init__(self, combine=lambda x: np.median(x, axis=1), **kwargs):
    """Initialize

    Parameters
    ----------
    combine : callable
      Combining functor applied by the measure; by default the median
      along the second axis.
    """
    # base classes are initialized first
    FeaturewiseDatasetMeasure.__init__(self, **kwargs)
    # keep the combining functor for later use
    self._combine = combine