def __init__(self, data, run=True, logger=None, loglevel=None, **kwargs):

    # Logger
    Logger.__init__(self, logger=logger, loglevel=loglevel,
                    **dict_filter(kwargs, 'log_'))
    self._kwargs = kwargs
    self._kwargs['keep_invalids'] = True
    self._kwargs.setdefault('nvalid', 1)
    self._kwargs.setdefault('quiet', True)
    self._data = data
    self.nstep = 0
    self.cv = 100
    self._analyzes = []
    self._kwfill = {}
    self._ana = 'pca'

    # Setup analyzer
    span = self.analyze(**kwargs)

    # Checks
    if not span.invalids.any():
        self.warning("No gap to fill")

    # Keep original data safe
    self._set_field_(span.stacked_data, 'orig')

    # Start filling?
    if run:
        self.fill(**kwargs)
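# Illustrative usage sketch (not from the source): it assumes the constructor
# above belongs to the gap-filling class, referred to here as ``Filler``; the
# import path and the class name are assumptions made for this example only.
# With ``run=False`` the constructor only analyzes the data, and the gaps are
# filled by an explicit call to :meth:`fill`.
def _example_filler_usage():
    import numpy as npy
    from spanlib.filler import Filler  # hypothetical import path
    nt, ns = 120, 50
    field = npy.ma.masked_greater(npy.random.rand(nt, ns), 0.95)  # ~5% gaps
    filler = Filler(field, run=False)          # analyze only
    filler.fill(nitermax=10, fillmode='masked')  # then fill explicitly
    return filler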
def __init__(self, dataset, weights=None, norms=None, keep_invalids=False,
             minvalid=None, clean_weights=True, logger=None, loglevel=None,
             zerofill=False, **kwargs):

    # Logger
    Logger.__init__(self, logger=logger, loglevel=loglevel,
                    **dict_filter(kwargs, 'log_'))

    # Input shape
    if isinstance(dataset, (list, tuple)):
        dataset = list(dataset)
        self.map = len(dataset)
    else:
        dataset = [dataset]
        self.map = 0
    self.ndataset = self.nd = len(dataset)
    self.dataset = dataset

    # Other inits
    self.data = []
    self.nt = None
    # weights = self.remap(weights, reshape=True)
    norms = self.remap(norms, reshape=True)
    if self.ndataset == 1 and norms[0] is None:
        norms = [False]
    self._invalids = []
    self.masked = False

    # Loop on datasets
    for idata, data in enumerate(dataset):

        # Create the Data instance and pack array
        dd = Data(data, norm=norms[idata],  # weights=weights[idata],
                  keep_invalids=keep_invalids, minvalid=minvalid,
                  # clean_weights=clean_weights,
                  zerofill=zerofill)
        self.data.append(dd)
        self._invalids.append(dd.invalids)
        self.masked |= dd.masked

        # Check nt
        if self.nt is None:
            self.nt = dd.nt
        elif self.nt != dd.nt:
            self.error('Time dimension of variable %i must have length %i '
                       '(not %i)' % (idata, self.nt, dd.nt))

    # Merge
    self.stacked_data = npy.asfortranarray(
        npy.vstack([d.packed_data for d in self.data]))
    self.splits = npy.cumsum(
        [d.packed_data.shape[0] for d in self.data[:-1]])
    # self.stacked_weights = npy.hstack([d.packed_weights for d in self.data])
    self.ns = self.stacked_data.shape[0]
    self.ntv = (self.stacked_data != default_missing_value).any(axis=0).sum()
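# Standalone numpy sketch of the stacking logic used above (outside the
# class, for illustration only): each variable is packed to a
# (channels, time) block, the blocks are stacked with ``vstack``, and
# ``splits`` records where each block starts so the stack can be taken
# apart again.
def _example_stacking():
    import numpy as npy
    packed = [npy.random.rand(5, 100), npy.random.rand(3, 100)]  # two variables
    stacked = npy.asfortranarray(npy.vstack(packed))              # (8, 100)
    splits = npy.cumsum([p.shape[0] for p in packed[:-1]])        # [5]
    blocks = npy.split(stacked, splits, axis=0)                   # back to 5- and 3-channel blocks
    assert all(npy.allclose(a, b) for a, b in zip(blocks, packed))
    return stacked, splits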
def __init__(self, ldataset, rdataset, lweights=None, rweights=None,
             lnorms=None, rnorms=None, lminvalid=None, rminvalid=None,
             logger=None, loglevel=None, zerofill=0, **kwargs):

    # Loggers
    Logger.__init__(self, logger=logger, loglevel=loglevel,
                    **dict_filter(kwargs, 'log_'))
    self._quiet = False

    # Left and right Analyzer instances
    if zerofill == 2:
        kwargs['zerofill'] = 2
    kwleft, kwright = self._dict_filter_lr_(kwargs)
    kwargs['logger'] = self.logger
    self.lspan = Analyzer(ldataset, weights=lweights, norms=lnorms,
                          minvalid=lminvalid, **kwargs)
    self.rspan = Analyzer(rdataset, weights=rweights, norms=rnorms,
                          minvalid=rminvalid, **kwargs)

    # Init results
    self.clean(pca=False)

    # Check and save parameters
    self.update_params(None, **kwargs)
@staticmethod
def _dict_filter_lr_(kwargs):
    return dict_filter(kwargs, 'l'), dict_filter(kwargs, 'r')
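# Hedged sketch of the keyword-splitting idea behind ``_dict_filter_lr_``:
# prefixed keywords are routed to the left ('l...') and right ('r...')
# datasets. The helper below is a stand-in written for this example; the real
# ``dict_filter`` may differ in details (e.g. whether it pops matched keys or
# how it treats other 'l'/'r'-starting names).
def _example_lr_split():
    def _prefix_filter(kwargs, prefix):
        # keep only keys starting with the prefix, with the prefix stripped
        return {key[len(prefix):]: val for key, val in kwargs.items()
                if key.startswith(prefix)}
    kwargs = dict(lwindow=10, rwindow=20, npca=15)
    kwleft = _prefix_filter(kwargs, 'l')    # {'window': 10}
    kwright = _prefix_filter(kwargs, 'r')   # {'window': 20}
    return kwleft, kwright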
def fill(self, nitermax=20, errchmax=-0.01, fillmode='masked',
         testmode='crossvalid', mssa=True, full=True, cvregen=False,
         nreanapca=3, nreanamssa=2, errchmaxreana=-1, remode=False, **kwargs):
    """Run the filler with a convergence loop

    Results are accessible in the following attributes:

    .. attribute:: filtered

        Filtered data, result from the convergence loop.

    .. attribute:: filled

        Data filled with :attr:`filtered`

    :Parameters:

        - **fillmode**: "zeros" or "masked"
        - **nitermax**: Maximal number of iterations
        - **errchmax**: Convergence criterion on the error change (%)
        - **npca**: Number of PCA modes (see :class:`Analyzer`)
        - **nmssa**: Number of MSSA modes (see :class:`Analyzer`)
        - **cvfield_level**: Percent of data used for cross-validation.
        - Other parameters are passed to :class:`Analyzer`

    :Returns:

        - :attr:`filled`
    """
    # Parameters
    self._kwfill.update(nitermax=nitermax, errchmax=errchmax,
                        fillmode=fillmode, testmode=testmode, mssa=mssa,
                        full=full, cvregen=cvregen, nreanapca=nreanapca,
                        nreanamssa=nreanamssa, **kwargs)
    kwgencv = dict_filter(kwargs, 'cvfield_')
    span = self.span
    if fillmode == 0:
        fillmode = "none"
    fillmode = str(fillmode).lower()
    if fillmode.startswith('n') or fillmode.startswith('m'):
        fillmode = "masked"
    elif fillmode.startswith('z'):
        fillmode = "zeros"
    kwargs['zerofill'] = 0 if fillmode == "masked" else 2
    kwargs['prepca'] = True  # always PCA
    self.debug('Filling mode: %s (%i)' % (fillmode, kwargs['zerofill']))
    span.update_params(**kwargs)
    nreana = dict(pca=nreanapca, mssa=nreanamssa)

    # Which analyzes types?
    analyzes = []
    # if not mssa or span.prepca:
    analyzes.append('pca')
    if mssa:
        analyzes.append('mssa')
    self._analyzes = analyzes
    self._nomssaneeded = False

    # Which modes?
    testmode = str(testmode).lower()
    if testmode.startswith('c'):
        kwgencv['regen'] = cvregen
        anamodes = [2, 0]
    else:
        anamodes = [1]

    # Loop on analysis modes
    self._nmodes = {}
    self._errors = {}
    for anamode in anamodes:
        self.debug('Analysis mode: ' +
                   ['NORMAL', 'SELF-VALIDATION', 'CROSS-VALIDATION'][anamode])

        # Loop on analysis types (PCA, MSSA)
        self._errors[anamode] = {}
        for self._ana in analyzes:
            self.debug(' Running ' + self._ana.upper())

            # Update the number of pre-PCA modes for MSSA
            if self._ana == 'mssa':
                span.update_params(prepca=imode + 1)

            # Link to appropriate data
            # - reference data
            self._link_field_('orig' if self._ana == 'pca' else 'pcs', 'ref')
            rmask = self._refm.mask
            saxis = int(self._ana == 'mssa')
            if rmask is npy.ma.nomask or not (
                    rmask.all(axis=saxis) | rmask.any(axis=saxis)).any():
                self.warning('%s: No gap to fill -> skipping'
                             % self._ana.upper())
                if 'mssa' in analyzes:
                    analyzes.remove('mssa')
                self._nomssaneeded = True
                self._nmodes.setdefault(self._ana, [[self.span.nmssa]])
                break

            # - data to fill
            if anamode == 2:  # cross-validation
                self._gen_cvfield_(**kwgencv)
                self._link_field_('cvfield', 'current')
            else:  # normal, self-validation
                self._link_field_('ref', 'current')

            # Initialize raw data
            self._set_raw_(self._currentm.data)

            # Reanalyses loop
            self._errors[anamode][self._ana] = []
            last_reana_err = None
            for ira in range(nreana[self._ana]):
                self.debug(' Analysis (%i/%i)' % (ira + 1, nreana[self._ana]))

                # Run analysis to get EOFs
                self._get_func_()(force=True)

                # CV loop on EC estimation (not for PCA with T-EOF)?
                ecloop = self._ana != 'pca' or not self.span.useteof
                niterec = nitermax if ecloop else 1

                # Number of modes to retain
                self._nmodes.setdefault(self._ana, [])
                amodes = self._nmodes[self._ana]
                trymodes = anamode != 0
                if len(amodes) < ira + 1:
                    if remode or ira == 0:
                        # test all modes
                        amodes.append(range(getattr(self.span,
                                                    'n' + self._ana)))
                    else:
                        trymodes = False
                        amodes.append(amodes[0])  # same as for first analysis

                # Loop on the number of modes
                last_mode_err = None
                self._last_pcs_mode = {}
                self._errors[anamode][self._ana].append([])
                for im, imode in enumerate(amodes[ira]):
                    verb = ' Reconstructing' if not trymodes else ' Trying'
                    self.debug(verb + ' with %i mode%s'
                               % (imode + 1, 's' * (imode > 0)))

                    # Inits
                    self._recm = default_missing_value
                    if hasattr(self, '_necmiss'):
                        del self._necmiss
                    if hasattr(self, '_necmiss_old'):
                        del self._necmiss_old
                    last_iter_err = None
                    skiplast = False
                    if anamode == 1 and im > 0:  # save previous pcs
                        self._last_pcs_mode[self._ana] = \
                            getattr(self.span, '_%s_raw_pc' % self._ana)
                    self._last_pcs_iter = {}

                    # Convergence loop for expansion coefficients
                    self._errors[anamode][self._ana][-1].append([])
                    for istep in range(niterec):

                        if ecloop:
                            self.debug(' EC convergence step: %i' % istep)

                        # Reconstruct
                        if anamode == 1 and istep > 0:
                            self._last_pcs_iter[self._ana] = \
                                getattr(self.span, '_%s_raw_pc' % self._ana)
                        self._rec_(imode)

                        # Current error
                        err = self._get_error_(anamode)
                        self._errors[anamode][self._ana][-1][-1].append(err)

                        # Check MSSA full filling
                        if self._ana == 'mssa' and full:
                            nem, nemch = self._get_necmiss_()
                            if nem and (nemch or nemch is None):
                                self.debug(
                                    ' Still %i missing values in %s MSSA PCs'
                                    % (nem, self._ana.upper()))
                                last_iter_err = err
                                continue

                        # Check convergence error for EC
                        if ecloop:
                            self.debug(' Current error: %.1f%%' % err)
                            if istep > 0 and last_iter_err is not None:
                                errch = err - last_iter_err
                                self.debug(' Error change: %g%%' % errch)
                                if errch >= errchmax:
                                    if errch > 0:
                                        self.debug(
                                            ' Error change > 0: unstable'
                                            ' mode -> step skipped')
                                        err = last_iter_err
                                        if anamode == 1:
                                            setattr(
                                                self.span,
                                                '_%s_raw_pc' % self._ana,
                                                self._last_pcs_iter[self._ana])
                                            self.debug(' Recovered PCs'
                                                       ' from last step')
                                        self._errors[anamode][
                                            self._ana][-1][-1] *= -1
                                        # skiplast = True
                                    else:
                                        self.debug(
                                            ' Error change (%g%%) > threshold'
                                            ' -> stopping convergence'
                                            % errch)
                                    break
                        else:
                            self.debug(' Error: %.1f%%' % err)
                            break
                        last_iter_err = err
                    else:
                        self.debug(' Reached max number of iterations'
                                   ' for EC convergence loop')

                    # Check mode truncature error
                    if anamode != 0 and im:
                        if skiplast:
                            errch = 1
                        else:
                            errch = err - last_mode_err
                        self.debug(' Error change between %i and %i modes: %g'
                                   % (imode, imode + 1, errch))
                        if errch > errchmax:
                            imode -= 1
                            self.debug(' Best number of %s modes: %i'
                                       % (self._ana.upper(), imode + 1))
                            if anamode == 1:
                                setattr(self.span, '_%s_raw_pc' % self._ana,
                                        self._last_pcs_mode[self._ana])
                                self.debug(' Recovered PCs from last mode')
                                if self._errors[anamode][
                                        self._ana][-1][-1] > 0:
                                    self._errors[anamode][
                                        self._ana][-1][-1] *= -1
                            break
                    last_mode_err = err
                else:
                    if trymodes:
                        self.debug(' Reached max number of %s modes (%i)'
                                   % (self._ana.upper(), imode + 1))
                # -> NMODES

                # Refill for next reana
                if ira < nreana[self._ana] - 1:
                    self.debug(' Filling for next reanalysis')
                    self._set_raw_(self._currentm.filled(self._recm.data))

                # Store optimal number of modes info for normal analysis
                # after cross-validation
                if anamode == 2:
                    self._nmodes[self._ana][ira] = [imode]

                # Error check
                if ira > 0:
                    errch = err - last_reana_err
                    self.debug(' Error change since last analysis: %.2g'
                               % errch)
                    if errch > errchmaxreana:
                        self.debug(' Stopping reanalyzes')
                        break
                last_reana_err = err
            else:
                self.debug(' Reached max number of reanalyzes for %s (%i)'
                           % (self._ana.upper(), ira + 1))
            # -> REANA

            # Store number of reanalyzes for normal analysis
            # after cross-validation
            if anamode == 2:
                nreana[self._ana] = ira + 1

            # Store PCA pcs for MSSA
            if self._ana == 'pca' and 'mssa' in analyzes:
                self.span.prepca = imode + 1
                self._set_field_(self.span._pca_raw_pc[:, :imode + 1].T,
                                 'pcs', mean=True, std=False)
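# Illustrative call sketch (assumption: ``filler`` is an instance of the
# enclosing class, already constructed with ``run=False``). The docstring
# above states that the results end up in the ``filled`` and ``filtered``
# attributes; all keyword arguments shown are regular parameters of ``fill``.
def _example_fill_call(filler):
    filler.fill(nitermax=15, errchmax=-0.01,
                fillmode='masked', testmode='crossvalid',
                mssa=True, nreanapca=3, nreanamssa=2)
    return filler.filled, filler.filtered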
def __init__(self, data, weights=None, norm=None, keep_invalids=False,
             minvalid=None, clean_weights=True, logger=None, loglevel=None,
             zerofill=False, **kwargs):

    # Logger
    Logger.__init__(self, logger=logger, loglevel=loglevel,
                    **dict_filter(kwargs, 'log_'))

    # Guess data type and copy
    if cdms2_isVariable(data):
        self.array_type = 'MV2'
        self.array_mod = MV2
        data = data.clone()
    elif npy.ma.isMA(data):
        self.array_type = 'numpy.ma'
        self.array_mod = numpy.ma
        data = data.copy()
    else:
        self.array_type = 'numpy'
        self.array_mod = numpy
        data = data.copy()
    self.data = data
    self.dtype = data.dtype
    data = data.astype('d')

    # Shape
    self.shape = data.shape
    self.ndim = data.ndim
    self.nt = self.shape[0]
    self.nstot = data.size // self.nt
    self.nsdim = data.ndim - 1

    # Check time axis
    if cdms2_isVariable(data) and data.getTime() is not None:
        order = data.getOrder()
        if not order.startswith('t'):
            warn('Time axis is not the first axis of input variable '
                 '(order="%s")' % order)

    # # Weights?
    # if weights is None or weights is False:
    #     if (weights is not False and data.ndim == 3 and
    #             cdms2_isVariable(data) and
    #             'x' in data.getOrder() and 'y' in data.getOrder()):
    #         import cdutil  # FIXME: WARNING FALSE
    #         weights = cdutil.area_weights(data[0]).data.astype('d')  # Geographic weights
    #     elif self.nstot == 1:
    #         weights = npy.ones(1)
    #     else:
    #         weights = npy.ones(self.shape[1:])
    # elif npy.ma.isMA(weights):
    #     weights = weights.astype('d').filled(0.)
    # else:
    #     weights = npy.asarray(weights, dtype='d')
    # if data.ndim > 1 and self.shape[1:] != weights.shape:
    #     self.error('Weights must be of shape %s (instead of %s)'
    #                % (self.shape[1:], weights.shape))

    # Store some info
    # - time
    if not cdms2_isVariable(data):
        self.taxis = data.shape[0]
    else:
        self.taxis = data.getAxis(0)
    # - other axes and attributes
    if cdms2_isVariable(data):  # cdms -> ids
        self.saxes = data.getAxisList()[1:]
        self.id = data.id
        self.atts = {}
        for att in data.listattributes():
            self.atts[att] = data.attributes[att]
        self.grid = data.getGrid()
        data = data.asma()
    else:  # numpy -> length
        self.saxes = data.shape[1:]
        self.id = None
        self.atts = None
        self.grid = None
    # - missing value
    if npy.ma.isMA(data):
        self.missing_value = data.get_fill_value()
    else:
        self.missing_value = 1.e20
    # - special cases
    for att in 'long_name', 'units':
        if hasattr(data, att):
            setattr(self, att, data.attributes[att])

    # Masking nans
    nans = npy.isnan(data)
    if nans.any():
        self.warning("Masking %i NaNs" % nans.sum())
        if self.array_type == 'numpy':
            self.array_type = 'numpy.ma'
            self.array_mod = numpy.ma
            data = npy.ma.array(data, mask=nans, copy=False)
        else:
            data[nans] = npy.ma.masked
        self.data = data

    # Mask (1 means good)
    # - real good values
    bmask = npy.ma.getmaskarray(data)
    good = 1 - bmask.astype('l')
    # - first from data (integrate) => 1D
    count = npy.atleast_1d(good.sum(axis=0))
    del good
    # # - now remove channels where weight is zero
    # if clean_weights:
    #     count[npy.atleast_1d(weights == 0.)] = 0
    # - check the number of valid data along time
    minvalid = kwargs.pop('nvalid', minvalid)
    if minvalid is not None and minvalid < 0:
        minvalid = -int(round(npy.clip(minvalid, -100., 0) * self.nt / 100))
    minvalid = npy.clip(int(minvalid), 1, self.nt) if minvalid is not None else 1
    count[count < minvalid] = 0  # < minvalid -> 0
    count = npy.clip(count, 0, 1)  # - save as 0/1
    self.ns = int(count.sum())
    self.compress = count.size != self.ns
    self.good = count > 0  # points in space with enough data in time
    self.minvalid = self.nvalid = minvalid

    # Scale unpacked data
    if not self.good.any():
        self.warning('No valid data')
        self.norm = 1.
        self.mean = 0
    else:
        # - mean
        self.mean = data.mean(axis=0)
        # - normalisation factor
        if norm is True or norm is None:
            norm = self.data.std()  # Standard norm
        elif norm is not False:
            if norm < 0:  # Relative norm, else strict norm
                norm = abs(norm) * self.data.std()
        else:
            norm = 1.
        self.norm = norm
        # - apply
        self.scale(data)

    # Fill data
    # - fill with missing value or mean (0.) where possible
    if minvalid != self.nt:
        # invalids = bmask & self.good  # masked data that will be analyzed
        # data[invalids] = 0. if zerofill else default_missing_value
        # data[invalids] = default_missing_value
        data[:, ~self.good] = default_missing_value
        if keep_invalids:
            # invalids = masked data that will be analyzed
            self.invalids = bmask & self.good
        else:
            self.invalids = None
            # del invalids
    else:
        self.invalids = None
    # - finally fill masked values with the missing value
    if npy.ma.isMA(data):
        data_num = data.filled(default_missing_value)
    else:
        data_num = data

    # Pack
    # - data
    self.packed_data = self.core_pack(data_num, force2d=True)
    self.masked = npy.isclose(self.packed_data, default_missing_value).any()
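# Standalone numpy sketch of the validity screening performed above (outside
# the class, for illustration only): a negative ``minvalid`` is read as a
# percentage of the time dimension, and spatial points with fewer valid time
# steps than that are flagged as bad.
def _example_valid_count(data, minvalid=-75):
    import numpy as npy
    # data: 2D masked array with shape (time, space)
    nt = data.shape[0]
    good = 1 - npy.ma.getmaskarray(data).astype('l')
    count = npy.atleast_1d(good.sum(axis=0))        # valid steps per point
    if minvalid is not None and minvalid < 0:
        minvalid = -int(round(npy.clip(minvalid, -100., 0) * nt / 100))
    minvalid = int(npy.clip(minvalid, 1, nt)) if minvalid is not None else 1
    return count >= minvalid                        # True where enough data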