def systematic_bias(self):
    """ Systematic bias (+1: the model underestimates, -1: the model overestimates) """
    import numpy as np
    import cdms2
    from vacumm.misc.grid import get_grid, set_grid
    map_bias = list()  # Initialise a list
    # Sign of the obs/model difference at each time step
    tps = self.obs.getTime()
    for i, t in enumerate(tps):
        map_bias.append(np.sign(self.obs[i, :, :] - self.model[i, :, :]))
    map_bias = cdms2.createVariable(map_bias, typecode='f', id='computation')
    res = np.sum(map_bias, axis=0)
    # Find points equal to the length of the time series (<=> only positive values)
    one = (res == len(map_bias)).nonzero()
    # Find points equal to minus the length of the time series (<=> only negative values)
    mone = (res == -len(map_bias)).nonzero()
    self.biassyst = np.zeros(res.shape)
    self.biassyst[one] = 1.
    self.biassyst[mone] = -1.
    self.biassyst = cdms2.createVariable(self.biassyst, typecode='f', id='syst_bias')
    ggm = get_grid(self.model)
    set_grid(self.biassyst, ggm, axes=True)
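# Minimal numpy-only sketch of the systematic-bias logic above, run on small
# synthetic arrays (the method itself works on the cdms2 variables attached to
# self.obs and self.model); the array names here are illustrative assumptions.
import numpy as np

nt, ny, nx = 10, 3, 4
obs = np.random.rand(nt, ny, nx)
model = np.random.rand(nt, ny, nx)

signs = np.sign(obs - model)       # +1 where the model underestimates, -1 where it overestimates
res = signs.sum(axis=0)            # summed over the time dimension

bias = np.zeros((ny, nx))
bias[res == nt] = 1.               # model below the observations at every time step
bias[res == -nt] = -1.             # model above the observations at every time step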
def temporal_std(self):
    """ Standard deviation at each geographic point """
    from genutil import statistics
    from vacumm.misc.grid import get_grid, set_grid
    # Centered and biased std (cf. http://www2-pcmdi.llnl.gov/cdat/manuals/cdutil/cdat_utilities-2.html)
    self.model.temp_std = statistics.std(self.model, axis=0)
    self.obs.temp_std = statistics.std(self.obs, axis=0)
    ggm = get_grid(self.model)
    set_grid(self.model.temp_std, ggm, axes=True)
    ggo = get_grid(self.obs)
    set_grid(self.obs.temp_std, ggo, axes=True)
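# Sketch of the underlying genutil call, assuming `model` is a (time, lat, lon)
# cdms2/MV2 variable: statistics.std along axis 0 returns one standard-deviation
# value per grid point (the centered, biased std mentioned in the comment above).
from genutil import statistics
std_map = statistics.std(model, axis=0)   # std map over the time dimension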
def density(temp, sal, depth=None, lat=None, potential=False,
            getdepth=False, getlat=False, format_axes=False):
    """Compute density from temperature, salinity and depth (and latitude)

    :Params:

        - **temp**: In situ or potential temperature.
        - **sal**: Salinity.
        - **depth**, optional: Depth at temperature and salinity points.
          Assumed to be 0 if not found.
        - **lat**, optional: Latitude. Error when not found.
        - **potential**, optional: True to get the potential density (at atmospheric pressure).

    :Algo:

        >>> pressure = seawater.csiro.pres(depth, lat)
        >>> density = seawater.csiro.dens(sal, temp, depth)
    """
    # Compute
    if not potential and depth is not False:  # In-situ

        # Get depth and latitude
        lat = grow_lat(temp, lat, mode='raise', getvar=False)
        if lat is None:
            raise VACUMMError('No latitude found for density')
        depth = grow_depth(temp, depth, mode='raise', getvar=False)
        if N.abs(depth.max()) < N.abs(depth.min()):  # positive
            depth = -depth
        if (depth.asma() < 0).any():
            depth = depth - depth.min()  # top=0

        # Get density
        pres = sw_pres(depth, lat)
        dens = sw_dens(sal, temp, pres)
        del pres

    else:  # Potential

        dens = sw_dens0(sal, temp)
        getdepth = getlat = False

    # Format
    dens.setAxisList(temp.getAxisList())
    set_grid(dens, get_grid(temp))
    format_var(dens, 'dens', format_axes=format_axes)

    # Out
    if not getdepth and not getlat:
        return dens
    dens = dens,
    if getdepth:
        dens += depth,
    if getlat:
        dens += lat,
    return dens
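# Hypothetical usage sketch of density() above: `temp` and `sal` are assumed to
# be CF-like cdms2 variables sharing a depth axis and a latitude axis, so that
# grow_lat/grow_depth can find them; the variable names are not part of the API.
dens = density(temp, sal)                     # in-situ density
sigma0 = density(temp, sal, potential=True)   # potential density at atmospheric pressure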
def temporal_corr(self):
    """ Correlation between model and observations, computed at each grid point
    along the time dimension => correlation map (uncentered and biased) """
    from genutil import statistics
    from vacumm.misc.grid import get_grid, set_grid
    self.temp_corr = statistics.correlation(self.model, self.obs, axis=0)
    gg = get_grid(self.model)
    set_grid(self.temp_corr, gg, axes=True)
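# Sketch of the underlying genutil call, assuming `model` and `obs` are
# (time, lat, lon) cdms2/MV2 variables on the same grid: the correlation is
# computed independently at each grid point along the time axis.
from genutil import statistics
corr_map = statistics.correlation(model, obs, axis=0)   # one value per grid point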
def obs_coverage(self):
    """ Observation coverage rate at each grid point over the period """
    import cdms2
    from vacumm.misc.grid import get_grid, set_grid
    self.obs_cov = cdms2.createVariable(self.obs.count(axis=0), typecode='f',
                                        id='obs_cov', attributes=dict(units='%'))
    self.obs_cov = self.obs_cov / len(self.obs) * 100.
    gg = get_grid(self.obs)
    set_grid(self.obs_cov, gg, axes=True)
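# Minimal masked-array sketch of the coverage computation above: count() gives
# the number of unmasked time steps per grid point, converted to a percentage.
# Synthetic arrays only; the method itself works on the cdms2 variable self.obs.
import numpy as np

obs = np.ma.masked_invalid(np.random.rand(10, 3, 4))
obs[::2, 0, 0] = np.ma.masked                              # simulate missing observations
coverage = obs.count(axis=0) / float(obs.shape[0]) * 100.  # percent of valid data per point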
def temporal_rmsc(self):
    """ RMS difference between model and observations, computed at each grid point
    along the time dimension => RMS map (centered and biased) """
    from genutil import statistics
    from vacumm.misc.grid import get_grid, set_grid
    self.temp_rmsc = statistics.rms(self.model, self.obs, axis=0, centered=1)
    gg = get_grid(self.model)
    set_grid(self.temp_rmsc, gg, axes=True)
def _template_t2ht_(self, tpl):
    # Expand a template variable with a leading histogram-bins axis
    htpl = MV2.resize(tpl.astype('l'), (self.nbins,) + tpl.shape)
    htpl.setAxisList([self._baxis] + tpl.getAxisList())
    set_grid(htpl, get_grid(tpl))  # keep the horizontal grid of the template
    cp_atts(tpl, htpl)             # copy the template attributes
    return htpl
def mixed_layer_depth(data, depth=None, lat=None, zaxis=None,
                      mode=None, deltatemp=.2, deltadens=.03, kzmax=0.0005,
                      potential=True, format_axes=False):
    """Get mixed layer depth from temperature and salinity

    :Params:

        - **temp**: In situ or potential temperature.
        - **sal**: Salinity.
        - **depth**, optional: Depth at temperature and salinity points.
        - **lat**, optional: Latitude.
        - **mode**, optional: ``"deltatemp"``, ``"deltadens"``, ``"kz"``
          or ``"twolayers"``

    :Raise: :class:`~vacumm.VACUMMError` if can't get depth (and latitude for density).
    """
    # TODO: positive up

    # Inspection
    if isinstance(data, tuple):  # data = temp,sal

        temp, sal = data

        # Get density
        if mode != 'deltatemp':
            res = density(temp, sal, depth=depth, lat=lat, format_axes=False,
                          potential=potential, getdepth=True)
            if isinstance(res, tuple):
                dens, depth = res
            else:
                dens = res
            dens = dens.asma()
            if mode is None:
                mode = 'deltadens'

    else:

        temp = data[0]

        # Check mode
        if mode == 'kz':
            warn("Switching MLD computation mode to 'deltadens'")
            mode = "deltadens"
        elif match_var(data, 'temp', mode='nslu'):
            if mode is not None and mode != 'deltatemp':
                warn("Switching MLD computation mode to 'deltatemp'")
            mode = 'deltatemp'
            temp = data
        elif match_var(data, 'dens', mode='nslu'):
            if mode in ['kz', 'deltatemp']:
                warn("Switching MLD computation mode to 'deltadens'")
                mode = None
            if mode is None:
                mode = "deltadens"
            dens = data
        elif match_var(data, 'kz', mode='nslu'):
            if mode is None:
                mode = "kz"
            if mode != "kz":
                warn("Switching MLD computation mode to 'kz'")
            kz = data
        else:
            if mode in ['deltadens', 'twolayers']:
                dens = data
            elif mode == "deltatemp":
                temp = data
            elif mode == "kz":
                kz = data
            elif mode is not None:
                raise VACUMMError("Invalid MLD computation mode : '%s'" % mode)
            else:
                raise VACUMMError("Can't guess MLD computation mode")

    # Find Z dim
    data0 = data[0] if isinstance(data, tuple) else data
    depth = grow_depth(data0, depth, mode='raise', getvar=False)
    zaxis = get_zdim(data0, axis=zaxis)
    if zaxis is None:
        raise VACUMMError("Can't guess zaxis")
    slices = get_axis_slices(data0, zaxis)

    # Init MLD
    axes = data0.getAxisList()
    del axes[zaxis]
    mld = MV2.array(data0.asma()[slices['first']], copy=1, axes=axes, copyaxes=False)
    set_grid(mld, get_grid(data0))
    format_var(mld, 'mld', format_axes=format_axes)
    mld[:] = MV2.masked

    # Two-layers
    if mode == 'twolayers':

        densbot = dens[slices['first']]
        denstop = dens[slices['last']]
        del dens
        H = 1.5 * depth[slices['first']] - 0.5 * depth[slices['firstp1']]
        H = -1.5 * depth[slices['last']] + 0.5 * depth[slices['lastm1']]
        mld[:] = -H * (densbot - denstop) / (densbot - denstop)
        del H

    elif mode == 'deltadens':

        denscrit = dens[slices['last']] + deltadens
        mld[:] = -_val2z_(dens, depth, denscrit, zaxis, -1)
        del dens

    elif mode == 'deltatemp':

        tempcrit = temp[slices['last']] - deltatemp
        mld[:] = -_val2z_(temp, depth, tempcrit, zaxis, 1)

    elif mode == 'kz':

        mld[:] = -_valmin2z_(kz, depth, kzmax, zaxis, 1)

    else:

        raise VACUMMError("Invalid mode for computing MLD (%s). " % mode +
                          "Please choose one of: deltadens, twolayers")

    # Mask zeros
    mld[:] = MV2.masked_values(mld, 0., copy=0)

    return mld
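# Hypothetical usage sketch of mixed_layer_depth() above: `temp` and `sal` are
# assumed to be (depth, lat, lon) cdms2 variables with a proper depth axis;
# names and shapes are assumptions, not requirements of the function.
mld = mixed_layer_depth((temp, sal), mode='deltadens')             # density criterion
mld_t = mixed_layer_depth(temp, mode='deltatemp', deltatemp=0.2)   # temperature criterion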