def do_something(self):
    # Demo of the class logging helpers: exception logging with a traceback,
    # caller introspection, and the describe() formatter on axes/variables.
    num, den = 0, 0
    try:
        num / den  # deliberately raises ZeroDivisionError (den is 0)
    except:
        self.exception('Division by 0 failed !\n%s', self.exception_trace())
    self.info('The function emitting this message is: %s', self.func_name())
    self.info('The stack trace of the function emitting this message is:\n%s',
              self.stack_trace())
    # Build a dummy (time, lat, lon) cube of random data just to describe it
    t = create_time(['1900-01-01 00:00:00', '9999-12-31 23:59:59'])
    y = create_lat(range(-90, 90))
    x = create_lon(range(-180, 180))
    v = cdms2.createVariable(
        numpy.random.ranf((len(t), len(y), len(x))),
        axes=[t, y, x], id='data', long_name='random data')
    self.info('Time:\n%s', self.describe(t))
    self.info('Latitude:\n%s', self.describe(y))
    self.info('Longitude:\n%s', self.describe(x, stats=True))
    self.info('Variable:\n%s', self.describe(v, stats=True))
def zeros(var, ref='mean', mean=None, getref=True, **kwargs):
    """Get the zeros of a tidal signal

    :Params:

        - **var**: tidal signal with a time axis.
        - **ref**, optional: reference used to compute the anomaly
          (passed to ``_get_anomaly_``).
        - **mean**, optional: explicit mean passed to ``_get_anomaly_``.
        - **getref**, optional: if True, return the reference interpolated
          at the crossing times; otherwise return crossing signs.

    :Returns: A :mod:`cdms2` variable of signs (-1,1) with a time axis

    :Usage:

    >>> tidal_zeros = zeros(sea_level,ref='demerliac')
    >>> print tidal_zeros[0:1]
    >>> print tidal_zeros[0:1].getTime().asComponentTime()
    """
    # Get anomaly (signal minus its reference)
    ref = kwargs.pop('reference', ref)
    vara, varref = _get_anomaly_(var, ref=ref, mean=mean)
    taxis = vara.getTime()
    vara = vara.filled()
    longref = hasattr(varref, '__len__')

    # Indices just before each sign change (zero crossing)
    sign = N.sign(vara)
    izeros = N.arange(len(vara) - 1).compress(sign[:-1] != sign[1:])

    # Linear interpolation to the crossing times:
    # t = (t0*v1 - t1*v0) / (v1 - v0) is where the segment hits zero
    units = taxis.units
    times = taxis.getValue()
    zeros = N.zeros((len(izeros), ))
    if getref:
        ret = MV2.zeros(len(zeros), id='zeros')
        if not longref:
            ret[:] = varref
    for i, i0 in enumerate(izeros):
        dv = vara[i0 + 1] - vara[i0]
        zeros[i] = old_div(times[i0] * vara[i0 + 1], dv) - old_div(
            times[i0 + 1] * vara[i0], dv)
        if getref and longref:
            # BUGFIX: was the undefined name 'var_ref' (NameError whenever
            # getref and longref); the unpacked name is 'varref'.  The
            # unused 'dt' computation was removed as dead code.
            ret[i] = old_div(varref[i0] * vara[i0 + 1], dv) - old_div(
                varref[i0 + 1] * vara[i0], dv)

    # Format output variable
    if not getref:
        ret = MV2.array(sign[izeros], id='zeros')
        ret.units = '1 up and -1 down'
    else:
        cp_atts(var, ret)
    ret.long_name = 'Zeros'
    zeros = create_time(zeros, units)
    ret.setAxis(0, zeros)
    return ret
def _extrema_var_(extrem, units=None, indices=False, **kwargs):
    """Build a cdms2 variable from ``(ctime, value, index)`` extrema tuples.

    Extra keyword arguments are attached as attributes of the result.
    """
    comp_times, values, positions = zip(*extrem)

    # Axis: either raw time indices or a genuine time axis
    if indices:
        axis = cdms2.createAxis(positions)
        axis.id = 'time_index'
        axis.long_name = 'Time index'
    else:
        if units is None:
            units = 'minutes since %s' % strftime('%Y-%m-%d %H:%M:%S',
                                                  comp_times[0])
        axis = create_time(list(comp_times), units)

    out = cdms2.createVariable(values, copy=0)
    out.setMissing(1.e20)
    out.setAxis(0, axis)

    # Caller-supplied attributes (long_name, units, ...)
    for name, value in kwargs.items():
        setattr(out, name, value)
    return out
def zeros(var, ref='mean',mean=None, getref=True, **kwargs):
    """Get the zeros of a tidal signal

    :Params:

        - **var**: tidal signal with a time axis.
        - **ref**, optional: reference used to compute the anomaly.
        - **mean**, optional: explicit mean passed to ``_get_anomaly_``.
        - **getref**, optional: if True, return the reference interpolated
          at the crossing times; otherwise return crossing signs.

    :Returns: A :mod:`cdms2` variable of signs (-1,1) with a time axis

    :Usage:

    >>> tidal_zeros = zeros(sea_level,ref='demerliac')
    >>> print tidal_zeros[0:1]
    >>> print tidal_zeros[0:1].getTime().asComponentTime()
    """
    # Get anomaly (signal minus its reference)
    ref = kwargs.pop('reference', ref)
    vara, varref = _get_anomaly_(var, ref=ref, mean=mean)
    taxis = vara.getTime()
    vara = vara.filled()
    longref = hasattr(varref, '__len__')

    # Indices just before each sign change (zero crossing)
    sign = N.sign(vara)
    izeros = N.arange(len(vara)-1).compress(sign[:-1]!=sign[1:])

    # Linear interpolation to the crossing times:
    # t = (t0*v1 - t1*v0) / (v1 - v0) is where the segment hits zero
    units = taxis.units
    times = taxis.getValue()
    zeros = N.zeros((len(izeros), ))
    if getref:
        ret = MV2.zeros(len(zeros), id='zeros')
        if not longref:
            ret[:] = varref
    for i, i0 in enumerate(izeros):
        dv = vara[i0+1]-vara[i0]
        zeros[i] = times[i0]*vara[i0+1]/dv - times[i0+1]*vara[i0]/dv
        if getref and longref:
            # BUGFIX: was the undefined name 'var_ref' (NameError whenever
            # getref and longref); the unpacked name is 'varref'.  The
            # unused 'dt' computation was removed as dead code.
            ret[i] = varref[i0]*vara[i0+1]/dv - varref[i0+1]*vara[i0]/dv

    # Format output variable
    if not getref:
        ret = MV2.array(sign[izeros], id='zeros')
        ret.units = '1 up and -1 down'
    else:
        cp_atts(var, ret)
    ret.long_name = 'Zeros'
    zeros = create_time(zeros, units)
    ret.setAxis(0, zeros)
    return ret
def _extrema_var_(extrem, units=None, indices=False, **kwargs):
    """Wrap ``(ctime, value, index)`` extrema tuples into a cdms2 variable.

    Any keyword argument becomes an attribute of the returned variable.
    """
    ctimes, vals, idxs = zip(*extrem)

    if not indices:
        # Real time axis, defaulting units to minutes since the first extremum
        if units is None:
            units = 'minutes since %s' % strftime('%Y-%m-%d %H:%M:%S',
                                                  ctimes[0])
        taxis = create_time(list(ctimes), units)
    else:
        # Plain index axis
        taxis = cdms2.createAxis(idxs)
        taxis.id = 'time_index'
        taxis.long_name = 'Time index'

    result = cdms2.createVariable(vals, copy=0)
    result.setMissing(1.e20)
    result.setAxis(0, taxis)
    for attname, attval in kwargs.items():
        setattr(result, attname, attval)
    return result
strftime, tz_to_tz, now, to_utc, utc_to_paris)
from vacumm.misc.axes import create_time

# We define units
units = 'hours since 2000-01-15 06:00'

# Is it well formatted?
print are_valid_units(units)
# -> True

# Same units?
print are_same_units('hours since 2000-1-15 06', units)
# -> True

# Change axis time units
taxis = create_time(N.arange(6.) * 48, units)
# - before
print taxis.units, taxis[0:2]
print taxis.asComponentTime()[0:2]
# -> hours since 2000-01-15 06:00 [  0.  48.]
# -> [2000-1-15 6:0:0.0, 2000-1-17 6:0:0.0]
# - change (in place: copy=0)
ch_units(taxis, 'days since 2000-1-15 06', copy=0)
# - after
print taxis.units, taxis[0:2]
print taxis.asComponentTime()[0:2]
# -> days since 2000-1-15 06 [ 0.  2.]
# -> [2000-1-15 6:0:0.0, 2000-1-17 6:0:0.0]

# Matplotlib times
taxis_mpl = mpl(taxis)
# -*- coding: utf8 -*- # Lecture du niveau de la mer sur 9 pas de temps à une latitude import cdms2, MV2 from vacumm.config import data_sample f = cdms2.open(data_sample('mars3d.xt.xe.nc')) xe = f('xe', squeeze=1, time=slice(0, 9), lon=(-5, -4.83)) f.close() xe.long_name = 'Original' # On crée un trou xe[3:4, 20:30] = MV2.masked # Nouvel axe temporel plus précis from vacumm.misc.axes import create_time #old_time = xe.getTime() old_time = create_time((xe.shape[0], ), 'hours since 2000') xe.setAxis(0, old_time) dt = (old_time[1] - old_time[0]) / 10. new_time = create_time((old_time[0], old_time[-1] + dt, dt), old_time.units) # Interpolation from vacumm.misc.grid.regridding import interp1d # - nearest xe_nea = interp1d(xe, new_time, method='nearest') xe_nea.long_name = 'Nearest' # - linear xe_lin = interp1d(xe, new_time, method='linear') xe_lin.long_name = 'Linear' # - cubic xe_cub = interp1d(xe, new_time, method='cubic') xe_cub.long_name = 'Cubic'
# -*- coding: utf-8 -*- # Creation d'un jeu de precipitations horaires import MV2, cdms2, numpy as N from vacumm.misc.axes import create_time hours = create_time((12*60, 25.*60, 60), 'minutes since 2000') precip = MV2.sin(N.arange(len(hours))*.2)*10 precip.setAxis(0, hours) precip.units = 'mm' precip.long_name = 'Original' # Nouvel echantillonnage / 2h hours2 = create_time((10, 30., 2), 'hours since 2000') # Regrillage 1D conservatif from vacumm.misc.grid.regridding import regrid1d precip2 = regrid1d(precip, hours2, 'conservative') precip2.long_name = 'Regridded' # Verifications print 'Total precip.:' print '- original =', precip.sum() print '- remapped =', precip2.sum() # > Total precip.: # > - original = 89.957242832779755 # > - remapped = 89.957237 # Plots from vacumm.misc.plot import savefigs from vacumm.misc.plot import bar2 kwplot = dict(color='#00ffff',edgecolor='#55aaaa',
def load(self, restart_file=None, iterindex=None, nowtime=None):
    """Load the current instance from a netcdf file

    :Params:

        - **restart_file**, optional: Netcdf restart file.
        - **iterindex**, optional: If given, the restart file is not
          loaded if ``iterindex`` is greater or equal to the file's
          ``iterindex`` attribute.
        - **nowtime**, optional: If given, the restart file is not
          loaded if ``nowtime`` is greater or equal to the file's
          ``lasttime`` attribute.
    """
    # File: fall back to the instance then the class default path
    if restart_file is None:
        restart_file = self.restart_file
    if restart_file is None:
        restart_file = self.default_restart_file
    self.restart_file = restart_file
    f = cdms2.open(restart_file)

    # Config
    # - check status: refuse to load when our state is already as recent
    #   as the file's (returns -1 in that case)
    if iterindex is not None:
        self.iterindex = iterindex
    if hasattr(self, 'iterindex') and f.iterindex <= self.iterindex:
        return -1
    if nowtime is not None:
        self.lasttime = comptime(nowtime)
    if (hasattr(self, 'lasttime') and f.withtime > 0 and self.lasttime and
            reltime(f.lasttime, 'hours since 2000').value <=
            reltime(self.lasttime, 'hours since 2000').value):
        return -1
    # - what was initially asked and some more (boolean flags stored as
    #   file attributes, prefixed 's' for spatial and 't' for temporal)
    for sname in self.all_stats + ('sum', 'sqr', 'prod', 'stats'):
        for st in 'st':
            if not hasattr(f, st + sname):
                continue
            value = getattr(f, st + sname)
            setattr(self, st + sname, bool(value))
    # - current status
    self.iterindex = int(f.iterindex)
    self.nitems = int(f.nitems)
    if f.withtime == -1:  # -1 is the "undefined" marker
        self.withtime = None
    else:
        self.withtime = bool(f.withtime)
        if f.withtime:
            self.lasttime = cdtime.s2c(f.lasttime)
    if N.isscalar(f.bin_edges):  # scalar bin_edges means "no histogram"
        self.bins = None
    else:
        self.bins = N.asarray(f.bin_edges)
        self.nbins = self.bins.shape[0] - 1
        self._baxis = f.getAxis('hbin').clone()
    if self.nitems == 0:  # Still no data
        f.close()
        return 0

    # - already had some data
    self.dual = bool(f.dual)
    self.ns = int(f.ns)
    self.nt = int(f.nt)
    self._nts = f.nts.tolist()
    self.tstats = bool(f.tstats)
    self.sstats = bool(f.sstats)
    if not self.withtime:
        self._stimes = None

    # Spatial statistics
    if self.sstats:

        # Time axes: one list of accumulated time axes per item
        if self.withtime:
            self._stimes = tuple([[] for i in xrange(self.nitems)])
            for i, tt in enumerate(self._stimes):
                taxis = f.getAxis('t' + str(i))
                tvalues = self._aslist_(taxis[:])
                oldid = taxis.stataccum_oldid
                for tvals in tvalues:
                    # Restore the original axis id saved at dump time
                    tx = create_time(tvals, taxis.units, id=oldid)
                    cp_atts(taxis, tx, id=False, exclude=[oldid])
                    self._stimes[i].append(tx)

        # Count
        self._scount = self._load_array_(f, id='scount')

        # Other stats
        self._sstats = {}
        for key in self.single_stats:
            if not self.dual:  # single var
                vid = 's' + key
                if vid not in f.variables:
                    continue
                self._sstats[key] = self._load_array_(f, vid),
            else:  # two vars: one 's<key><i>' variable per item
                for i in xrange(self.nitems):
                    vid = 's%s%s' % (key, str(i))
                    if vid not in f.variables:
                        break
                    self._sstats.setdefault(key, ())
                    self._sstats[key] += self._load_array_(f, vid),
        for key in self.dual_stats:
            vid = 's%s' % key
            if vid in f.variables:
                self._sstats[key] = self._load_array_(f, vid)

    # Temporal statistics
    # NOTE(review): indentation reconstructed from a collapsed source —
    # the Templates/Attributes sections are assumed to belong to the
    # tstats branch; confirm against the original file.
    if self.tstats:

        # Count
        self._tcount = self._load_array_(f, 'tcount')

        # Other stats: dual accumulators are single arrays, single
        # accumulators are one array per item
        for key in self._dual_accums + self._single_accums:
            tid = 't' + key
            if not getattr(self, tid):
                continue
            if key in self._dual_accums:
                value = self._load_array_(f, tid)
                setattr(self, '_' + tid, value)
            else:
                value = ()
                for i in xrange(self.nitems):
                    value += self._load_array_(f, tid + str(i)),
                setattr(self, '_' + tid, value)

        # Templates
        # - base arrays
        self._tbase = N.zeros(self.ns)
        if self.thist:
            self._thbase = N.zeros((self.nbins, self.ns), 'l')
        # - cdat templates: clone one 'var<i>_*' variable per item
        self._ttemplates = ()
        self._thtemplates = None
        if self.thist:
            self._thtemplates = ()
        for i in xrange(self.nitems):
            prefix = 'var%i_' % i
            for vname in f.variables:
                # skip the attribute-holder variable 'var<i>_atts'
                if vname.startswith(prefix) and vname != prefix + 'atts':
                    break
            ttpl = f(vname)
            _rm_id_prefix_(ttpl, 'var%i_' % i, exc=self._baxis)
            self._ttemplates += ttpl,
            if self.thist:
                self._thtemplates += self._template_t2ht_(ttpl),

        # Attributes saved on the 'var<i>_atts' placeholder variables
        self._atts = ()
        for ivar in xrange(self.nitems):
            attrs = f['var%i_atts' % ivar].attributes.copy()
            attrs['id'] = attrs['stataccum_id']
            del attrs['stataccum_id']
            self._atts += attrs,

    f.close()
    return self.iterindex
# On definit des unites units = 'hours since 2000-01-15 06:00' # Le format est-il bon ? from vacumm.misc.atime import * print are_good_units(units) # -> True # Memes unites ? print are_same_units('hours since 2000-1-15 06', units) # -> True # Changer les unites d'un axe de temps from vacumm.misc.axes import create_time import numpy as N taxis = create_time(N.arange(6.)*48, units) # - avant changement print taxis.units, taxis[0:2] print taxis.asComponentTime()[0:2] # -> hours since 2000-01-15 06:00 [ 0. 48.] # -> [2000-1-15 6:0:0.0, 2000-1-17 6:0:0.0] # - changement ch_units(taxis, 'days since 2000-1-15 06', copy=0) # - apres changement print taxis.units, taxis[0:2] print taxis.asComponentTime()[0:2] # -> days since 2000-1-15 06 [ 0. 2.] # -> [2000-1-15 6:0:0.0, 2000-1-17 6:0:0.0] # Le temps matplotlib taxis_mpl = mpl(taxis)
def coloc_mod_on_pro(self, model, profiles, varnames, select=None,
                     method='nearest'):
    '''Colocalize model on profile data.

    Load model data corresponding to the selected profiles positions and
    time.

    Returns loaded model longitudes, latitudes, depths and requested
    variable(s)

    :Params:

        - **model**: model data :class:`~vacumm.data.misc.dataset.Dataset`
        - **profiles**: profile data
          :class:`~vacumm.data.misc.profile.ProfilesDataset`
        - **varnames**: variables to load (ex: ('temp','sal') or
          (('temp','temperature'),('sal','salinity'))
        - **select**: selector
        - **method**: coloc method (**nearest** or **interp**)

    :Return:

        - **lons_mod**: model longitude coordinates, shape: (profile)
        - **lats_mod**: model latitude coordinates, shape: (profile)
        - **deps_mod**: model depth coordinates, shape: (level,profile)
        - **var1**: requested variables, shape: (level,profile)
        - ...
        - **varN**

    .. todo::
        - also load and return profile data here
        - exclude coords where profile data is masked (no data for
          specified depth)
        - return time coordinates
        - return depth and vars with shape (profile,level)

    '''
    self.verbose(
        'Colocalizing %s on %s\nvarnames: %s\nselect: %s\n method: %s',
        model.__class__.__name__, profiles.__class__.__name__, varnames,
        select, method)
    # Profile coordinates and times
    prof_pro = profiles.get_axis('profile', select=select)
    if prof_pro is None or not len(prof_pro):
        raise Exception('No profiles found, aborting')
    lev_pro = profiles.get_axis('level', select=select)
    time_pro = profiles.get_variable('time', select=select)
    lons_pro = profiles.get_variable('longitude', select=select)
    lats_pro = profiles.get_variable('latitude', select=select)
    dates = create_time(time_pro).asComponentTime()
    self.info('Number of profiles: %s', len(dates))
    self.info('Profiles time coverage: %s to %s', dates[0], dates[-1])
    # Init model: time step, grid resolution, levels
    td = model.get_time_res()
    dtmax = (td.days * 86400 + td.seconds, 'seconds')
    self.info('Detected model time step: %s', td)
    grid_mod = model.get_grid()
    xres, yres = resol(grid_mod)
    time_mod = model.get_time()
    ctime_mod = time_mod.asComponentTime()
    self.info('Model time coverage: %s to %s', ctime_mod[0], ctime_mod[-1])
    level_mod = model.get_level(select=select)
    # Output coordinate arrays, initially fully masked
    lons_mod = MV2.zeros((len(prof_pro), )) + MV2.masked
    lats_mod = lons_mod.clone()
    deps_mod = MV2.zeros((len(level_mod), len(prof_pro))) + MV2.masked
    deps_mod.setAxis(1, prof_pro)
    lons_mod.id, lats_mod.id, deps_mod.id = 'longitude', 'latitude', 'depth'
    # Create the requested variables, shape (level, profile), fully masked
    variables = []
    for n in varnames:
        v = MV2.zeros((len(level_mod), len(prof_pro))) + MV2.masked
        v.setAxis(1, prof_pro)
        # varnames entries may be plain names or (name, alias, ...) tuples
        v.id = is_iterable(n) and n[0] or n
        variables.append(v)
    cdms2.setAutoBounds(1)  # ???
    # Time loop over profiles; failures are logged and skipped
    for ip, date in enumerate(dates):
        try:
            # Spatial bounds around the profile position
            lon = lons_pro[ip]
            lat = lats_pro[ip]
            lon_min = lon - 2 * xres
            lon_max = lon + 2 * xres
            lat_min = lat - 2 * yres
            lat_max = lat + 2 * yres
            date_interval = (add_time(date, -dtmax[0], dtmax[1]),
                             add_time(date, dtmax[0], dtmax[1]), 'ccb')
            self.info('Colocalizing data for date %s, lon: %s, lat: %s',
                      date, lon, lat)
            # Method 1: nearest data
            if method == 'nearest':
                sel = dict(time=(date, date, 'ccb'),
                           longitude=(lon, lon, 'ccb'),
                           latitude=(lat, lat, 'ccb'))
                # Check data availability
                if time_mod.mapIntervalExt(sel['time']) is None:
                    self.warning('Time interval %s not found', sel['time'])
                    continue
                if grid_mod.getLatitude().mapInterval(
                        sel['latitude']) is None:
                    self.warning('Latitude coordinate %s not found',
                                 sel['latitude'])
                    continue
                if grid_mod.getLongitude().mapInterval(
                        sel['longitude']) is None:
                    self.warning('Longitude coordinate %s not found',
                                 sel['longitude'])
                    continue
                # Load tmp depth to get lon & lat coordinates
                #tmp = model.get_depth(select=sel, squeeze=False)
                # tmp squeezed !!! see sigma ?
                tmp = model.get_variable(varnames[0], select=sel,
                                         squeeze=False)
                lons_mod[ip] = tmp.getLongitude()[0]
                lats_mod[ip] = tmp.getLatitude()[0]
                deps_mod[:, ip] = model.get_depth(select=sel, squeeze=True)
                for iv, vn in enumerate(varnames):
                    variables[iv][:, ip] = model.get_variable(vn,
                                                              select=sel,
                                                              squeeze=True)
            # Method 2: interpolation
            elif method == 'interp':
                sel = dict(time=date_interval,
                           longitude=(lon_min, lon_max),
                           latitude=(lat_min, lat_max))
                if time_mod.mapIntervalExt(sel['time']) is None:
                    self.warning('Time interval %s not found', sel['time'])
                    continue
                if grid_mod.getLatitude().mapInterval(
                        sel['latitude']) is None:
                    self.warning('Latitude coordinate %s not found',
                                 sel['latitude'])
                    continue
                if grid_mod.getLongitude().mapInterval(
                        sel['longitude']) is None:
                    self.warning('Longitude coordinate %s not found',
                                 sel['longitude'])
                    continue
                # Reads
                order = 'tzyx'
                # Profile lon & lat since we interpolate to this position
                lons_mod[ip], lats_mod[ip] = lon, lat
                deps_mod_tzyx = model.get_depth(select=sel, order=order,
                                                squeeze=True)
                tmp_tzyx = []
                for iv, vn in enumerate(varnames):
                    tmp_tzyx.append(
                        model.get_variable(vn, select=sel, order=order,
                                           squeeze=True))
                # Temporal interpolation between the two bracketing steps.
                # NOTE(review): f0 weights step 0 by its own distance d0,
                # which looks inverted w.r.t. standard linear
                # interpolation (expected weight d1/(d0+d1)) — confirm.
                mctime = tmp_tzyx[0].getTime()
                mrtime = mctime.asRelativeTime()
                d0 = date.torel(mctime.units).value - mrtime[0].value
                d1 = mrtime[1].value - date.torel(mctime.units).value
                f0 = d0 / (d0 + d1)
                f1 = d1 / (d0 + d1)
                deps_mod_zyx = f0 * deps_mod_tzyx[0] + f1 * deps_mod_tzyx[1]
                tmp_zyx = []
                for iv, vn in enumerate(varnames):
                    tmp_zyx.append(f0 * tmp_tzyx[iv][0] +
                                   f1 * tmp_tzyx[iv][1])
                del tmp_tzyx
                # Spatial interpolation to the profile position
                deps_mod[:, ip] = numpy.squeeze(
                    grid2xy(deps_mod_zyx, numpy.array([lon]),
                            numpy.array([lat]), method='nat'))
                for iv, vn in enumerate(varnames):
                    variables[iv][:, ip] = numpy.squeeze(
                        grid2xy(tmp_zyx[iv], numpy.array([lon]),
                                numpy.array([lat]), method='nat'))
                del tmp_zyx
            else:
                raise ValueError('Invalid colocation method: %s' % (method))
        except:
            self.exception('Failed to colocalize data for date %s', date)
    # Promote axis 0 of depths and variables to a level axis
    for v in [deps_mod] + variables:
        v.getAxis(0).id = 'level'
        v.getAxis(0).designateLevel()
    data = tuple([lons_mod, lats_mod, deps_mod] + variables)
    self.verbose('Colocalized data:\n  %s',
                 '\n  '.join(self.describe(o) for o in data))
    return data
def load(self, restart_file=None, iterindex=None, nowtime=None):
    """Load the current instance from a netcdf file

    :Params:

        - **restart_file**, optional: Netcdf restart file.
        - **iterindex**, optional: If given, the restart file is not
          loaded if ``iterindex`` is greater or equal to the file's
          ``iterindex`` attribute.
        - **nowtime**, optional: If given, the restart file is not
          loaded if ``nowtime`` is greater or equal to the file's
          ``lasttime`` attribute.
    """
    # File: fall back to the instance then the class default path
    if restart_file is None:
        restart_file = self.restart_file
    if restart_file is None:
        restart_file = self.default_restart_file
    self.restart_file = restart_file
    f = cdms2.open(restart_file)

    # Config
    # - check status: refuse to load when our state is already as recent
    #   as the file's (returns -1 in that case)
    if iterindex is not None:
        self.iterindex = iterindex
    if hasattr(self, 'iterindex') and f.iterindex<=self.iterindex:
        return -1
    if nowtime is not None:
        self.lasttime = comptime(nowtime)
    if (hasattr(self, 'lasttime') and f.withtime>0 and self.lasttime and
            comptime(f.lasttime)<=comptime(self.lasttime)):
        return -1
    # - what was initially asked and some more (boolean flags stored as
    #   file attributes, prefixed 's' for spatial and 't' for temporal)
    for sname in self.all_stats + ('sum', 'sqr', 'prod', 'stats'):
        for st in 'st':
            if not hasattr(f, st+sname):
                continue
            value = getattr(f, st+sname)
            setattr(self, st+sname, bool(value))
    # - current status
    self.iterindex = int(f.iterindex)
    self.nitems = int(f.nitems)
    if f.withtime==-1:  # -1 is the "undefined" marker
        self.withtime = None
    else:
        self.withtime = bool(f.withtime)
        if f.withtime:
            self.lasttime = cdtime.s2c(f.lasttime)
    if N.isscalar(f.bin_edges):  # scalar bin_edges means "no histogram"
        self.bins = None
    else:
        self.bins = N.asarray(f.bin_edges)
        self.nbins = self.bins.shape[0]-1
        self._baxis = f.getAxis('hbin').clone()
    if self.nitems==0:  # Still no data
        f.close()
        return 0

    # - already had some data
    self.dual = bool(f.dual)
    self.ns = int(f.ns)
    self.nt = int(f.nt)
    self._nts = f.nts.tolist()
    self.tstats = bool(f.tstats)
    self.sstats = bool(f.sstats)
    if not self.withtime:
        self._stimes = None

    # Spatial statistics
    if self.sstats:

        # Time axes: one list of accumulated time axes per item
        if self.withtime:
            self._stimes = tuple([[] for i in xrange(self.nitems)])
            for i, tt in enumerate(self._stimes):
                taxis = f.getAxis('t'+str(i))
                tvalues = self._aslist_(taxis[:])
                oldid = taxis.stataccum_oldid
                for tvals in tvalues:
                    # Restore the original axis id saved at dump time
                    tx = create_time(tvals, taxis.units, id=oldid)
                    cp_atts(taxis, tx, id=False, exclude=[oldid])
                    self._stimes[i].append(tx)

        # Count
        self._scount = self._load_array_(f, id='scount')

        # Other stats: single stats are tuples (one entry per item),
        # dual stats are single arrays
        self._sstats = {}
        for key in self.single_stats:
            self._sstats[key] = ()
            for i in xrange(self.nitems):
                self._sstats[key] += self._load_array_(f,
                    's%s%s'%(key, str(i))),
        for key in self.dual_stats:
            self._sstats[key] = self._load_array_(f, 's%s'%key)

    # Temporal statistics
    # NOTE(review): indentation reconstructed from a collapsed source —
    # the Templates/Attributes sections are assumed to belong to the
    # tstats branch; confirm against the original file.
    if self.tstats:

        # Count
        self._tcount = self._load_array_(f, 'tcount')

        # Other stats: dual accumulators are single arrays, single
        # accumulators are one array per item
        for key in self._dual_accums+self._single_accums:
            tid = 't'+key
            if not getattr(self, tid):
                continue
            if key in self._dual_accums:
                value = self._load_array_(f, tid)
                setattr(self, '_'+tid, value)
            else:
                value = ()
                for i in xrange(self.nitems):
                    value += self._load_array_(f, tid+str(i)),
                setattr(self, '_'+tid, value)

        # Templates
        # - base arrays
        self._tbase = N.zeros(self.ns)
        if self.thist:
            self._thbase = N.zeros((self.nbins, self.ns), 'l')
        # - cdat templates: clone one 'var<i>_*' variable per item
        self._ttemplates = ()
        if self.thist:
            self._thtemplates = ()
        for i in xrange(self.nitems):
            prefix = 'var%i_'%i
            for vname in f.variables:
                if vname.startswith(prefix):
                    break
            ttpl = f(vname)
            _rm_id_prefix_(ttpl, 'var%i_'%i, exc=self._baxis)
            self._ttemplates += ttpl,
            if self.thist:
                self._thtemplates += self._template_t2ht_(ttpl),

        # Attributes saved on the 'var<i>_atts' placeholder variables
        self._atts = ()
        for ivar in xrange(self.nitems):
            attrs = f['var%i_atts'%ivar].attributes.copy()
            attrs['id'] = attrs['stataccum_id']
            del attrs['stataccum_id']
            self._atts += attrs,

    f.close()
    return self.iterindex
# -*- coding: utf8 -*- # Lecture du niveau de la mer sur 9 pas de temps à une latitude import cdms2, MV2 from vacumm.config import data_sample f =cdms2.open(data_sample('mars3d.xt.xe.nc')) xe = f('xe', squeeze=1, time=slice(0, 9), lon=(-5, -4.83)) f.close() xe.long_name = 'Original' # On crée un trou xe[3:4, 20:30] = MV2.masked # Nouvel axe temporel plus précis from vacumm.misc.axes import create_time #old_time = xe.getTime() old_time=create_time((xe.shape[0], ), 'hours since 2000') xe.setAxis(0, old_time) dt = (old_time[1]-old_time[0])/10. new_time = create_time((old_time[0], old_time[-1]+dt, dt), old_time.units) # Interpolation from vacumm.misc.grid.regridding import interp1d # - nearest xe_nea = interp1d(xe, new_time, method='nearest') xe_nea.long_name = 'Nearest' # - linear xe_lin = interp1d(xe, new_time, method='linear') xe_lin.long_name = 'Linear' # - cubic xe_cub = interp1d(xe, new_time, method='cubic') xe_cub.long_name = 'Cubic'
def coloc_mod_on_pro(self, model, profiles, varnames, select=None, method='nearest'):
    '''Colocalize model on profile data.

    Load model data corresponding to the selected profiles positions and
    time.

    Returns loaded model longitudes, latitudes, depths and requested
    variable(s)

    :Params:

        - **model**: model data :class:`~vacumm.data.misc.dataset.Dataset`
        - **profiles**: profile data
          :class:`~vacumm.data.misc.profile.ProfilesDataset`
        - **varnames**: variables to load (ex: ('temp','sal') or
          (('temp','temperature'),('sal','salinity'))
        - **select**: selector
        - **method**: coloc method (**nearest** or **interp**)

    :Return:

        - **lons_mod**: model longitude coordinates, shape: (profile)
        - **lats_mod**: model latitude coordinates, shape: (profile)
        - **deps_mod**: model depth coordinates, shape: (level,profile)
        - **var1**: requested variables, shape: (level,profile)
        - ...
        - **varN**

    .. todo::
        - also load and return profile data here
        - exclude coords where profile data is masked (no data for
          specified depth)
        - return time coordinates
        - return depth and vars with shape (profile,level)

    '''
    self.verbose('Colocalizing %s on %s\nvarnames: %s\nselect: %s\n method: %s',
        model.__class__.__name__, profiles.__class__.__name__, varnames,
        select, method)
    # Profile coordinates and times
    prof_pro = profiles.get_axis('profile', select=select)
    if prof_pro is None or not len(prof_pro):
        raise Exception('No profiles found, aborting')
    lev_pro = profiles.get_axis('level', select=select)
    time_pro = profiles.get_variable('time', select=select)
    lons_pro = profiles.get_variable('longitude', select=select)
    lats_pro = profiles.get_variable('latitude', select=select)
    dates = create_time(time_pro).asComponentTime()
    self.info('Number of profiles: %s', len(dates))
    self.info('Profiles time coverage: %s to %s', dates[0], dates[-1])
    # Init model: time step, grid resolution, levels
    td = model.get_time_res()
    dtmax = (td.days*86400+td.seconds, 'seconds')
    self.info('Detected model time step: %s', td)
    grid_mod = model.get_grid()
    xres, yres = resol(grid_mod)
    time_mod = model.get_time()
    ctime_mod = time_mod.asComponentTime()
    self.info('Model time coverage: %s to %s', ctime_mod[0], ctime_mod[-1])
    level_mod = model.get_level(select=select)
    # Output coordinate arrays, initially fully masked
    lons_mod = MV2.zeros((len(prof_pro),))+MV2.masked
    lats_mod = lons_mod.clone()
    deps_mod = MV2.zeros((len(level_mod), len(prof_pro)))+MV2.masked
    deps_mod.setAxis(1, prof_pro)
    lons_mod.id, lats_mod.id, deps_mod.id = 'longitude', 'latitude', 'depth'
    # Create the requested variables, shape (level, profile), fully masked
    variables = []
    for n in varnames:
        v = MV2.zeros((len(level_mod), len(prof_pro)))+MV2.masked
        v.setAxis(1, prof_pro)
        # varnames entries may be plain names or (name, alias, ...) tuples
        v.id = is_iterable(n) and n[0] or n
        variables.append(v)
    cdms2.setAutoBounds(1)  # ???
    # Time loop over profiles; failures are logged and skipped
    for ip, date in enumerate(dates):
        try:
            # Spatial bounds around the profile position
            lon = lons_pro[ip]
            lat = lats_pro[ip]
            lon_min = lon-2*xres
            lon_max = lon+2*xres
            lat_min = lat-2*yres
            lat_max = lat+2*yres
            date_interval = (add_time(date, - dtmax[0], dtmax[1]),
                             add_time(date, dtmax[0], dtmax[1]), 'ccb')
            self.info('Colocalizing data for date %s, lon: %s, lat: %s',
                      date, lon, lat)
            # Method 1: nearest data
            if method == 'nearest':
                sel = dict(time=(date, date, 'ccb'),
                           longitude=(lon, lon, 'ccb'),
                           latitude=(lat, lat, 'ccb'))
                # Check data availability
                if time_mod.mapIntervalExt(sel['time']) is None:
                    self.warning('Time interval %s not found', sel['time'])
                    continue
                if grid_mod.getLatitude().mapInterval(sel['latitude']) is None:
                    self.warning('Latitude coordinate %s not found',
                                 sel['latitude'])
                    continue
                if grid_mod.getLongitude().mapInterval(sel['longitude']) is None:
                    self.warning('Longitude coordinate %s not found',
                                 sel['longitude'])
                    continue
                # Load tmp depth to get lon & lat coordinates
                #tmp = model.get_depth(select=sel, squeeze=False)
                # tmp squeezed !!! see sigma ?
                tmp = model.get_variable(varnames[0], select=sel, squeeze=False)
                lons_mod[ip] = tmp.getLongitude()[0]
                lats_mod[ip] = tmp.getLatitude()[0]
                deps_mod[:, ip] = model.get_depth(select=sel, squeeze=True)
                for iv,vn in enumerate(varnames):
                    variables[iv][:,ip] = model.get_variable(vn, select=sel,
                                                             squeeze=True)
            # Method 2: interpolation
            elif method == 'interp':
                sel = dict(time=date_interval,
                           longitude=(lon_min, lon_max),
                           latitude=(lat_min, lat_max))
                if time_mod.mapIntervalExt(sel['time']) is None:
                    self.warning('Time interval %s not found', sel['time'])
                    continue
                if grid_mod.getLatitude().mapInterval(sel['latitude']) is None:
                    self.warning('Latitude coordinate %s not found',
                                 sel['latitude'])
                    continue
                if grid_mod.getLongitude().mapInterval(sel['longitude']) is None:
                    self.warning('Longitude coordinate %s not found',
                                 sel['longitude'])
                    continue
                # Reads
                order = 'tzyx'
                # Profile lon & lat since we interpolate to this position
                lons_mod[ip], lats_mod[ip] = lon, lat
                deps_mod_tzyx = model.get_depth(select=sel, order=order,
                                                squeeze=True)
                tmp_tzyx = []
                for iv,vn in enumerate(varnames):
                    tmp_tzyx.append(model.get_variable(vn, select=sel,
                                                       order=order,
                                                       squeeze=True))
                # Temporal interpolation between the two bracketing steps.
                # NOTE(review): f0 weights step 0 by its own distance d0,
                # which looks inverted w.r.t. standard linear
                # interpolation (expected weight d1/(d0+d1)) — confirm.
                mctime = tmp_tzyx[0].getTime()
                mrtime = mctime.asRelativeTime()
                d0 = date.torel(mctime.units).value - mrtime[0].value
                d1 = mrtime[1].value - date.torel(mctime.units).value
                f0 = d0 / (d0 + d1)
                f1 = d1 / (d0 + d1)
                deps_mod_zyx = f0 * deps_mod_tzyx[0] + f1 * deps_mod_tzyx[1]
                tmp_zyx = []
                for iv,vn in enumerate(varnames):
                    tmp_zyx.append(f0 * tmp_tzyx[iv][0] + f1 * tmp_tzyx[iv][1])
                del tmp_tzyx
                # Spatial interpolation to the profile position
                deps_mod[:,ip] = numpy.squeeze(grid2xy(deps_mod_zyx,
                    numpy.array([lon]), numpy.array([lat]), method='nat'))
                for iv,vn in enumerate(varnames):
                    variables[iv][:,ip] = numpy.squeeze(grid2xy(tmp_zyx[iv],
                        numpy.array([lon]), numpy.array([lat]), method='nat'))
                del tmp_zyx
            else:
                raise ValueError('Invalid colocation method: %s'%(method))
        except:
            self.exception('Failed to colocalize data for date %s', date)
    # Promote axis 0 of depths and variables to a level axis
    for v in [deps_mod] + variables:
        v.getAxis(0).id = 'level'
        v.getAxis(0).designateLevel()
    data = tuple([lons_mod, lats_mod, deps_mod] + variables)
    self.verbose('Colocalized data:\n  %s',
                 '\n  '.join(self.describe(o) for o in data))
    return data
# Comversions ct = comptime('2000-01') print ct.day # -> 1 print comptime(['2000', '2001']) # -> [2000-1-1 0:0:0.0, 2001-1-1 0:0:0.0] print numtime(ct) # -> 730120.0 print date2num(Datetime(2000, 1, 1)) # -> 730120.0 print strtime([730120.0, cdtime.reltime(1, 'years since 1999')]) ['2000-1-1 0:0:0.0', '2000-1-1 0:0:0.0'] # Axes de temps taxis = create_time((0, 3.), 'days since 2000-01-01') print datetime(taxis) # -> [datetime.datetime(2000, 1, 1, 0, 0), # datetime.datetime(2000, 1, 2, 0, 0), # datetime.datetime(2000, 1, 3, 0, 0)] # Additions/soustractions print add(ct, 2, 'days') # -> 2000-1-3 0:0:0.0 print add(taxis, 1, 'month')[:] # -> [ 31. 32. 33.] # Zones print utc_to_paris('2000-01-01 12:00') # -> 2000-1-1 13:0:0.0 print utc_to_paris('2000-06-01 12:00')