def subaxis2slice(cdaxis, values):
    """Convert the coordinate values of a sub-axis to a slice of its parent axis.

    Parameters
    ----------
    cdaxis: cdms2 axis, 1D array or cdms2 variable
        The parent axis. When a cdms2 variable is passed, the conversion
        is applied to each of its axes in turn.
    values: array-like
        Coordinate values of the sub-axis (first and last values are used
        as a closed interval), or a sequence of such arrays when ``cdaxis``
        is a variable.

    Return
    ------
    slice, list or None
        A :class:`slice` for a single axis, a list of results (one per
        axis) for a variable, or ``None`` when the interval cannot be
        mapped onto the axis.
    """
    # Variable case: convert each of its axes with the matching values.
    # FIX: the original called the undefined name ``subaxis2index`` here;
    # the per-axis conversion is this very function.
    if cdms2.isVariable(cdaxis):
        return [subaxis2slice(cdax, vv)
                for cdax, vv in zip(cdaxis.getAxisList(), values)]
    # Ensure a true cdms2 axis so that mapIntervalExt is available
    cdaxis = create_axis(cdaxis)
    # 'cc' -> closed interval on both ends
    ijk = cdaxis.mapIntervalExt((values[0], values[-1], 'cc'))
    if ijk is None:  # interval not found on the axis
        return
    return slice(*ijk)
def subaxis2slice(cdaxis, values):
    """Map the coordinate values of a sub-axis onto a slice of ``cdaxis``.

    ``cdaxis`` may be a cdms2 axis (or anything :func:`create_axis`
    accepts), in which case the closed interval ``values[0]..values[-1]``
    is converted to a :class:`slice`, or a cdms2 variable, in which case
    each of its axes is processed with the corresponding item of
    ``values``.  ``None`` is returned when the interval is not found.
    """
    # Variable: one conversion per axis
    if cdms2.isVariable(cdaxis):
        results = []
        for cdax, vv in zip(cdaxis.getAxisList(), values):
            results.append(subaxis2index(cdax, vv))
        return results

    # Single axis: map the closed value interval to indices
    axis = create_axis(cdaxis)
    interval = axis.mapIntervalExt((values[0], values[-1], 'cc'))
    return None if interval is None else slice(*interval)
def xycompress(valid, vari, **atts):
    """Keep valid spatial points

    Parameters
    ----------
    valid: 1D or 2D bool array
        Mask of points to keep; 2D for data on a structured grid
    vari: array
        The variable to compress along its trailing spatial dimension(s)
    **atts:
        Extra attributes to set on the output cdms2 variable

    Return
    ------
    array:
        For a cdms2 variable with a 2D ``valid``, the (y, x) dimensions
        are replaced by a flat 'point' axis of length ``valid.sum()``;
        otherwise the last axis is compressed in place.
    """
    # Slice covering the extra (non-spatial) leading dimensions
    pslice = (slice(None), ) * (vari.ndim - valid.ndim)

    if cdms2.isVariable(vari):
        nv = valid.sum()  # number of valid points

        if valid.ndim == 2:
            # Structured grid: replace (y, x) with a flat 'point' axis.
            # (A redundant ``assert valid.ndim == 2`` guarded this branch
            # in the original; it could never fire and was removed.)
            varo = vari[pslice + (0, slice(0, nv))].clone()
            ax = create_axis((nv, ), id='point', long_name='Spatial points')
            varo.setAxis(-1, ax)
            varo.setGrid(None)  # the 2D grid no longer applies

            # Fill with the valid points only
            varo[:] = vari.asma()[pslice + (valid, )]

        else:
            # 1D case: compress the last axis and its coordinates
            ax = vari.getAxis(-1)
            varo = vari[pslice + (slice(0, nv), )].clone()
            varo.getAxis(-1)[:] = N.compress(valid, ax[:])
            varo[:] = N.ma.compress(valid, vari.asma(), axis=-1)

        # Attributes
        set_atts(varo, **atts)

    else:
        # Plain numpy or numpy.ma array: boolean indexing does the job
        varo = vari[pslice + (valid, )]

    return varo
def create_mv2_scattered_xyzt(np=10, nz=6, nt=5, xmin=-6., xmax=-3, ymin=46, ymax=48, zmin=-200, zmax=0, tmin='2016', tmax='2016-02', tunits='days since 2016-01-01'): """Create a VM2 array of scattered data Return ------ array: longitudes array: latitude MV2.array: data """ # Axes shape = () axes = [] if nt != 0: time = create_time(lindates(tmin, tmax, nt), tunits) shape += nt, axes.append(time) if nz != 0: dep = create_dep(N.linspace(zmin, zmax, nz)) axes.append(dep) shape += nz, shape += np, axes.append(create_axis((np, ))) # Array data = MV2.array(N.arange(N.multiply.reduce(shape)).reshape(shape), copy=False, axes=axes, id='temp', dtype='d') # Positiions lons = N.linspace(xmin, xmax, np) lats = N.linspace(ymin, ymax, np) return lons, lats, data
def create_Dthnc(self, fileout, TimeSeries):
    """Build a boundary-forcing time-series netcdf file (via create_ncTH).

    For each time step, the output field is the sum of a tidal
    contribution (when ``self.tidal``) and a residual contribution read
    from ``self.res_file`` (when ``self.residual``), horizontally (and,
    for 3D files, vertically) interpolated to the boundary points
    ``self.llon``/``self.llat``.

    Parameters
    ----------
    fileout:
        Output file name; the substring '2D' selects a 2D file
        (``i23d=2``, single level), otherwise 3D (``i23d=3``).
    TimeSeries:
        Times in days; written to file as seconds from TimeSeries[0].

    NOTE(review): times are converted with ``TimeSeries[n] + 1`` against
    units 'days since 1-1-1' — presumably to match the calendar used by
    the residual file; verify against the data source.
    """
    # 2D vs 3D output switch, inferred from the file name
    if '2D' in fileout:
        self.i23d = 2
    else:
        self.i23d = 3

    # create file: one level for 2D, the full sigma/z stack for 3D
    if self.i23d == 3:
        Nlev = self.zz.shape[1]
    else:
        Nlev = 1
    time_Series, nc = create_ncTH(
        fileout, len(self.llon), Nlev, self.ivs,
        np.round((TimeSeries - TimeSeries[0]) * 24 * 3600))

    for n in range(0, len(TimeSeries)):
        # One time value per (point, level) pair for grid2xy
        tin = create_time(np.ones(len(self.llon) * Nlev) * (TimeSeries[n] + 1),
                          units='days since 1-1-1')
        # Accumulator: (variable, point, level)
        total = np.zeros(shape=(self.ivs, len(self.llon), Nlev))

        # get tide
        if self.tidal:
            var = self.HC.keys()
            # sorted() keeps variable order deterministic across runs
            for i, v in enumerate(sorted(var)):
                # horizontal interpolation
                tmp = get_tide(self.constidx, self.tfreq, self.HC[v],
                               np.array(TimeSeries[n]), self.lat0)
                if self.i23d > 2:
                    # vertical interpolation
                    tmp = vertical_extrapolation(tmp, self.zz, z0=self.z0)
                total[i, :, :] = total[i, :, :] + tmp

        # get residual (non-tidal) signal from the model file
        if self.residual:
            var = self.res_vars
            for i, v in enumerate(sorted(var)):
                arri = self.res_file[v][:]
                if self.i23d > 2:
                    # Wrap the 4D field with a dummy 'member' axis so that
                    # grid2xy can interpolate in x, y, z and t at once
                    dep = create_depth(arri.getAxis(1)[:])
                    extra = create_axis(N.arange(1), id='member')
                    arri2 = np.tile(arri, [1, 1, 1, 1, 1])
                    arri3 = MV2.array(arri2,
                                      axes=[extra, arri.getAxis(0), dep,
                                            arri.getAxis(2), arri.getAxis(3)],
                                      copy=False, fill_value=1e20)
                    # Depths must be negative downward for the target zz
                    zi = arri.getAxis(1)[:]
                    if np.mean(zi) > 0:
                        zi = zi * -1
                    tb = grid2xy(arri3,
                                 xo=np.tile(self.llon, [Nlev, 1]).T.flatten(),
                                 yo=np.tile(self.llat, [Nlev, 1]).T.flatten(),
                                 zo=self.zz.flatten(),
                                 method='linear', to=tin, zi=zi)
                else:
                    tb = grid2xy(arri, xo=self.llon, yo=self.llat,
                                 method='linear', to=tin)

                # Fall back to nearest-neighbour interpolation for points
                # that linear interpolation left masked (e.g. on land)
                if np.any(tb.mask == True):
                    bad = tb.mask == True
                    if len(bad.shape) > 1:
                        bad = bad[0, :]
                    tin_bad = create_time(
                        np.ones(len(bad)) * (TimeSeries[n] + 1),
                        units='days since 1-1-1')
                    if self.i23d > 2:
                        llon = np.tile(self.llon, [Nlev, 1]).T.flatten()
                        llat = np.tile(self.llat, [Nlev, 1]).T.flatten()
                        zz = self.zz.flatten()
                        zi = arri.getAxis(1)[:]
                        if np.mean(zi) > 0:
                            zi = zi * -1
                        tb[0, bad] = grid2xy(arri3, xo=llon[bad],
                                             yo=llat[bad], zo=zz[bad],
                                             method='nearest', to=tin_bad,
                                             zi=zi)
                    else:
                        tb[bad] = grid2xy(
                            arri, xo=np.array(self.llon)[bad].tolist(),
                            yo=np.array(self.llat)[bad].tolist(),
                            method='nearest', to=tin_bad)
                    # Still masked after the fallback: report it
                    if np.any(tb.mask == True):
                        print('probleme')

                total[i, :, :] = total[i, :, :] + np.reshape(
                    tb, (len(self.llon), Nlev))

        # Reorder to (point, level, variable) as expected by the file
        total = np.transpose(total, (1, 2, 0))
        # NOTE(review): debugger breakpoint left in on NaN detection —
        # consider replacing with a raised exception before production use
        if np.isnan(total).any():
            import pdb
            pdb.set_trace()
        # Periodic progress logging
        if n % 100 == 0:
            self.logger.info(
                'For timestep=%.f, max=%.4f, min=%.4f , max abs diff=%.4f' %
                (TimeSeries[n], total.max(), total.min(),
                 abs(np.diff(total, n=1, axis=0)).max()))
        time_Series[n, :, :, :] = total

    nc.close()
from vcmq import create_axis, isaxis, coord2slice, cdms2, N


def subaxis2slice(cdaxis, values):
    """Convert the coordinate values of a sub-axis to a slice of its parent axis.

    Parameters
    ----------
    cdaxis: cdms2 axis, 1D array or cdms2 variable
        Parent axis; a cdms2 variable is processed axis by axis.
    values: array-like
        Sub-axis coordinate values; the closed interval from the first
        to the last value is mapped onto the axis.

    Return
    ------
    slice, list or None
        ``None`` when the interval is not found on the axis.
    """
    # Variable case: one conversion per axis.
    # FIX: the original called the undefined name ``subaxis2index``.
    if cdms2.isVariable(cdaxis):
        return [subaxis2slice(cdax, vv)
                for cdax, vv in zip(cdaxis.getAxisList(), values)]
    # Ensure a true cdms2 axis so mapIntervalExt is available
    cdaxis = create_axis(cdaxis)
    ijk = cdaxis.mapIntervalExt((values[0], values[-1], 'cc'))
    if ijk is None:
        return
    return slice(*ijk)


# Demo: recover the slice of the parent axis matching a sub-axis
cdaxis = create_axis(N.linspace(0, 11., 17))
subaxis = cdaxis[2:7]
res = subaxis2slice(cdaxis, subaxis)
nez = 2 # Imports from vcmq import (N, MV2, code_file_name, os, P, create_lon, create_lat, create_dep, create_time, lindates, create_axis, reltime, grid2xy, comptime, set_grid, rotate_grid, add_grid) # Rectangular xyzt with 1d z data and coords # - data lon = create_lon(N.linspace(lon0, lon1, nx)) lat = create_lat(N.linspace(lat0, lat1, ny)) dep = create_dep(N.linspace(dep0, dep1, nz)) time = create_time(lindates(time0, time1, nt)) extra = create_axis(N.arange(ne), id='member') data = N.resize(lat[:], (ne, nt, nz, nx, ny)) # function of y data = N.moveaxis(data, -1, -2) #data = N.arange(nx*ny*nz*nt*ne, dtype='d').reshape(ne, nt, nz, ny, nx) vi = MV2.array(data, axes=[extra, time, dep, lat, lon], copy=False, fill_value=1e20) N.random.seed(0) xo = N.random.uniform(lon0, lon1, np) yo = N.random.uniform(lat0, lat1, np) zo = N.random.uniform(dep0, dep1, np) to = comptime(N.random.uniform(reltime(time0, time.units).value, reltime(time1, time.units).value, np), time.units) # Rectangular xyzt with 1d z
from vcmq import create_axis, isaxis, coord2slice, cdms2, N


def subaxis2slice(cdaxis, values):
    """Convert the coordinate values of a sub-axis to a slice of its parent axis.

    Parameters
    ----------
    cdaxis: cdms2 axis, 1D array or cdms2 variable
        Parent axis; a cdms2 variable is processed axis by axis.
    values: array-like
        Sub-axis coordinate values; the closed interval from the first
        to the last value is mapped onto the axis.

    Return
    ------
    slice, list or None
        ``None`` when the interval is not found on the axis.
    """
    # Variable case: one conversion per axis.
    # FIX: the original called the undefined name ``subaxis2index``.
    if cdms2.isVariable(cdaxis):
        return [subaxis2slice(cdax, vv)
                for cdax, vv in zip(cdaxis.getAxisList(), values)]
    # Ensure a true cdms2 axis so mapIntervalExt is available
    cdaxis = create_axis(cdaxis)
    ijk = cdaxis.mapIntervalExt((values[0], values[-1], 'cc'))
    if ijk is None:
        return
    return slice(*ijk)


# Demo: recover the slice of the parent axis matching a sub-axis.
# FIX: the original used a Python-2 print statement, a SyntaxError
# under Python 3.
cdaxis = create_axis(N.linspace(0, 11., 17))
subaxis = cdaxis[2:7]
print(subaxis2slice(cdaxis, subaxis))
def generate_pseudo_ensemble(ncpat, varnames=None, nrens=50, enrich=2.,
                             norms=None, getmodes=False, logger=None,
                             asdicts=False, anomaly=True, ncensfile=None,
                             **kwargs):
    """Generate a static pseudo-ensemble from a single simulation

    Model states are read at ``nrens * enrich`` dates; when the
    enrichment factor is greater than 1, a reduced-rank ensemble of size
    ``nrens`` is rebuilt from an EOF decomposition of those states.

    Parameters
    ----------
    ncpat: string
        netcdf file name or pattern
    varnames:
        Variable names to read (passed to
        :func:`load_model_at_regular_dates`)
    nrens: int
        Ensemble size
    enrich: float
        Enrichment factor (clipped to a minimum of 1)
    norms:
        Optional norms for stacking, as a list or a dict keyed by
        variable properties
    getmodes: bool
        Get also EOFs and eigen values
    logger:
        Optional logger; one is created when not provided
    asdicts: bool
        Return :class:`OrderedDict`s keyed by variable id instead of
        lists
    anomaly: bool
        Build the ensemble from anomalies
    ncensfile:
        Optional netcdf file to dump the ensemble to
    **kwargs:
        Extra parameters are passed to :func:`load_model_at_dates`

    Return
    ------
    list (or dict) of arrays:
        variables with their name as keys
    dict: eofs, ev and variance, optional
        eofs: list (or dict) of arrays(nmodes, ...), optional
            EOFs
        ev: array(nmodes), optional
            Eigen values
        var: array
            Variance

    Raises
    ------
    SONATError
        When the fortran ``eofcovar`` routine reports a failure.
    """
    # Logger
    kwlog = kwfilter(kwargs, 'logger_')
    if logger is None:
        logger = get_logger(**kwlog)
    logger.verbose('Generating pseudo-ensemble')

    # Ensemble size: read more states than members to enrich the EOF basis
    enrich = max(enrich, 1.)
    nt = int(nrens * enrich)
    logger.debug(
        ' enrich={enrich}, nt={nt}, ncpat={ncpat}, varnames={varnames}'.
        format(**locals()))

    # Read variables at nt regularly spaced dates
    logger.debug('Reading the model at {} dates'.format(nt))
    data = load_model_at_regular_dates(ncpat, varnames=varnames, nt=nt,
                                       asdict=False, **kwargs)
    single = not isinstance(data, list)  # one variable only?

    # Norms
    if isinstance(norms, dict):
        norms = var_prop_dict2list(data, norms)

    # Enrichment: only needed when more states than members were read
    witheofs = nrens != nt
    if witheofs:
        logger.debug('Computing reduced rank ensemble with EOFs analysis')

        # Stack packed variables together
        stacker = Stacker(data, norms=norms, logger=logger)
        meanstate = N.zeros(stacker.ns)
        states = N.asfortranarray(stacker.stacked_data.copy())

        # Compute EOFs
        stddev, svals, svecs, status = f_eofcovar(
            dim_fields=stacker.ns, offsets=1, remove_mstate=0, do_mv=0,
            states=states, meanstate=meanstate)
        if status != 0:
            raise SONATError('Error while calling fortran eofcovar routine')
        neof = svals.size  # computed
        neofr = nrens - 1  # retained
        svals = svals[:neofr] * N.sqrt(
            (neof - 1.) / neof)  # to be consistent with total variance
        svecs = svecs[:, :neofr]

        # Generate ensemble by sampling the retained modes
        sens = f_sampleens(svecs, svals, meanstate, flag=0)

        # Unstack back into per-variable arrays
        ens = stacker.unstack(sens, format=2,
                              rescale='norm' if anomaly else True)
        if getmodes:

            # Modes
            mode_axis = create_axis(N.arange(1, neofr + 1, dtype='i'),
                                    id='mode')
            eofs = stacker.unstack(svecs, firstdims=mode_axis,
                                   id='{id}_eof', rescale=False, format=1)
            svals = MV2.array(svals, axes=[mode_axis], id='ev',
                              attributes={'long_name': 'Eigen values'})
            svals.total_variance = float(stacker.ns)

            # Variance
            vv = stacker.format_arrays(
                [d.var(axis=0) for d in stacker.datas],
                id='{id}_variance', mode=1)
            variance = stacker.unmap(vv)

    else:  # No enrichment -> take the anomaly if requested

        logger.debug('Getting the anomaly to build the ensemble')
        ens = data
        if anomaly:
            if single:
                ens[:] = ens.asma() - ens.asma().mean(axis=0)
            else:
                for i, e in enumerate(ens):
                    ens[i][:] = e.asma() - e.asma().mean(axis=0)

    # Finalize: attach a common 'member' axis
    getmodes = getmodes and witheofs
    member_axis = create_axis(N.arange(nrens, dtype='i'), id='member',
                              long_name='Member')
    if single:
        ens.setAxis(0, member_axis)
    else:
        for var in ens:
            var.setAxis(0, member_axis)

    # Dump to file
    if ncensfile:
        logger.debug('Dump the ensemble to netcdf')
        checkdir(ncensfile)
        f = cdms2.open(ncensfile, 'w')
        ensvars = list(ens) if not single else [ens]
        if getmodes:
            if single:
                ensvars.append(eofs)
                ensvars.append(variance)
            else:
                ensvars.extend(eofs)
                ensvars.extend(variance)
            ensvars.append(svals)
        for var in ensvars:
            f.write(var)
        f.close()
        logger.created(ncensfile)

    # As dicts keyed by variable id
    if asdicts:
        if single:
            ens = OrderedDict([(ens.id, ens)])
            if getmodes:
                eofs = OrderedDict([(eofs.id, eofs)])
                variance = OrderedDict([(variance.id, variance)])
        else:
            ens = OrderedDict([(var.id, var) for var in ens])
            if getmodes:
                eofs = OrderedDict([(var.id, var) for var in eofs])
                variance = OrderedDict([(var.id, var) for var in variance])

    # Return
    if not getmodes:
        return ens
    return ens, dict(eofs=eofs, eigenvalues=svals, variance=variance)
def slice_gridded_var(var, member=None, time=None, depth=None, lat=None, lon=None): """Make slices of a variable and squeeze out singletons to reduce it The "member" axis is considered here as a generic name for the first axis of unkown type. .. warning:: All axes must be 1D """ # Check order var = var(squeeze=1) order = var.getOrder() # Unkown axis if '-' in order and member is not None: i = order.find('-') id = var.getAxisIds()[i] if isinstance(member, slice): kw = {id:member} var = var(**kw) else: axo = create_axis(member) cp_atts(var.getAxis(i), axo) var = regrid1d(var, axo, iaxi=i)(squeeze=N.isscalar(member)) # Time interpolation if 't' in order and time is not None: axi = var.getTime() if isinstance(time, slice): var = var(time=time) else: axo = create_time(time, axi.units) var = regrid1d(var, axo)(squeeze=N.isscalar(time)) # Depth interpolation if 'z' in order and depth is not None: if depth=='bottom': var = slice_bottom(var) else: if depth=='surf': depth = slice(-1, None) if isinstance(depth, slice): var = var(level=depth, squeeze=1) # z squeeze only? elif (N.isscalar(depth) and var.getLevel()[:].ndim==1 and depth in var.getLevel()): var = var(level=depth) else: axo = create_dep(depth) if axo[:].max()>10: sonat_warn('Interpolation depth is positive. Taking this opposite') axo[:] *=-1 var = regrid1d(var, axo)(squeeze=N.isscalar(depth)) # Point if (order.endswith('yx') and lon is not None and lat is not None and not isinstance(lat, slice) and not isinstance(lon, slice)): var = grid2xy(var, lon, lat)(squeeze=N.isscalar(lon)) else: # Latitude interpolation if 'y' in order and lat: if isinstance(lat, slice): var = var(lat=lat) else: axo = create_lat(lat) var = regrid1d(var, axo)(squeeze=N.isscalar(lat)) # Longitude interpolation if 'x' in order and lon: if isinstance(lon, slice): var = var(lon=lon) else: axo = create_lon(lon) var = regrid1d(var, axo)(squeeze=N.isscalar(lon)) return var