def create_mv2_gridder_xyzt(nx=8, ny=7, nz=6, nt=5, xmin=-6., xmax=-3,
                            ymin=46, ymax=48, zmin=-200, zmax=0,
                            tmin='2016', tmax='2016-02',
                            tunits='days since 2016-01-01', rotate=0):
    """Create a MV2 array on a rectangular (t, z, y, x) grid

    Any of the four dimensions may be dropped by passing a zero size.
    The array is filled with a simple ramp of doubles.

    Return
    ------
    MV2.array
    """
    # Build the requested axes in (t, z, y, x) order, skipping zero-sized ones
    specs = [
        (nt, lambda: create_time(lindates(tmin, tmax, nt), tunits)),
        (nz, lambda: create_dep(N.linspace(zmin, zmax, nz))),
        (ny, lambda: create_lat(N.linspace(ymin, ymax, ny))),
        (nx, lambda: create_lon(N.linspace(xmin, xmax, nx))),
    ]
    axes = []
    sizes = []
    for size, make_axis in specs:
        if size != 0:
            axes.append(make_axis())
            sizes.append(size)
    shape = tuple(sizes)

    # Ramp-filled masked array carrying the axes
    data = MV2.array(N.arange(N.multiply.reduce(shape)).reshape(shape),
                     copy=False, axes=axes, id='temp', dtype='d')

    # Optionally rotate the horizontal grid
    if rotate:
        grid = data.getGrid()
        if grid is not None:
            grid = rotate_grid(grid, rotate)
            set_grid(data, grid)

    return data
def create_mv2_scattered_xyzt(np=10, nz=6, nt=5, xmin=-6., xmax=-3,
                              ymin=46, ymax=48, zmin=-200, zmax=0,
                              tmin='2016', tmax='2016-02',
                              tunits='days since 2016-01-01'):
    """Create an MV2 array of scattered data

    The trailing axis holds ``np`` scattered points whose positions are
    returned separately as plain arrays.

    Return
    ------
    array: longitudes
    array: latitude
    MV2.array: data
    """
    # Optional time and depth axes come first, then the scattered-point axis
    axes = []
    sizes = []
    if nt != 0:
        axes.append(create_time(lindates(tmin, tmax, nt), tunits))
        sizes.append(nt)
    if nz != 0:
        axes.append(create_dep(N.linspace(zmin, zmax, nz)))
        sizes.append(nz)
    sizes.append(np)
    axes.append(create_axis((np, )))
    shape = tuple(sizes)

    # Ramp-filled masked array carrying the axes
    data = MV2.array(N.arange(N.multiply.reduce(shape)).reshape(shape),
                     copy=False, axes=axes, id='temp', dtype='d')

    # Scattered positions, evenly spread over the requested box
    lons = N.linspace(xmin, xmax, np)
    lats = N.linspace(ymin, ymax, np)

    return lons, lats, data
# Estimate the intrinsic error of radial current speeds from lagged
# differences, then extrapolate it back to lag zero by linear regression.
# NOTE(review): cdms2, data_sample, round_date, create_time, lindates, N and P
# are expected to be imported elsewhere in this file -- confirm.
from vacumm.misc.grid._interp_ import cellerr1d
from scipy.stats import linregress

# Read data
f = cdms2.open(data_sample('radial_speed.nc'))
sp = f('speed')
spe = f('speed_error')
f.close()

# Create hourly time axis
# The native axis is converted to hours and rounded to whole hours at both
# ends so that the target axis falls on exact hourly dates.
taxi = sp.getTime()
taxi.toRelativeTime('hours since 2000')
ctimesi = taxi.asComponentTime()
ct0 = round_date(ctimesi[0], 'hour')
ct1 = round_date(ctimesi[-1], 'hour')
taxo = create_time(lindates(ct0, ct1, 1, 'hour'), taxi.units)
# NOTE(review): cellerr1d, spe and taxo are prepared here but not used in this
# span -- presumably consumed further down the file.

# Lag error
# - estimation: RMS of the difference between the series and itself shifted
#   by 1..5 time steps
els = []
lags = N.arange(1, 6)
for lag in lags:
    els.append(N.sqrt(((sp[lag:] - sp[:-lag])**2).mean()))
els = N.array(els)
# Fit els ~ a*lag + b; the intercept b estimates the error at zero lag
a, b, _, _, _ = linregress(lags, els)

# - plot: points, fitted line, and the zero-lag intercept as a dashed line
P.figure(figsize=(6, 6))
P.subplot(211)
P.plot(lags, els, 'o')
P.plot([0, lags[-1]], [b, a * lags[-1] + b], 'g')
P.axhline(b, color='0.8', ls='--')
# Estimate the intrinsic error of radial current speeds from lagged
# differences, then extrapolate it back to lag zero by linear regression.
# NOTE(review): cdms2, data_sample, round_date, create_time, lindates, N and P
# are expected to be imported elsewhere in this file -- confirm.
from vacumm.misc.grid._interp_ import cellerr1d
from scipy.stats import linregress

# Read data
f = cdms2.open(data_sample("radial_speed.nc"))
sp = f("speed")
spe = f("speed_error")
f.close()

# Create hourly time axis
# The native axis is converted to hours and rounded to whole hours at both
# ends so that the target axis falls on exact hourly dates.
taxi = sp.getTime()
taxi.toRelativeTime("hours since 2000")
ctimesi = taxi.asComponentTime()
ct0 = round_date(ctimesi[0], "hour")
ct1 = round_date(ctimesi[-1], "hour")
taxo = create_time(lindates(ct0, ct1, 1, "hour"), taxi.units)
# NOTE(review): cellerr1d, spe and taxo are prepared here but not used in this
# span -- presumably consumed further down the file.

# Lag error
# - estimation: RMS of the difference between the series and itself shifted
#   by 1..5 time steps
els = []
lags = N.arange(1, 6)
for lag in lags:
    els.append(N.sqrt(((sp[lag:] - sp[:-lag]) ** 2).mean()))
els = N.array(els)
# Fit els ~ a*lag + b; the intercept b estimates the error at zero lag
a, b, _, _, _ = linregress(lags, els)

# - plot: points, fitted line, and the zero-lag intercept as a dashed line
P.figure(figsize=(6, 6))
P.subplot(211)
P.plot(lags, els, "o")
P.plot([0, lags[-1]], [b, a * lags[-1] + b], "g")
P.axhline(b, color="0.8", ls="--")
# Estimate the intrinsic error of radial current speeds from lagged
# differences, then extrapolate it back to lag zero by linear regression.
# NOTE(review): cdms2, data_sample, round_date, create_time, lindates, N and P
# are expected to be imported elsewhere in this file -- confirm.
from vacumm.misc.grid._interp_ import cellerr1d
from scipy.stats import linregress

# Read data
f = cdms2.open(data_sample('radial_speed.nc'))
sp = f('speed')
spe = f('speed_error')
f.close()

# Create hourly time axis
# The native axis is converted to hours and rounded to whole hours at both
# ends so that the target axis falls on exact hourly dates.
taxi = sp.getTime()
taxi.toRelativeTime('hours since 2000')
ctimesi = taxi.asComponentTime()
ct0 = round_date(ctimesi[0], 'hour')
ct1 = round_date(ctimesi[-1], 'hour')
taxo = create_time(lindates(ct0, ct1, 1, 'hour'), taxi.units)
# NOTE(review): cellerr1d, spe and taxo are prepared here but not used in this
# span -- presumably consumed further down the file.

# Lag error
# - estimation: RMS of the difference between the series and itself shifted
#   by 1..5 time steps
els = []
lags = N.arange(1, 6)
for lag in lags:
    els.append(N.sqrt(((sp[lag:]-sp[:-lag])**2).mean()))
els = N.array(els)
# Fit els ~ a*lag + b; the intercept b estimates the error at zero lag
a, b, _, _, _ = linregress(lags, els)

# - plot: points, fitted line, and the zero-lag intercept as a dashed line
P.figure(figsize=(6, 6))
P.subplot(211)
P.plot(lags, els, 'o')
P.plot([0, lags[-1]], [b, a*lags[-1]+b], 'g')
P.axhline(b, color='0.8', ls='--')
# Build a synthetic 5D (member, time, depth, lat, lon) field plus random
# target positions, presumably to exercise grid2xy below -- TODO confirm.
ne = 4   # size of the extra 'member' axis
nez = 2  # NOTE(review): unused in this span -- presumably used later in the file

# Imports
from vcmq import (N, MV2, code_file_name, os, P, create_lon, create_lat,
                  create_dep, create_time, lindates, create_axis, reltime,
                  grid2xy, comptime, set_grid, rotate_grid, add_grid)

# Rectangular xyzt with 1d z data and coords
# - data
# NOTE(review): lon0/lon1, lat0/lat1, dep0/dep1, time0/time1, nx, ny, nz, nt
# and np are expected to be defined elsewhere in this file -- confirm.
lon = create_lon(N.linspace(lon0, lon1, nx))
lat = create_lat(N.linspace(lat0, lat1, ny))
dep = create_dep(N.linspace(dep0, dep1, nz))
time = create_time(lindates(time0, time1, nt))
extra = create_axis(N.arange(ne), id='member')
data = N.resize(lat[:], (ne, nt, nz, nx, ny))  # function of y
# Swap the last two axes so the array ends up (ne, nt, nz, ny, nx)
data = N.moveaxis(data, -1, -2)
#data = N.arange(nx*ny*nz*nt*ne, dtype='d').reshape(ne, nt, nz, ny, nx)
vi = MV2.array(data, axes=[extra, time, dep, lat, lon], copy=False,
               fill_value=1e20)

# Random target positions and times inside the domain, seeded for
# reproducibility
N.random.seed(0)
xo = N.random.uniform(lon0, lon1, np)
yo = N.random.uniform(lat0, lat1, np)
zo = N.random.uniform(dep0, dep1, np)
to = comptime(N.random.uniform(reltime(time0, time.units).value,
                               reltime(time1, time.units).value, np),
              time.units)
# Original clim N.random.seed(0) s = N.resize(N.sin(N.linspace(0, 1, 13)[:12] * 2 * N.pi), (2, 12)).T clim = MV2.array(s, fill_value=1e20) p = curve(clim[:, 0], 'o-', show=False, subplot=211, title='Original climatology', xmin=-.5, xmax=11.5, xticks=range(12), xticklabels=[strftime('%b', '2000-%i' % i) for i in range(1, 13)]) # Target times times = lindates('2000-01-01', '2001-12-31', 5, 'day') # Interpolations for i, method in enumerate(( 'linear', 'cubic', )): climo = interp_clim(clim, times, method=method) c = curve(climo[:, 0], 'o-', color='gr'[i], show=False, label=method.title(), subplot=212, title='Interpolated climatology', legend=True,
def load_model_at_regular_dates(ncpat, varnames=None, time=None, lat=None,
                                lon=None, level=None, depths=None,
                                modeltype='mars', nt=50, dtfile=None,
                                sort=True, asdict=False, logger=None,
                                **kwargs):
    """Read model output at nearest unique dates with optional linear
    interpolation

    Parameters
    ----------
    ncpat: string or list of strings
        File pattern(s), passed to :func:`list_files_from_pattern`.
    varnames: string, strings
        Generic var names. If None, all variables that are known from the
        :mod:`vacumm.data.cf` module are used.
    level: string, None, list of floats, array, tuple of them, dict
        Here are some possible values:

        - "surf" or "bottom": self explanatory
        - None or "3d": No slice, so get all levels with no interpolation.
        - A list or array of negative depths: get all levels and interpolate
          at these depths.

        Variables sliced with "surf" and "bottom" are returned with an id
        suffixed with "_surf" or "_bottom". You can specify different
        slicings using a tuple of depth specifications. You can specialise
        slicings of a variable using a dictionary with the key as the
        variable name.

    See also
    --------
    :func:`sonat.misc.list_files_from_pattern` for more options

    Examples
    --------
    >>> mdict = load_model_at_regular_dates('myfile.nc', level='surf')
    >>> mdict = load_model_at_regular_dates('myfile.nc', level=('surf', 'bottom'))
    >>> mdict = load_model_at_regular_dates('myfile.nc', varnames=['temp', 'sal'],
    ...     level={'temp':('surf', 'bottom'), 'sal':[-50, -10]})
    >>> mdict = load_model_at_regular_dates('myfile.nc', varnames=['temp', 'sal'],
    ...     level={'temp':('surf', '3d'), 'sal':None}, depths=[-50, -10])
    """
    # Logger
    kwlog = kwfilter(kwargs, 'logger_')
    if logger is None:
        logger = get_logger(**kwlog)
    logger.debug('Loading model at regular dates')

    # Get file list
    ncfiles = list_files_from_pattern(ncpat, time, dtfile=dtfile, sort=True)
    if not ncfiles:
        raise SONATError('No file found')

    # Time interval: when not given, take it from the first and last files
    reqtime = time
    if time is None:

        # First
        taxis = ncget_time(ncfiles[0])
        if taxis is None:
            raise SONATError("Can't get time axis for: " + ncfiles[0])
        ctimes = taxis.asComponentTime()
        ct0 = ctimes[0]

        # Last (reuse the first file's axis when there is a single file)
        if ncfiles[0] != ncfiles[-1]:
            taxis = ncget_time(ncfiles[-1])
            if taxis is None:
                raise SONATError("Can't get time axis for: " + ncfiles[-1])
            ctimes = taxis.asComponentTime()
        ct1 = ctimes[-1]

        # Time
        time = (ct0, ct1)

    # Generate nt regular dates over the interval
    dates = lindates(time[0], time[1], nt)

    # Get time indices: per-file slices pointing at the nearest model steps
    iidict, iiinfo = ncfiles_time_indices(ncfiles, dates, getinfo=True,
                                          asslices=True)
    if iiinfo['missed'] or iiinfo['duplicates']:
        # NOTE(review): the '{nt}' placeholder is never .format()ed before
        # raising, so the message shows the literal '{nt}' -- confirm upstream.
        msg = ("You must provide at least {nt} model time steps to read "
               "independant dates")
        if reqtime:
            msg = msg + (", and your requested time range must be enclosed "
                         "by model time range.")
        raise SONATError(msg)

    # Read
    # NOTE(review): basestring and out.values()[0] below imply Python 2.
    single = isinstance(varnames, basestring)
    if single:
        varnames = [varnames]
    out = OrderedDict()
    vlevels = {}  # per-variable cache of interpreted level specs
    if not isinstance(level, dict):
        level = {'__default__': level}
    for ncfile, tslices in iidict.items():

        # Dataset instance
        ds = DS(ncfile, modeltype, logger_name='SONAT.Dataset',
                logger_level='error')

        # List of well known variables (only when none were requested)
        if varnames is None:
            varnames = []
            for ncvarname in ds.get_variable_names():
                varname = match_known_var(ds[0][ncvarname])
                if varname:
                    varnames.append(varname)

        # Loop on variables
        vardepth = None
        kwvar = dict(lat=lat, lon=lon, verbose=False, bestestimate=False)
        for vname in list(varnames):

            # Level selector for this variable
            if vname in vlevels:  # cached
                vlevel = vlevels[vname]
            else:
                vlevel = interpret_level(dicttree_get(level, vname),
                                         astuple=True)
                vlevels[vname] = vlevel  # cache it

            # Loop on level specs
            for vlev in vlevel:

                # Output vname and vlev check
                if not isinstance(vlev, basestring):
                    vnameo = vname
                elif vlev not in ('surf', "bottom", "3d"):
                    raise SONATError('Depth string must one of '
                                     'surf, bottom, 3d')
                elif vlev != '3d':
                    # Suffix the output id with the slice name
                    vnameo = vname + '_' + vlev
                else:
                    vlev = None
                    vnameo = vname

                # Slicing level and output depths
                if vlev not in ['surf', 'bottom']:  # numeric so interpolation
                    if vlev is None:
                        vdep = depths if depths is not None else None
                    else:
                        vdep = vlev
                    interp = vdep is not None
                    if interp:
                        vlev = None
                else:
                    interp = False

                # Read and aggregate
                vout = out.setdefault(vnameo, [])
                # vinterp = None
                for tslice in tslices:

                    # Get var
                    kwvar['time'] = tslice
                    var = ds(vname, level=vlev, **kwvar)

                    # Interpolate at numeric depths
                    if interp and var.getLevel() is not None:

                        # Get depths
                        if True or vardepth is None:  #FIXME: bad to always read it
                            vardepth = ds.get_depth(level=vlev, zerolid=True,
                                                    **kwvar)

                        # Interpolate
                        var = ds._interp_at_depths_(var, vardepth, vdep,
                                                    extrap='top')

                    # Id with suffix
                    var.id = vnameo

                    # Store results
                    vout.append(var)

    # Concatenate the per-file pieces along time
    for vname, vout in out.items():
        out[vname] = MV2_concatenate(vout)

    # Dict
    if asdict:
        return out

    # Single
    out = out.values()
    if single:
        return out[0]
    return out
def list_files_from_pattern(ncpat, time=None, dtfile=None, sort=True, **subst):
    """List files possibly with glob and date patterns

    Parameters
    ----------
    ncpat: string or list of strings
        File name with date patterns
    time: tuple, None
        Date interval
    dtfile: tuple, None
        Time step between two files like ``(10, 'days')``. This time step is
        assumed to be constant across files.
    sort: bool
        Sort after listing?
    \**subst: dict
        Use for substitution in ``ncpat``.
    """
    if isinstance(ncpat, list):
        # Recurse on each pattern and merge the results
        files = []
        for filepat in ncpat:
            files.extend(list_files_from_pattern(filepat, time=time,
                                                 dtfile=dtfile, **subst))
    else:
        with_magic = has_magic(ncpat)
        scan_fields, scan_props = scan_format_string(ncpat)

        if scan_props['with_time']:  # date pattern in the file name

            if time is None:
                # No time interval: degrade the date pattern to a glob pattern
                sonat_warn("You should better provide a time interval "
                           "with a date pattern in file name")
                files = glob(DatePat2GlobFormatter().format(ncpat, **subst))
            else:
                # Deduce the file frequency from the date format
                date_format = scan_fields[scan_props['with_time'][0]]['format_spec']
                freq = pat2freq(date_format)
                if dtfile is None:
                    dtfile = 1, freq
                    sonat_warn('Time steps between files not explicitly specified. '
                               'Set to {}. You may miss first files!'.format(dtfile))
                elif not isinstance(dtfile, tuple):
                    dtfile = dtfile, freq

                # Walk candidate dates one file step at a time, starting one
                # step before the interval so the first file is not missed
                files = []
                ct0 = add_time(time[0], -dtfile[0], dtfile[1])
                ct1 = time[-1]
                for date in lindates(ct0, ct1, 1, dtfile[1]):
                    fields = subst.copy()
                    fields['date'] = adatetime(date)
                    candidate = ncpat.format(**fields)
                    if with_magic:
                        files.extend(glob(candidate))
                    elif os.path.exists(candidate):
                        files.append(candidate)

        elif with_magic:  # plain glob pattern
            files = glob(ncpat)

        else:  # plain file name
            files = [ncpat]

    # Keep existing files only, without duplicates
    files = [ncfile for ncfile in files if os.path.exists(ncfile)]
    files = list(set(files))

    # Sort if requested, optionally with a user-supplied key function
    if sort:
        files.sort(key=sort if callable(sort) else None)

    return files