def create_wind(fname, xi, eta):
    '''Create an empty surface wind file for GNOME (netcdf-3).

    fname -- output file name
    xi    -- lon dimension size
    eta   -- lat dimension size

    Creates curvilinear lon/lat, depth and air_u/air_v variables plus a
    record (unlimited) time dimension. Values are not filled here; the
    caller fills them later. Time units come from the module-level
    variable tunits.
    '''
    # create dims first, then reopen in append mode to add variables:
    nc = netcdf.ncopen(fname, 'w', version=3)
    nc.add_dim('lon', xi)
    nc.add_dim('lat', eta)
    nc.add_dim('time', 0)  # unlimited/record dimension
    nc.close()

    nc = netcdf.ncopen(fname, 'a')

    # lon
    v = nc.add_var('lon', 'double', ('lat', 'lon'))
    v.add_att('long_name', 'Longitude')
    v.add_att('units', 'degrees_east')
    v.add_att('standard_name', 'longitude')
    v.add_att('point_spacing', 'even')

    # lat
    v = nc.add_var('lat', 'double', ('lat', 'lon'))
    v.add_att('long_name', 'Latitude')
    v.add_att('units', 'degrees_north')
    v.add_att('standard_name', 'latitude')
    v.add_att('point_spacing', 'even')

    # depth
    v = nc.add_var('depth', 'double', ('lat', 'lon'))
    v.add_att('long_name', 'Bathymetry')
    v.add_att('units', 'meters')
    v.add_att('positive', 'down')
    v.add_att('standard_name', 'depth')

    # u
    v = nc.add_var('air_u', 'double', ('time', 'lat', 'lon'),
                   fill_value=-9.9999e+32)
    v.add_att('long_name', 'Eastward Wind')
    v.add_att('units', 'm/s')
    v.add_att('scale_factor', 1.)
    v.add_att('add_offset', 0.)
    v.add_att('standard_name', 'eastward_wind')

    # v
    v = nc.add_var('air_v', 'double', ('time', 'lat', 'lon'),
                   fill_value=-9.9999e+32)
    v.add_att('long_name', 'Northward Wind')
    v.add_att('units', 'm/s')
    v.add_att('scale_factor', 1.)
    v.add_att('add_offset', 0.)
    v.add_att('standard_name', 'northward_wind')

    # time
    v = nc.add_var('time', 'double', ('time',))
    v.add_att('long_name', 'Valid Time')
    v.add_att('units', tunits)
    v.add_att('standard_name', 'time')

    # global attributes; fix: 'Conventions' was misspelled 'Convetions',
    # which hides the CF conformance declaration from readers (create_uv
    # in this file spells it correctly):
    nc.add_att('Conventions', 'CF-1.3')
    nc.add_att('grid_type', 'curvilinear')
    nc.add_att('title', 'forecast')
    nc.close()
def __init__(self, grd, quiet=True):
    '''Open a grid from a local path or an opendap url.'''
    remote = grd.startswith('http')
    self.isremote = remote
    # remote urls are kept as given; local paths are resolved:
    self.name = grd if remote else os.path.realpath(grd)
    self.type = 'grid'
    self.quiet = quiet
    self.nc = netcdf.ncopen(self.name)
    self.load_dims()
    self.load()
    # load_uvp() used to create uvp variables from rho ones so that less
    # data would be loaded from remote opendap urls. Variables are now
    # loaded only when needed, so there is no big gain and it is
    # disabled here; users may still call self.load_uvp() after object
    # creation to force that less data is downloaded from a remote grid.
    if 0:
        self.load_uvp()
    self.load_atts()
    # about proj:
    self.load_proj()
def __init__(self, his, grd=False, quiet=True):
    '''Open a history source: local path, opendap url or other object.'''
    is_url = cb.isstr(his) and his.startswith('http:')
    self.isremote = is_url
    if cb.isstr(his) and not is_url:
        # local file: store the resolved absolute path
        self.name = os.path.realpath(his)
    else:
        # url or non-string source: keep as given
        self.name = his
    self.type = 'his'
    self.quiet = quiet
    self.nc = netcdf.ncopen(self.name)
    self.load_grid(grd)
    self.load()
    self.load_dims()
    self.load_atts()
def __init__(self, grd, quiet=True):
    '''Open a grid file (local path or opendap url).'''
    if grd.startswith('http'):
        # remote opendap address, use as-is:
        self.name, self.isremote = grd, True
    else:
        self.name, self.isremote = os.path.realpath(grd), False
    self.type = 'grid'
    self.quiet = quiet
    self.nc = netcdf.ncopen(self.name)
    self.load()
    self.load_dims()
    self.load_uvp()
    self.load_atts()
def __init__(self, his, grd=False, quiet=True):
    '''Open a history source (local path, url or other object).'''
    if isinstance(his, basestring) and not his.startswith('http:'):
        # local file: resolve to an absolute path
        self.name = os.path.realpath(his)
    else:
        # url or non-string source: keep as given
        self.name = his
    self.type = 'his'
    self.quiet = quiet
    self.nc = netcdf.ncopen(self.name)
    self.load_grid(grd)
    self.load()
    self.load_dims()
    self.load_atts()
def s_params(nc,show=0): """ Get s-coordinates parameters from ROMS file Gets from ROMS output files the data needed to calculate vertical s-coordinate levels. This function looks for the required values among variables, file dimensions and file attributes. mma 28-7-2007; vs and vt added aug 2013 (Guayaquil) """ if isinstance(nc,basestring): nc=netcdf.ncopen(nc) theta_s=theta_b=hc=N=False theta_s_src=theta_b_src=hc_src=N_src='' vt=1 vs=1 if 'Vtransform' in nc.varnames: vt=nc.vars['Vtransform'][:] if 'Vstretching' in nc.varnames: vs=nc.vars['Vstretching'][:] v='theta_s' if v in nc.atts.keys(): theta_s=nc.atts[v].value theta_s_source='file attribute' elif v in nc.varnames: theta_s=nc.vars[v][:] theta_s_source='variable' v='theta_b' if v in nc.atts.keys(): theta_b=nc.atts[v].value theta_b_source='file attribute' elif v in nc.varnames: theta_b=nc.vars[v][:] theta_b_source='variable' v='hc' if v in nc.atts.keys(): hc=nc.atts[v].value hc_source='file attribute' elif v in nc.varnames: hc=nc.vars[v][:] hc_source='variable' else: # if vtransform=1 --> hc=min(Tcline,hmin) # else: hc=Tcline if vt==1: hmin=[] Tcline=[] v='Tcline' if v in nc.atts.keys(): Tcline=nc.atts[v].value elif v in nc.varnames: Tcline=nc.vars[v][:] if 'h' in nc.varnames: hmin=nc.vars['h'][:].min() if hmin and Tcline: hc=np.min([hmin,Tcline]) hc_source = 'min of hmin and Tcline'; else: hc='not found' else: # vt==2 hc=Tcline hc_source = 'Tcline' if 's_rho' in nc.dims.keys(): N=nc.dims['s_rho'] N_source = 'file dimension s_rho'; elif 'sc_r' in nc.atts.keys(): N=nc.atts['sc_r']['value'] N_source = 'file attribute sc_r'; elif 'sc_r' in nc.varnames: N=len(nc.vars['sc_r'][:]) N_source = 'length of variable sc_r'; if show: # show sources: print '%-10s : %4.2f %-10s' % ('theta_s',theta_s,theta_s_source) print '%-10s : %4.2f %-10s' % ('theta_b',theta_b,theta_b_source) print '%-10s : %4.2f %-10s' % ('hc',hc,hc_source) print '%-10s : %4d %-10s' % ('N',N,N_source) print '%-10s : %4d %-10s' % ('vt',vt,'variable') print 
'%-10s : %4d %-10s' % ('vs',vs,'variable') return theta_s, theta_b, hc, N,vt,vs
def narr_file_data(fname, xlim=False, ylim=False, quiet=False):
    '''
    Returns bulk data from one NARR file

    fname : NARR data file
    xlim, ylim : optional lon/lat limits used to crop the domain
    quiet : suppress progress messages

    Returns a dict of cb.Data objects (tair, rhum, pres, prate, radsw,
    radlw, dlwrf, wspd, uwnd, vwnd, sustr, svstr, cloud), or {} if the
    file cannot be opened.
    '''
    out = {}

    # loading grid:
    if 0:
        # dead branch kept from an older grid source (module-level grd):
        if not quiet: print(' reading lon,lat from file %s' % grd)
        nc = netcdf.ncopen(grd)
        x = nc.vars['East_longitude_0-360'][0, ...] - 360.
        y = nc.vars['Latitude_-90_to_+90'][0, ...]  # time always 1 !!
        nc.close()
    else:
        # grid comes from a text file (module-level grdTxt):
        if not quiet: print(' reading lon,lat from file %s' % grdTxt)
        x, y = load_grid()
        #x=x-360.
        x = -x

    ny, nx = x.shape

    # crop to the requested lon/lat window (full domain by default):
    if (xlim, ylim) == (False, False):
        i0, i1, j0, j1 = 0, nx, 0, ny
    else:
        i0, i1, j0, j1 = calc.ij_limits(x, y, xlim, ylim, margin=0)
    x = x[j0:j1, i0:i1]
    y = y[j0:j1, i0:i1]

    try:
        nc = netcdf.ncopen(fname)
    except:
        # unreadable/missing file: return empty result
        return {}

    # slice strings for netcdf.use:
    xx = str(i0) + ':' + str(i1)
    yy = str(j0) + ':' + str(j1)

    tdim = netcdf.fdim(nc, 'time1')
    if tdim != 1: print('WARNING: tdim !=1 !!!!!!')

    # T surface [K->C]
    if not quiet: print(' --> T air')
    tair = netcdf.use(nc, 'Temperature_surface', time1=0, x=xx, y=yy)
    tair = tair - 273.15
    out['tair'] = cb.Data(x, y, tair, 'C')

    # R humidity [% -> 0--1]
    if not quiet: print(' --> R humidity')
    rhum = netcdf.use(nc, 'Relative_humidity', time1=0, x=xx, y=yy)
    out['rhum'] = cb.Data(x, y, rhum / 100., '0--1')

    # surface pressure [Pa]
    if not quiet: print(' --> Surface pressure')
    pres = netcdf.use(nc, 'Pressure_surface', time1=0, x=xx, y=yy)
    out['pres'] = cb.Data(x, y, pres, 'Pa')

    # P rate [kg m-2 s-1 -> cm/d]
    if not quiet: print(' --> P rate')
    prate = netcdf.use(nc, 'Precipitation_rate', time1=0, x=xx, y=yy)
    prate = prate * 86400 * 100 / 1000.
    out['prate'] = cb.Data(x, y, prate, 'cm/d')

    # Net shortwave flux [ W m-2]
    if not quiet: print(' --> Net shortwave flux')
    if not quiet: print('       SW down')
    sw_down = netcdf.use(nc, 'Downward_shortwave_radiation_flux', time1=0, x=xx, y=yy)
    if not quiet: print('       SW up')
    sw_up = netcdf.use(nc, 'Upward_short_wave_radiation_flux_surface', time1=0, x=xx, y=yy)
    sw_net = sw_down - sw_up
    out['radsw'] = cb.Data(x, y, sw_net, 'W m-2', info='positive downward')

    # Net longwave flux [W/m^2]
    if not quiet: print(' --> Net longwave flux')
    if not quiet: print('       LW down')
    lw_down = netcdf.use(nc, 'Downward_longwave_radiation_flux', time1=0, x=xx, y=yy)
    if not quiet: print('       LW up')
    lw_up = netcdf.use(nc, 'Upward_long_wave_radiation_flux_surface', time1=0, x=xx, y=yy)
    lw_net = lw_down - lw_up
    # sign convention: radlw is the net flux negated (positive upward)
    out['radlw'] = cb.Data(x, y, -lw_net, 'W m-2', info='positive upward')
    # downward lw:
    out['dlwrf'] = cb.Data(x, y, -lw_down, 'W m-2', info='negative... downward')

    # U and V wind speed 10m
    if not quiet: print(' --> U and V wind')
    # vertical dim is height_above_ground1: 10 and 30 m; index 0 is used
    uwnd = netcdf.use(nc, 'u_wind_height_above_ground', height_above_ground1=0, time1=0, x=xx, y=yy)
    vwnd = netcdf.use(nc, 'v_wind_height_above_ground', height_above_ground1=0, time1=0, x=xx, y=yy)
    if not quiet: print(' --> calc wind speed and stress')
    speed = np.sqrt(uwnd**2 + vwnd**2)
    taux, tauy = air_sea.wind_stress(uwnd, vwnd)
    out['wspd'] = cb.Data(x, y, speed, 'm s-1')
    out['uwnd'] = cb.Data(x, y, uwnd, 'm s-1')
    out['vwnd'] = cb.Data(x, y, vwnd, 'm s-1')
    out['sustr'] = cb.Data(x, y, taux, 'Pa')
    out['svstr'] = cb.Data(x, y, tauy, 'Pa')

    # Cloud cover [0--100 --> 0--1]:
    if not quiet: print(' --> Cloud cover')
    clouds = netcdf.use(nc, 'Total_cloud_cover', time1=0, x=xx, y=yy)
    out['cloud'] = cb.Data(x, y, clouds / 100., 'fraction (0--1)')

    nc.close()
    return out
def roms2swan_wind(frc, date0, date1, fname='swan_wind.dat', **kargs): tname = 'wind_time' uname = 'Uwind' vname = 'Vwind' grd = False # needed if wind is 1d dt = 1 # hours path = '' if 'tname' in kargs.keys(): tname = kargs['tname'] if 'uname' in kargs.keys(): uname = kargs['uname'] if 'vname' in kargs.keys(): vname = kargs['vname'] if 'grd' in kargs.keys(): grd = kargs['grd'] if 'dt' in kargs.keys(): dt = kargs['dt'] if 'path' in kargs.keys(): path = kargs['path'] print 'wind: loading time ...' time = netcdf.nctime(frc, tname) #time=np.load('tfile') #cond=(time>=date0)&(time<=date1) cond = (time >= date0) & ( time <= date1 + datetime.timedelta(days=1) ) # add one day at the end, just to avoid the "repeating last" time = time[cond] d = np.diff(pl.date2num(time)) print 'current max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins' % ( d.max() * 24, d.min() * 24, d.max() * 24 * 60, d.min() * 24 * 60) # time=time[::dt] # d=np.diff(pl.date2num(time)) # print ' final max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins'%(d.max()*24, d.min()*24, d.max()*24*60, d.min()*24*60) print 'wind: loading u ...' nc = netcdf.ncopen(frc) u = netcdf.var(nc, uname) print 'wind: loading v ...' v = netcdf.var(nc, uname) # u=u[cond,...][::dt,...] # v=v[cond,...][::dt,...] u = u[cond, ...] v = v[cond, ...] nc.close() if u.ndim == 1: if not grd: print 'must provide grd karg!' 
return nt = u.size eta = netcdf.fdim(grd)['eta_rho'] xi = netcdf.fdim(grd)['xi_rho'] else: nt, eta, xi = u.shape # array may be too big, so do this later (for each it) # # u=np.tile(u[:,np.newaxis,np.newaxis],(1,eta,xi)) # v=np.tile(v[:,np.newaxis,np.newaxis],(1,eta,xi)) i = open(fname, 'w') times = [] time0 = time[0] - datetime.timedelta(hours=dt) ITs = [] for it in range(nt): time0 = time0 + datetime.timedelta(hours=dt) if time0 > date1: break if time0 > time[-1]: print 'Warning : repeating last ...', it times += [time0] d = np.abs(time - time0) it = np.where(d == d.min())[0][0] ITs += [it] if it % 100 == 0: print 'saving u %s %s' % (fname, time[it].isoformat(' ')) if u[it, ...].ndim == 0: U = np.tile(u[it, ...], (eta, xi)).flatten() else: U = u[it, ...].flatten() [i.write('%8.4f\n' % uu) for uu in U] for it in ITs: if it % 100 == 0: print 'saving v %s %s' % (fname, time[it].isoformat(' ')) if v[it, ...].ndim == 0: V = np.tile(v[it, ...], (eta, xi)).flatten() else: V = v[it, ...].flatten() [i.write('%8.4f\n' % vv) for vv in V] times = np.asarray(times) t0iso = times[0].strftime('%Y%m%d.%H%M%S') t1iso = times[-1].strftime('%Y%m%d.%H%M%S') dt = times[1] - times[0] dth = dt.days * 24. + dt.seconds / 60.**2 print ' -- created swan wind file %s\n' % fname # add to swan INPUT: print '\n' print 'INPGRID WIND CURVILINEAR 0 0 %d %d EXC 9.999000e+003 &' % (xi - 1, eta - 1) print ' NONSTATIONARY %s %.2f HR %s' % (t0iso, dth, t1iso) print 'READINP WIND 1 \'%s\' 4 0 FREE ' % (os.path.join(path, fname)) print '\n'
def create_wind(fname, xi, eta):
    '''Create an empty surface wind file for GNOME (netcdf-3).

    fname -- output file name
    xi    -- lon dimension size
    eta   -- lat dimension size

    Creates curvilinear lon/lat, depth and air_u/air_v variables plus a
    record (unlimited) time dimension. Values are not filled here; the
    caller fills them later. Time units come from the module-level
    variable tunits.
    '''
    # create dims first, then reopen in append mode to add variables:
    nc = netcdf.ncopen(fname, 'w', version=3)
    nc.add_dim('lon', xi)
    nc.add_dim('lat', eta)
    nc.add_dim('time', 0)  # unlimited/record dimension
    nc.close()

    nc = netcdf.ncopen(fname, 'a')

    # lon
    v = nc.add_var('lon', 'double', ('lat', 'lon'))
    v.add_att('long_name', 'Longitude')
    v.add_att('units', 'degrees_east')
    v.add_att('standard_name', 'longitude')
    v.add_att('point_spacing', 'even')

    # lat
    v = nc.add_var('lat', 'double', ('lat', 'lon'))
    v.add_att('long_name', 'Latitude')
    v.add_att('units', 'degrees_north')
    v.add_att('standard_name', 'latitude')
    v.add_att('point_spacing', 'even')

    # depth
    v = nc.add_var('depth', 'double', ('lat', 'lon'))
    v.add_att('long_name', 'Bathymetry')
    v.add_att('units', 'meters')
    v.add_att('positive', 'down')
    v.add_att('standard_name', 'depth')

    # u
    v = nc.add_var('air_u', 'double', ('time', 'lat', 'lon'),
                   fill_value=-9.9999e+32)
    v.add_att('long_name', 'Eastward Wind')
    v.add_att('units', 'm/s')
    v.add_att('scale_factor', 1.)
    v.add_att('add_offset', 0.)
    v.add_att('standard_name', 'eastward_wind')

    # v
    v = nc.add_var('air_v', 'double', ('time', 'lat', 'lon'),
                   fill_value=-9.9999e+32)
    v.add_att('long_name', 'Northward Wind')
    v.add_att('units', 'm/s')
    v.add_att('scale_factor', 1.)
    v.add_att('add_offset', 0.)
    v.add_att('standard_name', 'northward_wind')

    # time
    v = nc.add_var('time', 'double', ('time',))
    v.add_att('long_name', 'Valid Time')
    v.add_att('units', tunits)
    v.add_att('standard_name', 'time')

    # global attributes; fix: 'Conventions' was misspelled 'Convetions',
    # which hides the CF conformance declaration from readers (create_uv
    # in this file spells it correctly):
    nc.add_att('Conventions', 'CF-1.3')
    nc.add_att('grid_type', 'curvilinear')
    nc.add_att('title', 'forecast')
    nc.close()
def frc2gnome(fname, frc, grd, xylim=False, dates=False, ij=(1, 1), **kargs):
    '''
    Creates GNOME wind file from a ROMS wind forcing file.

    fname -- output file (created with create_wind)
    frc   -- forcing file with the wind variables
    grd   -- grid file with lon/lat (and optionally angle)
    xylim -- spatial limits (x0,x1,y0,y1)
    dates -- time limits (date0,date1)
    ij    -- subsampling step (deta,dxi)

    kargs:
      t[u,v]var   -- names of time/u/v variables
      t[x,y]dim   -- names of time/x/y dimensions
      x[y,ang]var -- names of lon/lat/angle grid variables

    Ex:
      .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
    '''
    deta, dxi = ij

    # default variable/dimension names (ROMS conventions):
    tvar = 'time'
    uvar = 'Uwind'
    vvar = 'Vwind'
    #tvar='bulk_time'
    #uvar='uwnd'
    #vvar='vwnd'

    tdim = 'time'
    #tdim='bulk_time'
    xdim = 'xi_rho'
    ydim = 'eta_rho'

    xvar = 'lon_rho'
    yvar = 'lat_rho'
    angvar = 'angle'

    # overrides from kargs:
    if 'tvar' in kargs.keys(): tvar = kargs['tvar']
    if 'uvar' in kargs.keys(): uvar = kargs['uvar']
    if 'vvar' in kargs.keys(): vvar = kargs['vvar']

    if 'tdim' in kargs.keys(): tdim = kargs['tdim']
    if 'xdim' in kargs.keys(): xdim = kargs['xdim']
    if 'ydim' in kargs.keys(): ydim = kargs['ydim']

    if 'xvar' in kargs.keys(): xvar = kargs['xvar']
    if 'yvar' in kargs.keys(): yvar = kargs['yvar']
    if 'angvar' in kargs.keys(): angvar = kargs['angvar']

    dims = netcdf.fdim(grd)
    xi, eta = dims[xdim], dims[ydim]
    xi0, eta0 = xi, eta

    ncg = netcdf.ncopen(grd)

    nc0 = netcdf.ncopen(frc)
    try:
        t = netcdf.nctime(nc0, tvar)
    except:
        # time variable without usable units: fall back to the
        # module-level year0 as the time origin
        t = netcdf.use(nc0, tvar)
        t = netcdf.num2date(t, 'days since %d-01-01' % year0)

    time = netcdf.date2num(t, tunits)

    x0 = netcdf.use(grd, xvar)
    y0 = netcdf.use(grd, yvar)
    if x0.ndim == 1: x0, y0 = np.meshgrid(x0, y0)

    if angvar:
        ang = netcdf.use(grd, angvar)

    # spatial subdomain (full domain by default):
    if not xylim is False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    # slice strings for netcdf.use:
    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    # final (subsampled) output sizes:
    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))

    # create file:
    create_wind(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]

    nc.vars['lon'][:] = x
    nc.vars['lat'][:] = y
    if angvar:
        ang = ang[j1:j2:deta, i1:i2:dxi]

    # n counts output records; input records outside dates are skipped
    n = -1
    for it in range(len(time)):
        if not dates is False:
            d0, d1 = dates
            if t[it] < d0 or t[it] >= d1:
                continue

        n += 1

        u = netcdf.use(nc0, uvar, **{xdim: XI, ydim: ETA, tdim: it})
        v = netcdf.use(nc0, vvar, **{xdim: XI, ydim: ETA, tdim: it})

        # rotate uv:
        if angvar:
            print('rotating ...')
            u, v = calc.rot2d(u, v, -ang)

        nc.vars['time'][n] = time[it]
        print('filling uv', n, t[it])
        nc.vars['air_u'][n, ...] = u
        nc.vars['air_v'][n, ...] = v

    nc.close()
    nc0.close()
    ncg.close()
def his2gnome(fname, his, grd=False, nomask=False, gshhsMask=True, xylim=False,
              dates=False, ij=(1, 1)):
    '''
    Creates GNOME surface currents file from a ROMS history file.

    fname -- output file (created with create_uv)
    his   -- ROMS history file
    grd   -- grid file; his is used if not provided
    nomask -- set the output mask to all water
    gshhsMask -- if True, the high res mask file mask_gshhs.npy will be
      created at 1st usage. Mask is based on high (h) resolution gshhs
      data which must be available (env variable GSHHS_MASK must be set).
    xylim -- spatial limits (x0,x1,y0,y1)
    dates -- time limits (date0,date1)
    ij    -- subsampling step (deta,dxi)

    Ex:
      his2gnome(out,his,grd,dates=dates,ij=(2,2))
    '''
    if not grd: grd = his

    deta, dxi = ij

    dims = netcdf.fdim(his)
    xi, eta = dims['xi_rho'], dims['eta_rho']
    xi0, eta0 = xi, eta

    nc0 = netcdf.ncopen(his)
    time = netcdf.nctime(nc0, 'ocean_time')
    # for roms agrif:
    #t=netcdf.use(nc0,'scrum_time')
    #time=netcdf.num2date(t,'seconds since %d-01-01' % year0)

    x0 = netcdf.use(grd, 'lon_rho')
    y0 = netcdf.use(grd, 'lat_rho')
    ang = netcdf.use(grd, 'angle')

    # spatial subdomain (full domain by default):
    if not xylim is False:
        xlim = xylim[:2]
        ylim = xylim[2:]
        i1, i2, j1, j2 = calc.ij_limits(x0, y0, xlim, ylim)
        print(i1, i2, j1, j2)
        xi = i2 - i1
        eta = j2 - j1
    else:
        i1, i2 = 0, xi
        j1, j2 = 0, eta

    XI = '%d:%d:%d' % (i1, i2, dxi)
    ETA = '%d:%d:%d' % (j1, j2, deta)

    # final (subsampled) output sizes:
    xi = len(range(i1, i2, dxi))
    eta = len(range(j1, j2, deta))

    # create file:
    create_uv(fname, xi, eta)

    nc = netcdf.ncopen(fname, 'a')
    for v0, v in ('lon_rho', 'lon'), ('lat_rho', 'lat'), ('mask_rho', 'mask'), ('h', 'depth'):
        print('filling %s with %s' % (v, v0))
        nc.vars[v][:] = netcdf.use(grd, v0, xi_rho=XI, eta_rho=ETA)

    if nomask:
        print('NO MASK !!!')
        nc.vars['mask'][:] = 1

    if gshhsMask:
        try:
            # cached mask from a previous run:
            mask = np.load('mask_gshhs.npy')
        except:
            # build the land mask from gshhs coastline polygons:
            mask = 1 + 0 * netcdf.use(nc0, 'mask_rho', xi_rho=XI, eta_rho=ETA)
            mask = mask.astype('bool')
            x = netcdf.use(grd, 'lon_rho', xi_rho=XI, eta_rho=ETA)
            y = netcdf.use(grd, 'lat_rho', xi_rho=XI, eta_rho=ETA)

            from okean import gshhs
            axis = x.min(), x.max(), y.min(), y.max()
            g = gshhs.gshhs(axis, resolution='h', area_thresh=0., max_level=2, clip=True)
            for lon, lat, level in zip(g.lon, g.lat, g.level):
                if level == 1:  # land
                    print('mask ', lon.shape)
                    i = calc.inpolygon(x, y, lon, lat)
                    mask = mask & ~i

            mask.dump('mask_gshhs.npy')

        nc.vars['mask'][:] = mask

    x = x0[j1:j2:deta, i1:i2:dxi]
    y = y0[j1:j2:deta, i1:i2:dxi]
    ang = ang[j1:j2:deta, i1:i2:dxi]

    # n counts output records; input records outside dates are skipped
    n = -1
    for it in range(len(time)):
        if not dates is False:
            d0, d1 = dates
            if time[it] < d0 or time[it] >= d1:
                continue

        n += 1
        U = np.zeros((eta0, xi0), 'f')
        V = np.zeros((eta0, xi0), 'f')

        nc.vars['time'][n] = netcdf.date2num(time[it], tunits)

        # for roms agrif:
        #u=netcdf.use(nc0,'u',time=it,s_rho=-1)
        #v=netcdf.use(nc0,'v',time=it,s_rho=-1)
        u = netcdf.use(nc0, 'u', ocean_time=it, s_rho=-1)
        v = netcdf.use(nc0, 'v', ocean_time=it, s_rho=-1)

        # mask extrap:
        print('mask extrap...')
        u = calc.mask_extrap(x0, y0, np.ma.masked_where(u == 0, u))
        v = calc.mask_extrap(x0, y0, np.ma.masked_where(v == 0, v))

        # average staggered u,v to rho points:
        U[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:])
        U[:, 0] = u[:, 0]
        U[:, -1] = u[:, -1]

        # fix: slice was v[:-1.:] (float slice stop -> TypeError at
        # runtime); the intended row average is v[:-1,:] + v[1:,:]
        V[1:-1, :] = 0.5 * (v[:-1, :] + v[1:, :])
        V[0, :] = v[0, :]
        V[-1, :] = v[-1, :]

        # fix: U,V were subset twice ([j1:j2,i1:i2] then
        # [j1:j2:deta,...]), which offsets the window a second time and
        # mismatches ang's shape when xylim is given; a single subset of
        # the full arrays matches x, y and ang above:
        U = U[j1:j2:deta, i1:i2:dxi]
        V = V[j1:j2:deta, i1:i2:dxi]

        # rotate uv:
        print('rotating ...')
        U, V = calc.rot2d(U, V, -ang)

        print('filling uv', n, time[it])
        nc.vars['u'][n, ...] = U
        nc.vars['v'][n, ...] = V

    nc.close()
    nc0.close()
def frc2gnome(fname,frc,grd,xylim=False,dates=False,ij=(1,1),**kargs):
  '''
  Creates GNOME wind file from a ROMS wind forcing file.

  fname -- output file (created with create_wind)
  frc   -- forcing file with the wind variables
  grd   -- grid file with lon/lat (and optionally angle)
  xylim -- spatial limits (x0,x1,y0,y1)
  dates -- time limits (date0,date1)
  ij    -- subsampling step (deta,dxi)

  kargs:
    t[u,v]var   -- names of time/u/v variables
    t[x,y]dim   -- names of time/x/y dimensions
    x[y,ang]var -- names of lon/lat/angle grid variables

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''
  deta,dxi=ij

  # default variable/dimension names (ROMS conventions):
  tvar='time'
  uvar='Uwind'
  vvar='Vwind'
  #tvar='bulk_time'
  #uvar='uwnd'
  #vvar='vwnd'

  tdim='time'
  #tdim='bulk_time'
  xdim='xi_rho'
  ydim='eta_rho'

  xvar='lon_rho'
  yvar='lat_rho'
  angvar='angle'

  # overrides from kargs:
  if 'tvar' in kargs.keys(): tvar=kargs['tvar']
  if 'uvar' in kargs.keys(): uvar=kargs['uvar']
  if 'vvar' in kargs.keys(): vvar=kargs['vvar']

  if 'tdim' in kargs.keys(): tdim=kargs['tdim']
  if 'xdim' in kargs.keys(): xdim=kargs['xdim']
  if 'ydim' in kargs.keys(): ydim=kargs['ydim']

  if 'xvar' in kargs.keys(): xvar=kargs['xvar']
  if 'yvar' in kargs.keys(): yvar=kargs['yvar']
  if 'angvar' in kargs.keys(): angvar=kargs['angvar']

  dims=netcdf.fdim(grd)
  xi,eta=dims[xdim],dims[ydim]
  xi0,eta0=xi,eta

  ncg=netcdf.ncopen(grd)

  nc0=netcdf.ncopen(frc)
  try:
    t=netcdf.nctime(nc0,tvar)
  except:
    # time variable without usable units: fall back to the module-level
    # year0 as the time origin
    t=netcdf.use(nc0,tvar)
    t=netcdf.num2date(t,'days since %d-01-01' % year0)

  time=netcdf.date2num(t,tunits)

  x0=netcdf.use(grd,xvar)
  y0=netcdf.use(grd,yvar)
  if x0.ndim==1: x0,y0=np.meshgrid(x0,y0)

  if angvar:
    ang=netcdf.use(grd,angvar)

  # spatial subdomain (full domain by default):
  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  # slice strings for netcdf.use:
  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  # final (subsampled) output sizes:
  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))

  # create file:
  create_wind(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')

  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]

  nc.vars['lon'][:]=x
  nc.vars['lat'][:]=y
  if angvar:
    ang=ang[j1:j2:deta,i1:i2:dxi]

  # n counts output records; input records outside dates are skipped
  n=-1
  for it in range(len(time)):
    if not dates is False:
      d0,d1=dates
      if t[it]<d0 or t[it]>=d1: continue

    n+=1

    u=netcdf.use(nc0,uvar,**{xdim:XI,ydim:ETA,tdim:it})
    v=netcdf.use(nc0,vvar,**{xdim:XI,ydim:ETA,tdim:it})

    # rotate uv:
    if angvar:
      print 'rotating ...'
      u,v=calc.rot2d(u,v,-ang)

    nc.vars['time'][n]=time[it]
    print 'filling uv',n,t[it]
    nc.vars['air_u'][n,...]=u
    nc.vars['air_v'][n,...]=v

  nc.close()
  nc0.close()
  ncg.close()
def his2gnome(fname,his,grd=False,nomask=False,gshhsMask=True,xylim=False,dates=False,ij=(1,1)): ''' Creates GNOME wind file Ex: his2gnome(out,his,grd,dates=dates,ij=(2,2)) if gshhsMask, the high res mask file mask_gshhs.npy will be created at 1st usage. Mask is based on high (h) resolution gshhs data which must be available (env variable GSHHS_MASK must be set). ''' if not grd: grd=his deta,dxi=ij dims=netcdf.fdim(his) xi,eta=dims['xi_rho'],dims['eta_rho'] xi0,eta0=xi,eta nc0=netcdf.ncopen(his) time=netcdf.nctime(nc0,'ocean_time') # for roms agrif: #t=netcdf.use(nc0,'scrum_time') #time=netcdf.num2date(t,'seconds since %d-01-01' % year0) x0=netcdf.use(grd,'lon_rho') y0=netcdf.use(grd,'lat_rho') ang=netcdf.use(grd,'angle') if not xylim is False: xlim=xylim[:2] ylim=xylim[2:] i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim) print i1,i2,j1,j2 xi=i2-i1 eta=j2-j1 else: i1,i2=0,xi j1,j2=0,eta XI ='%d:%d:%d' %(i1,i2,dxi) ETA ='%d:%d:%d' %(j1,j2,deta) xi=len(range(i1,i2,dxi)) eta=len(range(j1,j2,deta)) # create file: create_uv(fname,xi,eta) nc=netcdf.ncopen(fname,'a') for v0,v in ('lon_rho','lon'),('lat_rho','lat'),('mask_rho','mask'),('h','depth'): print 'filling %s with %s' % (v,v0) nc.vars[v][:]=netcdf.use(grd,v0,xi_rho=XI,eta_rho=ETA) if nomask: print 'NO MASK !!!' 
nc.vars['mask'][:]=1 if gshhsMask: try: mask=np.load('mask_gshhs.npy') except: mask=1+0*netcdf.use(nc0,'mask_rho',xi_rho=XI,eta_rho=ETA) mask=mask.astype('bool') x=netcdf.use(grd,'lon_rho',xi_rho=XI,eta_rho=ETA) y=netcdf.use(grd,'lat_rho',xi_rho=XI,eta_rho=ETA) from okean import gshhs axis=x.min(),x.max(),y.min(),y.max() g=gshhs.gshhs(axis, resolution='h',area_thresh=0., max_level=2,clip=True) for lon, lat, level in zip(g.lon, g.lat, g.level): if level == 1: # land print 'mask ',lon.shape i=calc.inpolygon(x,y,lon,lat) mask=mask & ~i mask.dump('mask_gshhs.npy') nc.vars['mask'][:]=mask x=x0[j1:j2:deta,i1:i2:dxi] y=y0[j1:j2:deta,i1:i2:dxi] ang=ang[j1:j2:deta,i1:i2:dxi] n=-1 for it in range(len(time)): if not dates is False: d0,d1=dates if time[it]<d0 or time[it]>=d1: continue n+=1 U=np.zeros((eta0,xi0),'f') V=np.zeros((eta0,xi0),'f') nc.vars['time'][n]=netcdf.date2num(time[it],tunits) # for roms agrif: #u=netcdf.use(nc0,'u',time=it,s_rho=-1) #v=netcdf.use(nc0,'v',time=it,s_rho=-1) u=netcdf.use(nc0,'u',ocean_time=it,s_rho=-1) v=netcdf.use(nc0,'v',ocean_time=it,s_rho=-1) # mask extrap: print 'mask extrap...' u=calc.mask_extrap(x0,y0,np.ma.masked_where(u==0,u)) v=calc.mask_extrap(x0,y0,np.ma.masked_where(v==0,v)) U[:,1:-1]=0.5*(u[:,:-1]+u[:,1:]) U[:,0]=u[:,0] U[:,-1]=u[:,-1] V[1:-1,:]=0.5*(v[:-1.:]+v[1:,:]) V[0,:]=v[0,:] V[-1,:]=v[-1,:] U=U[j1:j2,i1:i2] V=V[j1:j2,i1:i2] U=U[j1:j2:deta,i1:i2:dxi] V=V[j1:j2:deta,i1:i2:dxi] # rotate uv: print 'rotating ...' U,V=calc.rot2d(U,V,-ang) print 'filling uv', n, time[it] nc.vars['u'][n,...]=U nc.vars['v'][n,...]=V nc.close() nc0.close()
def create_uv(fname, xi, eta):
    '''Create an empty surface currents file for GNOME (netcdf-3).'''
    # create dims first, then reopen in append mode to add variables:
    nc = netcdf.ncopen(fname, 'w', version=3)
    nc.add_dim('lon', xi)
    nc.add_dim('lat', eta)
    nc.add_dim('time', 0)
    nc.close()

    nc = netcdf.ncopen(fname, 'a')

    # static 2-d variables, (name, attributes):
    static = (
        ('lon', (('long_name', 'Longitude'),
                 ('units', 'degrees_east'),
                 ('standard_name', 'longitude'))),
        ('lat', (('long_name', 'Latitude'),
                 ('units', 'degrees_north'),
                 ('standard_name', 'latitude'))),
        ('depth', (('long_name', 'Bathymetry'),
                   ('units', 'meters'),
                   ('positive', 'down'),
                   ('standard_name', 'depth'))),
        ('mask', (('long_name', 'Land Mask'),
                  ('units', 'nondimensional'),
                  ('standard_name', 'land_binary_mask'))),
    )
    for name, atts in static:
        v = nc.add_var(name, 'double', ('lat', 'lon'))
        for attname, attval in atts:
            v.add_att(attname, attval)

    # surface currents, one record per time:
    currents = (
        ('u', 'Eastward Water Velocity',
         'surface_eastward_sea_water_velocity'),
        ('v', 'Northward Water Velocity',
         'surface_northward_sea_water_velocity'),
    )
    for name, long_name, std_name in currents:
        v = nc.add_var(name, 'double', ('time', 'lat', 'lon'),
                       fill_value=-99999.)
        v.add_att('long_name', long_name)
        v.add_att('units', 'm/s')
        v.add_att('missing_value', -99999.)
        v.add_att('scale_factor', 1.)
        v.add_att('add_offset', 0.)
        v.add_att('standard_name', std_name)

    # time:
    v = nc.add_var('time', 'double', ('time',))
    v.add_att('long_name', 'Time')
    v.add_att('units', tunits)
    v.add_att('standard_name', 'time')

    # global attributes:
    for attname, attval in (('file_type', 'Full_Grid'),
                            ('Conventions', 'COARDS'),
                            ('grid_type', 'curvilinear'),
                            ('z-type', 's-coordinate'),
                            ('model', 'ROMS'),
                            ('title', 'forecast')):
        nc.add_att(attname, attval)

    nc.close()
def load_data(f, quiet=0, **kargs):
    '''
    Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
    netcdf file or opendap server. Also loads lon,lat, depth, and time.

    If f is a file, it must include the 1d variables lon,lat and depth;
    the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
    ie, each file must contain data for a single time. The file must
    also contain the variable time.

    If f is a opendap address, it must contain also all these variables
    or the ones defined in the input karg settings (DataAccess object)

    To deal with the case of variables in different files/opendap
    addresses, f can also be a dictionary with keys the variables and
    values the files or opendap addresses. In this case, the keys must
    be:
      - temp
      - salt
      - u
      - v
      - ssh
      - misc, for lon, lat, depth, time and dimensions
        or xy for lon,lat and x,ydim; z for depth and zdim, time for
        time

    The output data (dict) is suitable to be used by data2roms, which
    interpolates the data to ROMS 3d grid. Also outputs an error/status
    string.

    kargs:
      inds, dict with dimension names/values (where time dim can be
        integer or datetime)
      settings, DataAccess object
      extra, extra misc vars to load [(outKey0,fileVar0),...]
      t_units, units of variable time, by default the att units is used
    '''
    sett = DataAccess()
    inds = {}
    extra = []
    t_units = []
    if 'settings' in kargs.keys(): sett = kargs['settings']
    if 'inds' in kargs.keys(): inds = kargs['inds']
    if 'extra' in kargs.keys(): extra = kargs['extra']
    if 't_units' in kargs.keys(): t_units = kargs['t_units']

    res = {}
    msg = ''

    # local (non-dict, non-url) sources must exist on disk:
    if not isinstance(f, dict) and not f.startswith('http') and not isfile(f):
        msg = 'file not found %s' % f
        if not quiet: print msg
        return res, msg

    # load nc files:
    # a single source serves every variable; xy/z/time default to misc
    if not isinstance(f, dict):
        f = {'temp': f, 'salt': f, 'u': f, 'v': f, 'ssh': f, 'misc': f}

    if not f.has_key('xy'): f['xy'] = f['misc']
    if not f.has_key('z'): f['z'] = f['misc']
    if not f.has_key('time'): f['time'] = f['misc']

    # open each distinct source only once (filesUsed/ncUsed act as a
    # cache of already-open handles):
    filesUsed = []
    ncUsed = []
    for i in f.keys():
        if not quiet: print '(%s) loading from %s' % (i.ljust(5), f[i])

        if i == 'temp':
            if f[i] in filesUsed:
                ncTemp = ncUsed[filesUsed.index(f[i])]
            else:
                ncTemp = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTemp]
        elif i == 'salt':
            if f[i] in filesUsed:
                ncSalt = ncUsed[filesUsed.index(f[i])]
            else:
                ncSalt = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSalt]
        elif i == 'u':
            if f[i] in filesUsed:
                ncU = ncUsed[filesUsed.index(f[i])]
            else:
                ncU = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncU]
        elif i == 'v':
            if f[i] in filesUsed:
                ncV = ncUsed[filesUsed.index(f[i])]
            else:
                ncV = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncV]
        elif i == 'ssh':
            if f[i] in filesUsed:
                ncSsh = ncUsed[filesUsed.index(f[i])]
            else:
                ncSsh = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncSsh]
        elif i == 'xy':
            if f[i] in filesUsed:
                ncXy = ncUsed[filesUsed.index(f[i])]
            else:
                ncXy = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncXy]
        elif i == 'z':
            if f[i] in filesUsed:
                ncZ = ncUsed[filesUsed.index(f[i])]
            else:
                ncZ = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncZ]
        elif i == 'time':
            if f[i] in filesUsed:
                ncTime = ncUsed[filesUsed.index(f[i])]
            else:
                ncTime = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncTime]
        elif i == 'misc':
            if f[i] in filesUsed:
                ncMisc = ncUsed[filesUsed.index(f[i])]
            else:
                ncMisc = netcdf.ncopen(f[i])
                filesUsed += [f[i]]
                ncUsed += [ncMisc]

    # load dims:
    if not quiet: print ' loading dims...'
    dimsXy = netcdf.fdim(ncXy)
    dimsZ = netcdf.fdim(ncZ)

    res['NX'] = dimsXy[sett.xdim]
    res['NY'] = dimsXy[sett.ydim]
    ###if sett.z_name:
    if sett.zdim:
        res['NZ'] = dimsZ[sett.zdim]
    else:
        res['NZ'] = 1

    # about horizontal inds:
    # a 2-element non-string entry for xdim means lon/lat limits, to be
    # converted here into index slice strings:
    if inds.has_key(sett.xdim) and len(inds[sett.xdim]) == 2 and not isinstance(inds[sett.xdim], basestring):
        if not quiet: print ' calc horizontal inds...'
        xlim = inds[sett.xdim]
        ylim = inds[sett.ydim]

        inds.pop(sett.xdim)
        inds.pop(sett.ydim)

        lon = netcdf.use(ncXy, sett.x_name, **inds)
        if np.any(lon > 360): lon = np.mod(lon, 360.)
        lat = netcdf.use(ncXy, sett.y_name, **inds)
        i0, i1, j0, j1 = calc.ij_limits(lon, lat, xlim, ylim, margin=3)
        inds[sett.xdim] = '%d:%d' % (i0, i1)
        inds[sett.ydim] = '%d:%d' % (j0, j1)

    if not quiet: print ' loading lon, lat, depth...'
    res['lon'] = netcdf.use(ncXy, sett.x_name, **inds)
    if np.any(res['lon'] > 360): res['lon'] = np.mod(res['lon'], 360.)
    res['lat'] = netcdf.use(ncXy, sett.y_name, **inds)
    if sett.z_name:
        # depth stored positive-down in file; returned negative (z)
        res['depth'] = -netcdf.use(ncZ, sett.z_name, **inds)
    else: res['depth'] = False

    if res['lon'].size != res['lat'].size:
        res['lon'], res['lat'] = np.meshgrid(res['lon'], res['lat'])
        # needed for griddata, later

    # update nx,ny:
    if inds.has_key(sett.xdim):
        res['NY'], res['NX'] = res['lon'].shape

    # extra misc vars:
    if len(extra):
        for outKey, fileVar in extra:
            if not quiet:
                print ' loading extra misc... %s %s' % (outKey, fileVar)
            res[outKey] = netcdf.use(ncMisc, fileVar, **inds)

    # time:
    # file may have one or several times. If several, time dim must be given
    # with kargs inds!
    # but file may also have no time dim or time name !
    if sett.time_name:
        if not quiet: print ' loading time...'
        if t_units:
            times = netcdf.use(ncTime, sett.time_name)
            times = netcdf.num2date(times, t_units)
        else:
            times = netcdf.nctime(ncTime, sett.time_name)

        if inds.has_key(sett.tdim):
            try:
                tind = dts.parse_date(inds[sett.tdim])
            except:
                tind = inds[sett.tdim]  # is an integer, for instance

            if isinstance(tind, datetime.datetime):
                # datetime given: convert it to its index in times
                tind, = np.where(times == tind)
                if tind.size:
                    tind = tind[0]
                    inds[sett.tdim] = tind  # update inds to extract other variables
                else:
                    Msg = 'date not found'
                    msg += '\n' + Msg
                    return res, msg + ' ERROR'

            date = times[tind]
            # tind may select one date or a range of dates:
            try:
                len(date)
                ndates = True
            except:
                ndates = False

            if ndates:
                if not quiet:
                    print ' tind, date= len=%d: %d to %d, %s to %s' % (len(date), tind[0], tind[-1], date[0].isoformat(' '), date[-1].isoformat(' '))
            else:
                if not quiet:
                    print ' tind, date= %d %s' % (tind, date.isoformat(' '))

        elif times.size == 1:
            date = times[0]
            if not quiet: print ' date= %s' % date.isoformat(' ')
        else:  # must provide tind as input!!
            Msg = 'several dates in file... provice tind!'
            msg += '\n' + Msg
            return res, msg + ' ERROR'

        res['date'] = date
    else:
        if not quiet: print ' warning: not using time !!'
        res['date'] = 0

    # placeholders returned when a variable is missing from its source:
    empty3d = np.zeros([res['NZ'], res['NY'], res['NX']])
    empty2d = np.zeros([res['NY'], res['NX']])

    if 'temp' in f.keys():
        if not quiet: print ' loading temp...'
        if sett.temp_name in ncTemp.varnames:
            res['temp'] = netcdf.use(ncTemp, sett.temp_name, **inds)
        else:
            Msg = 'var %s not found' % 'temp'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['temp'] = empty3d

    if 'salt' in f.keys():
        if not quiet: print ' loading salt...'
        if sett.salt_name in ncSalt.varnames:
            res['salt'] = netcdf.use(ncSalt, sett.salt_name, **inds)
        else:
            Msg = 'var %s not found' % 'salt'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['salt'] = empty3d

    if 'u' in f.keys():
        if not quiet: print ' loading u...'
        if sett.u_name in ncU.varnames:
            res['u'] = netcdf.use(ncU, sett.u_name, **inds)
        else:
            Msg = 'var %s not found' % 'u'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['u'] = empty3d

    if 'v' in f.keys():
        if not quiet: print ' loading v...'
        if sett.v_name in ncV.varnames:
            res['v'] = netcdf.use(ncV, sett.v_name, **inds)
        else:
            Msg = 'var %s not found' % 'v'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['v'] = empty3d

    if 'ssh' in f.keys():
        if not quiet: print ' loading ssh...'
        if sett.ssh_name in ncSsh.varnames:
            res['ssh'] = netcdf.use(ncSsh, sett.ssh_name, **inds)
        else:
            Msg = 'var %s not found' % 'ssh'
            msg += '\n' + Msg
            if not quiet: print Msg
            res['ssh'] = empty2d

    # close all distinct open handles (best-effort):
    for nc in ncUsed:
        try:
            nc.close()
        except:
            pass

    return res, msg
def narr_file_data(fname, xlim=False, ylim=False, quiet=False): ''' Returns bulk data from one NARR file ''' out = {} # loading grid: if 0: if not quiet: print ' reading lon,lat from file %s' % grd nc = netcdf.ncopen(grd) x = nc.vars['East_longitude_0-360'][0, ...] - 360. y = nc.vars['Latitude_-90_to_+90'][0, ...] # time always 1 !! nc.close() else: if not quiet: print ' reading lon,lat from file %s' % grdTxt x, y = load_grid() #x=x-360. x = -x ny, nx = x.shape if (xlim, ylim) == (False, False): i0, i1, j0, j1 = 0, nx, 0, ny else: i0, i1, j0, j1 = calc.ij_limits(x, y, xlim, ylim, margin=0) x = x[j0:j1, i0:i1] y = y[j0:j1, i0:i1] try: nc = netcdf.ncopen(fname) except: return {} xx = str(i0) + ':' + str(i1) yy = str(j0) + ':' + str(j1) tdim = netcdf.fdim(nc, 'time1') if tdim != 1: print 'WARNING: tdim !=1 !!!!!!' # T surface [K->C] if not quiet: print ' --> T air' tair = netcdf.use(nc, 'Temperature_surface', time1=0, x=xx, y=yy) tair = tair - 273.15 out['tair'] = cb.Data(x, y, tair, 'C') # R humidity [% -> 0--1] if not quiet: print ' --> R humidity' rhum = netcdf.use(nc, 'Relative_humidity', time1=0, x=xx, y=yy) out['rhum'] = cb.Data(x, y, rhum / 100., '0--1') # surface pressure [Pa] if not quiet: print ' --> Surface pressure' pres = netcdf.use(nc, 'Pressure_surface', time1=0, x=xx, y=yy) out['pres'] = cb.Data(x, y, pres, 'Pa') # P rate [kg m-2 s-1 -> cm/d] if not quiet: print ' --> P rate' prate = netcdf.use(nc, 'Precipitation_rate', time1=0, x=xx, y=yy) prate = prate * 86400 * 100 / 1000. 
out['prate'] = cb.Data(x, y, prate, 'cm/d') # Net shortwave flux [ W m-2] if not quiet: print ' --> Net shortwave flux' if not quiet: print ' SW down' sw_down = netcdf.use(nc, 'Downward_shortwave_radiation_flux', time1=0, x=xx, y=yy) if not quiet: print ' SW up' sw_up = netcdf.use(nc, 'Upward_short_wave_radiation_flux_surface', time1=0, x=xx, y=yy) sw_net = sw_down - sw_up out['radsw'] = cb.Data(x, y, sw_net, 'W m-2', info='positive downward') # Net longwave flux [W/m^2] if not quiet: print ' --> Net longwave flux' if not quiet: print ' LW down' lw_down = netcdf.use(nc, 'Downward_longwave_radiation_flux', time1=0, x=xx, y=yy) if not quiet: print ' LW up' lw_up = netcdf.use(nc, 'Upward_long_wave_radiation_flux_surface', time1=0, x=xx, y=yy) lw_net = lw_down - lw_up out['radlw'] = cb.Data(x, y, -lw_net, 'W m-2', info='positive upward') # downward lw: out['dlwrf'] = cb.Data(x, y, -lw_down, 'W m-2', info='negative... downward') # U and V wind speed 10m if not quiet: print ' --> U and V wind' # vertical dim is height_above_ground1: 10 and 30 m uwnd = netcdf.use(nc, 'u_wind_height_above_ground', height_above_ground1=0, time1=0, x=xx, y=yy) vwnd = netcdf.use(nc, 'v_wind_height_above_ground', height_above_ground1=0, time1=0, x=xx, y=yy) if not quiet: print ' --> calc wind speed and stress' speed = np.sqrt(uwnd**2 + vwnd**2) taux, tauy = air_sea.wind_stress(uwnd, vwnd) out['wspd'] = cb.Data(x, y, speed, 'm s-1') out['uwnd'] = cb.Data(x, y, uwnd, 'm s-1') out['vwnd'] = cb.Data(x, y, vwnd, 'm s-1') out['sustr'] = cb.Data(x, y, taux, 'Pa') out['svstr'] = cb.Data(x, y, tauy, 'Pa') # Cloud cover [0--100 --> 0--1]: if not quiet: print ' --> Cloud cover' clouds = netcdf.use(nc, 'Total_cloud_cover', time1=0, x=xx, y=yy) out['cloud'] = cb.Data(x, y, clouds / 100., 'fraction (0--1)') nc.close() return out
def create_uv(fname, xi, eta):
    '''Surface currents file for GNOME'''
    # dimensions are created with the file in write mode; the variables
    # are then added with the file reopened in append mode
    nc = netcdf.ncopen(fname, 'w', version=3)
    nc.add_dim('lon', xi)
    nc.add_dim('lat', eta)
    nc.add_dim('time', 0)
    nc.close()

    nc = netcdf.ncopen(fname, 'a')

    def add_variable(vname, dims, atts, **opts):
        # add one double variable and its attributes (insertion order kept)
        var = nc.add_var(vname, 'double', dims, **opts)
        for attname, attval in atts:
            var.add_att(attname, attval)

    hgrid = ('lat', 'lon')

    add_variable('lon', hgrid, [
        ('long_name', 'Longitude'),
        ('units', 'degrees_east'),
        ('standard_name', 'longitude')])

    add_variable('lat', hgrid, [
        ('long_name', 'Latitude'),
        ('units', 'degrees_north'),
        ('standard_name', 'latitude')])

    add_variable('depth', hgrid, [
        ('long_name', 'Bathymetry'),
        ('units', 'meters'),
        ('positive', 'down'),
        ('standard_name', 'depth')])

    add_variable('mask', hgrid, [
        ('long_name', 'Land Mask'),
        ('units', 'nondimensional'),
        ('standard_name', 'land_binary_mask')])

    add_variable('u', ('time',) + hgrid, [
        ('long_name', 'Eastward Water Velocity'),
        ('units', 'm/s'),
        ('missing_value', -99999.),
        ('scale_factor', 1.),
        ('add_offset', 0.),
        ('standard_name', 'surface_eastward_sea_water_velocity')],
        fill_value=-99999.)

    add_variable('v', ('time',) + hgrid, [
        ('long_name', 'Northward Water Velocity'),
        ('units', 'm/s'),
        ('missing_value', -99999.),
        ('scale_factor', 1.),
        ('add_offset', 0.),
        ('standard_name', 'surface_northward_sea_water_velocity')],
        fill_value=-99999.)

    # NOTE(review): tunits is a module-level global, presumably set by
    # the caller before this function runs -- confirm
    add_variable('time', ('time',), [
        ('long_name', 'Time'),
        ('units', tunits),
        ('standard_name', 'time')])

    # global attributes:
    for attname, attval in [('file_type', 'Full_Grid'),
                            ('Conventions', 'COARDS'),
                            ('grid_type', 'curvilinear'),
                            ('z-type', 's-coordinate'),
                            ('model', 'ROMS'),
                            ('title', 'forecast')]:
        nc.add_att(attname, attval)

    nc.close()
def roms2swan_wind(frc,date0,date1,fname='swan_wind.dat',**kargs): tname='wind_time' uname='Uwind' vname='Vwind' grd=False # needed if wind is 1d dt=1 # hours path='' if 'tname' in kargs.keys(): tname=kargs['tname'] if 'uname' in kargs.keys(): uname=kargs['uname'] if 'vname' in kargs.keys(): vname=kargs['vname'] if 'grd' in kargs.keys(): grd =kargs['grd'] if 'dt' in kargs.keys(): dt =kargs['dt'] if 'path' in kargs.keys(): path =kargs['path'] print 'wind: loading time ...' time=netcdf.nctime(frc,tname) #time=np.load('tfile') #cond=(time>=date0)&(time<=date1) cond=(time>=date0)&(time<=date1+datetime.timedelta(days=1)) # add one day at the end, just to avoid the "repeating last" time=time[cond] d=np.diff(pl.date2num(time)) print 'current max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins'%(d.max()*24, d.min()*24, d.max()*24*60, d.min()*24*60) # time=time[::dt] # d=np.diff(pl.date2num(time)) # print ' final max and min dt = %6.2f %6.2f hrs = %6.2f %6.2f mins'%(d.max()*24, d.min()*24, d.max()*24*60, d.min()*24*60) print 'wind: loading u ...' nc=netcdf.ncopen(frc) u=netcdf.var(nc,uname) print 'wind: loading v ...' v=netcdf.var(nc,uname) # u=u[cond,...][::dt,...] # v=v[cond,...][::dt,...] u=u[cond,...] v=v[cond,...] nc.close() if u.ndim==1: if not grd: print 'must provide grd karg!' 
return nt=u.size eta=netcdf.fdim(grd)['eta_rho'] xi=netcdf.fdim(grd)['xi_rho'] else: nt,eta,xi=u.shape # array may be too big, so do this later (for each it) # # u=np.tile(u[:,np.newaxis,np.newaxis],(1,eta,xi)) # v=np.tile(v[:,np.newaxis,np.newaxis],(1,eta,xi)) i=open(fname,'w') times=[] time0=time[0]-datetime.timedelta(hours=dt) ITs=[] for it in range(nt): time0=time0+datetime.timedelta(hours=dt) if time0>date1: break if time0>time[-1]: print 'Warning : repeating last ...', it times+=[time0] d=np.abs(time-time0) it=np.where(d==d.min())[0][0] ITs+=[it] if it%100==0: print 'saving u %s %s'%(fname,time[it].isoformat(' ')) if u[it,...].ndim==0: U=np.tile(u[it,...],(eta,xi)).flatten() else: U=u[it,...].flatten() [i.write('%8.4f\n'%uu) for uu in U] for it in ITs: if it%100==0: print 'saving v %s %s'%(fname,time[it].isoformat(' ')) if v[it,...].ndim==0: V=np.tile(v[it,...],(eta,xi)).flatten() else: V=v[it,...].flatten() [i.write('%8.4f\n'%vv) for vv in V] times=np.asarray(times) t0iso=times[0].strftime('%Y%m%d.%H%M%S') t1iso=times[-1].strftime('%Y%m%d.%H%M%S') dt=times[1]-times[0] dth=dt.days*24. + dt.seconds/60.**2 print ' -- created swan wind file %s\n'%fname # add to swan INPUT: print '\n' print 'INPGRID WIND CURVILINEAR 0 0 %d %d EXC 9.999000e+003 &'%(xi-1,eta-1) print ' NONSTATIONARY %s %.2f HR %s'%(t0iso,dth,t1iso) print 'READINP WIND 1 \'%s\' 4 0 FREE '%(os.path.join(path,fname)) print '\n'
def s_params(nc, show=0): """ Get s-coordinates parameters from ROMS file Gets from ROMS output files the data needed to calculate vertical s-coordinate levels. This function looks for the required values among variables, file dimensions and file attributes. mma 28-7-2007; vs and vt added aug 2013 (Guayaquil) """ if isinstance(nc, basestring): nc = netcdf.ncopen(nc) theta_s = theta_b = hc = N = False theta_s_src = theta_b_src = hc_src = N_src = '' vt = 1 vs = 1 if 'Vtransform' in nc.varnames: vt = nc.vars['Vtransform'][:] if 'Vstretching' in nc.varnames: vs = nc.vars['Vstretching'][:] v = 'theta_s' if v in nc.atts.keys(): theta_s = nc.atts[v].value theta_s_source = 'file attribute' elif v in nc.varnames: theta_s = nc.vars[v][:] theta_s_source = 'variable' v = 'theta_b' if v in nc.atts.keys(): theta_b = nc.atts[v].value theta_b_source = 'file attribute' elif v in nc.varnames: theta_b = nc.vars[v][:] theta_b_source = 'variable' v = 'hc' if v in nc.atts.keys(): hc = nc.atts[v].value hc_source = 'file attribute' elif v in nc.varnames: hc = nc.vars[v][:] hc_source = 'variable' else: # if vtransform=1 --> hc=min(Tcline,hmin) # else: hc=Tcline if vt == 1: hmin = [] Tcline = [] v = 'Tcline' if v in nc.atts.keys(): Tcline = nc.atts[v].value elif v in nc.varnames: Tcline = nc.vars[v][:] if 'h' in nc.varnames: hmin = nc.vars['h'][:].min() if hmin and Tcline: #hc=np.min([hmin,Tcline]) hc = np.min([ np.max([hmin, 0]), Tcline ]) # to deal with WET_DRY (see Utility/set_scoord.F) hc_source = 'min of hmin and Tcline' else: hc = 'not found' else: # vt==2 hc = Tcline hc_source = 'Tcline' if 's_rho' in nc.dims.keys(): N = nc.dims['s_rho'] N_source = 'file dimension s_rho' elif 'sc_r' in nc.atts.keys(): N = nc.atts['sc_r']['value'] N_source = 'file attribute sc_r' elif 'sc_r' in nc.varnames: N = len(nc.vars['sc_r'][:]) N_source = 'length of variable sc_r' if show: # show sources: print '%-10s : %4.2f %-10s' % ('theta_s', theta_s, theta_s_source) print '%-10s : %4.2f %-10s' % 
('theta_b', theta_b, theta_b_source) print '%-10s : %4.2f %-10s' % ('hc', hc, hc_source) print '%-10s : %4d %-10s' % ('N', N, N_source) print '%-10s : %4d %-10s' % ('vt', vt, 'variable') print '%-10s : %4d %-10s' % ('vs', vs, 'variable') return theta_s, theta_b, hc, N, vt, vs
def load_data(f,quiet=0,**kargs):
  '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  ie, each file must contain data for a single time. The file must
  also contain the variable time.

  If f is an opendap address, it must contain also all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap
  addresses, f can also be a dictionary with keys the variables and
  values the files or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be
      integer or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time, by default the att units is used
  '''

  # defaults, overridable through kargs:
  sett=DataAccess()
  inds={}
  extra=[]
  t_units=[]
  if 'settings' in kargs.keys(): sett    = kargs['settings']
  if 'inds'     in kargs.keys(): inds    = kargs['inds']
  if 'extra'    in kargs.keys(): extra   = kargs['extra']
  if 't_units'  in kargs.keys(): t_units = kargs['t_units']

  res={}
  msg=''

  # existence check only applies to a local, non-dict source:
  if not isinstance(f,dict) and not f.startswith('http') and not isfile(f):
    msg='file not found %s' % f
    if not quiet: print msg
    return res, msg

  # load nc files: normalize f into a dict of key -> file/url
  if not isinstance(f,dict):
    f={'temp':f,'salt':f,'u':f,'v':f,'ssh':f,'misc':f}

  # xy, z and time sources default to the misc source:
  if not f.has_key('xy'):   f['xy']   = f['misc']
  if not f.has_key('z'):    f['z']    = f['misc']
  if not f.has_key('time'): f['time'] = f['misc']

  # open each distinct file/url only once and reuse the handle
  # (avoids repeated connections to remote opendap sources):
  filesUsed=[]
  ncUsed=[]
  for i in f.keys():
    if not quiet: print '(%s) loading from %s' % (i.ljust(5),f[i])

    if i=='temp':
      if f[i] in filesUsed: ncTemp=ncUsed[filesUsed.index(f[i])]
      else:
        ncTemp=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTemp]

    elif i=='salt':
      if f[i] in filesUsed: ncSalt=ncUsed[filesUsed.index(f[i])]
      else:
        ncSalt=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSalt]

    elif i=='u':
      if f[i] in filesUsed: ncU=ncUsed[filesUsed.index(f[i])]
      else:
        ncU=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncU]

    elif i=='v':
      if f[i] in filesUsed: ncV=ncUsed[filesUsed.index(f[i])]
      else:
        ncV=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncV]

    elif i=='ssh':
      if f[i] in filesUsed: ncSsh=ncUsed[filesUsed.index(f[i])]
      else:
        ncSsh=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSsh]

    elif i=='xy':
      if f[i] in filesUsed: ncXy=ncUsed[filesUsed.index(f[i])]
      else:
        ncXy=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncXy]

    elif i=='z':
      if f[i] in filesUsed: ncZ=ncUsed[filesUsed.index(f[i])]
      else:
        ncZ=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncZ]

    elif i=='time':
      if f[i] in filesUsed: ncTime=ncUsed[filesUsed.index(f[i])]
      else:
        ncTime=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTime]

    elif i=='misc':
      if f[i] in filesUsed: ncMisc=ncUsed[filesUsed.index(f[i])]
      else:
        ncMisc=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncMisc]

  # load dims:
  if not quiet: print ' loading dims...'
  dimsXy=netcdf.fdim(ncXy)
  dimsZ =netcdf.fdim(ncZ)

  res['NX']=dimsXy[sett.xdim]
  res['NY']=dimsXy[sett.ydim]
  # a source without a vertical dimension is treated as a single level:
  if sett.zdim:
    res['NZ']=dimsZ[sett.zdim]
  else:
    res['NZ']=1

  # about horizontal inds:
  # a 2-element non-string entry for the x dimension in inds is taken
  # as lon limits; together with the y limits it is converted into
  # "i0:i1" / "j0:j1" index-range strings for the netcdf extraction:
  if inds.has_key(sett.xdim) and len(inds[sett.xdim])==2 and not isinstance(inds[sett.xdim],basestring):
    if not quiet: print ' calc horizontal inds...'
    xlim=inds[sett.xdim]
    ylim=inds[sett.ydim]

    inds.pop(sett.xdim)
    inds.pop(sett.ydim)

    lon=netcdf.use(ncXy,sett.x_name,**inds)
    if np.any(lon>360): lon=np.mod(lon,360.)
    lat=netcdf.use(ncXy,sett.y_name,**inds)
    i0,i1,j0,j1=calc.ij_limits(lon,lat,xlim,ylim,margin=3)
    inds[sett.xdim]='%d:%d' % (i0,i1)
    inds[sett.ydim]='%d:%d' % (j0,j1)

  if not quiet: print ' loading lon, lat, depth...'
  res['lon'] = netcdf.use(ncXy,sett.x_name,**inds)
  if np.any(res['lon']>360): res['lon']=np.mod(res['lon'],360.)
  res['lat'] = netcdf.use(ncXy,sett.y_name,**inds)
  if sett.z_name:
    # NOTE(review): sign is flipped here -- presumably the source
    # stores positive-down depths; confirm against the data source
    res['depth'] = -netcdf.use(ncZ,sett.z_name,**inds)
  else: res['depth']=False

  if res['lon'].size!=res['lat'].size:
    res['lon'],res['lat']=np.meshgrid(res['lon'],res['lat']) # needed for griddata, later

  # update nx,ny after the horizontal crop:
  if inds.has_key(sett.xdim):
    res['NY'],res['NX']=res['lon'].shape

  # extra misc vars:
  if len(extra):
    for outKey,fileVar in extra:
      if not quiet: print ' loading extra misc... %s %s' % (outKey,fileVar)
      res[outKey]=netcdf.use(ncMisc,fileVar,**inds)

  # time:
  # file may have one or several times. If several, time dim must be given
  # with kargs inds!
  if not quiet: print ' loading time...'
  if t_units:
    times=netcdf.use(ncTime,sett.time_name)
    # NOTE(review): uses netcdftime.num2date while a sibling variant of
    # this function uses netcdf.num2date -- confirm which module is
    # actually imported at the top of this file
    times=netcdftime.num2date(times,t_units)
  else:
    times=netcdf.nctime(ncTime,sett.time_name)

  if inds.has_key(sett.tdim):
    # the time index may be a parsable date string, a datetime or an
    # integer index:
    try: tind=dts.parse_date(inds[sett.tdim])
    except: tind=inds[sett.tdim] # is an integer, for instance

    if isinstance(tind,datetime.datetime):
      # convert datetime to the index of the exact matching time:
      tind,=np.where(times==tind)
      if tind.size:
        tind=tind[0]
        inds[sett.tdim]=tind # update inds to extract other variables
      else:
        Msg='date not found'
        msg+='\n'+Msg
        return res,msg+' ERROR'

    date=times[tind]
    if not quiet: print ' tind, date= %d %s' % (tind,date.isoformat(' '))

  elif times.size==1:
    date=times[0]
    if not quiet: print ' date= %s' % date.isoformat(' ')
  else: # must provide tind as input!!
    Msg='several dates in file... provice tind!'
    msg+='\n'+Msg
    return res,msg+' ERROR'

  res['date'] = date

  # zero-filled fallbacks used when a variable is missing from its file:
  empty3d=np.zeros([res['NZ'],res['NY'],res['NX']])
  empty2d=np.zeros([res['NY'],res['NX']])

  if 'temp' in f.keys():
    if not quiet: print ' loading temp...'
    if sett.temp_name in ncTemp.varnames: res['temp'] = netcdf.use(ncTemp,sett.temp_name,**inds)
    else:
      Msg='var %s not found' % 'temp'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['temp']=empty3d

  if 'salt' in f.keys():
    if not quiet: print ' loading salt...'
    if sett.salt_name in ncSalt.varnames: res['salt'] = netcdf.use(ncSalt,sett.salt_name,**inds)
    else:
      Msg='var %s not found' % 'salt'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['salt']=empty3d

  if 'u' in f.keys():
    if not quiet: print ' loading u...'
    if sett.u_name in ncU.varnames: res['u'] = netcdf.use(ncU,sett.u_name,**inds)
    else:
      Msg='var %s not found' % 'u'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['u']=empty3d

  if 'v' in f.keys():
    if not quiet: print ' loading v...'
    if sett.v_name in ncV.varnames: res['v'] = netcdf.use(ncV,sett.v_name,**inds)
    else:
      Msg='var %s not found' % 'v'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['v']=empty3d

  if 'ssh' in f.keys():
    if not quiet: print ' loading ssh...'
    if sett.ssh_name in ncSsh.varnames: res['ssh'] = netcdf.use(ncSsh,sett.ssh_name,**inds)
    else:
      Msg='var %s not found' % 'ssh'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['ssh']=empty2d

  # close every handle opened above (best-effort):
  for nc in ncUsed:
    try: nc.close()
    except: pass

  return res, msg