def get_zr(G, S, vn):
    # get z on the ROMS grids
    h = G['h']
    if vn in ['theta', 's3d']:
        zr = zrfun.get_z(h, 0 * h, S, only_rho=True)
    elif vn in ['u3d']:
        xru, yru = get_xyr(G, 'ubar')  # (xru, yru are not used below)
        hu = zfun.interp_scattered_on_plaid(G['lon_u'], G['lat_u'],
            G['lon_rho'][0, :], G['lat_rho'][:, 0], h, exnan=False)
        hu = np.reshape(hu, G['lon_u'].shape)
        zr = zrfun.get_z(hu, 0 * hu, S, only_rho=True)
    elif vn in ['v3d']:
        hv = zfun.interp_scattered_on_plaid(G['lon_v'], G['lat_v'],
            G['lon_rho'][0, :], G['lat_rho'][:, 0], h, exnan=False)
        hv = np.reshape(hv, G['lon_v'].shape)
        zr = zrfun.get_z(hv, 0 * hv, S, only_rho=True)
    else:
        print('Unknown variable name for get_zr: ' + vn)
        raise ValueError('unsupported vn')  # zr would otherwise be undefined below
    return zr
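# Minimal usage sketch for get_zr (hedged: the history-file path is
# hypothetical, and zrfun is assumed importable from the LiveOcean code tree).
fn = '/path/to/ocean_his_0001.nc'  # hypothetical
G, S, T = zrfun.get_basic_info(fn)
zr_u = get_zr(G, S, 'u3d')  # z at rho levels on the u grid, shape (N, M, L-1)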
def get_full(fn):
    # gets v_dict for the full extent of one history file
    ds = nc.Dataset(fn)
    G, S, T = zrfun.get_basic_info(fn)
    # extract needed info from history file
    v_dict = dict()
    if print_info:
        print('\nINPUT Variable Info:')
    for vn in ['alkalinity', 'TIC', 'salt', 'temp', 'rho']:
        v = ds[vn][:]
        v = fillit(v)
        v_dict[vn] = v
    # create depth, pressure, and in situ temperature
    h = ds['h'][:]
    h = fillit(h)
    lat = G['lat_rho'][:]
    z_rho = zrfun.get_z(h, 0 * h, S, only_rho=True)
    depth = -z_rho
    pres = sw.pres(depth, lat)
    v_dict['pres'] = pres
    temp = sw.ptmp(v_dict['salt'], v_dict['temp'], 0, v_dict['pres'])
    v_dict['temp'] = temp
    # convert from umol/L to umol/kg using in situ density
    v_dict['alkalinity'] = 1000 * v_dict['alkalinity'] / (v_dict['rho'] + 1000)
    v_dict['TIC'] = 1000 * v_dict['TIC'] / (v_dict['rho'] + 1000)
    # clean up
    v_dict.pop('rho')  # no longer needed, so don't pass to worker
    ds.close()
    return v_dict
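# Worked check of the umol/L -> umol/kg conversion above (a sketch, not part
# of the source): v [umol/L] * 1000 [L/m3] / density [kg/m3] gives umol/kg,
# and ROMS 'rho' is a density anomaly, hence the (+ 1000).
rho_anom = 25.0  # kg/m3, a typical seawater density anomaly
alk_per_L = 2300.0  # umol/L
alk_per_kg = 1000 * alk_per_L / (rho_anom + 1000)  # ~2243.9 umol/kg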
def get_zfull(ds, fn, which_grid):
    # get zfull field on "which_grid" ('rho', 'u', or 'v')
    G, S, T = zrfun.get_basic_info(fn)
    zeta = 0 * ds.variables['zeta'][:].squeeze()
    zr_mid = zrfun.get_z(G['h'], zeta, S, only_rho=True)
    zr_bot = -G['h'].reshape(1, G['M'], G['L']).copy()
    zr_top = zeta.reshape(1, G['M'], G['L']).copy()
    zfull0 = make_full((zr_bot, zr_mid, zr_top))
    if which_grid == 'rho':
        zfull = zfull0
    elif which_grid == 'u':
        zfull = zfull0[:, :, 0:-1] + np.diff(zfull0, axis=2) / 2
    elif which_grid == 'v':
        zfull = zfull0[:, 0:-1, :] + np.diff(zfull0, axis=1) / 2
    return zfull
def get_zinds(h, S, z):
    # Precalculate the array of indices to go from HYCOM z to ROMS z.
    # This just finds the vertical index in HYCOM z for each ROMS z_rho
    # value in the whole 3D array, with the index being the UPPER one of
    # the two HYCOM z indices that any ROMS z falls between.
    tt0 = time.time()
    zr = zrfun.get_z(h, 0 * h, S, only_rho=True)
    zrf = zr.flatten()
    zinds = np.nan * np.ones_like(zrf)
    if isinstance(z, np.ma.MaskedArray):
        z = z.data
    for ii in range(len(z) - 1):
        zlo = z[ii]
        zhi = z[ii + 1]
        mask = (zrf > zlo) & (zrf <= zhi)
        zinds[mask] = ii + 1  # this is where the UPPER index is enforced
    zinds = zinds.astype(int)
    if isinstance(zinds, np.ma.MaskedArray):
        zinds = zinds.data
    print(' --create zinds array took %0.1f seconds' % (time.time() - tt0))
    return zinds
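# Hedged sketch of how the zinds array could be applied (the consumer of
# zinds is not shown in this excerpt; apply_zinds is a hypothetical helper):
# given a HYCOM field already interpolated horizontally to the ROMS grid,
# look up the value at the HYCOM level just above each ROMS z_rho point.
def apply_zinds(fld_h, zinds, shape_3d):
    # fld_h: (NZ_hycom, M, L); shape_3d: (N, M, L) of the ROMS z_rho array
    fld_flat = fld_h.reshape(len(fld_h), -1)  # (NZ_hycom, M*L)
    # column index for each flattened ROMS point, matching zr.flatten() order
    cols = np.tile(np.arange(fld_flat.shape[1]), shape_3d[0])
    return fld_flat[zinds, cols].reshape(shape_3d)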
def get_delta(fn, pmpn, z, y, x):
    ds = nc.Dataset(fn)
    # delta is a small pain to calculate :/
    om = 1 / ds[pmpn][y, x]
    # om0 = 0.5*(om00[:,:-1]+om00[:,1:])
    # om = np.tile(om0,(NZ,1,1))
    zeta = ds['zeta'][:]
    h = ds['h'][:]
    S = zrfun.get_basic_info(fn, only_S=True)
    zw = zrfun.get_z(h, zeta, S, only_w=True)
    Hz0 = zw[1:, :, :] - zw[:-1, :, :]
    if pmpn == 'pn':  # u grid
        Hz = 0.5 * (Hz0[z, y, x + 1] + Hz0[z, y, x])
    elif pmpn == 'pm':  # v grid
        Hz = 0.5 * (Hz0[z, y, x] + Hz0[z, y + 1, x])
    delta = Hz * om  # delta
    ds.close()
    return delta
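# Minimal usage sketch for get_delta (hedged: the path and indices are
# hypothetical): pmpn='pn' gives delta at a u-grid cell, 'pm' at a v-grid cell.
delta_u = get_delta('/path/to/ocean_his_0001.nc', 'pn', 10, 50, 100)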
# lists of variables to process
dlist = ['xi_rho', 'eta_rho', 'xi_psi', 'eta_psi', 'ocean_time']
vn_list_2d = ['lon_rho', 'lat_rho', 'lon_psi', 'lat_psi', 'mask_rho', 'h']
vn_list_2d_custom = ['DA']
vn_list_3d_t_custom = ['hyp_dz']
# make some things
fn = fn_list[0]
G = zrfun.get_basic_info(fn, only_G=True)
DA = G['DX'] * G['DY']
ny, nx = DA.shape
h = G['h']
S = zrfun.get_basic_info(fn, only_S=True)
zr, zw = zrfun.get_z(h, 0 * h, S)
dzr = np.diff(zw, axis=0)
ds1 = nc.Dataset(fn)
ds2 = nc.Dataset(out_fn, 'w')
# Create dimensions
for dname, the_dim in ds1.dimensions.items():
    if dname in dlist:
        ds2.createDimension(
            dname, len(the_dim) if not the_dim.isunlimited() else None)
# Create variables and their attributes
# - first time
vn = 'ocean_time'
def get_cast(gridname, tag, ex_name, date_string, station, lon_str, lat_str):
    # get the dict Ldir
    Ldir = Lfun.Lstart(gridname, tag)
    Ldir['gtagex'] = Ldir['gtag'] + '_' + ex_name
    Ldir['date_string'] = date_string
    Ldir['station'] = station
    Ldir['lon_str'] = lon_str
    Ldir['lat_str'] = lat_str
    # make sure the output directory exists
    outdir0 = Ldir['LOo'] + 'cast/'
    Lfun.make_dir(outdir0)
    outdir = outdir0 + Ldir['gtagex'] + '/'
    Lfun.make_dir(outdir)
    dt = Ldir['date_string']

    #%% function definitions

    def get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy):
        # look up which grid this variable lives on, and return the
        # index and weight arrays for bilinear interpolation
        dims = ds.variables[vv].dimensions
        if 'eta_rho' in dims:
            grd = 'rho'
        elif 'eta_u' in dims:
            grd = 'u'
        elif 'eta_v' in dims:
            grd = 'v'
        else:
            print('grid error!')
        xi0 = Xi0[grd]; yi0 = Yi0[grd]
        xi1 = Xi1[grd]; yi1 = Yi1[grd]
        aix = Aix[grd]; aiy = Aiy[grd]
        xi01 = np.array([xi0, xi1]).flatten()
        yi01 = np.array([yi0, yi1]).flatten()
        return xi01, yi01, aix, aiy

    #%% set up for the extraction

    # target position
    Lon = np.array(float(Ldir['lon_str']))
    Lat = np.array(float(Ldir['lat_str']))
    # get grid info
    indir = Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/f' + dt + '/'
    fn = indir + 'ocean_his_0021.nc'  # approx. noon local standard time
    if not os.path.isfile(fn):
        print('Not found: ' + fn)
        return
    G = zrfun.get_basic_info(fn, only_G=True)
    S = zrfun.get_basic_info(fn, only_S=True)
    lon = G['lon_rho']
    lat = G['lat_rho']
    mask = G['mask_rho']
    xvec = lon[0, :].flatten()
    yvec = lat[:, 0].flatten()
    i0, i1, frx = zfun.get_interpolant(np.array([float(Ldir['lon_str'])]), xvec)
    j0, j1, fry = zfun.get_interpolant(np.array([float(Ldir['lat_str'])]), yvec)
    i0 = int(i0)
    j0 = int(j0)
    # find indices of nearest good point
    if mask[j0, i0] == 1:
        print('- ' + station + ': point OK')
    elif mask[j0, i0] == 0:
        print('- ' + station + ': point masked')
        i0, j0 = get_ij_good(lon, lat, xvec, yvec, i0, j0, mask)
        new_lon = xvec[i0]
        new_lat = yvec[j0]
        Lon = np.array(new_lon)
        Lat = np.array(new_lat)
    # get interpolants for this point
    Xi0 = dict(); Yi0 = dict()
    Xi1 = dict(); Yi1 = dict()
    Aix = dict(); Aiy = dict()
    for grd in ['rho', 'u', 'v']:
        xx = G['lon_' + grd][1, :]
        yy = G['lat_' + grd][:, 1]
        xi0, xi1, xfr = zfun.get_interpolant(Lon, xx, extrap_nan=True)
        yi0, yi1, yfr = zfun.get_interpolant(Lat, yy, extrap_nan=True)
        Xi0[grd] = xi0
        Yi0[grd] = yi0
        Xi1[grd] = xi1
        Yi1[grd] = yi1
        # create little arrays that are used in the actual interpolation
        Aix[grd] = np.array([1 - xfr, xfr]).reshape((1, 1, 2))
        Aiy[grd] = np.array([1 - yfr, yfr]).reshape((1, 2))
    # generating some lists
    v0_list = ['h', 'lon_rho', 'lat_rho', 'lon_u', 'lat_u', 'lon_v', 'lat_v']
    v1_list = ['ocean_time']
    v2_list = []
    v3_list_rho = []
    v3_list_w = []
    ds = nc.Dataset(fn)
    for vv in ds.variables:
        vdim = ds.variables[vv].dimensions
        if (('ocean_time' in vdim) and ('s_rho' not in vdim)
                and ('s_w' not in vdim) and (vv != 'ocean_time')):
            v2_list.append(vv)
        elif ('ocean_time' in vdim) and ('s_rho' in vdim):
            v3_list_rho.append(vv)
        elif ('ocean_time' in vdim) and ('s_w' in vdim):
            v3_list_w.append(vv)
    V = dict()
    V_long_name = dict()
    V_units = dict()
    v_all_list = v0_list + v1_list + v2_list + v3_list_rho + v3_list_w
    for vv in v_all_list:
        V[vv] = np.array([])
        try:
            V_long_name[vv] = ds.variables[vv].long_name
        except AttributeError:
            V_long_name[vv] = ''
        try:
            V_units[vv] = ds.variables[vv].units
        except AttributeError:
            V_units[vv] = ''
    # get static variables
    for vv in v0_list:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][yi01, xi01].squeeze()
        V[vv] = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
    ds.close()

    #%% extract time-dependent fields
    print('-- Working on date: ' + dt)
    sys.stdout.flush()
    ds = nc.Dataset(fn)
    for vv in v1_list:
        vtemp = ds.variables[vv][:].squeeze()
        V[vv] = np.append(V[vv], vtemp)
    for vv in v2_list:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        V[vv] = np.append(V[vv], vtemp)
    for vv in v3_list_rho:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        V[vv] = vtemp.reshape((S['N'], 1))
    for vv in v3_list_w:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        V[vv] = vtemp.reshape((S['N'] + 1, 1))
    ds.close()
    # create z_rho and z_w (has to be done after we have V['zeta'])
    hh = V['h'][:] * np.ones_like(V['zeta'])
    z_rho, z_w = zrfun.get_z(hh, V['zeta'][:], S)
    V['hh'] = hh
    V_long_name['hh'] = 'bottom depth (positive down) as a vector'
    V_units['hh'] = 'm'
    V['z_rho'] = z_rho
    V_long_name['z_rho'] = 'z on rho points (positive up)'
    V_units['z_rho'] = 'm'
    V['z_w'] = z_w
    V_long_name['z_w'] = 'z on w points (positive up)'
    V_units['z_w'] = 'm'
    v2_list.append('hh')
    v3_list_rho.append('z_rho')
    v3_list_w.append('z_w')

    #%% save the output to NetCDF
    out_fn = (outdir + Ldir['station'] + '_' + Ldir['date_string'] + '.nc')
    # get rid of the old version, if it exists
    try:
        os.remove(out_fn)
    except OSError:
        pass  # assume error was because the file did not exist
    foo = nc.Dataset(out_fn, 'w')
    N = S['N']
    NT = len(V['ocean_time'][:])
    foo.createDimension('scalar', 1)
    foo.createDimension('s_rho', N)
    foo.createDimension('s_w', N + 1)
    foo.createDimension('ocean_time', NT)
    for vv in v0_list:
        v_var = foo.createVariable(vv, float, ('scalar',))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v1_list + v2_list:
        v_var = foo.createVariable(vv, float, ('ocean_time',))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v3_list_rho:
        v_var = foo.createVariable(vv, float, ('s_rho', 'ocean_time'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v3_list_w:
        v_var = foo.createVariable(vv, float, ('s_w', 'ocean_time'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    foo.close()
def add_fields(ds, count, vn_list, G, S, sinfo):
    ii0, ii1, jj0, jj1, sdir, landward, NT, NX, NZ, out_fn = sinfo
    foo = nc.Dataset(out_fn, 'a')
    # get depth and dz, and dd (which is either dx or dy)
    if sdir == 'NS':
        h = ds['h'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        zeta = ds['zeta'][0, jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        z = zrfun.get_z(h, zeta, S, only_w=True)
        dz = np.diff(z, axis=0)
        DZ = dz.mean(axis=2)  # fails for a channel one point wide 2019.05.20 (oak)
        dd = G['DY'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        DD = dd.mean(axis=1)
        zeta = zeta.mean(axis=1)
        if count == 0:
            hh = h.mean(axis=1)
            foo['h'][:] = hh
            z0 = zrfun.get_z(hh, 0 * hh, S, only_rho=True)
            foo['z0'][:] = z0
            zw0 = zrfun.get_z(hh, 0 * hh, S, only_w=True)
            DZ0 = np.diff(zw0, axis=0)
            DA0 = DD.reshape((1, NX)) * DZ0
            foo['DA0'][:] = DA0
    elif sdir == 'EW':
        h = ds['h'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        zeta = ds['zeta'][0, jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        z = zrfun.get_z(h, zeta, S, only_w=True)
        dz = np.diff(z, axis=0)
        DZ = dz.mean(axis=1)
        dd = G['DX'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        DD = dd.mean(axis=0)
        zeta = zeta.mean(axis=0)
        if count == 0:
            hh = h.mean(axis=0)
            foo['h'][:] = hh
            z0 = zrfun.get_z(hh, 0 * hh, S, only_rho=True)
            foo['z0'][:] = z0
            zw0 = zrfun.get_z(hh, 0 * hh, S, only_w=True)
            DZ0 = np.diff(zw0, axis=0)
            DA0 = DD.reshape((1, NX)) * DZ0
            foo['DA0'][:] = DA0
    # and then create the array of cell areas on the section
    DA = DD.reshape((1, NX)) * DZ
    # then velocity and hence transport
    if sdir == 'NS':
        vel = ds['u'][0, :, jj0:jj1 + 1, ii0].squeeze()
    elif sdir == 'EW':
        vel = ds['v'][0, :, jj0, ii0:ii1 + 1].squeeze()
    q = vel * DA  # * landward
    foo['q'][count, :, :] = q
    foo['vel'][count, :, :] = vel
    foo['DA'][count, :, :] = DA
    foo['zeta'][count, :] = zeta
    foo['ocean_time'][count] = ds['ocean_time'][0]
    # save the tracer fields averaged onto this section
    for vn in vn_list:
        if sdir == 'NS':
            vvv = (ds[vn][0, :, jj0:jj1 + 1, ii0].squeeze()
                   + ds[vn][0, :, jj0:jj1 + 1, ii1].squeeze()) / 2
        elif sdir == 'EW':
            vvv = (ds[vn][0, :, jj0, ii0:ii1 + 1].squeeze()
                   + ds[vn][0, :, jj1, ii0:ii1 + 1].squeeze()) / 2
        foo[vn][count, :, :] = vvv
    foo.close()
# (first statement truncated in the source; by symmetry with the line below
# it is presumably: ii0, ii1, ifr = zfun.get_interpolant(..., lon_vec))
jj0, jj1, jfr = zfun.get_interpolant(np.array([44.8, 45.2]), lat_vec)
i0 = ii0[0]
i1 = ii1[1]
j0 = jj0[0]
j1 = jj1[1]
h = G['h'][j0:j1, i0:i1]
dx = G['DX'][j0:j1, i0:i1]
dy = G['DY'][j0:j1, i0:i1]
da = dx * dy
ny, nx = da.shape
zeta = ds['zeta'][0, j0:j1, i0:i1].squeeze()
salt = ds['salt'][0, :, j0:j1, i0:i1].squeeze()
zr, zw = zrfun.get_z(h, zeta, S)
dzr = np.diff(zw, axis=0)
V = np.sum(da.reshape((1, ny, nx)) * dzr)  # volume
Salt = np.sum(da.reshape((1, ny, nx)) * dzr * salt)  # net salt
sbar = Salt / V  # average salinity
sp = salt - sbar  # s' = s - sbar
sv = sp**2  # salinity variance
SV = np.sum(da.reshape((1, ny, nx)) * dzr * sv)  # net salinity variance
# and calculate net destruction of variance by vertical mixing
K = ds['AKs'][0, 1:-1, j0:j1, i0:i1].squeeze()
dzw = np.diff(zr, axis=0)
dsdz = np.diff(salt, axis=0) / dzw
mix = 2 * K * dsdz**2
dvw = da.reshape((1, ny, nx)) * dzw
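# Hedged completion sketch: the comment above says the goal is the net
# destruction of variance by vertical mixing, so the volume integral would
# follow directly (the name Mix is an assumption; the excerpt ends at dvw):
Mix = np.sum(mix * dvw)  # net variance destruction by vertical mixing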
def get_section(ds, vn, x, y, in_dict):
    # PLOT CODE
    from warnings import filterwarnings
    filterwarnings('ignore')  # skip a warning message
    # GET DATA
    G, S, T = zrfun.get_basic_info(in_dict['fn'])
    h = G['h']
    zeta = ds['zeta'][:].squeeze()
    zr = zrfun.get_z(h, zeta, S, only_rho=True)
    sectvar = ds[vn][:].squeeze()
    L = G['L']
    M = G['M']
    N = S['N']
    lon = G['lon_rho']
    lat = G['lat_rho']
    mask = G['mask_rho']
    maskr = mask.reshape(1, M, L).copy()
    mask3 = np.tile(maskr, [N, 1, 1])
    zbot = -h  # don't need .copy() because of the minus operation
    # make sure fields are masked
    zeta[mask == False] = np.nan
    zbot[mask == False] = np.nan
    sectvar[mask3 == False] = np.nan
    # create dist
    earth_rad = zfun.earth_rad(np.mean(lat[:, 0]))  # m
    xrad = np.pi * x / 180
    yrad = np.pi * y / 180
    dx = earth_rad * np.cos(yrad[1:]) * np.diff(xrad)
    dy = earth_rad * np.diff(yrad)
    ddist = np.sqrt(dx**2 + dy**2)
    dist = np.zeros(len(x))
    dist[1:] = ddist.cumsum() / 1000  # km
    # find the index of zero
    i0, i1, fr = zfun.get_interpolant(np.zeros(1), dist)
    idist0 = i0
    distr = dist.reshape(1, len(dist)).copy()
    dista = np.tile(distr, [N, 1])  # array
    # pack fields to process in dicts
    d2 = dict()
    d2['zbot'] = zbot
    d2['zeta'] = zeta
    d2['lon'] = lon
    d2['lat'] = lat
    d3 = dict()
    d3['zr'] = zr
    d3['sectvar'] = sectvar
    # get vectors describing the (plaid) grid
    xx = lon[1, :]
    yy = lat[:, 1]
    col0, col1, colf = zfun.get_interpolant(x, xx)
    row0, row1, rowf = zfun.get_interpolant(y, yy)
    # and prepare them to do the bilinear interpolation
    colff = 1 - colf
    rowff = 1 - rowf
    # now actually do the interpolation
    # 2-D fields
    v2 = dict()
    for fname in d2.keys():
        fld = d2[fname]
        fldi = (rowff * (colff * fld[row0, col0] + colf * fld[row0, col1])
                + rowf * (colff * fld[row1, col0] + colf * fld[row1, col1]))
        if isinstance(fldi, np.ma.core.MaskedArray):
            fldi = fldi.data  # just the data, not the mask
        v2[fname] = fldi
    # 3-D fields
    v3 = dict()
    for fname in d3.keys():
        fld = d3[fname]
        fldi = (rowff * (colff * fld[:, row0, col0] + colf * fld[:, row0, col1])
                + rowf * (colff * fld[:, row1, col0] + colf * fld[:, row1, col1]))
        if isinstance(fldi, np.ma.core.MaskedArray):
            fldid = fldi.data  # just the data, not the mask
            fldid[fldi.mask == True] = np.nan
            v3[fname] = fldid
        else:
            v3[fname] = fldi  # fix: also keep fields that arrive unmasked
    v3['dist'] = dista  # distance in km
    # make "full" fields by padding top and bottom
    nana = np.nan * np.ones((N + 2, len(dist)))  # blank array
    v3['zrf'] = nana.copy()
    v3['zrf'][0, :] = v2['zbot']
    v3['zrf'][1:-1, :] = v3['zr']
    v3['zrf'][-1, :] = v2['zeta']
    #
    v3['sectvarf'] = nana.copy()
    v3['sectvarf'][0, :] = v3['sectvar'][0, :]
    v3['sectvarf'][1:-1, :] = v3['sectvar']
    v3['sectvarf'][-1, :] = v3['sectvar'][-1, :]
    #
    v3['distf'] = nana.copy()
    v3['distf'][0, :] = v3['dist'][0, :]
    v3['distf'][1:-1, :] = v3['dist']
    v3['distf'][-1, :] = v3['dist'][-1, :]
    # attempt to skip over nan's
    v3.pop('zr')
    v3.pop('sectvar')
    v3.pop('dist')
    mask3 = ~np.isnan(v3['sectvarf'][:])
    mask2 = mask3[-1, :]
    dist = dist[mask2]
    NC = len(dist)
    NR = mask3.shape[0]
    for k in v2.keys():
        v2[k] = v2[k][mask2]
    for k in v3.keys():
        v3[k] = v3[k][mask3]
        v3[k] = v3[k].reshape((NR, NC))
    return v2, v3, dist, idist0
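# Minimal usage sketch for get_section (hedged: the track coordinates are
# hypothetical; ds is an open history file and in_dict['fn'] is its path).
x = np.linspace(-125.0, -122.5, 500)  # section longitudes
y = 48.0 * np.ones(len(x))  # constant-latitude track
v2, v3, dist, idist0 = get_section(ds, 'salt', x, y, in_dict)
# v3['distf'], v3['zrf'], and v3['sectvarf'] are pcolormesh-ready fields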
    # (fragment begins inside an elif branch of a layer-name test)
    vn_list_3d_t_custom = ['vave_salt', 'vave_temp', 'vave_rho']
    vn_list_2d_uv_t = []
    vn_list_3d_uv_t = []
    vn_list_2d_custom = []
else:
    print('Unsupported layer name')
    sys.exit()
# make some things
fn = fn_list[0]
G = zrfun.get_basic_info(fn, only_G=True)
DA = G['DX'] * G['DY']
ny, nx = DA.shape
h = G['h']
S = zrfun.get_basic_info(fn, only_S=True)
zr = zrfun.get_z(h, 0 * h, S, only_rho=True)
zlay = zr[nlay, :, :].squeeze()
ds1 = nc.Dataset(fn)
ds2 = nc.Dataset(out_fn, 'w')
# Create dimensions
for dname, the_dim in ds1.dimensions.items():
    if dname in dlist:
        ds2.createDimension(
            dname, len(the_dim) if not the_dim.isunlimited() else None)
# Create variables and their attributes
# - first time
vn = 'ocean_time'
varin = ds1[vn]
vv = ds2.createVariable(vn, varin.dtype, varin.dimensions)
# next, try to calculate it using
#   u_accel = <delta*du/dt>/<delta> + <u*ddelta/dt>/<delta>
# need u, delta, udelta and dt, du, ddelta
dt = ds_his1['ocean_time'][:] - ds_his0['ocean_time'][:]  # dt
du = ds_his1['u'][0, :] - ds_his0['u'][0, :]  # du
NZ, NY, NX = du.shape
# delta is a small pain to calculate :/
on000 = 1 / ds_his0['pn'][:]
on00 = 0.5 * (on000[:, :-1] + on000[:, 1:])
on0 = np.tile(on00, (NZ, 1, 1))
zeta0 = ds_his0['zeta'][:]
h = ds_his0['h'][:]
S0 = zrfun.get_basic_info(fn_his0, only_S=True)
zw0 = zrfun.get_z(h, zeta0, S0, only_w=True)
Hz00 = zw0[1:, :, :] - zw0[:-1, :, :]
Hz0 = 0.5 * (Hz00[:, :, :-1] + Hz00[:, :, 1:])
delta0 = Hz0 * on0  # delta0
on100 = 1 / ds_his1['pn'][:]
on10 = 0.5 * (on100[:, :-1] + on100[:, 1:])
on1 = np.tile(on10, (NZ, 1, 1))
zeta1 = ds_his1['zeta'][:]
S1 = zrfun.get_basic_info(fn_his1, only_S=True)
zw1 = zrfun.get_z(h, zeta1, S1, only_w=True)
Hz10 = zw1[1:, :, :] - zw1[:-1, :, :]
Hz1 = 0.5 * (Hz10[:, :, :-1] + Hz10[:, :, 1:])
delta1 = Hz1 * on1  # delta1
ddelta = delta1 - delta0
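# Hedged completion sketch of the step implied by the comment above (the
# excerpt ends at ddelta; approximating the time mean <.> by the average of
# the two history files is an assumption):
delta_avg = 0.5 * (delta0 + delta1)
u_avg = 0.5 * (ds_his0['u'][0, :] + ds_his1['u'][0, :])
u_accel = (delta_avg * du / dt + u_avg * ddelta / dt) / delta_avg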
v_df = pd.DataFrame(columns=seg_list)
for fn in fn_list:
    tt0 = time()
    print(fn)
    ds = nc.Dataset(fn)
    salt = ds['salt'][0, :, :, :]
    zeta = ds['zeta'][0, :, :]
    ot = ds['ocean_time'][:]
    ds.close()
    if testing:
        z_w_alt = zrfun.get_z(h, zeta, S, only_w=True)
        dz_alt = np.diff(z_w_alt, axis=0)
        DV_alt = dz_alt * DA3
    dt = Lfun.modtime_to_datetime(ot.data[0])
    # find the volume and volume-mean salinity
    for seg_name in seg_list:
        jjj = j_dict[seg_name]
        iii = i_dict[seg_name]
        z_w = zrfun.get_z(h[jjj, iii], zeta[jjj, iii], S, only_w=True)
        dz = np.diff(z_w, axis=0)
        DV = dz * DA3[0, jjj, iii]
        volume = DV.sum()
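        # Hedged completion sketch: the stated goal is "volume and
        # volume-mean salinity", so the mean would follow directly
        # (salt_mean and the DataFrame write are assumptions):
        salt_mean = (salt[:, jjj, iii] * DV).sum() / volume
        v_df.loc[dt, seg_name] = salt_mean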
lat_rho = Lat[j0:j1, i0:i1]
h = G['h'][j0:j1, i0:i1]
NR, NC = lon_rho.shape
N = S['N']
fnh_list = []
if testing:
    fn_list = fn_list[:2]
for fn in fn_list:
    # get derived fields
    ds1 = nc.Dataset(fn)
    zeta = ds1['zeta'][0, j0:j1, i0:i1].squeeze()
    z = zrfun.get_z(h, zeta, S, only_rho=True)
    # interpolate velocities to the rho grid
    u = (ds1['u'][0, :, j0:j1, i0 - 1:i1 - 1].squeeze()
         + ds1['u'][0, :, j0:j1, i0:i1].squeeze()) / 2
    v = (ds1['v'][0, :, j0 - 1:j1 - 1, i0:i1].squeeze()
         + ds1['v'][0, :, j0:j1, i0:i1].squeeze()) / 2
    # pack into a NetCDF file:
    ## output files
    fnh = fn.split('/')[-1].replace('his', 'ext')
    out_fn = out_dir + fnh
    fnh_list.append(fnh)
    print(' - creating ' + fnh)
    # get rid of the old version, if it exists
    # (try block completed from the identical pattern used elsewhere in this code)
    try:
        os.remove(out_fn)
    except OSError:
        pass  # assume error was because the file did not exist
def get_layer(fn, NZ=-1, aa=[], print_info=False):
    # function to extract and process fields from a history file
    # returning a dict of arrays that can be passed to CO2SYS.m
    # default is to get full surface field
    ds = nc.Dataset(fn)
    G, S, T = zrfun.get_basic_info(fn)
    if len(aa) == 4:
        # find indices that encompass region aa
        i0 = zfun.find_nearest_ind(G['lon_rho'][0, :], aa[0]) - 1
        i1 = zfun.find_nearest_ind(G['lon_rho'][0, :], aa[1]) + 2
        j0 = zfun.find_nearest_ind(G['lat_rho'][:, 0], aa[2]) - 1
        j1 = zfun.find_nearest_ind(G['lat_rho'][:, 0], aa[3]) + 2
    else:
        # full region
        i0 = 0; j0 = 0
        j1, i1 = G['lon_rho'].shape
        i1 += 1; j1 += 1
    plon = G['lon_psi'][j0:j1 - 1, i0:i1 - 1]
    plat = G['lat_psi'][j0:j1 - 1, i0:i1 - 1]
    # extract needed info from history file
    v_dict = dict()
    if print_info:
        print('\nINPUT Variable Info:')
    for vn in ['alkalinity', 'TIC', 'salt', 'temp', 'rho']:
        v = ds[vn][0, NZ, j0:j1, i0:i1]
        v = fillit(v)
        v_dict[vn] = v
        if print_info:
            name = ds[vn].long_name
            try:
                units = ds[vn].units
            except AttributeError:
                units = ''
            vmax = np.nanmax(v)
            vmin = np.nanmin(v)
            print('%25s (%25s) max = %6.1f min = %6.1f' % (name, units, vmax, vmin))
    # create depth, pressure, and in situ temperature
    h = ds['h'][j0:j1, i0:i1]
    h = fillit(h)
    lat = G['lat_rho'][j0:j1, i0:i1]
    z_rho = zrfun.get_z(h, 0 * h, S, only_rho=True)
    depth = -z_rho[NZ, :, :].squeeze()
    pres = sw.pres(depth, lat)
    v_dict['pres'] = pres
    # assume potential temperature is close enough
    # temp = sw.ptmp(v_dict['salt'], v_dict['temp'], 0, v_dict['pres'])
    # v_dict['temp'] = temp
    # convert from umol/L to umol/kg using in situ density
    v_dict['alkalinity'] = 1000 * v_dict['alkalinity'] / (v_dict['rho'] + 1000)
    v_dict['TIC'] = 1000 * v_dict['TIC'] / (v_dict['rho'] + 1000)
    # clean up
    v_dict.pop('rho')  # no longer needed, so don't pass to worker
    ds.close()
    return v_dict, plon, plat
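# Minimal usage sketch for get_layer (hedged: the path and region are
# hypothetical; NZ=-1 selects the surface layer).
v_dict, plon, plat = get_layer('/path/to/ocean_his_0021.nc', NZ=-1,
    aa=[-123.5, -122.0, 47.0, 48.5], print_info=True)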
    # (inside the hourly averaging loop)
    if h == 0:
        rl = np.ma.masked_where(maskr == 0, rho)
        zl = np.ma.masked_where(maskr0 == 0, zeta)
    else:
        rl = rl + np.ma.masked_where(maskr == 0, rho)
        zl = zl + np.ma.masked_where(maskr0 == 0, zeta)
    dsa.close()
# take the average
rl = rl / 24
zl = zl / 24
zl = np.reshape(zl, (1, NY, NX))
# fill out axis definitions with time varying z coordinate
S = zrfun.get_basic_info(oceanfna, only_S=True)
zr, zw = zrfun.get_z(H, zl, S)
zvec = np.zeros([np.shape(zr)[0], len(l_list), np.shape(zr)[2]])
for lat in range(len(l_list)):
    zvec[:, lat, :] = zr[:, l[lat], :]
a_lz = [-1.0, 0.5, zvec.min() - 5, zvec.max() + 5]
# plot averaged data
# begin PLOT
fig = plt.figure(figsize=(10, 7))
# plot surface plan view
ax1 = plt.subplot2grid((llen, 2), (0, 0), colspan=1, rowspan=llen)
rl = np.ma.masked_where(maskr0 == 0, rho[-1, :, :])
p = ax1.pcolormesh(lonr, latr, rl,  # (call truncated in the source)
# extract info about ROMS structure
# (these fields should be present in .nc files)
S_info_dict = {'VTRANSFORM': 2, 'VSTRETCHING': 4, 'THETA_S': 7,
               'THETA_B': 4, 'TCLINE': 50, 'N': 40}
S = zrfun.get_S(S_info_dict)
G = zrfun.get_basic_info(fng, only_G=True)
fn = fn_list[0]
ds = nc.Dataset(fn)
h = ds['h'][:]
z = zrfun.get_z(h, 0 * h, S, only_rho=True)
z0 = z[n_layer, :, :].squeeze()
ds.close()
if testbatch:
    etag = '_test'
else:
    etag = ''
NT = len(fn_list)
# prepare a directory for results
outdir0 = outdir + model_type + '/'
Lfun.make_dir(outdir0, clean=False)
ncoutdir = outdir0 + 'bottom_pressure_extractions2/'
Lfun.make_dir(ncoutdir, clean=False)
        # (inside the w-variable loop of the per-file extraction)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        if count == 0:
            V[vv] = vtemp.reshape((S['N'] + 1, 1))
        else:
            V[vv] = np.concatenate(
                (V[vv], vtemp.reshape((S['N'] + 1, 1))), axis=1)
    # listing of contents, if desired
    if count == 0 and False:
        zfun.ncd(ds)
    count += 1
    ds.close()
# create z_rho and z_w (has to be done after we have V['zeta'])
hh = V['h'][:] * np.ones_like(V['zeta'])
z_rho, z_w = zrfun.get_z(hh, V['zeta'][:], S)
V['hh'] = hh
V_long_name['hh'] = 'bottom depth (positive down) as a vector'
V_units['hh'] = 'm'
V['z_rho'] = z_rho
V_long_name['z_rho'] = 'z on rho points (positive up)'
V_units['z_rho'] = 'm'
V['z_w'] = z_w
V_long_name['z_w'] = 'z on w points (positive up)'
V_units['z_w'] = 'm'
v2_list.append('hh')
v3_list_rho.append('z_rho')
v3_list_w.append('z_w')
foo = nc.Dataset(out_fn, 'w')
# input files
in_dir = Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/' + f_string + '/'
fn_list_raw = os.listdir(in_dir)
fn_list = []
for item in fn_list_raw:
    if 'ocean_his' in item and '.nc' in item:
        fn_list.append(in_dir + item)
fn_list.sort()

#%% make z
fn = fn_list[0]
ds = nc.Dataset(fn)
S = zrfun.get_basic_info(fn, only_S=True)
h = ds['h'][:]
z = zrfun.get_z(h, 0 * h, S, only_rho=True)
z0 = z[-1, :, :].squeeze()
ds.close()

#%% Initialize the multi-file input dataset
ds1 = nc.MFDataset(fn_list)

#%% make surface velocity
u0 = ds1['u'][:, -1, :, :].squeeze()
v0 = ds1['v'][:, -1, :, :].squeeze()
u = np.nan * ds1['salt'][:, -1, :, :].squeeze()
v = u.copy()
u[:, :, 1:-1] = (u0[:, :, 1:] + u0[:, :, :-1]) / 2
v[:, 1:-1, :] = (v0[:, 1:, :] + v0[:, :-1, :]) / 2
# output files
    if verbose:
        print(' -- this history file took %0.2f seconds' % (time() - tt1))
# END OF EXTRACTING TIME-DEPENDENT FIELDS

# create z_rho and z_w (has to be done after we have zeta)
for sta_name in sta_dict.keys():
    out_fn = out_fn_dict[sta_name]
    foo = foo_dict[sta_name]
    # foo = nc.Dataset(out_fn, 'a')
    zeta = foo['zeta'][:].squeeze()
    hh = foo['h'][:] * np.ones_like(zeta)
    z_rho, z_w = zrfun.get_z(hh, zeta, S)
    v_var = foo.createVariable('z_rho', float, ('ocean_time', 's_rho'))
    v_var.long_name = 'z on rho points (positive up)'
    v_var.units = 'm'
    v_var[:] = z_rho.T
    v_var = foo.createVariable('z_w', float, ('ocean_time', 's_w'))
    v_var.long_name = 'z on w points (positive up)'
    v_var.units = 'm'
    v_var[:] = z_w.T
    # foo.close()
# close all output files
for sta_name in sta_dict.keys():
    foo_dict[sta_name].close()
while all_masked and (cc >= 0):
    this_col = mask_rho[:, cc]
    if (this_col == 1).any():
        print(cc)
        break
    else:
        cc -= 1
this_h = h[:, cc]
this_lat = dsg['lat_rho'][:, cc]
this_dy = 1 / dsg['pn'][:, cc]
mask = this_col == 1
hh = this_h[mask].data
yy = this_lat[mask].data
YY = yy * np.ones((S['N'], 1))
dy = this_dy[mask].data
zz_rho, zz_w = zrfun.get_z(hh, 0 * hh, S)
dz = np.diff(zz_w, axis=0)
da = dz * dy  # area of each grid box (on the rho grid)
# set transports and their spatial distribution
H1 = 5  # depth of no motion (m)
h1 = np.minimum(H1, hh)
parabola_1 = -(zz_rho + h1) * (-zz_rho + h1)
parabola_2 = -(zz_rho + hh) * (zz_rho + h1)
# set target transports
Q1 = -15000
Q2 = 14000
h1_mask = zz_rho > -h1
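# Hedged sketch of the scaling step the "target transports" set up (the
# excerpt ends before velocities are formed; v1/v2/vel are assumptions):
# scale each parabolic shape so its area integral matches its transport.
v1 = np.where(h1_mask, parabola_1, 0)  # upper-layer shape
v2 = np.where(~h1_mask, parabola_2, 0)  # lower-layer shape
v1 = Q1 * v1 / np.sum(v1 * da)  # now np.sum(v1 * da) == Q1
v2 = Q2 * v2 / np.sum(v2 * da)  # now np.sum(v2 * da) == Q2
vel = v1 + v2  # two-layer velocity section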
def add_fields(ds, count, vn_list, G, S, sinfo):
    ii0, ii1, jj0, jj1, sdir, landward, NT, NX, NZ, out_fn = sinfo
    foo = nc.Dataset(out_fn, 'a')
    # get depth and dz, and dd (which is either dx or dy)
    if sdir == 'NS':
        h = ds['h'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        zeta = ds['zeta'][0, jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        z = zrfun.get_z(h, zeta, S, only_w=True)
        dz = np.diff(z, axis=0)
        DZ = dz.mean(axis=2)
        dd = G['DY'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        DD = dd.mean(axis=1)
        zeta = zeta.mean(axis=1)
        if count == 0:
            hh = h.mean(axis=1)
            foo['h'][:] = hh
            z0 = zrfun.get_z(hh, 0 * hh, S, only_rho=True)
            foo['z0'][:] = z0
            zw0 = zrfun.get_z(hh, 0 * hh, S, only_w=True)
            DZ0 = np.diff(zw0, axis=0)
            DA0 = DD.reshape((1, NX)) * DZ0
            foo['DA0'][:] = DA0
    elif sdir == 'EW':
        h = ds['h'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        zeta = ds['zeta'][0, jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        z = zrfun.get_z(h, zeta, S, only_w=True)
        dz = np.diff(z, axis=0)
        DZ = dz.mean(axis=1)
        dd = G['DX'][jj0:jj1 + 1, ii0:ii1 + 1].squeeze()
        DD = dd.mean(axis=0)
        zeta = zeta.mean(axis=0)
        if count == 0:
            hh = h.mean(axis=0)
            foo['h'][:] = hh
            z0 = zrfun.get_z(hh, 0 * hh, S, only_rho=True)
            foo['z0'][:] = z0
            zw0 = zrfun.get_z(hh, 0 * hh, S, only_w=True)
            DZ0 = np.diff(zw0, axis=0)
            DA0 = DD.reshape((1, NX)) * DZ0
            foo['DA0'][:] = DA0
    # and then create the array of cell areas on the section
    DA = DD.reshape((1, NX)) * DZ
    # then velocity and hence transport
    if sdir == 'NS':
        vel = ds['u'][0, :, jj0:jj1 + 1, ii0].squeeze()
    elif sdir == 'EW':
        vel = ds['v'][0, :, jj0, ii0:ii1 + 1].squeeze()
    q = vel * DA * landward
    foo['q'][count, :, :] = q
    foo['zeta'][count, :] = zeta
    foo['ocean_time'][count] = ds['ocean_time'][0]
    # save the tracer fields averaged onto this section
    for vn in vn_list:
        if sdir == 'NS':
            vvv = (ds[vn][0, :, jj0:jj1 + 1, ii0].squeeze()
                   + ds[vn][0, :, jj0:jj1 + 1, ii1].squeeze()) / 2
        elif sdir == 'EW':
            vvv = (ds[vn][0, :, jj0, ii0:ii1 + 1].squeeze()
                   + ds[vn][0, :, jj1, ii0:ii1 + 1].squeeze()) / 2
        foo[vn][count, :, :] = vvv
    foo.close()
fnd = [x for x in fn_list if x[-11:-8] == 'dia']
fna = [x for x in fn_list if x[-11:-8] == 'avg']
# load in data for each hour and find the average
for h in np.arange(0, 24):
    oceanfnd = dir0 + f_list[i] + '/' + fnd[h]
    oceanfna = dir0 + f_list[i] + '/' + fna[h]
    dsd = nc.Dataset(oceanfnd)
    dsa = nc.Dataset(oceanfna)
    G, S, T = zrfun.get_basic_info(oceanfna)
    zeta = dsa['zeta'][:]
    salt = dsa['salt'][0, -1, :, :]
    zr = zrfun.get_z(H, zeta, S, only_rho=True)
    D = dict()
    if h == 0:
        for key in dsd.variables.keys():
            if key in DIA_vars.keys():
                DIA_vars[key][i, :, :, :] = dsd[key][:]
        z = zr
        sl = np.ma.masked_where(maskr == 0, salt)
    else:
        for key in dsd.variables.keys():
            if key in DIA_vars.keys():
                DIA_vars[key][i, :, :, :] = DIA_vars[key][i, :, :, :] + dsd[key][:]
        z = z + zr
if True:  # 3D trees
    for tag in ['w', 'rho', 'u', 'v']:
        # prepare fields to make the tree
        tt0 = time()
        if tag == 'u':
            hh = (h[:, :-1] + h[:, 1:]) / 2
        elif tag == 'v':
            hh = (h[:-1, :] + h[1:, :]) / 2
        elif tag in ['rho', 'w']:
            hh = h.copy()
        if tag in ['rho', 'u', 'v']:
            z = zrfun.get_z(hh, 0 * hh, S, only_rho=True)
            x = G['lon_' + tag]
            y = G['lat_' + tag]
            mask = G['mask_' + tag]
        elif tag == 'w':
            z = zrfun.get_z(hh, 0 * hh, S, only_w=True)
            x = G['lon_rho']
            y = G['lat_rho']
            mask = G['mask_rho']
        N, M, L = z.shape
        X = np.tile(x.reshape(1, M, L), [N, 1, 1])
        Y = np.tile(y.reshape(1, M, L), [N, 1, 1])
        H = np.tile(hh.reshape(1, M, L), [N, 1, 1])
        Z = z / H  # fractional depth (-1 to 0)
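        # Hedged sketch of the tree construction this block prepares for
        # (the excerpt ends before the tree is built; using scipy's cKDTree
        # and a water-point mask is an assumption):
        from scipy.spatial import cKDTree
        mask3 = np.tile((mask == 1).reshape(1, M, L), [N, 1, 1])
        xyz = np.array((X[mask3], Y[mask3], Z[mask3])).T
        tree = cKDTree(xyz)  # query with (lon, lat, fractional depth) triples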