Example #1
def get_full(fn, print_info=False):
    # gets v_dict for the full extent of one history file
    ds = nc.Dataset(fn)
    G, S, T = zrfun.get_basic_info(fn)        

    # extract needed info from history file
    v_dict = dict()
    if print_info:
        print('\nINPUT Variable Info:')
    for vn in ['alkalinity', 'TIC', 'salt', 'temp','rho']:
        v = ds[vn][:]
        v = fillit(v)
        v_dict[vn] = v
                
    # create depth, pressure, and in situ temperature
    h = ds['h'][:]
    h = fillit(h)
    lat = G['lat_rho'][:]
    z_rho = zrfun.get_z(h, 0*h, S, only_rho=True)
    depth = -z_rho
    pres = sw.pres(depth, lat)
    v_dict['pres'] = pres
    temp = sw.ptmp(v_dict['salt'], v_dict['temp'], 0, v_dict['pres'])
    v_dict['temp'] = temp
    
    # convert from umol/L to umol/kg using in situ density
    v_dict['alkalinity'] = 1000 * v_dict['alkalinity'] / (v_dict['rho'] + 1000)
    v_dict['TIC'] = 1000 * v_dict['TIC'] / (v_dict['rho'] + 1000)
    
    # clean up
    v_dict.pop('rho') # no longer needed, so don't pass to worker
    ds.close()
    
    return v_dict
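A minimal usage sketch (hedged; the history-file path is hypothetical, and it assumes netCDF4, the seawater module, and LiveOcean's zrfun are importable as nc, sw, and zrfun, as in the snippets above):

fn = '/data/roms/output/ocean_his_0001.nc'  # hypothetical path
v_dict = get_full(fn)
for vn in ['alkalinity', 'TIC', 'salt', 'temp', 'pres']:
    print('%12s: shape %s' % (vn, str(v_dict[vn].shape)))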
Example #2
File: pfun.py Project: parkermac/LiveOcean
def get_zfull(ds, fn, which_grid):
    # get zfull field on "which_grid" ('rho', 'u', or 'v')
    G, S, T = zrfun.get_basic_info(fn)
    zeta = 0 * ds.variables['zeta'][:].squeeze()
    zr_mid = zrfun.get_z(G['h'], zeta, S, only_rho=True)
    zr_bot = -G['h'].reshape(1, G['M'], G['L']).copy()
    zr_top = zeta.reshape(1, G['M'], G['L']).copy()
    zfull0 = make_full((zr_bot, zr_mid, zr_top))
    if which_grid == 'rho':
        zfull = zfull0
    elif which_grid == 'u':
        zfull = zfull0[:, :, 0:-1] + np.diff(zfull0, axis=2)/2
    elif which_grid == 'v':
        zfull = zfull0[:, 0:-1, :] + np.diff(zfull0, axis=1)/2
    return zfull
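A hedged sketch of calling get_zfull (fn is a hypothetical history-file path); the result has N+2 vertical levels because make_full pads the mid-layer depths with the bottom (-h) and the free surface:

ds = nc.Dataset(fn)
zfull_rho = get_zfull(ds, fn, 'rho')  # shape (N+2, M, L)
zfull_u = get_zfull(ds, fn, 'u')      # averaged onto the u grid, (N+2, M, L-1)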
Example #3
File: pfun.py Project: parkermac/LiveOcean
def add_velocity_vectors(ax, ds, fn, v_scl=3, v_leglen=0.5, nngrid=80, zlev='top', center=(.7,.05)):
    # v_scl: scale velocity vector (smaller to get longer arrows)
    # v_leglen: m/s for velocity vector legend
    xc = center[0]
    yc = center[1]
    # GET DATA
    G = zrfun.get_basic_info(fn, only_G=True)
    if zlev == 'top':
        u = ds['u'][0, -1, :, :].squeeze()
        v = ds['v'][0, -1, :, :].squeeze()
    elif zlev == 'bot':
        u = ds['u'][0, 0, :, :].squeeze()
        v = ds['v'][0, 0, :, :].squeeze()
    else:
        zfull_u = get_zfull(ds, fn, 'u')
        zfull_v = get_zfull(ds, fn, 'v')
        u = get_laym(ds, zfull_u, ds['mask_u'][:], 'u', zlev).squeeze()
        v = get_laym(ds, zfull_v, ds['mask_v'][:], 'v', zlev).squeeze()
    # ADD VELOCITY VECTORS
    # set masked values to 0
    ud = u.data; ud[u.mask]=0
    vd = v.data; vd[v.mask]=0
    # create interpolant
    import scipy.interpolate as intp
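    # NOTE: interp2d was deprecated in SciPy 1.10 and removed in SciPy 1.14;
    # scipy.interpolate.RegularGridInterpolator is the usual replacement.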
    ui = intp.interp2d(G['lon_u'][0, :], G['lat_u'][:, 0], ud)
    vi = intp.interp2d(G['lon_v'][0, :], G['lat_v'][:, 0], vd)
    # create regular grid
    aaa = ax.axis()
    daax = aaa[1] - aaa[0]
    daay = aaa[3] - aaa[2]
    axrat = np.cos(np.deg2rad(aaa[2])) * daax / daay
    x = np.linspace(aaa[0], aaa[1], round(nngrid * axrat))
    y = np.linspace(aaa[2], aaa[3], nngrid)
    xx, yy = np.meshgrid(x, y)
    # interpolate to regular grid
    uu = ui(x, y)
    vv = vi(x, y)
    mask = uu != 0
    # plot velocity vectors
    ax.quiver(xx[mask], yy[mask], uu[mask], vv[mask],
        units='y', scale=v_scl, scale_units='y', color='k')
    ax.quiver([xc, xc] , [yc, yc], [v_leglen, v_leglen],
              [v_leglen, v_leglen],
        units='y', scale=v_scl, scale_units='y', color='k',
        transform=ax.transAxes)
    ax.text(xc+.05, yc, str(v_leglen) + ' $ms^{-1}$',
        horizontalalignment='left', transform=ax.transAxes)
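A hedged usage sketch, assuming ds is an open history-file Dataset and fn its path. The function reads the current axis limits via ax.axis() to build its regular grid, so set the map limits before calling it:

import matplotlib.pyplot as plt
fig, ax = plt.subplots()
ax.axis([-127, -122, 43, 50])  # hypothetical map limits [x0, x1, y0, y1]
add_velocity_vectors(ax, ds, fn, zlev='top')
plt.show()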
Example #4
File: pfun.py Project: parkermac/LiveOcean
def add_info(ax, fn, fs=12, loc='lower_right'):
    # put info on plot
    T = zrfun.get_basic_info(fn, only_T=True)
    dt_local = get_dt_local(T['tm'])
    if loc == 'lower_right':
        ax.text(.95, .075, dt_local.strftime('%Y-%m-%d'),
            horizontalalignment='right' , verticalalignment='bottom',
            transform=ax.transAxes, fontsize=fs)
        ax.text(.95, .065, dt_local.strftime('%H:%M') + ' ' + dt_local.tzname(),
            horizontalalignment='right', verticalalignment='top',
            transform=ax.transAxes, fontsize=fs)
    elif loc == 'upper_right':
        ax.text(.95, .935, dt_local.strftime('%Y-%m-%d'),
            horizontalalignment='right' , verticalalignment='bottom',
            transform=ax.transAxes, fontsize=fs)
        ax.text(.95, .925, dt_local.strftime('%H:%M') + ' ' + dt_local.tzname(),
            horizontalalignment='right', verticalalignment='top',
            transform=ax.transAxes, fontsize=fs)
    ax.text(.06, .04, fn.split('/')[-3],
        verticalalignment='bottom', transform=ax.transAxes,
        rotation='vertical', fontsize=fs)
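A hedged sketch, given an existing axes ax. Note that add_info labels the plot with fn.split('/')[-3], so it assumes a path at least three directories deep (the run-directory name), as in this hypothetical example:

fn = '/roms/output/cas6_v3_lo8b/f2019.07.04/ocean_his_0001.nc'  # hypothetical
add_info(ax, fn, fs=10, loc='upper_right')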
Example #5
File: pfun.py Project: parkermac/LiveOcean
def add_velocity_streams(ax, ds, fn, nngrid=80, zlev=0):
    # slower than adding quivers, but informative in a different way
    # GET DATA
    G = zrfun.get_basic_info(fn, only_G=True)
    if zlev == 0:
        u = ds['u'][0, -1, :, :].squeeze()
        v = ds['v'][0, -1, :, :].squeeze()
    else:
        zfull_u = get_zfull(ds, fn, 'u')
        zfull_v = get_zfull(ds, fn, 'v')
        u = get_laym(ds, zfull_u, ds['mask_u'][:], 'u', zlev).squeeze()
        v = get_laym(ds, zfull_v, ds['mask_v'][:], 'v', zlev).squeeze()
    # ADD VELOCITY STREAMS
    # set masked values to 0
    ud = u.data; ud[u.mask]=0
    vd = v.data; vd[v.mask]=0
    # create interpolant
    import scipy.interpolate as intp
    ui = intp.interp2d(G['lon_u'][0, :], G['lat_u'][:, 0], ud)
    vi = intp.interp2d(G['lon_v'][0, :], G['lat_v'][:, 0], vd)
    # create regular grid
    aaa = ax.axis()
    daax = aaa[1] - aaa[0]
    daay = aaa[3] - aaa[2]
    axrat = np.cos(np.deg2rad(aaa[2])) * daax / daay
    x = np.linspace(aaa[0], aaa[1], round(nngrid * axrat))
    y = np.linspace(aaa[2], aaa[3], nngrid)
    xx, yy = np.meshgrid(x, y)
    # interpolate to regular grid
    uu = ui(x, y)
    vv = vi(x, y)
    mask = uu != 0
    # plot velocity streams
    spd = np.sqrt(uu**2 + vv**2)
    ax.streamplot(x, y, uu, vv, density = 6,
    color='k', linewidth=spd*3, arrowstyle='-')
Example #6
import tef_fun
from importlib import reload
reload(tef_fun)

import flux_fun
reload(flux_fun)

# select output location
outdir0 = Ldir['LOo'] + 'tef/'
Lfun.make_dir(outdir0)
outdir = outdir0 + 'volumes_' + Ldir['gridname'] + '/'
Lfun.make_dir(outdir)

fng = Ldir['grid'] + 'grid.nc'
G = zrfun.get_basic_info(fng, only_G=True)
h = np.ma.masked_where(G['mask_rho'] == False, G['h'])
x = G['lon_rho'].data
y = G['lat_rho'].data
xp = G['lon_psi'].data
yp = G['lat_psi'].data
m = G['mask_rho']

DA = G['DX'] * G['DY']

# get the DataFrame of all sections
sect_df = tef_fun.get_sect_df()

testing = False

# segment definitions, assembled by looking at the figure
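The snippet is truncated at the segment definitions. Before that point, DA holds the cell areas and h is masked over land, so a hedged sketch of a segment-style area/volume calculation that could follow (assuming mask_rho is boolean, True over water, as in Example #9):

volume = (DA * h).sum()  # m^3 over water cells; masked (land) cells drop out
area = DA[m].sum()       # total water area, m^2
print('area = %0.3e m2, volume = %0.3e m3' % (area, volume))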
Example #7
opth = os.path.abspath('../ocn1/')
if opth not in sys.path:
    sys.path.append(opth)
import Ofun_nc
import Ofun
from importlib import reload
reload(Ofun_nc)
reload(Ofun)

start_time = datetime.now()
in_dir = Ldir['roms'] + 'output/cas3_v0_lo6m/f' + Ldir['date_string']
nc_dir = Ldir['LOogf_f']

# get grid and S info
G = zrfun.get_basic_info(Ldir['grid'] + 'grid.nc', only_G=True)
S_info_dict = Lfun.csv_to_dict(Ldir['grid'] + 'S_COORDINATE_INFO.csv')
S = zrfun.get_S(S_info_dict)
# get list of files to work on
h_list_full = os.listdir(in_dir)
h_list = [item for item in h_list_full if 'ocean_his' in item]
h_list.sort()

# debugging
testing = False
if testing:
    h_list = h_list[:2]

# get list of times
t_list = []
for h in h_list:
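    # (snippet truncated here; a plausible loop body, assuming T['tm'] holds
    # the model time as a datetime, the key used in Examples #4 and #25)
    T = zrfun.get_basic_info(in_dir + '/' + h, only_T=True)
    t_list.append(T['tm'])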
Example #8
import numpy as np
import shutil

# generate a list of all the files
in_dir = (Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/f' +
          Ldir['date_string'] + '/')
fn_list_raw = os.listdir(in_dir)
fn_list = [(in_dir + ff) for ff in fn_list_raw if 'ocean_his' in ff]
fn_list.sort()
if len(fn_list) != 73:
    print('Warning: We have %d history files' % (len(fn_list)))

if testing == True:
    fn_list = fn_list[:7]

G = zrfun.get_basic_info(fn_list[0], only_G=True)

lon_vec = G['lon_rho'][0, :]
lat_vec = G['lat_rho'][:, 0]

# test values for cascadia1
# XBS = [55, 80]  # x-limits
# YBS = [295, 325]  # y-limits

# Bounds of subdomain (rho grid) from Susan Allen 2018_11
lon0 = -125.016452048434
lon1 = -124.494612925929
lat0 = 48.3169463809796
lat1 = 48.7515055163539

# find indices containing these bounds
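# (snippet truncated here; a hedged sketch of the index search, assuming
# lon_vec and lat_vec increase monotonically; zfun.get_interpolant, as in
# Example #10, would also work)
i0 = max(np.searchsorted(lon_vec, lon0) - 1, 0)
i1 = np.searchsorted(lon_vec, lon1) + 1
j0 = max(np.searchsorted(lat_vec, lat0) - 1, 0)
j1 = np.searchsorted(lat_vec, lat1) + 1
h_sub = G['h'][j0:j1, i0:i1]  # subdomain bathymetry on the rho grid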
Example #9
import numpy as np
import netCDF4 as nc4
from scipy.spatial import cKDTree
import pickle
from time import time

# Shared Constants
#
# criterion for deciding if particles are on land
maskr_crit = 0.5  # (maskr = 1 in water, 0 on land) [0.5 seems good]

# NEW CODE for nearest neighbor interpolation
Ldir = Lfun.Lstart()
EI = Lfun.csv_to_dict(Ldir['LOo'] + 'tracks2/exp_info.csv')
G, S, T = zrfun.get_basic_info(EI['fn00'])
Maskr = G['mask_rho']  # True over water
Masku = G['mask_u']  # True over water
Maskv = G['mask_v']  # True over water
Maskr3 = np.tile(G['mask_rho'].reshape(1, G['M'], G['L']), [S['N'], 1, 1])
Masku3 = np.tile(G['mask_u'].reshape(1, G['M'], G['L'] - 1), [S['N'], 1, 1])
Maskv3 = np.tile(G['mask_v'].reshape(1, G['M'] - 1, G['L']), [S['N'], 1, 1])
Maskw3 = np.tile(G['mask_rho'].reshape(1, G['M'], G['L']), [S['N'] + 1, 1, 1])
# load pre-made trees
tree_dir = Ldir['LOo'] + 'tracker_trees/' + EI['gridname'] + '/'
xyT_rho = pickle.load(open(tree_dir + 'xyT_rho.p', 'rb'))
xyT_u = pickle.load(open(tree_dir + 'xyT_u.p', 'rb'))
xyT_v = pickle.load(open(tree_dir + 'xyT_v.p', 'rb'))
xyT_rho_un = pickle.load(open(tree_dir + 'xyT_rho_un.p', 'rb'))

xyzT_rho = pickle.load(open(tree_dir + 'xyzT_rho.p', 'rb'))
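A hedged sketch of how one of these trees might have been built and queried for nearest-neighbor lookups (note that cKDTree.query's n_jobs keyword, used elsewhere in this module, was renamed workers in SciPy 1.6+):

# stack the water-point coordinates into an (npoints, 2) array and build a tree
xy_rho = np.array((G['lon_rho'][Maskr], G['lat_rho'][Maskr])).T
tree = cKDTree(xy_rho)
# indices of the nearest water point for hypothetical particle positions
pxy = np.array((plon, plat)).T  # plon, plat: particle longitude/latitude arrays
dist, ind = tree.query(pxy)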
Example #10
File: svar_1.py Project: eabrasse/etools
    print(str(tt))

    # get volume-integrated quantities from the history files
    h_list = os.listdir(dir0 + f_dir)
    h_list.sort()
    h_list = [x for x in h_list if x[:9] == 'ocean_his']
    if tt == 0:
        pass
    else:
        h_list = h_list[1:]  # drop the zero hour for all but the first day
    #
    for hi in h_list:
        fn = dir0 + f_dir + '/' + hi
        ds = nc.Dataset(fn)
        if tt == 0:
            G, S, T = zrfun.get_basic_info(fn)
            lon_vec = G['lon_rho'][0, :].squeeze()
            lat_vec = G['lat_rho'][:, 0].squeeze()
            ii0, ii1, ifr = zfun.get_interpolant(np.array([0.02, 1.5]),
                                                 lon_vec)
            jj0, jj1, jfr = zfun.get_interpolant(np.array([44.8, 45.2]),
                                                 lat_vec)
            i0 = ii0[0]
            i1 = ii1[1]
            j0 = jj0[0]
            j1 = jj1[1]
            h = G['h'][j0:j1, i0:i1]
            dx = G['DX'][j0:j1, i0:i1]
            dy = G['DY'][j0:j1, i0:i1]
            da = dx * dy
            ny, nx = da.shape
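The snippet is truncated; a hedged sketch of the volume-integrated quantity its comments describe, using the subdomain arrays defined above (zrfun.get_z with only_w=True returns the layer interfaces, as in Example #16):

salt = ds['salt'][0, :, j0:j1, i0:i1]
zeta = ds['zeta'][0, j0:j1, i0:i1]
zw = zrfun.get_z(h, zeta, S, only_w=True)
dz = np.diff(zw, axis=0)           # layer thicknesses, shape (NZ, ny, nx)
salt_int = (salt * dz * da).sum()  # volume integral of salinity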
Example #11
def get_layer(fn, NZ=-1, aa=[], print_info=False):
    # function to extract and process fields from a history file
    # returning a dict of arrays that can be passed to CO2SYS.m
    
    # default is to get full surface field
    
    ds = nc.Dataset(fn)
    G, S, T = zrfun.get_basic_info(fn)
    if len(aa) == 4:
        # find indices that encompass region aa
        i0 = zfun.find_nearest_ind(G['lon_rho'][0,:], aa[0]) - 1
        i1 = zfun.find_nearest_ind(G['lon_rho'][0,:], aa[1]) + 2
        j0 = zfun.find_nearest_ind(G['lat_rho'][:,0], aa[2]) - 1
        j1 = zfun.find_nearest_ind(G['lat_rho'][:,0], aa[3]) + 2
    else:
        # full region
        i0 = 0; j0 = 0
        j1, i1 = G['lon_rho'].shape
        i1 += 1; j1 += 1
        
    plon = G['lon_psi'][j0:j1-1, i0:i1-1]
    plat = G['lat_psi'][j0:j1-1, i0:i1-1]

    # extract needed info from history file
    v_dict = dict()
    if print_info:
        print('\nINPUT Variable Info:')
    for vn in ['alkalinity', 'TIC', 'salt', 'temp','rho']:
        v = ds[vn][0,NZ, j0:j1, i0:i1]
        v = fillit(v)
        v_dict[vn] = v
        if print_info:
            name = ds[vn].long_name
            try:
                units = ds[vn].units
            except AttributeError:
                units = ''
            vmax = np.nanmax(v)
            vmin = np.nanmin(v)
            v_dict[vn] = v
            print('%25s (%25s) max = %6.1f min = %6.1f' %
                (name, units, vmax, vmin))
                
    # create depth, pressure, and in situ temperature
    h = ds['h'][j0:j1, i0:i1]
    h = fillit(h)
    lat = G['lat_rho'][j0:j1, i0:i1]
    z_rho = zrfun.get_z(h, 0*h, S, only_rho=True)
    depth = -z_rho[NZ, :, :].squeeze()
    pres = sw.pres(depth, lat)
    v_dict['pres'] = pres
    # assume potential temperature is close enough
    # temp = sw.ptmp(v_dict['salt'], v_dict['temp'], 0, v_dict['pres'])
    # v_dict['temp'] = temp
    
    # convert from umol/L to umol/kg using in situ density
    v_dict['alkalinity'] = 1000 * v_dict['alkalinity'] / (v_dict['rho'] + 1000)
    v_dict['TIC'] = 1000 * v_dict['TIC'] / (v_dict['rho'] + 1000)
    
    # clean up
    v_dict.pop('rho') # no longer needed, so don't pass to worker
    ds.close()
    
    return v_dict, plon, plat
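A hedged usage sketch for get_layer (fn is a hypothetical history-file path); NZ=-1, the default, selects the surface layer and NZ=0 the bottom, and aa is a hypothetical [lon0, lon1, lat0, lat1] box:

aa = [-125.0, -124.5, 48.3, 48.75]
v_dict, plon, plat = get_layer(fn, NZ=0, aa=aa, print_info=True)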
Example #12
def add_velocity_vectors(ax,
                         ds,
                         fn,
                         v_scl=3,
                         v_leglen=0.5,
                         nngrid=80,
                         zlev='top',
                         center=(.8, .05)):
    # v_scl: scale velocity vector (smaller to get longer arrows)
    # v_leglen: m/s for velocity vector legend
    xc = center[0]
    yc = center[1]
    # GET DATA
    G = zrfun.get_basic_info(fn, only_G=True)
    if zlev == 'top':
        u = ds['u'][0, -1, :, :].squeeze()
        v = ds['v'][0, -1, :, :].squeeze()
    elif zlev == 'bot':
        u = ds['u'][0, 0, :, :].squeeze()
        v = ds['v'][0, 0, :, :].squeeze()
    else:
        zfull_u = get_zfull(ds, fn, 'u')
        zfull_v = get_zfull(ds, fn, 'v')
        u = get_laym(ds, zfull_u, ds['mask_u'][:], 'u', zlev).squeeze()
        v = get_laym(ds, zfull_v, ds['mask_v'][:], 'v', zlev).squeeze()
    # ADD VELOCITY VECTORS
    # set masked values to 0
    ud = u.data
    ud[u.mask] = 0
    vd = v.data
    vd[v.mask] = 0
    # create interpolant
    import scipy.interpolate as intp
    ui = intp.interp2d(G['lon_u'][0, :], G['lat_u'][:, 0], ud)
    vi = intp.interp2d(G['lon_v'][0, :], G['lat_v'][:, 0], vd)
    # create regular grid
    aaa = ax.axis()
    daax = aaa[1] - aaa[0]
    daay = aaa[3] - aaa[2]
    axrat = np.cos(np.deg2rad(aaa[2])) * daax / daay
    x = np.linspace(aaa[0], aaa[1], int(round(nngrid * axrat)))
    y = np.linspace(aaa[2], aaa[3], int(nngrid))
    xx, yy = np.meshgrid(x, y)
    # interpolate to regular grid
    uu = ui(x, y)
    vv = vi(x, y)
    mask = uu != 0
    # plot velocity vectors
    ax.quiver(xx[mask],
              yy[mask],
              uu[mask],
              vv[mask],
              units='y',
              scale=v_scl,
              scale_units='y',
              color='k')
    ax.quiver([xc, xc], [yc, yc], [v_leglen, v_leglen], [v_leglen, v_leglen],
              units='y',
              scale=v_scl,
              scale_units='y',
              color='k',
              transform=ax.transAxes)
    ax.text(xc + .05,
            yc,
            str(v_leglen) + ' m/s',
            horizontalalignment='left',
            transform=ax.transAxes)
Example #13
import numpy as np
import shutil

# generate a list of all the files
in_dir = (Ldir['roms'] + 'output/' + Ldir['gtagex'] +
    '/f' + Ldir['date_string'] + '/')
fn_list_raw = os.listdir(in_dir)
fn_list = [(in_dir + ff) for ff in fn_list_raw if 'ocean_his' in ff]
fn_list.sort()
if len(fn_list) != 73:
    print('Warning: We have %d history files' % (len(fn_list)))
    
if testing == True:
    fn_list = fn_list[:7]

G = zrfun.get_basic_info(fn_list[0], only_G=True)

lon_vec = G['lon_rho'][0,:]
lat_vec = G['lat_rho'][:,0]

# test values for cascadia1
# XBS = [55, 80]  # x-limits
# YBS = [295, 325]  # y-limits

# Bounds of subdomain (rho grid) from Susan Allen 2018_11
lon0 = -125.016452048434
lon1 = -124.494612925929
lat0 = 48.3169463809796
lat1 = 48.7515055163539

# find indices containing these bounds
Example #14
for t in range(ndays):

    fn = f_list[t] + '/ocean_his_0001.nc'

    oceanfn = dir0 + fn

    dso = nc4.Dataset(oceanfn)

    if t == 0:
        #get grid info from ocean
        lonr = dso['lon_rho'][:]
        latr = dso['lat_rho'][:]
        h = dso['h'][:]
        zeta = dso['zeta'][:]
        maskr = dso['mask_rho'][:]
        S = zrfun.get_basic_info(oceanfn, only_S=True)
        zr = zrfun.get_z(h, zeta, S, only_rho=True)
        NZ, NY, NX = np.shape(zr)
        midlat = zfun.find_nearest_ind(latr[:, 0], 45.0)

    bgsalt = dso['salt'][:]
    surfsalt = bgsalt[0, -1, :, :].squeeze()
    crosssalt = bgsalt[0, :, midlat, :].squeeze()
    nt = t * 24

    lon1 = lon0[nt, tma]
    lat1 = lat0[nt, tma]
    z1 = z0[nt, tma]

    fig = plt.figure(figsize=(12, 4))
Example #15
    #%% extrapolate
    lon, lat, z, L, M, N, X, Y = Ofun.get_coords(fh_dir)
    a = os.listdir(fh_dir)
    a.sort()
    aa = [item for item in a if item[:2]=='fh']
    for fn in aa:
        print('-Extrapolating ' + fn)
        in_fn = fh_dir + fn
        V = Ofun.get_extrapolated(in_fn, L, M, N, X, Y, lon, lat, z, Ldir,
            add_CTD=add_CTD)
        pickle.dump(V, open(fh_dir + 'x' + fn, 'wb'))

    # and interpolate to ROMS format
    # get grid and S info
    G = zrfun.get_basic_info(Ldir['grid'] + 'grid.nc', only_G=True)
    S_info_dict = Lfun.csv_to_dict(Ldir['grid'] + 'S_COORDINATE_INFO.csv')
    S = zrfun.get_S(S_info_dict)
    # get list of files to work on
    a = os.listdir(fh_dir)
    a.sort()
    aa = [item for item in a if item[:3]=='xfh']
    # HyCOM grid info
    lon, lat, z, L, M, N, X, Y = Ofun.get_coords(fh_dir)
    # load a dict of hycom fields
    dt_list = []
    count = 0
    c_dict = dict()

    zinds = Ofun.get_zinds(G['h'], S, z)
    for fn in aa:
Example #16
#next, try to calculate it using u_accel = <delta*du/dt> / <delta> + <u*ddelta/dt> / <delta>
# need u, delta, udelta and dt, du, ddelta

dt = ds_his1['ocean_time'][:] - ds_his0['ocean_time'][:]  #dt
du = ds_his1['u'][0, :] - ds_his0['u'][0, :]  #du

NZ, NY, NX = du.shape

#delta is a small pain to calculate :/
on000 = 1 / ds_his0['pn'][:]
on00 = 0.5 * (on000[:, :-1] + on000[:, 1:])
on0 = np.tile(on00, (NZ, 1, 1))
zeta0 = ds_his0['zeta'][:]
h = ds_his0['h'][:]
S0 = zrfun.get_basic_info(fn_his0, only_S=True)
zw0 = zrfun.get_z(h, zeta0, S0, only_w=True)
Hz00 = zw0[1:, :, :] - zw0[:-1, :, :]
Hz0 = 0.5 * (Hz00[:, :, :-1] + Hz00[:, :, 1:])
delta0 = Hz0 * on0  #delta0

on100 = 1 / ds_his1['pn'][:]
on10 = 0.5 * (on100[:, :-1] + on100[:, 1:])
on1 = np.tile(on10, (NZ, 1, 1))
zeta1 = ds_his1['zeta'][:]
S1 = zrfun.get_basic_info(fn_his1, only_S=True)
zw1 = zrfun.get_z(h, zeta1, S1, only_w=True)
Hz10 = zw1[1:, :, :] - zw1[:-1, :, :]
Hz1 = 0.5 * (Hz10[:, :, :-1] + Hz10[:, :, 1:])
delta1 = Hz1 * on1  #delta1
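# (snippet ends here; a plausible way to assemble the terms in the opening
# comment, reading <> as the average over the two history files; the first
# term then reduces to du/dt)
u_avg = 0.5 * (ds_his0['u'][0, :] + ds_his1['u'][0, :])
delta_avg = 0.5 * (delta0 + delta1)
ddelta = delta1 - delta0
u_accel = (delta_avg * du / dt) / delta_avg + (u_avg * ddelta / dt) / delta_avg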
Example #17
def get_cast(gridname, tag, ex_name, date_string, station, lon_str, lat_str):

    # get the dict Ldir
    Ldir = Lfun.Lstart(gridname, tag)
    Ldir['gtagex'] = Ldir['gtag'] + '_' + ex_name
    Ldir['date_string'] = date_string
    Ldir['station'] = station
    Ldir['lon_str'] = lon_str
    Ldir['lat_str'] = lat_str

    # make sure the output directory exists
    outdir0 = Ldir['LOo'] + 'cast/'
    Lfun.make_dir(outdir0)
    outdir = outdir0 + Ldir['gtagex'] + '/'
    Lfun.make_dir(outdir)

    dt = Ldir['date_string']

    #%% function definitions

    def get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy):
        dims = ds.variables[vv].dimensions
        if 'eta_rho' in dims:
            grd = 'rho'
        elif 'eta_u' in dims:
            grd = 'u'
        elif 'eta_v' in dims:
            grd = 'v'
        else:
            print('grid error!')
        xi0 = Xi0[grd]
        yi0 = Yi0[grd]
        xi1 = Xi1[grd]
        yi1 = Yi1[grd]
        aix = Aix[grd]
        aiy = Aiy[grd]

        xi01 = np.array([xi0, xi1]).flatten()
        yi01 = np.array([yi0, yi1]).flatten()
        return xi01, yi01, aix, aiy

    #%% set up for the extraction

    # target position
    Lon = np.array(float(Ldir['lon_str']))
    Lat = np.array(float(Ldir['lat_str']))

    # get grid info
    indir = Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/f' + dt + '/'
    fn = indir + 'ocean_his_0021.nc'  # approx. noon local standard time

    if os.path.isfile(fn):
        pass
    else:
        print('Not found: ' + fn)
        return

    G = zrfun.get_basic_info(fn, only_G=True)
    S = zrfun.get_basic_info(fn, only_S=True)

    lon = G['lon_rho']
    lat = G['lat_rho']
    mask = G['mask_rho']
    xvec = lon[0, :].flatten()
    yvec = lat[:, 0].flatten()

    i0, i1, frx = zfun.get_interpolant(np.array([float(Ldir['lon_str'])]),
                                       xvec)
    j0, j1, fry = zfun.get_interpolant(np.array([float(Ldir['lat_str'])]),
                                       yvec)
    i0 = int(i0)
    j0 = int(j0)
    # find indices of nearest good point
    if mask[j0, i0] == 1:
        print('- ' + station + ': point OK')
    elif mask[j0, i0] == 0:
        print('- ' + station + ': point masked')
        i0, j0 = get_ij_good(lon, lat, xvec, yvec, i0, j0, mask)
        new_lon = xvec[i0]
        new_lat = yvec[j0]
        Lon = np.array(new_lon)
        Lat = np.array(new_lat)

    # get interpolants for this point
    Xi0 = dict()
    Yi0 = dict()
    Xi1 = dict()
    Yi1 = dict()
    Aix = dict()
    Aiy = dict()
    for grd in ['rho', 'u', 'v']:
        xx = G['lon_' + grd][1, :]
        yy = G['lat_' + grd][:, 1]
        xi0, xi1, xfr = zfun.get_interpolant(Lon, xx, extrap_nan=True)
        yi0, yi1, yfr = zfun.get_interpolant(Lat, yy, extrap_nan=True)
        Xi0[grd] = xi0
        Yi0[grd] = yi0
        Xi1[grd] = xi1
        Yi1[grd] = yi1
        # create little arrays that are used in the actual interpolation
        Aix[grd] = np.array([1 - xfr, xfr]).reshape((1, 1, 2))
        Aiy[grd] = np.array([1 - yfr, yfr]).reshape((1, 2))

    # generating some lists
    v0_list = ['h', 'lon_rho', 'lat_rho', 'lon_u', 'lat_u', 'lon_v', 'lat_v']
    v1_list = ['ocean_time']
    v2_list = []
    v3_list_rho = []
    v3_list_w = []
    ds = nc.Dataset(fn)
    for vv in ds.variables:
        vdim = ds.variables[vv].dimensions
        if (('ocean_time' in vdim) and ('s_rho' not in vdim)
                and ('s_w' not in vdim) and (vv != 'ocean_time')):
            v2_list.append(vv)
        elif (('ocean_time' in vdim) and ('s_rho' in vdim)):
            v3_list_rho.append(vv)
        elif (('ocean_time' in vdim) and ('s_w' in vdim)):
            v3_list_w.append(vv)

    V = dict()
    V_long_name = dict()
    V_units = dict()
    v_all_list = v0_list + v1_list + v2_list + v3_list_rho + v3_list_w
    for vv in v_all_list:
        V[vv] = np.array([])
        try:
            V_long_name[vv] = ds.variables[vv].long_name
        except:
            V_long_name[vv] = ''
        try:
            V_units[vv] = ds.variables[vv].units
        except:
            V_units[vv] = ''

    # get static variables
    for vv in v0_list:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][yi01, xi01].squeeze()
        V[vv] = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)

    ds.close()

    #%% extract time-dependent fields

    print('-- Working on date: ' + dt)
    sys.stdout.flush()

    ds = nc.Dataset(fn)
    for vv in v1_list:
        vtemp = ds.variables[vv][:].squeeze()
        V[vv] = np.append(V[vv], vtemp)
    for vv in v2_list:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        V[vv] = np.append(V[vv], vtemp)
    for vv in v3_list_rho:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        V[vv] = vtemp.reshape((S['N'], 1))
    for vv in v3_list_w:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = (aiy * ((aix * vvtemp).sum(-1))).sum(-1)
        V[vv] = vtemp.reshape((S['N'] + 1, 1))

    ds.close()

    # create z_rho and z_w (has to be done after we have V['zeta'])
    hh = V['h'][:] * np.ones_like(V['zeta'])
    z_rho, z_w = zrfun.get_z(hh, V['zeta'][:], S)
    V['hh'] = hh
    V_long_name['hh'] = 'bottom depth (positive down) as a vector'
    V_units['hh'] = 'm'
    V['z_rho'] = z_rho
    V_long_name['z_rho'] = 'z on rho points (positive up)'
    V_units['z_rho'] = 'm'
    V['z_w'] = z_w
    V_long_name['z_w'] = 'z on w points (positive up)'
    V_units['z_w'] = 'm'
    v2_list.append('hh')
    v3_list_rho.append('z_rho')
    v3_list_w.append('z_w')

    #%% save the output to NetCDF
    out_fn = (outdir + Ldir['station'] + '_' + Ldir['date_string'] + '.nc')
    # get rid of the old version, if it exists
    try:
        os.remove(out_fn)
    except OSError:
        pass  # assume error was because the file did not exist
    foo = nc.Dataset(out_fn, 'w')

    N = S['N']
    NT = len(V['ocean_time'][:])

    foo.createDimension('scalar', 1)
    foo.createDimension('s_rho', N)
    foo.createDimension('s_w', N + 1)
    foo.createDimension('ocean_time', NT)

    for vv in v0_list:
        v_var = foo.createVariable(vv, float, ('scalar'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v1_list:
        v_var = foo.createVariable(vv, float, ('ocean_time', ))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v2_list:
        v_var = foo.createVariable(vv, float, ('ocean_time', ))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v3_list_rho:
        v_var = foo.createVariable(vv, float, ('s_rho', 'ocean_time'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v3_list_w:
        v_var = foo.createVariable(vv, float, ('s_w', 'ocean_time'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]

    foo.close()
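A hedged usage sketch with hypothetical arguments; the gridname/tag/ex_name triple follows the gtagex naming pattern seen elsewhere on this page (e.g. cas6_v3_lo8b):

get_cast('cas6', 'v3', 'lo8b', '2019.07.04', 'HypotheticalStation',
         '-124.5', '48.5')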
Example #18
fnd = [x for x in fn_list if x[-11:-8] == 'dia']
fna = [x for x in fn_list if x[-11:-8] == 'avg']

oceanfnd = dir0 + f_list[0] + '/' + fnd[0]
oceanfna = dir0 + f_list[0] + '/' + fna[0]

dsd = nc.Dataset(oceanfnd)
dsa = nc.Dataset(oceanfna)

# load the remaining variables
u_prsgrd = dsd['u_prsgrd'][0, 0, :, :]
v = dsa['v'][0, 0, :, :]
salt = dsa['salt'][0, -1, :, :]

#get grid info from ocean
G, S, T = zrfun.get_basic_info(oceanfnd)
lonu = G['lon_u'][:]
lonv = G['lon_v'][:]
lonr = G['lon_rho'][:]
latu = G['lat_u'][:]
latv = G['lat_v'][:]
latr = G['lat_rho'][:]
masku = G['mask_u'][:]
maskr = G['mask_rho'][:]
maskv = G['mask_v'][:]

a_ll = [-1.0, 0.02, latr.min(), latr.max()]

bb = efun.box_inds(-0.65, -0.1, 44.2, 45.8, oceanfna)
bu = np.array([[bb['lonu0'], bb['lonu1']], [bb['latu0'], bb['latu1']]])
bv = np.array([[bb['lonv0'], bb['lonv1']], [bb['latv0'], bb['latv1']]])
Example #19
def get_tracks(fn_list, plon0, plat0, pcs0, TR, trim_loc=False):
    """
    This is the main function doing the particle tracking.
    """
    # unpack items needed from TR
    surface = not TR['3d']
    turb = TR['turb']
    ndiv = TR['ndiv']
    windage = TR['windage']

    # get basic info
    G = zrfun.get_basic_info(fn_list[0], only_G=True)
    maskr = np.ones_like(G['mask_rho'])  # G['mask_rho'] = True in water
    maskr[G['mask_rho'] == False] = 0  # maskr = 1 in water
    maskr = maskr.flatten()
    S = zrfun.get_basic_info(fn_list[0], only_S=True)

    # minimum grid sizes
    dxg = np.diff(G['lon_rho'][0, :]).min()
    dyg = np.diff(G['lat_rho'][:, 0]).min()

    # get time vector of history files
    NT = len(fn_list)
    NTS = TR['sph'] * (NT - 1) + 1
    rot = np.nan * np.ones(NT)
    counter = 0
    for fn in fn_list:
        ds = nc4.Dataset(fn)
        rot[counter] = ds.variables['ocean_time'][:].squeeze()
        counter += 1
        ds.close()
    delta_t_his = rot[1] - rot[0]  # seconds between saves
    delt = delta_t_his / ndiv  # time step in seconds
    delt_save = delta_t_his / TR['sph']
    rot_save = np.linspace(rot[0], rot[-1], NTS)

    # these lists are used internally to get other variables as needed
    # (they must all be present in the history files)
    vn_list_vel = ['u', 'v', 'w']
    vn_list_zh = ['zeta', 'h']
    vn_list_wind = ['Uwind', 'Vwind']
    vn_list_other = ['salt', 'temp'] + vn_list_zh + vn_list_vel
    if windage > 0:
        vn_list_other = vn_list_other + vn_list_wind
    # plist_main is what ends up written to output
    plist_main = ['lon', 'lat', 'cs', 'ot', 'z'] + vn_list_other

    # Step through times.
    #
    counter_his = 0
    for counter_his in range(len(fn_list) - 1):

        it0 = TR['sph'] * counter_his

        # get Datasets
        fn0 = fn_list[counter_his]
        fn1 = fn_list[counter_his + 1]
        ds0 = nc4.Dataset(fn0, mode='r')
        ds1 = nc4.Dataset(fn1, mode='r')

        # prepare the fields for nearest neighbor interpolation
        tt0 = time()
        if counter_his == 0:
            h = G['h']
            hf = h[Maskr].data
            if surface == True:
                u0 = ds0['u'][0, -1, :, :]
                u1 = ds1['u'][0, -1, :, :]
                uf0 = u0[Masku].data
                uf1 = u1[Masku].data
                v0 = ds0['v'][0, -1, :, :]
                v1 = ds1['v'][0, -1, :, :]
                vf0 = v0[Maskv].data
                vf1 = v1[Maskv].data
                wf0 = 0
                wf1 = 0
                s0 = ds0['salt'][0, -1, :, :]
                s1 = ds1['salt'][0, -1, :, :]
                sf0 = s0[Maskr].data
                sf1 = s1[Maskr].data
                t0 = ds0['temp'][0, -1, :, :]
                t1 = ds1['temp'][0, -1, :, :]
                tf0 = t0[Maskr].data
                tf1 = t1[Maskr].data
                if windage > 0:
                    Uwind0 = ds0['Uwind'][0, :, :]
                    Uwind1 = ds1['Uwind'][0, :, :]
                    Uwindf0 = Uwind0[Maskr].data
                    Uwindf1 = Uwind1[Maskr].data
                    Vwind0 = ds0['Vwind'][0, :, :]
                    Vwind1 = ds1['Vwind'][0, :, :]
                    Vwindf0 = Vwind0[Maskr].data
                    Vwindf1 = Vwind1[Maskr].data
            else:
                u0 = ds0['u'][0, :, :, :]
                u1 = ds1['u'][0, :, :, :]
                uf0 = u0[Masku3].data
                uf1 = u1[Masku3].data
                v0 = ds0['v'][0, :, :, :]
                v1 = ds1['v'][0, :, :, :]
                vf0 = v0[Maskv3].data
                vf1 = v1[Maskv3].data
                w0 = ds0['w'][0, :, :, :]
                w1 = ds1['w'][0, :, :, :]
                wf0 = w0[Maskw3].data
                wf1 = w1[Maskw3].data
                s0 = ds0['salt'][0, :, :, :]
                s1 = ds1['salt'][0, :, :, :]
                sf0 = s0[Maskr3].data
                sf1 = s1[Maskr3].data
                t0 = ds0['temp'][0, :, :, :]
                t1 = ds1['temp'][0, :, :, :]
                tf0 = t0[Maskr3].data
                tf1 = t1[Maskr3].data
                if turb == True:
                    AKs0 = ds0['AKs'][0, :, :, :]
                    AKs1 = ds1['AKs'][0, :, :, :]
                    AKsf0 = AKs0[Maskw3].data
                    AKsf1 = AKs1[Maskw3].data
            z0 = ds0['zeta'][0, :, :]
            z1 = ds1['zeta'][0, :, :]
            zf0 = z0[Maskr].data
            zf1 = z1[Maskr].data
            print('Prepare fields for tree %0.4f sec' % (time() - tt0))
        else:
            # subsequent time steps
            if surface == True:
                u1 = ds1['u'][0, -1, :, :]
                uf0 = uf1.copy()
                uf1 = u1[Masku].data
                v1 = ds1['v'][0, -1, :, :]
                vf0 = vf1.copy()
                vf1 = v1[Maskv].data
                wf0 = 0
                wf1 = 0
                s1 = ds1['salt'][0, -1, :, :]
                sf0 = sf1.copy()
                sf1 = s1[Maskr].data
                t1 = ds1['temp'][0, -1, :, :]
                tf0 = tf1.copy()
                tf1 = t1[Maskr].data
                if windage > 0:
                    Uwind1 = ds1['Uwind'][0, :, :]
                    Uwindf0 = Uwindf1
                    Uwindf1 = Uwind1[Maskr].data
                    Vwind1 = ds1['Vwind'][0, :, :]
                    Vwindf0 = Vwindf1
                    Vwindf1 = Vwind1[Maskr].data
            else:
                u1 = ds1['u'][0, :, :, :]
                uf0 = uf1.copy()
                uf1 = u1[Masku3].data
                v1 = ds1['v'][0, :, :, :]
                vf0 = vf1.copy()
                vf1 = v1[Maskv3].data
                w1 = ds1['w'][0, :, :, :]
                wf0 = wf1.copy()
                wf1 = w1[Maskw3].data
                s1 = ds1['salt'][0, :, :, :]
                sf0 = sf1.copy()
                sf1 = s1[Maskr3].data
                t1 = ds1['temp'][0, :, :, :]
                tf0 = tf1.copy()
                tf1 = t1[Maskr3].data
                if turb == True:
                    AKs1 = ds1['AKs'][0, :, :, :]
                    AKsf0 = AKsf1
                    AKsf1 = AKs1[Maskw3].data
            z1 = ds1['zeta'][0, :, :]
            zf0 = zf1.copy()
            zf1 = z1[Maskr].data
            #print('Prepare subsequent fields for tree %0.4f sec' % (time()-tt0))
        ds0.close()
        ds1.close()

        if counter_his == 0:
            if trim_loc == True:
                # remove points on land
                xy = np.array((plon0, plat0)).T
                #print(' ++ making pmask')
                #print(maskr.shape)
                pmask = maskr[xyT_rho_un.query(xy, n_jobs=-1)[1]]
                #print(pmask)
                # keep only points with pmask >= maskr_crit
                pcond = pmask >= maskr_crit
                #print(pcond.sum())
                plon = plon0[pcond]
                plat = plat0[pcond]
                pcs = pcs0[pcond]
            else:
                plon = plon0.copy()
                plat = plat0.copy()
                pcs = pcs0.copy()
            # create result arrays
            NP = len(plon)
            P = dict()
            for vn in plist_main:
                # NOTE: output is packed in a dict P of arrays ordered as [time, particle]
                P[vn] = np.nan * np.ones((NTS, NP))

            # write initial positions to the results arrays
            P['lon'][it0, :] = plon
            P['lat'][it0, :] = plat
            if surface == True:
                pcs[:] = S['Cs_r'][-1]
            P['cs'][it0, :] = pcs
            P['salt'][it0, :] = get_VR_new(sf0, sf1, plon, plat, pcs, 0,
                                           surface)
            P['temp'][it0, :] = get_VR_new(tf0, tf1, plon, plat, pcs, 0,
                                           surface)
            V = get_vel_new(uf0, uf1, vf0, vf1, wf0, wf1, plon, plat, pcs, 0,
                            surface)
            ZH = get_zh_new(zf0, zf1, hf, plon, plat, 0)
            P['u'][it0, :] = V[:, 0]
            P['v'][it0, :] = V[:, 1]
            P['w'][it0, :] = V[:, 2]
            P['zeta'][it0, :] = ZH[:, 0]
            P['h'][it0, :] = ZH[:, 1]
            P['z'][it0, :] = pcs * ZH.sum(axis=1) + ZH[:, 0]

        # do the particle tracking for a single pair of history files in ndiv steps
        it1 = it0
        for nd in range(ndiv):
            fr0 = nd / ndiv
            fr1 = (nd + 1) / ndiv
            frmid = (fr0 + fr1) / 2
            # RK4 integration
            V0 = get_vel_new(uf0, uf1, vf0, vf1, wf0, wf1, plon, plat, pcs,
                             fr0, surface)
            ZH0 = get_zh_new(zf0, zf1, hf, plon, plat, fr0)
            plon1, plat1, pcs1 = update_position(dxg, dyg, maskr, V0, ZH0, S,
                                                 delt / 2, plon, plat, pcs,
                                                 surface)
            V1 = get_vel_new(uf0, uf1, vf0, vf1, wf0, wf1, plon1, plat1, pcs1,
                             frmid, surface)
            ZH1 = get_zh_new(zf0, zf1, hf, plon1, plat1, frmid)
            plon2, plat2, pcs2 = update_position(dxg, dyg, maskr, V1, ZH1, S,
                                                 delt / 2, plon, plat, pcs,
                                                 surface)
            V2 = get_vel_new(uf0, uf1, vf0, vf1, wf0, wf1, plon2, plat2, pcs2,
                             frmid, surface)
            ZH2 = get_zh_new(zf0, zf1, hf, plon2, plat2, frmid)
            plon3, plat3, pcs3 = update_position(dxg, dyg, maskr, V2, ZH2, S,
                                                 delt, plon, plat, pcs,
                                                 surface)
            V3 = get_vel_new(uf0, uf1, vf0, vf1, wf0, wf1, plon3, plat3, pcs3,
                             fr1, surface)
            ZH3 = get_zh_new(zf0, zf1, hf, plon3, plat3, fr1)
            # add windage, calculated from the middle time
            if (surface == True) and (windage > 0):
                Vwind3 = get_wind_new(Uwindf0, Uwindf1, Vwindf0, Vwindf1, plon,
                                      plat, frmid, windage)
            else:
                Vwind3 = np.zeros((NP, 3))
            plon, plat, pcs = update_position(
                dxg, dyg, maskr, (V0 + 2 * V1 + 2 * V2 + V3) / 6 + Vwind3,
                (ZH0 + 2 * ZH1 + 2 * ZH2 + ZH3) / 6, S, delt, plon, plat, pcs,
                surface)
            # add turbulence to vertical position change (advection already added above)
            if turb == True:
                # pull values of VdAKs and add up to 3-dimensions
                VdAKs = get_dAKs_new(AKsf0, AKsf1, zf0, zf1, hf, plon, plat,
                                     pcs, S, frmid)
                VdAKs3 = np.zeros((NP, 3))
                VdAKs3[:, 2] = VdAKs
                # update position advecting vertically with 1/2 of AKs gradient
                ZH = get_zh_new(zf0, zf1, hf, plon, plat, frmid)
                plon_junk, plat_junk, pcs_half = update_position(
                    dxg, dyg, maskr, VdAKs3 / 2, ZH, S, delt / 2, plon, plat,
                    pcs, surface)
                # get AKs at this height, and thence the turbulent perturbation velocity
                Vturb = get_turb_new(VdAKs, AKsf0, AKsf1, delt, plon, plat,
                                     pcs_half, frmid)
                Vturb3 = np.zeros((NP, 3))
                Vturb3[:, 2] = Vturb
                # update vertical position for real
                plon_junk, plat_junk, pcs = update_position(
                    dxg, dyg, maskr, Vturb3, ZH, S, delt, plon, plat, pcs,
                    surface)

            ihr = nd + 1  # number of fractions 1/ndiv into the hour
            nihr = int(ndiv /
                       TR['sph'])  # number of fractions 1/ndiv between saves
            if np.mod(ihr, nihr) == 0:
                it1 += 1
                # write positions to the results arrays
                P['lon'][it1, :] = plon
                P['lat'][it1, :] = plat
                if surface == True:
                    pcs[:] = S['Cs_r'][-1]
                P['cs'][it1, :] = pcs
                P['salt'][it1, :] = get_VR_new(sf0, sf1, plon, plat, pcs, fr1,
                                               surface)
                P['temp'][it1, :] = get_VR_new(tf0, tf1, plon, plat, pcs, fr1,
                                               surface)
                P['u'][it1, :] = V3[:, 0]
                P['v'][it1, :] = V3[:, 1]
                P['w'][it1, :] = V3[:, 2]
                P['zeta'][it1, :] = ZH3[:, 0]
                P['h'][it1, :] = ZH3[:, 1]
                P['z'][it1, :] = pcs * ZH3.sum(axis=1) + ZH3[:, 0]

    # and save the time vector (seconds in whatever the model reports)
    P['ot'] = rot_save

    return P
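A hedged usage sketch, given a list fn_list of consecutive history files. The TR keys are the ones unpacked at the top of get_tracks, with hypothetical values ('ndiv12' in Example #26's track path suggests ndiv=12):

TR = {'3d': True, 'turb': False, 'ndiv': 12, 'windage': 0, 'sph': 1}
plon0 = np.array([-124.5])
plat0 = np.array([48.4])
pcs0 = np.array([-0.5])
P = get_tracks(fn_list, plon0, plat0, pcs0, TR, trim_loc=True)
print(P['lon'].shape)  # (NTS, NP): [time, particle]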
Example #20
def get_cast(gridname, tag, ex_name, date_string, station, lon_str, lat_str):

    # get the dict Ldir
    Ldir = Lfun.Lstart(gridname, tag)
    Ldir['gtagex'] = Ldir['gtag'] + '_' + ex_name
    Ldir['date_string'] = date_string
    Ldir['station'] = station
    Ldir['lon_str'] = lon_str
    Ldir['lat_str'] = lat_str
    
    # make sure the output directory exists
    outdir0 = Ldir['LOo'] + 'cast/'
    Lfun.make_dir(outdir0)
    outdir = outdir0 + Ldir['gtagex'] + '/'
    Lfun.make_dir(outdir)
    
    dt = Ldir['date_string']
    
    #%% function definitions
    
    def get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy):
        dims = ds.variables[vv].dimensions
        if 'eta_rho' in dims:
            grd = 'rho'
        elif 'eta_u' in dims:
            grd = 'u'
        elif 'eta_v' in dims:
            grd = 'v'
        else:
            print('grid error!')
        xi0 = Xi0[grd]; yi0 = Yi0[grd]
        xi1 = Xi1[grd]; yi1 = Yi1[grd]
        aix = Aix[grd]; aiy = Aiy[grd]
    
        xi01 = np.array([xi0, xi1]).flatten()
        yi01 = np.array([yi0, yi1]).flatten()
        return xi01, yi01, aix, aiy
    
    #%% set up for the extraction
        
    # target position
    Lon = np.array(float(Ldir['lon_str']))
    Lat = np.array(float(Ldir['lat_str']))
    
    
    # get grid info
    indir = Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/f' + dt + '/'
    fn = indir + 'ocean_his_0021.nc' # approx. noon local standard time
    
    if os.path.isfile(fn):
        pass
    else:
        print('Not found: ' + fn)
        return
    
    G = zrfun.get_basic_info(fn, only_G=True)
    S = zrfun.get_basic_info(fn, only_S=True)

    lon = G['lon_rho']
    lat = G['lat_rho']
    mask = G['mask_rho']
    xvec = lon[0,:].flatten()
    yvec = lat[:,0].flatten()
    
    i0, i1, frx = zfun.get_interpolant(np.array([float(Ldir['lon_str'])]), xvec)
    j0, j1, fry = zfun.get_interpolant(np.array([float(Ldir['lat_str'])]), yvec)
    i0 = int(i0)
    j0 = int(j0)
    # find indices of nearest good point
    if mask[j0,i0] == 1:
        print('- ' + station + ': point OK')
    elif mask[j0,i0] == 0:
        print('- ' + station + ': point masked')
        i0, j0 = get_ij_good(lon, lat, xvec, yvec, i0, j0, mask)
        new_lon = xvec[i0]
        new_lat = yvec[j0]
        Lon = np.array(new_lon)
        Lat = np.array(new_lat)
   
    # get interpolants for this point
    Xi0 = dict(); Yi0 = dict()
    Xi1 = dict(); Yi1 = dict()
    Aix = dict(); Aiy = dict()
    for grd in ['rho', 'u', 'v']:
        xx = G['lon_' + grd][1,:]
        yy = G['lat_' + grd][:,1]
        xi0, xi1, xfr = zfun.get_interpolant(Lon, xx, extrap_nan=True)
        yi0, yi1, yfr = zfun.get_interpolant(Lat, yy, extrap_nan=True)
        Xi0[grd] = xi0
        Yi0[grd] = yi0
        Xi1[grd] = xi1
        Yi1[grd] = yi1
        # create little arrays that are used in the actual interpolation
        Aix[grd] = np.array([1-xfr, xfr]).reshape((1,1,2))
        Aiy[grd] = np.array([1-yfr, yfr]).reshape((1,2))
    
    # generating some lists
    v0_list = ['h', 'lon_rho', 'lat_rho', 'lon_u', 'lat_u', 'lon_v', 'lat_v']
    v1_list = ['ocean_time']
    v2_list = []
    v3_list_rho = []
    v3_list_w = []
    ds = nc.Dataset(fn)
    for vv in ds.variables:
        vdim = ds.variables[vv].dimensions
        if ( ('ocean_time' in vdim)
            and ('s_rho' not in vdim)
            and ('s_w' not in vdim)
            and (vv != 'ocean_time') ):
            v2_list.append(vv)
        elif ( ('ocean_time' in vdim) and ('s_rho' in vdim) ):
            v3_list_rho.append(vv)
        elif ( ('ocean_time' in vdim) and ('s_w' in vdim) ):
            v3_list_w.append(vv)
    
    V = dict()
    V_long_name = dict()
    V_units = dict()
    v_all_list = v0_list + v1_list + v2_list + v3_list_rho + v3_list_w
    for vv in v_all_list:
        V[vv] = np.array([])
        try:
            V_long_name[vv] = ds.variables[vv].long_name
        except:
            V_long_name[vv] = ''
        try:
            V_units[vv] = ds.variables[vv].units
        except:
            V_units[vv] = ''
    
    # get static variables
    for vv in v0_list:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][yi01, xi01].squeeze()
        V[vv] =   ( aiy*((aix*vvtemp).sum(-1)) ).sum(-1)
    
    ds.close()
    
    #%% extract time-dependent fields
        
    print('-- Working on date: ' + dt)
    sys.stdout.flush()
    
    ds = nc.Dataset(fn)
    for vv in v1_list:
        vtemp = ds.variables[vv][:].squeeze()
        V[vv] = np.append(V[vv], vtemp)
    for vv in v2_list:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, yi01, xi01].squeeze()
        vtemp = ( aiy*((aix*vvtemp).sum(-1)) ).sum(-1)
        V[vv] = np.append(V[vv], vtemp)
    for vv in v3_list_rho:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = ( aiy*((aix*vvtemp).sum(-1)) ).sum(-1)
        V[vv] = vtemp.reshape((S['N'],1))
    for vv in v3_list_w:
        xi01, yi01, aix, aiy = get_its(ds, vv, Xi0, Yi0, Xi1, Yi1, Aix, Aiy)
        vvtemp = ds.variables[vv][:, :, yi01, xi01].squeeze()
        vtemp = ( aiy*((aix*vvtemp).sum(-1)) ).sum(-1)
        V[vv] = vtemp.reshape((S['N']+1,1))
    
    ds.close()
    
    # create z_rho and z_w (has to be done after we have V['zeta'])
    hh = V['h'][:] * np.ones_like(V['zeta'])
    z_rho, z_w = zrfun.get_z(hh, V['zeta'][:], S)
    V['hh'] = hh
    V_long_name['hh'] = 'bottom depth (positive down) as a vector'
    V_units['hh'] = 'm'
    V['z_rho'] = z_rho
    V_long_name['z_rho'] = 'z on rho points (positive up)'
    V_units['z_rho'] = 'm'
    V['z_w'] = z_w
    V_long_name['z_w'] = 'z on w points (positive up)'
    V_units['z_w'] = 'm'
    v2_list.append('hh')
    v3_list_rho.append('z_rho')
    v3_list_w.append('z_w')
    
    #%% save the output to NetCDF
    out_fn = (outdir +
        Ldir['station'] + '_' +
        Ldir['date_string'] +
        '.nc')
    # get rid of the old version, if it exists
    try:
        os.remove(out_fn)
    except OSError:
        pass # assume error was because the file did not exist
    foo = nc.Dataset(out_fn, 'w')
    
    N = S['N']
    NT = len(V['ocean_time'][:])
    
    foo.createDimension('scalar', 1)
    foo.createDimension('s_rho', N)
    foo.createDimension('s_w', N+1)
    foo.createDimension('ocean_time', NT)
    
    for vv in v0_list:
        v_var = foo.createVariable(vv, float, ('scalar'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v1_list:
        v_var = foo.createVariable(vv, float, ('ocean_time',))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v2_list:
        v_var = foo.createVariable(vv, float, ('ocean_time',))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v3_list_rho:
        v_var = foo.createVariable(vv, float, ('s_rho', 'ocean_time'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
    for vv in v3_list_w:
        v_var = foo.createVariable(vv, float, ('s_w', 'ocean_time'))
        v_var[:] = V[vv][:]
        v_var.long_name = V_long_name[vv]
        v_var.units = V_units[vv]
            
    foo.close()
Example #21
File: pfun.py Project: parkermac/LiveOcean
def get_section(ds, vn, x, y, in_dict):
    
    # PLOT CODE
    from warnings import filterwarnings
    filterwarnings('ignore') # skip a warning message

    # GET DATA
    G, S, T = zrfun.get_basic_info(in_dict['fn'])
    h = G['h']
    zeta = ds['zeta'][:].squeeze()
    zr = zrfun.get_z(h, zeta, S, only_rho=True)

    sectvar = ds[vn][:].squeeze()

    L = G['L']
    M = G['M']
    N = S['N']

    lon = G['lon_rho']
    lat = G['lat_rho']
    mask = G['mask_rho']
    maskr = mask.reshape(1, M, L).copy()
    mask3 = np.tile(maskr, [N, 1, 1])
    zbot = -h # don't need .copy() because of the minus operation

    # make sure fields are masked
    zeta[mask==False] = np.nan
    zbot[mask==False] = np.nan
    sectvar[mask3==False] = np.nan

    # create dist
    earth_rad = zfun.earth_rad(np.mean(lat[:,0])) # m
    xrad = np.pi * x /180
    yrad = np.pi * y / 180
    dx = earth_rad * np.cos(yrad[1:]) * np.diff(xrad)
    dy = earth_rad * np.diff(yrad)
    ddist = np.sqrt(dx**2 + dy**2)
    dist = np.zeros(len(x))
    dist[1:] = ddist.cumsum()/1000 # km
    # find the index of zero
    i0, i1, fr = zfun.get_interpolant(np.zeros(1), dist)
    idist0 = i0
    distr = dist.reshape(1, len(dist)).copy()
    dista = np.tile(distr, [N, 1]) # array
    # pack fields to process in dicts
    d2 = dict()
    d2['zbot'] = zbot
    d2['zeta'] = zeta
    d2['lon'] = lon
    d2['lat'] = lat
    d3 = dict()
    d3['zr'] = zr
    d3['sectvar'] = sectvar
    # get vectors describing the (plaid) grid
    xx = lon[1,:]
    yy = lat[:,1]
    col0, col1, colf = zfun.get_interpolant(x, xx)
    row0, row1, rowf = zfun.get_interpolant(y, yy)
    # and prepare them to do the bilinear interpolation
    colff = 1 - colf
    rowff = 1 - rowf
    # now actually do the interpolation
    # 2-D fields
    v2 = dict()
    for fname in d2.keys():
        fld = d2[fname]
        fldi = (rowff*(colff*fld[row0, col0] + colf*fld[row0, col1])
        + rowf*(colff*fld[row1, col0] + colf*fld[row1, col1]))
        if type(fldi) == np.ma.core.MaskedArray:
            fldi = fldi.data # just the data, not the mask
        v2[fname] = fldi
    # 3-D fields
    v3 = dict()
    for fname in d3.keys():
        fld = d3[fname]
        fldi = (rowff*(colff*fld[:, row0, col0] + colf*fld[:, row0, col1])
        + rowf*(colff*fld[:, row1, col0] + colf*fld[:, row1, col1]))
        if type(fldi) == np.ma.core.MaskedArray:
            fldid = fldi.data # just the data, not the mask
            fldid[fldi.mask == True] = np.nan
        else:
            fldid = fldi
        v3[fname] = fldid
    v3['dist'] = dista # distance in km
    # make "full" fields by padding top and bottom
    nana = np.nan * np.ones((N + 2, len(dist))) # blank array
    v3['zrf'] = nana.copy()
    v3['zrf'][0,:] = v2['zbot']
    v3['zrf'][1:-1,:] = v3['zr']
    v3['zrf'][-1,:] = v2['zeta']
    #
    v3['sectvarf'] = nana.copy()
    v3['sectvarf'][0,:] = v3['sectvar'][0,:]
    v3['sectvarf'][1:-1,:] = v3['sectvar']
    v3['sectvarf'][-1,:] = v3['sectvar'][-1,:]
    #
    v3['distf'] = nana.copy()
    v3['distf'][0,:] = v3['dist'][0,:]
    v3['distf'][1:-1,:] = v3['dist']
    v3['distf'][-1,:] = v3['dist'][-1,:]    
    
    # attempt to skip over nan's
    v3.pop('zr')
    v3.pop('sectvar')
    v3.pop('dist')
    mask3 = ~np.isnan(v3['sectvarf'][:])
    #print(mask3.shape)
    mask2 = mask3[-1,:]
    dist = dist[mask2]
    NC = len(dist)
    NR = mask3.shape[0]
    for k in v2.keys():
        #print('v2 key: ' + k)
        v2[k] = v2[k][mask2]
    for k in v3.keys():
        #print('v3 key: ' + k)
        v3[k] = v3[k][mask3]
        v3[k] = v3[k].reshape((NR, NC))
        #print(v3[k].shape)
    
    return v2, v3, dist, idist0
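A hedged usage sketch, assuming ds is an open Dataset for the history file fn: a section along a line of hypothetical points, plotted with the padded "full" fields the function returns:

x = np.linspace(-125.0, -124.0, 500)  # hypothetical section longitudes
y = 48.5 * np.ones_like(x)            # constant latitude
v2, v3, dist, idist0 = get_section(ds, 'salt', x, y, {'fn': fn})
# e.g. plt.pcolormesh(v3['distf'], v3['zrf'], v3['sectvarf'])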
Example #22
def get_section(ds, vn, x, y, in_dict):

    # PLOT CODE
    from warnings import filterwarnings
    filterwarnings('ignore')  # skip a warning message

    # GET DATA
    G, S, T = zrfun.get_basic_info(in_dict['fn'])
    h = G['h']
    zeta = ds['zeta'][:].squeeze()
    zr = zrfun.get_z(h, zeta, S, only_rho=True)

    sectvar = ds[vn][:].squeeze()

    L = G['L']
    M = G['M']
    N = S['N']

    lon = G['lon_rho']
    lat = G['lat_rho']
    mask = G['mask_rho']
    maskr = mask.reshape(1, M, L).copy()
    mask3 = np.tile(maskr, [N, 1, 1])
    zbot = -h  # don't need .copy() because of the minus operation

    # make sure fields are masked
    zeta[mask == False] = np.nan
    zbot[mask == False] = np.nan
    sectvar[mask3 == False] = np.nan

    # create dist
    earth_rad = zfun.earth_rad(np.mean(lat[:, 0]))  # m
    xrad = np.pi * x / 180
    yrad = np.pi * y / 180
    dx = earth_rad * np.cos(yrad[1:]) * np.diff(xrad)
    dy = earth_rad * np.diff(yrad)
    ddist = np.sqrt(dx**2 + dy**2)
    dist = np.zeros(len(x))
    dist[1:] = ddist.cumsum() / 1000  # km
    # find the index of zero
    i0, i1, fr = zfun.get_interpolant(np.zeros(1), dist)
    idist0 = i0
    distr = dist.reshape(1, len(dist)).copy()
    dista = np.tile(distr, [N, 1])  # array
    # pack fields to process in dicts
    d2 = dict()
    d2['zbot'] = zbot
    d2['zeta'] = zeta
    d2['lon'] = lon
    d2['lat'] = lat
    d3 = dict()
    d3['zr'] = zr
    d3['sectvar'] = sectvar
    # get vectors describing the (plaid) grid
    xx = lon[1, :]
    yy = lat[:, 1]
    col0, col1, colf = zfun.get_interpolant(x, xx)
    row0, row1, rowf = zfun.get_interpolant(y, yy)
    # and prepare them to do the bilinear interpolation
    colff = 1 - colf
    rowff = 1 - rowf
    # now actually do the interpolation
    # 2-D fields
    v2 = dict()
    for fname in d2.keys():
        fld = d2[fname]
        fldi = (rowff * (colff * fld[row0, col0] + colf * fld[row0, col1]) +
                rowf * (colff * fld[row1, col0] + colf * fld[row1, col1]))
        if type(fldi) == np.ma.core.MaskedArray:
            fldi = fldi.data  # just the data, not the mask
        v2[fname] = fldi
    # 3-D fields
    v3 = dict()
    for fname in d3.keys():
        fld = d3[fname]
        fldi = (rowff *
                (colff * fld[:, row0, col0] + colf * fld[:, row0, col1]) +
                rowf *
                (colff * fld[:, row1, col0] + colf * fld[:, row1, col1]))
        if type(fldi) == np.ma.core.MaskedArray:
            fldid = fldi.data  # just the data, not the mask
            fldid[fldi.mask == True] = np.nan
        else:
            fldid = fldi
        v3[fname] = fldid
    v3['dist'] = dista  # distance in km
    # make "full" fields by padding top and bottom
    nana = np.nan * np.ones((N + 2, len(dist)))  # blank array
    v3['zrf'] = nana.copy()
    v3['zrf'][0, :] = v2['zbot']
    v3['zrf'][1:-1, :] = v3['zr']
    v3['zrf'][-1, :] = v2['zeta']
    #
    v3['sectvarf'] = nana.copy()
    v3['sectvarf'][0, :] = v3['sectvar'][0, :]
    v3['sectvarf'][1:-1, :] = v3['sectvar']
    v3['sectvarf'][-1, :] = v3['sectvar'][-1, :]
    #
    v3['distf'] = nana.copy()
    v3['distf'][0, :] = v3['dist'][0, :]
    v3['distf'][1:-1, :] = v3['dist']
    v3['distf'][-1, :] = v3['dist'][-1, :]

    # attempt to skip over nan's
    v3.pop('zr')
    v3.pop('sectvar')
    v3.pop('dist')
    mask3 = ~np.isnan(v3['sectvarf'][:])
    #print(mask3.shape)
    mask2 = mask3[-1, :]
    dist = dist[mask2]
    NC = len(dist)
    NR = mask3.shape[0]
    for k in v2.keys():
        #print('v2 key: ' + k)
        v2[k] = v2[k][mask2]
    for k in v3.keys():
        #print('v3 key: ' + k)
        v3[k] = v3[k][mask3]
        v3[k] = v3[k].reshape((NR, NC))
        #print(v3[k].shape)

    return v2, v3, dist, idist0
Example #23
    #%% extrapolate
    lon, lat, z, L, M, N, X, Y = Ofun.get_coords(fh_dir)
    a = os.listdir(fh_dir)
    a.sort()
    aa = [item for item in a if item[:2]=='fh']
    for fn in aa:
        print('-Extrapolating ' + fn)
        in_fn = fh_dir + fn
        V = Ofun.get_extrapolated(in_fn, L, M, N, X, Y, lon, lat, z, Ldir,
            add_CTD=add_CTD)
        pickle.dump(V, open(fh_dir + 'x' + fn, 'wb'))

    # and interpolate to ROMS format
    # get grid and S info
    G = zrfun.get_basic_info(Ldir['grid'] + 'grid.nc', only_G=True)
    S_info_dict = Lfun.csv_to_dict(Ldir['grid'] + 'S_COORDINATE_INFO.csv')
    S = zrfun.get_S(S_info_dict)
    # get list of files to work on
    a = os.listdir(fh_dir)
    a.sort()
    aa = [item for item in a if item[:3]=='xfh']
    # HyCOM grid info
    lon, lat, z, L, M, N, X, Y = Ofun.get_coords(fh_dir)
    # load a dict of hycom fields
    dt_list = []
    count = 0
    c_dict = dict()

    if testing == False: # this is a slow step, so omit for testing
        for fn in aa:
Example #24
opth = os.path.abspath('../ocn4/')
if opth not in sys.path:
    sys.path.append(opth)
import Ofun_nc
import Ofun
from importlib import reload
reload(Ofun_nc)
reload(Ofun)

start_time = datetime.now()
in_dir = Ldir['roms'] + 'output/cas6_v3_lo8b/f' + Ldir['date_string']
nc_dir = Ldir['LOogf_f']

# get grid and S info
G = zrfun.get_basic_info(Ldir['grid'] + 'grid.nc', only_G=True)
S_info_dict = Lfun.csv_to_dict(Ldir['grid'] + 'S_COORDINATE_INFO.csv')
S = zrfun.get_S(S_info_dict)
# get list of files to work on
h_list_full = os.listdir(in_dir)
h_list = [item for item in h_list_full if 'ocean_his' in item]
h_list.sort()

if Ldir['run_type'] == 'backfill':
    print('Using backfill list')
    h_list = h_list[:25]

# debugging
if Ldir['lo_env'] == 'pm_mac':
    testing = True
else:
Example #25
    Lfun.make_dir(outdir)

    # split the calculation up into one-day chunks
    for nd in range(Ldir['days_to_track']):

        idt = idt0 + timedelta(days=nd)

        # make sure the file list starts at the start of the day
        if nd > 0:
            fn_last = fn_list[-1]
        fn_list = trackfun.get_fn_list(idt, Ldir, yr=yr)
        if nd > 0:
            fn_list = [fn_last] + fn_list

        # set dates
        T0 = zrfun.get_basic_info(fn_list[0], only_T=True)
        Tend = zrfun.get_basic_info(fn_list[-1], only_T=True)
        Ldir['date_string0'] = datetime.strftime(T0['tm'], '%Y.%m.%d')
        Ldir['date_string1'] = datetime.strftime(Tend['tm'], '%Y.%m.%d')

        # change depth ranges based on age
        juv = age >= 40
        juv_min = np.ones(len(plon0)) * -50
        juv_max = np.ones(len(plon0)) * -100
        #        if sum(juv) != 0:
        #            dep_range[0][juv] = juv_min[juv]
        #            dep_range[1][juv] = juv_max[juv]

        if nd == 0:  # first day

            # make the output subdirectory for this year
Example #26
dir0 = Ldirroms + 'output/' + Ldir['gtagex'] + '/'

f_list = os.listdir(dir0)
f_list.sort()
f_list = [x for x in f_list if x[0]=='f']
first_f = [i for i, s in enumerate(f_list) if '2017.02.15' in s]
f_list = f_list[first_f[0]:]

ocean_grid = dir0+f_list[0]+'/ocean_his_0001.nc'

trackfn = '/pmr4/eab32/LiveOcean_output/tracks/eliz_ae1_ndiv12_3d_turb/release_2017.02.15.nc'

dia_keys = ('hadv', 'vadv', 'cor', 'prsgrd', 'accel', 'vvisc')

G, S, T = zrfun.get_basic_info(ocean_grid)
lonr = G['lon_rho'][:]
lonu = G['lon_u'][:]
lonv = G['lon_v'][:]
latr = G['lat_rho'][:]
latu = G['lat_u'][:]
latv = G['lat_v'][:]
maskr = G['mask_rho'][:]
masku = G['mask_u'][:]
maskv = G['mask_v'][:]
h = G['h'][:]


# GET PARTICLE PATHS
dst = nc.Dataset(trackfn)
tu0 = dst['u'][:]
Example #27
cmap = 'jet'

# get model fields
fn = Ldir['roms'] + 'output/' + Ldir[
    'gtagex'] + '/f2017.07.20/ocean_his_0001.nc'
in_dict = dict()
in_dict['fn'] = fn
ds = nc.Dataset(fn)

# get forcing fields
ffn = Ldir['LOo'] + 'superplot/forcing_' + Ldir['gtagex'] + '_2017.p'
fdf = pd.read_pickle(ffn)
fdf['yearday'] = fdf.index.dayofyear - 0.5  # .5 to 364.5

# get section
G, S, T = zrfun.get_basic_info(in_dict['fn'])
# read in a section (or list of sections)
tracks_path = Ldir['data'] + 'tracks_new/'
#tracks = ['Line_jdf_v0.p', 'Line_ps_main_v0.p']
tracks = ['Line_ps_main_v0.p']
zdeep = -300
xx = np.array([])
yy = np.array([])
for track in tracks:
    track_fn = tracks_path + track
    # get the track to interpolate onto
    pdict = pickle.load(open(track_fn, 'rb'))
    xx = np.concatenate((xx, pdict['lon_poly']))
    yy = np.concatenate((yy, pdict['lat_poly']))
for ii in range(len(xx) - 1):
    x0 = xx[ii]
Example #29
def get_ic(ic_name, fn00):
    # routines to set particle initial locations, all numpy arrays

    if ic_name == 'hc0':  # Hood Canal
        # this fills a volume defined by a rectangular lon, lat region
        # with particles spaced every 10 m from the surface to near the bottom
        lonvec = np.linspace(-123.15, -122.9, 30)
        latvec = np.linspace(47.45, 47.65, 30)
        lonmat, latmat = np.meshgrid(lonvec, latvec)
        import zfun
        import zrfun
        G, S, T = zrfun.get_basic_info(fn00)
        hh = zfun.interp2(lonmat, latmat, G['lon_rho'], G['lat_rho'], G['h'])
        plon00 = np.array([])
        plat00 = np.array([])
        pcs00 = np.array([])
        plon_vec = lonmat.flatten()
        plat_vec = latmat.flatten()
        hh_vec = hh.flatten()
        for ii in range(len(plon_vec)):
            x = plon_vec[ii]
            y = plat_vec[ii]
            h10 = 10 * np.floor(
                hh_vec[ii] / 10)  # depth to closest 10 m (above the bottom)
            if h10 >= 10:
                zvec = np.arange(
                    -h10, 5,
                    10)  # a vector that goes from -h10 to 0 in steps of 10 m
                svec = zvec / hh_vec[ii]
                ns = len(svec)
                if ns > 0:
                    plon00 = np.append(plon00, x * np.ones(ns))
                    plat00 = np.append(plat00, y * np.ones(ns))
                    pcs00 = np.append(pcs00, svec)

    elif ic_name == 'hc1':  # Hood Canal using TEF "segments"
        # NOTE: this one requires information about grid indices in certain regions of
        # the Salish Sea, and is very specific to the analysis in x_tef.
        # Not for general use.
        import pickle
        # select the indir
        import Lfun
        Ldir = Lfun.Lstart()
        indir0 = Ldir['LOo'] + 'tef/cas6_v3_lo8b_2017.01.01_2017.12.31/'
        indir = indir0 + 'flux/'
        # load data
        ji_dict = pickle.load(open(indir + 'ji_dict.p', 'rb'))

        import zrfun
        G = zrfun.get_basic_info(fn00, only_G=True)
        h = G['h']
        xp = G['lon_rho']
        yp = G['lat_rho']

        plon_vec = np.array([])
        plat_vec = np.array([])
        hh_vec = np.array([])

        seg_list = ['H3', 'H4', 'H5', 'H6', 'H7', 'H8']
        for seg_name in seg_list:
            ji_seg = ji_dict[seg_name]
            for ji in ji_seg:
                plon_vec = np.append(plon_vec, xp[ji])
                plat_vec = np.append(plat_vec, yp[ji])
                hh_vec = np.append(hh_vec, h[ji])

        plon00 = np.array([])
        plat00 = np.array([])
        pcs00 = np.array([])

        for ii in range(len(plon_vec)):
            x = plon_vec[ii]
            y = plat_vec[ii]
            h10 = 10 * np.floor(
                hh_vec[ii] / 10)  # depth to closest 10 m (above the bottom)
            if h10 >= 10:
                zvec = np.arange(
                    -h10, 5,
                    10)  # a vector that goes from -h10 to 0 in steps of 10 m
                svec = zvec / hh_vec[ii]
                ns = len(svec)
                if ns > 0:
                    plon00 = np.append(plon00, x * np.ones(ns))
                    plat00 = np.append(plat00, y * np.ones(ns))
                    pcs00 = np.append(pcs00, svec)

        # trim the length of the I.C. vectors to a limited number
        NP = len(plon00)
        npmax = 10000
        nstep = max(1, int(NP / npmax))
        plon00 = plon00[::nstep]
        plat00 = plat00[::nstep]
        pcs00 = pcs00[::nstep]

    # cases that use ic_from_meshgrid
    elif ic_name == 'tn0':  # Tacoma Narrows region
        lonvec = np.linspace(-122.65, -122.45, 30)
        latvec = np.linspace(47.2, 47.35, 30)
        pcs_vec = np.array([-.05])
        plon00, plat00, pcs00 = ic_from_meshgrid(lonvec, latvec, pcs_vec)
    elif ic_name == 'jdf0':  # Mid-Juan de Fuca
        lonvec = np.linspace(-123.85, -123.6, 20)
        latvec = np.linspace(48.2, 48.4, 20)
        pcs_vec = np.array([-.05])
        plon00, plat00, pcs00 = ic_from_meshgrid(lonvec, latvec, pcs_vec)
    elif ic_name == 'skok':  # head of the Skokomish River
        lonvec = np.linspace(-123.171, -123.163, 10)
        latvec = np.linspace(47.306, 47.312, 10)
        pcs_vec = np.array([-.05])
        plon00, plat00, pcs00 = ic_from_meshgrid(lonvec, latvec, pcs_vec)

    return plon00, plat00, pcs00
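The last three cases rely on a helper, ic_from_meshgrid, that is not shown in this snippet. A plausible minimal implementation, consistent with how it is called above (an assumption, not necessarily the project's actual helper):

import numpy as np

def ic_from_meshgrid(lonvec, latvec, pcs_vec):
    # build a regular lon/lat grid and replicate it at each pcs level
    lonmat, latmat = np.meshgrid(lonvec, latvec)
    plon_vec = lonmat.flatten()
    plat_vec = latmat.flatten()
    plon00 = np.tile(plon_vec, len(pcs_vec))
    plat00 = np.tile(plat_vec, len(pcs_vec))
    pcs00 = np.repeat(pcs_vec, len(plon_vec))
    return plon00, plat00, pcs00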
Example #30
def get_tracks(fn_list,
               plon0,
               plat0,
               pcs0,
               dir_tag,
               method,
               surface,
               turb,
               ndiv,
               windage,
               bound='stop',
               dep_range=None):
    '''
    Create tracks of particles starting from the locations at (plon0, plat0, pcs0)
    
    Inputs:
    fn_list - array of dataset file locations
    plon0/plat0/pcs0 - arrays of position (longitude/latitude/proportion of total depth)
    dir_tag - string determining direction of tracking ('forward' or 'reverse')
    method - string determining the time-integration scheme ('rk2' or 'rk4')
    surface - Boolean, True traps particles to the surface
    turb - Boolean, True adds random vertical walk
    ndiv - int determining number of divisions to make between saves for the integration
    windage - float (0 to 1) determining proportion of velocity to add from surface wind
    bound - string determining boundary velocity conditions
        'stop' (default) prevents particles from moving when a gridcell contains land
        'reflect' causes particles to move away from gridcells that contain land
    dep_range - tuple containing arrays (minimum depth, maximum depth) for all particles
    
    Outputs:
    P - dictionary containing variables in plist_main
        each variable has rows of time and columns of particles
        except the array 'ot', seconds since 1970
    G - dictionary containing grid and bathymetry information
    S - dictionary containing vertical S-coordinate information
    '''

    # Bartos - removed on land particle removal, so set these to plon, not plonA
    plon = plon0.copy()
    plat = plat0.copy()
    pcs = pcs0.copy()

    # get basic info
    G = zrfun.get_basic_info(fn_list[0], only_G=True)
    S = zrfun.get_basic_info(fn_list[0], only_S=True)

    # get time vector of history files
    NT = len(fn_list)
    rot = np.nan * np.ones(NT)
    counter = 0
    for fn in fn_list:
        ds = nc4.Dataset(fn)
        rot[counter] = ds.variables['ocean_time'][:].squeeze()
        counter += 1
        ds.close()

    delta_t = rot[1] - rot[0]  # seconds between saves

    # this is how we track backwards in time
    if dir_tag == 'reverse':
        delta_t = -delta_t
        fn_list = fn_list[::-1]

    # make vectors to feed to interpolant maker
    R = dict()
    R['rlonr'] = G['lon_rho'][0, :].squeeze()
    R['rlatr'] = G['lat_rho'][:, 0].squeeze()
    R['rlonu'] = G['lon_u'][0, :].squeeze()
    R['rlatu'] = G['lat_u'][:, 0].squeeze()
    R['rlonv'] = G['lon_v'][0, :].squeeze()
    R['rlatv'] = G['lat_v'][:, 0].squeeze()
    R['rcsr'] = S['Cs_r'][:]
    R['rcsw'] = S['Cs_w'][:]

    # these lists are used internally to get other variables as needed
    vn_list_vel = ['u', 'v', 'w']
    vn_list_zh = ['zeta', 'h']
    vn_list_wind = ['Uwind', 'Vwind']
    vn_list_other = ['salt', 'temp', 'zeta', 'h', 'u', 'v', 'w']

    # Step through times.
    # Bartos - removed on land particle removal
    counter = 0
    nrot = len(rot)
    for pot in rot[:-1]:

        if np.mod(counter, 24) == 0:
            print(' - time %d out of %d' % (counter, nrot))
            sys.stdout.flush()

        # get time indices
        it0, it1, frt = zfun.get_interpolant(np.array(pot),
                                             rot,
                                             extrap_nan=False)

        # get the velocity, zeta, and h at all points
        ds0 = nc4.Dataset(fn_list[it0[0]])
        ds1 = nc4.Dataset(fn_list[it1[0]])

        if counter == 0:

            # create result arrays
            NP = len(plon)
            P = dict()
            # plist_main is what ends up written to output
            plist_main = [
                'lon', 'lat', 'cs', 'ot', 'z', 'zeta', 'zbot', 'salt', 'temp',
                'u', 'v', 'w', 'Uwind', 'Vwind', 'h'
            ]
            for vn in plist_main:
                P[vn] = np.nan * np.ones((NT, NP))

            # write positions to the results arrays
            P['lon'][it0, :] = plon
            P['lat'][it0, :] = plat
            if surface == True:
                pcs[:] = S['Cs_r'][-1]
            P['cs'][it0, :] = pcs

            P = get_properties(vn_list_other, ds0, it0, P, plon, plat, pcs, R,
                               surface)

        delt = delta_t / ndiv

        for nd in range(ndiv):

            fr0 = nd / ndiv
            fr1 = (nd + 1) / ndiv
            frmid = (fr0 + fr1) / 2

            if method == 'rk4':
                # RK4 integration

                V0, ZH0 = get_vel(vn_list_vel,
                                  vn_list_zh,
                                  ds0,
                                  ds1,
                                  plon,
                                  plat,
                                  pcs,
                                  R,
                                  fr0,
                                  surface,
                                  bound=bound)

                plon1, plat1, pcs1 = update_position(V0, ZH0, S, delt / 2,
                                                     plon, plat, pcs, surface)
                V1, ZH1 = get_vel(vn_list_vel,
                                  vn_list_zh,
                                  ds0,
                                  ds1,
                                  plon1,
                                  plat1,
                                  pcs1,
                                  R,
                                  frmid,
                                  surface,
                                  bound=bound)

                plon2, plat2, pcs2 = update_position(V1, ZH1, S, delt / 2,
                                                     plon, plat, pcs, surface)
                V2, ZH2 = get_vel(vn_list_vel,
                                  vn_list_zh,
                                  ds0,
                                  ds1,
                                  plon2,
                                  plat2,
                                  pcs2,
                                  R,
                                  frmid,
                                  surface,
                                  bound=bound)

                plon3, plat3, pcs3 = update_position(V2, ZH2, S, delt, plon,
                                                     plat, pcs, surface)
                V3, ZH3 = get_vel(vn_list_vel,
                                  vn_list_zh,
                                  ds0,
                                  ds1,
                                  plon3,
                                  plat3,
                                  pcs3,
                                  R,
                                  fr1,
                                  surface,
                                  bound=bound)

                # add windage, calculated from the middle time
                if (surface == True) and (windage > 0):
                    Vwind = get_wind(vn_list_wind, ds0, ds1, plon, plat, pcs,
                                     R, frmid, surface)
                    Vwind3 = np.concatenate((windage * Vwind, np.zeros(
                        (NP, 1))),
                                            axis=1)
                else:
                    Vwind3 = np.zeros((NP, 3))

                plon, plat, pcs = update_position(
                    (V0 + 2 * V1 + 2 * V2 + V3) / 6 + Vwind3,
                    (ZH0 + 2 * ZH1 + 2 * ZH2 + ZH3) / 6, S, delt, plon, plat,
                    pcs, surface)

                # Bartos - begin turbulence edit

                # add turbulence in two distinct timesteps
                if turb == True:
                    # pull values of VdAKs and add up to 3-dimensions
                    VdAKs = get_dAKs(vn_list_zh, ds0, ds1, plon, plat, pcs, R,
                                     S, frmid, surface)
                    VdAKs3 = np.concatenate((np.zeros(
                        (NP, 2)), VdAKs[:, np.newaxis]),
                                            axis=1)

                    # update position with 1/2 of AKs gradient
                    plon, plat, pcs = update_position(
                        VdAKs3 / 2, (ZH0 + 2 * ZH1 + 2 * ZH2 + ZH3) / 6, S,
                        delt, plon, plat, pcs, surface)

                    # update position with rest of turbulence
                    Vturb = get_turb(ds0, ds1, VdAKs, delta_t, plon, plat, pcs,
                                     R, frmid, surface)
                    Vturb3 = np.concatenate((np.zeros(
                        (NP, 2)), Vturb[:, np.newaxis]),
                                            axis=1)

                    plon, plat, pcs = update_position(
                        Vturb3, (ZH0 + 2 * ZH1 + 2 * ZH2 + ZH3) / 6, S, delt,
                        plon, plat, pcs, surface)

# Bartos - end edit

            elif method == 'rk2':
                # RK2 integration
                V0, ZH0 = get_vel(vn_list_vel,
                                  vn_list_zh,
                                  ds0,
                                  ds1,
                                  plon,
                                  plat,
                                  pcs,
                                  R,
                                  fr0,
                                  surface,
                                  bound=bound)

                plon1, plat1, pcs1 = update_position(V0, ZH0, S, delt / 2,
                                                     plon, plat, pcs, surface)
                V1, ZH1 = get_vel(vn_list_vel,
                                  vn_list_zh,
                                  ds0,
                                  ds1,
                                  plon1,
                                  plat1,
                                  pcs1,
                                  R,
                                  frmid,
                                  surface,
                                  bound=bound)

                # add windage, calculated from the middle time
                if (surface == True) and (windage > 0):
                    Vwind = get_wind(vn_list_wind, ds0, ds1, plon, plat, pcs,
                                     R, frmid, surface)
                    Vwind3 = np.concatenate((windage * Vwind, np.zeros(
                        (NP, 1))),
                                            axis=1)
                else:
                    Vwind3 = np.zeros((NP, 3))

                plon, plat, pcs = update_position(V1 + Vwind3, ZH1, S, delt,
                                                  plon, plat, pcs, surface)

                # Bartos - begin turbulence edit

                # add turbulence in two distinct timesteps
                if turb == True:
                    # pull values of VdAKs and add up to 3-dimensions
                    VdAKs = get_dAKs(vn_list_zh, ds0, ds1, plon, plat, pcs, R,
                                     S, frmid, surface)
                    VdAKs3 = np.concatenate((np.zeros(
                        (NP, 2)), VdAKs[:, np.newaxis]),
                                            axis=1)

                    # update position with 1/2 of AKs gradient
                    plon, plat, pcs = update_position(VdAKs3 / 2, ZH1, S, delt,
                                                      plon, plat, pcs, surface)

                    # update position with rest of turbulence
                    Vturb = get_turb(ds0, ds1, VdAKs, delta_t, plon, plat, pcs,
                                     R, frmid, surface)
                    Vturb3 = np.concatenate((np.zeros(
                        (NP, 2)), Vturb[:, np.newaxis]),
                                            axis=1)

                    plon, plat, pcs = update_position(Vturb3, ZH1, S, delt,
                                                      plon, plat, pcs, surface)

        # write positions to the results arrays
        P['lon'][it1, :] = plon
        P['lat'][it1, :] = plat
        if surface == True:
            pcs[:] = S['Cs_r'][-1]
        P['cs'][it1, :] = pcs
        P = get_properties(vn_list_other, ds1, it1, P, plon, plat, pcs, R,
                           surface)

        # Bartos - begin maximum and minimum depth edit

        if dep_range is not None:
            dep_max = dep_range[1]
            # mask of depths below dep_max
            dep_mask = P['z'][it1, :] < dep_max
            dep_mask = dep_mask.squeeze()
            # total depth
            dep_tot = P['zeta'][it1, dep_mask] + P['h'][it1, dep_mask]
            # replace masked depths with dep_max
            P['z'][it1, dep_mask] = dep_max[dep_mask]
            P['cs'][it1, dep_mask] = dep_max[dep_mask] / dep_tot
            pcs[dep_mask] = dep_max[dep_mask] / dep_tot

            dep_min = dep_range[0]
            # mask of depths above dep_min
            dep_mask = P['z'][it1, :] > dep_min
            dep_mask = dep_mask.squeeze()
            # total depth
            dep_tot = P['zeta'][it1, dep_mask] + P['h'][it1, dep_mask]
            # replace masked depths with dep_min
            P['z'][it1, dep_mask] = dep_min[dep_mask]
            P['cs'][it1, dep_mask] = dep_min[dep_mask] / dep_tot
            pcs[dep_mask] = dep_min[dep_mask] / dep_tot


# Bartos - end edits

        ds0.close()
        ds1.close()

        counter += 1

    # by doing this the points are going forward in time
    if dir_tag == 'reverse':
        for vn in plist_main:
            P[vn] = P[vn][::-1, :]

    # and save the time vector (seconds in whatever the model reports)
    P['ot'] = rot

    return P, G, S
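The rk4 branch above is a classical fourth-order Runge-Kutta step for dx/dt = u(x, t): the velocity is sampled at the start of the step, twice at the midpoint, and at the end, then combined with 1-2-2-1 weights. A minimal, self-contained sketch on a made-up scalar velocity field (the field u below is an illustration only):

import numpy as np

def u(x, t):
    return -x + np.sin(t)  # toy velocity field

def rk4_step(x, t, dt):
    v0 = u(x, t)
    v1 = u(x + v0 * dt / 2, t + dt / 2)
    v2 = u(x + v1 * dt / 2, t + dt / 2)
    v3 = u(x + v2 * dt, t + dt)
    return x + dt * (v0 + 2 * v1 + 2 * v2 + v3) / 6

x, t, dt = 1.0, 0.0, 0.1
for _ in range(10):
    x = rk4_step(x, t, dt)
    t += dt
print(x)  # position after ten steps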
Example #31
def TEF_line(lon, t):

    Ldir = Lfun.Lstart('aestus1', 'base')
    Ldir['gtagex'] = Ldir['gtag'] + '_' + 'ae1'

    Ldirroms = '/pmr4/eab32/LiveOcean_ROMS/'

    dir0 = Ldirroms + 'output/' + Ldir['gtagex'] + '/'

    f_list = os.listdir(dir0)
    f_list.sort()
    f_list = [x for x in f_list if x[0] == 'f']

    if len(t) == 1:
        f_list = f_list[t[0]:]
    elif len(t) == 2:
        f_list = f_list[t[0]:t[1] + 1]  # control number of days
    else:
        print('t must be of length 1 or 2')
        print('Defaulting to use whole f_list')

    # number of time steps for averages and diagnostics
    nt = len(f_list) * 24

    # initialize result arrays for average files
    # 0 and 1 mean ocean and river ends

    # initialize intermediate results arrays for TEF quantities
    sedges = np.linspace(0, 35, 1001)  # original was 35*20 + 1
    sbins = sedges[:-1] + np.diff(sedges) / 2
    ns = len(sbins)  # number of salinity bins

    # loop over hours
    tta = 0  # averages
    for f_dir in f_list:
        print(str(tta))

        # get arrays for flux calculations from the averages
        a_list = os.listdir(dir0 + f_dir)
        a_list.sort()
        a_list = [x for x in a_list if x[:9] == 'ocean_avg']
        for ai in a_list:
            fn = dir0 + f_dir + '/' + ai
            ds = nc.Dataset(fn)
            if tta == 0:

                Ind = efun.box_inds(lon, 1.5, 44.9, 45.1, fn)
                for vn in Ind.keys():
                    globals()[vn] = Ind[vn]
                    # print(vn)

                G, S, T = zrfun.get_basic_info(fn)
                h = G['h'][latr0:latr1 + 1, lonr0:lonr1 + 1]
                ny, nx = h.shape

                tef_q0 = np.zeros((ns, nt, ny))
                tef_q1 = np.zeros((ns, nt, ny))
                tef_qs0 = np.zeros((ns, nt, ny))
                tef_qs1 = np.zeros((ns, nt, ny))

            # getting fluxes of volume and salt
            dq0 = ds['Huon'][0, :, latu0:latu1 + 1, lonu0].squeeze()
            dq1 = ds['Huon'][0, :, latu0:latu1 + 1, lonu1 + 1].squeeze()
            dqs0 = ds['Huon_salt'][0, :, latu0:latu1 + 1, lonu0].squeeze()
            dqs1 = ds['Huon_salt'][0, :, latu0:latu1 + 1, lonu1 + 1].squeeze()

            # then get the salinity averaged onto the u-grid on both open boundaries
            s0 = (ds['salt'][0, :, latr0:latr1 + 1, lonr0 - 1].squeeze() +
                  ds['salt'][0, :, latr0:latr1 + 1, lonr0].squeeze()) / 2
            s1 = (ds['salt'][0, :, latr0:latr1 + 1, lonr1].squeeze() +
                  ds['salt'][0, :, latr0:latr1 + 1, lonr1 + 1].squeeze()) / 2

            # TEF variables
            # which are also area integrals at ocean and river ends
            # try:
            for yy in range(ny):
                s00 = s0[:, yy].squeeze()
                dq00 = dq0[:, yy].squeeze()
                dqs00 = dqs0[:, yy].squeeze()

                s000 = s00[dq00.mask == False]  # flattens the array
                dq000 = dq00[dq00.mask == False]
                dqs000 = dqs00[dqs00.mask == False]

                inds = np.digitize(s000, sedges, right=True)
                counter = 0
                for ii in inds:
                    tef_q0[ii - 1, tta, yy] += dq000[counter]
                    tef_qs0[ii - 1, tta, yy] += dqs000[counter]
                    counter += 1

                s11 = s1[:, yy].squeeze()
                dq11 = dq1[:, yy].squeeze()
                dqs11 = dqs1[:, yy].squeeze()

                s111 = s11[s11.mask == False]
                dq111 = dq11[dq11.mask == False]
                dqs111 = dqs11[dqs11.mask == False]

                inds = np.digitize(s111, sedges, right=True)
                counter = 0
                for ii in inds:
                    # at each time step these are vectors of hourly transport in
                    # salinity bins (centered at sbins)
                    tef_q1[ii - 1, tta, yy] += dq111[counter]
                    tef_qs1[ii - 1, tta, yy] += dqs111[counter]
                    counter += 1
            ds.close()

            tta += 1

    #%% TEF processing
    # first form tidal averages
    tef_q0_lp = np.mean(tef_q0, axis=1)
    tef_q1_lp = np.mean(tef_q1, axis=1)
    tef_qs0_lp = np.mean(tef_qs0, axis=1)
    tef_qs1_lp = np.mean(tef_qs1, axis=1)

    # start by making the low-passed flux arrays sorted
    # from high to low salinity
    rq0 = np.flipud(tef_q0_lp)
    rqs0 = np.flipud(tef_qs0_lp)
    # then form the cumulative sum (the function Q(s))
    qcs = np.cumsum(rq0, axis=0)
    # and find its maximum: this is Qin, and the salinity
    # at which it occurs is the "dividing salinity" between
    # inflow and outflow that we will use to calculate
    # all TEF quantities
    imax = np.argmax(qcs, axis=0)
    Qin0 = np.zeros(ny)
    QSin0 = np.zeros(ny)
    Qout0 = np.zeros(ny)
    QSout0 = np.zeros(ny)
    for yy in range(ny):
        Qin0[yy] = np.sum(rq0[:imax[yy] + 1, yy], axis=0)  # include the bin at the maximum
        Qout0[yy] = np.sum(rq0[imax[yy] + 1:, yy], axis=0)
        QSin0[yy] = np.sum(rqs0[:imax[yy] + 1, yy], axis=0)
        QSout0[yy] = np.sum(rqs0[imax[yy] + 1:, yy], axis=0)
    # then fix masking so that the nan's from the low-pass are retained
    nmask = np.isnan(tef_q0_lp[0, :])
    Qin0[nmask] = np.nan
    QSin0[nmask] = np.nan
    Qout0[nmask] = np.nan
    QSout0[nmask] = np.nan
    # form derived quantities
    Sin0 = QSin0 / Qin0
    Sout0 = QSout0 / Qout0

    # same steps for the river end
    # OK in this case to do this the original way because
    # it is all freshwater.
    qin = tef_q1_lp.copy()
    qout = tef_q1_lp.copy()
    qsin = tef_qs1_lp.copy()
    qsout = tef_qs1_lp.copy()
    #
    qin[tef_q1_lp > 0] = 0  # switch signs compared to open boundary 0
    qout[tef_q1_lp < 0] = 0
    qsin[tef_q1_lp > 0] = 0
    qsout[tef_q1_lp < 0] = 0
    #
    Qin1 = qin.sum(axis=0)
    Qout1 = qout.sum(axis=0)
    QSin1 = qsin.sum(axis=0)
    QSout1 = qsout.sum(axis=0)
    #
    #Sin1 = QSin1/Qin1

    # Make a dictionary to return variables of interest efficiently
    D = dict()
    D_list = ['ny', 'Qin0', 'Qout0', 'Sin0', 'Sout0']  #,
    #'Qin1']
    for vn in D_list:
        D[vn] = locals()[vn]

    return D
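The heart of the TEF calculation above is the "dividing salinity": transport is binned by salinity, summed cumulatively from the saltiest bin down, and the maximum of that cumulative sum defines Qin. A minimal sketch on a toy 1-D transport profile (all values made up):

import numpy as np

q = np.array([3.0, 5.0, -2.0, -4.0, -1.0])  # transport per bin, high to low salinity
qcs = np.cumsum(q)                # the function Q(s): [3, 8, 6, 2, 1]
imax = np.argmax(qcs)             # index of the dividing salinity (here 1)
Qin = np.sum(q[:imax + 1])        # inflow on the salty side of the divide: 8.0
Qout = np.sum(q[imax + 1:])       # outflow: -7.0
print(Qin, Qout, Qin + Qout)      # the net transport is their sum: 1.0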
Example #32
def get_tracks(fn_list, plon0, plat0, pcs0, TR, trim_loc=False):
    """
    This is the main function doing the particle tracking.
    """

    # unpack items needed from TR
    if TR['rev']:
        dir_tag = 'reverse'
    else:
        dir_tag = 'forward'
    surface = not TR['3d']
    turb = TR['turb']
    ndiv = TR['ndiv']
    windage = TR['windage']

    # get basic info
    G = zrfun.get_basic_info(fn_list[0], only_G=True)
    maskr = np.ones_like(G['mask_rho'])  # G['mask_rho'] = True in water
    maskr[G['mask_rho'] == False] = 0  # maskr = 1 in water
    S = zrfun.get_basic_info(fn_list[0], only_S=True)

    # get time vector of history files
    NT = len(fn_list)
    rot = np.nan * np.ones(NT)
    counter = 0
    for fn in fn_list:
        ds = nc4.Dataset(fn)
        rot[counter] = ds.variables['ocean_time'][:].squeeze()
        counter += 1
        ds.close()

    # This is an attempt to fix a bug that happens every couple of weeks,
    # one which I can't reliably reproduce!  In theory fn_list is sorted
    # by the calling function, but maybe ...??
    # Now (2019.11.08) I think this glitch was caused when the carbon code was
    # modifying the history files at the same time as the particle tracking was
    # going.
    rot.sort()

    delta_t = rot[1] - rot[0]  # seconds between saves

    # this is how we track backwards in time
    if dir_tag == 'reverse':
        delta_t = -delta_t
        fn_list = fn_list[::-1]

    # make vectors to feed to interpolant maker
    R = dict()
    R['rlonr'] = G['lon_rho'][0, :].squeeze()
    R['rlatr'] = G['lat_rho'][:, 0].squeeze()
    R['rlonu'] = G['lon_u'][0, :].squeeze()
    R['rlatu'] = G['lat_u'][:, 0].squeeze()
    R['rlonv'] = G['lon_v'][0, :].squeeze()
    R['rlatv'] = G['lat_v'][:, 0].squeeze()
    R['rcsr'] = S['Cs_r'][:]
    R['rcsw'] = S['Cs_w'][:]

    # these lists are used internally to get other variables as needed
    # (they must all be present in the history files)
    vn_list_vel = ['u', 'v', 'w']
    vn_list_zh = ['zeta', 'h']
    vn_list_wind = ['Uwind', 'Vwind']
    vn_list_other = ['salt', 'temp'] + vn_list_zh + vn_list_vel
    if windage > 0:
        vn_list_other = vn_list_other + vn_list_wind
    # plist_main is what ends up written to output
    plist_main = ['lon', 'lat', 'cs', 'ot', 'z'] + vn_list_other

    if save_dia:
        # diagnostic info
        vn_list_dia = ['hit_sidewall', 'hit_bottom', 'hit_top', 'bad_pcs']

    # Step through times.
    #
    counter = 0
    # rot is a list of all the ocean times (sec) in the current file list (e.g. 25)
    # and pot is a single one of these
    for pot in rot[:-1]:

        # get time indices of surrounding (in time) history files
        it0, it1, frt = zfun.get_interpolant(np.array(pot),
                                             rot,
                                             extrap_nan=False)
        # get Datasets
        ds0 = nc4.Dataset(fn_list[it0[0]])
        ds1 = nc4.Dataset(fn_list[it1[0]])

        if counter == 0:
            if trim_loc == True:
                # remove points on land
                pmask = zfun.interp_scattered_on_plaid(plon0,
                                                       plat0,
                                                       R['rlonr'],
                                                       R['rlatr'],
                                                       maskr,
                                                       exnan=True)
                # keep only points with pmask >= maskr_crit
                pcond = pmask >= maskr_crit
                plon = plon0[pcond]
                plat = plat0[pcond]
                pcs = pcs0[pcond]
            else:
                plon = plon0.copy()
                plat = plat0.copy()
                pcs = pcs0.copy()
            # create result arrays
            NP = len(plon)
            P = dict()
            for vn in plist_main:
                # NOTE: info is packed in a dict P of arrays
                # packed in order (time, particle)
                P[vn] = np.nan * np.ones((NT, NP))

            # initialize diagnostic arrays
            if save_dia:
                for vn in vn_list_dia:
                    P[vn] = np.zeros((NT, NP))  # 0=OK, 1=violation

            # write positions to the results arrays
            P['lon'][it0, :] = plon
            P['lat'][it0, :] = plat
            if surface == True:
                pcs[:] = S['Cs_r'][-1]
            P['cs'][it0, :] = pcs
            P = get_properties(vn_list_other, ds0, it0, P, plon, plat, pcs, R,
                               surface)

        delt = delta_t / ndiv
        # do the particle tracking for a single pair of history files in ndiv steps
        for nd in range(ndiv):

            fr0 = nd / ndiv
            fr1 = (nd + 1) / ndiv
            frmid = (fr0 + fr1) / 2

            # RK4 integration
            V0, ZH0 = get_vel(vn_list_vel, vn_list_zh, ds0, ds1, plon, plat,
                              pcs, R, fr0, surface)
            plon1, plat1, pcs1, dia_dict = update_position(
                R, maskr, V0, ZH0, S, delt / 2, plon, plat, pcs, surface)
            V1, ZH1 = get_vel(vn_list_vel, vn_list_zh, ds0, ds1, plon1, plat1,
                              pcs1, R, frmid, surface)
            plon2, plat2, pcs2, dia_dict = update_position(
                R, maskr, V1, ZH1, S, delt / 2, plon, plat, pcs, surface)
            V2, ZH2 = get_vel(vn_list_vel, vn_list_zh, ds0, ds1, plon2, plat2,
                              pcs2, R, frmid, surface)
            plon3, plat3, pcs3, dia_dict = update_position(
                R, maskr, V2, ZH2, S, delt, plon, plat, pcs, surface)
            V3, ZH3 = get_vel(vn_list_vel, vn_list_zh, ds0, ds1, plon3, plat3,
                              pcs3, R, fr1, surface)
            # add windage, calculated from the middle time
            if (surface == True) and (windage > 0):
                Vwind = get_wind(vn_list_wind, ds0, ds1, plon, plat, pcs, R,
                                 frmid, surface)
                Vwind3 = np.concatenate((windage * Vwind, np.zeros((NP, 1))),
                                        axis=1)
            else:
                Vwind3 = np.zeros((NP, 3))
            plon, plat, pcs, dia_dict = update_position(
                R, maskr, (V0 + 2 * V1 + 2 * V2 + V3) / 6 + Vwind3,
                (ZH0 + 2 * ZH1 + 2 * ZH2 + ZH3) / 6, S, delt, plon, plat, pcs,
                surface)
            if save_dia:
                # diagnostic info for horizontal advection
                P['hit_sidewall'][it1, :] = dia_dict['hit_sidewall']

            # add turbulence to vertical position change (advection already added above)
            if turb == True:
                # pull values of VdAKs and add up to 3-dimensions
                VdAKs = get_dAKs(vn_list_zh, ds0, ds1, plon, plat, pcs, R, S,
                                 frmid, surface)
                # print('VdAKs has %d nans out of %d' % (np.isnan(VdAKs).sum(), len(VdAKs)))
                VdAKs3 = np.concatenate((np.zeros(
                    (NP, 2)), VdAKs[:, np.newaxis]),
                                        axis=1)
                # update position advecting vertically with 1/2 of AKs gradient
                ZH = get_zh(vn_list_zh, ds0, ds1, plon, plat, pcs, R, frmid,
                            surface)
                plon_junk, plat_junk, pcs_half, dia_dict = update_position(
                    R, maskr, VdAKs3 / 2, ZH, S, delt, plon, plat, pcs,
                    surface)
                # get AKs at this height, and thence the turbulent velocity
                Vturb = get_turb(ds0, ds1, VdAKs, delt, plon, plat, pcs_half,
                                 R, frmid, surface)
                # update position vertically for real
                Vturb3 = np.concatenate((np.zeros(
                    (NP, 2)), Vturb[:, np.newaxis]),
                                        axis=1)
                plon_junk, plat_junk, pcs, dia_dict = update_position(
                    R, maskr, Vturb3, ZH, S, delt, plon, plat, pcs, surface)
            if save_dia:
                # diagnostic info for vertical advection
                P['bad_pcs'][it1, :] = dia_dict['bad_pcs']
                P['hit_top'][it1, :] = dia_dict['hit_top']
                P['hit_bottom'][it1, :] = dia_dict['hit_bottom']

        # write positions to the results arrays
        P['lon'][it1, :] = plon
        P['lat'][it1, :] = plat
        if surface == True:
            pcs[:] = S['Cs_r'][-1]
        P['cs'][it1, :] = pcs
        P = get_properties(vn_list_other, ds1, it1, P, plon, plat, pcs, R,
                           surface)

        ds0.close()
        ds1.close()

        counter += 1

    # by doing this the points are going forward in time
    if dir_tag == 'reverse':
        for vn in plist_main:
            P[vn] = P[vn][::-1, :]

    # and save the time vector (seconds in whatever the model reports)
    P['ot'] = rot

    return P
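The turbulence steps above follow the usual random-walk-with-drift recipe for vertical diffusion: a deterministic drift up the diffusivity gradient (the dAKs term) plus a random kick whose displacement variance matches the local diffusivity (a Visser-style random walk). A toy, self-contained sketch of that idea (an assumption about what get_dAKs/get_turb compute, not the project's code):

import numpy as np

def turb_w(AKs, dAKs_dz, dt, n, rng):
    # drift toward higher diffusivity, plus a random velocity whose
    # displacement over dt has variance 2 * AKs * dt
    w_drift = dAKs_dz * np.ones(n)
    w_rand = rng.standard_normal(n) * np.sqrt(2 * AKs / dt)
    return w_drift + w_rand

rng = np.random.default_rng(0)
dt = 3600.0                        # one-hour step
z = np.full(5, -10.0)              # particle depths (m)
z = z + turb_w(AKs=1e-3, dAKs_dz=1e-5, dt=dt, n=z.size, rng=rng) * dt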
Example #33
ndays = len(f_list)
cmap = 'rainbow'

fn_list = os.listdir(dir0 + f_list[0])
fna = [x for x in fn_list if x[-11:-8] == 'avg']

oceanfna = dir0 + f_list[0] + '/' + fna[0]

dsa = nc.Dataset(oceanfna)

# load in the remaining variable
rho = dsa['rho'][0, :, :, :]
NZ, NY, NX = np.shape(rho)

# get grid info from ocean
G, S, T = zrfun.get_basic_info(oceanfna)
lonr = G['lon_rho'][:]
latr = G['lat_rho'][:]
maskr0 = G['mask_rho'][:]
H = G['h'][:]

maskr = np.tile(maskr0, (NZ, 1, 1))

# create cross sectional axes
latvec = latr[:, 0]
l_list = [45.6, 45.3, 45.0, 44.4]
llen = len(l_list)
l = np.zeros(len(l_list), dtype='int')
lonvec = np.zeros([len(l_list), np.shape(lonr)[1]])
for lat in range(len(l_list)):
    l0 = zfun.find_nearest_ind(latvec, l_list[lat])
Example #34
elif args.layer_name == 'vave':
    # a custom extraction of vertically averaged properties
    nlay = -1
    vn_list_2d_t = []
    vn_list_3d_t = []
    vn_list_3d_t_custom = ['vave_salt', 'vave_temp', 'vave_rho']
    vn_list_2d_uv_t = []
    vn_list_3d_uv_t = []
    vn_list_2d_custom = []
else:
    print('Unsupported layer name')
    sys.exit()

# make some things
fn = fn_list[0]
G = zrfun.get_basic_info(fn, only_G=True)
DA = G['DX'] * G['DY']
ny, nx = DA.shape
h = G['h']
S = zrfun.get_basic_info(fn, only_S=True)
zr, zw = zrfun.get_z(h, 0 * h, S)
zlay = zr[nlay, :, :].squeeze()

dzr = np.diff(zw, axis=0)

ds1 = nc.Dataset(fn)
ds2 = nc.Dataset(out_fn, 'w')

# Create dimensions
for dname, the_dim in ds1.dimensions.items():
    if dname in dlist:
Example #35
    data_fn = '/pmr4/eab32/etools/seasurface_data.p'
    zind = -5
    outdir = 'seasurface_movie'
    ax0_title = 'Density anomaly near sea surface'
    vmax = 5.0
    qscale = 250
    quiverleg = 50

# get ocean background
gridfn = '/pmr4/eab32/LiveOcean_ROMS/output/aestus1_base_ae1/f2017.01.01/ocean_his_0001.nc'

ds = nc4.Dataset(gridfn)
density0 = ds['rho'][0, zind, :, :].squeeze()
ds.close()

G = zrfun.get_basic_info(gridfn, only_G=True)
lonr = G['lon_rho'][:]
latr = G['lat_rho'][:]
maskr = G['mask_rho'][:]
maskuu = G['mask_u'][:]
maskvv = G['mask_v'][:]
lonr_s = lonr[1:-1, 1:-1]
latr_s = latr[1:-1, 1:-1]
b0 = efun.box_inds(-0.65, 0.0, 44.0, 46.0, gridfn)
b1 = efun.box_inds(-0.2, 1.5, 44.8, 45.2, gridfn)

D = pickle.load(open(data_fn, 'rb'))

density = D['density_gf'][:]
upd = D['u_prsgrd_gf'][:]
vpd = D['v_prsgrd_gf'][:]
Example #36
Ldir, Lfun = ffun.intro()


#%% ****************** CASE-SPECIFIC CODE *****************

from datetime import datetime
import netCDF4 as nc
import zrfun
import numpy as np

start_time = datetime.now()

out_fn = Ldir['LOogf_f'] + 'tides.nc'
grid_fn = Ldir['grid'] + 'grid.nc'

G = zrfun.get_basic_info(grid_fn, only_G=True)

dst = nc.Dataset(out_fn, 'w', format='NETCDF3_CLASSIC')

# get sizes and initialize the NetCDF file
# specify the constituents to use
cons_list = ['m2', 's2']
ncons = len(cons_list)

lon_rho = G['lon_rho']
lat_rho = G['lat_rho']
mask_rho = G['mask_rho']
ny, nx = lon_rho.shape

dst.createDimension('tide_period', ncons)
dst.createDimension('slen', 10)
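A minimal, self-contained sketch of how the constituent names can be written on dimensions like these with netCDF4 (the file name and the tide_name variable are made up for illustration):

import numpy as np
import netCDF4 as nc

cons_list = ['m2', 's2']
dst = nc.Dataset('tides_demo.nc', 'w', format='NETCDF3_CLASSIC')
dst.createDimension('tide_period', len(cons_list))
dst.createDimension('slen', 10)
vv = dst.createVariable('tide_name', 'S1', ('tide_period', 'slen'))
vv[:] = nc.stringtochar(np.array(cons_list, dtype='S10'))
dst.close()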
Example #37
    print(str(nrl) + ': ' + rel_list[nrl])
my_nrl = input('-- Release number (return = 0) -- ')
if len(my_nrl) == 0:
    my_nrl = 0
rel = rel_list[int(my_nrl)]

# get Datasets
dsr = nc4.Dataset(indir0 + indir + rel)
dsg = nc4.Dataset(indir0 + indir + 'grid.nc')

# and a dict of run info
EI = Lfun.csv_to_dict(indir0 + indir + 'exp_info.csv')

# get grid info
G = zrfun.get_basic_info('/Users/pm7/Documents/LiveOcean_roms/output/' +
                         'cas6_v3_lo8b/f2019.07.04/ocean_his_0001.nc',
                         only_G=True)
xvec = G['lon_rho'][0, :]
yvec = G['lat_rho'][:, 0]

# The goal is to figure out how many of the particles do not lie in the
# region of the initial release.  To do this we will build lists of
# the (j,i) indices of the particles.

# First gather the ji list of the initial release
if EI['ic_name'] == 'hc1':  # Hood Canal using TEF "segments"
    # NOTE: this one requires information about grid indices in certain regions of
    # the Salish Sea, and is very specific to the analysis in x_tef.
    # Not for general use.
    import pickle
    # select the indir
Example #38
#%% create the surface NetCDF file

# input files
in_dir = Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/' + f_string + '/'
fn_list_raw = os.listdir(in_dir)
fn_list = []
for item in fn_list_raw:
    if 'ocean_his' in item and '.nc' in item:
        fn_list.append(in_dir + item)
fn_list.sort()

#%% make z
fn = fn_list[0]
ds = nc.Dataset(fn)
S = zrfun.get_basic_info(fn, only_S=True)
h = ds['h'][:]
z = zrfun.get_z(h, 0*h, S, only_rho=True)
z0 = z[-1,:,:].squeeze()
ds.close()

#%% Initialize the multi-file input dataset
ds1 = nc.MFDataset(fn_list)

#%% make surface velocity
u0 = ds1['u'][:, -1, :, :].squeeze()
v0 = ds1['v'][:, -1, :, :].squeeze()
u = np.nan * ds1['salt'][:, -1, :, :].squeeze()
v = u.copy()
u[:, :, 1:-1] = (u0[:, :, 1:] + u0[:, :, :-1])/2
v[:, 1:-1, :] = (v0[:, 1:, :] + v0[:, :-1, :])/2
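The last two lines above average the staggered u and v fields onto interior rho points: on a ROMS C-grid, u lies between rho points in x (one fewer column) and v between rho points in y (one fewer row). A tiny sketch with made-up sizes:

import numpy as np

NT, NY, NX = 2, 4, 5
u0 = np.random.rand(NT, NY, NX - 1)                  # u on the u-grid
u = np.nan * np.ones((NT, NY, NX))                   # u on the rho grid
u[:, :, 1:-1] = (u0[:, :, 1:] + u0[:, :, :-1]) / 2   # mean of adjacent u points
# the first and last rho columns stay NaN: no u pair straddles them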
Example #39
    if 'ocean_his' in item and '.nc' in item:
        fn_list.append(in_dir + item)
fn_list.sort()

# output location (before transfer to Azure)
out_dir00 = Ldir['LOo'] + 'usrs/'
Lfun.make_dir(out_dir00)
out_dir0 = out_dir00 + Ldir['gtagex'] + '/'
Lfun.make_dir(out_dir0)
out_dir = out_dir0 + f_string + '/'
Lfun.make_dir(out_dir, clean=True)

# get grid info
fn = fn_list[0]
ds = nc.Dataset(fn)
G, S, T = zrfun.get_basic_info(fn)
ds.close()
# get grid and indices (rho-grid) for extraction domain
# x0 = -122.85
# x1 = -122.6
# y0 = 47.6
# y1 = 47.9
x0 = -122.5
x1 = -122.3
y0 = 47.4
y1 = 47.55
Lon = G['lon_rho']
Lat = G['lat_rho']
Lon_vec = Lon[0,:]
Lat_vec = Lat[:,0]
i0, junk, junk = zfun.get_interpolant(np.array(x0), Lon_vec, extrap_nan=True)