Example #1
    fna = f_list[t] + '/ocean_avg_0001.nc'

    oceanfna = dir0 + fna
    oceanfnd = dir0 + fnd

    dsa = nc4.Dataset(oceanfna)
    dsd = nc4.Dataset(oceanfnd)

    if t == 0:
        #get grid info from ocean
        lonr = dsa['lon_rho'][:]
        latr = dsa['lat_rho'][:]
        lonr_s = lonr[1:-1, 1:-1]
        latr_s = latr[1:-1, 1:-1]
        maskr = dsa['mask_rho'][:]
        b0 = efun.box_inds(-0.65, 0.0, 44.0, 46.0, oceanfna)
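        # box_inds appears to return a dict of grid index bounds for the given
        # lon/lat box (e.g. lonr0/lonr1, latr0/latr1, plus u- and v-grid
        # equivalents); see Example #2, where the keys are unpacked into globals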
        # lonu = dso['lon_u'][:]
        # latu = dso['lat_u'][:]
        # lonv = dso['lon_v'][:]
        # latv = dso['lat_v'][:]

    #plot background density
    rho = dsa['rho'][:]
    seafloor_rho = rho[0, 0, :, :].squeeze()

    fig = plt.figure(figsize=(6, 6))

    ax = plt.gca()
    a_ll = [lonr.min(), 1.2, latr.min(), latr.max()]
    rs = np.ma.masked_where(maskr == 0, seafloor_rho)
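    # mask land cells (mask_rho == 0) so pcolormesh leaves them blank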
    ax.pcolormesh(lonr, latr, rs, vmin=-30, vmax=30, cmap='RdBu', alpha=.5)
Example #2
# loop over hours
tta = 0  # averages
for f_dir in f_list:
    print(str(tta))

    # get arrays for flux calculations from the averages
    a_list = os.listdir(dir0 + f_dir)
    a_list.sort()
    a_list = [x for x in a_list if x[:9] == 'ocean_avg']
    for ai in a_list:
        fn = dir0 + f_dir + '/' + ai
        ds = nc.Dataset(fn)

        if tta == 0:

            Ind = efun.box_inds(-1.3, 0.02, 44.85, 45.15, fn)
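            # unpack the index bounds (lonr0, lonr1, latr0, latr1, latu0, ...)
            # returned by box_inds into module-level names used below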
            for vn in Ind.keys():
                globals()[vn] = Ind[vn]

            G, S, T = zrfun.get_basic_info(fn)
            h = G['h'][latr0:latr1 + 1, lonr0:lonr1 + 1]

            ny, nx = h.shape

            tef_q_mouth = np.zeros((ns, nt, ny))
            tef_q_plume = np.zeros((ns, nt, nx))
            tef_q_shelf = np.zeros((ns, nt, ny))
            tef_q_south = np.zeros((ns, nt, nx))
            tef_qs_mouth = np.zeros((ns, nt, ny))
            tef_qs_plume = np.zeros((ns, nt, nx))
            tef_qs_shelf = np.zeros((ns, nt, ny))
Example #3
    # get volume-integrated quantities from the history files
    h_list = os.listdir(dir0 + f_dir)
    h_list.sort()
    h_list = [x for x in h_list if x[:9] == 'ocean_his']
    if tt > 0:
        h_list = h_list[1:]  # drop the zero hour for all but the first day

    for hi in h_list:
        fn = dir0 + f_dir + '/' + hi
        ds = nc.Dataset(fn)
        if tt == 0:

            Ind = efun.box_inds(-0.6, 1.5, 44.5, 45.5, fn)
            for vn in Ind.keys():
                globals()[vn] = Ind[vn]

            G, S, T = zrfun.get_basic_info(fn)
            h = G['h'][latr0:latr1 + 1, lonr0:lonr1 + 1]
            dx = G['DX'][latr0:latr1 + 1, lonr0:lonr1 + 1]
            dy = G['DY'][latr0:latr1 + 1, lonr0:lonr1 + 1]
            da = dx * dy
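            # da is the horizontal cell area on the rho grid, presumably for the
            # volume-integrated quantities noted above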

            ny, nx = h.shape

            tef_q_mouth0 = np.zeros((ns, nt, ny))
            tef_q_plume = np.zeros((ns, nt, nx))
            tef_q_shelf = np.zeros((ns, nt, ny))
            tef_q_south = np.zeros((ns, nt, nx))
Example #4
#get ocean background
gridfn = '/pmr4/eab32/LiveOcean_ROMS/output/aestus1_base_ae1/f2017.01.01/ocean_his_0001.nc'

ds = nc4.Dataset(gridfn)
density0 = ds['rho'][0, zind, :, :].squeeze()
ds.close()

G = zrfun.get_basic_info(gridfn, only_G=True)
lonr = G['lon_rho'][:]
latr = G['lat_rho'][:]
maskr = G['mask_rho'][:]
maskuu = G['mask_u'][:]
maskvv = G['mask_v'][:]
lonr_s = lonr[1:-1, 1:-1]
latr_s = latr[1:-1, 1:-1]
b0 = efun.box_inds(-0.65, 0.0, 44.0, 46.0, gridfn)
b1 = efun.box_inds(-0.2, 1.5, 44.8, 45.2, gridfn)

D = pickle.load(open(data_fn, 'rb'))

density = D['density_gf'][:]
upd = D['u_prsgrd_gf'][:]
vpd = D['v_prsgrd_gf'][:]
uad = D['u_ageo_gf'][:]
vad = D['v_ageo_gf'][:]

nt, ny, nx = np.shape(density)
maskr_s = maskr[1:-1, 1:-1]
maskrr = np.reshape(maskr_s, (1, np.shape(maskr_s)[0], np.shape(maskr_s)[1]))
mask0 = np.tile(maskrr, (nt, 1, 1))
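# mask0 repeats the interior rho-mask over all nt time steps, presumably so it
# can be applied to the (nt, ny, nx) fields loaded above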
Example #5
                       alpha=0.7)
    plt.colorbar(p)

    ax1.axis(a_ll)
    ax1.set_xlabel('Longitude', fontweight='bold')
    ax1.set_ylabel('Latitude', fontweight='bold')
    ax1.text(0.8,
             0.9,
             'Surface',
             transform=ax1.transAxes,
             horizontalalignment='center',
             verticalalignment='center')

    for lat in range(len(l)):
        if l_list[lat] == 45.0:
            bb = efun.box_inds(-0.65, 1.5, 44.0, 46.0, oceanfna)
        else:
            bb = efun.box_inds(-0.65, 0.0, 44.0, 46.0, oceanfna)
        ax1.plot(lonr[l[lat], :bb['lonr1']],
                 latr[l[lat], :bb['lonr1']],
                 'k--',
                 linewidth=3.0)

        # plot density cross-section
        ax = plt.subplot2grid((llen, 2), (lat, 1), colspan=1, rowspan=1)
        rz = rho[:, l[lat], :]
        xx, zz = np.meshgrid(lonvec[lat, :], zvec[:, lat, 0])

        Nlist = np.linspace(vmin, vmax, 15)
        ax.pcolormesh(xx,
                      zvec[:, lat, :],
Example #6
def TEF_line(lon, t):

    Ldir = Lfun.Lstart('aestus1', 'base')
    Ldir['gtagex'] = Ldir['gtag'] + '_' + 'ae1'

    Ldirroms = '/pmr4/eab32/LiveOcean_ROMS/'

    dir0 = Ldirroms + 'output/' + Ldir['gtagex'] + '/'

    f_list = os.listdir(dir0)
    f_list.sort()
    f_list = [x for x in f_list if x[0] == 'f']

    if len(t) == 1:
        f_list = f_list[t[0]:]
    elif len(t) == 2:
        f_list = f_list[t[0]:t[1] + 1]  # control number of days
    else:
        print('t must be of length 1 or 2')
        print('Defaulting to use whole f_list')

    # number of time steps for averages and diagnostics
    nt = len(f_list) * 24

    # initialize result arrays for average files
    # 0 and 1 mean ocean and river ends

    # initialize intermediate results arrays for TEF quantities
    sedges = np.linspace(0, 35, 1001)  # original was 35*20 + 1
    sbins = sedges[:-1] + np.diff(sedges) / 2
    ns = len(sbins)  # number of salinity bins

    # loop over hours
    tta = 0  # averages
    for f_dir in f_list:
        print(str(tta))

        # get arrays for flux calculations from the averages
        a_list = os.listdir(dir0 + f_dir)
        a_list.sort()
        a_list = [x for x in a_list if x[:9] == 'ocean_avg']
        for ai in a_list:
            fn = dir0 + f_dir + '/' + ai
            ds = nc.Dataset(fn)
            if tta == 0:

                Ind = efun.box_inds(lon, 1.5, 44.9, 45.1, fn)
                for vn in Ind.keys():
                    globals()[vn] = Ind[vn]
                    # print(vn)

                G, S, T = zrfun.get_basic_info(fn)
                h = G['h'][latr0:latr1 + 1, lonr0:lonr1 + 1]
                ny, nx = h.shape

                tef_q0 = np.zeros((ns, nt, ny))
                tef_q1 = np.zeros((ns, nt, ny))
                tef_qs0 = np.zeros((ns, nt, ny))
                tef_qs1 = np.zeros((ns, nt, ny))

            # getting fluxes of volume and salt
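            # Huon is the ROMS volume transport through the u-faces (roughly
            # Hz*u*dy, in m^3/s) and Huon_salt the corresponding salt transport,
            # so no additional dz*dy weighting should be needed here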
            dq0 = ds['Huon'][0, :, latu0:latu1 + 1, lonu0].squeeze()
            dq1 = ds['Huon'][0, :, latu0:latu1 + 1, lonu1 + 1].squeeze()
            dqs0 = ds['Huon_salt'][0, :, latu0:latu1 + 1, lonu0].squeeze()
            dqs1 = ds['Huon_salt'][0, :, latu0:latu1 + 1, lonu1 + 1].squeeze()

            # then get the salinity averaged onto the u-grid on both open boundaries
            s0 = (ds['salt'][0, :, latr0:latr1 + 1, lonr0 - 1].squeeze() +
                  ds['salt'][0, :, latr0:latr1 + 1, lonr0].squeeze()) / 2
            s1 = (ds['salt'][0, :, latr0:latr1 + 1, lonr1].squeeze() +
                  ds['salt'][0, :, latr0:latr1 + 1, lonr1 + 1].squeeze()) / 2

            # TEF variables
            # which are also area integrals at ocean and river ends
            # try:
            for yy in range(ny):
                s00 = s0[:, yy].squeeze()
                dq00 = dq0[:, yy].squeeze()
                dqs00 = dqs0[:, yy].squeeze()

                s000 = s00[dq00.mask == False]  # keep only unmasked (wet) points; dq00's mask keeps salinity and flux aligned
                dq000 = dq00[dq00.mask == False]
                dqs000 = dqs00[dqs00.mask == False]

                inds = np.digitize(s000, sedges, right=True)
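                # digitize gives, for each sample, the index of the salinity bin
                # it falls in (1-based for in-range values), so ii - 1 below maps
                # onto sbins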
                counter = 0
                for ii in inds:
                    tef_q0[ii - 1, tta, yy] += dq000[counter]
                    tef_qs0[ii - 1, tta, yy] += dqs000[counter]
                    counter += 1

                s11 = s1[:, yy].squeeze()
                dq11 = dq1[:, yy].squeeze()
                dqs11 = dqs1[:, yy].squeeze()

                s111 = s11[s11.mask == False]
                dq111 = dq11[dq11.mask == False]
                dqs111 = dqs11[dqs11.mask == False]

                inds = np.digitize(s111, sedges, right=True)
                counter = 0
                for ii in inds:
                    # at each time step these are vectors of hourly transport in
                    # salinity bins (centered at sbins)
                    tef_q1[ii - 1, tta, yy] += dq111[counter]
                    tef_qs1[ii - 1, tta, yy] += dqs111[counter]
                    counter += 1
            ds.close()

            tta += 1

    #%% TEF processing
    # first form tidal averages
    tef_q0_lp = np.mean(tef_q0, axis=1)
    tef_q1_lp = np.mean(tef_q1, axis=1)
    tef_qs0_lp = np.mean(tef_qs0, axis=1)
    tef_qs1_lp = np.mean(tef_qs1, axis=1)

    # start by making the low-passed flux arrays sorted
    # from high to low salinity
    rq0 = np.flipud(tef_q0_lp)
    rqs0 = np.flipud(tef_qs0_lp)
    # then form the cumulative sum (the function Q(s))
    qcs = np.cumsum(rq0, axis=0)
    # and find its maximum: this is Qin, and the salinity
    # at which it occurs is the "dividing salinity" between
    # inflow and outflow that we will use to calculate
    # all TEF quantities
    imax = np.argmax(qcs, axis=0)
    Qin0 = np.zeros(ny)
    QSin0 = np.zeros(ny)
    Qout0 = np.zeros(ny)
    QSout0 = np.zeros(ny)
    for yy in range(ny):
        # include index imax so that Qin equals the maximum of the cumulative sum
        Qin0[yy] = np.sum(rq0[:imax[yy] + 1, yy], axis=0)
        Qout0[yy] = np.sum(rq0[imax[yy] + 1:, yy], axis=0)
        QSin0[yy] = np.sum(rqs0[:imax[yy] + 1, yy], axis=0)
        QSout0[yy] = np.sum(rqs0[imax[yy] + 1:, yy], axis=0)
    # then fix masking so that the nan's from the low-pass are retained
    nmask = np.isnan(tef_q0_lp[0, :])
    Qin0[nmask] = np.nan
    QSin0[nmask] = np.nan
    Qout0[nmask] = np.nan
    QSout0[nmask] = np.nan
    # form derived quantities
    Sin0 = QSin0 / Qin0
    Sout0 = QSout0 / Qout0
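    # Sin0 and Sout0 are the transport-weighted salinities of the inflowing and
    # outflowing layers at each y index along the section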

    # same steps for the river end
    # OK in this case to do this the original way because
    # it is all freshwater.
    qin = tef_q1_lp.copy()
    qout = tef_q1_lp.copy()
    qsin = tef_qs1_lp.copy()
    qsout = tef_qs1_lp.copy()
    #
    qin[tef_q1_lp > 0] = 0  # switch signs compared to open boundary 0
    qout[tef_q1_lp < 0] = 0
    qsin[tef_q1_lp > 0] = 0
    qsout[tef_q1_lp < 0] = 0
    #
    Qin1 = qin.sum(axis=0)
    Qout1 = qout.sum(axis=0)
    QSin1 = qsin.sum(axis=0)
    QSout1 = qsout.sum(axis=0)
    #
    #Sin1 = QSin1/Qin1

    # Make a dictionary to return variables of interest efficiently
    D = dict()
    D_list = ['ny', 'Qin0', 'Qout0', 'Sin0', 'Sout0']  #,
    #'Qin1']
    for vn in D_list:
        D[vn] = locals()[vn]

    return D
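
A minimal call sketch (not from the source), assuming the module-level imports used above (os, numpy as np, netCDF4 as nc, Lfun, zrfun, efun); the longitude and day-range values are illustrative:

# hypothetical usage: TEF along the section at lon = -0.3, days 0 through 2 of f_list
D = TEF_line(-0.3, [0, 2])
Qin0 = D['Qin0']  # inflow transport at each y index along the section
Sin0 = D['Sin0']  # flux-weighted salinity of the inflow
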
Example #7
#get grid info from ocean
G, S, T = zrfun.get_basic_info(oceanfnd)
lonu = G['lon_u'][:]
lonv = G['lon_v'][:]
lonr = G['lon_rho'][:]
latu = G['lat_u'][:]
latv = G['lat_v'][:]
latr = G['lat_rho'][:]
masku = G['mask_u'][:]
maskr = G['mask_rho'][:]
maskv = G['mask_v'][:]

a_ll = [-1.0, 0.02, latr.min(), latr.max()]

bb = efun.box_inds(-0.65, -0.1, 44.2, 45.8, oceanfna)
bu = np.array([[bb['lonu0'], bb['lonu1']], [bb['latu0'], bb['latu1']]])
bv = np.array([[bb['lonv0'], bb['lonv1']], [bb['latv0'], bb['latv1']]])
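# bu and bv are 2x2 index arrays: row 0 holds the lon index bounds and row 1 the
# lat index bounds, on the u- and v-grids respectively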

upmin = u_prsgrd[bu[0, 0]:bu[0, 1], bu[1, 0]:bu[1, 1]].min()
upmax = u_prsgrd[bu[0, 0]:bu[0, 1], bu[1, 0]:bu[1, 1]].max()
upm = np.max([np.abs(upmin), np.abs(upmax)]) * 10
vmin = v[bv[0, 0]:bv[0, 1], bv[1, 0]:bv[1, 1]].min()
vmax = v[bv[0, 0]:bv[0, 1], bv[1, 0]:bv[1, 1]].max()
vm = np.max([np.abs(vmin), np.abs(vmax)]) * 50

for t in range(ndays):
    #identify file names for that day
    fn_list = os.listdir(dir0 + f_list[t])

    fnd = [x for x in fn_list if x[-11:-8] == 'dia']