def circumpolar_plot(file_path,
                     var_name,
                     depth_key,
                     depth,
                     depth_bounds,
                     colour_bounds=None,
                     save=False,
                     fig_name=None,
                     grid_path=None):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 50
    N = 31
    deg2rad = pi / 180

    # Read the variable and figure out if 2D or 3D (not including time)
    id = Dataset(file_path, 'r')
    if len(id.variables[var_name].shape) == 4:
        # 3D variable; will have to choose depth later
        data_full = mean(id.variables[var_name][:, :, :, :], axis=0)
        choose_depth = True
    elif len(id.variables[var_name].shape) == 3:
        # 2D variable
        data = mean(id.variables[var_name][:, :, :], axis=0)
        choose_depth = False
    if var_name == 'salt':
        units = 'psu'
    elif var_name == 'm':
        # Convert ice shelf melt rate from m/s to m/yr
        units = 'm/year'
        data = data * 60. * 60. * 24. * 365.25
    else:
        units = id.variables[var_name].units
    long_name = id.variables[var_name].long_name

    # Check for vector variables that need to be rotated
    if var_name in [
            'ubar', 'vbar', 'u', 'v', 'sustr', 'svstr', 'bustr', 'bvstr'
    ]:
        grid_id = Dataset(grid_path, 'r')
        angle = grid_id.variables['angle'][:, :]
        grid_id.close()
        if var_name in ['ubar', 'sustr', 'bustr']:
            # 2D u-variable
            u_data = data[:, :]
            v_data = mean(id.variables[var_name.replace('u', 'v')][:, :, :],
                          axis=0)
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                u_data, v_data, angle)
            data = u_data_lonlat
        elif var_name in ['vbar', 'svstr', 'bvstr']:
            # 2D v-variable
            v_data = data[:, :]
            u_data = mean(id.variables[var_name.replace('v', 'u')][:, :, :],
                          axis=0)
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                u_data, v_data, angle)
            data = v_data_lonlat
        elif var_name in ['u']:
            # 3D u-variable
            data_full_ugrid = data_full[:, :, :]
            data_full = ma.empty([
                data_full_ugrid.shape[0], data_full_ugrid.shape[1],
                data_full_ugrid.shape[2] + 1
            ])
            for k in range(N):
                u_data = data_full_ugrid[k, :, :]
                v_data = mean(id.variables[var_name.replace('u',
                                                            'v')][:, k, :, :],
                              axis=0)
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                    u_data, v_data, angle)
                data_full[k, :, :] = u_data_lonlat
        elif var_name in ['v']:
            # 3D v-variable
            data_full_vgrid = data_full[:, :, :]
            data_full = ma.empty([
                data_full_vgrid.shape[0], data_full_vgrid.shape[1] + 1,
                data_full_vgrid.shape[2]
            ])
            for k in range(N):
                v_data = data_full_vgrid[k, :, :]
                u_data = mean(id.variables[var_name.replace('v',
                                                            'u')][:, k, :, :],
                              axis=0)
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                    u_data, v_data, angle)
                data_full[k, :, :] = v_data_lonlat

    # Read grid variables
    h = id.variables['h'][:, :]
    zice = id.variables['zice'][:, :]
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    mask = id.variables['mask_rho'][:, :]
    id.close()

    # Throw away the overlapping periodic boundary
    #if choose_depth:
    #    data_full = data_full[:,:,:-1]
    #else:
    #    data = data[:,:-1]
    #lon = lon[:,:-1]
    #lat = lat[:,:-1]
    #h = h[:,:-1]
    #zice = zice[:,:-1]

    # Convert to spherical coordinates
    #x = -(lat+90)*cos(lon*deg2rad+pi/2)
    #y = (lat+90)*sin(lon*deg2rad+pi/2)

    # Choose what to write on the title about depth
    if choose_depth:
        if depth_key == 0:
            depth_string = 'at surface'
        elif depth_key == 1:
            depth_string = 'at bottom'
        elif depth_key == 2:
            depth_string = 'at ' + str(int(round(-depth))) + ' m'
        elif depth_key == 3:
            depth_string = 'vertically averaged'
        elif depth_key == 4:
            depth_string = 'vertically averaged between ' + str(
                int(round(-depth_bounds[0]))) + ' and ' + str(
                    int(round(-depth_bounds[1]))) + ' m'
    else:
        depth_string = ''

    if choose_depth:
        # For 3D variables, select data corresponding to depth choice
        if depth_key == 0:
            # Surface layer
            data = data_full[-1, :, :]
        elif depth_key == 1:
            # Bottom layer
            data = data_full[0, :, :]
        else:
            # We will need z-coordinates and possibly dz
            dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s,
                                              theta_b, hc, N)
            if depth_key == 2:
                # Interpolate to given depth
                data = interp_depth(data_full, z, depth)
            elif depth_key == 3:
                # Vertically average entire water column
                data = sum(data_full * dz, axis=0) / sum(dz, axis=0)
            elif depth_key == 4:
                # Vertically average between given depths
                data = average_btw_depths(data_full, z, dz, depth_bounds)

    if colour_bounds is not None:
        # User has set bounds on colour scale
        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
        if colour_bounds[0] == -colour_bounds[1]:
            # Bounds are centered on zero, so choose a blue-to-red colourmap
            # centered on yellow
            colour_map = 'RdYlBu_r'
        else:
            colour_map = 'jet'
    else:
        # Determine bounds automatically
        if var_name in [
                'u', 'v', 'ubar', 'vbar', 'm', 'shflux', 'ssflux', 'sustr',
                'svstr', 'bustr', 'bvstr', 'ssflux_restoring'
        ]:
            # Center levels on 0 for certain variables, with a blue-to-red
            # colourmap
            max_val = amax(abs(data))
            lev = linspace(-max_val, max_val, num=40)
            colour_map = 'RdYlBu_r'
        else:
            lev = linspace(amin(data), amax(data), num=40)
            colour_map = 'jet'

    data = ma.masked_where(mask == 0, data)

    # Plot
    fig = figure(figsize=(16, 12))
    fig.add_subplot(1, 1, 1, aspect='equal')
    pcolormesh(mask, cmap='gray_r')
    # Use the colour level bounds computed above (works whether colour_bounds was given or not)
    pcolormesh(data, cmap=colour_map, vmin=lev[0], vmax=lev[-1])
    ylim(0, len(data[:, 0]))
    xlim(0, len(data[0, :]))

    #contourf(x, y, data, lev, cmap=colour_map, extend='both')
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    title('average ' + long_name + ' (' + units + ')\n' + depth_string,
          fontsize=30)
    axis('off')

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
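
# Example call for circumpolar_plot (a sketch; the file names are placeholders for the
# user's own ROMS output). grid_path is only needed for vector variables such as u or v.
circumpolar_plot('ocean_avg.nc', 'temp', depth_key=2, depth=-500, depth_bounds=None,
                 colour_bounds=None, save=True, fig_name='temp_500m.png')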

# Example 2
def bugs_ts_distribution(grid_file, ini_file, upwind_file, akima_file,
                         split_file):

    # Only consider regions with bathymetry deeper than 1500 m, not in ice
    # shelf cavities, and cells deeper than 200 m
    h0 = 1500
    z0 = 200
    # Bounds on temperature and salinity bins
    min_salt = 33.8
    max_salt = 36.6
    min_temp = -2
    max_temp = 21
    # Bounds to actually plot
    min_salt_plot = 34
    max_salt_plot = 34.7
    min_temp_plot = 2
    max_temp_plot = 8
    # Number of temperature and salinity bins
    num_bins = 1000
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])
    # Set up 2D arrays of temperature bins x salinity bins to increment with
    # volume of water masses
    ts_vals_ini = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_upwind = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_akima = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_split = zeros([size(temp_centres), size(salt_centres)])
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres))) - 1000
    # Density contours to plot
    density_lev = arange(26.5, 28 + 0.25, 0.25)

    print 'Reading grid'
    id = Dataset(grid_file, 'r')
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    h = id.variables['h'][:, :]
    zice = id.variables['zice'][:, :]
    id.close()
    num_lat = size(lat, 0)
    num_lon = size(lon, 1)
    # Get integrands on 3D grid
    dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc,
                                      N)
    # Get volume integrand
    dV = dx * dy * dz

    print 'Reading data'
    # Read temp and salt from the first history output step, output before
    # timestepping even starts (i.e. initial conditions for January 1992)
    id = Dataset(ini_file, 'r')
    ini_temp = id.variables['temp'][0, :, :, :]
    ini_salt = id.variables['salt'][0, :, :, :]
    id.close()
    # Then read temp and salt averaged over the last January for each simulation
    id = Dataset(upwind_file, 'r')
    upwind_temp = id.variables['temp'][0, :, :, :]
    upwind_salt = id.variables['salt'][0, :, :, :]
    id.close()
    id = Dataset(akima_file, 'r')
    akima_temp = id.variables['temp'][0, :, :, :]
    akima_salt = id.variables['salt'][0, :, :, :]
    id.close()
    id = Dataset(split_file, 'r')
    split_temp = id.variables['temp'][0, :, :, :]
    split_salt = id.variables['salt'][0, :, :, :]
    id.close()

    print 'Binning temperature and salinity'
    # Loop over 2D grid boxes
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask
            if ini_temp[0, j, i] is ma.masked:
                continue
            # Check for ice shelf cavity
            if zice[j, i] < 0:
                continue
            # Check for too-shallow bathymetry
            if h[j, i] < h0:
                continue
            for k in range(N):
                # Check for too-shallow cells
                if abs(z[k, j, i]) < z0:
                    continue
                # First categorise the initial data
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > ini_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > ini_salt[k, j, i])[0][0] - 1
                # Increment bins with volume
                ts_vals_ini[temp_index, salt_index] += dV[k, j, i]
                # Upwind simulation
                temp_index = nonzero(
                    temp_bins > upwind_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(
                    salt_bins > upwind_salt[k, j, i])[0][0] - 1
                ts_vals_upwind[temp_index, salt_index] += dV[k, j, i]
                # Akima simulation
                temp_index = nonzero(temp_bins > akima_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > akima_salt[k, j, i])[0][0] - 1
                ts_vals_akima[temp_index, salt_index] += dV[k, j, i]
                # Split simulation
                temp_index = nonzero(temp_bins > split_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > split_salt[k, j, i])[0][0] - 1
                ts_vals_split[temp_index, salt_index] += dV[k, j, i]
    # Mask bins with zero volume
    ts_vals_ini = ma.masked_where(ts_vals_ini == 0, ts_vals_ini)
    ts_vals_upwind = ma.masked_where(ts_vals_upwind == 0, ts_vals_upwind)
    ts_vals_akima = ma.masked_where(ts_vals_akima == 0, ts_vals_akima)
    ts_vals_split = ma.masked_where(ts_vals_split == 0, ts_vals_split)

    # Find the volume bounds for plotting
    min_val = log(
        amin(
            array([
                amin(ts_vals_ini),
                amin(ts_vals_upwind),
                amin(ts_vals_akima),
                amin(ts_vals_split)
            ])))
    max_val = log(
        amax(
            array([
                amax(ts_vals_ini),
                amax(ts_vals_upwind),
                amax(ts_vals_akima),
                amax(ts_vals_split)
            ])))

    print 'Plotting'
    fig = figure(figsize=(14, 24))
    gs = GridSpec(2, 2)
    gs.update(left=0.1,
              right=0.9,
              bottom=0.12,
              top=0.95,
              wspace=0.05,
              hspace=0.12)
    # Initial conditions
    ax = subplot(gs[0, 0])
    # Plot with log scale
    pcolor(salt_centres,
           temp_centres,
           log(ts_vals_ini),
           vmin=min_val,
           vmax=max_val,
           cmap='jet')
    # Add density contours
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=22)
    title('Initial conditions', fontsize=26)
    # Upwind
    ax = subplot(gs[0, 1])
    pcolor(salt_centres,
           temp_centres,
           log(ts_vals_upwind),
           vmin=min_val,
           vmax=max_val,
           cmap='jet')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    title('Upwind third-order advection', fontsize=26)
    # Akima
    ax = subplot(gs[1, 0])
    pcolor(salt_centres,
           temp_centres,
           log(ts_vals_akima),
           vmin=min_val,
           vmax=max_val,
           cmap='jet')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=22)
    title('Akima advection', fontsize=26)
    # Split
    ax = subplot(gs[1, 1])
    img = pcolor(salt_centres,
                 temp_centres,
                 log(ts_vals_split),
                 vmin=min_val,
                 vmax=max_val,
                 cmap='jet')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    title('RSUP3 advection', fontsize=26)
    # Colorbar at bottom
    cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.02])
    cbar = colorbar(img, cax=cbaxes, orientation='horizontal')
    cbar.ax.tick_params(labelsize=14)
    text(0.5,
         0.01,
         'log of volume',
         fontsize=20,
         transform=fig.transFigure,
         ha='center')
    # Main title
    suptitle('Deep water masses after 25 years: AAIW', fontsize=30)
    fig.show()
    fig.savefig('bugs_ts_distribution.png')
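
# Example call for bugs_ts_distribution (a sketch; the five paths are placeholders for the
# ROMS grid file, the initial-conditions history file, and the January averages from the
# upwind, Akima, and RSUP3 simulations):
bugs_ts_distribution('roms_grid.nc', 'ocean_his_ini.nc', 'ocean_avg_upwind.nc',
                     'ocean_avg_akima.nc', 'ocean_avg_split.nc')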

# Example 3
def timeseries_3D(grid_path, file_path, log_path):

    # Grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    rho0 = 1000.0  # Reference density (kg/m^3)
    Cp = 3974  # Specific heat of polar seawater (J/K/kg)
    C2K = 273.15  # Celsius to Kelvin conversion

    time = []
    #    ohc = []
    avgsalt = []
    #    tke = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except (ValueError):
                # Reached the header for the next variable
                break
        #for line in f:
        #    try:
        #        ohc.append(float(line))
        #    except (ValueError):
        #        break
        for line in f:
            try:
                avgsalt.append(float(line))
            except (ValueError):
                break
        #for line in f:
        #    tke.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(grid_path, 'r')
    h = id.variables['h'][:-15, 1:-1]
    zice = id.variables['zice'][:-15, 1:-1]
    lon = id.variables['lon_rho'][:-15, 1:-1]
    lat = id.variables['lat_rho'][:-15, 1:-1]
    mask = id.variables['mask_rho'][:-15, 1:-1]
    # Keep the overlapping periodic boundary on "angle" for now
    angle = id.variables['angle'][:-15, :]
    id.close()

    id = Dataset(file_path, 'r')
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:] / (60 * 60 * 24 * 365.25)
    num_time = size(new_time)
    # Concatenate with time values from log file
    for t in range(num_time):
        time.append(new_time[t])

    # Process 10 time indices at a time so we don't use too much memory
    start_t = 0
    while True:
        end_t = min(start_t + 10, num_time)
        print 'Processing time indices ' + str(start_t +
                                               1) + ' to ' + str(end_t)
        num_time_curr = end_t - start_t

        print 'Calculating time-dependent dV'
        # Read time-dependent sea surface height
        zeta = id.variables['zeta'][start_t:end_t, :-15, 1:-1]
        # Calculate time-dependent dz
        dz = ma.empty([num_time_curr, N, size(lon, 0), size(lon, 1)])
        for t in range(num_time_curr):
            # dx and dy will be recomputed unnecessarily each timestep
            # but that's ok
            dx, dy, dz_tmp, z = cartesian_grid_3d(lon, lat, h, zice, theta_s,
                                                  theta_b, hc, N,
                                                  zeta[t, :, :])
            dz[t, :, :, :] = dz_tmp
        # Calculate time-dependent dV and mask with land mask
        # Here mask, dx, dy are all copied into arrays of dimension
        # time x depth x lat x lon
        dV = ma.masked_where(
            tile(mask, (num_time_curr, N, 1, 1)) == 0,
            tile(dx, (num_time_curr, 1, 1, 1)) *
            tile(dy, (num_time_curr, 1, 1, 1)) * dz)

        print 'Reading data'
        #        temp = id.variables['temp'][start_t:end_t,:,:-15,1:-1]
        salt = id.variables['salt'][start_t:end_t, :, :-15, 1:-1]
        rho = id.variables['rho'][start_t:end_t, :, :-15, 1:-1] + rho0
        # Keep overlapping periodic boundary for u and v
        #        u_xy = id.variables['u'][start_t:end_t,:,:-15,:]
        #        v_xy = id.variables['v'][start_t:end_t,:,:-15,:]

        #        print 'Interpolating velocities onto rho-grid'
        #        # We are actually rotating them at the same time as interpolating
        #        # which is a bit of unnecessary work (sum of squares won't change with
        #        # rotation) but not much extra work, and it's conveneint
        #        u = ma.empty(shape(temp))
        #        v = ma.empty(shape(temp))
        #        for t in range(num_time_curr):
        #            for k in range(N):
        #                u_tmp, v_tmp = rotate_vector_roms(u_xy[t,k,:,:], v_xy[t,k,:,:], angle)
        #                u[t,k,:,:] = u_tmp[:,1:-1]
        #                v[t,k,:,:] = v_tmp[:,1:-1]

        print 'Building timeseries'
        for t in range(num_time_curr):
            # Integrate temp*rho*Cp*dV to get OHC
            #            ohc.append(sum((temp[t,:,:,:]+C2K)*rho[t,:,:,:]*Cp*dV[t,:,:,:]))
            # Average salinity (weighted with rho*dV)
            avgsalt.append(
                sum(salt[t, :, :, :] * rho[t, :, :, :] * dV[t, :, :, :]) /
                sum(rho[t, :, :, :] * dV[t, :, :, :]))
            # Integrate 0.5*rho*speed^2*dV to get TKE
            #tke.append(sum(0.5*rho[t,:,:,:]*(u[t,:,:,:]**2 + v[t,:,:,:]**2)*dV[t,:,:,:]))

        # Get ready for next 10 time indices
        if end_t == num_time:
            break
        start_t = end_t

    id.close()

    #    print 'Plotting ocean heat content'
    #    clf()
    #    plot(time, ohc)
    #    xlabel('Years')
    #    ylabel('Southern Ocean Heat Content (J)')
    #    grid(True)
    #    savefig('ohc.png')

    print 'Plotting average salinity'
    clf()
    plot(time, avgsalt)
    xlabel('Years')
    ylabel('Southern Ocean Average Salinity (psu)')
    grid(True)
    savefig('avgsalt.png')

    #    print 'Plotting total kinetic energy'
    #    clf()
    #    plot(time, tke)
    #    xlabel('Years')
    #    ylabel('Southern Ocean Total Kinetic Energy (J)')
    #    grid(True)
    #    savefig('tke.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    #f.write('Southern Ocean Heat Content (J):\n')
    #for elm in ohc:
    #    f.write(str(elm) + '\n')
    f.write('Southern Ocean Average Salinity (psu):\n')
    for elm in avgsalt:
        f.write(str(elm) + '\n')
    #f.write('Southern Ocean Total Kinetic Energy (J):\n')
    #for elm in tke:
    #    f.write(str(elm) + '\n')
    f.close()
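
# Example call for timeseries_3D (a sketch; paths are placeholders). Previously computed
# values are read from log_path if it exists, and the log file is rewritten at the end.
timeseries_3D('roms_grid.nc', 'ocean_avg.nc', 'timeseries_3D.log')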

# Example 4
def mip_calc_watermasses(roms_grid, roms_file, fesom_mesh_lr, fesom_mesh_hr,
                         fesom_file_lr, fesom_file_hr):

    # Sectors to consider
    sector_names = [
        'Filchner-Ronne Ice Shelf Cavity', 'Eastern Weddell Region Cavities',
        'Amery Ice Shelf Cavity', 'Australian Sector Cavities',
        'Ross Sea Cavities', 'Amundsen Sea Cavities',
        'Bellingshausen Sea Cavities', 'Larsen Ice Shelf Cavities',
        'All Ice Shelf Cavities'
    ]
    num_sectors = len(sector_names)
    # Water masses to consider
    wm_names = ['ISW', 'AASW', 'CDW', 'MCDW', 'WW', 'HSSW']
    num_watermasses = len(wm_names)
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # FESOM mesh parameters
    circumpolar = True
    cross_180 = False

    print 'Processing MetROMS'
    # Read ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    roms_h = id.variables['h'][:, :]
    roms_zice = id.variables['zice'][:, :]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get integrands on 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(
        roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get volume integrand
    dV = roms_dx * roms_dy * roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0, :, :, :]
    roms_salt = id.variables['salt'][0, :, :, :]
    id.close()
    # Initialise volume of each water mass in each sector
    roms_vol_watermass = zeros([num_watermasses, num_sectors])
    # Calculate water mass breakdown
    for j in range(num_lat):
        for i in range(num_lon):
            # Select ice shelf points
            if roms_zice[j, i] < 0:
                # Figure out which sector this point falls into
                lon = roms_lon[j, i]
                if lon > 180:
                    lon -= 360
                lat = roms_lat[j, i]
                if lon >= -85 and lon < -30 and lat < -74:
                    # Filchner-Ronne
                    sector = 0
                elif lon >= -30 and lon < 65:
                    # Eastern Weddell region
                    sector = 1
                elif lon >= 65 and lon < 76:
                    # Amery
                    sector = 2
                elif lon >= 76 and lon < 165 and lat >= -74:
                    # Australian sector
                    sector = 3
                elif (lon >= 155 and lon < 165
                      and lat < -74) or (lon >= 165) or (lon < -140):
                    # Ross Sea
                    sector = 4
                elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98
                                                      and lat < -73.1):
                    # Amundsen Sea
                    sector = 5
                elif (lon >= -104 and lon < -98
                      and lat >= -73.1) or (lon >= -98 and lon < -66
                                            and lat >= -75):
                    # Bellingshausen Sea
                    sector = 6
                elif lon >= -66 and lon < -59 and lat >= -74:
                    # Larsen Ice Shelves
                    sector = 7
                else:
                    print 'No region found for lon=', str(lon), ', lat=', str(
                        lat)
                    break  #return
                # Loop downward
                for k in range(N):
                    curr_temp = roms_temp[k, j, i]
                    curr_salt = roms_salt[k, j, i]
                    curr_volume = dV[k, j, i]
                    # Get surface freezing point at this salinity
                    curr_tfrz = curr_salt / (-18.48 + 18.48 / 1e3 * curr_salt)
                    # Figure out what water mass this is
                    if curr_temp < curr_tfrz:
                        # ISW
                        wm_key = 0
                    elif curr_salt < 34:
                        # AASW
                        wm_key = 1
                    elif curr_temp > 0:
                        # CDW
                        wm_key = 2
                    elif curr_temp > -1:
                        # MCDW
                        wm_key = 3
                    elif curr_salt < 34.5:
                        # WW
                        wm_key = 4
                    else:
                        # HSSW
                        wm_key = 5
                    # Integrate volume for the right water mass and sector
                    roms_vol_watermass[wm_key, sector] += curr_volume
                    # Also integrate total Antarctica
                    roms_vol_watermass[wm_key, -1] += curr_volume
    # Find total volume of each sector by adding up the volume of each
    # water mass
    roms_vol_sectors = sum(roms_vol_watermass, axis=0)
    # Calculate percentage of each water mass in each sector
    roms_percent_watermass = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            roms_percent_watermass[wm_key, sector] = roms_vol_watermass[
                wm_key, sector] / roms_vol_sectors[sector] * 100

    print 'Processing low-res FESOM'
    # Build mesh
    elements_lr = fesom_grid(fesom_mesh_lr, circumpolar, cross_180)
    id = Dataset(fesom_file_lr, 'r')
    temp_nodes_lr = id.variables['temp'][0, :]
    salt_nodes_lr = id.variables['salt'][0, :]
    id.close()
    fesom_vol_watermass_lr = zeros([num_watermasses, num_sectors])
    for i in range(len(elements_lr)):
        elm = elements_lr[i]
        if elm.cavity:
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                sector = 0
            elif lon >= -30 and lon < 65:
                sector = 1
            elif lon >= 65 and lon < 76:
                sector = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                sector = 3
            elif (lon >= 155 and lon < 165
                  and lat < -74) or (lon >= 165) or (lon < -140):
                sector = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98
                                                  and lat < -73.1):
                sector = 5
            elif (lon >= -104 and lon < -98
                  and lat >= -73.1) or (lon >= -98 and lon < -66
                                        and lat >= -75):
                sector = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                sector = 7
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  #return
            # Get area of 2D element
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[
                        2].below is None:
                    # Reached the bottom
                    break
                # Calculate average temperature, salinity, and
                # layer thickness for this 3D triangular prism
                temp_vals = []
                salt_vals = []
                dz_vals = []
                for n in range(3):
                    temp_vals.append(temp_nodes_lr[nodes[n].id])
                    salt_vals.append(salt_nodes_lr[nodes[n].id])
                    temp_vals.append(temp_nodes_lr[nodes[n].below.id])
                    salt_vals.append(salt_nodes_lr[nodes[n].below.id])
                    dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                    # Get ready for next iteration of loop
                    nodes[n] = nodes[n].below
                curr_temp = mean(array(temp_vals))
                curr_salt = mean(array(salt_vals))
                curr_volume = area * mean(array(dz_vals))
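                # Get surface freezing point at this salinity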
                curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(
                    curr_salt**3) - 2.155e-4 * curr_salt**2
                if curr_temp < curr_tfrz:
                    wm_key = 0
                elif curr_salt < 34:
                    wm_key = 1
                elif curr_temp > 0:
                    wm_key = 2
                elif curr_temp > -1:
                    wm_key = 3
                elif curr_salt < 34.5:
                    wm_key = 4
                else:
                    wm_key = 5
                fesom_vol_watermass_lr[wm_key, sector] += curr_volume
                fesom_vol_watermass_lr[wm_key, -1] += curr_volume
    fesom_vol_sectors_lr = sum(fesom_vol_watermass_lr, axis=0)
    fesom_percent_watermass_lr = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            fesom_percent_watermass_lr[
                wm_key, sector] = fesom_vol_watermass_lr[
                    wm_key, sector] / fesom_vol_sectors_lr[sector] * 100

    print 'Processing high-res FESOM'
    elements_hr = fesom_grid(fesom_mesh_hr, circumpolar, cross_180)
    fesom_vol_watermass_hr = zeros([num_watermasses, num_sectors])
    id = Dataset(fesom_file_hr, 'r')
    temp_nodes_hr = id.variables['temp'][0, :]
    salt_nodes_hr = id.variables['salt'][0, :]
    id.close()
    for i in range(len(elements_hr)):
        elm = elements_hr[i]
        if elm.cavity:
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                sector = 0
            elif lon >= -30 and lon < 65:
                sector = 1
            elif lon >= 65 and lon < 76:
                sector = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                sector = 3
            elif (lon >= 155 and lon < 165
                  and lat < -74) or (lon >= 165) or (lon < -140):
                sector = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98
                                                  and lat < -73.1):
                sector = 5
            elif (lon >= -104 and lon < -98
                  and lat >= -73.1) or (lon >= -98 and lon < -66
                                        and lat >= -75):
                sector = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                sector = 7
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  #return
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[
                        2].below is None:
                    break
                temp_vals = []
                salt_vals = []
                dz_vals = []
                for n in range(3):
                    temp_vals.append(temp_nodes_hr[nodes[n].id])
                    salt_vals.append(salt_nodes_hr[nodes[n].id])
                    temp_vals.append(temp_nodes_hr[nodes[n].below.id])
                    salt_vals.append(salt_nodes_hr[nodes[n].below.id])
                    dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                    nodes[n] = nodes[n].below
                curr_temp = mean(array(temp_vals))
                curr_salt = mean(array(salt_vals))
                curr_volume = area * mean(array(dz_vals))
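                # Get surface freezing point at this salinity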
                curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(
                    curr_salt**3) - 2.155e-4 * curr_salt**2
                if curr_temp < curr_tfrz:
                    wm_key = 0
                elif curr_salt < 34:
                    wm_key = 1
                elif curr_temp > 0:
                    wm_key = 2
                elif curr_temp > -1:
                    wm_key = 3
                elif curr_salt < 34.5:
                    wm_key = 4
                else:
                    wm_key = 5
                fesom_vol_watermass_hr[wm_key, sector] += curr_volume
                fesom_vol_watermass_hr[wm_key, -1] += curr_volume
    fesom_vol_sectors_hr = sum(fesom_vol_watermass_hr, axis=0)
    fesom_percent_watermass_hr = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            fesom_percent_watermass_hr[
                wm_key, sector] = fesom_vol_watermass_hr[
                    wm_key, sector] / fesom_vol_sectors_hr[sector] * 100

    # Print results
    for sector in range(num_sectors):
        print sector_names[sector]
        print 'MetROMS:'
        for wm_key in range(num_watermasses):
            print str(roms_percent_watermass[wm_key,
                                             sector]) + '% ' + wm_names[wm_key]
        print 'FESOM low-res:'
        for wm_key in range(num_watermasses):
            print str(
                fesom_percent_watermass_lr[wm_key,
                                           sector]) + '% ' + wm_names[wm_key]
        print 'FESOM high-res:'
        for wm_key in range(num_watermasses):
            print str(
                fesom_percent_watermass_hr[wm_key,
                                           sector]) + '% ' + wm_names[wm_key]
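
# Example call for mip_calc_watermasses (a sketch; all paths are placeholders for the ROMS
# grid and output files, the low- and high-resolution FESOM mesh directories, and the
# corresponding FESOM output files):
mip_calc_watermasses('roms_grid.nc', 'roms_avg.nc', '/path/to/fesom_mesh_lr/',
                     '/path/to/fesom_mesh_hr/', 'fesom_oce_lr.nc', 'fesom_oce_hr.nc')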

# Example 5
def mip_cavity_fields(var_name, roms_grid, roms_file, fesom_mesh_path,
                      fesom_file):

    # Name of each ice shelf
    shelf_names = [
        'Larsen D Ice Shelf', 'Larsen C Ice Shelf',
        'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf',
        'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf',
        'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf',
        'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf',
        'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf',
        'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf',
        'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf',
        'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves',
        'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'
    ]
    # Beginnings of filenames for figures
    fig_heads = [
        'larsen_d', 'larsen_c', 'wilkins_georgevi_stange', 'ronne_filchner',
        'abbot', 'pig', 'thwaites', 'dotson', 'getz', 'nickerson',
        'sulzberger', 'mertz', 'totten_moscowuni', 'shackleton', 'west',
        'amery', 'prince_harald', 'baudouin_borchgrevink', 'lazarev', 'nivl',
        'fimbul_jelbart_ekstrom', 'brunt_riiser_larsen', 'ross'
    ]
    # Limits on longitude and latitude for each ice shelf
    # Note Ross crosses 180W=180E
    lon_min = [
        -62.67, -65.5, -79.17, -85, -104.17, -102.5, -108.33, -114.5, -135.67,
        -149.17, -155, 144, 115, 94.17, 80.83, 65, 33.83, 19, 12.9, 9.33,
        -10.05, -28.33, 158.33
    ]
    lon_max = [
        -59.33, -60, -66.67, -28.33, -88.83, -99.17, -103.33, -111.5, -114.33,
        -140, -145, 146.62, 123.33, 102.5, 89.17, 75, 37.67, 33.33, 16.17,
        12.88, 7.6, -10.33, -146.67
    ]
    lat_min = [
        -73.03, -69.35, -74.17, -83.5, -73.28, -75.5, -75.5, -75.33, -74.9,
        -76.42, -78, -67.83, -67.17, -66.67, -67.83, -73.67, -69.83, -71.67,
        -70.5, -70.75, -71.83, -76.33, -85
    ]
    lat_max = [
        -69.37, -66.13, -69.5, -74.67, -71.67, -74.17, -74.67, -73.67, -73,
        -75.17, -76.41, -66.67, -66.5, -64.83, -66.17, -68.33, -68.67, -68.33,
        -69.33, -69.83, -69.33, -71.5, -77
    ]
    num_shelves = len(shelf_names)

    # Constants
    sec_per_year = 365 * 24 * 3600
    deg2rad = pi / 180.0
    # Parameters for missing circle in ROMS grid
    lon_c = 50
    lat_c = -83
    radius = 10.1
    nbdry = -63 + 90
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Number of bins in each direction for vector overlay
    num_bins = 50

    print 'Reading ROMS fields'
    if var_name == 'draft':
        id = Dataset(roms_grid, 'r')
    else:
        id = Dataset(roms_file, 'r')
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    roms_mask = id.variables['mask_rho'][:, :]
    roms_zice = id.variables['zice'][:, :]
    if var_name == 'draft':
        # Switch signs
        roms_data = -1 * id.variables['zice'][:, :]
    elif var_name == 'melt':
        # Convert from m/s to m/y
        roms_data = id.variables['m'][0, :, :] * sec_per_year
    elif var_name == 'temp':
        # Bottom layer
        roms_data = id.variables['temp'][0, 0, :, :]
    elif var_name == 'salt':
        # Bottom layer
        roms_data = id.variables['salt'][0, 0, :, :]
    elif var_name in ['vsfc', 'vavg']:
        # Get angle from the grid file
        id2 = Dataset(roms_grid, 'r')
        angle = id2.variables['angle'][:, :]
        id2.close()
        if var_name == 'vsfc':
            # Read surface u and v
            u_tmp = id.variables['u'][0, -1, :, :]
            v_tmp = id.variables['v'][0, -1, :, :]
            # Interpolate to rho grid and unrotate
            u_rho, v_rho = rotate_vector_roms(u_tmp, v_tmp, angle)
        elif var_name == 'vavg':
            # Read full 3D u and v
            u_3d_tmp = id.variables['u'][0, :, :, :]
            v_3d_tmp = id.variables['v'][0, :, :, :]
            # Read bathymetry from grid file
            id2 = Dataset(roms_grid, 'r')
            roms_h = id2.variables['h'][:, :]
            id2.close()
            # Get integrands on 3D grid; we only care about dz
            dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h,
                                              roms_zice, theta_s, theta_b, hc,
                                              N)
            # Unrotate each vertical level
            u_3d = ma.empty(shape(dz))
            v_3d = ma.empty(shape(dz))
            for k in range(N):
                u_k, v_k = rotate_vector_roms(u_3d_tmp[k, :, :],
                                              v_3d_tmp[k, :, :], angle)
                u_3d[k, :, :] = u_k
                v_3d[k, :, :] = v_k
            # Vertically average u and v
            u_rho = sum(u_3d * dz, axis=0) / sum(dz, axis=0)
            v_rho = sum(v_3d * dz, axis=0) / sum(dz, axis=0)
        # Get speed
        roms_data = sqrt(u_rho**2 + v_rho**2)
    id.close()
    # Get land/zice mask
    open_ocn = copy(roms_mask)
    open_ocn[roms_zice != 0] = 0
    land_zice = ma.masked_where(open_ocn == 1, open_ocn)
    # Mask the open ocean and land out of the data field
    roms_data = ma.masked_where(roms_zice == 0, roms_data)
    # Convert grid to spherical coordinates
    roms_x = -(roms_lat + 90) * cos(roms_lon * deg2rad + pi / 2)
    roms_y = (roms_lat + 90) * sin(roms_lon * deg2rad + pi / 2)
    # Find centre in spherical coordinates
    x_c = -(lat_c + 90) * cos(lon_c * deg2rad + pi / 2)
    y_c = (lat_c + 90) * sin(lon_c * deg2rad + pi / 2)
    # Build a regular x-y grid and select the missing circle
    x_reg_roms, y_reg_roms = meshgrid(linspace(-nbdry, nbdry, num=1000),
                                      linspace(-nbdry, nbdry, num=1000))
    land_circle = zeros(shape(x_reg_roms))
    land_circle = ma.masked_where(
        sqrt((x_reg_roms - x_c)**2 + (y_reg_roms - y_c)**2) > radius,
        land_circle)

    print 'Reading FESOM fields'
    # Mask open ocean
    elements, mask_patches = make_patches(fesom_mesh_path,
                                          circumpolar=True,
                                          mask_cavities=True)
    # Unmask ice shelves
    patches = iceshelf_mask(elements)
    if var_name == 'draft':
        # Nothing more to read
        pass
    else:
        id = Dataset(fesom_file, 'r')
        if var_name == 'melt':
            # Convert from m/s to m/y
            node_data = id.variables['wnet'][0, :] * sec_per_year
        elif var_name == 'temp':
            # Read full 3D field for now
            node_data = id.variables['temp'][0, :]
        elif var_name == 'salt':
            # Read full 3D field for now
            node_data = id.variables['salt'][0, :]
        elif var_name in ['vsfc', 'vavg']:
            # The overlaid vectors are based on nodes not elements, so many
            # of the fesom_grid data structures fail to apply and we need to
            # read some of the FESOM grid files again.
            # Read the cavity flag for each 2D surface node
            fesom_cavity = []
            f = open(fesom_mesh_path + 'cavity_flag_nod2d.out', 'r')
            for line in f:
                tmp = int(line)
                if tmp == 1:
                    fesom_cavity.append(True)
                elif tmp == 0:
                    fesom_cavity.append(False)
                else:
                    print 'Problem'
                    return
            f.close()
            # Save the number of 2D nodes
            fesom_n2d = len(fesom_cavity)
            # Read rotated lat and lon for each node; also read depth which is
            # needed for vertically averaged velocity
            f = open(fesom_mesh_path + 'nod3d.out', 'r')
            f.readline()
            rlon = []
            rlat = []
            node_depth = []
            for line in f:
                tmp = line.split()
                lon_tmp = float(tmp[1])
                lat_tmp = float(tmp[2])
                node_depth_tmp = -1 * float(tmp[3])
                if lon_tmp < -180:
                    lon_tmp += 360
                elif lon_tmp > 180:
                    lon_tmp -= 360
                rlon.append(lon_tmp)
                rlat.append(lat_tmp)
                node_depth.append(node_depth_tmp)
            f.close()
            # For lat and lon, only care about the 2D nodes (the first
            # fesom_n2d indices)
            rlon = array(rlon[0:fesom_n2d])
            rlat = array(rlat[0:fesom_n2d])
            node_depth = array(node_depth)
            # Unrotate longitude
            fesom_lon, fesom_lat = unrotate_grid(rlon, rlat)
            # Calculate polar coordinates of each node
            fesom_x = -(fesom_lat + 90) * cos(fesom_lon * deg2rad + pi / 2)
            fesom_y = (fesom_lat + 90) * sin(fesom_lon * deg2rad + pi / 2)
            if var_name == 'vavg':
                # Read lists of which nodes are directly below which
                f = open(fesom_mesh_path + 'aux3d.out', 'r')
                max_num_layers = int(f.readline())
                node_columns = zeros([fesom_n2d, max_num_layers])
                for n in range(fesom_n2d):
                    for k in range(max_num_layers):
                        node_columns[n, k] = int(f.readline())
                node_columns = node_columns.astype(int)
                f.close()
            # Now we can actually read the data
            # Read full 3D field for both u and v
            node_ur_3d = id.variables['u'][0, :]
            node_vr_3d = id.variables['v'][0, :]
            if var_name == 'vsfc':
                # Only care about the first fesom_n2d nodes (surface)
                node_ur = node_ur_3d[0:fesom_n2d]
                node_vr = node_vr_3d[0:fesom_n2d]
            elif var_name == 'vavg':
                # Vertically average
                node_ur = zeros(fesom_n2d)
                node_vr = zeros(fesom_n2d)
                for n in range(fesom_n2d):
                    # Integrate udz, vdz, and dz over this water column
                    udz_col = 0
                    vdz_col = 0
                    dz_col = 0
                    for k in range(max_num_layers - 1):
                        if node_columns[n, k + 1] == -999:
                            # Reached the bottom
                            break
                        # Trapezoidal rule
                        top_id = node_columns[n, k]
                        bot_id = node_columns[n, k + 1]
                        dz_tmp = node_depth[bot_id - 1] - node_depth[top_id -
                                                                     1]
                        udz_col += 0.5 * (node_ur_3d[top_id - 1] +
                                          node_ur_3d[bot_id - 1]) * dz_tmp
                        vdz_col += 0.5 * (node_vr_3d[top_id - 1] +
                                          node_vr_3d[bot_id - 1]) * dz_tmp
                        dz_col += dz_tmp
                    # Convert from integrals to averages
                    node_ur[n] = udz_col / dz_col
                    node_vr[n] = vdz_col / dz_col
            # Unrotate
            node_u, node_v = unrotate_vector(rlon, rlat, node_ur, node_vr)
            # Calculate speed
            node_data = sqrt(node_u**2 + node_v**2)
        id.close()
    # Calculate given field at each element
    fesom_data = []
    for elm in elements:
        # For each element in an ice shelf cavity, append the mean value
        # for the 3 component Nodes
        if elm.cavity:
            if var_name == 'draft':
                # Ice shelf draft is depth of surface layer
                fesom_data.append(
                    mean([
                        elm.nodes[0].depth, elm.nodes[1].depth,
                        elm.nodes[2].depth
                    ]))
            elif var_name in ['melt', 'vsfc', 'vavg']:
                # Surface nodes (or 2D in the case of vavg)
                fesom_data.append(
                    mean([
                        node_data[elm.nodes[0].id], node_data[elm.nodes[1].id],
                        node_data[elm.nodes[2].id]
                    ]))
            elif var_name in ['temp', 'salt']:
                # Bottom nodes
                fesom_data.append(
                    mean([
                        node_data[elm.nodes[0].find_bottom().id],
                        node_data[elm.nodes[1].find_bottom().id],
                        node_data[elm.nodes[2].find_bottom().id]
                    ]))

    # Loop over ice shelves
    for index in range(num_shelves):
        print 'Processing ' + shelf_names[index]
        # Convert lat/lon bounds to polar coordinates for plotting
        x1 = -(lat_min[index] + 90) * cos(lon_min[index] * deg2rad + pi / 2)
        y1 = (lat_min[index] + 90) * sin(lon_min[index] * deg2rad + pi / 2)
        x2 = -(lat_min[index] + 90) * cos(lon_max[index] * deg2rad + pi / 2)
        y2 = (lat_min[index] + 90) * sin(lon_max[index] * deg2rad + pi / 2)
        x3 = -(lat_max[index] + 90) * cos(lon_min[index] * deg2rad + pi / 2)
        y3 = (lat_max[index] + 90) * sin(lon_min[index] * deg2rad + pi / 2)
        x4 = -(lat_max[index] + 90) * cos(lon_max[index] * deg2rad + pi / 2)
        y4 = (lat_max[index] + 90) * sin(lon_max[index] * deg2rad + pi / 2)
        # Find the new bounds on x and y
        x_min = amin(array([x1, x2, x3, x4]))
        x_max = amax(array([x1, x2, x3, x4]))
        y_min = amin(array([y1, y2, y3, y4]))
        y_max = amax(array([y1, y2, y3, y4]))
        # Now make the plot square: enlarge the smaller of delta_x and delta_y
        # so they are equal
        delta_x = x_max - x_min
        delta_y = y_max - y_min
        if delta_x > delta_y:
            diff = 0.5 * (delta_x - delta_y)
            y_min -= diff
            y_max += diff
        elif delta_y > delta_x:
            diff = 0.5 * (delta_y - delta_x)
            x_min -= diff
            x_max += diff
        # Set up a grey square for FESOM to fill the background with land
        x_reg_fesom, y_reg_fesom = meshgrid(linspace(x_min, x_max, num=100),
                                            linspace(y_min, y_max, num=100))
        land_square = zeros(shape(x_reg_fesom))
        # Find bounds on variable in this region, for both ROMS and FESOM
        # Start with ROMS
        loc = (roms_x >= x_min) * (roms_x <= x_max) * (roms_y >= y_min) * (
            roms_y <= y_max)
        var_min = amin(roms_data[loc])
        var_max = amax(roms_data[loc])
        # Modify with FESOM
        i = 0
        for elm in elements:
            if elm.cavity:
                if any(elm.x >= x_min) and any(elm.x <= x_max) and any(
                        elm.y >= y_min) and any(elm.y <= y_max):
                    if fesom_data[i] < var_min:
                        var_min = fesom_data[i]
                    if fesom_data[i] > var_max:
                        var_max = fesom_data[i]
                i += 1
        if var_name == 'melt':
            # Special colour map
            if var_min < 0:
                # There is refreezing here; include blue for elements below 0
                cmap_vals = array([
                    var_min, 0, 0.25 * var_max, 0.5 * var_max, 0.75 * var_max,
                    var_max
                ])
                cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4),
                               (0.99, 0.59, 0.18), (0.5, 0.0, 0.08),
                               (0.96, 0.17, 0.89)]
                cmap_vals_norm = (cmap_vals - var_min) / (var_max - var_min)
                cmap_list = []
                for i in range(size(cmap_vals)):
                    cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                mf_cmap = LinearSegmentedColormap.from_list(
                    'melt_freeze', cmap_list)
            else:
                # No refreezing
                cmap_vals = array([
                    0, 0.25 * var_max, 0.5 * var_max, 0.75 * var_max, var_max
                ])
                cmap_colors = [(1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18),
                               (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
                cmap_vals_norm = cmap_vals / var_max
                cmap_list = []
                for i in range(size(cmap_vals)):
                    cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                mf_cmap = LinearSegmentedColormap.from_list(
                    'melt_freeze', cmap_list)
            colour_map = mf_cmap
        else:
            colour_map = 'jet'
        if var_name in ['vsfc', 'vavg']:
            # Make vectors for overlay
            # Set up bins (edges)
            x_bins = linspace(x_min, x_max, num=num_bins + 1)
            y_bins = linspace(y_min, y_max, num=num_bins + 1)
            # Calculate centres of bins (for plotting)
            x_centres = 0.5 * (x_bins[:-1] + x_bins[1:])
            y_centres = 0.5 * (y_bins[:-1] + y_bins[1:])
            # ROMS
            # First set up arrays to integrate velocity in each bin
            # Simple averaging of all the points inside each bin
            roms_u = zeros([size(y_centres), size(x_centres)])
            roms_v = zeros([size(y_centres), size(x_centres)])
            roms_num_pts = zeros([size(y_centres), size(x_centres)])
            # First convert to polar coordinates, rotate to account for
            # longitude in circumpolar projection, and convert back to vector
            # components
            theta_roms = arctan2(v_rho, u_rho)
            theta_circ_roms = theta_roms - roms_lon * deg2rad
            u_circ_roms = roms_data * cos(
                theta_circ_roms)  # roms_data is speed
            v_circ_roms = roms_data * sin(theta_circ_roms)
            # Loop over all points (can't find a better way to do this)
            for j in range(size(roms_data, 0)):
                for i in range(size(roms_data, 1)):
                    # Make sure data isn't masked (i.e. land or open ocean)
                    if u_circ_roms[j, i] is not ma.masked:
                        # Check if we're in the region of interest
                        if roms_x[j, i] > x_min and roms_x[
                                j, i] < x_max and roms_y[
                                    j, i] > y_min and roms_y[j, i] < y_max:
                            # Figure out which bins this falls into
                            x_index = nonzero(x_bins > roms_x[j, i])[0][0] - 1
                            y_index = nonzero(y_bins > roms_y[j, i])[0][0] - 1
                            # Integrate
                            roms_u[y_index, x_index] += u_circ_roms[j, i]
                            roms_v[y_index, x_index] += v_circ_roms[j, i]
                            roms_num_pts[y_index, x_index] += 1
            # Convert from sums to averages
            # First mask out points with no data
            roms_u = ma.masked_where(roms_num_pts == 0, roms_u)
            roms_v = ma.masked_where(roms_num_pts == 0, roms_v)
            # Divide everything else by the number of points
            flag = roms_num_pts > 0
            roms_u[flag] = roms_u[flag] / roms_num_pts[flag]
            roms_v[flag] = roms_v[flag] / roms_num_pts[flag]
            # FESOM
            fesom_u = zeros([size(y_centres), size(x_centres)])
            fesom_v = zeros([size(y_centres), size(x_centres)])
            fesom_num_pts = zeros([size(y_centres), size(x_centres)])
            theta_fesom = arctan2(node_v, node_u)
            theta_circ_fesom = theta_fesom - fesom_lon * deg2rad
            u_circ_fesom = node_data * cos(
                theta_circ_fesom)  # node_data is speed
            v_circ_fesom = node_data * sin(theta_circ_fesom)
            # Loop over 2D nodes to fill in the velocity bins as before
            for n in range(fesom_n2d):
                if fesom_cavity[n]:
                    if fesom_x[n] > x_min and fesom_x[n] < x_max and fesom_y[
                            n] > y_min and fesom_y[n] < y_max:
                        x_index = nonzero(x_bins > fesom_x[n])[0][0] - 1
                        y_index = nonzero(y_bins > fesom_y[n])[0][0] - 1
                        fesom_u[y_index, x_index] += u_circ_fesom[n]
                        fesom_v[y_index, x_index] += v_circ_fesom[n]
                        fesom_num_pts[y_index, x_index] += 1
            fesom_u = ma.masked_where(fesom_num_pts == 0, fesom_u)
            fesom_v = ma.masked_where(fesom_num_pts == 0, fesom_v)
            flag = fesom_num_pts > 0
            fesom_u[flag] = fesom_u[flag] / fesom_num_pts[flag]
            fesom_v[flag] = fesom_v[flag] / fesom_num_pts[flag]
        # Plot
        fig = figure(figsize=(30, 12))
        fig.patch.set_facecolor('white')
        # ROMS
        ax1 = fig.add_subplot(1, 2, 1, aspect='equal')
        # First shade land and zice in grey
        contourf(roms_x, roms_y, land_zice, 1, colors=(('0.6', '0.6', '0.6')))
        # Fill in the missing circle
        contourf(x_reg_roms,
                 y_reg_roms,
                 land_circle,
                 1,
                 colors=(('0.6', '0.6', '0.6')))
        # Now shade the data
        pcolor(roms_x,
               roms_y,
               roms_data,
               vmin=var_min,
               vmax=var_max,
               cmap=colour_map)
        if var_name in ['vsfc', 'vavg']:
            # Overlay vectors
            quiver(x_centres,
                   y_centres,
                   roms_u,
                   roms_v,
                   scale=1.5,
                   color='black')
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        axis('off')
        title('MetROMS', fontsize=24)
        # FESOM
        ax2 = fig.add_subplot(1, 2, 2, aspect='equal')
        # Start with land background
        contourf(x_reg_fesom,
                 y_reg_fesom,
                 land_square,
                 1,
                 colors=(('0.6', '0.6', '0.6')))
        # Add ice shelf elements
        img = PatchCollection(patches, cmap=colour_map)
        img.set_array(array(fesom_data))
        img.set_edgecolor('face')
        img.set_clim(vmin=var_min, vmax=var_max)
        ax2.add_collection(img)
        # Mask out the open ocean in white
        overlay = PatchCollection(mask_patches, facecolor=(1, 1, 1))
        overlay.set_edgecolor('face')
        ax2.add_collection(overlay)
        if var_name in ['vsfc', 'vavg']:
            quiver(x_centres,
                   y_centres,
                   fesom_u,
                   fesom_v,
                   scale=1.5,
                   color='black')
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        axis('off')
        title('FESOM', fontsize=24)
        # Colourbar on the right
        cbaxes = fig.add_axes([0.92, 0.2, 0.01, 0.6])
        cbar = colorbar(img, cax=cbaxes)
        cbar.ax.tick_params(labelsize=20)
        # Main title
        if var_name == 'draft':
            title_string = ' draft (m)'
        elif var_name == 'melt':
            title_string = ' melt rate (m/y)'
        elif var_name == 'temp':
            title_string = r' bottom water temperature ($^{\circ}$C)'
        elif var_name == 'salt':
            title_string = ' bottom water salinity (psu)'
        elif var_name == 'vsfc':
            title_string = ' surface velocity (m/s)'
        elif var_name == 'vavg':
            title_string = ' vertically averaged velocity (m/s)'
        suptitle(shelf_names[index] + title_string, fontsize=30)
        subplots_adjust(wspace=0.05)
        #fig.show()
        fig.savefig(fig_heads[index] + '_' + var_name + '.png')
Example #6
def convert_file(year):

    # Make sure input argument is an integer (sometimes the batch script likes
    # to pass it as a string)
    year = int(year)

    # Paths of ROMS grid file, input ECCO2 files (without the tail yyyymm.nc),
    # and output ROMS-CICE boundary condition file; other users will need to
    # change these
    grid_file = '../ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_good.nc'
    theta_base = '../ROMS-CICE-MCT/data/ECCO2/raw/THETA.1440x720x50.' + str(year)
    salt_base = '../ROMS-CICE-MCT/data/ECCO2/raw/SALT.1440x720x50.' + str(year)
    vvel_base = '../ROMS-CICE-MCT/data/ECCO2/raw/VVEL.1440x720x50.' + str(year)
    output_file = '../ROMS-CICE-MCT/data/ECCO2/ecco2_cube92_lbc_' + str(year) + '.nc'

    # Grid parameters; check grid_file and *.in to make sure these are correct
    Tcline = 40
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31
    # Northernmost index of ECCO2 grid to read (1-based)
    nbdry_ecco = 241

    # Read ECCO2 grid
    print 'Reading ECCO2 grid'
    ecco_fid = Dataset(theta_base + '01.nc', 'r')
    lon_ecco_raw = ecco_fid.variables['LONGITUDE_T'][:]
    lat_ecco = ecco_fid.variables['LATITUDE_T'][0:nbdry_ecco]
    depth_ecco_raw = ecco_fid.variables['DEPTH_T'][:]
    ecco_fid.close()

    # The ECCO2 longitude axis doesn't wrap around; there is a gap between
    # almost-180W and almost-180E, and the ROMS grid has points in this gap.
    # So copy the last longitude value (mod 360) to the beginning, and the
    # first longitude value (mod 360) to the end.
    lon_ecco = zeros(size(lon_ecco_raw)+2)
    lon_ecco[0] = lon_ecco_raw[-1]-360
    lon_ecco[1:-1] = lon_ecco_raw
    lon_ecco[-1] = lon_ecco_raw[0]+360

    # The shallowest ECCO2 depth value is 5 m, but ROMS needs 0 m. So add the
    # index depth = 0 m to the beginning. Later we will just copy the 5 m
    # values for theta and salt into this index. Similarly, the deepest ECCO2
    # depth value is not deep enough for ROMS, so make a 6000 m index at the end.
    depth_ecco = zeros(size(depth_ecco_raw)+2)
    depth_ecco[0] = 0.0
    depth_ecco[1:-1] = depth_ecco_raw
    depth_ecco[-1] = 6000.0

    # Read ROMS grid
    print 'Reading ROMS grid'
    grid_fid = Dataset(grid_file, 'r')
    lon_rho = grid_fid.variables['lon_rho'][:,:]
    lat_rho = grid_fid.variables['lat_rho'][:,:]
    lon_u = grid_fid.variables['lon_u'][:,:]
    lat_u = grid_fid.variables['lat_u'][:,:]
    lon_v = grid_fid.variables['lon_v'][:,:]
    lat_v = grid_fid.variables['lat_v'][:,:]
    h = grid_fid.variables['h'][:,:]    
    zice = grid_fid.variables['zice'][:,:]
    mask_rho = grid_fid.variables['mask_rho'][:,:]
    mask_zice = grid_fid.variables['mask_zice'][:,:]
    grid_fid.close()    

    # Save the lengths of the longitude axis for each grid
    num_lon_rho = size(lon_rho, 1)
    num_lon_u = size(lon_u, 1)
    num_lon_v = size(lon_v, 1)
    # Mask h and zice with zeros
    h = h*mask_rho
    zice = zice*mask_zice
    # Interpolate h and zice to u and v grids
    h_u = 0.5*(h[:,0:-1] + h[:,1:])
    h_v = 0.5*(h[0:-1,:] + h[1:,:])
    zice_u = 0.5*(zice[:,0:-1] + zice[:,1:])
    zice_v = 0.5*(zice[0:-1,:] + zice[1:,:])
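    # (ROMS uses a staggered Arakawa C-grid, so the u and v points sit halfway
    # between neighbouring rho points; the two-point averages above move h and
    # zice onto those staggered locations)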

    # Calculate Cartesian integrands and z-coordinates for each grid
    dx_rho, dy_rho, dz_rho, z_rho = cartesian_grid_3d(lon_rho, lat_rho, h, zice, theta_s, theta_b, hc, N)
    dx_u, dy_u, dz_u, z_u = cartesian_grid_3d(lon_u, lat_u, h_u, zice_u, theta_s, theta_b, hc, N)
    dx_v, dy_v, dz_v, z_v = cartesian_grid_3d(lon_v, lat_v, h_v, zice_v, theta_s, theta_b, hc, N)
    # Also call calc_z for the rho_grid just so we get sc_r and Cs_r
    z_rho, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N)

    # Select just the northern boundary for each field
    dx_rho = dx_rho[:,-1,:]
    dy_rho = dy_rho[:,-1,:]
    dz_rho = dz_rho[:,-1,:]
    z_rho = z_rho[:,-1,:]
    dx_u = dx_u[:,-1,:]
    dy_u = dy_u[:,-1,:]
    dz_u = dz_u[:,-1,:]
    z_u = z_u[:,-1,:]
    dx_v = dx_v[:,-1,:]
    dy_v = dy_v[:,-1,:]
    dz_v = dz_v[:,-1,:]
    z_v = z_v[:,-1,:]

    # Copy longitude and latitude at the northern boundary into arrays of
    # dimension depth x longitude
    lon_rho = tile(lon_rho[-1,:], (N,1))
    lat_rho = tile(lat_rho[-1,:], (N,1))
    lon_u = tile(lon_u[-1,:], (N,1))
    lat_u = tile(lat_u[-1,:], (N,1))
    lon_v = tile(lon_v[-1,:], (N,1))
    lat_v = tile(lat_v[-1,:], (N,1))

    # Make sure ROMS longitudes are between 0 and 360
    index = lon_rho < 0
    lon_rho[index] += 360
    index = lon_rho > 360
    lon_rho[index] -= 360
    index = lon_u < 0
    lon_u[index] += 360
    index = lon_u > 360
    lon_u[index] -= 360
    index = lon_v < 0
    lon_v[index] += 360
    index = lon_v > 360
    lon_v[index] -= 360

    # Set up output file
    print 'Setting up ', output_file
    out_fid = Dataset(output_file, 'w')
    out_fid.createDimension('xi_u', num_lon_u)
    out_fid.createDimension('xi_v', num_lon_v)
    out_fid.createDimension('xi_rho', num_lon_rho)
    out_fid.createDimension('s_rho', N)
    out_fid.createDimension('ocean_time', None)
    out_fid.createDimension('one', 1)
    out_fid.createVariable('theta_s', 'f8', ('one'))
    out_fid.variables['theta_s'].long_name = 'S-coordinate surface control parameter'
    out_fid.variables['theta_s'][:] = theta_s
    out_fid.createVariable('theta_b', 'f8', ('one'))
    out_fid.variables['theta_b'].long_name = 'S-coordinate bottom control parameter'
    out_fid.variables['theta_b'].units = 'nondimensional'
    out_fid.variables['theta_b'][:] = theta_b
    out_fid.createVariable('Tcline', 'f8', ('one'))
    out_fid.variables['Tcline'].long_name = 'S-coordinate surface/bottom layer width'
    out_fid.variables['Tcline'].units = 'meter'
    out_fid.variables['Tcline'][:] = Tcline
    out_fid.createVariable('hc', 'f8', ('one'))
    out_fid.variables['hc'].long_name = 'S-coordinate parameter, critical depth'
    out_fid.variables['hc'].units = 'meter'
    out_fid.variables['hc'][:] = hc
    out_fid.createVariable('sc_r', 'f8', ('s_rho'))
    out_fid.variables['sc_r'].long_name = 'S-coordinate at rho-points'
    out_fid.variables['sc_r'].units = 'nondimensional'
    out_fid.variables['sc_r'].valid_min = -1
    out_fid.variables['sc_r'].valid_max = 0
    out_fid.variables['sc_r'][:] = sc_r
    out_fid.createVariable('Cs_r', 'f8', ('s_rho'))
    out_fid.variables['Cs_r'].long_name = 'S-coordinate stretching curves at RHO-points'
    out_fid.variables['Cs_r'].units = 'nondimensional'
    out_fid.variables['Cs_r'].valid_min = -1
    out_fid.variables['Cs_r'].valid_max = 0
    out_fid.variables['Cs_r'][:] = Cs_r
    out_fid.createVariable('ocean_time', 'f8', ('ocean_time'))
    out_fid.variables['ocean_time'].long_name = 'time since initialization'
    out_fid.variables['ocean_time'].units = 'days'
    out_fid.createVariable('temp_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'))
    out_fid.variables['temp_north'].long_name = 'northern boundary potential temperature'
    out_fid.variables['temp_north'].units = 'Celsius'
    out_fid.createVariable('salt_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'))
    out_fid.variables['salt_north'].long_name = 'northern boundary salinity'
    out_fid.variables['salt_north'].units = 'PSU'
    out_fid.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'))
    out_fid.variables['u_north'].long_name = 'northern boundary u-momentum component'
    out_fid.variables['u_north'].units = 'meter second-1'
    out_fid.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'))
    out_fid.variables['v_north'].long_name = 'northern boundary v-momentum component'
    out_fid.variables['v_north'].units = 'meter second-1'
    out_fid.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'))
    out_fid.variables['ubar_north'].long_name = 'northern boundary vertically integrated u-momentum component'
    out_fid.variables['ubar_north'].units = 'meter second-1'
    out_fid.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'))
    out_fid.variables['vbar_north'].long_name = 'northern boundary vertically integrated v-momentum component'
    out_fid.variables['vbar_north'].units = 'meter second-1'
    out_fid.createVariable('zeta_north', 'f8', ('ocean_time', 'xi_rho'))
    out_fid.variables['zeta_north'].long_name = 'northern boundary sea surface height'
    out_fid.variables['zeta_north'].units = 'meter'
    out_fid.close()

    # Loop through each month of this year
    for month in range(12):

        print 'Processing month ', str(month+1), ' of 12'
        # Construct the rest of the file paths
        if month+1 < 10:
            tail = '0' + str(month+1) + '.nc'
        else:
            tail = str(month+1) + '.nc'

        # Read temperature, salinity, velocity data
        theta_fid = Dataset(theta_base + tail, 'r')
        theta_raw = transpose(theta_fid.variables['THETA'][0,:,0:nbdry_ecco,:])
        theta_fid.close()
        salt_fid = Dataset(salt_base + tail, 'r')
        salt_raw = transpose(salt_fid.variables['SALT'][0,:,0:nbdry_ecco,:])
        salt_fid.close()
        vvel_fid = Dataset(vvel_base + tail, 'r')
        vvel_raw = transpose(vvel_fid.variables['VVEL'][0,:,0:nbdry_ecco,:])
        vvel_fid.close()

        # Copy the data to the new longitude and depth indices, making sure
        # to preserve the mask.
        theta = ma.array(zeros((size(lon_ecco), size(lat_ecco), size(depth_ecco))))
        theta[1:-1,:,1:-1] = ma.copy(theta_raw)
        theta[0,:,1:-1] = ma.copy(theta_raw[-1,:,:])
        theta[-1,:,1:-1] = ma.copy(theta_raw[0,:,:])
        theta[:,:,0] = ma.copy(theta[:,:,1])
        theta[:,:,-1] = ma.copy(theta[:,:,-2])
        salt = ma.array(zeros((size(lon_ecco), size(lat_ecco), size(depth_ecco))))
        salt[1:-1,:,1:-1] = ma.copy(salt_raw)
        salt[0,:,1:-1] = ma.copy(salt_raw[-1,:,:])
        salt[-1,:,1:-1] = ma.copy(salt_raw[0,:,:])
        salt[:,:,0] = ma.copy(salt[:,:,1])
        salt[:,:,-1] = ma.copy(salt[:,:,-2])
        vvel = ma.array(zeros((size(lon_ecco), size(lat_ecco), size(depth_ecco))))
        vvel[1:-1,:,1:-1] = ma.copy(vvel_raw)
        vvel[0,:,1:-1] = ma.copy(vvel_raw[-1,:,:])
        vvel[-1,:,1:-1] = ma.copy(vvel_raw[0,:,:])
        vvel[:,:,0] = ma.copy(vvel[:,:,1])
        vvel[:,:,-1] = ma.copy(vvel[:,:,-2])

        # Regridding happens here...
        print 'Interpolating temperature'
        temp_interp = interp_ecco2roms(theta, lon_ecco, lat_ecco, depth_ecco, lon_rho, lat_rho, z_rho, mean(theta), True)
        print 'Interpolating salinity'
        salt_interp = interp_ecco2roms(salt, lon_ecco, lat_ecco, depth_ecco, lon_rho, lat_rho, z_rho, mean(salt), True)
        print 'Interpolating v'
        v_interp = interp_ecco2roms(vvel, lon_ecco, lat_ecco, depth_ecco, lon_v, lat_v, z_v, 0, False)

        # Calculate vertical average of v to get vbar
        # Be sure to treat land mask carefully so we don't divide by 0
        vbar_interp = sum(v_interp*dz_v, axis=0)
        wct_v = h_v[-1,:] + zice_v[-1,:]
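        # wct_v is the water column thickness at the northern boundary
        # (zice is negative under ice shelves, so h + zice is the thickness of
        # the water column)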
        index = wct_v == 0
        vbar_interp[~index] = vbar_interp[~index]/wct_v[~index]
        vbar_interp[index] = 0.0

        # Calculate time values centered in the middle of each month,
        # relative to 1992
        time = 365.25*(year-1992) + 365.25/12*(month+0.5)
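        # e.g. year=1995, month=0 (January) gives 365.25*3 + 365.25/12*0.5
        # = 1110.97 days since the start of 1992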

        # Save data to NetCDF file
        out_fid = Dataset(output_file, 'a')
        out_fid.variables['ocean_time'][month] = time
        out_fid.variables['temp_north'][month,:,:] = temp_interp
        out_fid.variables['salt_north'][month,:,:] = salt_interp
        # Clamp u to zero
        out_fid.variables['u_north'][month,:,:] = 0.0
        out_fid.variables['v_north'][month,:,:] = v_interp
        # Clamp ubar to zero
        out_fid.variables['ubar_north'][month,:] = 0.0
        out_fid.variables['vbar_north'][month,:] = vbar_interp
        out_fid.variables['zeta_north'][month,:] = 0.0
        out_fid.close()
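
# A minimal, hypothetical driver for convert_file (not part of the original
# script): the batch-script scenario mentioned above passes the year as a
# string, which convert_file handles by casting to int.
if __name__ == '__main__':
    import sys
    convert_file(sys.argv[1])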
Example #7
def mip_circumpolar_drift():

    # File paths
    # ECCO2 initial conditions file for temperature
    ecco2_ini_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.199201.nc'
    # ROMS grid file
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    # ROMS January 2016 mean temp
    roms_end_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/temp_salt_jan2016.nc'
    # FESOM mesh paths
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    # FESOM January 2016 mean temp
    fesom_end_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/temp_salt_jan2016.nc'
    fesom_end_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/temp_salt_jan2016.nc'
    # Depth bounds to average between
    shallow_bound = 300
    deep_bound = 1000
    # ROMS grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    deg2rad = pi / 180
    # Bound for colour scale
    colour_bound = 3
    # Northern boundary for plot (latitude 50S, expressed as the radius lat+90)
    nbdry = -50 + 90

    print 'Processing ECCO2'
    id = Dataset(ecco2_ini_file, 'r')
    ecco_lon_tmp = id.variables['LONGITUDE_T'][:]
    ecco_lat = id.variables['LATITUDE_T'][:]
    ecco_depth = id.variables['DEPTH_T'][:]  # Depth is positive
    ecco_temp_3d_tmp = id.variables['THETA'][0, :, :, :]
    id.close()
    # Wrap periodic boundary
    ecco_lon = zeros(size(ecco_lon_tmp) + 2)
    ecco_lon[0] = ecco_lon_tmp[-1] - 360
    ecco_lon[1:-1] = ecco_lon_tmp
    ecco_lon[-1] = ecco_lon_tmp[0] + 360
    ecco_temp_3d = ma.array(
        zeros((size(ecco_depth), size(ecco_lat), size(ecco_lon))))
    ecco_temp_3d[:, :, 0] = ecco_temp_3d_tmp[:, :, -1]
    ecco_temp_3d[:, :, 1:-1] = ecco_temp_3d_tmp
    ecco_temp_3d[:, :, -1] = ecco_temp_3d_tmp[:, :, 0]
    # Calculate dz
    ecco_depth_edges = zeros(size(ecco_depth) + 1)
    ecco_depth_edges[1:-1] = 0.5 * (ecco_depth[:-1] + ecco_depth[1:])
    # Surface is zero
    # Extrapolate for bottom
    ecco_depth_edges[-1] = 2 * ecco_depth[-1] - ecco_depth_edges[-2]
    ecco_dz = ecco_depth_edges[1:] - ecco_depth_edges[:-1]
    # Average between bounds
    # Find the first level below shallow_bound
    k_start = nonzero(ecco_depth > shallow_bound)[0][0]
    # Find the first level below deep_bound
    # Don't worry about regions where this hits the seafloor, as they will
    # get masked out in the final plot
    k_end = nonzero(ecco_depth > deep_bound)[0][0]
    # Integrate between
    ecco_temp = sum(
        ecco_temp_3d[k_start:k_end, :, :] * ecco_dz[k_start:k_end, None, None],
        axis=0) / sum(ecco_dz[k_start:k_end])
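    # i.e. a thickness-weighted mean: sum_k(T_k*dz_k) / sum_k(dz_k) over the
    # levels between shallow_bound and deep_bound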
    # Fill land mask with zeros
    index = ecco_temp.mask
    ecco_temp = ecco_temp.data
    ecco_temp[index] = 0.0
    # Prepare interpolation function
    interp_function = RegularGridInterpolator((ecco_lat, ecco_lon), ecco_temp)

    print 'Processing MetROMS'
    # Read grid
    id = Dataset(roms_grid, 'r')
    roms_h = id.variables['h'][:, :]
    roms_zice = id.variables['zice'][:, :]
    roms_mask = id.variables['mask_rho'][:, :]
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    num_lon = size(roms_lon, 1)
    num_lat = size(roms_lat, 0)
    id.close()
    # Interpolate ECCO2 depth-averaged values to the ROMS grid
    roms_temp_ini = interp_function((roms_lat, roms_lon))
    # Apply ROMS land mask
    roms_temp_ini = ma.masked_where(roms_mask == 0, roms_temp_ini)
    # Read Jan 2016 values
    id = Dataset(roms_end_file, 'r')
    roms_temp_3d_end = id.variables['temp'][0, :, :, :]
    id.close()
    # Get z and dz
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(
        roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Vertically average between given depths
    roms_temp_end = average_btw_depths(roms_temp_3d_end, roms_z, roms_dz,
                                       [-1 * shallow_bound, -1 * deep_bound])
    # Mask regions shallower than 1000 m
    roms_temp_ini = ma.masked_where(roms_h < deep_bound, roms_temp_ini)
    roms_temp_end = ma.masked_where(roms_h < deep_bound, roms_temp_end)
    # Mask ice shelf cavities
    roms_temp_ini = ma.masked_where(roms_zice < 0, roms_temp_ini)
    roms_temp_end = ma.masked_where(roms_zice < 0, roms_temp_end)
    # Get difference
    roms_temp_drift = roms_temp_end - roms_temp_ini
    # Convert to polar (x, y) coordinates for the circumpolar plot
    roms_x = -(roms_lat + 90) * cos(roms_lon * deg2rad + pi / 2)
    roms_y = (roms_lat + 90) * sin(roms_lon * deg2rad + pi / 2)

    print 'Processing low-res FESOM'
    print '...Building mesh'
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True)
    # Read rotated lat and lon for each 2D node
    f = open(fesom_mesh_path_lr + 'nod2d.out', 'r')
    f.readline()
    rlon_lr = []
    rlat_lr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_lr.append(lon_tmp)
        rlat_lr.append(float(tmp[2]))
    f.close()
    rlon_lr = array(rlon_lr)
    rlat_lr = array(rlat_lr)
    # Unrotate grid
    fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr)
    # Get longitude in the range (0, 360) to match ECCO
    index = fesom_lon_lr < 0
    fesom_lon_lr[index] = fesom_lon_lr[index] + 360
    print '...Interpolating ECCO2'
    fesom_temp_nodes_ini_lr = interp_function((fesom_lat_lr, fesom_lon_lr))
    # Read January 2016 temp
    id = Dataset(fesom_end_file_lr, 'r')
    fesom_temp_3d_nodes_end_lr = id.variables['temp'][0, :]
    id.close()
    print '...Looping over elements'
    fesom_temp_ini_lr = []
    fesom_temp_end_lr = []
    patches_lr = []
    for elm in elements_lr:
        # Make sure we're not in an ice shelf cavity, or shallower than deep_bound
        if not elm.cavity:
            if all(
                    array([
                        elm.nodes[0].find_bottom().depth, elm.nodes[1].
                        find_bottom().depth, elm.nodes[2].find_bottom().depth
                    ]) > deep_bound):
                # Add a new patch
                coord = transpose(vstack((elm.x, elm.y)))
                patches_lr.append(Polygon(coord, True, linewidth=0.))
                # Average initial temp over element
                fesom_temp_ini_lr.append(
                    mean([
                        fesom_temp_nodes_ini_lr[elm.nodes[0].id],
                        fesom_temp_nodes_ini_lr[elm.nodes[1].id],
                        fesom_temp_nodes_ini_lr[elm.nodes[2].id]
                    ]))
                # Vertically integrate final temp for this element
                fesom_temp_end_lr.append(
                    fesom_element_average_btw_depths(
                        elm, shallow_bound, deep_bound,
                        fesom_temp_3d_nodes_end_lr))
    fesom_temp_ini_lr = array(fesom_temp_ini_lr)
    fesom_temp_end_lr = array(fesom_temp_end_lr)
    # Get difference
    fesom_temp_drift_lr = fesom_temp_end_lr - fesom_temp_ini_lr

    print 'Processing high-res FESOM'
    print '...Building mesh'
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True)
    f = open(fesom_mesh_path_hr + 'nod2d.out', 'r')
    f.readline()
    rlon_hr = []
    rlat_hr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_hr.append(lon_tmp)
        rlat_hr.append(float(tmp[2]))
    f.close()
    rlon_hr = array(rlon_hr)
    rlat_hr = array(rlat_hr)
    fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr)
    index = fesom_lon_hr < 0
    fesom_lon_hr[index] = fesom_lon_hr[index] + 360
    print '...Interpolating ECCO2'
    fesom_temp_nodes_ini_hr = interp_function((fesom_lat_hr, fesom_lon_hr))
    id = Dataset(fesom_end_file_hr, 'r')
    fesom_temp_3d_nodes_end_hr = id.variables['temp'][0, :]
    id.close()
    print '...Looping over elements'
    fesom_temp_ini_hr = []
    fesom_temp_end_hr = []
    patches_hr = []
    for elm in elements_hr:
        if not elm.cavity:
            if all(
                    array([
                        elm.nodes[0].find_bottom().depth, elm.nodes[1].
                        find_bottom().depth, elm.nodes[2].find_bottom().depth
                    ]) > deep_bound):
                coord = transpose(vstack((elm.x, elm.y)))
                patches_hr.append(Polygon(coord, True, linewidth=0.))
                fesom_temp_ini_hr.append(
                    mean([
                        fesom_temp_nodes_ini_hr[elm.nodes[0].id],
                        fesom_temp_nodes_ini_hr[elm.nodes[1].id],
                        fesom_temp_nodes_ini_hr[elm.nodes[2].id]
                    ]))
                fesom_temp_end_hr.append(
                    fesom_element_average_btw_depths(
                        elm, shallow_bound, deep_bound,
                        fesom_temp_3d_nodes_end_hr))
    fesom_temp_ini_hr = array(fesom_temp_ini_hr)
    fesom_temp_end_hr = array(fesom_temp_end_hr)
    fesom_temp_drift_hr = fesom_temp_end_hr - fesom_temp_ini_hr

    print 'Plotting'
    fig = figure(figsize=(19, 8))
    fig.patch.set_facecolor('white')
    gs = GridSpec(1, 3)
    gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05)
    # ROMS
    ax = subplot(gs[0, 0], aspect='equal')
    ax.pcolor(roms_x,
              roms_y,
              roms_temp_drift,
              vmin=-colour_bound,
              vmax=colour_bound,
              cmap='RdBu_r')
    xlim([-nbdry, nbdry])
    ylim([-nbdry, nbdry])
    title('a) MetROMS', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM (low-res)
    ax = subplot(gs[0, 1], aspect='equal')
    img = PatchCollection(patches_lr, cmap='RdBu_r')
    img.set_array(fesom_temp_drift_lr)
    img.set_clim(vmin=-colour_bound, vmax=colour_bound)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry, nbdry])
    ylim([-nbdry, nbdry])
    title('b) FESOM (low-res)', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM (high-res)
    ax = subplot(gs[0, 2], aspect='equal')
    img = PatchCollection(patches_hr, cmap='RdBu_r')
    img.set_array(fesom_temp_drift_hr)
    img.set_clim(vmin=-colour_bound, vmax=colour_bound)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry, nbdry])
    ylim([-nbdry, nbdry])
    title('c) FESOM (high-res)', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # Add a horizontal colourbar on the bottom
    cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04])
    cbar = colorbar(img,
                    orientation='horizontal',
                    cax=cbaxes,
                    ticks=arange(-colour_bound, colour_bound + 1, 1),
                    extend='both')
    cbar.ax.tick_params(labelsize=20)
    # Main title
    suptitle(r'Change in temperature from initial conditions ($^{\circ}$C), ' +
             str(shallow_bound) + '-' + str(deep_bound) + ' m average',
             fontsize=34)
    fig.show()
    fig.savefig('circumpolar_temp_drift.png')
Example #8
def mip_ts_distribution_sose(roms_grid, roms_file, fesom_mesh_path,
                             fesom_file):

    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins = 1000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 35.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    min_salt_plot = 33.25
    max_salt_plot = 35.0
    min_temp_plot = -3
    max_temp_plot = 3.8
    # FESOM grid generation parameters
    circumpolar = False
    cross_180 = False
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Path to SOSE annual climatology for temp and salt
    sose_file = '../SOSE_annual_climatology.nc'
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])
    # Set up 2D arrays of temperature bins x salinity bins to hold average
    # depth of water masses, weighted by volume
    ts_vals_roms = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_fesom = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_sose = zeros([size(temp_centres), size(salt_centres)])
    # Also arrays to integrate volume
    volume_roms = zeros([size(temp_centres), size(salt_centres)])
    volume_fesom = zeros([size(temp_centres), size(salt_centres)])
    volume_sose = zeros([size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity as seen by
    # each sea ice model
    freezing_pt_roms = salt_centres / (-18.48 + 18.48 / 1e3 * salt_centres)
    freezing_pt_fesom = -0.0575 * salt_centres + 1.7105e-3 * sqrt(
        salt_centres**3) - 2.155e-4 * salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres))) - 1000
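    # (the third argument of unesco is presumably pressure, set to zero here so
    # the result is surface-referenced; subtracting 1000 gives the usual sigma
    # anomaly in kg/m^3)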
    # Density contours to plot
    density_lev = arange(26.6, 28.4, 0.2)

    print 'Processing ROMS'
    # Read ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    roms_h = id.variables['h'][:, :]
    roms_zice = id.variables['zice'][:, :]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get integrands on 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(
        roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get volume integrand
    roms_dV = roms_dx * roms_dy * roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0, :, :, :]
    roms_salt = id.variables['salt'][0, :, :, :]
    id.close()
    # Loop over 2D grid boxes
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask
            if roms_temp[0, j, i] is ma.masked:
                continue
            # Check if we're in the region of interest
            if roms_lat[j, i] < nbdry:
                # Loop downward
                for k in range(N):
                    # Figure out which bins this falls into
                    temp_index = nonzero(
                        temp_bins > roms_temp[k, j, i])[0][0] - 1
                    salt_index = nonzero(
                        salt_bins > roms_salt[k, j, i])[0][0] - 1
                    # Integrate depth*dV in this bin
                    ts_vals_roms[
                        temp_index,
                        salt_index] += -roms_z[k, j, i] * roms_dV[k, j, i]
                    volume_roms[temp_index, salt_index] += roms_dV[k, j, i]
    # Mask bins with zero volume
    ts_vals_roms = ma.masked_where(volume_roms == 0, ts_vals_roms)
    volume_roms = ma.masked_where(volume_roms == 0, volume_roms)
    # Convert depths from integrals to volume-averages
    ts_vals_roms /= volume_roms

    print 'Processing FESOM'
    # Make FESOM grid elements
    elements = fesom_grid(fesom_mesh_path, circumpolar, cross_180)
    # Read temperature and salinity at each 3D node
    id = Dataset(fesom_file, 'r')
    fesom_temp = id.variables['temp'][0, :]
    fesom_salt = id.variables['salt'][0, :]
    id.close()
    # Loop over elements
    for elm in elements:
        # See if we're in the region of interest
        if all(elm.lat < nbdry):
            # Get area of 2D triangle
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[
                        2].below is None:
                    # We've reached the bottom
                    break
                # Calculate average temperature, salinity, depth, and layer
                # thickness over this 3D triangular prism
                temp_vals = []
                salt_vals = []
                depth_vals = []
                dz = []
                for i in range(3):
                    # Average temperature over 6 nodes
                    temp_vals.append(fesom_temp[nodes[i].id])
                    temp_vals.append(fesom_temp[nodes[i].below.id])
                    # Average salinity over 6 nodes
                    salt_vals.append(fesom_salt[nodes[i].id])
                    salt_vals.append(fesom_salt[nodes[i].below.id])
                    # Average depth over 6 nodes
                    depth_vals.append(nodes[i].depth)
                    depth_vals.append(nodes[i].below.depth)
                    # Average dz over 3 vertical edges
                    dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                    # Get ready for next repetition of loop
                    nodes[i] = nodes[i].below
                temp_elm = mean(array(temp_vals))
                salt_elm = mean(array(salt_vals))
                depth_elm = mean(array(depth_vals))
                # Calculate volume of 3D triangular prism
                volume = area * mean(array(dz))
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                # Integrate depth*volume in this bin
                ts_vals_fesom[temp_index, salt_index] += depth_elm * volume
                volume_fesom[temp_index, salt_index] += volume
    # Mask bins with zero volume
    ts_vals_fesom = ma.masked_where(volume_fesom == 0, ts_vals_fesom)
    volume_fesom = ma.masked_where(volume_fesom == 0, volume_fesom)
    # Convert depths from integrals to volume-averages
    ts_vals_fesom /= volume_fesom

    print 'Processing SOSE'
    # Read grid
    id = Dataset(sose_file, 'r')
    sose_lon = id.variables['longitude'][:, :]
    sose_lat = id.variables['latitude'][:, :]
    sose_z = id.variables['depth'][:]
    sose_temp = id.variables['temp'][0, :, :, :]
    sose_salt = id.variables['salt'][0, :, :, :]
    id.close()
    num_lon = size(sose_lon, 1)
    num_lat = size(sose_lat, 0)
    num_depth = size(sose_z)
    # Calculate integrands
    # Interpolate to get longitude at the edges of each cell
    w_bdry = 0.5 * (sose_lon[:, 0] + sose_lon[:, -1] - 360)
    middle_lon = 0.5 * (sose_lon[:, 0:-1] + sose_lon[:, 1:])
    e_bdry = 0.5 * (sose_lon[:, 0] + 360 + sose_lon[:, -1])
    lon_edges = concatenate((w_bdry[:, None], middle_lon, e_bdry[:, None]),
                            axis=1)
    dlon = abs(lon_edges[:, 1:] - lon_edges[:, 0:-1])
    # Similarly for latitude; linearly extrapolate for latitude at edges of
    # N/S boundary cells
    middle_lat = 0.5 * (sose_lat[0:-1, :] + sose_lat[1:, :])
    s_bdry = 2 * sose_lat[0, :] - middle_lat[0, :]
    n_bdry = 2 * sose_lat[-1, :] - middle_lat[-1, :]
    lat_edges = concatenate((s_bdry[None, :], middle_lat, n_bdry[None, :]),
                            axis=0)
    dlat = lat_edges[1:, :] - lat_edges[0:-1, :]
    # Convert to Cartesian space
    sose_dx_2d = r * cos(sose_lat * deg2rad) * dlon * deg2rad
    sose_dy_2d = r * dlat * deg2rad
    # We have z at the midpoint of each cell, now find it on the top and
    # bottom edges of each cell
    z_edges = zeros(num_depth + 1)
    z_edges[1:-1] = 0.5 * (sose_z[0:-1] + sose_z[1:])
    # At surface, z=0
    # At bottom, extrapolate
    z_edges[-1] = 2 * sose_z[-1] - z_edges[-2]
    # Now find dz
    sose_dz_1d = abs(z_edges[1:] - z_edges[0:-1])
    # Tile each array to be 3D
    sose_dx = tile(sose_dx_2d, (num_depth, 1, 1))
    sose_dy = tile(sose_dy_2d, (num_depth, 1, 1))
    sose_dz = transpose(tile(sose_dz_1d, (num_lon, num_lat, 1)))
    # Get volume integrand
    sose_dV = sose_dx * sose_dy * sose_dz
    # Loop over 2D grid boxes
    # Find the first latitude index north of 65S; stop there
    j_max = nonzero(sose_lat[:, 0] > nbdry)[0][0]
    for k in range(num_depth):
        for j in range(j_max):
            for i in range(num_lon):
                # Values exactly zero are masked
                if sose_temp[k, j, i] == 0.0:
                    continue
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > sose_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > sose_salt[k, j, i])[0][0] - 1
                # Integrate depth*dV in this bin
                ts_vals_sose[temp_index,
                             salt_index] += -sose_z[k] * sose_dV[k, j, i]
                volume_sose[temp_index, salt_index] += sose_dV[k, j, i]
    # Mask bins with zero volume
    ts_vals_sose = ma.masked_where(volume_sose == 0, ts_vals_sose)
    volume_sose = ma.masked_where(volume_sose == 0, volume_sose)
    # Convert depths from integrals to volume-averages
    ts_vals_sose /= volume_sose

    # Find the maximum depth for plotting
    max_depth = amax(
        array([amax(ts_vals_roms),
               amax(ts_vals_fesom),
               amax(ts_vals_sose)]))
    # Make a nonlinear scale
    bounds = linspace(0, max_depth**(1.0 / 2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)
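    # The boundaries are evenly spaced in depth**(1/2.5) and then raised back to
    # the power 2.5, which concentrates colour resolution at shallow depths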
    # Set labels for density contours
    manual_locations = [(33.4, 3.0), (33.65, 3.0), (33.9, 3.0), (34.2, 3.0),
                        (34.45, 3.5), (34.65, 3.25), (34.9, 3.0), (34.8, 0)]

    print "Plotting"
    fig = figure(figsize=(24, 10))
    # ROMS
    ax = fig.add_subplot(1, 3, 1)
    pcolor(salt_centres,
           temp_centres,
           ts_vals_roms,
           norm=norm,
           vmin=0,
           vmax=max_depth,
           cmap='jet')
    # Add surface freezing point line
    plot(salt_centres, freezing_pt_roms, color='black', linestyle='dashed')
    # Add density contours
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs,
           inline=1,
           fontsize=14,
           colors=(0.6, 0.6, 0.6),
           fmt='%1.1f',
           manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=22)
    title('MetROMS, 2002-2016', fontsize=26)
    # FESOM
    ax = fig.add_subplot(1, 3, 2)
    pcolor(salt_centres,
           temp_centres,
           ts_vals_fesom,
           norm=norm,
           vmin=0,
           vmax=max_depth,
           cmap='jet')
    plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs,
           inline=1,
           fontsize=14,
           colors=(0.6, 0.6, 0.6),
           fmt='%1.1f',
           manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    title('FESOM (high-res), 2002-2016', fontsize=26)
    # SOSE
    ax = fig.add_subplot(1, 3, 3)
    img = pcolor(salt_centres,
                 temp_centres,
                 ts_vals_sose,
                 norm=norm,
                 vmin=0,
                 vmax=max_depth,
                 cmap='jet')
    # No surface freezing point line, because no ice shelves!
    # Add density contours
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs,
           inline=1,
           fontsize=14,
           colors=(0.6, 0.6, 0.6),
           fmt='%1.1f',
           manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    title('SOSE, 2005-2010', fontsize=26)
    # Add a colourbar on the right
    cbaxes = fig.add_axes([0.93, 0.2, 0.02, 0.6])
    cbar = colorbar(img,
                    cax=cbaxes,
                    ticks=[0, 50, 100, 200, 500, 1000, 2000, 4000])
    cbar.ax.tick_params(labelsize=18)
    # Add the main title
    suptitle(r'Water masses south of 65$^{\circ}$S: depth (m)', fontsize=30)
    subplots_adjust(wspace=0.1)
    fig.show()
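
# Hypothetical call for mip_ts_distribution_sose (all paths below are
# placeholders, not from the original listing):
#   mip_ts_distribution_sose('circ30S_quarterdegree.nc', 'ocean_avg_2002_2016.nc',
#                            '/path/to/fesom/mesh/', 'oce_2002_2016_avg.nc')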
Example #9
def convert_file(year):

    # Make sure input argument is an integer (sometimes the batch script likes
    # to pass it as a string)
    year = int(year)

    # Paths of ROMS grid file, input ECCO2 files (without the tail yyyymm.nc),
    # and output ROMS-CICE boundary condition file; other users will need to
    # change these
    grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    theta_base = '../metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.' + str(year)
    salt_base = '../metroms_iceshelf/data/originals/ECCO2/SALT.1440x720x50.' + str(year)
    vvel_base = '../metroms_iceshelf/data/originals/ECCO2/VVEL.1440x720x50.' + str(year)
    output_file = '../metroms_iceshelf/data/ECCO2/ecco2_cube92_lbc_' + str(year) + '.nc'

    # Grid parameters; check grid_file and *.in to make sure these are correct
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Northernmost index of ECCO2 grid to read (1-based)
    nbdry_ecco = 241

    # Read ECCO2 grid
    print 'Reading ECCO2 grid'
    ecco_fid = Dataset(theta_base + '01.nc', 'r')
    lon_ecco_raw = ecco_fid.variables['LONGITUDE_T'][:]
    lat_ecco = ecco_fid.variables['LATITUDE_T'][0:nbdry_ecco]
    depth_ecco_raw = ecco_fid.variables['DEPTH_T'][:]
    ecco_fid.close()

    # The ECCO2 longitude axis doesn't wrap around; there is a gap between
    # almost-180W and almost-180E, and the ROMS grid has points in this gap.
    # So copy the last longitude value (mod 360) to the beginning, and the
    # first longitude value (mod 360) to the end.
    lon_ecco = zeros(size(lon_ecco_raw)+2)
    lon_ecco[0] = lon_ecco_raw[-1]-360
    lon_ecco[1:-1] = lon_ecco_raw
    lon_ecco[-1] = lon_ecco_raw[0]+360

    # The shallowest ECCO2 depth value is 5 m, but ROMS needs 0 m. So add the
    # index depth = 0 m to the beginning. Later we will just copy the 5 m
    # values for theta and salt into this index. Similarly, the deepest ECCO2
    # depth value is not deep enough for ROMS, so make a 6000 m index at the end.
    depth_ecco = zeros(size(depth_ecco_raw)+2)
    depth_ecco[0] = 0.0
    depth_ecco[1:-1] = depth_ecco_raw
    depth_ecco[-1] = 6000.0

    # Read ROMS grid
    print 'Reading ROMS grid'
    grid_fid = Dataset(grid_file, 'r')
    lon_rho = grid_fid.variables['lon_rho'][:,:]
    lat_rho = grid_fid.variables['lat_rho'][:,:]
    lon_u = grid_fid.variables['lon_u'][:,:]
    lat_u = grid_fid.variables['lat_u'][:,:]
    lon_v = grid_fid.variables['lon_v'][:,:]
    lat_v = grid_fid.variables['lat_v'][:,:]
    h = grid_fid.variables['h'][:,:]    
    zice = grid_fid.variables['zice'][:,:]
    mask_rho = grid_fid.variables['mask_rho'][:,:]
    mask_zice = grid_fid.variables['mask_zice'][:,:]
    grid_fid.close()    

    # Save the lengths of the longitude axis for each grid
    num_lon_rho = size(lon_rho, 1)
    num_lon_u = size(lon_u, 1)
    num_lon_v = size(lon_v, 1)
    # Mask h and zice with zeros
    h = h*mask_rho
    zice = zice*mask_zice
    # Interpolate h and zice to u and v grids
    h_u = 0.5*(h[:,0:-1] + h[:,1:])
    h_v = 0.5*(h[0:-1,:] + h[1:,:])
    zice_u = 0.5*(zice[:,0:-1] + zice[:,1:])
    zice_v = 0.5*(zice[0:-1,:] + zice[1:,:])

    # Calculate Cartesian integrands and z-coordinates for each grid
    dx_rho, dy_rho, dz_rho, z_rho = cartesian_grid_3d(lon_rho, lat_rho, h, zice, theta_s, theta_b, hc, N)
    dx_u, dy_u, dz_u, z_u = cartesian_grid_3d(lon_u, lat_u, h_u, zice_u, theta_s, theta_b, hc, N)
    dx_v, dy_v, dz_v, z_v = cartesian_grid_3d(lon_v, lat_v, h_v, zice_v, theta_s, theta_b, hc, N)
    # Also call calc_z for the rho_grid just so we get sc_r and Cs_r
    z_rho, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N)

    # Select just the northern boundary for each field
    dx_rho = dx_rho[:,-1,:]
    dy_rho = dy_rho[:,-1,:]
    dz_rho = dz_rho[:,-1,:]
    z_rho = z_rho[:,-1,:]
    dx_u = dx_u[:,-1,:]
    dy_u = dy_u[:,-1,:]
    dz_u = dz_u[:,-1,:]
    z_u = z_u[:,-1,:]
    dx_v = dx_v[:,-1,:]
    dy_v = dy_v[:,-1,:]
    dz_v = dz_v[:,-1,:]
    z_v = z_v[:,-1,:]

    # Copy longitude and latitude at the northern boundary into arrays of
    # dimension depth x longitude
    lon_rho = tile(lon_rho[-1,:], (N,1))
    lat_rho = tile(lat_rho[-1,:], (N,1))
    lon_u = tile(lon_u[-1,:], (N,1))
    lat_u = tile(lat_u[-1,:], (N,1))
    lon_v = tile(lon_v[-1,:], (N,1))
    lat_v = tile(lat_v[-1,:], (N,1))

    # Make sure ROMS longitudes are between 0 and 360
    index = lon_rho < 0
    lon_rho[index] += 360
    index = lon_rho > 360
    lon_rho[index] -= 360
    index = lon_u < 0
    lon_u[index] += 360
    index = lon_u > 360
    lon_u[index] -= 360
    index = lon_v < 0
    lon_v[index] += 360
    index = lon_v > 360
    lon_v[index] -= 360

    # Set up output file
    print 'Setting up ', output_file
    out_fid = Dataset(output_file, 'w')
    out_fid.createDimension('xi_u', num_lon_u)
    out_fid.createDimension('xi_v', num_lon_v)
    out_fid.createDimension('xi_rho', num_lon_rho)
    out_fid.createDimension('s_rho', N)
    out_fid.createDimension('ocean_time', None)
    out_fid.createDimension('one', 1)
    out_fid.createVariable('theta_s', 'f8', ('one'))
    out_fid.variables['theta_s'].long_name = 'S-coordinate surface control parameter'
    out_fid.variables['theta_s'][:] = theta_s
    out_fid.createVariable('theta_b', 'f8', ('one'))
    out_fid.variables['theta_b'].long_name = 'S-coordinate bottom control parameter'
    out_fid.variables['theta_b'].units = 'nondimensional'
    out_fid.variables['theta_b'][:] = theta_b
    out_fid.createVariable('Tcline', 'f8', ('one'))
    out_fid.variables['Tcline'].long_name = 'S-coordinate surface/bottom layer width'
    out_fid.variables['Tcline'].units = 'meter'
    out_fid.variables['Tcline'][:] = hc
    out_fid.createVariable('hc', 'f8', ('one'))
    out_fid.variables['hc'].long_name = 'S-coordinate parameter, critical depth'
    out_fid.variables['hc'].units = 'meter'
    out_fid.variables['hc'][:] = hc
    out_fid.createVariable('sc_r', 'f8', ('s_rho'))
    out_fid.variables['sc_r'].long_name = 'S-coordinate at rho-points'
    out_fid.variables['sc_r'].units = 'nondimensional'
    out_fid.variables['sc_r'].valid_min = -1
    out_fid.variables['sc_r'].valid_max = 0
    out_fid.variables['sc_r'][:] = sc_r
    out_fid.createVariable('Cs_r', 'f8', ('s_rho'))
    out_fid.variables['Cs_r'].long_name = 'S-coordinate stretching curves at RHO-points'
    out_fid.variables['Cs_r'].units = 'nondimensional'
    out_fid.variables['Cs_r'].valid_min = -1
    out_fid.variables['Cs_r'].valid_max = 0
    out_fid.variables['Cs_r'][:] = Cs_r
    out_fid.createVariable('ocean_time', 'f8', ('ocean_time'))
    out_fid.variables['ocean_time'].long_name = 'time since initialization'
    out_fid.variables['ocean_time'].units = 'days'
    out_fid.createVariable('temp_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'))
    out_fid.variables['temp_north'].long_name = 'northern boundary potential temperature'
    out_fid.variables['temp_north'].units = 'Celsius'
    out_fid.createVariable('salt_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'))
    out_fid.variables['salt_north'].long_name = 'northern boundary salinity'
    out_fid.variables['salt_north'].units = 'PSU'
    out_fid.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'))
    out_fid.variables['u_north'].long_name = 'northern boundary u-momentum component'
    out_fid.variables['u_north'].units = 'meter second-1'
    out_fid.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'))
    out_fid.variables['v_north'].long_name = 'northern boundary v-momentum component'
    out_fid.variables['v_north'].units = 'meter second-1'
    out_fid.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'))
    out_fid.variables['ubar_north'].long_name = 'northern boundary vertically integrated u-momentum component'
    out_fid.variables['ubar_north'].units = 'meter second-1'
    out_fid.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'))
    out_fid.variables['vbar_north'].long_name = 'northern boundary vertically integrated v-momentum component'
    out_fid.variables['vbar_north'].units = 'meter second-1'
    out_fid.createVariable('zeta_north', 'f8', ('ocean_time', 'xi_rho'))
    out_fid.variables['zeta_north'].long_name = 'northern boundary sea surface height'
    out_fid.variables['zeta_north'].units = 'meter'
    out_fid.close()

    # Loop through each month of this year
    for month in range(12):

        print 'Processing month ', str(month+1), ' of 12'
        # Construct the rest of the file paths
        if month+1 < 10:
            tail = '0' + str(month+1) + '.nc'
        else:
            tail = str(month+1) + '.nc'

        # Read temperature, salinity, velocity data
        theta_fid = Dataset(theta_base + tail, 'r')
        theta_raw = transpose(theta_fid.variables['THETA'][0,:,0:nbdry_ecco,:])
        theta_fid.close()
        salt_fid = Dataset(salt_base + tail, 'r')
        salt_raw = transpose(salt_fid.variables['SALT'][0,:,0:nbdry_ecco,:])
        salt_fid.close()
        vvel_fid = Dataset(vvel_base + tail, 'r')
        vvel_raw = transpose(vvel_fid.variables['VVEL'][0,:,0:nbdry_ecco,:])
        vvel_fid.close()

        # Copy the data to the new longitude and depth indices, making sure
        # to preserve the mask.
        theta = ma.array(zeros((size(lon_ecco), size(lat_ecco), size(depth_ecco))))
        theta[1:-1,:,1:-1] = ma.copy(theta_raw)
        theta[0,:,1:-1] = ma.copy(theta_raw[-1,:,:])
        theta[-1,:,1:-1] = ma.copy(theta_raw[0,:,:])
        theta[:,:,0] = ma.copy(theta[:,:,1])
        theta[:,:,-1] = ma.copy(theta[:,:,-2])
        salt = ma.array(zeros((size(lon_ecco), size(lat_ecco), size(depth_ecco))))
        salt[1:-1,:,1:-1] = ma.copy(salt_raw)
        salt[0,:,1:-1] = ma.copy(salt_raw[-1,:,:])
        salt[-1,:,1:-1] = ma.copy(salt_raw[0,:,:])
        salt[:,:,0] = ma.copy(salt[:,:,1])
        salt[:,:,-1] = ma.copy(salt[:,:,-2])
        vvel = ma.array(zeros((size(lon_ecco), size(lat_ecco), size(depth_ecco))))
        vvel[1:-1,:,1:-1] = ma.copy(vvel_raw)
        vvel[0,:,1:-1] = ma.copy(vvel_raw[-1,:,:])
        vvel[-1,:,1:-1] = ma.copy(vvel_raw[0,:,:])
        vvel[:,:,0] = ma.copy(vvel[:,:,1])
        vvel[:,:,-1] = ma.copy(vvel[:,:,-2])

        # Regridding happens here...
        print 'Interpolating temperature'
        temp_interp = interp_ecco2roms_nbc(theta, lon_ecco, lat_ecco, depth_ecco, lon_rho, lat_rho, z_rho, mean(theta), True)
        print 'Interpolating salinity'
        salt_interp = interp_ecco2roms_nbc(salt, lon_ecco, lat_ecco, depth_ecco, lon_rho, lat_rho, z_rho, mean(salt), True)
        print 'Interpolating v'
        v_interp = interp_ecco2roms_nbc(vvel, lon_ecco, lat_ecco, depth_ecco, lon_v, lat_v, z_v, 0, False)

        # Calculate vertical average of v to get vbar
        # Be sure to treat land mask carefully so we don't divide by 0
        vbar_interp = sum(v_interp*dz_v, axis=0)
        wct_v = h_v[-1,:] + zice_v[-1,:]
        index = wct_v == 0
        vbar_interp[~index] = vbar_interp[~index]/wct_v[~index]
        vbar_interp[index] = 0.0

        # Calculate time values centered in the middle of each month,
        # relative to 1992
        time = 365.25*(year-1992) + 365.25/12*(month+0.5)

        # Save data to NetCDF file
        out_fid = Dataset(output_file, 'a')
        out_fid.variables['ocean_time'][month] = time
        out_fid.variables['temp_north'][month,:,:] = temp_interp
        out_fid.variables['salt_north'][month,:,:] = salt_interp
        # Clamp u to zero
        out_fid.variables['u_north'][month,:,:] = 0.0
        out_fid.variables['v_north'][month,:,:] = v_interp
        # Clamp ubar to zero
        out_fid.variables['ubar_north'][month,:] = 0.0
        out_fid.variables['vbar_north'][month,:] = vbar_interp
        out_fid.variables['zeta_north'][month,:] = 0.0
        out_fid.close()
Example #10
def circumpolar_plot(
    file_path,
    var_name,
    tstep,
    depth_key,
    depth,
    depth_bounds,
    colour_bounds=None,
    save=False,
    fig_name=None,
    grid_path=None,
):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31
    deg2rad = pi / 180

    # Read the variable and figure out if 2D or 3D (not including time)
    id = Dataset(file_path, "r")
    if len(id.variables[var_name].shape) == 4:
        # 3D variable; will have to choose depth later
        data_full = id.variables[var_name][tstep - 1, :, :-15, :]
        choose_depth = True
    elif len(id.variables[var_name].shape) == 3:
        # 2D variable
        data = id.variables[var_name][tstep - 1, :-15, :]
        choose_depth = False
    if var_name == "salt":
        units = "psu"
    elif var_name == "m":
        # Convert ice shelf melt rate from m/s to m/yr
        units = "m/year"
        data = data * 60.0 * 60.0 * 24.0 * 365.25
    else:
        units = id.variables[var_name].units
    long_name = id.variables[var_name].long_name

    # Check for vector variables that need to be rotated
    if var_name in ["ubar", "vbar", "u", "v", "sustr", "svstr", "bustr", "bvstr"]:
        grid_id = Dataset(grid_path, "r")
        angle = grid_id.variables["angle"][:-15, :]
        grid_id.close()
        if var_name in ["ubar", "sustr", "bustr"]:
            # 2D u-variable
            u_data = data[:, :]
            v_data = id.variables[var_name.replace("u", "v")][tstep - 1, :-15, :]
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
            data = u_data_lonlat
        elif var_name in ["vbar", "svstr", "bvstr"]:
            # 2D v-variable
            v_data = data[:, :]
            u_data = id.variables[var_name.replace("v", "u")][tstep - 1, :-15, :]
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
            data = v_data_lonlat
        elif var_name in ["u"]:
            # 3D u-variable
            data_full_ugrid = data_full[:, :, :]
            data_full = ma.empty([data_full_ugrid.shape[0], data_full_ugrid.shape[1], data_full_ugrid.shape[2] + 1])
            for k in range(N):
                u_data = data_full_ugrid[k, :, :]
                v_data = id.variables[var_name.replace("u", "v")][tstep - 1, k, :-15, :]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
                data_full[k, :, :] = u_data_lonlat
        elif var_name in ["v"]:
            # 3D v-variable
            data_full_vgrid = data_full[:, :, :]
            data_full = ma.empty([data_full_vgrid.shape[0], data_full_vgrid.shape[1] + 1, data_full_vgrid.shape[2]])
            for k in range(N):
                v_data = data_full_vgrid[k, :, :]
                u_data = id.variables[var_name.replace("v", "u")][tstep - 1, k, :-15, :]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
                data_full[k, :, :] = v_data_lonlat

    # Read grid variables
    h = id.variables["h"][:-15, :]
    zice = id.variables["zice"][:-15, :]
    lon = id.variables["lon_rho"][:-15, :]
    lat = id.variables["lat_rho"][:-15, :]
    id.close()

    # Throw away the overlapping periodic boundary
    if choose_depth:
        data_full = data_full[:, :, :-1]
    else:
        data = data[:, :-1]
    lon = lon[:, :-1]
    lat = lat[:, :-1]
    h = h[:, :-1]
    zice = zice[:, :-1]

    # Convert to polar (x, y) coordinates for the circumpolar plot
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)

    # Choose what to write on the title about depth
    if choose_depth:
        if depth_key == 0:
            depth_string = "at surface"
        elif depth_key == 1:
            depth_string = "at bottom"
        elif depth_key == 2:
            depth_string = "at " + str(int(round(-depth))) + " m"
        elif depth_key == 3:
            depth_string = "vertically averaged"
        elif depth_key == 4:
            depth_string = (
                "vertically averaged between "
                + str(int(round(-depth_bounds[0])))
                + " and "
                + str(int(round(-depth_bounds[1])))
                + " m"
            )
    else:
        depth_string = ""

    if choose_depth:
        # For 3D variables, select data corresponding to depth choice
        if depth_key == 0:
            # Surface layer
            data = data_full[-1, :, :]
        elif depth_key == 1:
            # Bottom layer
            data = data_full[0, :, :]
        else:
            # We will need z-coordinates and possibly dz
            dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N)
            if depth_key == 2:
                # Interpolate to given depth
                data = interp_depth(data_full, z, depth)
            elif depth_key == 3:
                # Vertically average entire water column
                data = sum(data_full * dz, axis=0) / sum(dz, axis=0)
            elif depth_key == 4:
                # Vertically average between given depths
                data = average_btw_depths(data_full, z, dz, depth_bounds)

    if colour_bounds is not None:
        # User has set bounds on colour scale
        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
        if colour_bounds[0] == -colour_bounds[1]:
            # Bounds are centered on zero, so choose a blue-to-red colourmap
            # centered on yellow
            colour_map = "RdYlBu_r"
        else:
            colour_map = "jet"
    else:
        # Determine bounds automatically
        if var_name in ["u", "v", "ubar", "vbar", "m", "shflux", "ssflux", "sustr", "svstr", "bustr", "bvstr"]:
            # Center levels on 0 for certain variables, with a blue-to-red
            # colourmap
            max_val = amax(abs(data))
            lev = linspace(-max_val, max_val, num=40)
            colour_map = "RdYlBu_r"
        else:
            lev = linspace(amin(data), amax(data), num=40)
            colour_map = "jet"

    # Plot
    fig = figure(figsize=(16, 12))
    fig.add_subplot(1, 1, 1, aspect="equal")
    contourf(x, y, data, lev, cmap=colour_map, extend="both")
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    title(long_name + " (" + units + ")\n" + depth_string, fontsize=30)
    axis("off")

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
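
The depth_key == 3 branch above is a depth-weighted vertical average, a pattern that reappears in several of the examples below. A minimal, self-contained sketch of that calculation on synthetic arrays (the names and sizes here are illustrative only, not from the original code):

# Depth-weighted vertical average: sum(field*dz) / sum(dz) over the depth axis
from numpy import ones, sum
from numpy.random import rand

N, num_lat, num_lon = 31, 4, 5
data_full = rand(N, num_lat, num_lon)   # synthetic 3D field (depth x lat x lon)
dz = ones([N, num_lat, num_lon])        # layer thicknesses, e.g. from cartesian_grid_3d
vert_avg = sum(data_full*dz, axis=0)/sum(dz, axis=0)
print(vert_avg.shape)                   # (4, 5)
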
Example #11
def slope_current ():

    # File paths
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/2002_2016_avg.nc'
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    fesom_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/oce_2002_2016_avg.nc'
    fesom_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/oce_2002_2016_avg.nc'
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # FESOM mesh parameters
    circumpolar = False
    cross_180 = False
    # Spacing of longitude bins
    dlon = 1
    # Parameters for continental shelf selection
    lat0 = -64  # Maximum latitude to consider
    h0 = 2500  # Deepest depth to consider

    # Set up longitude bins
    # Start with edges
    lon_bins = arange(-180, 180+dlon, dlon)
    # Centres for plotting
    lon_centres = 0.5*(lon_bins[:-1] + lon_bins[1:])
    num_bins = size(lon_centres)
    # Set up arrays to store maximum barotropic speed in each bin
    current_roms = zeros(num_bins)
    current_fesom_lr = zeros(num_bins)
    current_fesom_hr = zeros(num_bins)

    print 'Processing MetROMS'

    print 'Reading grid'
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_zice = id.variables['zice'][:,:]
    roms_angle = id.variables['angle'][:,:]
    id.close()
    print 'Reading data'
    # Read full 3D u and v
    id = Dataset(roms_file, 'r')
    u_3d_tmp = id.variables['u'][0,:,:,:]
    v_3d_tmp = id.variables['v'][0,:,:,:]
    id.close()
    print 'Vertically averaging velocity'
    # Get integrands on 3D grid; we only care about dz
    dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Unrotate each vertical level
    u_3d = ma.empty(shape(dz))
    v_3d = ma.empty(shape(dz))
    num_lat_u = size(u_3d_tmp,1)
    num_lon_u = size(u_3d_tmp,2)
    num_lat_v = size(v_3d_tmp,1)
    num_lon_v = size(v_3d_tmp,2)
    for k in range(N):
        u_k, v_k = rotate_vector_roms(u_3d_tmp[k,:,:], v_3d_tmp[k,:,:], roms_angle)
        u_3d[k,:,:] = u_k
        v_3d[k,:,:] = v_k
    # Vertically average u and v
    roms_u = sum(u_3d*dz, axis=0)/sum(dz, axis=0)
    roms_v = sum(v_3d*dz, axis=0)/sum(dz, axis=0)
    # Calculate speed
    roms_speed = sqrt(roms_u**2 + roms_v**2)
    print 'Selecting slope current'
    # First make sure longitude is between -180 and 180
    index = roms_lon > 180
    roms_lon[index] = roms_lon[index] - 360
    for j in range(size(roms_speed,0)):
        for i in range(size(roms_speed,1)):
            # Check if we care about this point
            if roms_lat[j,i] <= lat0 and roms_h[j,i] <= h0 and roms_zice[j,i] == 0:
                # Find longitude bin
                lon_index = nonzero(lon_bins > roms_lon[j,i])[0][0] - 1
                # Update slope current speed in this bin if needed
                if roms_speed[j,i] > current_roms[lon_index]:
                    current_roms[lon_index] = roms_speed[j,i]

    print 'Processing low-res FESOM'

    print 'Building mesh'
    # We only care about nodes, not elements, so don't need to use the
    # fesom_grid function.
    # Read cavity flag for each 2D surface node
    fesom_cavity_lr = []
    f = open(fesom_mesh_path_lr + 'cavity_flag_nod2d.out', 'r')
    for line in f:
        tmp = int(line)
        if tmp == 1:
            fesom_cavity_lr.append(True)
        elif tmp == 0:
            fesom_cavity_lr.append(False)
        else:
            print 'Problem'
    f.close()
    # Save the number of 2D nodes
    fesom_n2d_lr = len(fesom_cavity_lr)
    # Read rotated lat and lon for each node, also depth
    f = open(fesom_mesh_path_lr + 'nod3d.out', 'r')
    f.readline()
    rlon_lr = []
    rlat_lr = []
    node_depth_lr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        lat_tmp = float(tmp[2])
        node_depth_tmp = -1*float(tmp[3])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_lr.append(lon_tmp)
        rlat_lr.append(lat_tmp)
        node_depth_lr.append(node_depth_tmp)
    f.close()
    # For lat and lon, only care about the 2D nodes (the first
    # fesom_n2d indices)
    rlon_lr = array(rlon_lr[0:fesom_n2d_lr])
    rlat_lr = array(rlat_lr[0:fesom_n2d_lr])
    node_depth_lr = array(node_depth_lr)
    # Unrotate longitude
    fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr)
    # Read lists of which nodes are directly below which
    f = open(fesom_mesh_path_lr + 'aux3d.out', 'r')
    max_num_layers_lr = int(f.readline())
    node_columns_lr = zeros([fesom_n2d_lr, max_num_layers_lr])
    for n in range(fesom_n2d_lr):
        for k in range(max_num_layers_lr):
            node_columns_lr[n,k] = int(f.readline())
    node_columns_lr = node_columns_lr.astype(int)
    f.close()
    # Now figure out the bottom depth of each 2D node
    bottom_depth_lr = zeros(fesom_n2d_lr)
    for n in range(fesom_n2d_lr):
        node_id = node_columns_lr[n,0] - 1
        for k in range(1, max_num_layers_lr):
            if node_columns_lr[n,k] == -999:
                # Reached the bottom
                break
            node_id = node_columns_lr[n,k] - 1
        # Save the depth of the last valid (deepest) node in the column
        bottom_depth_lr[n] = node_depth_lr[node_id]
    print 'Reading data'
    # Read full 3D field for both u and v
    id = Dataset(fesom_file_lr, 'r')
    node_ur_3d_lr = id.variables['u'][0,:]
    node_vr_3d_lr = id.variables['v'][0,:]
    id.close()
    print 'Vertically averaging velocity'
    # Vertically average
    node_ur_lr = zeros(fesom_n2d_lr)
    node_vr_lr = zeros(fesom_n2d_lr)
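    # For each 2D node, the loop below approximates the depth-averaged velocity
    # with the trapezoidal rule: sum 0.5*(top value + bottom value)*dz over the
    # layers of the water column, then divide by the total column thickness.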
    for n in range(fesom_n2d_lr):
        # Integrate udz, vdz, and dz over this water column
        udz_col = 0
        vdz_col = 0
        dz_col = 0
        for k in range(max_num_layers_lr-1):
            if node_columns_lr[n,k+1] == -999:
                # Reached the bottom
                break
            # Trapezoidal rule
            top_id = node_columns_lr[n,k]
            bot_id = node_columns_lr[n,k+1]
            dz_tmp = node_depth_lr[bot_id-1] - node_depth_lr[top_id-1]
            udz_col += 0.5*(node_ur_3d_lr[top_id-1]+node_ur_3d_lr[bot_id-1])*dz_tmp
            vdz_col += 0.5*(node_vr_3d_lr[top_id-1]+node_vr_3d_lr[bot_id-1])*dz_tmp
            dz_col += dz_tmp
        # Convert from integrals to averages
        node_ur_lr[n] = udz_col/dz_col
        node_vr_lr[n] = vdz_col/dz_col
    # Unrotate
    node_u_lr, node_v_lr = unrotate_vector(rlon_lr, rlat_lr, node_ur_lr, node_vr_lr)
    # Calculate speed
    node_speed_lr = sqrt(node_u_lr**2 + node_v_lr**2)
    print 'Selecting slope current'
    for n in range(fesom_n2d_lr):
        # Check if we care about this node
        if fesom_lat_lr[n] <= lat0 and bottom_depth_lr[n] <= h0 and not fesom_cavity_lr[n]:
            # Find longitude bin
            lon_index = nonzero(lon_bins > fesom_lon_lr[n])[0][0] - 1
            # Update slope current speed in this bin if needed
            if node_speed_lr[n] > current_fesom_lr[lon_index]:
                current_fesom_lr[lon_index] = node_speed_lr[n]

    print 'Processing high-res FESOM'

    print 'Building mesh'
    fesom_cavity_hr = []
    f = open(fesom_mesh_path_hr + 'cavity_flag_nod2d.out', 'r')
    for line in f:
        tmp = int(line)
        if tmp == 1:
            fesom_cavity_hr.append(True)
        elif tmp == 0:
            fesom_cavity_hr.append(False)
        else:
            print 'Problem'
    f.close()
    fesom_n2d_hr = len(fesom_cavity_hr)
    f = open(fesom_mesh_path_hr + 'nod3d.out', 'r')
    f.readline()
    rlon_hr = []
    rlat_hr = []
    node_depth_hr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        lat_tmp = float(tmp[2])
        node_depth_tmp = -1*float(tmp[3])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_hr.append(lon_tmp)
        rlat_hr.append(lat_tmp)
        node_depth_hr.append(node_depth_tmp)
    f.close()
    rlon_hr = array(rlon_hr[0:fesom_n2d_hr])
    rlat_hr = array(rlat_hr[0:fesom_n2d_hr])
    node_depth_hr = array(node_depth_hr)
    fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr)
    f = open(fesom_mesh_path_hr + 'aux3d.out', 'r')
    max_num_layers_hr = int(f.readline())
    node_columns_hr = zeros([fesom_n2d_hr, max_num_layers_hr])
    for n in range(fesom_n2d_hr):
        for k in range(max_num_layers_hr):
            node_columns_hr[n,k] = int(f.readline())
    node_columns_hr = node_columns_hr.astype(int)
    f.close()
    bottom_depth_hr = zeros(fesom_n2d_hr)
    for n in range(fesom_n2d_hr):
        node_id = node_columns_hr[n,0] - 1
        for k in range(1, max_num_layers_hr):
            if node_columns_hr[n,k] == -999:
                break
            node_id = node_columns_hr[n,k] - 1
        bottom_depth_hr[n] = node_depth_hr[node_id]
    print 'Reading data'
    id = Dataset(fesom_file_hr, 'r')
    node_ur_3d_hr = id.variables['u'][0,:]
    node_vr_3d_hr = id.variables['v'][0,:]
    id.close()
    print 'Vertically averaging velocity'
    node_ur_hr = zeros(fesom_n2d_hr)
    node_vr_hr = zeros(fesom_n2d_hr)
    for n in range(fesom_n2d_hr):
        udz_col = 0
        vdz_col = 0
        dz_col = 0
        for k in range(max_num_layers_hr-1):
            if node_columns_hr[n,k+1] == -999:
                break
            top_id = node_columns_hr[n,k]
            bot_id = node_columns_hr[n,k+1]
            dz_tmp = node_depth_hr[bot_id-1] - node_depth_hr[top_id-1]
            udz_col += 0.5*(node_ur_3d_hr[top_id-1]+node_ur_3d_hr[bot_id-1])*dz_tmp
            vdz_col += 0.5*(node_vr_3d_hr[top_id-1]+node_vr_3d_hr[bot_id-1])*dz_tmp
            dz_col += dz_tmp
        node_ur_hr[n] = udz_col/dz_col
        node_vr_hr[n] = vdz_col/dz_col
    node_u_hr, node_v_hr = unrotate_vector(rlon_hr, rlat_hr, node_ur_hr, node_vr_hr)
    node_speed_hr = sqrt(node_u_hr**2 + node_v_hr**2)
    print 'Selecting slope current'
    for n in range(fesom_n2d_hr):
        if fesom_lat_hr[n] <= lat0 and bottom_depth_hr[n] <= h0 and not fesom_cavity_hr[n]:
            lon_index = nonzero(lon_bins > fesom_lon_hr[n])[0][0] - 1
            if node_speed_hr[n] > current_fesom_hr[lon_index]:
                current_fesom_hr[lon_index] = node_speed_hr[n]

    print 'Plotting'
    fig = figure(figsize=(12,8))
    plot(lon_centres, current_roms, color='blue', label='MetROMS')
    plot(lon_centres, current_fesom_lr, color='green', label='FESOM low-res')
    plot(lon_centres, current_fesom_hr, color='magenta', label='FESOM high-res')
    grid(True)
    title('Slope current speed', fontsize=20)
    xlabel('Longitude', fontsize=14)
    ylabel('m/s', fontsize=14)
    xlim([-180, 180])
    legend()
    fig.savefig('slope_current.png')

    print 'Mean slope current in MetROMS: ' + str(mean(current_roms)) + ' m/s'
    print 'Mean slope current in low-res FESOM: ' + str(mean(current_fesom_lr)) + ' m/s'
    print 'Mean slope current in high-res FESOM: ' + str(mean(current_fesom_hr)) + ' m/s'


# Command-line interface
if __name__ == "__main__":

    slope_current()
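
The per-point bin search in slope_current (nonzero(lon_bins > lon)[0][0] - 1) can also be written with numpy.digitize; a small sketch on made-up longitudes, assuming the same bin edges as above:

from numpy import arange, array, digitize

dlon = 1
lon_bins = arange(-180, 180+dlon, dlon)
sample_lons = array([-179.5, 0.2, 65.7])
bin_indices = digitize(sample_lons, lon_bins) - 1
print(bin_indices)   # indices into lon_centres: 0, 180 and 245
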
Example #12
def overturning_plot (grid_path, file_path, fig_name):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31

    # Read angle from the grid file
    grid_id = Dataset(grid_path, 'r')
    angle = grid_id.variables['angle'][:-15,:]
    grid_id.close()
    # Read grid variables
    id = Dataset(file_path, 'r')
    h = id.variables['h'][:-15,1:-1]
    zice = id.variables['zice'][:-15,1:-1]
    zeta = id.variables['zeta'][-1,:-15,1:-1]
    lon = id.variables['lon_rho'][:-15,1:-1]
    lat = id.variables['lat_rho'][:-15,1:-1]
    # Read both velocities in x-y space
    u_xy = id.variables['u'][-1,:,:-15,:]
    v_xy = id.variables['v'][-1,:,:-15,:]    
    id.close()

    # Rotate velocities to lat-lon space
    v = ma.empty([N,v_xy.shape[1]+1,v_xy.shape[2]])
    for k in range(N):
        u_lonlat, v_lonlat = rotate_vector_roms(u_xy[k,:,:], v_xy[k,:,:], angle)
        v[k,:,:] = v_lonlat[:,:]
    # Throw away the periodic boundary overlap
    v = v[:,:,1:-1]

    # Calculate Cartesian integrands and z-coordinates
    dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta)

    # Calculate transport in each cell
    transport = v*dx*dz
    # Definite integral over longitude
    transport = sum(transport, axis=2)
    # Indefinite integral over depth; flip before and after so the integral
    # starts at the surface, not the bottom. Also convert to Sv.
    transport = flipud(cumsum(flipud(transport), axis=0))*1e-6

    # Calculate latitude and z coordinates, averaged over longitude,
    # for plotting
    avg_lat = mean(lat, axis=1)
    avg_lat = tile(avg_lat, (N,1))
    avg_z = mean(z, axis=2)

    # Centre colour scale on 0
    max_val = amax(abs(transport))
    lev = linspace(-max_val, max_val, num=40)

    # Make the plot
    figure(figsize=(16,8))
    contourf(avg_lat, avg_z, transport, lev, cmap='RdBu_r')
    colorbar()
    xlabel('Latitude')
    ylabel('Depth (m)')
    title('Meridional Overturning Streamfunction (Sv)')

    #savefig(fig_name)
    show()
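
The streamfunction step above (flip, cumulative sum, flip back) integrates transport from the surface downward, since the ROMS vertical index runs from the bottom (k=0) to the surface (k=N-1). A tiny sketch with a toy 3x2 transport array:

from numpy import array, cumsum, flipud

transport = array([[1.0, 2.0],    # bottom layer (k=0)
                   [3.0, 4.0],
                   [5.0, 6.0]])   # surface layer
psi = flipud(cumsum(flipud(transport), axis=0))
print(psi)   # rows from bottom to surface: [9, 12], [8, 10], [5, 6]
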
Example #13
def mip_watermass_barchart (roms_grid, roms_file, fesom_mesh_lr, fesom_mesh_hr, fesom_file_lr, fesom_file_hr):

    # Sectors to consider
    sector_names = ['a) Filchner-Ronne Ice Shelf', 'b) Eastern Weddell Region', 'c) Amery Ice Shelf', 'd) Australian Sector', 'e) Ross Sea', 'f) Amundsen Sea', 'g) Bellingshausen Sea', 'h) Larsen Ice Shelves', 'i) All Ice Shelves']
    num_sectors = len(sector_names)
    # Water masses to consider
    wm_names = ['ISW', 'MCDW', 'HSSW', 'LSSW', 'AASW']
    num_watermasses = len(wm_names)
    wm_colours = [(0.73, 0.6, 1), (1, 0.4, 0.4), (0.52, 0.88, 0.52), (0.6, 0.8, 1), (1, 1, 0)]
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # FESOM mesh parameters
    circumpolar = True
    cross_180 = False

    print 'Processing MetROMS'
    # Read ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_zice = id.variables['zice'][:,:]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get integrands on 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get volume integrand
    dV = roms_dx*roms_dy*roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0,:,:,:]
    roms_salt = id.variables['salt'][0,:,:,:]
    id.close()
    # Initialise volume of each water mass in each sector
    roms_vol_watermass = zeros([num_watermasses, num_sectors])
    # Calculate water mass breakdown
    for j in range(num_lat):
        for i in range(num_lon):
            # Select ice shelf points
            if roms_zice[j,i] < 0:
                # Figure out which sector this point falls into
                lon = roms_lon[j,i]
                if lon > 180:
                    lon -= 360
                lat = roms_lat[j,i]
                if lon >= -85 and lon < -30 and lat < -74:
                    # Filchner-Ronne
                    sector = 0
                elif lon >= -30 and lon < 65:
                    # Eastern Weddell region
                    sector = 1
                elif lon >= 65 and lon < 76:
                    # Amery
                    sector = 2
                elif lon >= 76 and lon < 165 and lat >= -74:
                    # Australian sector
                    sector = 3
                elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                    # Ross Sea
                    sector = 4
                elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                    # Amundsen Sea
                    sector = 5
                elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                    # Bellingshausen Sea
                    sector = 6
                elif lon >= -66 and lon < -59 and lat >= -74:
                    # Larsen Ice Shelves
                    sector = 7
                else:
                    print 'No region found for lon=',str(lon),', lat=',str(lat)
                    break #return
                # Loop downward
                for k in range(N):
                    curr_temp = roms_temp[k,j,i]
                    curr_salt = roms_salt[k,j,i]
                    curr_volume = dV[k,j,i]
                    # Get surface freezing point at this salinity
                    curr_tfrz = curr_salt/(-18.48 + 18.48/1e3*curr_salt)
                    # Figure out what water mass this is
                    if curr_temp < curr_tfrz:
                        # ISW
                        wm_key = 0
                    elif curr_salt < 34:
                        # AASW
                        wm_key = 4
                    elif curr_temp > -1.5:
                        # MCDW
                        wm_key = 1
                    elif curr_salt < 34.5:
                        # LSSW
                        wm_key = 3
                    else:
                        # HSSW
                        wm_key = 2
                    # Integrate volume for the right water mass and sector
                    roms_vol_watermass[wm_key, sector] += curr_volume
                    # Also integrate total Antarctica
                    roms_vol_watermass[wm_key, -1] += curr_volume
    # Find total volume of each sector by adding up the volume of each
    # water mass
    roms_vol_sectors = sum(roms_vol_watermass, axis=0)
    # Calculate percentage of each water mass in each sector
    roms_percent_watermass = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            roms_percent_watermass[wm_key, sector] = roms_vol_watermass[wm_key, sector]/roms_vol_sectors[sector]*100                

    print 'Processing low-res FESOM'
    # Build mesh
    elements_lr = fesom_grid(fesom_mesh_lr, circumpolar, cross_180)
    id = Dataset(fesom_file_lr, 'r')
    temp_nodes_lr = id.variables['temp'][0,:]
    salt_nodes_lr = id.variables['salt'][0,:]
    id.close()
    fesom_vol_watermass_lr = zeros([num_watermasses, num_sectors])
    for i in range(len(elements_lr)):
        elm = elements_lr[i]
        if elm.cavity:
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                sector = 0
            elif lon >= -30 and lon < 65:
                sector = 1
            elif lon >= 65 and lon < 76:
                sector = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                sector = 3
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                sector = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                sector = 5
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                sector = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                sector = 7
            else:
                print 'No region found for lon=',str(lon),', lat=',str(lat)
                break #return
            # Get area of 2D element
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    # Reached the bottom
                    break
                # Calculate average temperature, salinity, and
                # layer thickness for this 3D triangular prism
                temp_vals = []
                salt_vals = []
                dz_vals = []
                for n in range(3):
                    temp_vals.append(temp_nodes_lr[nodes[n].id])
                    salt_vals.append(salt_nodes_lr[nodes[n].id])
                    temp_vals.append(temp_nodes_lr[nodes[n].below.id])
                    salt_vals.append(salt_nodes_lr[nodes[n].below.id])
                    dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                    # Get ready for next iteration of loop
                    nodes[n] = nodes[n].below
                curr_temp = mean(array(temp_vals))
                curr_salt = mean(array(salt_vals))
                curr_volume = area*mean(array(dz_vals))
                curr_tfrz = -0.0575*curr_salt + 1.7105e-3*sqrt(curr_salt**3) - 2.155e-4*curr_salt**2
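                # (for reference, this gives a surface freezing point of about
                # -1.89 degrees C at salinity 34.5)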
                if curr_temp < curr_tfrz:
                    wm_key = 0
                elif curr_salt < 34:
                    wm_key = 4
                elif curr_temp > -1.5:
                    wm_key = 1
                elif curr_salt < 34.5:
                    wm_key = 3
                else:
                    wm_key = 2
                fesom_vol_watermass_lr[wm_key, sector] += curr_volume
                fesom_vol_watermass_lr[wm_key, -1] += curr_volume
    fesom_vol_sectors_lr = sum(fesom_vol_watermass_lr, axis=0)
    fesom_percent_watermass_lr = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            fesom_percent_watermass_lr[wm_key, sector] = fesom_vol_watermass_lr[wm_key, sector]/fesom_vol_sectors_lr[sector]*100

    print 'Processing high-res FESOM'
    elements_hr = fesom_grid(fesom_mesh_hr, circumpolar, cross_180)
    fesom_vol_watermass_hr = zeros([num_watermasses, num_sectors])
    id = Dataset(fesom_file_hr, 'r')
    temp_nodes_hr = id.variables['temp'][0,:]
    salt_nodes_hr = id.variables['salt'][0,:]
    id.close()
    for i in range(len(elements_hr)):
        elm = elements_hr[i]
        if elm.cavity:
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                sector = 0
            elif lon >= -30 and lon < 65:
                sector = 1
            elif lon >= 65 and lon < 76:
                sector = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                sector = 3
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                sector = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                sector = 5
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                sector = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                sector = 7
            else:
                print 'No region found for lon=',str(lon),', lat=',str(lat)
                break #return
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    break
                temp_vals = []
                salt_vals = []
                dz_vals = []
                for n in range(3):
                    temp_vals.append(temp_nodes_hr[nodes[n].id])
                    salt_vals.append(salt_nodes_hr[nodes[n].id])
                    temp_vals.append(temp_nodes_hr[nodes[n].below.id])
                    salt_vals.append(salt_nodes_hr[nodes[n].below.id])
                    dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                    nodes[n] = nodes[n].below
                curr_temp = mean(array(temp_vals))
                curr_salt = mean(array(salt_vals))
                curr_volume = area*mean(array(dz_vals))
                curr_tfrz = -0.0575*curr_salt + 1.7105e-3*sqrt(curr_salt**3) - 2.155e-4*curr_salt**2
                if curr_temp < curr_tfrz:
                    wm_key = 0
                elif curr_salt < 34:
                    wm_key = 4
                elif curr_temp > -1.5:
                    wm_key = 1
                elif curr_salt < 34.5:
                    wm_key = 3
                else:
                    wm_key = 2
                fesom_vol_watermass_hr[wm_key, sector] += curr_volume
                fesom_vol_watermass_hr[wm_key, -1] += curr_volume
    fesom_vol_sectors_hr = sum(fesom_vol_watermass_hr, axis=0)
    fesom_percent_watermass_hr = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            fesom_percent_watermass_hr[wm_key, sector] = fesom_vol_watermass_hr[wm_key, sector]/fesom_vol_sectors_hr[sector]*100

    print 'Plotting'
    fig = figure(figsize=(12,6))
    gs = GridSpec(3,3)
    gs.update(left=0.15, right=0.98, bottom=0.2, top=0.88, wspace=0.1, hspace=0.28)
    handles = []
    for sector in range(num_sectors):
        ax = subplot(gs[sector//3, sector%3])
        lefts = 0
        for wm_key in range(num_watermasses):
            ax.barh(0, roms_percent_watermass[wm_key, sector], color=wm_colours[wm_key], left=lefts, align='center')
            lefts += roms_percent_watermass[wm_key, sector]
        lefts = 0
        for wm_key in range(num_watermasses):
            ax.barh(1, fesom_percent_watermass_lr[wm_key, sector], color=wm_colours[wm_key], left=lefts, align='center')
            lefts += fesom_percent_watermass_lr[wm_key, sector]
        lefts = 0
        for wm_key in range(num_watermasses):
            tmp = ax.barh(2, fesom_percent_watermass_hr[wm_key, sector], color=wm_colours[wm_key], left=lefts, align='center')
            if sector == num_sectors-1:
                handles.append(tmp)
            lefts += fesom_percent_watermass_hr[wm_key, sector]
        xlim([0, 100])
        ax.invert_yaxis()
        ax.set_yticks(range(3))
        if sector % 3 == 0:
            ax.set_yticklabels(('MetROMS', 'FESOM (low-res)', 'FESOM (high-res)'))
        else:
            ax.set_yticklabels(('','',''))
        if sector >= num_sectors-3:
            ax.set_xlabel('% volume')
        else:
            ax.set_xticklabels([])
        ax.set_title(sector_names[sector])
    legend(handles, wm_names, ncol=num_watermasses, bbox_to_anchor=(0.35,-0.4))
    subplots_adjust(wspace=0.05, hspace=0.2)
    suptitle('Water masses in ice shelf cavities (2002-2016 average)', fontsize=20)
    fig.show()
    fig.savefig('wm_barchart.png')
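
The water-mass decision tree used twice above can be read as a small classifier; a sketch only, using the same thresholds and the FESOM surface freezing point expression from this example (the returned indices follow wm_names: ISW, MCDW, HSSW, LSSW, AASW):

from numpy import sqrt

def classify_water_mass(temp, salt):
    tfrz = -0.0575*salt + 1.7105e-3*sqrt(salt**3) - 2.155e-4*salt**2
    if temp < tfrz:
        return 0   # ISW: colder than the surface freezing point
    elif salt < 34:
        return 4   # AASW: fresh surface water
    elif temp > -1.5:
        return 1   # MCDW: relatively warm
    elif salt < 34.5:
        return 3   # LSSW
    else:
        return 2   # HSSW

print(classify_water_mass(-2.3, 34.6))   # 0 (ISW)
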
Example #14
def timeseries_3D (grid_path, file_path, log_path):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31
    rho0 = 1000.0    # Reference density (kg/m^3)
    Cp = 3974        # Specific heat of polar seawater (J/K/kg)
    C2K = 273.15     # Celsius to Kelvin conversion

    time = []
    ohc = []
    avgsalt = []
    tke = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                ohc.append(float(line))
            except(ValueError):
                break
        for line in f:
            try:
                avgsalt.append(float(line))
            except(ValueError):
                break
        for line in f:
            tke.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(grid_path, 'r')
    h = id.variables['h'][:-15,1:-1]
    zice = id.variables['zice'][:-15,1:-1]    
    lon = id.variables['lon_rho'][:-15,1:-1]
    lat = id.variables['lat_rho'][:-15,1:-1]
    mask = id.variables['mask_rho'][:-15,1:-1]
    # Keep the overlapping periodic boundary on "angle" for now
    angle = id.variables['angle'][:-15,:]
    id.close()

    id = Dataset(file_path, 'r')
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:]/(60*60*24*365.25)
    num_time = size(new_time)
    # Concatenate with time values from log file
    for t in range(num_time):        
        time.append(new_time[t])

    # Process 10 time indices at a time so we don't use too much memory
    start_t = 0
    while True:
        end_t = min(start_t+10, num_time)
        print 'Processing time indices ' + str(start_t+1) + ' to ' + str(end_t)
        num_time_curr = end_t-start_t

        print 'Calculating time-dependent dV'
        # Read time-dependent sea surface height
        zeta = id.variables['zeta'][start_t:end_t,:-15,1:-1]
        # Calculate time-dependent dz
        dz = ma.empty([num_time_curr, N, size(lon,0), size(lon,1)])
        for t in range(num_time_curr):
            # dx and dy will be recomputed unnecessarily each timestep
            # but that's ok
            dx, dy, dz_tmp, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta[t,:,:])
            dz[t,:,:,:] = dz_tmp
        # Calculate time-dependent dV and mask with land mask
        # Here mask, dx, dy are all copied into arrays of dimension
        # time x depth x lat x lon
        dV = ma.masked_where(tile(mask, (num_time_curr,N,1,1))==0, tile(dx, (num_time_curr,1,1,1))*tile(dy, (num_time_curr,1,1,1))*dz)

        print 'Reading data'
        temp = id.variables['temp'][start_t:end_t,:,:-15,1:-1]
        salt = id.variables['salt'][start_t:end_t,:,:-15,1:-1]
        rho = id.variables['rho'][start_t:end_t,:,:-15,1:-1] + rho0
        # Keep overlapping periodic boundary for u and v
        u_xy = id.variables['u'][start_t:end_t,:,:-15,:]
        v_xy = id.variables['v'][start_t:end_t,:,:-15,:]

        print 'Interpolating velocities onto rho-grid'
        # We are actually rotating them at the same time as interpolating,
        # which is a bit of unnecessary work (the sum of squares won't change
        # with rotation), but it's not much extra work and it's convenient
        u = ma.empty(shape(temp))
        v = ma.empty(shape(temp))
        for t in range(num_time_curr):
            for k in range(N):
                u_tmp, v_tmp = rotate_vector_roms(u_xy[t,k,:,:], v_xy[t,k,:,:], angle)
                u[t,k,:,:] = u_tmp[:,1:-1]
                v[t,k,:,:] = v_tmp[:,1:-1]

        print 'Building timeseries'
        for t in range(num_time_curr):
            # Integrate temp*rho*Cp*dV to get OHC
            ohc.append(sum((temp[t,:,:,:]+C2K)*rho[t,:,:,:]*Cp*dV[t,:,:,:]))
            # Average salinity (weighted with rho*dV)
            avgsalt.append(sum(salt[t,:,:,:]*rho[t,:,:,:]*dV[t,:,:,:])/sum(rho[t,:,:,:]*dV[t,:,:,:]))
            # Integrate 0.5*rho*speed^2*dV to get TKE
            tke.append(sum(0.5*rho[t,:,:,:]*(u[t,:,:,:]**2 + v[t,:,:,:]**2)*dV[t,:,:,:]))

        # Get ready for next 10 time indices
        if end_t == num_time:
            break
        start_t = end_t

    id.close()

    print 'Plotting ocean heat content'
    clf()
    plot(time, ohc)
    xlabel('Years')
    ylabel('Southern Ocean Heat Content (J)')
    grid(True)
    savefig('ohc.png')

    print 'Plotting average salinity'
    clf()
    plot(time, avgsalt)
    xlabel('Years')
    ylabel('Southern Ocean Average Salinity (psu)')
    grid(True)
    savefig('avgsalt.png')

    print 'Plotting total kinetic energy'
    clf()
    plot(time, tke)
    xlabel('Years')
    ylabel('Southern Ocean Total Kinetic Energy (J)')
    grid(True)
    savefig('tke.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Heat Content (J):\n')
    for elm in ohc:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Average Salinity (psu):\n')
    for elm in avgsalt:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Total Kinetic Energy (J):\n')
    for elm in tke:
        f.write(str(elm) + '\n')
    f.close()
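
The integrals accumulated above are straightforward per-cell sums; a tiny worked example for a single grid cell (all numbers made up), using OHC = (T + 273.15)*rho*Cp*dV and TKE = 0.5*rho*(u**2 + v**2)*dV:

rho = 1027.0       # kg/m^3
Cp = 3974.0        # J/K/kg
temp = -1.5        # degC
u, v = 0.1, 0.05   # m/s
dV = 1e9           # m^3 (e.g. a 10 km x 10 km x 10 m cell)
ohc_cell = (temp + 273.15)*rho*Cp*dV
tke_cell = 0.5*rho*(u**2 + v**2)*dV
print(ohc_cell)    # ~1.1e18 J
print(tke_cell)    # ~6.4e9 J
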
Example #15
def mip_regions_1var ():

    # Path to ROMS grid file
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    # Path to ROMS time-averaged file
    roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/2002_2016_avg.nc'
    # Path to FESOM mesh directories
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    # Path to FESOM time-averaged ocean files (temp, salt, u, v)
    fesom_file_lr_o = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/oce_2002_2016_avg.nc'
    fesom_file_hr_o = '/short/y99/kaa561/FESOM/intercomparison_highres/output/oce_2002_2016_avg.nc'
    # Path to FESOM time-averaged ice shelf files (wnet)
    fesom_file_lr_i = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/wnet_2002_2016_avg.nc'
    fesom_file_hr_i = '/short/y99/kaa561/FESOM/intercomparison_highres/output/wnet_2002_2016_avg.nc'

    # Name of each region
    region_names = ['Filchner-Ronne Ice Shelf', 'Eastern Weddell Region', 'Amery Ice Shelf', 'Australian Sector', 'Ross Sea', 'Amundsen Sea', 'Bellingshausen Sea', 'Larsen Ice Shelves']
    num_regions = len(region_names)
    # Beginning of filenames for figures
    fig_heads = ['filchner_ronne', 'eweddell', 'amery', 'australian', 'ross', 'amundsen', 'bellingshausen', 'larsen']
    # Bounds for each region (using polar coordinate transformation as below)
    x_min = [-14, -8, 15.25, 12, -9.5, -15.5, -20.25, -22.5]
    x_max = [-4.5, 13, 20.5, 25.5, 4, -10.5, -15.5, -14.5]
    y_min = [1, 12, 4.75, -20, -13, -11.25, -4.5, 8.3]
    y_max = [10, 21, 8, 4, -4.75, -2.25, 7.6, 13]
    # Size of each plot in the y direction
    ysize = [8, 6, 7, 9, 7, 9, 10, 7]
    # Variables to process
    var_names = ['vel'] #['bathy', 'draft', 'wct', 'melt', 'temp', 'salt', 'vel']
    # Constants
    sec_per_year = 365*24*3600
    deg2rad = pi/180.0
    # Parameters for missing circle in ROMS grid
    lon_c = 50
    lat_c = -83
    radius = 10.1
    nbdry = -63+90
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Number of bins in each direction for vector overlay
    num_bins = 30

    print 'Reading ROMS grid'
    # Read the fields we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_mask = id.variables['mask_rho'][:,:]
    roms_zice = id.variables['zice'][:,:]
    roms_angle = id.variables['angle'][:,:]
    id.close()
    # Get land/zice mask
    open_ocn = copy(roms_mask)
    open_ocn[roms_zice!=0] = 0
    land_zice = ma.masked_where(open_ocn==1, open_ocn)
    # Convert grid to spherical coordinates
    roms_x = -(roms_lat+90)*cos(roms_lon*deg2rad+pi/2)
    roms_y = (roms_lat+90)*sin(roms_lon*deg2rad+pi/2)
    # Find centre in spherical coordinates
    x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2)
    y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2)
    # Build a regular x-y grid and select the missing circle
    x_reg_roms, y_reg_roms = meshgrid(linspace(-nbdry, nbdry, num=1000), linspace(-nbdry, nbdry, num=1000))
    land_circle = zeros(shape(x_reg_roms))
    land_circle = ma.masked_where(sqrt((x_reg_roms-x_c)**2 + (y_reg_roms-y_c)**2) > radius, land_circle)
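    # (land_circle is left unmasked only within the given radius of the
    # projected centre point, i.e. over the circular gap near the pole that the
    # ROMS grid does not cover; everything further out is masked.)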

    print 'Building FESOM low-res mesh'
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True)
    # Make patches for all elements, ice shelf elements, and open ocean elements
    patches_lr = []
    patches_shelf_lr = []
    patches_ocn_lr = []
    for elm in elements_lr:
        coord = transpose(vstack((elm.x, elm.y)))
        patches_lr.append(Polygon(coord, True, linewidth=0.))
        if elm.cavity:
            patches_shelf_lr.append(Polygon(coord, True, linewidth=0.))
        else:
            patches_ocn_lr.append(Polygon(coord, True, linewidth=0.))

    print 'Building FESOM high-res mesh'
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True)
    patches_hr = []
    patches_shelf_hr = []
    patches_ocn_hr = []
    for elm in elements_hr:
        coord = transpose(vstack((elm.x, elm.y)))
        patches_hr.append(Polygon(coord, True, linewidth=0.))
        if elm.cavity:
            patches_shelf_hr.append(Polygon(coord, True, linewidth=0.))
        else:
            patches_ocn_hr.append(Polygon(coord, True, linewidth=0.))

    for var in var_names:
        print 'Processing variable ' + var

        print 'Reading ROMS fields'
        if var == 'draft':
            # Swap sign on existing zice field; nothing more to read
            roms_data = -1*roms_zice
        elif var == 'bathy':
            # Point to h field and mask out land mask; nothing more to read
            roms_data = ma.masked_where(roms_mask==0, roms_h)
        elif var == 'wct':
            # Add h (positive) and zice (negative); nothing more to read
            roms_data = roms_h + roms_zice
        else:
            id = Dataset(roms_file, 'r')
            if var == 'melt':
                # Convert from m/s to m/y
                roms_data = id.variables['m'][0,:,:]*sec_per_year
            elif var in ['temp', 'salt']:
                # Bottom layer
                roms_data = id.variables[var][0,0,:,:]
            elif var == 'vel':
                # Read full 3D u and v
                u_3d_tmp = id.variables['u'][0,:,:,:]
                v_3d_tmp = id.variables['v'][0,:,:,:]
                # Get integrands on 3D grid; we only care about dz
                dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
                # Unrotate each vertical level
                u_3d = ma.empty(shape(dz))
                v_3d = ma.empty(shape(dz))
                num_lat_u = size(u_3d_tmp,1)
                num_lon_u = size(u_3d_tmp,2)
                num_lat_v = size(v_3d_tmp,1)
                num_lon_v = size(v_3d_tmp,2)
                for k in range(N):
                    # Extend into land mask before interpolation to rho-grid so
                    # the land mask doesn't change in the final plot
                    for j in range(1,num_lat_u-1):
                        for i in range(1,num_lon_u-1):
                            # Check for masked points
                            if u_3d_tmp[k,j,i] is ma.masked:
                                # Look at 4 neighbours
                                neighbours = ma.array([u_3d_tmp[k,j-1,i], u_3d_tmp[k,j,i-1], u_3d_tmp[k,j+1,i], u_3d_tmp[k,j,i+1]])
                                # Find how many of them are unmasked
                                num_unmasked = MaskedArray.count(neighbours)
                                if num_unmasked > 0:
                                    # There is at least one unmasked neighbour;
                                    # set u_3d_tmp to their average
                                    u_3d_tmp[k,j,i] = sum(neighbours)/num_unmasked
                    # Repeat for v
                    for j in range(1,num_lat_v-1):
                        for i in range(1,num_lon_v-1):
                            if v_3d_tmp[k,j,i] is ma.masked:
                                neighbours = ma.array([v_3d_tmp[k,j-1,i], v_3d_tmp[k,j,i-1], v_3d_tmp[k,j+1,i], v_3d_tmp[k,j,i+1]])
                                num_unmasked = MaskedArray.count(neighbours)
                                if num_unmasked > 0:
                                    v_3d_tmp[k,j,i] = sum(neighbours)/num_unmasked
                    # Interpolate to rho grid and rotate
                    u_k, v_k = rotate_vector_roms(u_3d_tmp[k,:,:], v_3d_tmp[k,:,:], roms_angle)
                    u_3d[k,:,:] = u_k
                    v_3d[k,:,:] = v_k
                # Vertically average u and v
                u_rho = sum(u_3d*dz, axis=0)/sum(dz, axis=0)
                v_rho = sum(v_3d*dz, axis=0)/sum(dz, axis=0)    
                # Get speed
                roms_data = sqrt(u_rho**2 + v_rho**2)
                # Mask out land
                u_rho = ma.masked_where(roms_mask==0, u_rho)
                v_rho = ma.masked_where(roms_mask==0, v_rho)
                roms_data = ma.masked_where(roms_mask==0, roms_data)
            id.close()
        if var in ['draft', 'melt', 'wct']:
            # Mask out open ocean
            roms_data = ma.masked_where(roms_zice==0, roms_data)

        print 'Reading FESOM low-res fields'
        if var not in ['draft', 'bathy', 'wct']:
            if var == 'melt':
                id = Dataset(fesom_file_lr_i, 'r')
                # Convert from m/s to m/y
                node_data_lr = id.variables['wnet'][0,:]*sec_per_year
            elif var in ['temp', 'salt']:
                id = Dataset(fesom_file_lr_o, 'r')
                # Read full 3D field for now
                node_data_lr = id.variables[var][0,:]
            elif var == 'vel':
                id = Dataset(fesom_file_lr_o, 'r')
                # The overlaid vectors are based on nodes not elements, so many
                # of the fesom_grid data structures fail to apply and we need to
                # read some of the FESOM grid files again.
                # Read the cavity flag for each 2D surface node
                fesom_cavity_lr = []
                f = open(fesom_mesh_path_lr + 'cavity_flag_nod2d.out', 'r')
                for line in f:
                    tmp = int(line)
                    if tmp == 1:
                        fesom_cavity_lr.append(True)
                    elif tmp == 0:
                        fesom_cavity_lr.append(False)
                    else:
                        print 'Problem'
                        #return
                f.close()
                # Save the number of 2D nodes
                fesom_n2d_lr = len(fesom_cavity_lr)
                # Read rotated lat and lon for each node, also depth
                f = open(fesom_mesh_path_lr + 'nod3d.out', 'r')
                f.readline()
                rlon_lr = []
                rlat_lr = []
                node_depth_lr = []
                for line in f:
                    tmp = line.split()
                    lon_tmp = float(tmp[1])
                    lat_tmp = float(tmp[2])
                    node_depth_tmp = -1*float(tmp[3])
                    if lon_tmp < -180:
                        lon_tmp += 360
                    elif lon_tmp > 180:
                        lon_tmp -= 360
                    rlon_lr.append(lon_tmp)
                    rlat_lr.append(lat_tmp)
                    node_depth_lr.append(node_depth_tmp)
                f.close()
                # For lat and lon, only care about the 2D nodes (the first
                # fesom_n2d indices)
                rlon_lr = array(rlon_lr[0:fesom_n2d_lr])
                rlat_lr = array(rlat_lr[0:fesom_n2d_lr])
                node_depth_lr = array(node_depth_lr)
                # Unrotate longitude
                fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr)
                # Calculate polar coordinates of each node
                fesom_x_lr = -(fesom_lat_lr+90)*cos(fesom_lon_lr*deg2rad+pi/2)
                fesom_y_lr = (fesom_lat_lr+90)*sin(fesom_lon_lr*deg2rad+pi/2)
                # Read lists of which nodes are directly below which
                f = open(fesom_mesh_path_lr + 'aux3d.out', 'r')
                max_num_layers_lr = int(f.readline())
                node_columns_lr = zeros([fesom_n2d_lr, max_num_layers_lr])
                for n in range(fesom_n2d_lr):
                    for k in range(max_num_layers_lr):
                        node_columns_lr[n,k] = int(f.readline())
                node_columns_lr = node_columns_lr.astype(int)
                f.close()
                # Now we can actually read the data
                # Read full 3D field for both u and v
                node_ur_3d_lr = id.variables['u'][0,:]
                node_vr_3d_lr = id.variables['v'][0,:]
                # Vertically average
                node_ur_lr = zeros(fesom_n2d_lr)
                node_vr_lr = zeros(fesom_n2d_lr)
                for n in range(fesom_n2d_lr):
                    # Integrate udz, vdz, and dz over this water column
                    udz_col = 0
                    vdz_col = 0
                    dz_col = 0
                    for k in range(max_num_layers_lr-1):
                        if node_columns_lr[n,k+1] == -999:
                            # Reached the bottom
                            break
                        # Trapezoidal rule
                        top_id = node_columns_lr[n,k]
                        bot_id = node_columns_lr[n,k+1]
                        dz_tmp = node_depth_lr[bot_id-1] - node_depth_lr[top_id-1]
                        udz_col += 0.5*(node_ur_3d_lr[top_id-1]+node_ur_3d_lr[bot_id-1])*dz_tmp
                        vdz_col += 0.5*(node_vr_3d_lr[top_id-1]+node_vr_3d_lr[bot_id-1])*dz_tmp
                        dz_col += dz_tmp
                    # Convert from integrals to averages
                    node_ur_lr[n] = udz_col/dz_col
                    node_vr_lr[n] = vdz_col/dz_col
                # Unrotate
                node_u_lr, node_v_lr = unrotate_vector(rlon_lr, rlat_lr, node_ur_lr, node_vr_lr)
                # Calculate speed
                node_data_lr = sqrt(node_u_lr**2 + node_v_lr**2)
            id.close()
        # Calculate given field at each element
        fesom_data_lr = []
        for elm in elements_lr:
            # For each element, append the mean value for the 3 component Nodes
            # Restrict to ice shelf cavities for draft, melt, wct
            if elm.cavity or var not in ['draft', 'melt', 'wct']:
                if var == 'draft':
                    # Ice shelf draft is depth of surface layer
                    fesom_data_lr.append(mean([elm.nodes[0].depth, elm.nodes[1].depth, elm.nodes[2].depth]))
                elif var == 'bathy':
                    # Bathymetry is depth of bottom layer
                    fesom_data_lr.append(mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]))
                elif var == 'wct':
                    # Water column thickness is depth of bottom layer minus
                    # depth of surface layer
                    fesom_data_lr.append(mean([elm.nodes[0].find_bottom().depth - elm.nodes[0].depth, elm.nodes[1].find_bottom().depth - elm.nodes[1].depth, elm.nodes[2].find_bottom().depth - elm.nodes[2].depth]))
                elif var in ['melt', 'vel']:
                    # Surface nodes
                    fesom_data_lr.append(mean([node_data_lr[elm.nodes[0].id], node_data_lr[elm.nodes[1].id], node_data_lr[elm.nodes[2].id]]))
                elif var in ['temp', 'salt']:
                    # Bottom nodes
                    fesom_data_lr.append(mean([node_data_lr[elm.nodes[0].find_bottom().id], node_data_lr[elm.nodes[1].find_bottom().id], node_data_lr[elm.nodes[2].find_bottom().id]]))

        print 'Reading FESOM high-res fields'
        # As before
        if var not in ['draft', 'bathy', 'wct']:
            if var == 'melt':
                id = Dataset(fesom_file_hr_i, 'r')
                node_data_hr = id.variables['wnet'][0,:]*sec_per_year
            elif var in ['temp', 'salt']:
                id = Dataset(fesom_file_hr_o, 'r')
                node_data_hr = id.variables[var][0,:]
            elif var == 'vel':
                id = Dataset(fesom_file_hr_o, 'r')
                fesom_cavity_hr = []
                f = open(fesom_mesh_path_hr + 'cavity_flag_nod2d.out', 'r')
                for line in f:
                    tmp = int(line)
                    if tmp == 1:
                        fesom_cavity_hr.append(True)
                    elif tmp == 0:
                        fesom_cavity_hr.append(False)
                    else:
                        print 'Problem'
                        #return
                f.close()
                fesom_n2d_hr = len(fesom_cavity_hr)
                f = open(fesom_mesh_path_hr + 'nod3d.out', 'r')
                f.readline()
                rlon_hr = []
                rlat_hr = []
                node_depth_hr = []
                for line in f:
                    tmp = line.split()
                    lon_tmp = float(tmp[1])
                    lat_tmp = float(tmp[2])
                    node_depth_tmp = -1*float(tmp[3])
                    if lon_tmp < -180:
                        lon_tmp += 360
                    elif lon_tmp > 180:
                        lon_tmp -= 360
                    rlon_hr.append(lon_tmp)
                    rlat_hr.append(lat_tmp)
                    node_depth_hr.append(node_depth_tmp)
                f.close()
                rlon_hr = array(rlon_hr[0:fesom_n2d_hr])
                rlat_hr = array(rlat_hr[0:fesom_n2d_hr])
                node_depth_hr = array(node_depth_hr)
                fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr)
                fesom_x_hr = -(fesom_lat_hr+90)*cos(fesom_lon_hr*deg2rad+pi/2)
                fesom_y_hr = (fesom_lat_hr+90)*sin(fesom_lon_hr*deg2rad+pi/2)
                f = open(fesom_mesh_path_hr + 'aux3d.out', 'r')
                max_num_layers_hr = int(f.readline())
                node_columns_hr = zeros([fesom_n2d_hr, max_num_layers_hr])
                for n in range(fesom_n2d_hr):
                    for k in range(max_num_layers_hr):
                        node_columns_hr[n,k] = int(f.readline())
                node_columns_hr = node_columns_hr.astype(int)
                f.close()
                node_ur_3d_hr = id.variables['u'][0,:]
                node_vr_3d_hr = id.variables['v'][0,:]
                node_ur_hr = zeros(fesom_n2d_hr)
                node_vr_hr = zeros(fesom_n2d_hr)
                for n in range(fesom_n2d_hr):
                    udz_col = 0
                    vdz_col = 0
                    dz_col = 0
                    for k in range(max_num_layers_hr-1):
                        if node_columns_hr[n,k+1] == -999:
                            break
                        top_id = node_columns_hr[n,k]
                        bot_id = node_columns_hr[n,k+1]
                        dz_tmp = node_depth_hr[bot_id-1] - node_depth_hr[top_id-1]
                        udz_col += 0.5*(node_ur_3d_hr[top_id-1]+node_ur_3d_hr[bot_id-1])*dz_tmp
                        vdz_col += 0.5*(node_vr_3d_hr[top_id-1]+node_vr_3d_hr[bot_id-1])*dz_tmp
                        dz_col += dz_tmp
                    node_ur_hr[n] = udz_col/dz_col
                    node_vr_hr[n] = vdz_col/dz_col
                node_u_hr, node_v_hr = unrotate_vector(rlon_hr, rlat_hr, node_ur_hr, node_vr_hr)
                node_data_hr = sqrt(node_u_hr**2 + node_v_hr**2)
            id.close()
        fesom_data_hr = []
        for elm in elements_hr:
            if elm.cavity or var not in ['draft', 'melt', 'wct']:
                if var == 'draft':
                    fesom_data_hr.append(mean([elm.nodes[0].depth, elm.nodes[1].depth, elm.nodes[2].depth]))
                elif var == 'bathy':
                    fesom_data_hr.append(mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]))
                elif var == 'wct':
                    fesom_data_hr.append(mean([elm.nodes[0].find_bottom().depth - elm.nodes[0].depth, elm.nodes[1].find_bottom().depth - elm.nodes[1].depth, elm.nodes[2].find_bottom().depth - elm.nodes[2].depth]))
                elif var in ['melt', 'vel']:
                    fesom_data_hr.append(mean([node_data_hr[elm.nodes[0].id], node_data_hr[elm.nodes[1].id], node_data_hr[elm.nodes[2].id]]))
                elif var in ['temp', 'salt']:
                    fesom_data_hr.append(mean([node_data_hr[elm.nodes[0].find_bottom().id], node_data_hr[elm.nodes[1].find_bottom().id], node_data_hr[elm.nodes[2].find_bottom().id]]))

        # Loop over regions
        for index in range(num_regions):
            print 'Processing ' + region_names[index]
            # Set up a grey square for FESOM to fill the background with land
            x_reg_fesom, y_reg_fesom = meshgrid(linspace(x_min[index], x_max[index], num=100), linspace(y_min[index], y_max[index], num=100))
            land_square = zeros(shape(x_reg_fesom))
            # Find bounds on variable in this region, for both ROMS and FESOM
            # Start with ROMS
            loc = (roms_x >= x_min[index])*(roms_x <= x_max[index])*(roms_y >= y_min[index])*(roms_y <= y_max[index])
            var_min = amin(roms_data[loc])
            var_max = amax(roms_data[loc])
            # Modify with FESOM
            # Low-res
            i = 0
            for elm in elements_lr:
                if elm.cavity or var not in ['draft', 'melt', 'wct']:
                    if any(elm.x >= x_min[index]) and any(elm.x <= x_max[index]) and any(elm.y >= y_min[index]) and any(elm.y <= y_max[index]):
                        if fesom_data_lr[i] < var_min:
                            var_min = fesom_data_lr[i]
                        if fesom_data_lr[i] > var_max:
                            var_max = fesom_data_lr[i]
                    i += 1
            # High-res
            i = 0
            for elm in elements_hr:
                if elm.cavity or var not in ['draft', 'melt', 'wct']:
                    if any(elm.x >= x_min[index]) and any(elm.x <= x_max[index]) and any(elm.y >= y_min[index]) and any(elm.y <= y_max[index]):
                        if fesom_data_hr[i] < var_min:
                            var_min = fesom_data_hr[i]
                        if fesom_data_hr[i] > var_max:
                            var_max = fesom_data_hr[i]
                    i += 1
            if var == 'melt':
                # Special colour map
                if var_min < 0:
                    # There is refreezing here; include blue for elements < 0
                    cmap_vals = array([var_min, 0, 0.25*var_max, 0.5*var_max, 0.75*var_max, var_max])
                    cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
                    cmap_vals_norm = (cmap_vals - var_min)/(var_max - var_min)
                    cmap_list = []
                    for i in range(size(cmap_vals)):
                        cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                    mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list)
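                    # (LinearSegmentedColormap.from_list accepts a list of
                    # (position, colour) pairs with positions normalised to
                    # [0, 1]; the normalisation above pins white to zero melt.)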
                else:
                    # No refreezing
                    cmap_vals = array([0, 0.25*var_max, 0.5*var_max, 0.75*var_max, var_max])
                    cmap_colors = [(1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
                    cmap_vals_norm = cmap_vals/var_max
                    cmap_list = []
                    for i in range(size(cmap_vals)):
                        cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                    mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list)
                colour_map = mf_cmap            
            elif var == 'vel':
                colour_map = 'cool'
            else:
                colour_map = 'jet'
            if var == 'vel':
                # Make vectors for overlay
                # Set up bins (edges)
                x_bins = linspace(x_min[index], x_max[index], num=num_bins+1)
                y_bins = linspace(y_min[index], y_max[index], num=num_bins+1)
                # Calculate centres of bins (for plotting)
                x_centres = 0.5*(x_bins[:-1] + x_bins[1:])
                y_centres = 0.5*(y_bins[:-1] + y_bins[1:])
                # ROMS
                # First set up arrays to integrate velocity in each bin
                # Simple averaging of all the points inside each bin
                roms_u = zeros([size(y_centres), size(x_centres)])
                roms_v = zeros([size(y_centres), size(x_centres)])
                roms_num_pts = zeros([size(y_centres), size(x_centres)])
                # First convert to polar coordinates, rotate to account for
                # longitude in circumpolar projection, and convert back to vector
                # components
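                # With speed s, direction theta = arctan2(v, u), and longitude
                # lon (degrees), the components on the projected x-y plane are
                #   u' = s*cos(theta - lon*deg2rad),  v' = s*sin(theta - lon*deg2rad)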
                theta_roms = arctan2(v_rho, u_rho)
                theta_circ_roms = theta_roms - roms_lon*deg2rad
                u_circ_roms = roms_data*cos(theta_circ_roms) # roms_data is speed
                v_circ_roms = roms_data*sin(theta_circ_roms)
                # Loop over all points (can't find a better way to do this)
                for j in range(size(roms_data,0)):
                    for i in range(size(roms_data,1)):
                        # Make sure data isn't masked (i.e. land)
                        if u_circ_roms[j,i] is not ma.masked:
                            # Check if we're in the region of interest
                            if roms_x[j,i] > x_min[index] and roms_x[j,i] < x_max[index] and roms_y[j,i] > y_min[index] and roms_y[j,i] < y_max[index]:
                                # Figure out which bins this falls into
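                                # nonzero(x_bins > x)[0][0] is the first bin edge
                                # strictly greater than x, so subtracting 1 gives
                                # the index of the bin containing x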
                                x_index = nonzero(x_bins > roms_x[j,i])[0][0]-1
                                y_index = nonzero(y_bins > roms_y[j,i])[0][0]-1
                                # Integrate
                                roms_u[y_index, x_index] += u_circ_roms[j,i]
                                roms_v[y_index, x_index] += v_circ_roms[j,i]
                                roms_num_pts[y_index, x_index] += 1
                # Convert from sums to averages
                # First mask out points with no data
                roms_u = ma.masked_where(roms_num_pts==0, roms_u)
                roms_v = ma.masked_where(roms_num_pts==0, roms_v)
                # Divide everything else by the number of points
                flag = roms_num_pts > 0
                roms_u[flag] = roms_u[flag]/roms_num_pts[flag]
                roms_v[flag] = roms_v[flag]/roms_num_pts[flag]
                # FESOM low-res
                fesom_u_lr = zeros([size(y_centres), size(x_centres)])
                fesom_v_lr = zeros([size(y_centres), size(x_centres)])
                fesom_num_pts_lr = zeros([size(y_centres), size(x_centres)])
                theta_fesom_lr = arctan2(node_v_lr, node_u_lr)
                theta_circ_fesom_lr = theta_fesom_lr - fesom_lon_lr*deg2rad
                u_circ_fesom_lr = node_data_lr*cos(theta_circ_fesom_lr) # node_data is speed
                v_circ_fesom_lr = node_data_lr*sin(theta_circ_fesom_lr)
                # Loop over 2D nodes to fill in the velocity bins as before
                for n in range(fesom_n2d_lr):
                    if fesom_x_lr[n] > x_min[index] and fesom_x_lr[n] < x_max[index] and fesom_y_lr[n] > y_min[index] and fesom_y_lr[n] < y_max[index]:
                        x_index = nonzero(x_bins > fesom_x_lr[n])[0][0]-1
                        y_index = nonzero(y_bins > fesom_y_lr[n])[0][0]-1
                        fesom_u_lr[y_index, x_index] += u_circ_fesom_lr[n]
                        fesom_v_lr[y_index, x_index] += v_circ_fesom_lr[n]
                        fesom_num_pts_lr[y_index, x_index] += 1
                fesom_u_lr = ma.masked_where(fesom_num_pts_lr==0, fesom_u_lr)
                fesom_v_lr = ma.masked_where(fesom_num_pts_lr==0, fesom_v_lr)
                flag = fesom_num_pts_lr > 0
                fesom_u_lr[flag] = fesom_u_lr[flag]/fesom_num_pts_lr[flag]
                fesom_v_lr[flag] = fesom_v_lr[flag]/fesom_num_pts_lr[flag]
                # FESOM high-res
                fesom_u_hr = zeros([size(y_centres), size(x_centres)])
                fesom_v_hr = zeros([size(y_centres), size(x_centres)])
                fesom_num_pts_hr = zeros([size(y_centres), size(x_centres)])
                theta_fesom_hr = arctan2(node_v_hr, node_u_hr)
                theta_circ_fesom_hr = theta_fesom_hr - fesom_lon_hr*deg2rad
                u_circ_fesom_hr = node_data_hr*cos(theta_circ_fesom_hr) # node_data is speed
                v_circ_fesom_hr = node_data_hr*sin(theta_circ_fesom_hr)
                # Loop over 2D nodes to fill in the velocity bins as before
                for n in range(fesom_n2d_hr):
                    if fesom_x_hr[n] > x_min[index] and fesom_x_hr[n] < x_max[index] and fesom_y_hr[n] > y_min[index] and fesom_y_hr[n] < y_max[index]:
                        x_index = nonzero(x_bins > fesom_x_hr[n])[0][0]-1
                        y_index = nonzero(y_bins > fesom_y_hr[n])[0][0]-1
                        fesom_u_hr[y_index, x_index] += u_circ_fesom_hr[n]
                        fesom_v_hr[y_index, x_index] += v_circ_fesom_hr[n]
                        fesom_num_pts_hr[y_index, x_index] += 1
                fesom_u_hr = ma.masked_where(fesom_num_pts_hr==0, fesom_u_hr)
                fesom_v_hr = ma.masked_where(fesom_num_pts_hr==0, fesom_v_hr)
                flag = fesom_num_pts_hr > 0
                fesom_u_hr[flag] = fesom_u_hr[flag]/fesom_num_pts_hr[flag]
                fesom_v_hr[flag] = fesom_v_hr[flag]/fesom_num_pts_hr[flag]
            # Plot
            fig = figure(figsize=(20, ysize[index]))
            fig.patch.set_facecolor('white')
            # MetROMS
            ax = fig.add_subplot(1,3,1, aspect='equal')
            # First shade land and zice in grey
            contourf(roms_x, roms_y, land_zice, 1, colors=(('0.6', '0.6', '0.6')))
            # Fill in the missing circle
            contourf(x_reg_roms, y_reg_roms, land_circle, 1, colors=(('0.6', '0.6', '0.6')))
            # Now shade the data
            pcolor(roms_x, roms_y, roms_data, vmin=var_min, vmax=var_max, cmap=colour_map)
            if var == 'vel':
                # Overlay vectors
                quiver(x_centres, y_centres, roms_u, roms_v, scale=1.5, headwidth=6, headlength=7, color='black')
            xlim([x_min[index], x_max[index]])
            ylim([y_min[index], y_max[index]])
            axis('off')
            title('MetROMS', fontsize=24)
            # FESOM low-res
            ax = fig.add_subplot(1,3,2, aspect='equal')
            # Start with land background
            contourf(x_reg_fesom, y_reg_fesom, land_square, 1, colors=(('0.6', '0.6', '0.6')))
            # Add elements
            if var in ['draft', 'melt', 'wct']:
                # Ice shelf elements only
                img = PatchCollection(patches_shelf_lr, cmap=colour_map)
            else:
                img = PatchCollection(patches_lr, cmap=colour_map)
            img.set_array(array(fesom_data_lr))
            img.set_edgecolor('face')
            img.set_clim(vmin=var_min, vmax=var_max)
            ax.add_collection(img)
            if var in ['draft', 'melt', 'wct']:
                # Mask out the open ocean in white
                overlay = PatchCollection(patches_ocn_lr, facecolor=(1,1,1))
                overlay.set_edgecolor('face')
                ax.add_collection(overlay)
            if var == 'vel':
                # Overlay vectors
                quiver(x_centres, y_centres, fesom_u_lr, fesom_v_lr, scale=1.5, headwidth=6, headlength=7, color='black')
            xlim([x_min[index], x_max[index]])
            ylim([y_min[index], y_max[index]])
            axis('off')
            title('FESOM (low-res)', fontsize=24)
            # FESOM high-res
            ax = fig.add_subplot(1,3,3, aspect='equal')
            contourf(x_reg_fesom, y_reg_fesom, land_square, 1, colors=(('0.6', '0.6', '0.6')))
            if var in ['draft', 'melt', 'wct']:
                # Ice shelf elements only
                img = PatchCollection(patches_shelf_hr, cmap=colour_map)
            else:
                img = PatchCollection(patches_hr, cmap=colour_map)
            img.set_array(array(fesom_data_hr))
            img.set_edgecolor('face')
            img.set_clim(vmin=var_min, vmax=var_max)
            ax.add_collection(img)
            if var in ['draft', 'melt', 'wct']:
                overlay = PatchCollection(patches_ocn_hr, facecolor=(1,1,1))
                overlay.set_edgecolor('face')
                ax.add_collection(overlay)
            if var == 'vel':
                # Overlay vectors
                quiver(x_centres, y_centres, fesom_u_hr, fesom_v_hr, scale=1.5, headwidth=6, headlength=7, color='black')
            xlim([x_min[index], x_max[index]])
            ylim([y_min[index], y_max[index]])
            axis('off')
            title('FESOM (high-res)', fontsize=24)
            # Colourbar on the right
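            # add_axes takes [left, bottom, width, height] as fractions of the
            # figure size, so this places a narrow vertical axis near the right edge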
            cbaxes = fig.add_axes([0.92, 0.2, 0.01, 0.6])
            cbar = colorbar(img, cax=cbaxes)
            cbar.ax.tick_params(labelsize=20)
            # Main title
            if var == 'draft':
                title_string = ' draft (m)'
            elif var == 'bathy':
                title_string = ' bathymetry (m)'
            elif var == 'wct':
                title_string = ' water column thickness (m)'
            elif var == 'melt':
                title_string = ' melt rate (m/y)'
            elif var == 'temp':
                title_string = r' bottom water temperature ($^{\circ}$C)'
            elif var == 'salt':
                title_string = ' bottom water salinity (psu)'
            elif var == 'vel':
                title_string = ' vertically averaged ocean velocity (m/s)'
            suptitle(region_names[index] + title_string, fontsize=30)
            subplots_adjust(wspace=0.05)
            #fig.show()
            fig.savefig(fig_heads[index] + '_' + var + '.png')
Example 16
def mip_ts_distribution (roms_grid, roms_file, fesom_mesh_path_lr, fesom_file_lr, fesom_mesh_path_hr, fesom_file_hr):

    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins = 1000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 35.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    min_salt_plot = 33.25
    max_salt_plot = 35.1
    min_temp_plot = -3
    max_temp_plot = 3.8
    # FESOM grid generation parameters
    circumpolar = False
    cross_180 = False
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31

    print('Setting up bins')
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:])
    # Set up 2D arrays of temperature bins x salinity bins to hold average
    # depth of water masses, weighted by volume
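    # i.e. for each (T,S) class, accumulate sum(depth*dV) and sum(dV) separately;
    # their ratio at the end gives the volume-weighted mean depth of water in
    # that class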
    ts_vals_roms = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_fesom_lr = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_fesom_hr = zeros([size(temp_centres), size(salt_centres)])
    # Also arrays to integrate volume
    volume_roms = zeros([size(temp_centres), size(salt_centres)])
    volume_fesom_lr = zeros([size(temp_centres), size(salt_centres)])
    volume_fesom_hr = zeros([size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity as seen by
    # each sea ice model
    freezing_pt_roms = salt_centres/(-18.48 + 18.48/1e3*salt_centres)
    freezing_pt_fesom = -0.0575*salt_centres + 1.7105e-3*sqrt(salt_centres**3) - 2.155e-4*salt_centres**2
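    # (The second expression is the Millero-type freezing-point polynomial at
    # zero pressure; the first is assumed here to be the rational form used by
    # the MetROMS sea ice component.)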
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres)))-1000
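    # unesco is assumed to return density in kg/m^3 at zero pressure;
    # subtracting 1000 gives the density anomaly (sigma units) for contouring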
    # Density contours to plot
    density_lev = arange(26.6, 28.4, 0.2)

    print('Processing ROMS')
    # Read ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_zice = id.variables['zice'][:,:]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get integrands on 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get volume integrand
    dV = roms_dx*roms_dy*roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0,:,:,:]
    roms_salt = id.variables['salt'][0,:,:,:]
    id.close()
    # Loop over 2D grid boxes
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask
            if roms_temp[0,j,i] is ma.masked:
                continue
            # Check if we're in the region of interest
            if roms_lat[j,i] < nbdry:
                # Loop downward
                for k in range(N):
                    # Figure out which bins this falls into
                    temp_index = nonzero(temp_bins > roms_temp[k,j,i])[0][0] - 1
                    salt_index = nonzero(salt_bins > roms_salt[k,j,i])[0][0] - 1
                    # Integrate depth*dV in this bin
                    ts_vals_roms[temp_index, salt_index] += -roms_z[k,j,i]*dV[k,j,i]
                    volume_roms[temp_index, salt_index] += dV[k,j,i]
    # Mask bins with zero volume
    ts_vals_roms = ma.masked_where(volume_roms==0, ts_vals_roms)
    volume_roms = ma.masked_where(volume_roms==0, volume_roms)
    # Convert depths from integrals to volume-averages
    ts_vals_roms /= volume_roms

    print('Processing low-res FESOM')
    # Make FESOM grid elements
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar, cross_180)
    # Read temperature and salinity at each 3D node
    id = Dataset(fesom_file_lr, 'r')
    fesom_temp_lr = id.variables['temp'][0,:]
    fesom_salt_lr = id.variables['salt'][0,:]
    id.close()
    # Loop over elements
    for elm in elements_lr:
        # See if we're in the region of interest
        if all(elm.lat < nbdry):
            # Get area of 2D triangle
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    # We've reached the bottom
                    break
                # Calculate average temperature, salinity, depth, and layer
                # thickness over this 3D triangular prism
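                # Each prism is bounded by a horizontal triangle and the triangle
                # one layer down, so T, S, and depth are averaged over its 6 corner
                # nodes, and its volume is approximated as the triangle area times
                # the mean of the 3 vertical edge lengths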
                temp_vals = []
                salt_vals = []
                depth_vals = []
                dz = []
                for i in range(3):
                    # Average temperature over 6 nodes
                    temp_vals.append(fesom_temp_lr[nodes[i].id])
                    temp_vals.append(fesom_temp_lr[nodes[i].below.id])
                    # Average salinity over 6 nodes
                    salt_vals.append(fesom_salt_lr[nodes[i].id])
                    salt_vals.append(fesom_salt_lr[nodes[i].below.id])
                    # Average depth over 6 nodes
                    depth_vals.append(nodes[i].depth)
                    depth_vals.append(nodes[i].below.depth)
                    # Average dz over 3 vertical edges
                    dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                    # Get ready for next repetition of loop
                    nodes[i] = nodes[i].below
                temp_elm = mean(array(temp_vals))
                salt_elm = mean(array(salt_vals))
                depth_elm = mean(array(depth_vals))
                # Calculate volume of 3D triangular prism
                volume = area*mean(array(dz))
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                # Integrate depth*volume in this bin
                ts_vals_fesom_lr[temp_index, salt_index] += depth_elm*volume
                volume_fesom_lr[temp_index, salt_index] += volume
    # Mask bins with zero volume
    ts_vals_fesom_lr = ma.masked_where(volume_fesom_lr==0, ts_vals_fesom_lr)
    volume_fesom_lr = ma.masked_where(volume_fesom_lr==0, volume_fesom_lr)
    # Convert depths from integrals to volume-averages
    ts_vals_fesom_lr /= volume_fesom_lr

    print('Processing high-res FESOM')
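    # Repeat the binning procedure above for the high-res FESOM mesh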
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar, cross_180)
    id = Dataset(fesom_file_hr, 'r')
    fesom_temp_hr = id.variables['temp'][0,:]
    fesom_salt_hr = id.variables['salt'][0,:]
    id.close()
    for elm in elements_hr:
        if all(elm.lat < nbdry):
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    break
                temp_vals = []
                salt_vals = []
                depth_vals = []
                dz = []
                for i in range(3):
                    temp_vals.append(fesom_temp_hr[nodes[i].id])
                    temp_vals.append(fesom_temp_hr[nodes[i].below.id])
                    salt_vals.append(fesom_salt_hr[nodes[i].id])
                    salt_vals.append(fesom_salt_hr[nodes[i].below.id])
                    depth_vals.append(nodes[i].depth)
                    depth_vals.append(nodes[i].below.depth)
                    dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                    nodes[i] = nodes[i].below
                temp_elm = mean(array(temp_vals))
                salt_elm = mean(array(salt_vals))
                depth_elm = mean(array(depth_vals))
                volume = area*mean(array(dz))
                temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                ts_vals_fesom_hr[temp_index, salt_index] += depth_elm*volume
                volume_fesom_hr[temp_index, salt_index] += volume
    ts_vals_fesom_hr = ma.masked_where(volume_fesom_hr==0, ts_vals_fesom_hr)
    volume_fesom_hr = ma.masked_where(volume_fesom_hr==0, volume_fesom_hr)
    ts_vals_fesom_hr /= volume_fesom_hr

    # Find the maximum depth for plotting
    max_depth = amax(array([amax(ts_vals_roms), amax(ts_vals_fesom_lr), amax(ts_vals_fesom_hr)]))
    # Make a nonlinear scale
    bounds = linspace(0, max_depth**(1.0/2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)
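    # Spacing the boundaries evenly in depth**(1/2.5) and then raising them back
    # to the 2.5 power concentrates colour resolution at shallow depths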
    # Manually chosen positions for labelling the density contours
    manual_locations = [(33.4, 3.0), (33.65, 3.0), (33.9, 3.0), (34.2, 3.0), (34.45, 3.5), (34.65, 3.25), (34.9, 3.0), (35, 1.5)]

    print "Plotting"
    fig = figure(figsize=(20,9))
    # ROMS
    ax = fig.add_subplot(1, 3, 1)
    pcolor(salt_centres, temp_centres, ts_vals_roms, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
    # Add surface freezing point line
    plot(salt_centres, freezing_pt_roms, color='black', linestyle='dashed')
    # Add density contours
    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=20)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=20)
    title('MetROMS', fontsize=24)
    # FESOM low-res
    ax = fig.add_subplot(1, 3, 2)
    img = pcolor(salt_centres, temp_centres, ts_vals_fesom_lr, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
    plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed')
    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=20)
    title('FESOM (low-res)', fontsize=24)
    # FESOM high-res
    ax = fig.add_subplot(1, 3, 3)
    img = pcolor(salt_centres, temp_centres, ts_vals_fesom_hr, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
    plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed')
    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=20)
    title('FESOM (high-res)', fontsize=24)
    # Add a colourbar on the right
    cbaxes = fig.add_axes([0.93, 0.2, 0.02, 0.6])
    cbar = colorbar(img, cax=cbaxes, ticks=[0,50,100,200,500,1000,2000,4000])
    cbar.ax.tick_params(labelsize=18)
    # Add the main title
    suptitle(r'Water masses south of 65$^{\circ}$S: depth (m), 2002-2016 average', fontsize=30)
    subplots_adjust(wspace=0.1)
    fig.show()
    fig.savefig('ts_distribution_orig.png')
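# Example invocation (the file and mesh paths below are hypothetical placeholders):
# mip_ts_distribution('roms_grid.nc', 'roms_avg.nc',
#                     '/path/to/fesom_mesh_low_res/', 'fesom_lr_avg.nc',
#                     '/path/to/fesom_mesh_high_res/', 'fesom_hr_avg.nc')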