Example #1
def bugs_ts_distribution(grid_file, ini_file, upwind_file, akima_file,
                         split_file):

    # Only consider regions with bathymetry deeper than 1500 m, not in ice
    # shelf cavities, and cells deeper than 200 m
    h0 = 1500
    z0 = 200
    # Bounds on temperature and salinity bins
    min_salt = 33.8
    max_salt = 36.6
    min_temp = -2
    max_temp = 21
    # Bounds to actually plot
    min_salt_plot = 34
    max_salt_plot = 34.7
    min_temp_plot = 2
    max_temp_plot = 8
    # Number of temperature and salinity bins
    num_bins = 1000
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])
    # Set up 2D arrays of temperature bins x salinity bins to increment with
    # volume of water masses
    ts_vals_ini = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_upwind = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_akima = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_split = zeros([size(temp_centres), size(salt_centres)])
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_2d))) - 1000
    # Density contours to plot
    density_lev = arange(26.5, 28 + 0.25, 0.25)

    print 'Reading grid'
    id = Dataset(grid_file, 'r')
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    h = id.variables['h'][:, :]
    zice = id.variables['zice'][:, :]
    id.close()
    num_lat = size(lat, 0)
    num_lon = size(lon, 1)
    # Get integrands on 3D grid
    dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc,
                                      N)
    # Get volume integrand
    dV = dx * dy * dz

    print 'Reading data'
    # Read temp and salt from the first history output step, output before
    # timestepping even starts (i.e. initial conditions for January 1992)
    id = Dataset(ini_file, 'r')
    ini_temp = id.variables['temp'][0, :, :, :]
    ini_salt = id.variables['salt'][0, :, :, :]
    id.close()
    # Then read temp and salt averaged over the last January for each simulation
    id = Dataset(upwind_file, 'r')
    upwind_temp = id.variables['temp'][0, :, :, :]
    upwind_salt = id.variables['salt'][0, :, :, :]
    id.close()
    id = Dataset(akima_file, 'r')
    akima_temp = id.variables['temp'][0, :, :, :]
    akima_salt = id.variables['salt'][0, :, :, :]
    id.close()
    id = Dataset(split_file, 'r')
    split_temp = id.variables['temp'][0, :, :, :]
    split_salt = id.variables['salt'][0, :, :, :]
    id.close()

    print 'Binning temperature and salinity'
    # Loop over 2D grid boxes
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask
            if ini_temp[0, j, i] is ma.masked:
                continue
            # Check for ice shelf cavity
            if zice[j, i] < 0:
                continue
            # Check for too-shallow bathymetry
            if h[j, i] < h0:
                continue
            for k in range(N):
                # Check for too-shallow cells
                if abs(z[k, j, i]) < z0:
                    continue
                # First categorise the initial data
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > ini_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > ini_salt[k, j, i])[0][0] - 1
                # Increment bins with volume
                ts_vals_ini[temp_index, salt_index] += dV[k, j, i]
                # Upwind simulation
                temp_index = nonzero(
                    temp_bins > upwind_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(
                    salt_bins > upwind_salt[k, j, i])[0][0] - 1
                ts_vals_upwind[temp_index, salt_index] += dV[k, j, i]
                # Akima simulation
                temp_index = nonzero(temp_bins > akima_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > akima_salt[k, j, i])[0][0] - 1
                ts_vals_akima[temp_index, salt_index] += dV[k, j, i]
                # Split simulation
                temp_index = nonzero(temp_bins > split_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > split_salt[k, j, i])[0][0] - 1
                ts_vals_split[temp_index, salt_index] += dV[k, j, i]
    # Mask bins with zero volume
    ts_vals_ini = ma.masked_where(ts_vals_ini == 0, ts_vals_ini)
    ts_vals_upwind = ma.masked_where(ts_vals_upwind == 0, ts_vals_upwind)
    ts_vals_akima = ma.masked_where(ts_vals_akima == 0, ts_vals_akima)
    ts_vals_split = ma.masked_where(ts_vals_split == 0, ts_vals_split)

    # Find the volume bounds for plotting
    min_val = log(
        amin(
            array([
                amin(ts_vals_ini),
                amin(ts_vals_upwind),
                amin(ts_vals_akima),
                amin(ts_vals_split)
            ])))
    max_val = log(
        amax(
            array([
                amax(ts_vals_ini),
                amax(ts_vals_upwind),
                amax(ts_vals_akima),
                amax(ts_vals_split)
            ])))

    print 'Plotting'
    fig = figure(figsize=(14, 24))
    gs = GridSpec(2, 2)
    gs.update(left=0.1,
              right=0.9,
              bottom=0.12,
              top=0.95,
              wspace=0.05,
              hspace=0.12)
    # Initial conditions
    ax = subplot(gs[0, 0])
    # Plot with log scale
    pcolor(salt_centres,
           temp_centres,
           log(ts_vals_ini),
           vmin=min_val,
           vmax=max_val,
           cmap='jet')
    # Add density contours
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=22)
    title('Initial conditions', fontsize=26)
    # Upwind
    ax = subplot(gs[0, 1])
    pcolor(salt_centres,
           temp_centres,
           log(ts_vals_upwind),
           vmin=min_val,
           vmax=max_val,
           cmap='jet')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    title('Upwind third-order advection', fontsize=26)
    # Akima
    ax = subplot(gs[1, 0])
    pcolor(salt_centres,
           temp_centres,
           log(ts_vals_akima),
           vmin=min_val,
           vmax=max_val,
           cmap='jet')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=22)
    title('Akima advection', fontsize=26)
    # Split
    ax = subplot(gs[1, 1])
    img = pcolor(salt_centres,
                 temp_centres,
                 log(ts_vals_split),
                 vmin=min_val,
                 vmax=max_val,
                 cmap='jet')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, colors=(0.6, 0.6, 0.6), fmt='%1.1f')
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    title('RSUP3 advection', fontsize=26)
    # Colorbar at bottom
    cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.02])
    cbar = colorbar(img, cax=cbaxes, orientation='horizontal')
    cbar.ax.tick_params(labelsize=14)
    text(0.5,
         0.01,
         'log of volume',
         fontsize=20,
         transform=fig.transFigure,
         ha='center')
    # Main title
    suptitle('Deep water masses after 25 years: AAIW', fontsize=30)
    fig.show()
    fig.savefig('bugs_ts_distribution.png')
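The core of the binning step above maps each (temperature, salinity) sample to a pair of bin indices with nonzero(temp_bins > value)[0][0] - 1 and adds the cell volume dV to that bin. Below is a minimal standalone sketch of the same step, using illustrative numbers and numpy.digitize, which returns the same index for values inside the bin range:

import numpy as np

# Toy bin edges, analogous to temp_bins and salt_bins above
temp_bins = np.linspace(-2, 21, num=24)
salt_bins = np.linspace(33.8, 36.6, num=29)
# Volume accumulator: one cell per (temperature bin, salinity bin)
ts_vals = np.zeros([temp_bins.size - 1, salt_bins.size - 1])

# One illustrative grid cell: temperature (degC), salinity (psu), volume (m^3)
temp_val, salt_val, dV = 1.5, 34.63, 2.0e9

# Same result as nonzero(temp_bins > temp_val)[0][0] - 1 in the function above
temp_index = np.digitize(temp_val, temp_bins) - 1
salt_index = np.digitize(salt_val, salt_bins) - 1
ts_vals[temp_index, salt_index] += dV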
Example #2
def make_density_file(input_file, output_file):

    # Grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31

    # Read grid variables
    in_id = Dataset(input_file, 'r')
    h = in_id.variables['h'][:, :]
    zice = in_id.variables['zice'][:, :]
    lon = in_id.variables['lon_rho'][:, :]
    lat = in_id.variables['lat_rho'][:, :]
    num_lon = size(lon, 1)
    num_lat = size(lon, 0)

    # Get a 3D array of z-coordinates (metres)
    z, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N)
    # Pressure is approximately equal to |z|/10
    press = abs(z) / 10.0

    # Set up output file
    out_id = Dataset(output_file, 'w')
    # Define dimensions
    out_id.createDimension('xi_rho', num_lon)
    out_id.createDimension('eta_rho', num_lat)
    out_id.createDimension('s_rho', N)
    out_id.createDimension('ocean_time', None)
    # Define variables
    out_id.createVariable('lon_rho', 'f8', ('eta_rho', 'xi_rho'))
    out_id.variables['lon_rho'][:, :] = lon
    out_id.createVariable('lat_rho', 'f8', ('eta_rho', 'xi_rho'))
    out_id.variables['lat_rho'][:, :] = lat
    out_id.createVariable('sc_r', 'f8', ('s_rho'))
    out_id.variables['sc_r'].long_name = 'S-coordinate at rho-points'
    out_id.variables['sc_r'][:] = sc_r
    out_id.createVariable('ocean_time', 'f8', ('ocean_time'))
    out_id.variables['ocean_time'].units = 'seconds'
    out_id.createVariable('rho', 'f8',
                          ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'))
    out_id.variables['rho'].long_name = 'density'
    out_id.variables['rho'].units = 'kg/m^3'

    # Read time values from input file
    time = in_id.variables['ocean_time'][:]

    # Process each timestep individually to conserve memory
    for t in range(size(time)):
        print 'Processing timestep ' + str(t + 1) + ' of ' + str(size(time))
        # Set a new time value in the output file
        out_id.variables['ocean_time'][t] = time[t]
        # Read temperature and salinity (convert to float128 to prevent
        # overflow in UNESCO calculations)
        temp = ma.asarray(in_id.variables['temp'][t, :, :, :], dtype=float128)
        salt = ma.asarray(in_id.variables['salt'][t, :, :, :], dtype=float128)
        # Calculate density with the UNESCO equation of state
        rho = unesco(temp, salt, press)
        # Save the results for this timestep
        out_id.variables['rho'][t, :, :, :] = rho

    in_id.close()
    out_id.close()
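A minimal usage sketch for the function above; the file names are hypothetical placeholders, and the input file is assumed to hold the grid variables (h, zice, lon_rho, lat_rho) along with ocean_time, temp, and salt, exactly as read above. The pressure passed to unesco is approximated as |z|/10, i.e. pressure in bar assuming roughly 1 bar per 10 m of depth.

# Hypothetical file paths, for illustration only
make_density_file('ocean_avg_0001.nc', 'density_0001.nc')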
Example #3
def plot_mld (elements, patches, file_path, tstep, circumpolar, save=False, fig_name=None, limit=None):

    # Definition of mixed layer depth: where potential density exceeds
    # surface density by this amount (kg/m^3) as in Sallee et al 2013
    density_anom = 0.03
    mask_cavities=True

    # Set bounds for domain
    if circumpolar:
        # Northern boundary 30S: plot radius of 90 - 30 = 60 in the polar projection coordinates
        lat_max = -30+90
    else:
        lon_min = -180
        lon_max = 180
        lat_min = -90
        lat_max = 90
        # Configure position of latitude and longitude labels
        lon_ticks = arange(-120, 120+1, 60)
        lat_ticks = arange(-60, 60+1, 30)
    # Set font sizes
    font_sizes = [30, 24, 20]

    # Read temperature and salinity at each node
    print 'Reading data'
    id = Dataset(file_path, 'r')
    temp = id.variables['temp'][tstep-1,:]
    salt = id.variables['salt'][tstep-1,:]
    id.close()
    # Calculate potential density (depth 0)
    print 'Calculating density'
    density = unesco(temp, salt, zeros(shape(temp)))

    # Build an array of mixed layer depth corresponding to each 2D Element
    print 'Calculating mixed layer depth'
    values = []
    for elm in elements:
        if (mask_cavities and not elm.cavity) or (not mask_cavities):
            # Get mixed layer depth at each node
            mld_nodes = []
            # Make sure we exclude ice shelf cavity nodes from element mean
            # (an Element can be a non-cavity element and still have up to 2
            # cavity nodes)
            for i in range(3):
                if (mask_cavities and not elm.cavity_nodes[i]) or (not mask_cavities):
                    node = elm.nodes[i]
                    density_sfc = density[node.id]
                    temp_depth = node.depth
                    curr_node = node.below
                    while True:
                        if curr_node is None:
                            mld_nodes.append(temp_depth)
                            break
                        if density[curr_node.id] >= density_sfc + density_anom:
                            mld_nodes.append(curr_node.depth)
                            break
                        temp_depth = curr_node.depth
                        curr_node = curr_node.below
            # For this element, save the mean mixed layer depth across
            # non-cavity nodes (up to 3)
            values.append(mean(array(mld_nodes)))

    if mask_cavities:
        # Get mask array of patches for ice shelf cavity elements
        mask_patches = iceshelf_mask(elements)

    # Choose colour bounds
    var_min = 0
    if limit is not None:
        var_max = limit
    else:
        var_max = amax(array(values))

    print 'Plotting'
    # Set up plot
    if circumpolar:
        fig = figure(figsize=(16,12))
        ax = fig.add_subplot(1,1,1, aspect='equal')
    else:
        fig = figure(figsize=(16,8))
        ax = fig.add_subplot(1,1,1)
    # Set colourmap for patches, and refer it to the values array
    img = PatchCollection(patches, cmap='jet')
    img.set_array(array(values))
    img.set_edgecolor('face')
    # Add patches to plot
    ax.add_collection(img)
    if mask_cavities:
        # Set colour to light grey for patches in mask
        overlay = PatchCollection(mask_patches, facecolor=(0.6, 0.6, 0.6))
        overlay.set_edgecolor('face')
        # Add mask to plot
        ax.add_collection(overlay)

    # Configure plot
    if circumpolar:
        xlim([-lat_max, lat_max])
        ylim([-lat_max, lat_max])
        ax.get_xaxis().set_ticks([])
        ax.get_yaxis().set_ticks([])
        axis('off')
    else:
        xlim([lon_min, lon_max])
        ylim([lat_min, lat_max])
        xticks(lon_ticks)
        yticks(lat_ticks)
        xlabel('Longitude', fontsize=font_sizes[1])
        ylabel('Latitude', fontsize=font_sizes[1])
        setp(ax.get_xticklabels(), fontsize=font_sizes[2])
        setp(ax.get_yticklabels(), fontsize=font_sizes[2])
    title('Mixed layer depth (m)', fontsize=font_sizes[0])
    cbar = colorbar(img)
    cbar.ax.tick_params(labelsize=font_sizes[2])
    img.set_clim(vmin=var_min, vmax=var_max)

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
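The mixed layer depth search above walks down each node column until potential density first exceeds the surface value by density_anom (0.03 kg/m^3, as in Sallee et al 2013), or hits the bottom. A self-contained sketch of that criterion on a single profile, with illustrative numbers:

import numpy as np

def mixed_layer_depth(depth, density, density_anom=0.03):
    # depth (m) and density (kg/m^3) are 1D profiles ordered from the surface down
    density_sfc = density[0]
    for d, rho in zip(depth[1:], density[1:]):
        if rho >= density_sfc + density_anom:
            # First level exceeding the surface density by the threshold
            return d
    # Criterion never met: the mixed layer extends to the deepest level
    return depth[-1]

# Illustrative profile
depth = np.array([0., 10., 50., 100., 200., 500.])
density = np.array([1027.00, 1027.01, 1027.02, 1027.05, 1027.30, 1027.60])
print(mixed_layer_depth(depth, density))  # 100.0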
Example #4
def watermass_schematic():

    min_salt = 33.7
    max_salt = 34.8
    min_temp = -2.4
    max_temp = 0.5
    salt_ticks = [34, 34.5]
    salt_tick_labels = ['34', '34.5']
    temp_ticks = [tfreeze(min_salt), -1.5, 0]
    temp_tick_labels = [r'$T_f$', '-1.5', '0']

    # Get potential density
    salt_vals = linspace(min_salt, max_salt, num=500)
    temp_vals = linspace(min_temp, max_temp, num=500)
    salt_2d, temp_2d = meshgrid(salt_vals, temp_vals)
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_2d))) - 1000

    fig = figure(figsize=(8, 6))
    gs = GridSpec(1, 1)
    gs.update(left=0.2, right=0.9, bottom=0.2, top=0.9)
    ax = subplot(gs[0, 0])
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    # First contour potential density
    contour(salt_vals,
            temp_vals,
            density,
            colors='black',
            linewidths=1,
            linestyles='dotted')
    # Draw all the lines
    plot([min_salt, max_salt],
         [tfreeze(min_salt), tfreeze(max_salt)],
         color='black',
         linewidth=2,
         linestyle='dashed')
    arrow(max_salt * 0.999,
          tfreeze(max_salt * 0.999),
          max_salt * 0.001,
          tfreeze(max_salt) - tfreeze(max_salt * 0.999),
          head_width=0.09,
          head_length=0.025,
          fc='k',
          ec='k',
          length_includes_head=True,
          clip_on=False)
    arrow(min_salt * 1.001,
          tfreeze(min_salt * 1.001),
          -0.001 * min_salt,
          tfreeze(min_salt) - tfreeze(min_salt * 1.001),
          head_width=0.09,
          head_length=0.025,
          fc='k',
          ec='k',
          length_includes_head=True,
          clip_on=False)
    plot([34, 34], [tfreeze(34), max_temp], color='black', linewidth=2)
    arrow(34,
          max_temp * 0.95,
          0,
          max_temp * 0.05,
          head_width=0.025,
          head_length=0.1,
          fc='k',
          ec='k',
          length_includes_head=True,
          clip_on=False)
    plot([34, max_salt], [-1.5, -1.5], color='black', linewidth=2)
    arrow(max_salt * 0.999,
          -1.5,
          max_salt * 0.001,
          0,
          head_width=0.09,
          head_length=0.025,
          fc='k',
          ec='k',
          length_includes_head=True,
          clip_on=False)
    plot([34.5, 34.5], [tfreeze(34.5), -1.5], color='black', linewidth=2)
    plot([34, max_salt], [0, 0], color='black', linewidth=2)
    arrow(max_salt * 0.999,
          0,
          max_salt * 0.001,
          0,
          head_width=0.09,
          head_length=0.025,
          fc='k',
          ec='k',
          length_includes_head=True,
          clip_on=False)
    # Label all the sections
    text(34.25, -2.1, 'ISW', fontsize=30, ha='center', va='center')
    text(33.85, -0.75, 'AASW', fontsize=30, ha='center', va='center')
    text(34.25, -1.7, 'LSSW', fontsize=30, ha='center', va='center')
    text(34.65, -1.7, 'HSSW', fontsize=30, ha='center', va='center')
    text(34.4, -0.75, 'MCDW', fontsize=30, ha='center', va='center')
    text(34.4, 0.25, 'CDW', fontsize=30, ha='center', va='center')
    # Set up axis labels
    ax.set_xticks(salt_ticks)
    ax.set_xticklabels(salt_tick_labels, fontsize=27)
    ax.set_yticks(temp_ticks)
    ax.set_yticklabels(temp_tick_labels, fontsize=27)
    xlabel('Salinity (psu)', fontsize=27)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=27)
    xlim([min_salt, max_salt])
    ylim([min_temp, max_temp])
    fig.show()
    fig.savefig('watermass_schematic.png')
Example #5
def timeseries_3D (mesh_path, ocn_file, log_file):

    circumpolar = True   # Only consider elements south of 30S
    cross_180 = False    # Don't make second copies of elements that cross 180E
    days_per_output = 5  # Number of days for each output step
    rhoCp = 4.2e6            # Volumetric heat capacity of seawater (J/K/m^3)
    C2K = 273.15         # Celsius to Kelvin conversion

    ohc = []
    avgsalt = []
    tke = []
    # Check if the log file exists
    if exists(log_file):
        print 'Reading previously calculated values'
        f = open(log_file, 'r')
        # Skip the first line (header)
        f.readline()
        for line in f:
            try:
                ohc.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                avgsalt.append(float(line))
            except(ValueError):
                break
        for line in f:
            tke.append(float(line))
        f.close()

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)
    # Also read the depth of each node
    f = open(mesh_path + 'nod3d.out', 'r')
    f.readline()
    depth = []
    for line in f:
        tmp = line.split()
        depth.append(float(tmp[3]))
    f.close()
    # Convert to pressure in bar
    press = abs(array(depth))/10.0

    print 'Reading data'
    id = Dataset(ocn_file, 'r')
    num_time = id.variables['time'].shape[0]
    temp = id.variables['temp'][:,:]
    salt = id.variables['salt'][:,:]
    u = id.variables['u'][:,:]
    v = id.variables['v'][:,:]
    id.close()

    print 'Calculating density'
    rho = unesco(temp, salt, tile(press, (num_time,1)))

    print 'Setting up arrays'
    # First calculate volume of each element
    dV_e3d = []
    # Loop over 2D elements
    for elm in elements:
        # Select the three nodes making up this element
        nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
        # Calculate area of the surface triangle
        area = elm.area()
        # Loop downward through the water column
        while True:
            if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                # We've reached the bottom
                break
            # Calculate volume as area * average depth
            dV_e3d.append(area*(abs(nodes[0].depth - nodes[0].below.depth) +
                                abs(nodes[1].depth - nodes[1].below.depth) +
                                abs(nodes[2].depth - nodes[2].below.depth))/3.0)
            # Update nodes
            for i in range(3):
                nodes[i] = nodes[i].below
    dV_e3d = array(dV_e3d)

    # Set up arrays for timeseries of variables at each 3D element
    temp_e3d = zeros([num_time,size(dV_e3d)])
    salt_e3d = zeros([num_time,size(dV_e3d)])
    rho_e3d = zeros([num_time,size(dV_e3d)])
    u_e3d = zeros([num_time,size(dV_e3d)])
    v_e3d = zeros([num_time,size(dV_e3d)])
    # Loop over 2D elements again
    j = 0
    for elm in elements:
        # Select the three nodes making up this element
        nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
        # Loop downward through the water column
        while True:
            if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                # We've reached the bottom
                break
            # Value of each variable in this triangular prism is the
            # average of the six vertices
            temp_e3d[:,j] = (temp[:,nodes[0].id] + temp[:,nodes[1].id] +
                             temp[:,nodes[2].id] + temp[:,nodes[0].below.id] +
                             temp[:,nodes[1].below.id] + temp[:,nodes[2].below.id])/6.0
            salt_e3d[:,j] = (salt[:,nodes[0].id] + salt[:,nodes[1].id] +
                             salt[:,nodes[2].id] + salt[:,nodes[0].below.id] +
                             salt[:,nodes[1].below.id] + salt[:,nodes[2].below.id])/6.0
            rho_e3d[:,j] = (rho[:,nodes[0].id] + rho[:,nodes[1].id] +
                            rho[:,nodes[2].id] + rho[:,nodes[0].below.id] +
                            rho[:,nodes[1].below.id] + rho[:,nodes[2].below.id])/6.0
            u_e3d[:,j] = (u[:,nodes[0].id] + u[:,nodes[1].id] +
                          u[:,nodes[2].id] + u[:,nodes[0].below.id] +
                          u[:,nodes[1].below.id] + u[:,nodes[2].below.id])/6.0
            v_e3d[:,j] = (v[:,nodes[0].id] + v[:,nodes[1].id] +
                          v[:,nodes[2].id] + v[:,nodes[0].below.id] +
                          v[:,nodes[1].below.id] + v[:,nodes[2].below.id])/6.0
            # Update nodes
            for i in range(3):
                nodes[i] = nodes[i].below
            j += 1
    
    print 'Building timeseries'
    for t in range(num_time):
        # Integrate temp*rhoCp*dV to get OHC
        ohc.append(sum((temp_e3d[t,:]+C2K)*rhoCp*dV_e3d))
        # Average salinity (weighted with rho*dV)
        avgsalt.append(sum(salt_e3d[t,:]*rho_e3d[t,:]*dV_e3d)/sum(rho_e3d[t,:]*dV_e3d))
        # Integrate 0.5*rho*speed^2*dV to get TKE
        tke.append(sum(0.5*rho_e3d[t,:]*(u_e3d[t,:]**2 + v_e3d[t,:]**2)*dV_e3d))

    # Calculate time values
    time = arange(len(ohc))*days_per_output/365.

    print 'Plotting ocean heat content'
    clf()
    plot(time, ohc)
    xlabel('Years')
    ylabel('Southern Ocean Heat Content (J)')
    grid(True)
    savefig('ohc.png')

    print 'Plotting average salinity'
    clf()
    plot(time, avgsalt)
    xlabel('Years')
    ylabel('Southern Ocean Average Salinity (psu)')
    grid(True)
    savefig('avgsalt.png')

    print 'Plotting total kinetic energy'
    clf()
    plot(time, tke)
    xlabel('Years')
    ylabel('Southern Ocean Total Kinetic Energy (J)')
    grid(True)
    savefig('tke.png')

    print 'Saving results to log file'
    f = open(log_file, 'w')
    f.write('Southern Ocean Heat Content (J):\n')
    for elm in ohc:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Average Salinity (psu):\n')
    for elm in avgsalt:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Total Kinetic Energy (J):\n')
    for elm in tke:
        f.write(str(elm) + '\n')
    f.close()
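The three timeseries above are volume-weighted sums over the 3D elements: ocean heat content is the sum of rhoCp*(T + 273.15)*dV, average salinity is sum(S*rho*dV)/sum(rho*dV), and total kinetic energy is sum(0.5*rho*(u^2 + v^2)*dV). A minimal sketch of those reductions for a single output step, with illustrative arrays:

import numpy as np

rhoCp = 4.2e6    # volumetric heat capacity of seawater (J/K/m^3)
C2K = 273.15     # Celsius to Kelvin conversion

# Illustrative per-element values for one output step
temp = np.array([-1.5, 0.2, 1.0])          # degC
salt = np.array([34.2, 34.5, 34.7])        # psu
rho = np.array([1027.4, 1027.6, 1027.8])   # kg/m^3
u = np.array([0.05, 0.10, 0.02])           # m/s
v = np.array([0.01, 0.03, 0.00])           # m/s
dV = np.array([1.0e12, 2.0e12, 1.5e12])    # m^3

ohc = np.sum((temp + C2K) * rhoCp * dV)                # J
avgsalt = np.sum(salt * rho * dV) / np.sum(rho * dV)   # psu
tke = np.sum(0.5 * rho * (u**2 + v**2) * dV)           # J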
Example #6
def ts_animation(mesh_path, directory, start_year, end_year, fig_dir):

    # Northern boundary of water masses to consider
    nbdry = -50
    # Number of temperature and salinity bins
    num_bins = 1000
    # Plotting parameters
    circumpolar = False
    cross_180 = False
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 31.8
    max_salt = 35.2
    min_temp = -3
    max_temp = 12
    # Bounds on volume log scale (pre-computed, change if needed)
    min_vol = 18
    max_vol = 33
    # Naming conventions for FESOM oce.mean.nc files
    file_head = 'MK44005.'
    file_tail = '.oce.mean.nc'

    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])

    # Calculate surface freezing point as a function of salinity: this is the
    # equation the FESOM sea ice code uses
    freezing_pt = -0.0575 * salt_centres + 1.7105e-3 * sqrt(
        salt_centres**3) - 2.155e-4 * salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_2d))) - 1000
    # Density contours to plot
    density_lev = arange(24.4, 28.4, 0.2)

    # Make FESOM grid elements
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    # Loop over years
    for year in range(start_year, end_year + 1):
        print 'Processing ' + str(year)
        # Read temperature and salinity at each 3D node, annually averaged
        id = Dataset(directory + file_head + str(year) + file_tail, 'r')
        temp = mean(id.variables['temp'][:, :], axis=0)
        salt = mean(id.variables['salt'][:, :], axis=0)
        id.close()
        # Set up a 2D array of temperature bins x salinity bins to increment
        # with volume of water masses
        ts_vals = zeros([size(temp_centres), size(salt_centres)])
        # Loop over elements
        for elm in elements:
            # See if we're in the region of interest
            if all(elm.lat < nbdry):
                # Get area of 2D triangle
                area = elm.area()
                nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
                # Loop downward
                while True:
                    if nodes[0].below is None or nodes[
                            1].below is None or nodes[2].below is None:
                        # We've reached the bottom
                        break
                    # Calculate average temperature, salinity, and layer
                    # thickness over this 3D triangular prism
                    temp_vals = []
                    salt_vals = []
                    dz = []
                    for i in range(3):
                        # Average temperature over 6 nodes
                        temp_vals.append(temp[nodes[i].id])
                        temp_vals.append(temp[nodes[i].below.id])
                        # Average salinity over 6 nodes
                        salt_vals.append(salt[nodes[i].id])
                        salt_vals.append(salt[nodes[i].below.id])
                        # Average dz over 3 vertical edges
                        dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                        # Get ready for next repetition of loop
                        nodes[i] = nodes[i].below
                    temp_elm = mean(array(temp_vals))
                    salt_elm = mean(array(salt_vals))
                    # Calculate volume of 3D triangular prism
                    volume = area * mean(array(dz))
                    # Figure out which bins this falls into
                    temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                    salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                    # Increment bins with volume
                    ts_vals[temp_index, salt_index] += volume
        # Mask bins with zero volume
        ts_vals = ma.masked_where(ts_vals == 0, ts_vals)
        # Plot
        fig = figure(figsize=(12, 12))
        # Log scale is more visible
        img = pcolor(salt_centres,
                     temp_centres,
                     log(ts_vals),
                     vmin=min_vol,
                     vmax=max_vol,
                     cmap='jet')
        # Add surface freezing point line
        plot(salt_centres, freezing_pt, color='black', linestyle='dashed')
        # Add density contours
        cs = contour(salt_centres,
                     temp_centres,
                     density,
                     density_lev,
                     colors=(0.6, 0.6, 0.6),
                     linestyles='dotted')
        # Label density contours
        manual_locations = [(32, 11.4), (32.3, 11.4),
                            (32.5, 11.4), (32.8, 11.4), (33.1, 11.4),
                            (33.3, 11.4), (33.5, 11.3), (33.8, 11.3),
                            (34.1, 11.3), (34.3, 11.3), (34.6, 11.3),
                            (34.8, 11.4), (35, 10.8), (35, 9.9), (35, 8.1),
                            (35, 7.5), (35, 6), (35, 4.4), (35, 2.6),
                            (35.1, 0)]
        clabel(cs,
               inline=1,
               fontsize=12,
               colors=(0.6, 0.6, 0.6),
               fmt='%1.1f',
               manual=manual_locations)
        xlim([min_salt, max_salt])
        ylim([min_temp, max_temp])
        xlabel('Salinity (psu)', fontsize=16)
        ylabel(r'Temperature ($^{\circ}$C)', fontsize=16)
        title('Water masses south of ' + str(-nbdry) +
              r'$^{\circ}$S, log(volume)',
              fontsize=24)
        colorbar(img)
        # Add year in the bottom corner
        text(35.8, -4, str(year), fontsize=30)

        # Save figure with year in the filename
        fig.savefig(fig_dir + str(year) + '.png')
Example #7
def make_density_file(input_file, output_file):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31

    # Read grid variables
    in_id = Dataset(input_file, "r")
    h = in_id.variables["h"][:, :]
    zice = in_id.variables["zice"][:, :]
    lon = in_id.variables["lon_rho"][:, :]
    lat = in_id.variables["lat_rho"][:, :]
    num_lon = size(lon, 1)
    num_lat = size(lon, 0)

    # Get a 3D array of z-coordinates (metres)
    z, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N)
    # Pressure is approximately equal to |z|/10
    press = abs(z) / 10.0

    # Set up output file
    out_id = Dataset(output_file, "w")
    # Define dimensions
    out_id.createDimension("xi_rho", num_lon)
    out_id.createDimension("eta_rho", num_lat)
    out_id.createDimension("s_rho", N)
    out_id.createDimension("ocean_time", None)
    # Define variables
    out_id.createVariable("lon_rho", "f8", ("eta_rho", "xi_rho"))
    out_id.variables["lon_rho"][:, :] = lon
    out_id.createVariable("lat_rho", "f8", ("eta_rho", "xi_rho"))
    out_id.variables["lat_rho"][:, :] = lat
    out_id.createVariable("sc_r", "f8", ("s_rho"))
    out_id.variables["sc_r"].long_name = "S-coordinate at rho-points"
    out_id.variables["sc_r"][:] = sc_r
    out_id.createVariable("ocean_time", "f8", ("ocean_time"))
    out_id.variables["ocean_time"].units = "seconds"
    out_id.createVariable("rho", "f8", ("ocean_time", "s_rho", "eta_rho", "xi_rho"))
    out_id.variables["rho"].long_name = "density"
    out_id.variables["rho"].units = "kg/m^3"

    # Read time values from input file
    time = in_id.variables["ocean_time"][:]

    # Process each timestep individually to conserve memory
    for t in range(size(time)):
        print "Processing timestep " + str(t + 1) + " of " + str(size(time))
        # Set a new time value in the output file
        out_id.variables["ocean_time"][t] = time[t]
        # Read temperature and salinity (convert to float128 to prevent
        # overflow in UNESCO calculations)
        temp = ma.asarray(in_id.variables["temp"][t, :, :, :], dtype=float128)
        salt = ma.asarray(in_id.variables["salt"][t, :, :, :], dtype=float128)
        # Calculate density with the UNESCO equation of state
        rho = unesco(temp, salt, press)
        # Save the results for this timestep
        out_id.variables["rho"][t, :, :, :] = rho

    in_id.close()
    out_id.close()
Example #8
def mip_ts_distribution_ecco2():

    # Beginning of ECCO2 filenames
    temp_file_head = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.1992'
    salt_file_head = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/SALT.1440x720x50.1992'
    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins_temp = 1000
    num_bins_salt = 2000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 40.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    min_salt_plot = 33.25
    max_salt_plot = 35.1
    min_temp_plot = -3
    max_temp_plot = 3.8
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins_temp)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins_salt)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])
    # Set up 2D array of temperature bins x salinity bins to hold average
    # depth of water masses, weighted by volume
    ts_vals = zeros([size(temp_centres), size(salt_centres)])
    # Also array to integrate volume
    volume = zeros([size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity as seen by
    # CICE
    freezing_pt = salt_centres / (-18.48 + 18.48 / 1e3 * salt_centres)
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_2d))) - 1000
    # Density contours to plot
    density_lev = arange(26.6, 28.4, 0.2)

    print 'Reading grid'
    # Read grid from first file
    id = Dataset(temp_file_head + '01.nc', 'r')
    lon = id.variables['LONGITUDE_T'][:]
    lat = id.variables['LATITUDE_T'][:]
    z = id.variables['DEPTH_T'][:]
    id.close()
    num_lon = size(lon)
    num_lat = size(lat)
    num_depth = size(z)
    # Calculate integrands
    # Interpolate to get longitude at the edges of each cell
    lon_edges = zeros(num_lon + 1)
    lon_edges[1:-1] = 0.5 * (lon[:-1] + lon[1:])
    lon_edges[0] = 0.5 * (lon[0] + lon[-1] - 360)
    lon_edges[-1] = 0.5 * (lon[0] + 360 + lon[-1])
    dlon = lon_edges[1:] - lon_edges[:-1]
    # Similarly for latitude; linearly extrapolate for edges (which don't matter)
    lat_edges = zeros(num_lat + 1)
    lat_edges[1:-1] = 0.5 * (lat[:-1] + lat[1:])
    lat_edges[0] = 2 * lat[0] - lat_edges[1]
    lat_edges[-1] = 2 * lat[-1] - lat_edges[-2]
    dlat = lat_edges[1:] - lat_edges[:-1]
    # Make 2D versions
    lon_2d, lat_2d = meshgrid(lon, lat)
    dlon_2d, dlat_2d = meshgrid(dlon, dlat)
    # Convert to Cartesian space
    dx_2d = r * cos(lat_2d * deg2rad) * dlon_2d * deg2rad
    dy_2d = r * dlat_2d * deg2rad
    # We have z at the midpoint of each cell, now find it on the top and
    # bottom edges of each cell
    z_edges = zeros(num_depth + 1)
    z_edges[1:-1] = 0.5 * (z[:-1] + z[1:])
    # At the surface, z=0
    # At bottom, extrapolate
    z_edges[-1] = 2 * z[-1] - z_edges[-2]
    # Now find dz
    dz_1d = z_edges[1:] - z_edges[:-1]
    # Tile each array to be 3D
    dx_3d = tile(dx_2d, (num_depth, 1, 1))
    dy_3d = tile(dy_2d, (num_depth, 1, 1))
    dz_3d = transpose(tile(dz_1d, (num_lon, num_lat, 1)))
    # Get volume integrand
    dV = dx_3d * dy_3d * dz_3d

    print 'Reading data'
    # Annual average over 1992
    temp = ma.empty([num_depth, num_lat, num_lon])
    salt = ma.empty([num_depth, num_lat, num_lon])
    temp[:, :, :] = 0.0
    salt[:, :, :] = 0.0
    for month in range(12):
        if month + 1 < 10:
            month_string = '0' + str(month + 1)
        else:
            month_string = str(month + 1)
        id = Dataset(temp_file_head + month_string + '.nc', 'r')
        temp[:, :, :] += id.variables['THETA'][0, :, :, :]
        id.close()
        id = Dataset(salt_file_head + month_string + '.nc', 'r')
        salt[:, :, :] += id.variables['SALT'][0, :, :, :]
        id.close()
    # Convert from integrals to averages
    temp /= 12.0
    salt /= 12.0

    print 'Binning temperature and salinity'
    # Loop over grid boxes
    # Find the first latitude index north of 65S; stop there
    j_max = nonzero(lat > nbdry)[0][0]
    for k in range(num_depth):
        for j in range(j_max):
            for i in range(num_lon):
                if temp[k, j, i] is ma.masked:
                    # Land
                    continue
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > salt[k, j, i])[0][0] - 1
                # Integrate depth*dV in this bin
                ts_vals[temp_index, salt_index] += z[k] * dV[k, j, i]
                volume[temp_index, salt_index] += dV[k, j, i]
    # Mask bins with zero volume
    ts_vals = ma.masked_where(volume == 0, ts_vals)
    volume = ma.masked_where(volume == 0, volume)
    # Convert depths from integrals to volume-averages
    ts_vals /= volume

    # Find the maximum depth for plotting
    max_depth = amax(ts_vals)
    # Make a nonlinear scale
    bounds = linspace(0, max_depth**(1.0 / 2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)
    # Set labels for density contours
    manual_locations = [(33.4, 3.0), (33.65, 3.0), (33.9, 3.0), (34.2, 3.0),
                        (34.45, 3.5), (34.65, 3.25), (34.9, 3.0), (35, 1.5)]
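The volume integrand in the example above comes from converting the longitude, latitude, and depth spacings to metres on a sphere: dx = r*cos(lat)*dlon, dy = r*dlat (angles in radians), and dV = dx*dy*dz. A minimal sketch for a single grid cell, with illustrative spacings:

import numpy as np

r = 6.371e6              # radius of the Earth (m)
deg2rad = np.pi / 180.0

# Illustrative cell: 0.25 degree spacing at 70S, 10 m thick layer
lat = -70.0
dlon = 0.25
dlat = 0.25
dz = 10.0

dx = r * np.cos(lat * deg2rad) * dlon * deg2rad
dy = r * dlat * deg2rad
dV = dx * dy * dz        # cell volume (m^3)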
Example #9
def rcp_maps(var):

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = [
        '/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/',
        '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/',
        '/short/y99/kaa561/FESOM/highres_spinup/'
    ]
    if var in ['bwtemp', 'bwsalt', 'velsfc']:
        file_beg = 'annual_avg.oce.mean.1996.2005.nc'
        file_end = 'annual_avg.oce.mean.2091.2100.nc'
    elif var in ['sst', 'mld']:
        file_beg = 'seasonal_climatology_oce_1996_2005.nc'
        file_end = 'seasonal_climatology_oce_2091_2100.nc'
    elif var == 'aice':
        file_beg = 'seasonal_climatology_ice_1996_2005.nc'
        file_end = 'seasonal_climatology_ice_2091_2100.nc'
    elif var == 'fwflux':
        file_beg = 'annual_avg.forcing.diag.1996.2005.nc'
        file_end = 'annual_avg.forcing.diag.2091.2100.nc'
    elif var == 'thdgr':
        file_beg = 'annual_avg.ice.diag.1996.2005.nc'
        file_end = 'annual_avg.ice.diag.2091.2100.nc'
    num_expts = len(directories)
    expt_names = [
        'RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL'
    ]
    # Northern boundary for plot: 64S
    nbdry = -64 + 90
    # Mesh parameters
    circumpolar = True
    if var in ['aice', 'sst', 'thdgr']:
        mask_cavities = True
    else:
        mask_cavities = False
    # Density anomaly for definition of mixed layer (from Sallee et al 2013)
    density_anom = 0.03
    # Seconds to years conversion factor
    sec_per_year = 365.25 * 24 * 60 * 60
    # Bounds on colour scale, and colourmap for initial plot
    if var == 'bwtemp':
        abs_min = -2
        abs_max = 0.6
        diff_max = 1.8
        abs_cmap = 'jet'
        abs_extend = 'both'
    elif var == 'bwsalt':
        abs_min = 34.3
        abs_max = 34.9
        diff_max = 0.5
        abs_cmap = 'jet'
        abs_extend = 'both'
    elif var == 'velsfc':
        abs_min = 0
        abs_max = 0.2
        diff_max = 0.12
        abs_cmap = 'cool'
        abs_extend = 'max'
    elif var == 'sst':
        abs_min = -2
        abs_max = 5
        diff_max = 4
        abs_cmap = 'jet'
        abs_extend = 'both'
    elif var == 'aice':
        abs_min = 0
        abs_max = 1
        diff_max = 1
        abs_cmap = 'jet'
        abs_extend = 'neither'
    elif var == 'mld':
        abs_min = 0
        abs_max = 600
        diff_max = 400
        abs_cmap = 'jet'
        abs_extend = 'max'
    elif var == 'thdgr':
        abs_min = -15
        abs_max = 15
        diff_max = 3
        abs_cmap = 'PiYG_r'
        abs_extend = 'both'

    print 'Building mesh'
    elements, patches = make_patches(mesh_path, circumpolar, mask_cavities)
    num_patches = len(patches)
    if var == 'mld':
        # Read a couple of extra things for the mesh
        # Number of 2D nodes
        f = open(mesh_path + 'nod2d.out', 'r')
        n2d = int(f.readline())
        f.close()
        # Lists of which nodes are directly below which
        f = open(mesh_path + 'aux3d.out', 'r')
        max_num_layers = int(f.readline())
        node_columns = zeros([n2d, max_num_layers])
        for n in range(n2d):
            for k in range(max_num_layers):
                node_columns[n, k] = int(f.readline())
        node_columns = node_columns.astype(int)
        f.close()
        # Depth of each 3D node
        f = open(mesh_path + 'nod3d.out', 'r')
        f.readline()
        node_depth = []
        for line in f:
            tmp = line.split()
            node_depth.append(-1 * float(tmp[3]))
        f.close()
        node_depth = array(node_depth)

    print 'Processing 1996-2005'
    id = Dataset(directory_beg + file_beg, 'r')
    if var in ['bwtemp', 'sst']:
        # bwtemp is annually averaged, sst is DJF. Either way, index 0.
        var_nodes_beg = id.variables['temp'][0, :]
    elif var == 'bwsalt':
        var_nodes_beg = id.variables['salt'][0, :]
    elif var == 'velsfc':
        u_nodes_beg = id.variables['u'][0, :]
        v_nodes_beg = id.variables['v'][0, :]
        var_nodes_beg = sqrt(u_nodes_beg**2 + v_nodes_beg**2)
    elif var == 'thdgr':
        # Convert from m/s to m/y
        var_nodes_beg = id.variables['thdgr'][0, :] * sec_per_year
    elif var == 'aice':
        # Read DJF
        var_nodes_beg = id.variables['area'][0, :]
    elif var == 'mld':
        # Read JJA temp and salt
        temp_nodes_beg = id.variables['temp'][2, :]
        salt_nodes_beg = id.variables['salt'][2, :]
        density_nodes_beg = unesco(temp_nodes_beg, salt_nodes_beg,
                                   zeros(shape(temp_nodes_beg)))
        var_nodes_beg = zeros(n2d)
        for n in range(n2d):
            node_id = node_columns[n, 0] - 1
            # Save surface density and depth (depth may be nonzero beneath ice shelf cavities)
            density_sfc = density_nodes_beg[node_id]
            depth_sfc = node_depth[node_id]
            # Now loop down
            for k in range(1, max_num_layers):
                if node_columns[n, k] == -999:
                    # Reached the bottom
                    # Save the last depth
                    var_nodes_beg[n] = node_depth[node_id] - depth_sfc
                    break
                node_id = node_columns[n, k] - 1
                if density_nodes_beg[node_id] >= density_sfc + density_anom:
                    # Reached the critical density anomaly
                    # Save this depth
                    var_nodes_beg[n] = node_depth[node_id] - depth_sfc
                    break
    id.close()

    # Now set up arrays for anomalies
    var_nodes_diff = zeros([num_expts, len(var_nodes_beg)])
    for expt in range(num_expts):
        print 'Processing ' + expt_names[expt]
        id = Dataset(directories[expt] + file_end, 'r')
        if var in ['bwtemp', 'sst']:
            var_nodes_diff[
                expt, :] = id.variables['temp'][0, :] - var_nodes_beg
        elif var == 'bwsalt':
            var_nodes_diff[
                expt, :] = id.variables['salt'][0, :] - var_nodes_beg
        elif var == 'velsfc':
            u_nodes_end = id.variables['u'][0, :]
            v_nodes_end = id.variables['v'][0, :]
            var_nodes_diff[expt, :] = sqrt(u_nodes_end**2 +
                                           v_nodes_end**2) - var_nodes_beg
        elif var == 'thdgr':
            var_nodes_diff[expt, :] = id.variables['thdgr'][
                0, :] * sec_per_year - var_nodes_beg
        elif var == 'aice':
            var_nodes_diff[
                expt, :] = id.variables['area'][0, :] - var_nodes_beg
        elif var == 'mld':
            temp_nodes_end = id.variables['temp'][2, :]
            salt_nodes_end = id.variables['salt'][2, :]
            density_nodes_end = unesco(temp_nodes_end, salt_nodes_end,
                                       zeros(shape(temp_nodes_end)))
            var_nodes_end = zeros(n2d)
            for n in range(n2d):
                node_id = node_columns[n, 0] - 1
                density_sfc = density_nodes_end[node_id]
                depth_sfc = node_depth[node_id]
                for k in range(1, max_num_layers):
                    if node_columns[n, k] == -999:
                        var_nodes_end[n] = node_depth[node_id] - depth_sfc
                        break
                    node_id = node_columns[n, k] - 1
                    if density_nodes_end[node_id] >= density_sfc + density_anom:
                        var_nodes_end[n] = node_depth[node_id] - depth_sfc
                        break
            var_nodes_diff[expt, :] = var_nodes_end[:] - var_nodes_beg
        id.close()

    print 'Calculating element-averages'
    var_beg = zeros(num_patches)
    var_diff = zeros([num_expts, num_patches])
    i = 0
    for elm in elements:
        # Skip cavity elements for some variables
        if mask_cavities and elm.cavity:
            continue
        if var in ['bwtemp', 'bwsalt']:
            # Bottom nodes
            var_beg[i] = (var_nodes_beg[elm.nodes[0].find_bottom().id] +
                          var_nodes_beg[elm.nodes[1].find_bottom().id] +
                          var_nodes_beg[elm.nodes[2].find_bottom().id]) / 3.0
            var_diff[:, i] = (
                var_nodes_diff[:, elm.nodes[0].find_bottom().id] +
                var_nodes_diff[:, elm.nodes[1].find_bottom().id] +
                var_nodes_diff[:, elm.nodes[2].find_bottom().id]) / 3.0
        else:
            # Surface or 2D nodes
            var_beg[i] = (var_nodes_beg[elm.nodes[0].id] +
                          var_nodes_beg[elm.nodes[1].id] +
                          var_nodes_beg[elm.nodes[2].id]) / 3.0
            var_diff[:, i] = (var_nodes_diff[:, elm.nodes[0].id] +
                              var_nodes_diff[:, elm.nodes[1].id] +
                              var_nodes_diff[:, elm.nodes[2].id]) / 3.0
        i += 1

    print 'Plotting'
    fig = figure(figsize=(24, 5))
    gs = GridSpec(1, num_expts + 1)
    gs.update(left=0.07, right=0.93, bottom=0.05, top=0.85, wspace=0.02)
    # Beginning
    ax = subplot(gs[0, 0], aspect='equal')
    img = PatchCollection(patches, cmap=abs_cmap)
    img.set_array(var_beg)
    img.set_clim(vmin=abs_min, vmax=abs_max)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry, nbdry])
    ylim([-nbdry, nbdry])
    ax.set_xticks([])
    ax.set_yticks([])
    title('1996-2005', fontsize=20)
    # Add a colourbar on the left
    cbaxes = fig.add_axes([0.02, 0.25, 0.015, 0.4])
    cbar = colorbar(img, cax=cbaxes, extend=abs_extend)
    cbar.ax.tick_params(labelsize=16)
    # Anomalies for each experiment
    for expt in range(num_expts):
        ax = subplot(gs[0, expt + 1], aspect='equal')
        img = PatchCollection(patches, cmap='RdBu_r')
        img.set_array(var_diff[expt, :])
        img.set_clim(vmin=-diff_max, vmax=diff_max)
        img.set_edgecolor('face')
        ax.add_collection(img)
        xlim([-nbdry, nbdry])
        ylim([-nbdry, nbdry])
        ax.set_xticks([])
        ax.set_yticks([])
        title(expt_names[expt], fontsize=20)
        if expt == 0:
            xlabel('2091-2100 anomalies', fontsize=18)
        if expt == num_expts - 1:
            # Add a colourbar on the right
            cbaxes = fig.add_axes([0.95, 0.25, 0.015, 0.4])
            cbar = colorbar(img, cax=cbaxes, extend='both')
            cbar.ax.tick_params(labelsize=16)
    if var == 'bwtemp':
        suptitle(r'Bottom water temperature ($^{\circ}$C)', fontsize=30)
    elif var == 'bwsalt':
        suptitle('Bottom water salinity (psu)', fontsize=30)
    elif var == 'velsfc':
        suptitle('Surface velocity (m/s)', fontsize=30)
    elif var == 'thdgr':
        suptitle('Net sea ice growth rate (m/y)', fontsize=30)
    elif var == 'sst':
        suptitle(r'DJF sea surface temperature ($^{\circ}$C)', fontsize=30)
    elif var == 'aice':
        suptitle('DJF sea ice concentration', fontsize=30)
    elif var == 'mld':
        suptitle('JJA mixed layer depth (m)', fontsize=30)
    fig.show()
    fig.savefig(var + '_maps.png')
Example #10
def process_var (var, output_dir, mesh_path, start_year, end_year, out_file_head):

    file_head = 'MK44005.'
    [xmin, xmax, ymin, ymax] = [40, 180, -80, -40]
    res = 1/8.
    dt = 5  # days
    days_per_year = 365
    num_time = days_per_year/dt
    sec_per_day = 24*60*60
    mps_to_mpy = days_per_year*sec_per_day
    deg2rad = np.pi/180.
    time_units = 'seconds since 1979-01-01 00:00:00'
    calendar = 'noleap'
    density_anom = 0.03

    # Set parameters for each possible variable
    depth = None
    double_var = False
    mask_outside_cavity = False
    factor = 1
    if var == 'bottom_temp':
        file_tail = '.oce.mean.nc'
        var_in = 'temp'
        depth = 'bottom'
        title = 'Bottom temperature'
        units = 'degC'
    elif var == 'sfc_temp':
        file_tail = '.oce.mean.nc'
        var_in = 'temp'
        depth = 'surface'
        title = 'Surface temperature'
        units = 'degC'
    elif var == 'bottom_salt':
        file_tail = '.oce.mean.nc'
        var_in = 'salt'
        depth = 'bottom'
        title = 'Bottom salinity'
        units = 'psu'
    elif var == 'sfc_salt':
        file_tail = '.oce.mean.nc'
        var_in = 'salt'
        depth = 'surface'
        title = 'Surface salinity'
        units = 'psu'
    elif var == 'bottom_speed':
        file_tail = '.oce.mean.nc'
        double_var = True
        var_in_1 = 'u'
        var_in_2 = 'v'
        depth = 'bottom'
        title = 'Bottom current speed'
        units = 'm/s'
    elif var == 'sfc_speed':
        file_tail = '.oce.mean.nc'
        double_var = True
        var_in_1 = 'u'
        var_in_2 = 'v'
        depth = 'surface'
        title = 'Surface current speed'
        units = 'm/s'
    elif var == 'ssh':
        file_tail = '.oce.mean.nc'
        var_in = 'ssh'
        title = 'Sea surface height'
        units = 'm'
    elif var == 'ismr':
        file_tail = '.forcing.diag.nc'
        var_in = 'wnet'
        mask_outside_cavity = True
        factor = mps_to_mpy
        title = 'Ice shelf melt rate'
        units = 'm/y'
    elif var == 'seaice_conc':
        file_tail = '.ice.mean.nc'
        var_in = 'area'
        title = 'Sea ice concentration'
        units = 'fraction'
    elif var == 'seaice_thick':
        file_tail = '.ice.mean.nc'
        var_in = 'hice'
        title = 'Sea ice thickness'
        units = 'm'
    elif var == 'seaice_growth':
        file_tail = '.ice.diag.nc'
        var_in = 'thdgr'
        factor = mps_to_mpy
        title = 'Sea ice thermodynamic growth rate'
        units = 'm/y'
    elif var == 'sfc_stress':
        file_tail = '.forcing.diag.nc'
        double_var = True
        var_in_1 = 'stress_x'
        var_in_2 = 'stress_y'
        title = 'Surface stress'
        units = 'N/m^2'
    elif var == 'mixed_layer_depth':
        file_tail = '.oce.mean.nc'
        double_var = True
        var_in_1 = 'temp'
        var_in_2 = 'salt'
        title = 'Mixed layer depth'
        units = 'm'

    print 'Building FESOM mesh'
    nodes, elements = fesom_grid(mesh_path, return_nodes=True)
    # Count the number of 2D nodes
    f = open(mesh_path+'nod2d.out', 'r')
    n2d = int(f.readline())
    f.close()    
    if mask_outside_cavity:
        # Read the cavity flag
        f = open(mesh_path+'cavity_flag_nod2d.out', 'r')
        cavity = []
        for line in f:
            cavity.append(int(line))
        f.close()
        cavity = np.array(cavity)
    
    print 'Building regular grid'
    lon_reg = np.arange(xmin, xmax+res, res)
    # Build the latitude axis iteratively, with spacing scaled by cos(latitude), as in mitgcm_python
    lat_reg = [ymin]
    while lat_reg[-1] < ymax+res:
        lat_reg.append(lat_reg[-1] + res*np.cos(lat_reg[-1]*deg2rad))
    lat_reg = np.array(lat_reg)
    for i in range(10):
        lat_reg_c = 0.5*(lat_reg[:-1] + lat_reg[1:])
        j = 0
        lat_reg = [ymin]
        while lat_reg[-1] < ymax+res and j < lat_reg_c.size:
            lat_reg.append(lat_reg[-1] + res*np.cos(lat_reg_c[j]*deg2rad))
            j += 1
        lat_reg = np.array(lat_reg)
    ymax = lat_reg[-1]
    num_lon = lon_reg.size
    num_lat = lat_reg.size

    for year in range(start_year, end_year+1):
        print 'Processing ' + str(year)
        out_file = out_file_head+'_'+str(year)+'.nc'

        print '...setting up '+out_file
        id_out = nc.Dataset(out_file, 'w')
        id_out.createDimension('time', None)
        id_out.createVariable('time', 'f8', ('time'))
        id_out.variables['time'].units = time_units
        id_out.variables['time'].calendar = calendar
        id_out.createDimension('lon', num_lon)
        id_out.createVariable('lon', 'f8', ('lon'))
        id_out.variables['lon'].long_name = 'longitude'
        id_out.variables['lon'].units = 'degrees'
        id_out.variables['lon'][:] = lon_reg
        id_out.createDimension('lat', num_lat)
        id_out.createVariable('lat', 'f8', ('lat'))
        id_out.variables['lat'].long_name = 'latitude'
        id_out.variables['lat'].units = 'degrees'
        id_out.variables['lat'][:] = lat_reg
        id_out.createVariable(var, 'f8', ('time', 'lat', 'lon'))
        id_out.variables[var].long_name = title
        id_out.variables[var].units = units

        # Set time axis
        time = [nc.date2num(datetime.datetime(year, 1, 1), time_units, calendar=calendar)]
        for t in range(num_time - 1):
            time.append(time[-1] + dt*sec_per_day)
        id_out.variables['time'][:] = np.array(time)
        
        # Read data
        in_file = output_dir+file_head+str(year)+file_tail
        print '...reading '+in_file
        id_in = nc.Dataset(in_file, 'r')
        if double_var:
            # Two variables to read
            data1 = id_in.variables[var_in_1][:]
            data2 = id_in.variables[var_in_2][:]
        else:
            data = id_in.variables[var_in][:]
        id_in.close()
        
        # Process data as needed
        if double_var and (var.endswith('speed') or var.endswith('stress')):
            # Get magnitude of vector
            print '...calculating magnitude'
            data = np.sqrt(data1**2 + data2**2)
        if var == 'mixed_layer_depth':
            print '...calculating density'
            # Calculate density of each node
            density = unesco(data1, data2, np.zeros(data1.shape))
            # Set up array for mixed layer depth
            data = np.zeros([num_time, n2d])
            # Loop over timesteps (I know this is gross)
            print '...calculating mixed layer depth'
            for t in range(num_time):
                # Loop over surface nodes
                for i in range(n2d):
                    node = nodes[i]
                    density_sfc = density[t,i]
                    depth_sfc = node.depth
                    depth_tmp = node.depth
                    # Now travel down through the water column until the density exceeds the surface density by density_anom
                    curr_node = node.below
                    while True:
                        if curr_node is None:
                            # Reached the bottom: mixed layer depth is full depth
                            data[t,i] = depth_tmp-depth_sfc
                            break
                        if density[t, curr_node.id] >= density_sfc + density_anom:
                            # Reached the critical density anomaly
                            data[t,i] = curr_node.depth-depth_sfc
                            break
                        depth_tmp = curr_node.depth
                        curr_node = curr_node.below
        if depth == 'surface':
            print '...selecting surface'
            # Select only the surface nodes
            data = data[:,:n2d]
        elif depth == 'bottom':
            print '...selecting bottom'
            # Select the bottom of each water column
            data_bottom = np.zeros([num_time, n2d])
            for i in range(n2d):
                bottom_id = nodes[i].find_bottom().id
                data_bottom[:,i] = data[:,bottom_id]
            data = data_bottom
        if mask_outside_cavity:
            # Multiply by cavity flag (1 in cavity, 0 outside)
            data *= cavity
        data *= factor

        print '...interpolating to regular grid'
        # Interpolate to regular grid
        data_reg = np.zeros([num_time, num_lat, num_lon])
        valid_mask = np.zeros([num_lat, num_lon])
        # For each element, check if a point on the regular lat-lon grid lies within. If so, do barycentric interpolation to that point.
        for elm in elements:
            # Check if we are within domain of regular grid
            if np.amax(elm.lon) < xmin or np.amin(elm.lon) > xmax or np.amax(elm.lat) < ymin or np.amin(elm.lat) > ymax:
                continue
            # Find largest regular longitude west of element
            tmp = np.nonzero(lon_reg > np.amin(elm.lon))[0]
            if len(tmp) == 0:
                # Element crosses the western boundary
                iW = 0
            else:
                iW = tmp[0] - 1
            # Find smallest regular longitude east of element
            tmp = np.nonzero(lon_reg > np.amax(elm.lon))[0]
            if len(tmp) == 0:
                # Element crosses the eastern boundary
                iE = num_lon
            else:
                iE = tmp[0]
            # Find largest regular latitude south of Element
            tmp = np.nonzero(lat_reg > np.amin(elm.lat))[0]
            if len(tmp) == 0:
                # Element crosses the southern boundary
                jS = 0
            else:
                jS = tmp[0] - 1
            # Find smallest regular latitude value north of Element
            tmp = np.nonzero(lat_reg > np.amax(elm.lat))[0]
            if len(tmp) == 0:
                jN = num_lat
            else:
                jN = tmp[0]
            for i in range(iW+1, iE):
                for j in range(jS+1, jN):
                    # There is a chance that the regular gridpoint at (i,j) lies within this element
                    lon0 = lon_reg[i]
                    lat0 = lat_reg[j]
                    if in_triangle(elm, lon0, lat0):
                        # Get area of entire triangle
                        area = triangle_area(elm.lon, elm.lat)
                        # Get area of each sub-triangle formed by (lon0, lat0)
                        area0 = triangle_area([lon0, elm.lon[1], elm.lon[2]], [lat0, elm.lat[1], elm.lat[2]])
                        area1 = triangle_area([lon0, elm.lon[0], elm.lon[2]], [lat0, elm.lat[0], elm.lat[2]])
                        area2 = triangle_area([lon0, elm.lon[0], elm.lon[1]], [lat0, elm.lat[0], elm.lat[1]])
                        # Find fractional area of each
                        cff = np.array([area0/area, area1/area, area2/area])
                        data_tmp = np.zeros([num_time,3])
                        for n in range(3):
                            data_tmp[:,n] = data[:,elm.nodes[n].id]
                        # Barycentric interpolation to lon0, lat0
                        data_reg[:,j,i] = np.sum(cff[None,:]*data_tmp, axis=1)
                        valid_mask[j,i] = 1
                        # Make sure it doesn't go outside the bounds given by the 3 nodes, at each timestep (truncation errors can cause this)
                        for t in range(num_time):
                            data_reg[t,j,i] = max(data_reg[t,j,i], np.amin(data_tmp[t,:]))
                            data_reg[t,j,i] = min(data_reg[t,j,i], np.amax(data_tmp[t,:]))
        # Mask out anywhere that had nothing to interpolate to
        valid_mask = np.tile(valid_mask, [num_time,1,1])
        data_reg = np.ma.masked_where(valid_mask==0, data_reg)
        # Write to output file
        print '...writing result'
        id_out.variables[var][:] = data_reg
        id_out.close()
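
The interpolation loop above weights the three element nodes by the fractional areas of the sub-triangles formed with the target point. Below is a minimal stand-alone sketch of that barycentric weighting on a single triangle; the local triangle_area helper is only a stand-in for the one the script imports, and the vertex values are made up for illustration.

import numpy as np

def triangle_area(lon, lat):
    # Area of a triangle from its three vertices (shoelace formula);
    # stand-in for the helper imported by the script above
    return 0.5 * abs((lon[1] - lon[0]) * (lat[2] - lat[0])
                     - (lon[2] - lon[0]) * (lat[1] - lat[0]))

# Triangle vertices and the (made-up) values defined at them
elm_lon = np.array([0.0, 1.0, 0.0])
elm_lat = np.array([0.0, 0.0, 1.0])
node_vals = np.array([10.0, 20.0, 30.0])
# Target point inside the triangle
lon0, lat0 = 0.25, 0.25
# Sub-triangle areas opposite each vertex give the barycentric weights
area = triangle_area(elm_lon, elm_lat)
area0 = triangle_area([lon0, elm_lon[1], elm_lon[2]], [lat0, elm_lat[1], elm_lat[2]])
area1 = triangle_area([lon0, elm_lon[0], elm_lon[2]], [lat0, elm_lat[0], elm_lat[2]])
area2 = triangle_area([lon0, elm_lon[0], elm_lon[1]], [lat0, elm_lat[0], elm_lat[1]])
cff = np.array([area0, area1, area2]) / area
# Weighted value at (lon0, lat0); the weights sum to 1
print(np.sum(cff * node_vals))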
Example #11
def mip_ts_distribution_sose(roms_grid, roms_file, fesom_mesh_path,
                             fesom_file):

    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins = 1000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 35.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    min_salt_plot = 33.25
    max_salt_plot = 35.0
    min_temp_plot = -3
    max_temp_plot = 3.8
    # FESOM grid generation parameters
    circumpolar = False
    cross_180 = False
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Path to SOSE annual climatology for temp and salt
    sose_file = '../SOSE_annual_climatology.nc'
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])
    # Set up 2D arrays of temperature bins x salinity bins to hold average
    # depth of water masses, weighted by volume
    ts_vals_roms = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_fesom = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_sose = zeros([size(temp_centres), size(salt_centres)])
    # Also arrays to integrate volume
    volume_roms = zeros([size(temp_centres), size(salt_centres)])
    volume_fesom = zeros([size(temp_centres), size(salt_centres)])
    volume_sose = zeros([size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity as seen by
    # each sea ice model
    freezing_pt_roms = salt_centres / (-18.48 + 18.48 / 1e3 * salt_centres)
    freezing_pt_fesom = -0.0575 * salt_centres + 1.7105e-3 * sqrt(
        salt_centres**3) - 2.155e-4 * salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres))) - 1000
    # Density contours to plot
    density_lev = arange(26.6, 28.4, 0.2)

    print 'Processing ROMS'
    # Read ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    roms_h = id.variables['h'][:, :]
    roms_zice = id.variables['zice'][:, :]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get integrands on 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(
        roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get volume integrand
    roms_dV = roms_dx * roms_dy * roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0, :, :, :]
    roms_salt = id.variables['salt'][0, :, :, :]
    id.close()
    # Loop over 2D grid boxes
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask
            if roms_temp[0, j, i] is ma.masked:
                continue
            # Check if we're in the region of interest
            if roms_lat[j, i] < nbdry:
                # Loop downward
                for k in range(N):
                    # Figure out which bins this falls into
                    temp_index = nonzero(
                        temp_bins > roms_temp[k, j, i])[0][0] - 1
                    salt_index = nonzero(
                        salt_bins > roms_salt[k, j, i])[0][0] - 1
                    # Integrate depth*dV in this bin
                    ts_vals_roms[
                        temp_index,
                        salt_index] += -roms_z[k, j, i] * roms_dV[k, j, i]
                    volume_roms[temp_index, salt_index] += roms_dV[k, j, i]
    # Mask bins with zero volume
    ts_vals_roms = ma.masked_where(volume_roms == 0, ts_vals_roms)
    volume_roms = ma.masked_where(volume_roms == 0, volume_roms)
    # Convert depths from integrals to volume-averages
    ts_vals_roms /= volume_roms

    print 'Processing FESOM'
    # Make FESOM grid elements
    elements = fesom_grid(fesom_mesh_path, circumpolar, cross_180)
    # Read temperature and salinity at each 3D node
    id = Dataset(fesom_file, 'r')
    fesom_temp = id.variables['temp'][0, :]
    fesom_salt = id.variables['salt'][0, :]
    id.close()
    # Loop over elements
    for elm in elements:
        # See if we're in the region of interest
        if all(elm.lat < nbdry):
            # Get area of 2D triangle
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[
                        2].below is None:
                    # We've reached the bottom
                    break
                # Calculate average temperature, salinity, depth, and layer
                # thickness over this 3D triangular prism
                temp_vals = []
                salt_vals = []
                depth_vals = []
                dz = []
                for i in range(3):
                    # Average temperature over 6 nodes
                    temp_vals.append(fesom_temp[nodes[i].id])
                    temp_vals.append(fesom_temp[nodes[i].below.id])
                    # Average salinity over 6 nodes
                    salt_vals.append(fesom_salt[nodes[i].id])
                    salt_vals.append(fesom_salt[nodes[i].below.id])
                    # Average depth over 6 nodes
                    depth_vals.append(nodes[i].depth)
                    depth_vals.append(nodes[i].below.depth)
                    # Average dz over 3 vertical edges
                    dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                    # Get ready for next repetition of loop
                    nodes[i] = nodes[i].below
                temp_elm = mean(array(temp_vals))
                salt_elm = mean(array(salt_vals))
                depth_elm = mean(array(depth_vals))
                # Calculate volume of 3D triangular prism
                volume = area * mean(array(dz))
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                # Integrate depth*volume in this bin
                ts_vals_fesom[temp_index, salt_index] += depth_elm * volume
                volume_fesom[temp_index, salt_index] += volume
    # Mask bins with zero volume
    ts_vals_fesom = ma.masked_where(volume_fesom == 0, ts_vals_fesom)
    volume_fesom = ma.masked_where(volume_fesom == 0, volume_fesom)
    # Convert depths from integrals to volume-averages
    ts_vals_fesom /= volume_fesom

    print 'Processing SOSE'
    # Read grid
    id = Dataset(sose_file, 'r')
    sose_lon = id.variables['longitude'][:, :]
    sose_lat = id.variables['latitude'][:, :]
    sose_z = id.variables['depth'][:]
    sose_temp = id.variables['temp'][0, :, :, :]
    sose_salt = id.variables['salt'][0, :, :, :]
    id.close()
    num_lon = size(sose_lon, 1)
    num_lat = size(sose_lat, 0)
    num_depth = size(sose_z)
    # Calculate integrands
    # Interpolate to get longitude at the edges of each cell
    w_bdry = 0.5 * (sose_lon[:, 0] + sose_lon[:, -1] - 360)
    middle_lon = 0.5 * (sose_lon[:, 0:-1] + sose_lon[:, 1:])
    e_bdry = 0.5 * (sose_lon[:, 0] + 360 + sose_lon[:, -1])
    lon_edges = concatenate((w_bdry[:, None], middle_lon, e_bdry[:, None]),
                            axis=1)
    dlon = abs(lon_edges[:, 1:] - lon_edges[:, 0:-1])
    # Similarly for latitude; linearly extrapolate for latitude at edges of
    # N/S boundary cells
    middle_lat = 0.5 * (sose_lat[0:-1, :] + sose_lat[1:, :])
    s_bdry = 2 * sose_lat[0, :] - middle_lat[0, :]
    n_bdry = 2 * sose_lat[-1, :] - middle_lat[-1, :]
    lat_edges = concatenate((s_bdry[None, :], middle_lat, n_bdry[None, :]),
                            axis=0)
    dlat = lat_edges[1:, :] - lat_edges[0:-1, :]
    # Convert to Cartesian space
    sose_dx_2d = r * cos(sose_lat * deg2rad) * dlon * deg2rad
    sose_dy_2d = r * dlat * deg2rad
    # We have z at the midpoint of each cell, now find it on the top and
    # bottom edges of each cell
    z_edges = zeros(num_depth + 1)
    z_edges[1:-1] = 0.5 * (sose_z[0:-1] + sose_z[1:])
    # At surface, z=0
    # At bottom, extrapolate
    z_edges[-1] = 2 * sose_z[-1] - z_edges[-2]
    # Now find dz
    sose_dz_1d = abs(z_edges[1:] - z_edges[0:-1])
    # Tile each array to be 3D
    sose_dx = tile(sose_dx_2d, (num_depth, 1, 1))
    sose_dy = tile(sose_dy_2d, (num_depth, 1, 1))
    sose_dz = transpose(tile(sose_dz_1d, (num_lon, num_lat, 1)))
    # Get volume integrand
    sose_dV = sose_dx * sose_dy * sose_dz
    # Loop over 2D grid boxes
    # Find the first latitude index north of 65S; stop there
    j_max = nonzero(sose_lat[:, 0] > nbdry)[0][0]
    for k in range(num_depth):
        for j in range(j_max):
            for i in range(num_lon):
                # Values exactly zero are masked
                if sose_temp[k, j, i] == 0.0:
                    continue
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > sose_temp[k, j, i])[0][0] - 1
                salt_index = nonzero(salt_bins > sose_salt[k, j, i])[0][0] - 1
                # Integrate depth*dV in this bin
                ts_vals_sose[temp_index,
                             salt_index] += -sose_z[k] * sose_dV[k, j, i]
                volume_sose[temp_index, salt_index] += sose_dV[k, j, i]
    # Mask bins with zero volume
    ts_vals_sose = ma.masked_where(volume_sose == 0, ts_vals_sose)
    volume_sose = ma.masked_where(volume_sose == 0, volume_sose)
    # Convert depths from integrals to volume-averages
    ts_vals_sose /= volume_sose

    # Find the maximum depth for plotting
    max_depth = amax(
        array([amax(ts_vals_roms),
               amax(ts_vals_fesom),
               amax(ts_vals_sose)]))
    # Make a nonlinear scale
    bounds = linspace(0, max_depth**(1.0 / 2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)
    # Set labels for density contours
    manual_locations = [(33.4, 3.0), (33.65, 3.0), (33.9, 3.0), (34.2, 3.0),
                        (34.45, 3.5), (34.65, 3.25), (34.9, 3.0), (34.8, 0)]

    print "Plotting"
    fig = figure(figsize=(24, 10))
    # ROMS
    ax = fig.add_subplot(1, 3, 1)
    pcolor(salt_centres,
           temp_centres,
           ts_vals_roms,
           norm=norm,
           vmin=0,
           vmax=max_depth,
           cmap='jet')
    # Add surface freezing point line
    plot(salt_centres, freezing_pt_roms, color='black', linestyle='dashed')
    # Add density contours
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs,
           inline=1,
           fontsize=14,
           color=(0.6, 0.6, 0.6),
           fmt='%1.1f',
           manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=22)
    title('MetROMS, 2002-2016', fontsize=26)
    # FESOM
    ax = fig.add_subplot(1, 3, 2)
    pcolor(salt_centres,
           temp_centres,
           ts_vals_fesom,
           norm=norm,
           vmin=0,
           vmax=max_depth,
           cmap='jet')
    plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed')
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs,
           inline=1,
           fontsize=14,
           color=(0.6, 0.6, 0.6),
           fmt='%1.1f',
           manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    title('FESOM (high-res), 2002-2016', fontsize=26)
    # SOSE
    ax = fig.add_subplot(1, 3, 3)
    img = pcolor(salt_centres,
                 temp_centres,
                 ts_vals_sose,
                 norm=norm,
                 vmin=0,
                 vmax=max_depth,
                 cmap='jet')
    # No surface freezing point line, because no ice shelves!
    # Add density contours
    cs = contour(salt_centres,
                 temp_centres,
                 density,
                 density_lev,
                 colors=(0.6, 0.6, 0.6),
                 linestyles='dotted')
    clabel(cs,
           inline=1,
           fontsize=14,
           color=(0.6, 0.6, 0.6),
           fmt='%1.1f',
           manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=22)
    title('SOSE, 2005-2010', fontsize=26)
    # Add a colourbar on the right
    cbaxes = fig.add_axes([0.93, 0.2, 0.02, 0.6])
    cbar = colorbar(img,
                    cax=cbaxes,
                    ticks=[0, 50, 100, 200, 500, 1000, 2000, 4000])
    cbar.ax.tick_params(labelsize=18)
    # Add the main title
    suptitle('Water masses south of 65$^{\circ}$S: depth (m)', fontsize=30)
    subplots_adjust(wspace=0.1)
    fig.show()
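
The binning above locates each water parcel's temperature and salinity class with nonzero(bins > value)[0][0] - 1, i.e. the index of the largest bin edge not exceeding the value. A small sketch of that lookup with hypothetical bin edges follows; numpy's digitize is included only as a cross-check and gives the same index for values inside the edges.

import numpy as np

# Hypothetical bin edges and a test value
temp_bins = np.linspace(-3.1, 3.8, num=1000)
temp_val = 0.5
# Index of the bin containing temp_val, as in the scripts above
temp_index = np.nonzero(temp_bins > temp_val)[0][0] - 1
# digitize returns the same bin for values strictly inside the edges
assert temp_index == np.digitize(temp_val, temp_bins) - 1
print(temp_index)  # temp_bins[temp_index] <= temp_val < temp_bins[temp_index + 1]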
Example #12
def timeseries_3D(mesh_path, ocn_file, log_file):

    circumpolar = True  # Only consider elements south of 30S
    cross_180 = False  # Don't make second copies of elements that cross 180E
    days_per_output = 5  # Number of days for each output step
    rhoCp = 4.2e6  # Volumetric heat capacity of seawater (J/K/m^3)
    C2K = 273.15  # Celsius to Kelvin conversion

    ohc = []
    avgsalt = []
    tke = []
    # Check if the log file exists
    if exists(log_file):
        print 'Reading previously calculated values'
        f = open(log_file, 'r')
        # Skip the first line (header)
        f.readline()
        for line in f:
            try:
                ohc.append(float(line))
            except (ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                avgsalt.append(float(line))
            except (ValueError):
                break
        for line in f:
            tke.append(float(line))
        f.close()

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)
    # Also read the depth of each node
    f = open(mesh_path + 'nod3d.out', 'r')
    f.readline()
    depth = []
    for line in f:
        tmp = line.split()
        depth.append(float(tmp[3]))
    f.close()
    # Convert to pressure in bar
    press = abs(array(depth)) / 10.0

    print 'Reading data'
    id = Dataset(ocn_file, 'r')
    num_time = id.variables['time'].shape[0]
    temp = id.variables['temp'][:, :]
    salt = id.variables['salt'][:, :]
    u = id.variables['u'][:, :]
    v = id.variables['v'][:, :]
    id.close()

    print 'Calculating density'
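    # press was converted from depth to bar above (depth/10); tile repeats the
    # 1D pressure profile over every time record so it matches temp and salt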
    rho = unesco(temp, salt, tile(press, (num_time, 1)))

    print 'Setting up arrays'
    # First calculate volume of each element
    dV_e3d = []
    # Loop over 2D elements
    for elm in elements:
        # Select the three nodes making up this element
        nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
        # Calculate area of the surface triangle
        area = elm.area()
        # Loop downward through the water column
        while True:
            if nodes[0].below is None or nodes[1].below is None or nodes[
                    2].below is None:
                # We've reached the bottom
                break
            # Calculate volume as area * average depth
            dV_e3d.append(area * (abs(nodes[0].depth - nodes[0].below.depth) +
                                  abs(nodes[1].depth - nodes[1].below.depth) +
                                  abs(nodes[2].depth - nodes[2].below.depth)) /
                          3.0)
            # Update nodes
            for i in range(3):
                nodes[i] = nodes[i].below
    dV_e3d = array(dV_e3d)

    # Set up arrays for timeseries of variables at each 3D element
    temp_e3d = zeros([num_time, size(dV_e3d)])
    salt_e3d = zeros([num_time, size(dV_e3d)])
    rho_e3d = zeros([num_time, size(dV_e3d)])
    u_e3d = zeros([num_time, size(dV_e3d)])
    v_e3d = zeros([num_time, size(dV_e3d)])
    # Loop over 2D elements again
    j = 0
    for elm in elements:
        # Select the three nodes making up this element
        nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
        # Loop downward through the water column
        while True:
            if nodes[0].below is None or nodes[1].below is None or nodes[
                    2].below is None:
                # We've reached the bottom
                break
            # Value of each variable in this triangular prism is the
            # average of the six vertices
            temp_e3d[:,
                     j] = (temp[:, nodes[0].id] + temp[:, nodes[1].id] +
                           temp[:, nodes[2].id] + temp[:, nodes[0].below.id] +
                           temp[:, nodes[1].below.id] +
                           temp[:, nodes[2].below.id]) / 6.0
            salt_e3d[:,
                     j] = (salt[:, nodes[0].id] + salt[:, nodes[1].id] +
                           salt[:, nodes[2].id] + salt[:, nodes[0].below.id] +
                           salt[:, nodes[1].below.id] +
                           salt[:, nodes[2].below.id]) / 6.0
            rho_e3d[:, j] = (rho[:, nodes[0].id] + rho[:, nodes[1].id] +
                             rho[:, nodes[2].id] + rho[:, nodes[0].below.id] +
                             rho[:, nodes[1].below.id] +
                             rho[:, nodes[2].below.id]) / 6.0
            u_e3d[:, j] = (u[:, nodes[0].id] + u[:, nodes[1].id] +
                           u[:, nodes[2].id] + u[:, nodes[0].below.id] +
                           u[:, nodes[1].below.id] +
                           u[:, nodes[2].below.id]) / 6.0
            v_e3d[:, j] = (v[:, nodes[0].id] + v[:, nodes[1].id] +
                           v[:, nodes[2].id] + v[:, nodes[0].below.id] +
                           v[:, nodes[1].below.id] +
                           v[:, nodes[2].below.id]) / 6.0
            # Update nodes
            for i in range(3):
                nodes[i] = nodes[i].below
            j += 1

    print 'Building timeseries'
    for t in range(num_time):
        # Integrate temp*rhoCp*dV to get OHC
        ohc.append(sum((temp_e3d[t, :] + C2K) * rhoCp * dV_e3d))
        # Average salinity (weighted with rho*dV)
        avgsalt.append(
            sum(salt_e3d[t, :] * rho_e3d[t, :] * dV_e3d) /
            sum(rho_e3d[t, :] * dV_e3d))
        # Integrate 0.5*rho*speed^2*dV to get TKE
        tke.append(
            sum(0.5 * rho_e3d[t, :] * (u_e3d[t, :]**2 + v_e3d[t, :]**2) *
                dV_e3d))

    # Calculate time values
    time = arange(len(ohc)) * days_per_output / 365.

    print 'Plotting ocean heat content'
    clf()
    plot(time, ohc)
    xlabel('Years')
    ylabel('Southern Ocean Heat Content (J)')
    grid(True)
    savefig('ohc.png')

    print 'Plotting average salinity'
    clf()
    plot(time, avgsalt)
    xlabel('Years')
    ylabel('Southern Ocean Average Salinity (psu)')
    grid(True)
    savefig('avgsalt.png')

    print 'Plotting total kinetic energy'
    clf()
    plot(time, tke)
    xlabel('Years')
    ylabel('Southern Ocean Total Kinetic Energy (J)')
    grid(True)
    savefig('tke.png')

    print 'Saving results to log file'
    f = open(log_file, 'w')
    f.write('Southern Ocean Heat Content (J):\n')
    for elm in ohc:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Average Salinity (psu):\n')
    for elm in avgsalt:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Total Kinetic Energy (J):\n')
    for elm in tke:
        f.write(str(elm) + '\n')
    f.close()
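
The timeseries above are plain volume integrals over the 3D prisms: heat content is the sum of rhoCp*(T + 273.15)*dV and the mean salinity is weighted by rho*dV. A toy three-prism version of those two reductions, with made-up values:

import numpy as np

rhoCp = 4.2e6  # volumetric heat capacity of seawater (J/K/m^3), as above
C2K = 273.15   # Celsius to Kelvin conversion
# Made-up values for three 3D prisms: temperature (C), salinity (psu),
# density (kg/m^3) and volume (m^3)
temp = np.array([-1.5, 0.2, 1.0])
salt = np.array([34.2, 34.5, 34.7])
rho = np.array([1027.5, 1027.8, 1028.0])
dV = np.array([1e9, 2e9, 1.5e9])
# Ocean heat content: integral of rhoCp*T*dV with T in Kelvin
ohc = np.sum((temp + C2K) * rhoCp * dV)
# Average salinity weighted by mass (rho*dV)
avgsalt = np.sum(salt * rho * dV) / np.sum(rho * dV)
print(ohc)
print(avgsalt)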
Example #13
def amundsen_slices_before_after(rcp, model, save=False, fig_name=None):

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    file_beg = '/short/y99/kaa561/FESOM/highres_spinup/sept.oce.1996.2005.nc'
    file_end = '/short/y99/kaa561/FESOM/rcp' + rcp + '_' + model + '/sept.oce.2091.2100.nc'
    # Spatial bounds and labels
    lon0 = -104
    lon_string = str(int(round(-lon0))) + r'$^{\circ}$W'
    lat_min = -75.1
    lat_max = -70.8
    lat_ticks = arange(-75, -71 + 1, 1)
    lat_labels = []
    for lat in lat_ticks:
        lat_labels.append(str(int(round(-lat))) + r'$^{\circ}$S')
    depth_min = -1200
    depth_max = 0
    depth_ticks = arange(-1000, 0 + 250, 250)
    depth_labels = []
    for depth in depth_ticks:
        depth_labels.append(str(int(round(-depth))))
    if model == 'M':
        model_title = 'MMM'
    elif model == 'A':
        model_title = 'ACCESS'
    # Bounds on colourbars
    temp_min = -1.8
    temp_max = 1.1
    temp_ticks = arange(-1.5, 1 + 0.5, 0.5)
    salt_min = 33.6
    salt_max = 34.6
    salt_ticks = arange(33.6, 34.6 + 0.2, 0.2)
    # Density contours to plot
    density_contours = [27.45, 27.55]
    # Parameters for regular grid interpolation (needed for density contours)
    num_lat = 500
    num_depth = 250

    print 'Building FESOM mesh'
    elm2D = fesom_grid(mesh_path)
    print 'Reading temperature and salinity data'
    id = Dataset(file_beg, 'r')
    temp_nodes_beg = id.variables['temp'][0, :]
    salt_nodes_beg = id.variables['salt'][0, :]
    id.close()
    id = Dataset(file_end, 'r')
    temp_nodes_end = id.variables['temp'][0, :]
    salt_nodes_end = id.variables['salt'][0, :]
    id.close()

    print 'Calculating density'
    density_nodes_beg = unesco(temp_nodes_beg, salt_nodes_beg,
                               zeros(shape(temp_nodes_beg))) - 1000
    density_nodes_end = unesco(temp_nodes_end, salt_nodes_end,
                               zeros(shape(temp_nodes_end))) - 1000

    print 'Interpolating to ' + str(lon0)
    # Build arrays of SideElements making up zonal slices
    # Start with beginning
    selements_temp_beg = fesom_sidegrid(elm2D, temp_nodes_beg, lon0, lat_max)
    selements_salt_beg = fesom_sidegrid(elm2D, salt_nodes_beg, lon0, lat_max)
    # Build array of quadrilateral patches for the plots, and data values
    # corresponding to each SideElement
    patches = []
    temp_beg = []
    for selm in selements_temp_beg:
        # Make patch
        coord = transpose(vstack((selm.y, selm.z)))
        patches.append(Polygon(coord, True, linewidth=0.))
        # Save data value
        temp_beg.append(selm.var)
    temp_beg = array(temp_beg)
    print 'Temp bounds, beginning: ' + str(amin(temp_beg)) + ' ' + str(
        amax(temp_beg))
    # Salinity has same patches but different values
    salt_beg = []
    for selm in selements_salt_beg:
        salt_beg.append(selm.var)
    salt_beg = array(salt_beg)
    print 'Salt bounds, beginning: ' + str(amin(salt_beg)) + ' ' + str(
        amax(salt_beg))
    # Repeat for end
    selements_temp_end = fesom_sidegrid(elm2D, temp_nodes_end, lon0, lat_max)
    selements_salt_end = fesom_sidegrid(elm2D, salt_nodes_end, lon0, lat_max)
    temp_end = []
    for selm in selements_temp_end:
        temp_end.append(selm.var)
    temp_end = array(temp_end)
    print 'Temp bounds, end: ' + str(amin(temp_end)) + ' ' + str(
        amax(temp_end))
    salt_end = []
    for selm in selements_salt_end:
        salt_end.append(selm.var)
    salt_end = array(salt_end)
    print 'Salt bounds, end: ' + str(amin(salt_end)) + ' ' + str(
        amax(salt_end))

    print 'Interpolating density to regular grid'
    lat_reg = linspace(lat_min, lat_max, num_lat)
    depth_reg = linspace(-depth_max, -depth_min, num_depth)
    density_reg_beg = zeros([num_depth, num_lat])
    density_reg_end = zeros([num_depth, num_lat])
    density_reg_beg[:, :] = NaN
    density_reg_end[:, :] = NaN
    # For each element, check if a point on the regular grid lies
    # within. If so, do barycentric interpolation to that point, at each
    # depth on the regular grid.
    for elm in elm2D:
        # Check if this element crosses lon0
        if amin(elm.lon) < lon0 and amax(elm.lon) > lon0:
            # Check if we are within the latitude bounds
            if amax(elm.lat) > lat_min and amin(elm.lat) < lat_max:
                # Find largest regular latitude value south of Element
                tmp = nonzero(lat_reg > amin(elm.lat))[0]
                if len(tmp) == 0:
                    # Element crosses the southern boundary
                    jS = 0
                else:
                    jS = tmp[0] - 1
                # Find smallest regular latitude north of Element
                tmp = nonzero(lat_reg > amax(elm.lat))[0]
                if len(tmp) == 0:
                    # Element crosses the northern boundary
                    jN = num_lat
                else:
                    jN = tmp[0]
                for j in range(jS + 1, jN):
                    # There is a chance that the regular gridpoint at j
                    # lies within this element
                    lat0 = lat_reg[j]
                    if in_triangle(elm, lon0, lat0):
                        # Yes it does
                        # Get area of entire triangle
                        area = triangle_area(elm.lon, elm.lat)
                        # Get area of each sub-triangle formed by (lon0, lat0)
                        area0 = triangle_area([lon0, elm.lon[1], elm.lon[2]],
                                              [lat0, elm.lat[1], elm.lat[2]])
                        area1 = triangle_area([lon0, elm.lon[0], elm.lon[2]],
                                              [lat0, elm.lat[0], elm.lat[2]])
                        area2 = triangle_area([lon0, elm.lon[0], elm.lon[1]],
                                              [lat0, elm.lat[0], elm.lat[1]])
                        # Find fractional area of each
                        cff = [area0 / area, area1 / area, area2 / area]
                        # Interpolate each depth value
                        for k in range(num_depth):
                            # Linear interpolation in the vertical for the
                            # value at each corner of the triangle
                            node_vals_beg = []
                            node_vals_end = []
                            for n in range(3):
                                id1, id2, coeff1, coeff2 = elm.nodes[
                                    n].find_depth(depth_reg[k])
                                if any(isnan(array([id1, id2, coeff1,
                                                    coeff2]))):
                                    # No ocean data here (seafloor or ice shelf)
                                    node_vals_beg.append(NaN)
                                    node_vals_end.append(NaN)
                                else:
                                    node_vals_beg.append(
                                        coeff1 * density_nodes_beg[id1] +
                                        coeff2 * density_nodes_beg[id2])
                                    node_vals_end.append(
                                        coeff1 * density_nodes_end[id1] +
                                        coeff2 * density_nodes_end[id2])
                            if not any(isnan(node_vals_beg)):
                                # Barycentric interpolation for the value at
                                # lon0, lat0
                                density_reg_beg[k, j] = sum(
                                    array(cff) * array(node_vals_beg))
                                density_reg_end[k, j] = sum(
                                    array(cff) * array(node_vals_end))
    density_reg_beg = ma.masked_where(isnan(density_reg_beg), density_reg_beg)
    density_reg_end = ma.masked_where(isnan(density_reg_end), density_reg_end)
    depth_reg = -1 * depth_reg

    print 'Plotting'
    fig = figure(figsize=(16, 10))
    # Temperature
    gs_temp = GridSpec(1, 2)
    gs_temp.update(left=0.08,
                   right=0.9,
                   bottom=0.5,
                   top=0.88,
                   wspace=0.05,
                   hspace=0.5)
    # Beginning
    ax = subplot(gs_temp[0, 0])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(temp_beg)
    img.set_edgecolor('face')
    img.set_clim(vmin=temp_min, vmax=temp_max)
    ax.add_collection(img)
    # Overlay density contours on regular grid
    contour(lat_reg,
            depth_reg,
            density_reg_beg,
            levels=density_contours,
            colors='black')
    xlim([lat_min, lat_max])
    ylim([depth_min, depth_max])
    title(r'Temperature ($^{\circ}$C), 1996-2005', fontsize=24)
    ax.set_xticks(lat_ticks)
    ax.set_xticklabels([])
    ax.set_yticks(depth_ticks)
    ax.set_yticklabels(depth_labels, fontsize=16)
    ylabel('Depth (m)', fontsize=18)
    # End
    ax = subplot(gs_temp[0, 1])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(temp_end)
    img.set_edgecolor('face')
    img.set_clim(vmin=temp_min, vmax=temp_max)
    ax.add_collection(img)
    contour(lat_reg,
            depth_reg,
            density_reg_end,
            levels=density_contours,
            colors='black')
    xlim([lat_min, lat_max])
    ylim([depth_min, depth_max])
    title(r'Temperature ($^{\circ}$C), 2091-2100', fontsize=24)
    ax.set_xticks(lat_ticks)
    ax.set_xticklabels([])
    ax.set_yticks(depth_ticks)
    ax.set_yticklabels([])
    # Add a colorbar on the right
    cbaxes = fig.add_axes([0.92, 0.55, 0.02, 0.28])
    cbar = colorbar(img, cax=cbaxes, extend='both', ticks=temp_ticks)
    cbar.ax.tick_params(labelsize=16)
    # Salinity
    gs_salt = GridSpec(1, 2)
    gs_salt.update(left=0.08,
                   right=0.9,
                   bottom=0.07,
                   top=0.45,
                   wspace=0.05,
                   hspace=0.5)
    # Beginning
    ax = subplot(gs_salt[0, 0])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(salt_beg)
    img.set_edgecolor('face')
    img.set_clim(vmin=salt_min, vmax=salt_max)
    ax.add_collection(img)
    contour(lat_reg,
            depth_reg,
            density_reg_beg,
            levels=density_contours,
            colors='black')
    xlim([lat_min, lat_max])
    ylim([depth_min, depth_max])
    title('Salinity (psu), 1996-2005', fontsize=24)
    ax.set_xticks(lat_ticks)
    ax.set_xticklabels(lat_labels, fontsize=16)
    xlabel('Latitude', fontsize=18)
    ax.set_yticks(depth_ticks)
    ax.set_yticklabels(depth_labels, fontsize=16)
    ylabel('Depth (m)', fontsize=18)
    # End
    ax = subplot(gs_salt[0, 1])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(salt_end)
    img.set_edgecolor('face')
    img.set_clim(vmin=salt_min, vmax=salt_max)
    ax.add_collection(img)
    contour(lat_reg,
            depth_reg,
            density_reg_end,
            levels=density_contours,
            colors='black')
    xlim([lat_min, lat_max])
    ylim([depth_min, depth_max])
    title('Salinity (psu), 2091-2100', fontsize=24)
    ax.set_xticks(lat_ticks)
    ax.set_xticklabels(lat_labels, fontsize=16)
    xlabel('Latitude', fontsize=18)
    ax.set_yticks(depth_ticks)
    ax.set_yticklabels([])
    # Add a colorbar on the right
    cbaxes = fig.add_axes([0.92, 0.12, 0.02, 0.28])
    cbar = colorbar(img, cax=cbaxes, extend='both', ticks=salt_ticks)
    cbar.ax.tick_params(labelsize=16)
    # Main title
    suptitle('RCP ' + rcp[0] + '.' + rcp[1] + ' ' + model_title + ', ' +
             lon_string + ' (Amundsen Sea), September',
             fontsize=28)

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
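
For reference, a hypothetical call to the function above: rcp is expected to be a two-character string such as '45' or '85' (the title splits it into rcp[0] and rcp[1]), and model is 'M' or 'A' (titled 'MMM' and 'ACCESS' respectively).

# Hypothetical usage: RCP 8.5, ACCESS forcing, saved to file instead of shown
amundsen_slices_before_after('85', 'A', save=True, fig_name='amundsen_85_A.png')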
Example #14
def mip_mld(roms_grid, roms_seasonal_file, fesom_mesh_path_lr,
            fesom_seasonal_file_lr, fesom_mesh_path_hr,
            fesom_seasonal_file_hr):

    # Path to Sallee's observations
    obs_file = '/short/m68/kaa561/Climatology_MLD003_v2017.nc'
    # Days per month
    days_per_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    # Definition of mixed layer depth: where potential density exceeds
    # surface density by this amount (kg/m^3) as in Sallee et al 2013
    density_anom = 0.03
    # Northern boundary for ACC plot: 30S
    nbdry1 = -30 + 90
    # Northern boundary for continental shelf plot: 64S
    nbdry2 = -64 + 90
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0
    # FESOM parameters
    circumpolar = True
    mask_cavities = False
    # ROMS parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Season names
    season_names = ['DJF', 'MAM', 'JJA', 'SON']
    # Maximum for colour scale in each season
    max_bound_summer = 150
    max_bound_winter = 600
    # Longitude labels for first panel
    lon_ticks = array([-120, -60, 60, 120])
    lat_ticks = array([-28, -25, -25, -28])
    lon_labels = [
        r'120$^{\circ}$W', r'60$^{\circ}$W', r'60$^{\circ}$E',
        r'120$^{\circ}$E'
    ]
    lon_rot = [-60, 60, -60, 60]

    print 'Processing MetROMS:'
    print 'Reading grid'
    id = Dataset(roms_grid, 'r')
    roms_h = id.variables['h'][:, :]
    roms_zice = id.variables['zice'][:, :]
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    id.close()
    # Polar coordinates for plotting
    roms_x = -(roms_lat + 90) * cos(roms_lon * deg2rad + pi / 2)
    roms_y = (roms_lat + 90) * sin(roms_lon * deg2rad + pi / 2)
    # Longitude labels
    x_ticks = -(lat_ticks + 90) * cos(lon_ticks * deg2rad + pi / 2)
    y_ticks = (lat_ticks + 90) * sin(lon_ticks * deg2rad + pi / 2)
    # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script
    roms_z, sc_r, Cs_r = calc_z(roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Make depth positive
    roms_z = -1 * roms_z
    print 'Reading data'
    id = Dataset(roms_seasonal_file, 'r')
    roms_temp = id.variables['temp'][:, :, :, :]
    roms_salt = id.variables['salt'][:, :, :, :]
    id.close()
    print 'Calculating density'
    roms_density = unesco(roms_temp, roms_salt, zeros(shape(roms_temp)))
    print 'Calculating mixed layer depth'
    roms_mld = ma.empty([4, size(roms_lon, 0), size(roms_lon, 1)])
    # Awful triple loop here, can't find a cleaner way
    for season in range(4):
        print '...' + season_names[season]
        for j in range(size(roms_lon, 0)):
            for i in range(size(roms_lon, 1)):
                # Get surface density
                density_sfc = roms_density[season, -1, j, i]
                # Get surface depth (only nonzero in ice shelf cavities)
                depth_sfc = roms_z[-1, j, i]
                if density_sfc is ma.masked:
                    # Land
                    roms_mld[season, j, i] = ma.masked
                else:
                    # Loop downward
                    k = size(roms_density, 1) - 2
                    while True:
                        if k < 0:
                            # Reached the bottom
                            roms_mld[season, j,
                                     i] = roms_z[0, j, i] - depth_sfc
                            break
                        if roms_density[season, k, j,
                                        i] >= density_sfc + density_anom:
                            # Reached the critical density anomaly
                            roms_mld[season, j,
                                     i] = roms_z[k, j, i] - depth_sfc
                            break
                        k -= 1

    print 'Processing low-res FESOM:'
    print 'Building mesh'
    elements_lr, patches_lr = make_patches(fesom_mesh_path_lr, circumpolar,
                                           mask_cavities)
    print 'Reading data'
    id = Dataset(fesom_seasonal_file_lr, 'r')
    fesom_temp_nodes_lr = id.variables['temp'][:, :]
    fesom_salt_nodes_lr = id.variables['salt'][:, :]
    id.close()
    print 'Calculating density'
    fesom_density_nodes_lr = unesco(fesom_temp_nodes_lr, fesom_salt_nodes_lr,
                                    zeros(shape(fesom_temp_nodes_lr)))
    print 'Calculating mixed layer depth'
    # Set up array for mixed layer depth at each element, at each season
    fesom_mld_lr = zeros([4, len(elements_lr)])
    # Loop over seasons and elements to fill these in
    for season in range(4):
        print '...' + season_names[season]
        mld_season = []
        for elm in elements_lr:
            # Get mixed layer depth at each node
            mld_nodes = []
            for i in range(3):
                node = elm.nodes[i]
                density_sfc = fesom_density_nodes_lr[season, node.id]
                # Save surface depth (only nonzero in ice shelf cavities)
                depth_sfc = node.depth
                temp_depth = node.depth
                curr_node = node.below
                while True:
                    if curr_node is None:
                        # Reached the bottom
                        mld_nodes.append(temp_depth - depth_sfc)
                        break
                    if fesom_density_nodes_lr[
                            season,
                            curr_node.id] >= density_sfc + density_anom:
                        # Reached the critical density anomaly
                        mld_nodes.append(curr_node.depth - depth_sfc)
                        break
                    temp_depth = curr_node.depth
                    curr_node = curr_node.below
            # For this element, save the mean mixed layer depth
            mld_season.append(mean(array(mld_nodes)))
        fesom_mld_lr[season, :] = array(mld_season)

    print 'Processing high-res FESOM:'
    print 'Building mesh'
    elements_hr, patches_hr = make_patches(fesom_mesh_path_hr, circumpolar,
                                           mask_cavities)
    print 'Reading data'
    id = Dataset(fesom_seasonal_file_hr, 'r')
    fesom_temp_nodes_hr = id.variables['temp'][:, :]
    fesom_salt_nodes_hr = id.variables['salt'][:, :]
    id.close()
    print 'Calculating density'
    fesom_density_nodes_hr = unesco(fesom_temp_nodes_hr, fesom_salt_nodes_hr,
                                    zeros(shape(fesom_temp_nodes_hr)))
    print 'Calculating mixed layer depth'
    # Set up array for mixed layer depth at each element, at each season
    fesom_mld_hr = zeros([4, len(elements_hr)])
    # Loop over seasons and elements to fill these in
    for season in range(4):
        print '...' + season_names[season]
        mld_season = []
        for elm in elements_hr:
            # Get mixed layer depth at each node
            mld_nodes = []
            for i in range(3):
                node = elm.nodes[i]
                density_sfc = fesom_density_nodes_hr[season, node.id]
                # Save surface depth (only nonzero in ice shelf cavities)
                depth_sfc = node.depth
                temp_depth = node.depth
                curr_node = node.below
                while True:
                    if curr_node is None:
                        # Reached the bottom
                        mld_nodes.append(temp_depth - depth_sfc)
                        break
                    if fesom_density_nodes_hr[
                            season,
                            curr_node.id] >= density_sfc + density_anom:
                        # Reached the critical density anomaly
                        mld_nodes.append(curr_node.depth - depth_sfc)
                        break
                    temp_depth = curr_node.depth
                    curr_node = curr_node.below
            # For this element, save the mean mixed layer depth
            mld_season.append(mean(array(mld_nodes)))
        fesom_mld_hr[season, :] = array(mld_season)

    print 'Processing obs'
    # Read grid and monthly climatology
    id = Dataset(obs_file, 'r')
    obs_lon = id.variables['lon'][:]
    obs_lat = id.variables['lat'][:]
    obs_mld_monthly = id.variables['ML_Press'][:, :, :]
    id.close()
    # Polar coordinates for plotting
    obs_lon_2d, obs_lat_2d = meshgrid(obs_lon, obs_lat)
    obs_x = -(obs_lat_2d + 90) * cos(obs_lon_2d * deg2rad + pi / 2)
    obs_y = (obs_lat_2d + 90) * sin(obs_lon_2d * deg2rad + pi / 2)
    # Integrate seasonal averages
    obs_mld = zeros([4, size(obs_lat), size(obs_lon)])
    ndays = zeros(4)
    for month in range(12):
        if month + 1 in [12, 1, 2]:
            # DJF
            season = 0
        elif month + 1 in [3, 4, 5]:
            # MAM
            season = 1
        elif month + 1 in [6, 7, 8]:
            # JJA
            season = 2
        elif month + 1 in [9, 10, 11]:
            # SON
            season = 3
        obs_mld[season, :, :] += obs_mld_monthly[
            month, :, :] * days_per_month[month]
        ndays[season] += days_per_month[month]
    # Convert from integrals to averages
    for season in range(4):
        obs_mld[season, :, :] = obs_mld[season, :, :] / ndays[season]
    # Apply land mask
    obs_mld = ma.masked_where(isnan(obs_mld), obs_mld)

    print 'Plotting'
    # ACC
    fig1 = figure(figsize=(18, 9))
    # Summer
    # MetROMS
    ax = fig1.add_subplot(2, 4, 1, aspect='equal')
    pcolor(roms_x,
           roms_y,
           roms_mld[0, :, :],
           vmin=0,
           vmax=max_bound_summer,
           cmap='jet')
    text(-67, 0, season_names[0], fontsize=24, ha='right')
    title('MetROMS', fontsize=24)
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    # Add longitude labels
    for i in range(size(x_ticks)):
        text(x_ticks[i],
             y_ticks[i],
             lon_labels[i],
             ha='center',
             rotation=lon_rot[i],
             fontsize=12)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM low-res
    ax = fig1.add_subplot(2, 4, 2, aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(fesom_mld_lr[0, :])
    img.set_clim(vmin=0, vmax=max_bound_summer)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    title('FESOM (low-res)', fontsize=24)
    # FESOM high-res
    ax = fig1.add_subplot(2, 4, 3, aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(fesom_mld_hr[0, :])
    img.set_clim(vmin=0, vmax=max_bound_summer)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    title('FESOM (high-res)', fontsize=24)
    # Obs
    ax = fig1.add_subplot(2, 4, 4, aspect='equal')
    img = pcolor(obs_x,
                 obs_y,
                 obs_mld[0, :, :],
                 vmin=0,
                 vmax=max_bound_summer,
                 cmap='jet')
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    title('Observations', fontsize=24)
    # Add a colorbar for summer
    cbaxes = fig1.add_axes([0.93, 0.55, 0.02, 0.3])
    cbar = colorbar(img,
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0, max_bound_summer + 50, 50))
    cbar.ax.tick_params(labelsize=20)
    # Winter
    # MetROMS
    ax = fig1.add_subplot(2, 4, 5, aspect='equal')
    pcolor(roms_x,
           roms_y,
           roms_mld[2, :, :],
           vmin=0,
           vmax=max_bound_winter,
           cmap='jet')
    text(-67, 0, season_names[2], fontsize=24, ha='right')
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM low-res
    ax = fig1.add_subplot(2, 4, 6, aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(fesom_mld_lr[2, :])
    img.set_clim(vmin=0, vmax=max_bound_winter)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM high-res
    ax = fig1.add_subplot(2, 4, 7, aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(fesom_mld_hr[2, :])
    img.set_clim(vmin=0, vmax=max_bound_winter)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    # Obs
    ax = fig1.add_subplot(2, 4, 8, aspect='equal')
    img = pcolor(obs_x,
                 obs_y,
                 obs_mld[2, :, :],
                 vmin=0,
                 vmax=max_bound_winter,
                 cmap='jet')
    xlim([-nbdry1, nbdry1])
    ylim([-nbdry1, nbdry1])
    ax.set_xticks([])
    ax.set_yticks([])
    # Add a colorbar for winter
    cbaxes = fig1.add_axes([0.93, 0.15, 0.02, 0.3])
    cbar = colorbar(img,
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0, max_bound_winter + 200, 200))
    cbar.ax.tick_params(labelsize=20)
    # Add the main title
    suptitle('Mixed layer depth (m), 2002-2016 average', fontsize=30)
    # Decrease space between plots
    subplots_adjust(wspace=0.025, hspace=0.025)
    fig1.show()
    fig1.savefig('mld_acc.png')

    # Continental shelf
    fig2 = figure(figsize=(13, 9))
    # Summer
    # MetROMS
    ax = fig2.add_subplot(2, 3, 1, aspect='equal')
    pcolor(roms_x,
           roms_y,
           roms_mld[0, :, :],
           vmin=0,
           vmax=max_bound_summer,
           cmap='jet')
    text(-28, 0, season_names[0], fontsize=24, ha='right')
    title('MetROMS', fontsize=24)
    xlim([-nbdry2, nbdry2])
    ylim([-nbdry2, nbdry2])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM low-res
    ax = fig2.add_subplot(2, 3, 2, aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(fesom_mld_lr[0, :])
    img.set_clim(vmin=0, vmax=max_bound_summer)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry2, nbdry2])
    ylim([-nbdry2, nbdry2])
    ax.set_xticks([])
    ax.set_yticks([])
    title('FESOM (low-res)', fontsize=24)
    # FESOM high-res
    ax = fig2.add_subplot(2, 3, 3, aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(fesom_mld_hr[0, :])
    img.set_clim(vmin=0, vmax=max_bound_summer)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry2, nbdry2])
    ylim([-nbdry2, nbdry2])
    ax.set_xticks([])
    ax.set_yticks([])
    title('FESOM (high-res)', fontsize=24)
    # Add a colorbar for summer
    cbaxes = fig2.add_axes([0.93, 0.55, 0.02, 0.3])
    cbar = colorbar(img,
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0, max_bound_summer + 50, 50))
    cbar.ax.tick_params(labelsize=20)
    # Winter
    # MetROMS
    ax = fig2.add_subplot(2, 3, 4, aspect='equal')
    pcolor(roms_x,
           roms_y,
           roms_mld[2, :, :],
           vmin=0,
           vmax=max_bound_winter,
           cmap='jet')
    text(-28, 0, season_names[2], fontsize=24, ha='right')
    xlim([-nbdry2, nbdry2])
    ylim([-nbdry2, nbdry2])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM low-res
    ax = fig2.add_subplot(2, 3, 5, aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(fesom_mld_lr[2, :])
    img.set_clim(vmin=0, vmax=max_bound_winter)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry2, nbdry2])
    ylim([-nbdry2, nbdry2])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM high-res
    ax = fig2.add_subplot(2, 3, 6, aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(fesom_mld_hr[2, :])
    img.set_clim(vmin=0, vmax=max_bound_winter)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry2, nbdry2])
    ylim([-nbdry2, nbdry2])
    ax.set_xticks([])
    ax.set_yticks([])
    # Add a colorbar for winter
    cbaxes = fig2.add_axes([0.93, 0.15, 0.02, 0.3])
    cbar = colorbar(img,
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0, max_bound_winter + 200, 200))
    cbar.ax.tick_params(labelsize=20)
    # Add the main title
    suptitle('Mixed layer depth (m), 2002-2016 average', fontsize=30)
    # Decrease space between plots
    subplots_adjust(wspace=0.025, hspace=0.025)
    fig2.show()
    fig2.savefig('mld_shelf.png')
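
All three mixed layer depth searches above use the same criterion: the first depth at which potential density exceeds the surface value by density_anom = 0.03 kg/m^3 (following Sallee et al. 2013, as noted in the comments), falling back to the full water column depth if the threshold is never reached. A single-column sketch of that search on a made-up density profile:

import numpy as np

density_anom = 0.03  # threshold (kg/m^3), as in the script above
# Made-up profile: depth (m, positive downward) and potential density (kg/m^3)
depth = np.array([0., 10., 20., 50., 100., 200., 500.])
density = np.array([1027.00, 1027.00, 1027.01, 1027.02, 1027.05, 1027.20, 1027.40])
density_sfc = density[0]
# Indices where the density anomaly criterion is met
exceeds = np.nonzero(density >= density_sfc + density_anom)[0]
if exceeds.size > 0:
    mld = depth[exceeds[0]]  # first level past the threshold
else:
    mld = depth[-1]          # never exceeded: mixed to the bottom
print(mld)  # 100.0 for this profile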
Example #15
def moc_lat_density(mesh_path, file_path, save=False, fig_name=None):

    # Options for grid objects
    circumpolar = False
    cross_180 = False

    # Read vertical velocity, temperature, and salinity at every node
    id = Dataset(file_path, 'r')
    w = mean(id.variables['w'][:, :], axis=0)
    temp = mean(id.variables['temp'][:, :], axis=0)
    salt = mean(id.variables['salt'][:, :], axis=0)
    id.close()

    # Calculate potential density (depth 0) at every node
    density = unesco(temp, salt, zeros(shape(temp))) - 1000

    # Build FESOM grid
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    # Set up arrays of vertical transport, latitude, upstream density, and
    # downstream density at every interface between vertical layers of elements
    transport_all = []
    lat_all = []
    density_us_all = []
    density_ds_all = []
    # Loop over 2D elements
    for elm in elements:
        # Get area and latitude (average over 3 nodes)
        area = elm.area()
        lat = mean(elm.lat)
        nodes_above = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
        nodes = [
            nodes_above[0].below, nodes_above[1].below, nodes_above[2].below
        ]
        # Loop from the second layer from the surface, down to the second layer
        # from the bottom
        while True:
            nodes_below = [nodes[0].below, nodes[1].below, nodes[2].below]
            if None in nodes_below:
                # Reached the bottom
                break
            # Vertical velocity average over 3 nodes
            w_avg = mean([w[nodes[0].id], w[nodes[1].id], w[nodes[2].id]])
            # Vertical transport through this triangular interface
            transport = abs(w_avg) * area * 1e-6
            # Density average over 3D triangular prism above
            density_above = mean([
                density[nodes[0].id], density[nodes[1].id],
                density[nodes[2].id], density[nodes_above[0].id],
                density[nodes_above[1].id], density[nodes_above[2].id]
            ])
            # Density average over 3D triangular prism below
            density_below = mean([
                density[nodes[0].id], density[nodes[1].id],
                density[nodes[2].id], density[nodes_below[0].id],
                density[nodes_below[1].id], density[nodes_below[2].id]
            ])
            # Figure out which is triangular prism upstream and which is
            # downstream; save the density values correspondingly
            if w_avg > 0:
                density_us = density_below
                density_ds = density_above
            else:
                density_us = density_above
                density_ds = density_below
            # Save vertical transport, latitude, upstream and downstream
            # densities for this interface
            transport_all.append(transport)
            lat_all.append(lat)
            density_us_all.append(density_us)
            density_ds_all.append(density_ds)
            # Get ready for next layer down
            nodes_above = nodes
            nodes = nodes_below

    # Convert lists to arrays so the elementwise comparisons and products
    # below behave as intended
    transport_all = array(transport_all)
    lat_all = array(lat_all)
    density_us_all = array(density_us_all)
    density_ds_all = array(density_ds_all)

    # Get regular values of latitude and density
    lat_reg = linspace(-90, 90, num=50)
    density_reg = linspace(floor(amin(density)), ceil(amax(density)), num=25)
    # Set up array for overturning streamfunction
    moc = zeros([size(density_reg), size(lat_reg)])
    # Loop over latitude
    for j in range(size(lat_reg)):
        print 'Processing latitude ' + str(j + 1) + ' of ' + str(size(lat_reg))
        # Make a flag which is 1 for interfaces south of the current latitude,
        # 0 otherwise
        flag_lat = zeros(shape(lat_all))
        index = lat_all <= lat_reg[j]
        flag_lat[index] = 1
        # Loop over density
        for k in range(size(density_reg)):
            # Make a flag which is 1 or -1 (depending on direction) for
            # interfaces where the upstream-downstream density gradient crosses
            # the current density, 0 otherwise
            flag_density = zeros(shape(density_us_all))
            index = (density_us_all <= density_reg[k]) * (density_ds_all >=
                                                          density_reg[k])
            flag_density[index] = 1
            index = (density_ds_all <= density_reg[k]) * (density_us_all >=
                                                          density_reg[k])
            flag_density[index] = -1
            # Calculate MOC
            moc[k, j] = sum(transport_all * flag_lat * flag_density)

    # Make colour levels
    bound = amax(abs(moc))
    lev = linspace(-bound, bound, num=50)

    # Plot
    fig = figure()
    img = contourf(lat_reg, density_reg, moc, lev, cmap='RdBu_r')
    ylim([density_reg[-1], density_reg[0]])
    xlabel('Latitude')
    ylabel(r'Density (kg/m$^3$)')
    title('Meridional Overturning Streamfunction (Sv)')
    colorbar(img)

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
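
# A toy, standalone illustration (not part of the original function) of the
# flag-based binning above: each interface adds its vertical transport to
# every latitude bin at or north of it, in every density bin whose level its
# upstream-downstream density gradient crosses, with a sign given by the
# crossing direction. All interface values below are invented.
import numpy as np

transport = np.array([0.5, 1.2, 0.8])   # Sv per interface
lat = np.array([-70.0, -60.0, -40.0])   # latitude of each interface
rho_us = np.array([27.9, 27.4, 26.9])   # upstream density (sigma-0)
rho_ds = np.array([27.5, 27.6, 27.0])   # downstream density (sigma-0)

lat_reg = np.linspace(-90, 90, num=10)
rho_reg = np.linspace(26.5, 28.0, num=7)
moc = np.zeros([rho_reg.size, lat_reg.size])
for j in range(lat_reg.size):
    # 1 for interfaces south of this latitude, 0 otherwise
    flag_lat = (lat <= lat_reg[j]).astype(float)
    for k in range(rho_reg.size):
        # +1 / -1 where the density gradient crosses this level, 0 otherwise
        flag_rho = np.zeros(rho_us.shape)
        flag_rho[(rho_us <= rho_reg[k]) & (rho_ds >= rho_reg[k])] = 1
        flag_rho[(rho_ds <= rho_reg[k]) & (rho_us >= rho_reg[k])] = -1
        moc[k, j] = np.sum(transport * flag_lat * flag_rho)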
Example #16
def mld_jja_diff(mesh_path,
                 file_path_beg,
                 file_path_end,
                 save=False,
                 fig_name=None,
                 limit=None):

    # Definition of mixed layer depth: where potential density exceeds
    # surface density by this amount (kg/m^3) as in Sallee et al 2013
    density_anom = 0.03
    # Plotting parameters
    circumpolar = True
    mask_cavities = True
    lat_max = -30 + 90
    font_sizes = [30, 24, 20]

    print 'Building grid'
    elements, patches = make_patches(mesh_path, circumpolar, mask_cavities)

    print 'Reading data'
    # First 10 years
    # Read temperature and salinity at each node, seasonally averaged over JJA
    tmp = seasonal_avg(file_path_beg, file_path_beg, 'temp')
    temp_beg = tmp[2, :]
    tmp = seasonal_avg(file_path_beg, file_path_beg, 'salt')
    salt_beg = tmp[2, :]
    # Last 10 years
    tmp = seasonal_avg(file_path_beg, file_path_end, 'temp')
    temp_end = tmp[2, :]
    tmp = seasonal_avg(file_path_beg, file_path_end, 'salt')
    salt_end = tmp[2, :]
    # Calculate potential density (depth 0)
    print 'Calculating density'
    density_beg = unesco(temp_beg, salt_beg, zeros(shape(temp_beg)))
    density_end = unesco(temp_end, salt_end, zeros(shape(temp_end)))

    # Calculate mixed layer depth at each element
    print 'Calculating mixed layer depth'
    # First 10 years
    mld_beg = []
    for elm in elements:
        if (mask_cavities and not elm.cavity) or (not mask_cavities):
            # Get mixed layer depth at each node
            mld_nodes = []
            # Make sure we exclude ice shelf cavity nodes from element mean
            # (an Element can be a non-cavity element and still have up to 2
            # cavity nodes)
            for i in range(3):
                if (mask_cavities
                        and not elm.cavity_nodes[i]) or (not mask_cavities):
                    node = elm.nodes[i]
                    density_sfc = density_beg[node.id]
                    temp_depth = node.depth
                    curr_node = node.below
                    while True:
                        if curr_node is None:
                            # Reached bottom
                            mld_nodes.append(temp_depth)
                            break
                        if density_beg[
                                curr_node.id] >= density_sfc + density_anom:
                            # Reached critical density anomaly
                            mld_nodes.append(curr_node.depth)
                            break
                        temp_depth = curr_node.depth
                        curr_node = curr_node.below
            # For this element, save the mean mixed layer depth across
            # non-cavity nodes (up to 3)
            mld_beg.append(mean(array(mld_nodes)))
    # Last 10 years
    mld_end = []
    for elm in elements:
        if (mask_cavities and not elm.cavity) or (not mask_cavities):
            # Get mixed layer depth at each node
            mld_nodes = []
            # Make sure we exclude ice shelf cavity nodes from element mean
            # (an Element can be a non-cavity element and still have up to 2
            # cavity nodes)
            for i in range(3):
                if (mask_cavities
                        and not elm.cavity_nodes[i]) or (not mask_cavities):
                    node = elm.nodes[i]
                    density_sfc = density_end[node.id]
                    temp_depth = node.depth
                    curr_node = node.below
                    while True:
                        if curr_node is None:
                            mld_nodes.append(temp_depth)
                            break
                        if density_end[
                                curr_node.id] >= density_sfc + density_anom:
                            mld_nodes.append(curr_node.depth)
                            break
                        temp_depth = curr_node.depth
                        curr_node = curr_node.below
            # For this element, save the mean mixed layer depth across
            # non-cavity nodes (up to 3)
            mld_end.append(mean(array(mld_nodes)))
    # Calculate change in mixed layer depth
    mld_change = array(mld_end) - array(mld_beg)

    if mask_cavities:
        # Get mask array of patches for ice shelf cavity elements
        mask_patches = iceshelf_mask(elements)

    # Choose colour bounds
    if limit is not None:
        bound = limit
    else:
        bound = amax(array(mld_change))

    print 'Plotting'
    # Set up plot
    fig = figure(figsize=(16, 12))
    ax = fig.add_subplot(1, 1, 1, aspect='equal')
    # Set colourmap for patches, and refer it to the values array
    img = PatchCollection(patches, cmap='RdBu_r')
    img.set_array(array(mld_change))
    img.set_edgecolor('face')
    # Add patches to plot
    ax.add_collection(img)
    if mask_cavities:
        # Set colour to light grey for patches in mask
        overlay = PatchCollection(mask_patches, facecolor=(0.6, 0.6, 0.6))
        overlay.set_edgecolor('face')
        # Add mask to plot
        ax.add_collection(overlay)

    # Configure plot
    xlim([-lat_max, lat_max])
    ylim([-lat_max, lat_max])
    ax.get_xaxis().set_ticks([])
    ax.get_yaxis().set_ticks([])
    axis('off')
    title('Change in JJA mixed layer depth (m)\n2091-2100 vs 2006-2015',
          fontsize=font_sizes[0])
    cbar = colorbar(img)
    cbar.ax.tick_params(labelsize=font_sizes[2])
    img.set_clim(vmin=-bound, vmax=bound)

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
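
# A one-column sketch (illustration only, not from the original script) of the
# mixed layer depth criterion above: descend through a density profile until
# potential density first exceeds the surface value by density_anom
# (0.03 kg/m^3, as in Sallee et al 2013); if it never does, take the deepest
# level. mixed_layer_depth and the profile values are made up for this example.
import numpy as np

def mixed_layer_depth(depth, density, density_anom=0.03):
    # depth and density are 1D arrays ordered from surface to bottom
    density_sfc = density[0]
    for k in range(1, len(depth)):
        if density[k] >= density_sfc + density_anom:
            return depth[k]
    return depth[-1]

depth = np.array([0.0, 10.0, 50.0, 100.0, 200.0, 500.0])
density = np.array([1027.00, 1027.01, 1027.02, 1027.05, 1027.20, 1027.60])
print(mixed_layer_depth(depth, density))   # 100.0 with these made-up values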
Example #17
def timeseries_watermass_meltpotential(mesh_path, output_path, start_year,
                                       end_year, log_file):

    # Titles for each sector
    sector_names = [
        'Filchner-Ronne Ice Shelf Cavity', 'Eastern Weddell Region Cavities',
        'Amery Ice Shelf Cavity', 'Australian Sector Cavities',
        'Ross Sea Cavities', 'Amundsen Sea Cavities',
        'Bellingshausen Sea Cavities', 'Larsen Ice Shelf Cavities',
        'All Ice Shelf Cavities'
    ]
    num_sectors = len(sector_names)
    # Water masses to consider
    wm_names = ['ISW', 'HSSW', 'LSSW', 'AASW', 'MCDW', 'CDW']
    num_watermasses = len(wm_names)
    # Only consider elements south of 30S
    circumpolar = True
    # Don't make second copies of elements that cross 180E
    cross_180 = False
    # Naming conventions for FESOM output files
    file_head = output_path + 'MK44005.'
    file_tail = '.oce.mean.nc'
    num_years = end_year - start_year + 1
    # Specific heat of seawater (J/K/kg)
    cpw = 4180
    # Coefficients for in-situ freezing point calculation
    a = -0.0575  # Salinity dependence (K/psu)
    b = 0.0901  # Surface freezing point at 0 salinity (C)
    c = 7.61e-4  # Depth dependence (K/m)

    print 'Building mesh'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    print 'Categorising elements into sectors'
    location_flag = zeros([num_sectors, len(elements)])
    for i in range(len(elements)):
        elm = elements[i]
        # Make sure we're actually in an ice shelf cavity
        if elm.cavity:
            # Figure out which sector this ice shelf element falls into
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                # Filchner-Ronne
                location_flag[0, i] = 1
            elif lon >= -30 and lon < 65:
                # Eastern Weddell region
                location_flag[1, i] = 1
            elif lon >= 65 and lon < 76:
                # Amery
                location_flag[2, i] = 1
            elif lon >= 76 and lon < 165 and lat >= -74:
                # Australian sector
                location_flag[3, i] = 1
            elif (lon >= 155 and lon < 165
                  and lat < -74) or (lon >= 165) or (lon < -140):
                # Ross Sea
                location_flag[4, i] = 1
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98
                                                  and lat < -73.1):
                # Amundsen Sea
                location_flag[5, i] = 1
            elif (lon >= -104 and lon < -98
                  and lat >= -73.1) or (lon >= -98 and lon < -66
                                        and lat >= -75):
                # Bellingshausen Sea
                location_flag[6, i] = 1
            elif lon >= -66 and lon < -59 and lat >= -74:
                # Larsen Ice Shelves
                location_flag[7, i] = 1
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  #return
            # All ice shelf elements are in Total Antarctica
            location_flag[8, i] = 1

    print 'Calculating melt potential'
    mp = zeros([num_watermasses, num_sectors, num_years])
    for year in range(start_year, end_year + 1):
        print 'Processing ' + str(year)
        # Read temperature and salinity
        id = Dataset(file_head + str(year) + file_tail, 'r')
        temp = mean(id.variables['temp'][:, :], axis=0)
        salt = mean(id.variables['salt'][:, :], axis=0)
        id.close()
        # Loop over elements
        for i in range(len(elements)):
            elm = elements[i]
            # Check if we're in an ice shelf cavity
            if elm.cavity:
                # Get area of 2D element
                area = elm.area()
                nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
                # Loop downward
                while True:
                    if nodes[0].below is None or nodes[
                            1].below is None or nodes[2].below is None:
                        # Reached the bottom
                        break
                    # Calculate average temperature, salinity, depth, and
                    # layer thickness for this 3D triangular prism
                    temp_vals = []
                    salt_vals = []
                    z_vals = []
                    dz_vals = []
                    for n in range(3):
                        temp_vals.append(temp[nodes[n].id])
                        salt_vals.append(salt[nodes[n].id])
                        z_vals.append(nodes[n].depth)
                        temp_vals.append(temp[nodes[n].below.id])
                        salt_vals.append(salt[nodes[n].below.id])
                        z_vals.append(nodes[n].below.depth)
                        dz_vals.append(
                            abs(nodes[n].depth - nodes[n].below.depth))
                        # Get ready for next iteration of loop
                        nodes[n] = nodes[n].below
                    curr_temp = mean(array(temp_vals))
                    curr_salt = mean(array(salt_vals))
                    curr_z = mean(array(z_vals))
                    curr_volume = area * mean(array(dz_vals))
                    # Get surface freezing point at this salinity
                    curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(
                        curr_salt**3) - 2.155e-4 * curr_salt**2
                    # Figure out what water mass this is
                    if curr_temp < curr_tfrz:
                        # ISW
                        wm_key = 0
                    elif curr_salt < 34:
                        # AASW
                        wm_key = 3
                    elif curr_temp > 0:
                        # CDW
                        wm_key = 5
                    elif curr_temp > -1.5:
                        # MCDW
                        wm_key = 4
                    elif curr_salt < 34.5:
                        # LSSW
                        wm_key = 2
                    else:
                        # HSSW
                        wm_key = 1
                    # Integrate melt potential
                    # First need (potential) density
                    curr_rho = unesco(curr_temp, curr_salt, 0)
                    # And in-situ freezing point
                    curr_tfrz_insitu = a * curr_salt + b + c * (-1 * curr_z)
                    curr_sectors = 0
                    for sector in range(num_sectors):
                        if location_flag[sector, i] == 1:
                            curr_sectors += 1
                            mp[wm_key, sector, year -
                               start_year] += (curr_temp - curr_tfrz_insitu
                                               ) * curr_volume * cpw * curr_rho
                    # Should be in exactly 2 sectors (1 + total Antarctica)
                    if curr_sectors != 2:
                        print 'Wrong number of sectors for element ' + str(i)

    print 'Saving results to log file'
    f = open(log_file, 'w')
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            f.write('Melt potential of ' + wm_names[wm_key] + ' in ' +
                    sector_names[sector] + ' (J)\n')
            for t in range(num_years):
                f.write(str(mp[wm_key, sector, t]) + '\n')
    f.close()
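
# A minimal sketch (not from the original script) of the melt potential
# integrand used above: the heat a parcel holds above its in-situ freezing
# point is rho*cpw*(T - Tfrz)*V, with Tfrz = a*S + b - c*depth for depth
# positive downwards. melt_potential, the constant rho, and the parcel values
# below are assumptions for illustration; the function above gets rho from
# unesco() instead.
cpw = 4180.0    # specific heat of seawater (J/K/kg)
a = -0.0575     # salinity dependence of freezing point (K/psu)
b = 0.0901      # surface freezing point at 0 salinity (C)
c = 7.61e-4     # depth dependence (K/m)

def melt_potential(temp, salt, depth, volume, rho=1027.0):
    # temp (C), salt (psu), depth (m, positive down), volume (m^3)
    tfrz_insitu = a * salt + b - c * depth
    return rho * cpw * (temp - tfrz_insitu) * volume   # Joules

# One made-up cavity parcel: -1.5 C, 34.6 psu, 500 m deep, 1 km^3
print(melt_potential(-1.5, 34.6, 500.0, 1e9))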
Example #18
def mld_diff ():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/high_res/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp45_A_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp85_M_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp85_A_highres/output/',
                   '/short/y99/kaa561/FESOM/highres_spinup/']
    seasonal_file_beg = 'seasonal_climatology_oce_1996_2005.nc'
    seasonal_file_end = 'seasonal_climatology_oce_2091_2100.nc'
    # Titles for plotting
    expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL']
    season_names = ['DJF', 'MAM', 'JJA', 'SON']
    num_expts = len(directories)
    middle_expt = (num_expts+1)/2 - 1
    # Start and end years for each period
    beg_years = [1996, 2005]
    end_years = [2091, 2100]
    # Definition of mixed layer depth: where potential density exceeds
    # surface density by this amount (kg/m^3) as in Sallee et al 2013
    density_anom = 0.03
    # Northern boundary for plot: 64S
    nbdry = -64 + 90
    # Mesh parameters
    circumpolar = True
    mask_cavities = False
    # Maximum for colour scale in each season
    max_bound_summer = 150
    max_bound_winter = 600
    diff_bound_summer = 80
    diff_bound_winter = 1000

    print 'Building mesh'
    elements, patches = make_patches(mesh_path, circumpolar, mask_cavities)
    print 'Processing 1996-2005'
    print 'Reading data'
    id = Dataset(directory_beg + seasonal_file_beg, 'r')
    temp_nodes_beg = id.variables['temp'][:,:]
    salt_nodes_beg = id.variables['salt'][:,:]
    id.close()
    print 'Calculating density'
    density_nodes_beg = unesco(temp_nodes_beg, salt_nodes_beg, zeros(shape(temp_nodes_beg)))
    print 'Calculating mixed layer depth'
    # Set up arrays for mixed layer depth at each element, at each season
    mld_summer_beg = zeros(len(elements))
    mld_winter_beg = zeros(len(elements))
    # Loop over seasons and elements to fill these in
    for season in [0,2]:
        print '...' + season_names[season]
        mld_season = []
        for elm in elements:
            # Get mixed layer depth at each node
            mld_nodes = []
            for i in range(3):
                node = elm.nodes[i]
                density_sfc = density_nodes_beg[season,node.id]
                # Save surface depth (only nonzero in ice shelf cavities)
                depth_sfc = node.depth
                temp_depth = node.depth
                curr_node = node.below
                while True:
                    if curr_node is None:
                        # Reached the bottom
                        mld_nodes.append(temp_depth-depth_sfc)
                        break
                    if density_nodes_beg[season,curr_node.id] >= density_sfc + density_anom:
                        # Reached the critical density anomaly
                        mld_nodes.append(curr_node.depth-depth_sfc)
                        break
                    temp_depth = curr_node.depth
                    curr_node = curr_node.below
            # For this element, save the mean mixed layer depth
            mld_season.append(mean(array(mld_nodes)))
        if season == 0:
            mld_summer_beg[:] = mld_season
        elif season == 2:
            mld_winter_beg[:] = mld_season
    # Now calculate anomalies for each experiment
    mld_summer_diff = zeros([num_expts, len(elements)])
    mld_winter_diff = zeros([num_expts, len(elements)])
    for expt in range(num_expts):
        print 'Processing ' + expt_names[expt]
        print 'Reading data'
        id = Dataset(directories[expt] + seasonal_file_end, 'r')
        temp_nodes_end = id.variables['temp'][:,:]
        salt_nodes_end = id.variables['salt'][:,:]
        id.close()
        print 'Calculating density'
        density_nodes_end = unesco(temp_nodes_end, salt_nodes_end, zeros(shape(temp_nodes_end)))
        print 'Calculating mixed layer depth'
        for season in [0,2]:
            print '...' + season_names[season]
            mld_season = []
            for elm in elements:
                mld_nodes = []
                for i in range(3):
                    node = elm.nodes[i]
                    density_sfc = density_nodes_end[season,node.id]
                    depth_sfc = node.depth
                    temp_depth = node.depth
                    curr_node = node.below
                    while True:
                        if curr_node is None:
                            mld_nodes.append(temp_depth-depth_sfc)
                            break
                        if density_nodes_end[season,curr_node.id] >= density_sfc + density_anom:
                            mld_nodes.append(curr_node.depth-depth_sfc)
                            break
                        temp_depth = curr_node.depth
                        curr_node = curr_node.below
                mld_season.append(mean(array(mld_nodes)))
            if season == 0:
                mld_summer_diff[expt,:] = array(mld_season) - mld_summer_beg
            elif season == 2:
                mld_winter_diff[expt,:] = array(mld_season) - mld_winter_beg

    print 'Plotting'
    fig = figure(figsize=(24,8))
    # Summer, beginning
    ax = fig.add_subplot(2, num_expts+1, 1, aspect='equal')
    img = PatchCollection(patches, cmap='jet')
    img.set_array(mld_summer_beg)
    img.set_clim(vmin=0, vmax=max_bound_summer)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry, nbdry])
    ylim([-nbdry, nbdry])
    ax.set_xticks([])
    ax.set_yticks([])
    title(str(beg_years[0])+'-'+str(beg_years[1]), fontsize=20)
    text(-35, 0, season_names[0], fontsize=24)
    # Add a colorbar on the left
    cbaxes = fig.add_axes([0.05, 0.57, 0.02, 0.3])
    cbar = colorbar(img, cax=cbaxes, extend='max', ticks=arange(0, max_bound_summer+50, 50))
    cbar.ax.tick_params(labelsize=16)
    # Summer, anomalies for each experiment
    for expt in range(num_expts):
        ax = fig.add_subplot(2, num_expts+1, expt+2, aspect='equal')
        img = PatchCollection(patches, cmap='RdBu_r')
        img.set_array(mld_summer_diff[expt,:])
        img.set_clim(vmin=-diff_bound_summer, vmax=diff_bound_summer)
        img.set_edgecolor('face')
        ax.add_collection(img)
        xlim([-nbdry, nbdry])
        ylim([-nbdry, nbdry])
        ax.set_xticks([])
        ax.set_yticks([])
        title(expt_names[expt], fontsize=20)
        if expt == num_expts-1:
            # Add a colorbar on the right
            cbaxes = fig.add_axes([0.92, 0.57, 0.02, 0.3])
            cbar = colorbar(img, cax=cbaxes, extend='both', ticks=arange(-diff_bound_summer, diff_bound_summer+40, 40))
            cbar.ax.tick_params(labelsize=16)
    # Winter, beginning
    ax = fig.add_subplot(2, num_expts+1, num_expts+2, aspect='equal')
    img = PatchCollection(patches, cmap='jet')
    img.set_array(mld_winter_beg)
    img.set_clim(vmin=0, vmax=max_bound_winter)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry, nbdry])
    ylim([-nbdry, nbdry])
    ax.set_xticks([])
    ax.set_yticks([])
    text(-35, 0, season_names[2], fontsize=24)
    # Add a colorbar on the left
    cbaxes = fig.add_axes([0.05, 0.13, 0.02, 0.3])
    cbar = colorbar(img, cax=cbaxes, extend='max', ticks=arange(0, max_bound_winter+200, 200))
    cbar.ax.tick_params(labelsize=16)
    # Winter, anomalies for each experiment
    for expt in range(num_expts):
        ax = fig.add_subplot(2, num_expts+1, num_expts+expt+3, aspect='equal')
        img = PatchCollection(patches, cmap='RdBu_r')
        img.set_array(mld_winter_diff[expt,:])
        img.set_clim(vmin=-diff_bound_winter, vmax=diff_bound_winter)
        img.set_edgecolor('face')
        ax.add_collection(img)
        xlim([-nbdry, nbdry])
        ylim([-nbdry, nbdry])
        ax.set_xticks([])
        ax.set_yticks([])
        if expt == middle_expt:
            # Add subtitle for anomalies as xlabel
            xlabel(str(end_years[0])+'-'+str(end_years[1])+' anomalies', fontsize=20)
        if expt == num_expts-1:
            # Add a colorbar on the right
            cbaxes = fig.add_axes([0.92, 0.13, 0.02, 0.3])
            cbar = colorbar(img, cax=cbaxes, extend='both', ticks=arange(-diff_bound_winter, diff_bound_winter+500, 500))
            cbar.ax.tick_params(labelsize=16)
    suptitle('Mixed layer depth (m)', fontsize=30)
    subplots_adjust(wspace=0.025, hspace=0.025)
    fig.show()
    fig.savefig('mld_scenarios.png')
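
# The water-column loops above rely on each node having a .below attribute
# pointing to the node underneath (None at the sea floor). A minimal mock of
# that interface, just to show the traversal pattern; Node3D and the depths
# below are invented and are not the fesom_grid classes.
class Node3D:
    def __init__(self, node_id, depth, below=None):
        self.id = node_id
        self.depth = depth
        self.below = below

# Build a three-level column from the bottom up
bottom = Node3D(2, 500.0)
middle = Node3D(1, 100.0, below=bottom)
surface = Node3D(0, 10.0, below=middle)

# Walk downward, collecting depths, until we fall off the bottom
depths = []
curr_node = surface
while curr_node is not None:
    depths.append(curr_node.depth)
    curr_node = curr_node.below
# depths is now [10.0, 100.0, 500.0]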
Example #19
def rcp_ts_distribution (key=1):

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/high_res/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp45_A_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp85_M_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp85_A_highres/output/',
                   '/short/y99/kaa561/FESOM/highres_spinup/']
    file_beg = 'annual_avg.oce.mean.1996.2005.nc'
    file_end = 'annual_avg.oce.mean.2091.2100.nc'
    # Titles for plotting
    expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL']
    num_expts = len(directories)
    # Start and end years for each period
    beg_years = [1996, 2005]
    end_years = [2091, 2100]
    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins = 1000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 35.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    if key==1:
        min_salt_plot = 32.25
        max_salt_plot = 35
        min_temp_plot = -3
        max_temp_plot = 3.25
    elif key==2:
        min_salt_plot = 34
        max_salt_plot = 35
        min_temp_plot = -2.5
        max_temp_plot = -1
    # FESOM grid generation parameters
    circumpolar = False
    cross_180 = False

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:])
    # Set up 3D array of experiment x temperature bins x salinity bins to hold
    # average depth of water masses, weighted by volume
    ts_vals = zeros([num_expts+1, size(temp_centres), size(salt_centres)])
    # Also array to integrate volume of each bin
    volume = zeros([num_expts+1, size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity as seen by
    # sea ice model
    freezing_pt = -0.0575*salt_centres + 1.7105e-3*sqrt(salt_centres**3) - 2.155e-4*salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres)))-1000
    # Density contours to plot
    if key == 1:
        density_lev = arange(25.8, 28.4, 0.2)
    elif key == 2:
        density_lev = arange(27.2, 28.4, 0.2)

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    print 'Reading data'
    # 1996-2005
    id = Dataset(directory_beg + file_beg)
    n3d = id.variables['temp'].shape[1]
    temp_nodes = empty([num_expts+1, n3d])
    salt_nodes = empty([num_expts+1, n3d])
    temp_nodes[0,:] = id.variables['temp'][0,:]
    salt_nodes[0,:] = id.variables['salt'][0,:]
    id.close()
    # Loop over RCPs
    for expt in range(num_expts):
        id = Dataset(directories[expt] + file_end)
        temp_nodes[expt+1,:] = id.variables['temp'][0,:]
        salt_nodes[expt+1,:] = id.variables['salt'][0,:]
        id.close()

    print 'Binning elements'
    for elm in elements:
        # See if we're in the region of interest
        if all(elm.lat < nbdry):
            # Get area of 2D triangle
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    # We've reached the bottom
                    break
                # Calculate average temperature and salinity for each
                # experiment, as well as depth and layer thickness, over this
                # 3D triangular prism.
                temp_vals = empty([num_expts+1, 6])
                salt_vals = empty([num_expts+1, 6])
                depth_vals = empty(6)
                dz = empty(3)
                for i in range(3):
                    # Loop over experiments
                    for expt in range(num_expts+1):
                        # Average temperature over 6 nodes
                        temp_vals[expt,i] = temp_nodes[expt,nodes[i].id]
                        temp_vals[expt,i+3] = temp_nodes[expt,nodes[i].below.id]
                        salt_vals[expt,i] = salt_nodes[expt,nodes[i].id]
                        salt_vals[expt,i+3] = salt_nodes[expt,nodes[i].below.id]
                    # Average depth over 6 nodes
                    depth_vals[i] = nodes[i].depth
                    depth_vals[i+3] = nodes[i].below.depth
                    # Average dz over 3 vertical edges
                    dz[i] = abs(nodes[i].depth - nodes[i].below.depth)
                    # Get ready for next repetition of loop
                    nodes[i] = nodes[i].below
                temp_elm = mean(temp_vals, axis=1)
                salt_elm = mean(salt_vals, axis=1)
                depth_elm = mean(depth_vals)
                # Calculate volume of 3D triangular prism
                curr_volume = area*mean(dz)
                # Loop over experiments again
                for expt in range(num_expts+1):
                    # Figure out which bins this falls into
                    temp_index = nonzero(temp_bins > temp_elm[expt])[0][0] - 1
                    salt_index = nonzero(salt_bins > salt_elm[expt])[0][0] - 1
                    # Integrate depth*volume in this bin
                    ts_vals[expt, temp_index, salt_index] += depth_elm*curr_volume
                    volume[expt, temp_index, salt_index] += curr_volume
    # Mask bins with zero volume
    ts_vals = ma.masked_where(volume==0, ts_vals)
    volume = ma.masked_where(volume==0, volume)
    # Convert depths from integrals to volume-averages
    ts_vals /= volume

    # Find the maximum depth for plotting
    if key == 1:
        max_depth = amax(ts_vals)
    elif key == 2:
        temp_start = nonzero(temp_bins > min_temp_plot)[0][0]-2
        temp_end = nonzero(temp_bins > max_temp_plot)[0][0]
        salt_start = nonzero(salt_bins > min_salt_plot)[0][0]-2
        salt_end = nonzero(salt_bins > max_salt_plot)[0][0]
        max_depth = amax(ts_vals[:,temp_start:temp_end, salt_start:salt_end])
    # Make a nonlinear colour scale
    bounds = linspace(0, max_depth**(1.0/2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)

    print 'Plotting'
    fig = figure(figsize=(24,6))
    gs = GridSpec(1,num_expts+1)
    gs.update(left=0.04, right=0.99, bottom=0.12, top=0.86)
    for expt in range(num_expts+1):
        ax = subplot(gs[0,expt])
        img = pcolor(salt_centres, temp_centres, ts_vals[expt,:,:], norm=norm, vmin=0, vmax=max_depth, cmap='jet')
        plot(salt_centres, freezing_pt, color='black', linestyle='dashed')
        cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
        clabel(cs, inline=1, fontsize=10, color=(0.6,0.6,0.6), fmt='%1.1f')
        xlim([min_salt_plot, max_salt_plot])
        ylim([min_temp_plot, max_temp_plot])
        ax.tick_params(axis='x', labelsize=12)
        ax.tick_params(axis='y', labelsize=12)
        if expt == 0:
            xlabel('Salinity (psu)', fontsize=14)
            ylabel(r'Temperature ($^{\circ}$C)', fontsize=14)
            title(str(beg_years[0]) + '-' + str(beg_years[1]), fontsize=20)
        elif expt == 1:
            title(expt_names[expt-1] + ' (' + str(end_years[0]) + '-' + str(end_years[1]) + ')', fontsize=20)
        else:
            title(expt_names[expt-1], fontsize=20)
        if expt == num_expts:
            # Add a horizontal colourbar below
            cbaxes = fig.add_axes([0.35, 0.05, 0.3, 0.02])
            if key == 1:
                cbar = colorbar(img, cax=cbaxes, orientation='horizontal', ticks=[0,50,100,200,500,1000,2000,4000])
            elif key == 2:
                cbar = colorbar(img, cax=cbaxes, orientation='horizontal', ticks=[0,50,100,200,500,1000,2000])
            cbar.ax.tick_params(labelsize=14)
    # Add the main title
    if key == 1:
        suptitle(r'Water masses south of 65$^{\circ}$S: depth (m)', fontsize=24)
    elif key == 2:
        suptitle(r'Water masses south of 65$^{\circ}$S, zoomed into HSSW: depth (m)', fontsize=24)
    fig.show()
    if key == 1:
        fig.savefig('ts_distribution_full.png')
    elif key ==2:
        fig.savefig('ts_distribution_hssw.png')
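
# A compact sketch (illustration only) of the T-S binning step above: find the
# temperature and salinity bin each parcel falls into, accumulate depth*volume
# and volume there, then divide to get the volume-averaged depth per bin.
# np.searchsorted(bins, x, side='right') - 1 matches the
# nonzero(bins > x)[0][0] - 1 lookup used above for values inside the bin
# range. The parcels are invented for this example.
import numpy as np

temp_bins = np.linspace(-3.1, 3.8, num=101)
salt_bins = np.linspace(32.3, 35.1, num=101)
ts_depth = np.zeros([temp_bins.size - 1, salt_bins.size - 1])
ts_volume = np.zeros_like(ts_depth)

# (temperature, salinity, mean depth, volume) of two made-up parcels
parcels = [(-1.8, 34.6, 400.0, 2.0e9), (0.5, 34.3, 1500.0, 5.0e9)]
for temp, salt, depth, volume in parcels:
    temp_index = np.searchsorted(temp_bins, temp, side='right') - 1
    salt_index = np.searchsorted(salt_bins, salt, side='right') - 1
    ts_depth[temp_index, salt_index] += depth * volume
    ts_volume[temp_index, salt_index] += volume

# Mask empty bins and convert the integrals to volume averages
ts_depth = np.ma.masked_where(ts_volume == 0, ts_depth)
ts_volume = np.ma.masked_where(ts_volume == 0, ts_volume)
mean_depth = ts_depth / ts_volume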
Example #20
def mip_ts_distribution (roms_grid, roms_file, fesom_mesh_path_lr, fesom_file_lr, fesom_mesh_path_hr, fesom_file_hr):

    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins = 1000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 35.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    min_salt_plot = 33.25
    max_salt_plot = 35.1
    min_temp_plot = -3
    max_temp_plot = 3.8
    # FESOM grid generation parameters
    circumpolar = False
    cross_180 = False
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:])
    # Set up 2D arrays of temperature bins x salinity bins to hold average
    # depth of water masses, weighted by volume
    ts_vals_roms = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_fesom_lr = zeros([size(temp_centres), size(salt_centres)])
    ts_vals_fesom_hr = zeros([size(temp_centres), size(salt_centres)])
    # Also arrays to integrate volume
    volume_roms = zeros([size(temp_centres), size(salt_centres)])
    volume_fesom_lr = zeros([size(temp_centres), size(salt_centres)])
    volume_fesom_hr = zeros([size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity as seen by
    # each sea ice model
    freezing_pt_roms = salt_centres/(-18.48 + 18.48/1e3*salt_centres)
    freezing_pt_fesom = -0.0575*salt_centres + 1.7105e-3*sqrt(salt_centres**3) - 2.155e-4*salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres)))-1000
    # Density contours to plot
    density_lev = arange(26.6, 28.4, 0.2)

    print 'Processing ROMS'
    # Read ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_zice = id.variables['zice'][:,:]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get integrands on 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get volume integrand
    dV = roms_dx*roms_dy*roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0,:,:,:]
    roms_salt = id.variables['salt'][0,:,:,:]
    id.close()
    # Loop over 2D grid boxes
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask
            if roms_temp[0,j,i] is ma.masked:
                continue
            # Check if we're in the region of interest
            if roms_lat[j,i] < nbdry:
                # Loop downward
                for k in range(N):
                    # Figure out which bins this falls into
                    temp_index = nonzero(temp_bins > roms_temp[k,j,i])[0][0] - 1
                    salt_index = nonzero(salt_bins > roms_salt[k,j,i])[0][0] - 1
                    # Integrate depth*dV in this bin
                    ts_vals_roms[temp_index, salt_index] += -roms_z[k,j,i]*dV[k,j,i]
                    volume_roms[temp_index, salt_index] += dV[k,j,i]
    # Mask bins with zero volume
    ts_vals_roms = ma.masked_where(volume_roms==0, ts_vals_roms)
    volume_roms = ma.masked_where(volume_roms==0, volume_roms)
    # Convert depths from integrals to volume-averages
    ts_vals_roms /= volume_roms

    print 'Processing low-res FESOM'
    # Make FESOM grid elements
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar, cross_180)
    # Read temperature and salinity at each 3D node
    id = Dataset(fesom_file_lr, 'r')
    fesom_temp_lr = id.variables['temp'][0,:]
    fesom_salt_lr = id.variables['salt'][0,:]
    id.close()
    # Loop over elements
    for elm in elements_lr:
        # See if we're in the region of interest
        if all(elm.lat < nbdry):
            # Get area of 2D triangle
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    # We've reached the bottom
                    break
                # Calculate average temperature, salinity, depth, and layer
                # thickness over this 3D triangular prism
                temp_vals = []
                salt_vals = []
                depth_vals = []
                dz = []
                for i in range(3):
                    # Average temperature over 6 nodes
                    temp_vals.append(fesom_temp_lr[nodes[i].id])
                    temp_vals.append(fesom_temp_lr[nodes[i].below.id])
                    # Average salinity over 6 nodes
                    salt_vals.append(fesom_salt_lr[nodes[i].id])
                    salt_vals.append(fesom_salt_lr[nodes[i].below.id])
                    # Average depth over 6 nodes
                    depth_vals.append(nodes[i].depth)
                    depth_vals.append(nodes[i].below.depth)
                    # Average dz over 3 vertical edges
                    dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                    # Get ready for next repetition of loop
                    nodes[i] = nodes[i].below
                temp_elm = mean(array(temp_vals))
                salt_elm = mean(array(salt_vals))
                depth_elm = mean(array(depth_vals))
                # Calculate volume of 3D triangular prism
                volume = area*mean(array(dz))
                # Figure out which bins this falls into
                temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                # Integrate depth*volume in this bin
                ts_vals_fesom_lr[temp_index, salt_index] += depth_elm*volume
                volume_fesom_lr[temp_index, salt_index] += volume
    # Mask bins with zero volume
    ts_vals_fesom_lr = ma.masked_where(volume_fesom_lr==0, ts_vals_fesom_lr)
    volume_fesom_lr = ma.masked_where(volume_fesom_lr==0, volume_fesom_lr)
    # Convert depths from integrals to volume-averages
    ts_vals_fesom_lr /= volume_fesom_lr

    print 'Processing high-res FESOM'
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar, cross_180)
    id = Dataset(fesom_file_hr, 'r')
    fesom_temp_hr = id.variables['temp'][0,:]
    fesom_salt_hr = id.variables['salt'][0,:]
    id.close()
    for elm in elements_hr:
        if all(elm.lat < nbdry):
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    break
                temp_vals = []
                salt_vals = []
                depth_vals = []
                dz = []
                for i in range(3):
                    temp_vals.append(fesom_temp_hr[nodes[i].id])
                    temp_vals.append(fesom_temp_hr[nodes[i].below.id])
                    salt_vals.append(fesom_salt_hr[nodes[i].id])
                    salt_vals.append(fesom_salt_hr[nodes[i].below.id])
                    depth_vals.append(nodes[i].depth)
                    depth_vals.append(nodes[i].below.depth)
                    dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                    nodes[i] = nodes[i].below
                temp_elm = mean(array(temp_vals))
                salt_elm = mean(array(salt_vals))
                depth_elm = mean(array(depth_vals))
                volume = area*mean(array(dz))
                temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                ts_vals_fesom_hr[temp_index, salt_index] += depth_elm*volume
                volume_fesom_hr[temp_index, salt_index] += volume
    ts_vals_fesom_hr = ma.masked_where(volume_fesom_hr==0, ts_vals_fesom_hr)
    volume_fesom_hr = ma.masked_where(volume_fesom_hr==0, volume_fesom_hr)
    ts_vals_fesom_hr /= volume_fesom_hr

    # Find the maximum depth for plotting
    max_depth = amax(array([amax(ts_vals_roms), amax(ts_vals_fesom_lr), amax(ts_vals_fesom_hr)]))
    # Make a nonlinear scale
    bounds = linspace(0, max_depth**(1.0/2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)
    # Set labels for density contours
    manual_locations = [(33.4, 3.0), (33.65, 3.0), (33.9, 3.0), (34.2, 3.0), (34.45, 3.5), (34.65, 3.25), (34.9, 3.0), (35, 1.5)]

    print "Plotting"
    fig = figure(figsize=(20,9))
    # ROMS
    ax = fig.add_subplot(1, 3, 1)
    pcolor(salt_centres, temp_centres, ts_vals_roms, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
    # Add surface freezing point line
    plot(salt_centres, freezing_pt_roms, color='black', linestyle='dashed')
    # Add density contours
    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=20)
    ylabel(r'Temperature ($^{\circ}$C)', fontsize=20)
    title('MetROMS', fontsize=24)
    # FESOM low-res
    ax = fig.add_subplot(1, 3, 2)
    img = pcolor(salt_centres, temp_centres, ts_vals_fesom_lr, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
    plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed')
    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=20)
    title('FESOM (low-res)', fontsize=24)
    # FESOM high-res
    ax = fig.add_subplot(1, 3, 3)
    img = pcolor(salt_centres, temp_centres, ts_vals_fesom_hr, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
    plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed')
    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
    clabel(cs, inline=1, fontsize=14, color=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
    xlim([min_salt_plot, max_salt_plot])
    ylim([min_temp_plot, max_temp_plot])
    ax.tick_params(axis='x', labelsize=16)
    ax.tick_params(axis='y', labelsize=16)
    xlabel('Salinity (psu)', fontsize=20)
    title('FESOM (high-res)', fontsize=24)
    # Add a colourbar on the right
    cbaxes = fig.add_axes([0.93, 0.2, 0.02, 0.6])
    cbar = colorbar(img, cax=cbaxes, ticks=[0,50,100,200,500,1000,2000,4000])
    cbar.ax.tick_params(labelsize=18)
    # Add the main title
    suptitle(r'Water masses south of 65$^{\circ}$S: depth (m), 2002-2016 average', fontsize=30)
    subplots_adjust(wspace=0.1)
    fig.show()
    fig.savefig('ts_distribution_orig.png')
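
# A standalone sketch (not part of the original function) of the nonlinear
# colour scale used above: boundaries spaced evenly in depth**(1/2.5) and then
# raised back to the 2.5 power, so shallow depths get most of the colour
# resolution. The random field is purely for illustration.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm

max_depth = 4000.0
bounds = np.linspace(0, max_depth**(1.0/2.5), num=100)**2.5
norm = BoundaryNorm(boundaries=bounds, ncolors=256)

data = np.random.uniform(0, max_depth, size=(50, 50))
fig, ax = plt.subplots()
img = ax.pcolormesh(data, norm=norm, cmap='jet')
fig.colorbar(img, ticks=[0, 50, 100, 200, 500, 1000, 2000, 4000])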