Example #1
def common_grid(roms_file, cice_file, out_file):

    # Resolution of common grid (degrees, same for lat and lon)
    res = 0.25
    # Northern boundary to interpolate to
    nbdry = -50
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0
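    # Number of vertical levels in the ROMS output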
    N = 31

    print 'Calculating grids'

    # Make the latitude and longitude arrays for the common grid
    lon_common = arange(-180, 180 + res, res)
    lat_common = arange(-90, nbdry + res, res)
    # Get a 2D version of each to calculate dx and dy in metres
    lon_2d, lat_2d = meshgrid(lon_common, lat_common)
    # dx = r*cos(lat)*dlon where lat and dlon (i.e. res) are in radians
    dx = r * cos(lat_2d * deg2rad) * res * deg2rad
    # dy = r*dlat where dlat (i.e. res) is in radians
    # This is constant so reshape to an array of the right dimensions
    dy = zeros(shape(dx)) + r * res * deg2rad

    # Read the ROMS grid
    id = Dataset(roms_file, 'r')
    # We only need lat and lon on the rho grid
    lon_rho = id.variables['lon_rho'][:, :]
    lat_rho = id.variables['lat_rho'][:, :]
    # Get shape of u and v grids
    u_shape = id.variables['lon_u'].shape
    v_shape = id.variables['lon_v'].shape
    # Read land mask
    mask_roms = id.variables['mask_rho'][:, :]
    # Mask out ice shelves too
    zice = id.variables['zice'][:, :]
    mask_roms[zice != 0] = 0.0
    # Read angle (for rotation of vector components)
    angle_roms = id.variables['angle'][:, :]
    # Get time as an array of Date objects
    time_id = id.variables['ocean_time']
    time = num2date(time_id[:],
                    units=time_id.units,
                    calendar=time_id.calendar.lower())
    id.close()

    # Read the CICE grid
    id = Dataset(cice_file, 'r')
    # We only need lat and lon on the velocity grid
    lon_cice = id.variables['ULON'][:, :]
    lat_cice = id.variables['ULAT'][:, :]
    # Read angle (for rotation of vector components)
    angle_cice = id.variables['ANGLE'][:, :]
    id.close()

    # Make sure longitude is between -180 and 180
    index = lon_rho > 180
    lon_rho[index] = lon_rho[index] - 360
    index = lon_cice > 180
    lon_cice[index] = lon_cice[index] - 360

    print 'Counting months'
    # Assume we start at the beginning of a year
    # Figure out how many complete years have happened since then
    num_full_years = time[-1].year - time[0].year
    if time[-1].month == 12 and time[-1].day in range(29, 31 + 1):
        # We happen to end at the very end of a year
        num_full_years += 1
        num_extra_months = 0
    else:
        # Count the complete months that have happened this year
        num_extra_months = time[-1].month - 1
        # Don't bother with the hassle of considering cases where we end at
        # the very end of a month. Just ignore the month.
    num_months = 12 * num_full_years + num_extra_months

    print 'Interpolating land mask to new grid'
    mask_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho,
                                     mask_roms)
    mask_common[isnan(mask_common)] = 0
    # Round to 0 or 1 using a threshold of 0.5
    mask_common[mask_common < 0.5] = 0
    mask_common[mask_common >= 0.5] = 1

    #    print 'Setting up ' + out_file
    #    id = Dataset(out_file, 'w')
    #    id.createDimension('longitude', size(lon_common))
    #    id.createDimension('latitude', size(lat_common))
    #    id.createDimension('time', None)
    #    id.createVariable('longitude', 'f8', ('longitude'))
    #    id.variables['longitude'].units = 'degrees'
    #    id.variables['longitude'][:] = lon_common
    #    id.createVariable('latitude', 'f8', ('latitude'))
    #    id.variables['latitude'].units = 'degrees'
    #    id.variables['latitude'][:] = lat_common
    #    id.createVariable('time', 'f8', ('time'))
    #    id.variables['time'].units = 'months'
    #    id.createVariable('mask', 'f8', ('latitude', 'longitude'))
    #    id.variables['mask'].units = '1'
    #    id.variables['mask'][:,:] = mask_common
    #    id.createVariable('sst', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['sst'].long_name = 'sea surface temperature'
    #    id.variables['sst'].units = 'C'
    #    id.createVariable('sss', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['sss'].long_name = 'sea surface salinity'
    #    id.variables['sss'].units = 'psu'
    #    id.createVariable('shflux', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['shflux'].long_name = 'surface heat flux into ocean'
    #    id.variables['shflux'].units = 'W/m^2'
    #    id.createVariable('ssflux', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['ssflux'].long_name = 'surface virtual salinity flux into ocean'
    #    id.variables['ssflux'].units = 'psu m/s'
    #    id.createVariable('aice', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['aice'].long_name = 'sea ice concentration'
    #    id.variables['aice'].units = '1'
    #    id.createVariable('hice', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['hice'].long_name = 'sea ice thickness'
    #    id.variables['hice'].units = 'm'
    #    id.createVariable('uocn', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['uocn'].long_name = 'ocean surface velocity eastward'
    #    id.variables['uocn'].units = 'm/s'
    #    id.createVariable('vocn', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['vocn'].long_name = 'ocean surface velocity northward'
    #    id.variables['vocn'].units = 'm/s'
    #    id.createVariable('uice', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['uice'].long_name = 'sea ice velocity eastward'
    #    id.variables['uice'].units = 'm/s'
    #    id.createVariable('vice', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['vice'].long_name = 'sea ice velocity northward'
    #    id.variables['vice'].units = 'm/s'
    #    id.createVariable('sustr', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['sustr'].long_name = 'zonal surface stress'
    #    id.variables['sustr'].units = 'N/m^2'
    #    id.createVariable('svstr', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['svstr'].long_name = 'meridional surface stress'
    #    id.variables['svstr'].units = 'N/m^2'
    #    id.createVariable('curl_str', 'f8', ('time', 'latitude', 'longitude'))
    #    id.variables['curl_str'].long_name = 'curl of surface stress'
    #    id.variables['curl_str'].units = 'N/m^3'
    #    id.close()

    # Loop over months
    # Note: this particular run resumes at month 18 and appends to an output
    # file created by an earlier run, hence the setup block above is
    # commented out
    for month in range(18, num_months):
        print 'Processing month ' + str(month + 1) + ' of ' + str(num_months)
        id = Dataset(out_file, 'a')
        # Write time value for this month
        id.variables['time'][month] = month + 1

        print '...sea surface temperature'
        # Get monthly average of 3D variable
        temp_roms = monthly_avg_roms(
            roms_file,
            'temp',
            [N, size(lon_rho, 0), size(lon_rho, 1)],
            month % 12,
            instance=month / 12 + 1)
        # Select surface layer
        sst_roms = temp_roms[-1, :, :]
        # Interpolate to common grid
        sst_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                        lat_rho, sst_roms)
        # Apply land mask
        sst = ma.masked_where(mask_common == 0, sst_common)
        # Write to file
        id.variables['sst'][month, :, :] = sst

        print '...sea surface salinity'
        # Get monthly average of 3D variable
        salt_roms = monthly_avg_roms(
            roms_file,
            'salt',
            [N, size(lon_rho, 0), size(lon_rho, 1)],
            month % 12,
            instance=month / 12 + 1)
        # Select surface layer
        sss_roms = salt_roms[-1, :, :]
        # Interpolate to common grid
        sss_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                        lat_rho, sss_roms)
        # Apply land mask
        sss = ma.masked_where(mask_common == 0, sss_common)
        # Write to file
        id.variables['sss'][month, :, :] = sss

        print '...surface heat flux'
        # Get monthly average
        shflux_roms = monthly_avg_roms(roms_file,
                                       'shflux',
                                       shape(lon_rho),
                                       month % 12,
                                       instance=month / 12 + 1)
        # Interpolate to common grid
        shflux_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                           lat_rho, shflux_roms)
        # Apply land mask
        shflux = ma.masked_where(mask_common == 0, shflux_common)
        # Write to file
        id.variables['shflux'][month, :, :] = shflux

        print '...surface salt flux'
        # Get monthly average
        ssflux_roms = monthly_avg_roms(roms_file,
                                       'ssflux',
                                       shape(lon_rho),
                                       month % 12,
                                       instance=month / 12 + 1)
        # Interpolate to common grid
        ssflux_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                           lat_rho, ssflux_roms)
        # Apply land mask
        ssflux = ma.masked_where(mask_common == 0, ssflux_common)
        # Write to file
        id.variables['ssflux'][month, :, :] = ssflux

        print '...sea ice concentration'
        # Get monthly average (use CICE file)
        aice_cice = monthly_avg_cice(cice_file,
                                     'aice',
                                     shape(lon_cice),
                                     month % 12,
                                     instance=month / 12 + 1)
        # Interpolate to common grid (note CICE grid not ROMS)
        aice_common = interp_roms2common(lon_common, lat_common, lon_cice,
                                         lat_cice, aice_cice)
        # Apply land mask
        aice = ma.masked_where(mask_common == 0, aice_common)
        # Write to file
        id.variables['aice'][month, :, :] = aice

        print '...sea ice thickness'
        # Get monthly average (use CICE file)
        hice_cice = monthly_avg_cice(cice_file,
                                     'hi',
                                     shape(lon_cice),
                                     month % 12,
                                     instance=month / 12 + 1)
        # Interpolate to common grid (note CICE grid not ROMS)
        hice_common = interp_roms2common(lon_common, lat_common, lon_cice,
                                         lat_cice, hice_cice)
        # Apply land mask
        hice = ma.masked_where(mask_common == 0, hice_common)
        # Write to file
        id.variables['hice'][month, :, :] = hice

        print '...surface ocean velocity vector'
        # Surface ocean velocity
        # Get monthly averages of both 3D vector components
        uocn_3d_tmp = monthly_avg_roms(roms_file,
                                       'u', [N, u_shape[0], u_shape[1]],
                                       month % 12,
                                       instance=month / 12 + 1)
        vocn_3d_tmp = monthly_avg_roms(roms_file,
                                       'v', [N, v_shape[0], v_shape[1]],
                                       month % 12,
                                       instance=month / 12 + 1)
        # Select surface layer
        uocn_tmp = uocn_3d_tmp[-1, :, :]
        vocn_tmp = vocn_3d_tmp[-1, :, :]
        # Rotate to lon-lat space (note they are on the rho grid now)
        uocn_roms, vocn_roms = rotate_vector_roms(uocn_tmp, vocn_tmp,
                                                  angle_roms)
        # Interpolate to common grid
        uocn_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                         lat_rho, uocn_roms)
        vocn_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                         lat_rho, vocn_roms)
        # Apply land mask
        uocn = ma.masked_where(mask_common == 0, uocn_common)
        vocn = ma.masked_where(mask_common == 0, vocn_common)
        # Write to file
        id.variables['uocn'][month, :, :] = uocn
        id.variables['vocn'][month, :, :] = vocn

        print '...sea ice velocity vector'
        # Sea ice velocity (CICE variable not ROMS)
        # Get monthly averages of both vector components
        uice_tmp = monthly_avg_cice(cice_file,
                                    'uvel',
                                    shape(lon_cice),
                                    month % 12,
                                    instance=month / 12 + 1)
        vice_tmp = monthly_avg_cice(cice_file,
                                    'vvel',
                                    shape(lon_cice),
                                    month % 12,
                                    instance=month / 12 + 1)
        # Rotate to lon-lat space
        uice_cice, vice_cice = rotate_vector_cice(uice_tmp, vice_tmp,
                                                  angle_cice)
        # Interpolate to common grid (note CICE grid not ROMS)
        uice_common = interp_roms2common(lon_common, lat_common, lon_cice,
                                         lat_cice, uice_cice)
        vice_common = interp_roms2common(lon_common, lat_common, lon_cice,
                                         lat_cice, vice_cice)
        # Apply land mask
        uice = ma.masked_where(mask_common == 0, uice_common)
        vice = ma.masked_where(mask_common == 0, vice_common)
        # Write to file
        id.variables['uice'][month, :, :] = uice
        id.variables['vice'][month, :, :] = vice

        print '...surface stress vector'
        # Surface stresses
        # Get monthly averages of both vector components
        sustr_tmp = monthly_avg_roms(roms_file,
                                     'sustr',
                                     u_shape,
                                     month % 12,
                                     instance=month / 12 + 1)
        svstr_tmp = monthly_avg_roms(roms_file,
                                     'svstr',
                                     v_shape,
                                     month % 12,
                                     instance=month / 12 + 1)
        # Rotate to lon-lat space (note they are on the rho grid now)
        sustr_roms, svstr_roms = rotate_vector_roms(sustr_tmp, svstr_tmp,
                                                    angle_roms)
        # Interpolate to common grid
        sustr_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                          lat_rho, sustr_roms)
        svstr_common = interp_roms2common(lon_common, lat_common, lon_rho,
                                          lat_rho, svstr_roms)
        # Apply land mask
        sustr = ma.masked_where(mask_common == 0, sustr_common)
        svstr = ma.masked_where(mask_common == 0, svstr_common)
        # Write to file
        id.variables['sustr'][month, :, :] = sustr
        id.variables['svstr'][month, :, :] = svstr

        print '...curl of surface stress vector'
        # Curl of surface stress = d/dx (svstr) - d/dy (sustr)
        # First calculate the two derivatives
        dsvstr_dx = ma.empty(shape(svstr_common))
        # Forward difference approximation
        dsvstr_dx[:, :-1] = (svstr_common[:, 1:] -
                             svstr_common[:, :-1]) / dx[:, :-1]
        # Backward difference for the last column
        dsvstr_dx[:, -1] = (svstr_common[:, -1] -
                            svstr_common[:, -2]) / dx[:, -1]
        dsustr_dy = ma.empty(shape(sustr_common))
        dsustr_dy[:-1, :] = (sustr_common[1:, :] -
                             sustr_common[:-1, :]) / dy[:-1, :]
        dsustr_dy[-1, :] = (sustr_common[-1, :] -
                            sustr_common[-2, :]) / dy[-1, :]
        curl_str = dsvstr_dx - dsustr_dy
        curl_str = ma.masked_where(mask_common == 0, curl_str)
        # Write to file
        id.variables['curl_str'][month, :, :] = curl_str

        id.close()

    print 'Finished'
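
Example #1 relies on the helper interp_roms2common to regrid fields from the curvilinear ROMS and CICE grids onto the regular quarter-degree grid; that helper is not part of this listing. A minimal sketch of what it might look like, assuming plain unstructured linear interpolation with scipy.interpolate.griddata and no special handling of the -180/180 longitude seam:

from numpy import column_stack, meshgrid
from scipy.interpolate import griddata

def interp_roms2common(lon_common, lat_common, lon_src, lat_src, data_src):
    # Build the 2D target grid from the 1D common-grid axes
    lon_2d, lat_2d = meshgrid(lon_common, lat_common)
    # Flatten the curvilinear source grid into scattered points;
    # griddata triangulates them internally
    points = column_stack((lon_src.ravel(), lat_src.ravel()))
    values = data_src.ravel()
    # Linear interpolation; target points outside the source domain come
    # back as NaN, which the caller above converts to land (mask = 0)
    return griddata(points, values, (lon_2d, lat_2d), method='linear')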
Example #2
def mip_cavity_fields(var_name, roms_grid, roms_file, fesom_mesh_path,
                      fesom_file):

    # Name of each ice shelf
    shelf_names = [
        'Larsen D Ice Shelf', 'Larsen C Ice Shelf',
        'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf',
        'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf',
        'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf',
        'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf',
        'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf',
        'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf',
        'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf',
        'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves',
        'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'
    ]
    # Beginnings of filenames for figures
    fig_heads = [
        'larsen_d', 'larsen_c', 'wilkins_georgevi_stange', 'ronne_filchner',
        'abbot', 'pig', 'thwaites', 'dotson', 'getz', 'nickerson',
        'sulzberger', 'mertz', 'totten_moscowuni', 'shackleton', 'west',
        'amery', 'prince_harald', 'baudouin_borchgrevink', 'lazarev', 'nivl',
        'fimbul_jelbart_ekstrom', 'brunt_riiser_larsen', 'ross'
    ]
    # Limits on longitude and latitude for each ice shelf
    # Note Ross crosses 180W=180E
    lon_min = [
        -62.67, -65.5, -79.17, -85, -104.17, -102.5, -108.33, -114.5, -135.67,
        -149.17, -155, 144, 115, 94.17, 80.83, 65, 33.83, 19, 12.9, 9.33,
        -10.05, -28.33, 158.33
    ]
    lon_max = [
        -59.33, -60, -66.67, -28.33, -88.83, -99.17, -103.33, -111.5, -114.33,
        -140, -145, 146.62, 123.33, 102.5, 89.17, 75, 37.67, 33.33, 16.17,
        12.88, 7.6, -10.33, -146.67
    ]
    lat_min = [
        -73.03, -69.35, -74.17, -83.5, -73.28, -75.5, -75.5, -75.33, -74.9,
        -76.42, -78, -67.83, -67.17, -66.67, -67.83, -73.67, -69.83, -71.67,
        -70.5, -70.75, -71.83, -76.33, -85
    ]
    lat_max = [
        -69.37, -66.13, -69.5, -74.67, -71.67, -74.17, -74.67, -73.67, -73,
        -75.17, -76.41, -66.67, -66.5, -64.83, -66.17, -68.33, -68.67, -68.33,
        -69.33, -69.83, -69.33, -71.5, -77
    ]
    num_shelves = len(shelf_names)

    # Constants
    sec_per_year = 365 * 24 * 3600
    deg2rad = pi / 180.0
    # Parameters for missing circle in ROMS grid
    lon_c = 50
    lat_c = -83
    radius = 10.1
    nbdry = -63 + 90
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Number of bins in each direction for vector overlay
    num_bins = 50

    print 'Reading ROMS fields'
    if var_name == 'draft':
        id = Dataset(roms_grid, 'r')
    else:
        id = Dataset(roms_file, 'r')
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    roms_mask = id.variables['mask_rho'][:, :]
    roms_zice = id.variables['zice'][:, :]
    if var_name == 'draft':
        # Switch signs
        roms_data = -1 * id.variables['zice'][:, :]
    elif var_name == 'melt':
        # Convert from m/s to m/y
        roms_data = id.variables['m'][0, :, :] * sec_per_year
    elif var_name == 'temp':
        # Bottom layer
        roms_data = id.variables['temp'][0, 0, :, :]
    elif var_name == 'salt':
        # Bottom layer
        roms_data = id.variables['salt'][0, 0, :, :]
    elif var_name in ['vsfc', 'vavg']:
        # Get angle from the grid file
        id2 = Dataset(roms_grid, 'r')
        angle = id2.variables['angle'][:, :]
        id2.close()
        if var_name == 'vsfc':
            # Read surface u and v
            u_tmp = id.variables['u'][0, -1, :, :]
            v_tmp = id.variables['v'][0, -1, :, :]
            # Interpolate to rho grid and unrotate
            u_rho, v_rho = rotate_vector_roms(u_tmp, v_tmp, angle)
        elif var_name == 'vavg':
            # Read full 3D u and v
            u_3d_tmp = id.variables['u'][0, :, :, :]
            v_3d_tmp = id.variables['v'][0, :, :, :]
            # Read bathymetry from grid file
            id2 = Dataset(roms_grid, 'r')
            roms_h = id2.variables['h'][:, :]
            id2.close()
            # Get integrands on 3D grid; we only care about dz
            dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h,
                                              roms_zice, theta_s, theta_b, hc,
                                              N)
            # Unrotate each vertical level
            u_3d = ma.empty(shape(dz))
            v_3d = ma.empty(shape(dz))
            for k in range(N):
                u_k, v_k = rotate_vector_roms(u_3d_tmp[k, :, :],
                                              v_3d_tmp[k, :, :], angle)
                u_3d[k, :, :] = u_k
                v_3d[k, :, :] = v_k
            # Vertically average u and v
            u_rho = sum(u_3d * dz, axis=0) / sum(dz, axis=0)
            v_rho = sum(v_3d * dz, axis=0) / sum(dz, axis=0)
        # Get speed
        roms_data = sqrt(u_rho**2 + v_rho**2)
    id.close()
    # Get land/zice mask
    open_ocn = copy(roms_mask)
    open_ocn[roms_zice != 0] = 0
    land_zice = ma.masked_where(open_ocn == 1, open_ocn)
    # Mask the open ocean and land out of the data field
    roms_data = ma.masked_where(roms_zice == 0, roms_data)
    # Convert grid to spherical coordinates
    roms_x = -(roms_lat + 90) * cos(roms_lon * deg2rad + pi / 2)
    roms_y = (roms_lat + 90) * sin(roms_lon * deg2rad + pi / 2)
    # Find centre in spherical coordinates
    x_c = -(lat_c + 90) * cos(lon_c * deg2rad + pi / 2)
    y_c = (lat_c + 90) * sin(lon_c * deg2rad + pi / 2)
    # Build a regular x-y grid and select the missing circle
    x_reg_roms, y_reg_roms = meshgrid(linspace(-nbdry, nbdry, num=1000),
                                      linspace(-nbdry, nbdry, num=1000))
    land_circle = zeros(shape(x_reg_roms))
    land_circle = ma.masked_where(
        sqrt((x_reg_roms - x_c)**2 + (y_reg_roms - y_c)**2) > radius,
        land_circle)

    print 'Reading FESOM fields'
    # Mask open ocean
    elements, mask_patches = make_patches(fesom_mesh_path,
                                          circumpolar=True,
                                          mask_cavities=True)
    # Unmask ice shelves
    patches = iceshelf_mask(elements)
    if var_name == 'draft':
        # Nothing more to read
        pass
    else:
        id = Dataset(fesom_file, 'r')
        if var_name == 'melt':
            # Convert from m/s to m/y
            node_data = id.variables['wnet'][0, :] * sec_per_year
        elif var_name == 'temp':
            # Read full 3D field for now
            node_data = id.variables['temp'][0, :]
        elif var_name == 'salt':
            # Read full 3D field for now
            node_data = id.variables['salt'][0, :]
        elif var_name in ['vsfc', 'vavg']:
            # The overlaid vectors are based on nodes not elements, so many
            # of the fesom_grid data structures fail to apply and we need to
            # read some of the FESOM grid files again.
            # Read the cavity flag for each 2D surface node
            fesom_cavity = []
            f = open(fesom_mesh_path + 'cavity_flag_nod2d.out', 'r')
            for line in f:
                tmp = int(line)
                if tmp == 1:
                    fesom_cavity.append(True)
                elif tmp == 0:
                    fesom_cavity.append(False)
                else:
                    print 'Problem'
                    return
            f.close()
            # Save the number of 2D nodes
            fesom_n2d = len(fesom_cavity)
            # Read rotated lat and lon for each node; also read depth which is
            # needed for vertically averaged velocity
            f = open(fesom_mesh_path + 'nod3d.out', 'r')
            f.readline()
            rlon = []
            rlat = []
            node_depth = []
            for line in f:
                tmp = line.split()
                lon_tmp = float(tmp[1])
                lat_tmp = float(tmp[2])
                node_depth_tmp = -1 * float(tmp[3])
                if lon_tmp < -180:
                    lon_tmp += 360
                elif lon_tmp > 180:
                    lon_tmp -= 360
                rlon.append(lon_tmp)
                rlat.append(lat_tmp)
                node_depth.append(node_depth_tmp)
            f.close()
            # For lat and lon, only care about the 2D nodes (the first
            # fesom_n2d indices)
            rlon = array(rlon[0:fesom_n2d])
            rlat = array(rlat[0:fesom_n2d])
            node_depth = array(node_depth)
            # Unrotate longitude
            fesom_lon, fesom_lat = unrotate_grid(rlon, rlat)
            # Calculate polar coordinates of each node
            fesom_x = -(fesom_lat + 90) * cos(fesom_lon * deg2rad + pi / 2)
            fesom_y = (fesom_lat + 90) * sin(fesom_lon * deg2rad + pi / 2)
            if var_name == 'vavg':
                # Read lists of which nodes are directly below which
                f = open(fesom_mesh_path + 'aux3d.out', 'r')
                max_num_layers = int(f.readline())
                node_columns = zeros([fesom_n2d, max_num_layers])
                for n in range(fesom_n2d):
                    for k in range(max_num_layers):
                        node_columns[n, k] = int(f.readline())
                node_columns = node_columns.astype(int)
                f.close()
            # Now we can actually read the data
            # Read full 3D field for both u and v
            node_ur_3d = id.variables['u'][0, :]
            node_vr_3d = id.variables['v'][0, :]
            if var_name == 'vsfc':
                # Only care about the first fesom_n2d nodes (surface)
                node_ur = node_ur_3d[0:fesom_n2d]
                node_vr = node_vr_3d[0:fesom_n2d]
            elif var_name == 'vavg':
                # Vertically average
                node_ur = zeros(fesom_n2d)
                node_vr = zeros(fesom_n2d)
                for n in range(fesom_n2d):
                    # Integrate udz, vdz, and dz over this water column
                    udz_col = 0
                    vdz_col = 0
                    dz_col = 0
                    for k in range(max_num_layers - 1):
                        if node_columns[n, k + 1] == -999:
                            # Reached the bottom
                            break
                        # Trapezoidal rule
                        top_id = node_columns[n, k]
                        bot_id = node_columns[n, k + 1]
                        dz_tmp = (node_depth[bot_id - 1] -
                                  node_depth[top_id - 1])
                        udz_col += 0.5 * (node_ur_3d[top_id - 1] +
                                          node_ur_3d[bot_id - 1]) * dz_tmp
                        vdz_col += 0.5 * (node_vr_3d[top_id - 1] +
                                          node_vr_3d[bot_id - 1]) * dz_tmp
                        dz_col += dz_tmp
                    # Convert from integrals to averages
                    node_ur[n] = udz_col / dz_col
                    node_vr[n] = vdz_col / dz_col
            # Unrotate
            node_u, node_v = unrotate_vector(rlon, rlat, node_ur, node_vr)
            # Calculate speed
            node_data = sqrt(node_u**2 + node_v**2)
        id.close()
    # Calculate given field at each element
    fesom_data = []
    for elm in elements:
        # For each element in an ice shelf cavity, append the mean value
        # for the 3 component Nodes
        if elm.cavity:
            if var_name == 'draft':
                # Ice shelf draft is depth of surface layer
                fesom_data.append(
                    mean([
                        elm.nodes[0].depth, elm.nodes[1].depth,
                        elm.nodes[2].depth
                    ]))
            elif var_name in ['melt', 'vsfc', 'vavg']:
                # Surface nodes (or 2D in the case of vavg)
                fesom_data.append(
                    mean([
                        node_data[elm.nodes[0].id], node_data[elm.nodes[1].id],
                        node_data[elm.nodes[2].id]
                    ]))
            elif var_name in ['temp', 'salt']:
                # Bottom nodes
                fesom_data.append(
                    mean([
                        node_data[elm.nodes[0].find_bottom().id],
                        node_data[elm.nodes[1].find_bottom().id],
                        node_data[elm.nodes[2].find_bottom().id]
                    ]))

    # Loop over ice shelves
    for index in range(num_shelves):
        print 'Processing ' + shelf_names[index]
        # Convert lat/lon bounds to polar coordinates for plotting
        x1 = -(lat_min[index] + 90) * cos(lon_min[index] * deg2rad + pi / 2)
        y1 = (lat_min[index] + 90) * sin(lon_min[index] * deg2rad + pi / 2)
        x2 = -(lat_min[index] + 90) * cos(lon_max[index] * deg2rad + pi / 2)
        y2 = (lat_min[index] + 90) * sin(lon_max[index] * deg2rad + pi / 2)
        x3 = -(lat_max[index] + 90) * cos(lon_min[index] * deg2rad + pi / 2)
        y3 = (lat_max[index] + 90) * sin(lon_min[index] * deg2rad + pi / 2)
        x4 = -(lat_max[index] + 90) * cos(lon_max[index] * deg2rad + pi / 2)
        y4 = (lat_max[index] + 90) * sin(lon_max[index] * deg2rad + pi / 2)
        # Find the new bounds on x and y
        x_min = amin(array([x1, x2, x3, x4]))
        x_max = amax(array([x1, x2, x3, x4]))
        y_min = amin(array([y1, y2, y3, y4]))
        y_max = amax(array([y1, y2, y3, y4]))
        # Now make the plot square: enlarge the smaller of delta_x and delta_y
        # so they are equal
        delta_x = x_max - x_min
        delta_y = y_max - y_min
        if delta_x > delta_y:
            diff = 0.5 * (delta_x - delta_y)
            y_min -= diff
            y_max += diff
        elif delta_y > delta_x:
            diff = 0.5 * (delta_y - delta_x)
            x_min -= diff
            x_max += diff
        # Set up a grey square for FESOM to fill the background with land
        x_reg_fesom, y_reg_fesom = meshgrid(linspace(x_min, x_max, num=100),
                                            linspace(y_min, y_max, num=100))
        land_square = zeros(shape(x_reg_fesom))
        # Find bounds on variable in this region, for both ROMS and FESOM
        # Start with ROMS
        loc = (roms_x >= x_min) * (roms_x <= x_max) * (roms_y >= y_min) * (
            roms_y <= y_max)
        var_min = amin(roms_data[loc])
        var_max = amax(roms_data[loc])
        # Modify with FESOM
        i = 0
        for elm in elements:
            if elm.cavity:
                if (any(elm.x >= x_min) and any(elm.x <= x_max) and
                        any(elm.y >= y_min) and any(elm.y <= y_max)):
                    if fesom_data[i] < var_min:
                        var_min = fesom_data[i]
                    if fesom_data[i] > var_max:
                        var_max = fesom_data[i]
                i += 1
        if var_name == 'melt':
            # Special colour map
            if var_min < 0:
                # There is refreezing here; include blue for elements below 0
                cmap_vals = array([
                    var_min, 0, 0.25 * var_max, 0.5 * var_max, 0.75 * var_max,
                    var_max
                ])
                cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4),
                               (0.99, 0.59, 0.18), (0.5, 0.0, 0.08),
                               (0.96, 0.17, 0.89)]
                cmap_vals_norm = (cmap_vals - var_min) / (var_max - var_min)
                cmap_list = []
                for i in range(size(cmap_vals)):
                    cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                mf_cmap = LinearSegmentedColormap.from_list(
                    'melt_freeze', cmap_list)
            else:
                # No refreezing
                cmap_vals = array([
                    0, 0.25 * var_max, 0.5 * var_max, 0.75 * var_max, var_max
                ])
                cmap_colors = [(1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18),
                               (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
                cmap_vals_norm = cmap_vals / var_max
                cmap_list = []
                for i in range(size(cmap_vals)):
                    cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                mf_cmap = LinearSegmentedColormap.from_list(
                    'melt_freeze', cmap_list)
            colour_map = mf_cmap
        else:
            colour_map = 'jet'
        if var_name in ['vsfc', 'vavg']:
            # Make vectors for overlay
            # Set up bins (edges)
            x_bins = linspace(x_min, x_max, num=num_bins + 1)
            y_bins = linspace(y_min, y_max, num=num_bins + 1)
            # Calculate centres of bins (for plotting)
            x_centres = 0.5 * (x_bins[:-1] + x_bins[1:])
            y_centres = 0.5 * (y_bins[:-1] + y_bins[1:])
            # ROMS
            # First set up arrays to integrate velocity in each bin
            # Simple averaging of all the points inside each bin
            roms_u = zeros([size(y_centres), size(x_centres)])
            roms_v = zeros([size(y_centres), size(x_centres)])
            roms_num_pts = zeros([size(y_centres), size(x_centres)])
            # First convert to polar coordinates, rotate to account for
            # longitude in circumpolar projection, and convert back to vector
            # components
            theta_roms = arctan2(v_rho, u_rho)
            theta_circ_roms = theta_roms - roms_lon * deg2rad
            # roms_data is speed
            u_circ_roms = roms_data * cos(theta_circ_roms)
            v_circ_roms = roms_data * sin(theta_circ_roms)
            # Loop over all points (can't find a better way to do this)
            for j in range(size(roms_data, 0)):
                for i in range(size(roms_data, 1)):
                    # Make sure data isn't masked (i.e. land or open ocean)
                    if u_circ_roms[j, i] is not ma.masked:
                        # Check if we're in the region of interest
                        if (x_min < roms_x[j, i] < x_max and
                                y_min < roms_y[j, i] < y_max):
                            # Figure out which bins this falls into
                            x_index = nonzero(x_bins > roms_x[j, i])[0][0] - 1
                            y_index = nonzero(y_bins > roms_y[j, i])[0][0] - 1
                            # Integrate
                            roms_u[y_index, x_index] += u_circ_roms[j, i]
                            roms_v[y_index, x_index] += v_circ_roms[j, i]
                            roms_num_pts[y_index, x_index] += 1
            # Convert from sums to averages
            # First mask out points with no data
            roms_u = ma.masked_where(roms_num_pts == 0, roms_u)
            roms_v = ma.masked_where(roms_num_pts == 0, roms_v)
            # Divide everything else by the number of points
            flag = roms_num_pts > 0
            roms_u[flag] = roms_u[flag] / roms_num_pts[flag]
            roms_v[flag] = roms_v[flag] / roms_num_pts[flag]
            # FESOM
            fesom_u = zeros([size(y_centres), size(x_centres)])
            fesom_v = zeros([size(y_centres), size(x_centres)])
            fesom_num_pts = zeros([size(y_centres), size(x_centres)])
            theta_fesom = arctan2(node_v, node_u)
            theta_circ_fesom = theta_fesom - fesom_lon * deg2rad
            # node_data is speed
            u_circ_fesom = node_data * cos(theta_circ_fesom)
            v_circ_fesom = node_data * sin(theta_circ_fesom)
            # Loop over 2D nodes to fill in the velocity bins as before
            for n in range(fesom_n2d):
                if fesom_cavity[n]:
                    if (x_min < fesom_x[n] < x_max and
                            y_min < fesom_y[n] < y_max):
                        x_index = nonzero(x_bins > fesom_x[n])[0][0] - 1
                        y_index = nonzero(y_bins > fesom_y[n])[0][0] - 1
                        fesom_u[y_index, x_index] += u_circ_fesom[n]
                        fesom_v[y_index, x_index] += v_circ_fesom[n]
                        fesom_num_pts[y_index, x_index] += 1
            fesom_u = ma.masked_where(fesom_num_pts == 0, fesom_u)
            fesom_v = ma.masked_where(fesom_num_pts == 0, fesom_v)
            flag = fesom_num_pts > 0
            fesom_u[flag] = fesom_u[flag] / fesom_num_pts[flag]
            fesom_v[flag] = fesom_v[flag] / fesom_num_pts[flag]
        # Plot
        fig = figure(figsize=(30, 12))
        fig.patch.set_facecolor('white')
        # ROMS
        ax1 = fig.add_subplot(1, 2, 1, aspect='equal')
        # First shade land and zice in grey
        contourf(roms_x, roms_y, land_zice, 1, colors=(('0.6', '0.6', '0.6')))
        # Fill in the missing circle
        contourf(x_reg_roms,
                 y_reg_roms,
                 land_circle,
                 1,
                 colors=(('0.6', '0.6', '0.6')))
        # Now shade the data
        pcolor(roms_x,
               roms_y,
               roms_data,
               vmin=var_min,
               vmax=var_max,
               cmap=colour_map)
        if var_name in ['vsfc', 'vavg']:
            # Overlay vectors
            quiver(x_centres,
                   y_centres,
                   roms_u,
                   roms_v,
                   scale=1.5,
                   color='black')
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        axis('off')
        title('MetROMS', fontsize=24)
        # FESOM
        ax2 = fig.add_subplot(1, 2, 2, aspect='equal')
        # Start with land background
        contourf(x_reg_fesom,
                 y_reg_fesom,
                 land_square,
                 1,
                 colors=(('0.6', '0.6', '0.6')))
        # Add ice shelf elements
        img = PatchCollection(patches, cmap=colour_map)
        img.set_array(array(fesom_data))
        img.set_edgecolor('face')
        img.set_clim(vmin=var_min, vmax=var_max)
        ax2.add_collection(img)
        # Mask out the open ocean in white
        overlay = PatchCollection(mask_patches, facecolor=(1, 1, 1))
        overlay.set_edgecolor('face')
        ax2.add_collection(overlay)
        if var_name in ['vsfc', 'vavg']:
            quiver(x_centres,
                   y_centres,
                   fesom_u,
                   fesom_v,
                   scale=1.5,
                   color='black')
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        axis('off')
        title('FESOM', fontsize=24)
        # Colourbar on the right
        cbaxes = fig.add_axes([0.92, 0.2, 0.01, 0.6])
        cbar = colorbar(img, cax=cbaxes)
        cbar.ax.tick_params(labelsize=20)
        # Main title
        if var_name == 'draft':
            title_string = ' draft (m)'
        elif var_name == 'melt':
            title_string = ' melt rate (m/y)'
        elif var_name == 'temp':
            title_string = r' bottom water temperature ($^{\circ}$C)'
        elif var_name == 'salt':
            title_string = ' bottom water salinity (psu)'
        elif var_name == 'vsfc':
            title_string = ' surface velocity (m/s)'
        elif var_name == 'vavg':
            title_string = ' vertically averaged velocity (m/s)'
        suptitle(shelf_names[index] + title_string, fontsize=30)
        subplots_adjust(wspace=0.05)
        #fig.show()
        fig.savefig(fig_heads[index] + '_' + var_name + '.png')
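
Several of these examples call rotate_vector_roms to turn staggered, grid-aligned ROMS vector components into geographic eastward/northward components on the rho grid. The helper itself is not shown in this listing; a hypothetical sketch, assuming u and v arrive on the ROMS u- and v-grids and angle (in radians) on the rho grid:

from numpy import ma, cos, sin

def rotate_vector_roms(u, v, angle):
    # Average the staggered components onto the rho grid, repeating the
    # nearest interior value along the outer edges
    u_rho = ma.empty(angle.shape)
    v_rho = ma.empty(angle.shape)
    u_rho[:, 1:-1] = 0.5 * (u[:, :-1] + u[:, 1:])
    u_rho[:, 0] = u[:, 0]
    u_rho[:, -1] = u[:, -1]
    v_rho[1:-1, :] = 0.5 * (v[:-1, :] + v[1:, :])
    v_rho[0, :] = v[0, :]
    v_rho[-1, :] = v[-1, :]
    # Rotate by the local grid angle to get eastward/northward components
    u_lonlat = u_rho * cos(angle) - v_rho * sin(angle)
    v_lonlat = u_rho * sin(angle) + v_rho * cos(angle)
    return u_lonlat, v_lonlat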
Example #3
def mip_sfc_stress():

    # File paths
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/stress_firstyear.nc'  # Already averaged over first year
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    fesom_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/MK44005.1992.forcing.diag.nc'
    fesom_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/MK44005.1992.forcing.diag.nc'
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0
    # Northern boundaries for plots
    nbdry_acc = -30 + 90
    nbdry_shelf = -64 + 90
    # Bounds for colour scale
    colour_bound_acc = 0.25
    colour_bound_shelf = 0.25

    print 'Processing ROMS'
    # Read grid
    id = Dataset(roms_grid, 'r')
    roms_lat = id.variables['lat_rho'][:, :]
    roms_lon = id.variables['lon_rho'][:, :]
    angle = id.variables['angle'][:, :]
    zice = id.variables['zice'][:, :]
    id.close()
    # Read surface stress
    id = Dataset(roms_file, 'r')
    sustr_tmp = id.variables['sustr'][0, :, :]
    svstr_tmp = id.variables['svstr'][0, :, :]
    id.close()
    # Unrotate
    sustr, svstr = rotate_vector_roms(sustr_tmp, svstr_tmp, angle)
    # Get magnitude
    roms_stress = sqrt(sustr**2 + svstr**2)
    # Mask cavities
    roms_stress = ma.masked_where(zice < 0, roms_stress)
    # Calculate polar projection
    roms_x = -(roms_lat + 90) * cos(roms_lon * deg2rad + pi / 2)
    roms_y = (roms_lat + 90) * sin(roms_lon * deg2rad + pi / 2)

    print 'Processing low-res FESOM'
    # Build mesh and patches
    elements_lr, patches_lr = make_patches(fesom_mesh_path_lr,
                                           circumpolar=True,
                                           mask_cavities=True)
    # Read rotated lat and lon
    f = open(fesom_mesh_path_lr + 'nod2d.out', 'r')
    f.readline()
    rlon_lr = []
    rlat_lr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_lr.append(lon_tmp)
        rlat_lr.append(float(tmp[2]))
    f.close()
    rlon_lr = array(rlon_lr)
    rlat_lr = array(rlat_lr)
    # Read surface stress
    id = Dataset(fesom_file_lr, 'r')
    stress_x_tmp = mean(id.variables['stress_x'][:, :], axis=0)
    stress_y_tmp = mean(id.variables['stress_y'][:, :], axis=0)
    id.close()
    # Unrotate
    stress_x_lr, stress_y_lr = unrotate_vector(rlon_lr, rlat_lr, stress_x_tmp,
                                               stress_y_tmp)
    # Get magnitude
    fesom_stress_lr_nodes = sqrt(stress_x_lr**2 + stress_y_lr**2)
    # Average over elements
    fesom_stress_lr = []
    for elm in elements_lr:
        if not elm.cavity:
            fesom_stress_lr.append(
                mean([
                    fesom_stress_lr_nodes[elm.nodes[0].id],
                    fesom_stress_lr_nodes[elm.nodes[1].id],
                    fesom_stress_lr_nodes[elm.nodes[2].id]
                ]))

    print 'Processing high-res FESOM'
    elements_hr, patches_hr = make_patches(fesom_mesh_path_hr,
                                           circumpolar=True,
                                           mask_cavities=True)
    f = open(fesom_mesh_path_hr + 'nod2d.out', 'r')
    f.readline()
    rlon_hr = []
    rlat_hr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_hr.append(lon_tmp)
        rlat_hr.append(float(tmp[2]))
    f.close()
    rlon_hr = array(rlon_hr)
    rlat_hr = array(rlat_hr)
    id = Dataset(fesom_file_hr, 'r')
    stress_x_tmp = mean(id.variables['stress_x'][:, :], axis=0)
    stress_y_tmp = mean(id.variables['stress_y'][:, :], axis=0)
    id.close()
    stress_x_hr, stress_y_hr = unrotate_vector(rlon_hr, rlat_hr, stress_x_tmp,
                                               stress_y_tmp)
    fesom_stress_hr_nodes = sqrt(stress_x_hr**2 + stress_y_hr**2)
    fesom_stress_hr = []
    for elm in elements_hr:
        if not elm.cavity:
            fesom_stress_hr.append(
                mean([
                    fesom_stress_hr_nodes[elm.nodes[0].id],
                    fesom_stress_hr_nodes[elm.nodes[1].id],
                    fesom_stress_hr_nodes[elm.nodes[2].id]
                ]))

    print 'Plotting'

    # ACC
    fig = figure(figsize=(19, 8))
    fig.patch.set_facecolor('white')
    gs = GridSpec(1, 3)
    gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05)
    # ROMS
    ax = subplot(gs[0, 0], aspect='equal')
    ax.pcolor(roms_x,
              roms_y,
              roms_stress,
              vmin=0,
              vmax=colour_bound_acc,
              cmap='jet')
    xlim([-nbdry_acc, nbdry_acc])
    ylim([-nbdry_acc, nbdry_acc])
    title('a) MetROMS', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM (low-res)
    ax = subplot(gs[0, 1], aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(array(fesom_stress_lr))
    img.set_clim(vmin=0, vmax=colour_bound_acc)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry_acc, nbdry_acc])
    ylim([-nbdry_acc, nbdry_acc])
    title('b) FESOM (low-res)', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM (high-res)
    ax = subplot(gs[0, 2], aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(array(fesom_stress_hr))
    img.set_clim(vmin=0, vmax=colour_bound_acc)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry_acc, nbdry_acc])
    ylim([-nbdry_acc, nbdry_acc])
    title('c) FESOM (high-res)', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # Add a horizontal colourbar on the bottom
    cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04])
    cbar = colorbar(img,
                    orientation='horizontal',
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0, colour_bound_acc + 0.05, 0.05))
    cbar.ax.tick_params(labelsize=20)
    # Main title
    suptitle(r'Ocean surface stress (N/m$^2$), 1992 mean', fontsize=34)
    fig.show()
    fig.savefig('sfc_stress_acc.png')

    # Continental shelf
    fig = figure(figsize=(19, 8))
    fig.patch.set_facecolor('white')
    gs = GridSpec(1, 3)
    gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05)
    # ROMS
    ax = subplot(gs[0, 0], aspect='equal')
    ax.pcolor(roms_x,
              roms_y,
              roms_stress,
              vmin=0,
              vmax=colour_bound_shelf,
              cmap='jet')
    xlim([-nbdry_shelf, nbdry_shelf])
    ylim([-nbdry_shelf, nbdry_shelf])
    title('a) MetROMS', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM (low-res)
    ax = subplot(gs[0, 1], aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(array(fesom_stress_lr))
    img.set_clim(vmin=0, vmax=colour_bound_shelf)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry_shelf, nbdry_shelf])
    ylim([-nbdry_shelf, nbdry_shelf])
    title('b) FESOM (low-res)', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM (high-res)
    ax = subplot(gs[0, 2], aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(array(fesom_stress_hr))
    img.set_clim(vmin=0, vmax=colour_bound_shelf)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([-nbdry_shelf, nbdry_shelf])
    ylim([-nbdry_shelf, nbdry_shelf])
    title('c) FESOM (high-res)', fontsize=28)
    ax.set_xticks([])
    ax.set_yticks([])
    # Add a horizontal colourbar on the bottom
    cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04])
    cbar = colorbar(img,
                    orientation='horizontal',
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0, colour_bound_shelf + 0.05, 0.05))
    cbar.ax.tick_params(labelsize=20)
    # Main title
    suptitle(r'Ocean surface stress (N/m$^2$), 1992 mean', fontsize=34)
    fig.show()
    fig.savefig('sfc_stress_shelf.png')
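
The circumpolar x/y coordinates in these examples are all computed from the same projection formula, written out inline each time. Purely as a refactoring of what is already used above, that projection could live in one small helper:

from numpy import pi, cos, sin

def polar_coords(lon, lat, deg2rad=pi / 180.0):
    # Radial coordinate is (lat + 90), i.e. degrees from the South Pole;
    # the pi/2 offset rotates the projection so that 0E points up
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)
    return x, y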
Example #4
def holland_fig1(grid_path, file_path):

    deg2rad = pi / 180.0

    # Read grid
    id = Dataset(grid_path, 'r')
    lon = id.variables['lon_rho'][:-15, 1:]
    lat = id.variables['lat_rho'][:-15, 1:]
    h = id.variables['h'][:-15, 1:]
    zice = id.variables['zice'][:-15, 1:]
    angle = id.variables['angle'][:-15, :]
    id.close()

    # Set up figure
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)
    fig = figure(figsize=(16, 12))

    # Barotropic streamfunction
    # First read the barotropic velocity vector
    id = Dataset(file_path, 'r')
    ubar_xy = mean(id.variables['ubar'][:, :-15, :], axis=0)
    vbar_xy = mean(id.variables['vbar'][:, :-15, :], axis=0)
    id.close()
    # Rotate to lon-lat space
    ubar, vbar = rotate_vector_roms(ubar_xy, vbar_xy, angle)
    # Throw away the overlapping periodic boundary
    ubar = ubar[:, 1:]
    # Mask ice shelves
    ubar = ma.masked_where(zice != 0, ubar)
    # Water column thickness
    wct = h + zice
    # Horizontal differentials
    dx, dy = cartesian_grid_2d(lon, lat)
    # Indefinite integral from south to north of u*dz*dy, convert to Sv
    baro_strf = cumsum(ubar * wct * dy, axis=0) * 1e-6
    # Colour levels
    lev1 = arange(-50, 150 + 10, 10)
    # Plot
    ax1 = fig.add_subplot(2, 2, 1, aspect='equal')
    img = contourf(x, y, baro_strf, lev1, extend='both')
    # Contour 0 Sv in black
    contour(x, y, baro_strf, levels=[0], colors=('black'))
    title('Barotropic streamfunction (Sv)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes1 = fig.add_axes([0.07, 0.6, 0.02, 0.3])
    cbar1 = colorbar(img, ticks=arange(-50, 150 + 50, 50), cax=cbaxes1)
    cbar1.ax.tick_params(labelsize=16)

    # JJA mixed layer depth
    start_month = 6  # Start in June
    end_month = 8  # End in August
    start_day = 1  # First day in June
    next_startday = 1  # First day in September
    end_day = 31  # Last day in August
    prev_endday = 31  # Last day in May
    ndays_season = 92  # Number of days in June+July+August
    id = Dataset(file_path, 'r')
    # Read time axis and get dates
    time_id = id.variables['ocean_time']
    time = num2date(time_id[:],
                    units=time_id.units,
                    calendar=time_id.calendar.lower())
    # Find the last timestep we care about
    end_t = -1  # Missing value flag
    for t in range(size(time) - 1, -1, -1):
        if time[t].month == end_month and time[t].day in range(
                end_day - 2, end_day + 1):
            end_t = t
            break
        if time[t].month == end_month + 1 and time[t].day in range(
                next_startday, next_startday + 2):
            end_t = t
            break
    # Make sure we actually found it
    if end_t == -1:
        print 'Error: ' + file_path + ' does not contain a complete JJA'
        return
    # Find the first timestep we care about
    start_t = -1  # Missing value flag
    for t in range(end_t, -1, -1):
        if time[t].month == start_month - 1 and time[t].day in range(
                prev_endday - 1, prev_endday + 1):
            start_t = t
            break
        if time[t].month == start_month and time[t].day in range(
                start_day, start_day + 3):
            start_t = t
            break
    # Make sure we found it
    if start_t == -1:
        print 'Error: ' + file_path + ' does not contain a complete JJA'
        return
    # Initialise time-averaged KPP boundary layer depth
    hsbl = ma.empty(shape(lon))
    hsbl[:, :] = 0.0
    ndays = 0
    # Figure out how many of the 5 days represented in start_t we care about
    if time[start_t].month == start_month and time[
            start_t].day == start_day + 2:
        start_days = 5
    elif time[start_t].month == start_month and time[
            start_t].day == start_day + 1:
        start_days = 4
    elif time[start_t].month == start_month and time[start_t].day == start_day:
        start_days = 3
    elif time[start_t].month == start_month - 1 and time[
            start_t].day == prev_endday:
        start_days = 2
    elif time[start_t].month == start_month - 1 and time[
            start_t].day == prev_endday - 1:
        start_days = 1
    else:
        print 'Error: starting index is month ' + str(
            time[start_t].month) + ', day ' + str(time[start_t].day)
        return
    # Integrate Hsbl weighted by start_days
    hsbl += id.variables['Hsbl'][start_t, :-15, 1:] * start_days
    ndays += start_days
    # Between start_t and end_t, we care about all the days
    for t in range(start_t + 1, end_t):
        hsbl += id.variables['Hsbl'][t, :-15, 1:] * 5
        ndays += 5
    # Figure out how many of the 5 days represented in end_t we care about
    if time[end_t].month == end_month + 1 and time[
            end_t].day == next_startday + 1:
        end_days = 1
    elif time[end_t].month == end_month + 1 and time[
            end_t].day == next_startday:
        end_days = 2
    elif time[end_t].month == end_month and time[end_t].day == end_day:
        end_days = 3
    elif time[end_t].month == end_month and time[end_t].day == end_day - 1:
        end_days = 4
    elif time[end_t].month == end_month and time[end_t].day == end_day - 2:
        end_days = 5
    else:
        print 'Error: ending index is month ' + str(
            time[end_t].month) + ', day ' + str(time[end_t].day)
        return
    # Integrate weighted by end_days
    hsbl += id.variables['Hsbl'][end_t, :-15, 1:] * end_days
    ndays += end_days
    if ndays != ndays_season:
        print 'Error: found ' + str(ndays) + ' days instead of ' + str(
            ndays_season)
        return
    id.close()
    # Convert from integral to average
    hsbl[:, :] = hsbl[:, :] / ndays
    # Mask out ice shelves, change sign, and call it mixed layer depth
    mld = ma.masked_where(zice != 0, -hsbl)
    # Colour levels
    lev2 = arange(0, 300 + 25, 25)
    # Plot
    ax2 = fig.add_subplot(2, 2, 2, aspect='equal')
    img = contourf(x, y, mld, lev2, extend='both')
    # Contour 100 m in black
    contour(x, y, mld, levels=[100], colors=('black'))
    title('Winter mixed layer depth (m)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes2 = fig.add_axes([0.9, 0.6, 0.02, 0.3])
    cbar2 = colorbar(img, ticks=arange(0, 300 + 100, 100), cax=cbaxes2)
    cbar2.ax.tick_params(labelsize=16)

    # Bottom water temperature
    id = Dataset(file_path, 'r')
    bwtemp = mean(id.variables['temp'][:, 0, :-15, 1:], axis=0)
    id.close()
    # Mask ice shelves
    bwtemp = ma.masked_where(zice != 0, bwtemp)
    # Colour levels
    lev3 = arange(-2, 2 + 0.2, 0.2)
    # Plot
    ax3 = fig.add_subplot(2, 2, 3, aspect='equal')
    img = contourf(x, y, bwtemp, lev3, extend='both')
    # Contour 0C in black
    contour(x, y, bwtemp, levels=[0], colors=('black'))
    title(r'Bottom temperature ($^{\circ}$C)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes3 = fig.add_axes([0.07, 0.1, 0.02, 0.3])
    cbar3 = colorbar(img, ticks=arange(-2, 2 + 1, 1), cax=cbaxes3)
    cbar3.ax.tick_params(labelsize=16)

    # Bottom water salinity
    id = Dataset(file_path, 'r')
    bwsalt = mean(id.variables['salt'][:, 0, :-15, 1:], axis=0)
    bwsalt = ma.masked_where(zice != 0, bwsalt)
    id.close()
    lev4 = arange(34.5, 34.8 + 0.025, 0.025)
    ax4 = fig.add_subplot(2, 2, 4, aspect='equal')
    img = contourf(x, y, bwsalt, lev4, extend='both')
    # Contour 34.65 psu in black
    contour(x, y, bwsalt, levels=[34.65], colors=('black'))
    title('Bottom salinity (psu)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes4 = fig.add_axes([0.9, 0.1, 0.02, 0.3])
    cbar4 = colorbar(img, ticks=arange(34.5, 34.8 + 0.1, 0.1), cax=cbaxes4)
    cbar4.ax.tick_params(labelsize=16)

    fig.show()
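
Example #4 calls cartesian_grid_2d for the horizontal differentials used in the streamfunction integral; that helper is also not shown here. A rough sketch under the same spherical-Earth approximation as the dx/dy formulas in Example #1, ignoring the -180/180 longitude seam:

from numpy import pi, cos, gradient

def cartesian_grid_2d(lon, lat, r=6.371e6):
    deg2rad = pi / 180.0
    # Differences of lon along the xi (second) axis and lat along the eta
    # (first) axis; gradient uses centred differences in the interior and
    # one-sided differences at the edges
    dlon_dxi = gradient(lon, axis=1)
    dlat_deta = gradient(lat, axis=0)
    # dx = r*cos(lat)*dlon, dy = r*dlat, with dlon and dlat in radians
    dx = r * cos(lat * deg2rad) * dlon_dxi * deg2rad
    dy = r * dlat_deta * deg2rad
    return dx, dy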
Example #5
def zonal_plot(file_path,
               var_name,
               tstep,
               lon_key,
               lon0,
               lon_bounds,
               depth_min,
               colour_bounds=None,
               save=False,
               fig_name=None,
               grid_path=None):

    # Grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    if var_name in ['w', 'AKv', 'AKt', 'AKs']:
        N = 32

    # Read the variable
    id = Dataset(file_path, 'r')
    data_3d = id.variables[var_name][tstep - 1, :, :-15, :]
    # Also read sea surface height
    zeta = id.variables['zeta'][tstep - 1, :-15, :]
    if var_name == 'salt':
        units = 'psu'
    else:
        units = id.variables[var_name].units
    long_name = id.variables[var_name].long_name

    # Rotate velocity if necessary
    if var_name in ['u', 'v']:
        grid_id = Dataset(grid_path, 'r')
        angle = grid_id.variables['angle'][:-15, :]
        grid_id.close()
        if var_name == 'u':
            data_3d_ugrid = data_3d[:, :, :]
            data_3d = ma.empty([
                data_3d_ugrid.shape[0], data_3d_ugrid.shape[1],
                data_3d_ugrid.shape[2] + 1
            ])
            for k in range(N):
                u_data = data_3d_ugrid[k, :, :]
                v_data = id.variables['v'][tstep - 1, k, :-15, :]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                    u_data, v_data, angle)
                data_3d[k, :, :] = u_data_lonlat
        elif var_name == 'v':
            data_3d_vgrid = data_3d[:, :, :]
            data_3d = ma.empty([
                data_3d_vgrid.shape[0], data_3d_vgrid.shape[1] + 1,
                data_3d_vgrid.shape[2]
            ])
            for k in range(N):
                v_data = data_3d_vgrid[k, :, :]
                u_data = id.variables['u'][tstep - 1, k, :-15, :]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                    u_data, v_data, angle)
                data_3d[k, :, :] = v_data_lonlat

    # Read grid variables
    h = id.variables['h'][:-15, :]
    zice = id.variables['zice'][:-15, :]
    lon_2d = id.variables['lon_rho'][:-15, :]
    lat_2d = id.variables['lat_rho'][:-15, :]
    id.close()

    # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script
    z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta)

    # Warning message for zonal averages
    if lon_key != 0:
        print 'WARNING: this script assumes regular i-indices for zonal averages. In heavily rotated regions, e.g. the inner Weddell Sea, it may not be appropriate.'

    # Choose what to write on the title about longitude
    if lon_key == 0:
        if lon0 < 0:
            lon_string = 'at ' + str(int(round(-lon0))) + r'$^{\circ}$W'
        else:
            lon_string = 'at ' + str(int(round(lon0))) + r'$^{\circ}$E'
    elif lon_key == 1:
        lon_string = 'zonally averaged'
    elif lon_key == 2:
        lon_string = 'zonally averaged between '
        if lon_bounds[0] < 0:
            lon_string += str(int(round(-lon_bounds[0]))) + r'$^{\circ}$W and '
        else:
            lon_string += str(int(round(lon_bounds[0]))) + r'$^{\circ}$E and '
        if lon_bounds[1] < 0:
            lon_string += str(int(round(-lon_bounds[1]))) + r'$^{\circ}$W'
        else:
            lon_string += str(int(round(lon_bounds[1]))) + r'$^{\circ}$E'

    # Edit longitude bounds to be from 0 to 360, to fit with ROMS convention
    if lon_key == 0:
        if lon0 < 0:
            lon0 += 360
    elif lon_key == 2:
        if lon_bounds[0] < 0:
            lon_bounds[0] += 360
        if lon_bounds[1] < 0:
            lon_bounds[1] += 360

    # Interpolate or average data
    if lon_key == 0:
        # Interpolate to lon0
        data, z, lat = interp_lon_roms(data_3d, z_3d, lat_2d, lon_2d, lon0)
    elif lon_key == 1:
        # Zonally average over all longitudes
        # dlon is constant on this grid (0.25 degrees) so this is easy
        data = mean(data_3d, axis=2)
        z = mean(z_3d, axis=2)
        # Zonally average latitude, and copy into N depth levels
        lat = tile(mean(lat_2d, axis=1), (N, 1))
    elif lon_key == 2:
        # Zonally average between lon_bounds
        data, z, lat = average_btw_lons(data_3d, z_3d, lat_2d, lon_2d,
                                        lon_bounds)

    if colour_bounds is not None:
        # User has set bounds on colour scale
        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
        if colour_bounds[0] == -colour_bounds[1]:
            # Bounds are centered on zero, so choose a blue-to-red colourmap
            # centered on yellow
            colour_map = 'RdYlBu_r'
        else:
            colour_map = 'jet'
    else:
        # Determine bounds automatically
        if var_name in ['u', 'v']:
            # Center levels on 0 for certain variables, with a blue-to-red
            # colourmap
            max_val = amax(abs(data))
            lev = linspace(-max_val, max_val, num=40)
            colour_map = 'RdYlBu_r'
        else:
            lev = linspace(amin(data), amax(data), num=40)
            colour_map = 'jet'

    # Plot
    fig = figure(figsize=(18, 6))
    contourf(lat, z, data, lev, cmap=colour_map, extend='both')
    colorbar()

    title(long_name + ' (' + units + ')\n' + lon_string)
    xlabel('Latitude')
    ylabel('Depth (m)')

    # Choose latitude bounds based on land mask
    data_sum = sum(data, axis=0)
    # Find southernmost and northernmost unmasked j-indices
    edges = ma.flatnotmasked_edges(data_sum)
    j_min = edges[0]
    j_max = edges[1]
    if j_min == 0:
        # There are ocean points right to the southern boundary
        # Don't do anything special
        lat_min = min(lat[:, j_min])
    else:
        # There is land everywhere at the southern boundary
        # Show the last 2 degrees of this land mask
        lat_min = min(lat[:, j_min]) - 2
    if j_max == size(data_sum) - 1:
        # There are ocean points right to the northern boundary
        # Don't do anything special
        lat_max = max(lat[:, j_max])
    else:
        # There is land everywhere at the northern boundary
        # Show the first 2 degrees of this land mask
        lat_max = max(lat[:, j_max]) + 2
#    lat_max = -50
    xlim([lat_min, lat_max])
    ylim([depth_min, 0])

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()

    # Reset lon0 or lon_bounds to (-180, 180) range in case we
    # use them again for the next plot
    if lon_key == 0:
        if lon0 > 180:
            lon0 -= 360
    elif lon_key == 2:
        if lon_bounds[0] > 180:
            lon_bounds[0] -= 360
        if lon_bounds[1] > 180:
            lon_bounds[1] -= 360
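
# Minimal usage sketch for zonal_plot, left commented out; the file names and
# argument values below are hypothetical placeholders, not taken from this
# document:
# zonal_plot('ocean_avg.nc', 'temp', tstep=1, lon_key=0, lon0=-67,
#            lon_bounds=None, depth_min=-1000, grid_path='roms_grid.nc')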
Beispiel #6
def mip_regions_1var ():

    # Path to ROMS grid file
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    # Path to ROMS time-averaged file
    roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/2002_2016_avg.nc'
    # Path to FESOM mesh directories
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    # Path to FESOM time-averaged ocean files (temp, salt, u, v)
    fesom_file_lr_o = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/oce_2002_2016_avg.nc'
    fesom_file_hr_o = '/short/y99/kaa561/FESOM/intercomparison_highres/output/oce_2002_2016_avg.nc'
    # Path to FESOM time-averaged ice shelf files (wnet)
    fesom_file_lr_i = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/wnet_2002_2016_avg.nc'
    fesom_file_hr_i = '/short/y99/kaa561/FESOM/intercomparison_highres/output/wnet_2002_2016_avg.nc'

    # Name of each region
    region_names = ['Filchner-Ronne Ice Shelf', 'Eastern Weddell Region', 'Amery Ice Shelf', 'Australian Sector', 'Ross Sea', 'Amundsen Sea', 'Bellingshausen Sea', 'Larsen Ice Shelves']
    num_regions = len(region_names)
    # Beginning of filenames for figures
    fig_heads = ['filchner_ronne', 'eweddell', 'amery', 'australian', 'ross', 'amundsen', 'bellingshausen', 'larsen']
    # Bounds for each region (using polar coordinate transformation as below)
    x_min = [-14, -8, 15.25, 12, -9.5, -15.5, -20.25, -22.5]
    x_max = [-4.5, 13, 20.5, 25.5, 4, -10.5, -15.5, -14.5]
    y_min = [1, 12, 4.75, -20, -13, -11.25, -4.5, 8.3]
    y_max = [10, 21, 8, 4, -4.75, -2.25, 7.6, 13]
    # Size of each plot in the y direction
    ysize = [8, 6, 7, 9, 7, 9, 10, 7]
    # Variables to process
    var_names = ['vel'] #['bathy', 'draft', 'wct', 'melt', 'temp', 'salt', 'vel']
    # Constants
    sec_per_year = 365*24*3600
    deg2rad = pi/180.0
    # Parameters for missing circle in ROMS grid
    lon_c = 50
    lat_c = -83
    radius = 10.1
    nbdry = -63+90
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # Number of bins in each direction for vector overlay
    num_bins = 30

    print 'Reading ROMS grid'
    # Read the fields we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_mask = id.variables['mask_rho'][:,:]
    roms_zice = id.variables['zice'][:,:]
    roms_angle = id.variables['angle'][:,:]
    id.close()
    # Get land/zice mask
    open_ocn = copy(roms_mask)
    open_ocn[roms_zice!=0] = 0
    land_zice = ma.masked_where(open_ocn==1, open_ocn)
    # Convert lat-lon grid to polar x-y coordinates for the circumpolar projection
    roms_x = -(roms_lat+90)*cos(roms_lon*deg2rad+pi/2)
    roms_y = (roms_lat+90)*sin(roms_lon*deg2rad+pi/2)
    # Find the centre of the missing circle in the same x-y coordinates
    x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2)
    y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2)
    # Build a regular x-y grid and select the missing circle
    x_reg_roms, y_reg_roms = meshgrid(linspace(-nbdry, nbdry, num=1000), linspace(-nbdry, nbdry, num=1000))
    land_circle = zeros(shape(x_reg_roms))
    land_circle = ma.masked_where(sqrt((x_reg_roms-x_c)**2 + (y_reg_roms-y_c)**2) > radius, land_circle)

    print 'Building FESOM low-res mesh'
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True)
    # Make patches for all elements, ice shelf elements, and open ocean elements
    patches_lr = []
    patches_shelf_lr = []
    patches_ocn_lr = []
    for elm in elements_lr:
        coord = transpose(vstack((elm.x, elm.y)))
        patches_lr.append(Polygon(coord, True, linewidth=0.))
        if elm.cavity:
            patches_shelf_lr.append(Polygon(coord, True, linewidth=0.))
        else:
            patches_ocn_lr.append(Polygon(coord, True, linewidth=0.))

    print 'Building FESOM high-res mesh'
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True)
    patches_hr = []
    patches_shelf_hr = []
    patches_ocn_hr = []
    for elm in elements_hr:
        coord = transpose(vstack((elm.x, elm.y)))
        patches_hr.append(Polygon(coord, True, linewidth=0.))
        if elm.cavity:
            patches_shelf_hr.append(Polygon(coord, True, linewidth=0.))
        else:
            patches_ocn_hr.append(Polygon(coord, True, linewidth=0.))

    for var in var_names:
        print 'Processing variable ' + var

        print 'Reading ROMS fields'
        if var == 'draft':
            # Swap sign on existing zice field; nothing more to read
            roms_data = -1*roms_zice
        elif var == 'bathy':
            # Point to h field and mask out land mask; nothing more to read
            roms_data = ma.masked_where(roms_mask==0, roms_h)
        elif var == 'wct':
            # Add h (positive) and zice (negative); nothing more to read
            roms_data = roms_h + roms_zice
        else:
            id = Dataset(roms_file, 'r')
            if var == 'melt':
                # Convert from m/s to m/y
                roms_data = id.variables['m'][0,:,:]*sec_per_year
            elif var in ['temp', 'salt']:
                # Bottom layer
                roms_data = id.variables[var][0,0,:,:]
            elif var == 'vel':
                # Read full 3D u and v
                u_3d_tmp = id.variables['u'][0,:,:,:]
                v_3d_tmp = id.variables['v'][0,:,:,:]
                # Get integrands on 3D grid; we only care about dz
                dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
                # Unrotate each vertical level
                u_3d = ma.empty(shape(dz))
                v_3d = ma.empty(shape(dz))
                num_lat_u = size(u_3d_tmp,1)
                num_lon_u = size(u_3d_tmp,2)
                num_lat_v = size(v_3d_tmp,1)
                num_lon_v = size(v_3d_tmp,2)
                for k in range(N):
                    # Extend into land mask before interpolation to rho-grid so
                    # the land mask doesn't change in the final plot
                    for j in range(1,num_lat_u-1):
                        for i in range(1,num_lon_u-1):
                            # Check for masked points
                            if u_3d_tmp[k,j,i] is ma.masked:
                                # Look at 4 neighbours
                                neighbours = ma.array([u_3d_tmp[k,j-1,i], u_3d_tmp[k,j,i-1], u_3d_tmp[k,j+1,i], u_3d_tmp[k,j,i+1]])
                                # Find how many of them are unmasked
                                num_unmasked = MaskedArray.count(neighbours)
                                if num_unmasked > 0:
                                    # There is at least one unmasked neighbour;
                                    # set u_3d_tmp to their average
                                    u_3d_tmp[k,j,i] = sum(neighbours)/num_unmasked
                    # Repeat for v
                    for j in range(1,num_lat_v-1):
                        for i in range(1,num_lon_v-1):
                            if v_3d_tmp[k,j,i] is ma.masked:
                                neighbours = ma.array([v_3d_tmp[k,j-1,i], v_3d_tmp[k,j,i-1], v_3d_tmp[k,j+1,i], v_3d_tmp[k,j,i+1]])
                                num_unmasked = MaskedArray.count(neighbours)
                                if num_unmasked > 0:
                                    v_3d_tmp[k,j,i] = sum(neighbours)/num_unmasked
                    # Interpolate to rho grid and rotate
                    u_k, v_k = rotate_vector_roms(u_3d_tmp[k,:,:], v_3d_tmp[k,:,:], roms_angle)
                    u_3d[k,:,:] = u_k
                    v_3d[k,:,:] = v_k
                # Vertically average u and v
                u_rho = sum(u_3d*dz, axis=0)/sum(dz, axis=0)
                v_rho = sum(v_3d*dz, axis=0)/sum(dz, axis=0)    
                # Get speed
                roms_data = sqrt(u_rho**2 + v_rho**2)
                # Mask out land
                u_rho = ma.masked_where(roms_mask==0, u_rho)
                v_rho = ma.masked_where(roms_mask==0, v_rho)
                roms_data = ma.masked_where(roms_mask==0, roms_data)
            id.close()
        if var in ['draft', 'melt', 'wct']:
            # Mask out open ocean
            roms_data = ma.masked_where(roms_zice==0, roms_data)

        print 'Reading FESOM low-res fields'
        if var not in ['draft', 'bathy', 'wct']:
            if var == 'melt':
                id = Dataset(fesom_file_lr_i, 'r')
                # Convert from m/s to m/y
                node_data_lr = id.variables['wnet'][0,:]*sec_per_year
            elif var in ['temp', 'salt']:
                id = Dataset(fesom_file_lr_o, 'r')
                # Read full 3D field for now
                node_data_lr = id.variables[var][0,:]
            elif var == 'vel':
                id = Dataset(fesom_file_lr_o, 'r')
                # The overlaid vectors are based on nodes not elements, so many
                # of the fesom_grid data structures fail to apply and we need to
                # read some of the FESOM grid files again.
                # Read the cavity flag for each 2D surface node
                fesom_cavity_lr = []
                f = open(fesom_mesh_path_lr + 'cavity_flag_nod2d.out', 'r')
                for line in f:
                    tmp = int(line)
                    if tmp == 1:
                        fesom_cavity_lr.append(True)
                    elif tmp == 0:
                        fesom_cavity_lr.append(False)
                    else:
                        print 'Problem'
                        #return
                f.close()
                # Save the number of 2D nodes
                fesom_n2d_lr = len(fesom_cavity_lr)
                # Read rotated lat and lon for each node, also depth
                f = open(fesom_mesh_path_lr + 'nod3d.out', 'r')
                f.readline()
                rlon_lr = []
                rlat_lr = []
                node_depth_lr = []
                for line in f:
                    tmp = line.split()
                    lon_tmp = float(tmp[1])
                    lat_tmp = float(tmp[2])
                    node_depth_tmp = -1*float(tmp[3])
                    if lon_tmp < -180:
                        lon_tmp += 360
                    elif lon_tmp > 180:
                        lon_tmp -= 360
                    rlon_lr.append(lon_tmp)
                    rlat_lr.append(lat_tmp)
                    node_depth_lr.append(node_depth_tmp)
                f.close()
                # For lat and lon, only care about the 2D nodes (the first
                # fesom_n2d indices)
                rlon_lr = array(rlon_lr[0:fesom_n2d_lr])
                rlat_lr = array(rlat_lr[0:fesom_n2d_lr])
                node_depth_lr = array(node_depth_lr)
                # Unrotate longitude
                fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr)
                # Calculate polar coordinates of each node
                fesom_x_lr = -(fesom_lat_lr+90)*cos(fesom_lon_lr*deg2rad+pi/2)
                fesom_y_lr = (fesom_lat_lr+90)*sin(fesom_lon_lr*deg2rad+pi/2)
                # Read lists of which nodes are directly below which
                f = open(fesom_mesh_path_lr + 'aux3d.out', 'r')
                max_num_layers_lr = int(f.readline())
                node_columns_lr = zeros([fesom_n2d_lr, max_num_layers_lr])
                for n in range(fesom_n2d_lr):
                    for k in range(max_num_layers_lr):
                        node_columns_lr[n,k] = int(f.readline())
                node_columns_lr = node_columns_lr.astype(int)
                f.close()
                # Now we can actually read the data
                # Read full 3D field for both u and v
                node_ur_3d_lr = id.variables['u'][0,:]
                node_vr_3d_lr = id.variables['v'][0,:]
                # Vertically average
                node_ur_lr = zeros(fesom_n2d_lr)
                node_vr_lr = zeros(fesom_n2d_lr)
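                # The loop below computes the vertical average with the
                # trapezoidal rule: sum over layers of 0.5*(u_top+u_bot)*dz,
                # divided by the total dz of the column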
                for n in range(fesom_n2d_lr):
                    # Integrate udz, vdz, and dz over this water column
                    udz_col = 0
                    vdz_col = 0
                    dz_col = 0
                    for k in range(max_num_layers_lr-1):
                        if node_columns_lr[n,k+1] == -999:
                            # Reached the bottom
                            break
                        # Trapezoidal rule
                        top_id = node_columns_lr[n,k]
                        bot_id = node_columns_lr[n,k+1]
                        dz_tmp = node_depth_lr[bot_id-1] - node_depth_lr[top_id-1]
                        udz_col += 0.5*(node_ur_3d_lr[top_id-1]+node_ur_3d_lr[bot_id-1])*dz_tmp
                        vdz_col += 0.5*(node_vr_3d_lr[top_id-1]+node_vr_3d_lr[bot_id-1])*dz_tmp
                        dz_col += dz_tmp
                    # Convert from integrals to averages
                    node_ur_lr[n] = udz_col/dz_col
                    node_vr_lr[n] = vdz_col/dz_col
                # Unrotate
                node_u_lr, node_v_lr = unrotate_vector(rlon_lr, rlat_lr, node_ur_lr, node_vr_lr)
                # Calculate speed
                node_data_lr = sqrt(node_u_lr**2 + node_v_lr**2)
            id.close()
        # Calculate given field at each element
        fesom_data_lr = []
        for elm in elements_lr:
            # For each element, append the mean value for the 3 component Nodes
            # Restrict to ice shelf cavities for draft, melt, wct
            if elm.cavity or var not in ['draft', 'melt', 'wct']:
                if var == 'draft':
                    # Ice shelf draft is depth of surface layer
                    fesom_data_lr.append(mean([elm.nodes[0].depth, elm.nodes[1].depth, elm.nodes[2].depth]))
                elif var == 'bathy':
                    # Bathymetry is depth of bottom layer
                    fesom_data_lr.append(mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]))
                elif var == 'wct':
                    # Water column thickness is depth of bottom layer minus
                    # depth of surface layer
                    fesom_data_lr.append(mean([elm.nodes[0].find_bottom().depth - elm.nodes[0].depth, elm.nodes[1].find_bottom().depth - elm.nodes[1].depth, elm.nodes[2].find_bottom().depth - elm.nodes[2].depth]))
                elif var in ['melt', 'vel']:
                    # Surface nodes
                    fesom_data_lr.append(mean([node_data_lr[elm.nodes[0].id], node_data_lr[elm.nodes[1].id], node_data_lr[elm.nodes[2].id]]))
                elif var in ['temp', 'salt']:
                    # Bottom nodes
                    fesom_data_lr.append(mean([node_data_lr[elm.nodes[0].find_bottom().id], node_data_lr[elm.nodes[1].find_bottom().id], node_data_lr[elm.nodes[2].find_bottom().id]]))

        print 'Reading FESOM high-res fields'
        # As before
        if var not in ['draft', 'bathy', 'wct']:
            if var == 'melt':
                id = Dataset(fesom_file_hr_i, 'r')
                node_data_hr = id.variables['wnet'][0,:]*sec_per_year
            elif var in ['temp', 'salt']:
                id = Dataset(fesom_file_hr_o, 'r')
                node_data_hr = id.variables[var][0,:]
            elif var == 'vel':
                id = Dataset(fesom_file_hr_o, 'r')
                fesom_cavity_hr = []
                f = open(fesom_mesh_path_hr + 'cavity_flag_nod2d.out', 'r')
                for line in f:
                    tmp = int(line)
                    if tmp == 1:
                        fesom_cavity_hr.append(True)
                    elif tmp == 0:
                        fesom_cavity_hr.append(False)
                    else:
                        print 'Problem'
                        #return
                f.close()
                fesom_n2d_hr = len(fesom_cavity_hr)
                f = open(fesom_mesh_path_hr + 'nod3d.out', 'r')
                f.readline()
                rlon_hr = []
                rlat_hr = []
                node_depth_hr = []
                for line in f:
                    tmp = line.split()
                    lon_tmp = float(tmp[1])
                    lat_tmp = float(tmp[2])
                    node_depth_tmp = -1*float(tmp[3])
                    if lon_tmp < -180:
                        lon_tmp += 360
                    elif lon_tmp > 180:
                        lon_tmp -= 360
                    rlon_hr.append(lon_tmp)
                    rlat_hr.append(lat_tmp)
                    node_depth_hr.append(node_depth_tmp)
                f.close()
                rlon_hr = array(rlon_hr[0:fesom_n2d_hr])
                rlat_hr = array(rlat_hr[0:fesom_n2d_hr])
                node_depth_hr = array(node_depth_hr)
                fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr)
                fesom_x_hr = -(fesom_lat_hr+90)*cos(fesom_lon_hr*deg2rad+pi/2)
                fesom_y_hr = (fesom_lat_hr+90)*sin(fesom_lon_hr*deg2rad+pi/2)
                f = open(fesom_mesh_path_hr + 'aux3d.out', 'r')
                max_num_layers_hr = int(f.readline())
                node_columns_hr = zeros([fesom_n2d_hr, max_num_layers_hr])
                for n in range(fesom_n2d_hr):
                    for k in range(max_num_layers_hr):
                        node_columns_hr[n,k] = int(f.readline())
                node_columns_hr = node_columns_hr.astype(int)
                f.close()
                node_ur_3d_hr = id.variables['u'][0,:]
                node_vr_3d_hr = id.variables['v'][0,:]
                node_ur_hr = zeros(fesom_n2d_hr)
                node_vr_hr = zeros(fesom_n2d_hr)
                for n in range(fesom_n2d_hr):
                    udz_col = 0
                    vdz_col = 0
                    dz_col = 0
                    for k in range(max_num_layers_hr-1):
                        if node_columns_hr[n,k+1] == -999:
                            break
                        top_id = node_columns_hr[n,k]
                        bot_id = node_columns_hr[n,k+1]
                        dz_tmp = node_depth_hr[bot_id-1] - node_depth_hr[top_id-1]
                        udz_col += 0.5*(node_ur_3d_hr[top_id-1]+node_ur_3d_hr[bot_id-1])*dz_tmp
                        vdz_col += 0.5*(node_vr_3d_hr[top_id-1]+node_vr_3d_hr[bot_id-1])*dz_tmp
                        dz_col += dz_tmp
                    node_ur_hr[n] = udz_col/dz_col
                    node_vr_hr[n] = vdz_col/dz_col
                node_u_hr, node_v_hr = unrotate_vector(rlon_hr, rlat_hr, node_ur_hr, node_vr_hr)
                node_data_hr = sqrt(node_u_hr**2 + node_v_hr**2)
            id.close()
        fesom_data_hr = []
        for elm in elements_hr:
            if elm.cavity or var not in ['draft', 'melt', 'wct']:
                if var == 'draft':
                    fesom_data_hr.append(mean([elm.nodes[0].depth, elm.nodes[1].depth, elm.nodes[2].depth]))
                elif var == 'bathy':
                    fesom_data_hr.append(mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]))
                elif var == 'wct':
                    fesom_data_hr.append(mean([elm.nodes[0].find_bottom().depth - elm.nodes[0].depth, elm.nodes[1].find_bottom().depth - elm.nodes[1].depth, elm.nodes[2].find_bottom().depth - elm.nodes[2].depth]))
                elif var in ['melt', 'vel']:
                    fesom_data_hr.append(mean([node_data_hr[elm.nodes[0].id], node_data_hr[elm.nodes[1].id], node_data_hr[elm.nodes[2].id]]))
                elif var in ['temp', 'salt']:
                    fesom_data_hr.append(mean([node_data_hr[elm.nodes[0].find_bottom().id], node_data_hr[elm.nodes[1].find_bottom().id], node_data_hr[elm.nodes[2].find_bottom().id]]))

        # Loop over regions
        for index in range(num_regions):
            print 'Processing ' + region_names[index]
            # Set up a grey square for FESOM to fill the background with land
            x_reg_fesom, y_reg_fesom = meshgrid(linspace(x_min[index], x_max[index], num=100), linspace(y_min[index], y_max[index], num=100))
            land_square = zeros(shape(x_reg_fesom))
            # Find bounds on variable in this region, for both ROMS and FESOM
            # Start with ROMS
            loc = (roms_x >= x_min[index])*(roms_x <= x_max[index])*(roms_y >= y_min[index])*(roms_y <= y_max[index])
            var_min = amin(roms_data[loc])
            var_max = amax(roms_data[loc])
            # Modify with FESOM
            # Low-res
            i = 0
            for elm in elements_lr:
                if elm.cavity or var not in ['draft', 'melt', 'wct']:
                    if any(elm.x >= x_min[index]) and any(elm.x <= x_max[index]) and any(elm.y >= y_min[index]) and any(elm.y <= y_max[index]):
                        if fesom_data_lr[i] < var_min:
                            var_min = fesom_data_lr[i]
                        if fesom_data_lr[i] > var_max:
                            var_max = fesom_data_lr[i]
                    i += 1
            # High-res
            i = 0
            for elm in elements_hr:
                if elm.cavity or var not in ['draft', 'melt', 'wct']:
                    if any(elm.x >= x_min[index]) and any(elm.x <= x_max[index]) and any(elm.y >= y_min[index]) and any(elm.y <= y_max[index]):
                        if fesom_data_hr[i] < var_min:
                            var_min = fesom_data_hr[i]
                        if fesom_data_hr[i] > var_max:
                            var_max = fesom_data_hr[i]
                    i += 1
            if var == 'melt':
                # Special colour map
                if var_min < 0:
                    # There is refreezing here; include blue for elements < 0
                    cmap_vals = array([var_min, 0, 0.25*var_max, 0.5*var_max, 0.75*var_max, var_max])
                    cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
                    cmap_vals_norm = (cmap_vals - var_min)/(var_max - var_min)
                    cmap_list = []
                    for i in range(size(cmap_vals)):
                        cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                    mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list)
                else:
                    # No refreezing
                    cmap_vals = array([0, 0.25*var_max, 0.5*var_max, 0.75*var_max, var_max])
                    cmap_colors = [(1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
                    cmap_vals_norm = cmap_vals/var_max
                    cmap_list = []
                    for i in range(size(cmap_vals)):
                        cmap_list.append((cmap_vals_norm[i], cmap_colors[i]))
                    mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list)
                colour_map = mf_cmap            
            elif var == 'vel':
                colour_map = 'cool'
            else:
                colour_map = 'jet'
            if var == 'vel':
                # Make vectors for overlay
                # Set up bins (edges)
                x_bins = linspace(x_min[index], x_max[index], num=num_bins+1)
                y_bins = linspace(y_min[index], y_max[index], num=num_bins+1)
                # Calculate centres of bins (for plotting)
                x_centres = 0.5*(x_bins[:-1] + x_bins[1:])
                y_centres = 0.5*(y_bins[:-1] + y_bins[1:])
                # ROMS
                # First set up arrays to integrate velocity in each bin
                # Simple averaging of all the points inside each bin
                roms_u = zeros([size(y_centres), size(x_centres)])
                roms_v = zeros([size(y_centres), size(x_centres)])
                roms_num_pts = zeros([size(y_centres), size(x_centres)])
                # First convert to polar coordinates, rotate to account for
                # longitude in circumpolar projection, and convert back to vector
                # components
                theta_roms = arctan2(v_rho, u_rho)
                theta_circ_roms = theta_roms - roms_lon*deg2rad
                u_circ_roms = roms_data*cos(theta_circ_roms) # roms_data is speed
                v_circ_roms = roms_data*sin(theta_circ_roms)
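                # (equivalently: u_circ = speed*cos(theta - lon),
                # v_circ = speed*sin(theta - lon) with theta = arctan2(v, u);
                # the speed is unchanged, only the direction is rotated)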
                # Loop over all points (can't find a better way to do this)
                for j in range(size(roms_data,0)):
                    for i in range(size(roms_data,1)):
                        # Make sure data isn't masked (i.e. land)
                        if u_circ_roms[j,i] is not ma.masked:
                            # Check if we're in the region of interest
                            if roms_x[j,i] > x_min[index] and roms_x[j,i] < x_max[index] and roms_y[j,i] > y_min[index] and roms_y[j,i] < y_max[index]:
                                # Figure out which bins this falls into
                                x_index = nonzero(x_bins > roms_x[j,i])[0][0]-1
                                y_index = nonzero(y_bins > roms_y[j,i])[0][0]-1
                                # Integrate
                                roms_u[y_index, x_index] += u_circ_roms[j,i]
                                roms_v[y_index, x_index] += v_circ_roms[j,i]
                                roms_num_pts[y_index, x_index] += 1
                # Convert from sums to averages
                # First mask out points with no data
                roms_u = ma.masked_where(roms_num_pts==0, roms_u)
                roms_v = ma.masked_where(roms_num_pts==0, roms_v)
                # Divide everything else by the number of points
                flag = roms_num_pts > 0
                roms_u[flag] = roms_u[flag]/roms_num_pts[flag]
                roms_v[flag] = roms_v[flag]/roms_num_pts[flag]
                # FESOM low-res
                fesom_u_lr = zeros([size(y_centres), size(x_centres)])
                fesom_v_lr = zeros([size(y_centres), size(x_centres)])
                fesom_num_pts_lr = zeros([size(y_centres), size(x_centres)])
                theta_fesom_lr = arctan2(node_v_lr, node_u_lr)
                theta_circ_fesom_lr = theta_fesom_lr - fesom_lon_lr*deg2rad
                u_circ_fesom_lr = node_data_lr*cos(theta_circ_fesom_lr) # node_data is speed
                v_circ_fesom_lr = node_data_lr*sin(theta_circ_fesom_lr)
                # Loop over 2D nodes to fill in the velocity bins as before
                for n in range(fesom_n2d_lr):
                    if fesom_x_lr[n] > x_min[index] and fesom_x_lr[n] < x_max[index] and fesom_y_lr[n] > y_min[index] and fesom_y_lr[n] < y_max[index]:
                        x_index = nonzero(x_bins > fesom_x_lr[n])[0][0]-1
                        y_index = nonzero(y_bins > fesom_y_lr[n])[0][0]-1
                        fesom_u_lr[y_index, x_index] += u_circ_fesom_lr[n]
                        fesom_v_lr[y_index, x_index] += v_circ_fesom_lr[n]
                        fesom_num_pts_lr[y_index, x_index] += 1
                fesom_u_lr = ma.masked_where(fesom_num_pts_lr==0, fesom_u_lr)
                fesom_v_lr = ma.masked_where(fesom_num_pts_lr==0, fesom_v_lr)
                flag = fesom_num_pts_lr > 0
                fesom_u_lr[flag] = fesom_u_lr[flag]/fesom_num_pts_lr[flag]
                fesom_v_lr[flag] = fesom_v_lr[flag]/fesom_num_pts_lr[flag]
                # FESOM high-res
                fesom_u_hr = zeros([size(y_centres), size(x_centres)])
                fesom_v_hr = zeros([size(y_centres), size(x_centres)])
                fesom_num_pts_hr = zeros([size(y_centres), size(x_centres)])
                theta_fesom_hr = arctan2(node_v_hr, node_u_hr)
                theta_circ_fesom_hr = theta_fesom_hr - fesom_lon_hr*deg2rad
                u_circ_fesom_hr = node_data_hr*cos(theta_circ_fesom_hr) # node_data is speed
                v_circ_fesom_hr = node_data_hr*sin(theta_circ_fesom_hr)
                # Loop over 2D nodes to fill in the velocity bins as before
                for n in range(fesom_n2d_hr):
                    if fesom_x_hr[n] > x_min[index] and fesom_x_hr[n] < x_max[index] and fesom_y_hr[n] > y_min[index] and fesom_y_hr[n] < y_max[index]:
                        x_index = nonzero(x_bins > fesom_x_hr[n])[0][0]-1
                        y_index = nonzero(y_bins > fesom_y_hr[n])[0][0]-1
                        fesom_u_hr[y_index, x_index] += u_circ_fesom_hr[n]
                        fesom_v_hr[y_index, x_index] += v_circ_fesom_hr[n]
                        fesom_num_pts_hr[y_index, x_index] += 1
                fesom_u_hr = ma.masked_where(fesom_num_pts_hr==0, fesom_u_hr)
                fesom_v_hr = ma.masked_where(fesom_num_pts_hr==0, fesom_v_hr)
                flag = fesom_num_pts_hr > 0
                fesom_u_hr[flag] = fesom_u_hr[flag]/fesom_num_pts_hr[flag]
                fesom_v_hr[flag] = fesom_v_hr[flag]/fesom_num_pts_hr[flag]
            # Plot
            fig = figure(figsize=(20, ysize[index]))
            fig.patch.set_facecolor('white')
            # MetROMS
            ax = fig.add_subplot(1,3,1, aspect='equal')
            # First shade land and zice in grey
            contourf(roms_x, roms_y, land_zice, 1, colors=(('0.6', '0.6', '0.6')))
            # Fill in the missing circle
            contourf(x_reg_roms, y_reg_roms, land_circle, 1, colors=(('0.6', '0.6', '0.6')))
            # Now shade the data
            pcolor(roms_x, roms_y, roms_data, vmin=var_min, vmax=var_max, cmap=colour_map)
            if var == 'vel':
                # Overlay vectors
                quiver(x_centres, y_centres, roms_u, roms_v, scale=1.5, headwidth=6, headlength=7, color='black')
            xlim([x_min[index], x_max[index]])
            ylim([y_min[index], y_max[index]])
            axis('off')
            title('MetROMS', fontsize=24)
            # FESOM low-res
            ax = fig.add_subplot(1,3,2, aspect='equal')
            # Start with land background
            contourf(x_reg_fesom, y_reg_fesom, land_square, 1, colors=(('0.6', '0.6', '0.6')))
            # Add elements
            if var in ['draft', 'melt', 'wct']:
                # Ice shelf elements only
                img = PatchCollection(patches_shelf_lr, cmap=colour_map)
            else:
                img = PatchCollection(patches_lr, cmap=colour_map)
            img.set_array(array(fesom_data_lr))
            img.set_edgecolor('face')
            img.set_clim(vmin=var_min, vmax=var_max)
            ax.add_collection(img)
            if var in ['draft', 'melt', 'wct']:
                # Mask out the open ocean in white
                overlay = PatchCollection(patches_ocn_lr, facecolor=(1,1,1))
                overlay.set_edgecolor('face')
                ax.add_collection(overlay)
            if var == 'vel':
                # Overlay vectors
                quiver(x_centres, y_centres, fesom_u_lr, fesom_v_lr, scale=1.5, headwidth=6, headlength=7, color='black')
            xlim([x_min[index], x_max[index]])
            ylim([y_min[index], y_max[index]])
            axis('off')
            title('FESOM (low-res)', fontsize=24)
            # FESOM high-res
            ax = fig.add_subplot(1,3,3, aspect='equal')
            contourf(x_reg_fesom, y_reg_fesom, land_square, 1, colors=(('0.6', '0.6', '0.6')))
            if var in ['draft', 'melt', 'wct']:
                # Ice shelf elements only
                img = PatchCollection(patches_shelf_hr, cmap=colour_map)
            else:
                img = PatchCollection(patches_hr, cmap=colour_map)
            img.set_array(array(fesom_data_hr))
            img.set_edgecolor('face')
            img.set_clim(vmin=var_min, vmax=var_max)
            ax.add_collection(img)
            if var in ['draft', 'melt', 'wct']:
                overlay = PatchCollection(patches_ocn_hr, facecolor=(1,1,1))
                overlay.set_edgecolor('face')
                ax.add_collection(overlay)
            if var == 'vel':
                # Overlay vectors
                quiver(x_centres, y_centres, fesom_u_hr, fesom_v_hr, scale=1.5, headwidth=6, headlength=7, color='black')
            xlim([x_min[index], x_max[index]])
            ylim([y_min[index], y_max[index]])
            axis('off')
            title('FESOM (high-res)', fontsize=24)
            # Colourbar on the right
            cbaxes = fig.add_axes([0.92, 0.2, 0.01, 0.6])
            cbar = colorbar(img, cax=cbaxes)
            cbar.ax.tick_params(labelsize=20)
            # Main title
            if var == 'draft':
                title_string = ' draft (m)'
            elif var == 'bathy':
                title_string = ' bathymetry (m)'
            elif var == 'wct':
                title_string = ' water column thickness (m)'
            elif var == 'melt':
                title_string = ' melt rate (m/y)'
            elif var == 'temp':
                title_string = r' bottom water temperature ($^{\circ}$C)'
            elif var == 'salt':
                title_string = ' bottom water salinity (psu)'
            elif var == 'vel':
                title_string = ' vertically averaged ocean velocity (m/s)'
            suptitle(region_names[index] + title_string, fontsize=30)
            subplots_adjust(wspace=0.05)
            #fig.show()
            fig.savefig(fig_heads[index] + '_' + var + '.png')
Beispiel #7
def timeseries_dpt (grid_path, file_path, log_path):
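    # Builds a timeseries of Drake Passage transport: ubar is interpolated to a
    # zonal slice at 67W, multiplied by water column thickness and dy, and
    # integrated from 68S to 54.5S. Existing values are read from log_path if it
    # exists, and the extended timeseries is plotted and written back to log_path.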

    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi/180.0
    # Longitude of Drake Passage zonal slice (convert to ROMS bounds 0-360)
    lon0 = -67 + 360
    # Latitude bounds on Drake Passage zonal slice
    lat_min = -68
    lat_max = -54.5

    time = []
    dpt = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            dpt.append(float(line))
        f.close()

    print 'Reading grid'
    id = Dataset(grid_path, 'r')
    h = id.variables['h'][:-15,1:-1]
    zice = id.variables['zice'][:-15,1:-1]
    lon = id.variables['lon_rho'][:-15,1:-1]
    lat = id.variables['lat_rho'][:-15,1:-1]
    # Keep the overlapping periodic boundary on "angle" for now
    angle = id.variables['angle'][:-15,:]
    id.close()

    print 'Reading data'
    id = Dataset(file_path, 'r')
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:]/(60*60*24*365.25)
    num_time = size(new_time)
    # Concatenate with time values from log file
    for t in range(num_time):
        time.append(new_time[t])
    # Calculate time-dependent water column thickness: h + zice + zeta
    zeta = id.variables['zeta'][:,:-15,1:-1]
    wct = tile(h, (num_time,1,1)) + tile(zice, (num_time,1,1)) + zeta
    # Read barotropic velocities in x-y space
    ubar_xy = id.variables['ubar'][:,:-15,:]
    vbar_xy = id.variables['vbar'][:,:-15,:]
    id.close()

    print 'Rotating velocity vector'
    ubar = ma.empty([num_time, size(lon,0), size(lon,1)])
    # Rotate one time index at a time
    for t in range(num_time):        
        ubar_tmp, vbar_tmp = rotate_vector_roms(ubar_xy[t,:,:], vbar_xy[t,:,:], angle)
        # Throw away the overlapping periodic boundary before saving
        ubar[t,:,:] = ubar_tmp[:,1:-1]

    print 'Extracting zonal slice through Drake Passage'
    num_lat = size(lat,0)
    # Set up arrays for zonal slices of ubar, water column thickness, latitude
    ubar_DP = ma.empty([num_time, num_lat])
    wct_DP = ma.empty([num_time, num_lat])
    lat_DP = empty([num_lat])
    # Loop over latitude rows, interpolating in longitude within each row
    for j in range(num_lat):
        lon_tmp = lon[j,:]
        # Find indices and coefficients to interpolate to lon0
        ie, iw, coeffe, coeffw = interp_lon_helper(lon_tmp, lon0)
        # Use these to interpolate all 3 variables we care about
        ubar_DP[:,j] = coeffe*ubar[:,j,ie] + coeffw*ubar[:,j,iw]
        wct_DP[:,j] = coeffe*wct[:,j,ie] + coeffw*wct[:,j,iw]
        lat_DP[j] = coeffe*lat[j,ie] + coeffw*lat[j,iw]
    # Find indices for latitude bounds
    jS = nonzero(lat_DP > lat_min)[0][0]
    jN = nonzero(lat_DP > lat_max)[0][0]
    # Trim everything to these bounds
    ubar_DP = ubar_DP[:,jS:jN]
    wct_DP = wct_DP[:,jS:jN]
    lat_DP = lat_DP[jS:jN]
    # Calculate dy
    # First calculate latitude on edges of each cell
    middle_lat = 0.5*(lat_DP[:-1] + lat_DP[1:])
    s_bdry = 2*lat_DP[0] - middle_lat[0]
    n_bdry = 2*lat_DP[-1] - middle_lat[-1]
    lat_edges = zeros(size(lat_DP)+1)
    lat_edges[0] = s_bdry
    lat_edges[1:-1] = middle_lat
    lat_edges[-1] = n_bdry
    # Now calculate difference in latitude across each cell
    dlat_DP = lat_edges[1:] - lat_edges[:-1]
    # Convert to Cartesian space for dy in metres
    dy_DP = r*dlat_DP*deg2rad
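    # The transport at each time index is approximated below as
    # sum(ubar_DP*wct_DP*dy_DP) in m^3/s; the factor of 1e-6 converts to
    # Sverdrups (1 Sv = 10^6 m^3/s)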

    for t in range(num_time):
        # Integrate ubar*wct*dy and convert to Sv
        dpt.append(sum(ubar_DP[t,:]*wct_DP[t,:]*dy_DP)*1e-6)

    print 'Plotting'
    clf()
    plot(time, dpt)
    xlabel('Years')
    ylabel('Drake Passage Transport (Sv)')
    grid(True)
    savefig('drakepsgtrans.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Drake Passage Transport (Sv):\n')
    for elm in dpt:
        f.write(str(elm) + '\n')
    f.close()
Beispiel #8
def slope_current ():
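    # For each of MetROMS, low-res FESOM, and high-res FESOM: find the maximum
    # vertically averaged speed in each 1-degree longitude bin, restricted to
    # points south of lat0 with seafloor no deeper than h0 and outside ice shelf
    # cavities, then plot the three curves against longitude.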

    # File paths
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/2002_2016_avg.nc'
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    fesom_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/oce_2002_2016_avg.nc'
    fesom_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/oce_2002_2016_avg.nc'
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # FESOM mesh parameters
    circumpolar = False
    cross_180 = False
    # Spacing of longitude bins
    dlon = 1
    # Parameters for continental shelf selection
    lat0 = -64  # Maximum latitude to consider
    h0 = 2500  # Deepest depth to consider

    # Set up longitude bins
    # Start with edges
    lon_bins = arange(-180, 180+dlon, dlon)
    # Centres for plotting
    lon_centres = 0.5*(lon_bins[:-1] + lon_bins[1:])
    num_bins = size(lon_centres)
    # Set up arrays to store maximum barotropic speed in each bin
    current_roms = zeros(num_bins)
    current_fesom_lr = zeros(num_bins)
    current_fesom_hr = zeros(num_bins)

    print 'Processing MetROMS'

    print 'Reading grid'
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_h = id.variables['h'][:,:]
    roms_zice = id.variables['zice'][:,:]
    roms_angle = id.variables['angle'][:,:]
    id.close()
    print 'Reading data'
    # Read full 3D u and v
    id = Dataset(roms_file, 'r')
    u_3d_tmp = id.variables['u'][0,:,:,:]
    v_3d_tmp = id.variables['v'][0,:,:,:]
    id.close()
    print 'Vertically averaging velocity'
    # Get integrands on 3D grid; we only care about dz
    dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Unrotate each vertical level
    u_3d = ma.empty(shape(dz))
    v_3d = ma.empty(shape(dz))
    num_lat_u = size(u_3d_tmp,1)
    num_lon_u = size(u_3d_tmp,2)
    num_lat_v = size(v_3d_tmp,1)
    num_lon_v = size(v_3d_tmp,2)
    for k in range(N):
        u_k, v_k = rotate_vector_roms(u_3d_tmp[k,:,:], v_3d_tmp[k,:,:], roms_angle)
        u_3d[k,:,:] = u_k
        v_3d[k,:,:] = v_k
    # Vertically average u and v
    roms_u = sum(u_3d*dz, axis=0)/sum(dz, axis=0)
    roms_v = sum(v_3d*dz, axis=0)/sum(dz, axis=0)
    # Calculate speed
    roms_speed = sqrt(roms_u**2 + roms_v**2)
    print 'Selecting slope current'
    # First make sure longitude is between -180 and 180
    index = roms_lon > 180
    roms_lon[index] = roms_lon[index] - 360
    for j in range(size(roms_speed,0)):
        for i in range(size(roms_speed,1)):
            # Check if we care about this point
            if roms_lat[j,i] <= lat0 and roms_h[j,i] <= h0 and roms_zice[j,i] == 0:
                # Find longitude bin
                lon_index = nonzero(lon_bins > roms_lon[j,i])[0][0] - 1
                # Update slope current speed in this bin if needed
                if roms_speed[j,i] > current_roms[lon_index]:
                    current_roms[lon_index] = roms_speed[j,i]

    print 'Processing low-res FESOM'

    print 'Building mesh'
    # We only care about nodes, not elements, so don't need to use the
    # fesom_grid function.
    # Read cavity flag for each 2D surface node
    fesom_cavity_lr = []
    f = open(fesom_mesh_path_lr + 'cavity_flag_nod2d.out', 'r')
    for line in f:
        tmp = int(line)
        if tmp == 1:
            fesom_cavity_lr.append(True)
        elif tmp == 0:
            fesom_cavity_lr.append(False)
        else:
            print 'Problem'
    f.close()
    # Save the number of 2D nodes
    fesom_n2d_lr = len(fesom_cavity_lr)
    # Read rotated lat and lon for each node, also depth
    f = open(fesom_mesh_path_lr + 'nod3d.out', 'r')
    f.readline()
    rlon_lr = []
    rlat_lr = []
    node_depth_lr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        lat_tmp = float(tmp[2])
        node_depth_tmp = -1*float(tmp[3])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_lr.append(lon_tmp)
        rlat_lr.append(lat_tmp)
        node_depth_lr.append(node_depth_tmp)
    f.close()
    # For lat and lon, only care about the 2D nodes (the first
    # fesom_n2d indices)
    rlon_lr = array(rlon_lr[0:fesom_n2d_lr])
    rlat_lr = array(rlat_lr[0:fesom_n2d_lr])
    node_depth_lr = array(node_depth_lr)
    # Unrotate longitude
    fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr)
    # Read lists of which nodes are directly below which
    f = open(fesom_mesh_path_lr + 'aux3d.out', 'r')
    max_num_layers_lr = int(f.readline())
    node_columns_lr = zeros([fesom_n2d_lr, max_num_layers_lr])
    for n in range(fesom_n2d_lr):
        for k in range(max_num_layers_lr):
            node_columns_lr[n,k] = int(f.readline())
    node_columns_lr = node_columns_lr.astype(int)
    f.close()
    # Now figure out the bottom depth of each 2D node
    bottom_depth_lr = zeros(fesom_n2d_lr)
    for n in range(fesom_n2d_lr):
        node_id = node_columns_lr[n,0] - 1
        for k in range(1, max_num_layers_lr):
            if node_columns_lr[n,k] == -999:
                # Reached the bottom
                break
            node_id = node_columns_lr[n,k] - 1
        # Save the last valid depth
        bottom_depth_lr[n] = node_depth_lr[node_id]
    print 'Reading data'
    # Read full 3D field for both u and v
    id = Dataset(fesom_file_lr, 'r')
    node_ur_3d_lr = id.variables['u'][0,:]
    node_vr_3d_lr = id.variables['v'][0,:]
    id.close()
    print 'Vertically averaging velocity'
    # Vertically average
    node_ur_lr = zeros(fesom_n2d_lr)
    node_vr_lr = zeros(fesom_n2d_lr)
    for n in range(fesom_n2d_lr):
        # Integrate udz, vdz, and dz over this water column
        udz_col = 0
        vdz_col = 0
        dz_col = 0
        for k in range(max_num_layers_lr-1):
            if node_columns_lr[n,k+1] == -999:
                # Reached the bottom
                break
            # Trapezoidal rule
            top_id = node_columns_lr[n,k]
            bot_id = node_columns_lr[n,k+1]
            dz_tmp = node_depth_lr[bot_id-1] - node_depth_lr[top_id-1]
            udz_col += 0.5*(node_ur_3d_lr[top_id-1]+node_ur_3d_lr[bot_id-1])*dz_tmp
            vdz_col += 0.5*(node_vr_3d_lr[top_id-1]+node_vr_3d_lr[bot_id-1])*dz_tmp
            dz_col += dz_tmp
        # Convert from integrals to averages
        node_ur_lr[n] = udz_col/dz_col
        node_vr_lr[n] = vdz_col/dz_col
    # Unrotate
    node_u_lr, node_v_lr = unrotate_vector(rlon_lr, rlat_lr, node_ur_lr, node_vr_lr)
    # Calculate speed
    node_speed_lr = sqrt(node_u_lr**2 + node_v_lr**2)
    print 'Selecting slope current'
    for n in range(fesom_n2d_lr):
        # Check if we care about this node
        if fesom_lat_lr[n] <= lat0 and bottom_depth_lr[n] <= h0 and not fesom_cavity_lr[n]:
            # Find longitude bin
            lon_index = nonzero(lon_bins > fesom_lon_lr[n])[0][0] - 1
            # Update slope current speed in this bin if needed
            if node_speed_lr[n] > current_fesom_lr[lon_index]:
                current_fesom_lr[lon_index] = node_speed_lr[n]

    print 'Processing high-res FESOM'

    print 'Building mesh'
    fesom_cavity_hr = []
    f = open(fesom_mesh_path_hr + 'cavity_flag_nod2d.out', 'r')
    for line in f:
        tmp = int(line)
        if tmp == 1:
            fesom_cavity_hr.append(True)
        elif tmp == 0:
            fesom_cavity_hr.append(False)
        else:
            print 'Problem'
    f.close()
    fesom_n2d_hr = len(fesom_cavity_hr)
    f = open(fesom_mesh_path_hr + 'nod3d.out', 'r')
    f.readline()
    rlon_hr = []
    rlat_hr = []
    node_depth_hr = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        lat_tmp = float(tmp[2])
        node_depth_tmp = -1*float(tmp[3])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon_hr.append(lon_tmp)
        rlat_hr.append(lat_tmp)
        node_depth_hr.append(node_depth_tmp)
    f.close()
    rlon_hr = array(rlon_hr[0:fesom_n2d_hr])
    rlat_hr = array(rlat_hr[0:fesom_n2d_hr])
    node_depth_hr = array(node_depth_hr)
    fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr)
    f = open(fesom_mesh_path_hr + 'aux3d.out', 'r')
    max_num_layers_hr = int(f.readline())
    node_columns_hr = zeros([fesom_n2d_hr, max_num_layers_hr])
    for n in range(fesom_n2d_hr):
        for k in range(max_num_layers_hr):
            node_columns_hr[n,k] = int(f.readline())
    node_columns_hr = node_columns_hr.astype(int)
    f.close()
    bottom_depth_hr = zeros(fesom_n2d_hr)
    for n in range(fesom_n2d_hr):
        node_id = node_columns_hr[n,0] - 1
        for k in range(1, max_num_layers_hr):
            if node_columns_hr[n,k] == -999:
                break
            node_id = node_columns_hr[n,k] - 1
        bottom_depth_hr[n] = node_depth_hr[node_id]
    print 'Reading data'
    id = Dataset(fesom_file_hr, 'r')
    node_ur_3d_hr = id.variables['u'][0,:]
    node_vr_3d_hr = id.variables['v'][0,:]
    id.close()
    print 'Vertically averaging velocity'
    node_ur_hr = zeros(fesom_n2d_hr)
    node_vr_hr = zeros(fesom_n2d_hr)
    for n in range(fesom_n2d_hr):
        udz_col = 0
        vdz_col = 0
        dz_col = 0
        for k in range(max_num_layers_hr-1):
            if node_columns_hr[n,k+1] == -999:
                break
            top_id = node_columns_hr[n,k]
            bot_id = node_columns_hr[n,k+1]
            dz_tmp = node_depth_hr[bot_id-1] - node_depth_hr[top_id-1]
            udz_col += 0.5*(node_ur_3d_hr[top_id-1]+node_ur_3d_hr[bot_id-1])*dz_tmp
            vdz_col += 0.5*(node_vr_3d_hr[top_id-1]+node_vr_3d_hr[bot_id-1])*dz_tmp
            dz_col += dz_tmp
        node_ur_hr[n] = udz_col/dz_col
        node_vr_hr[n] = vdz_col/dz_col
    node_u_hr, node_v_hr = unrotate_vector(rlon_hr, rlat_hr, node_ur_hr, node_vr_hr)
    node_speed_hr = sqrt(node_u_hr**2 + node_v_hr**2)
    print 'Selecting slope current'
    for n in range(fesom_n2d_hr):
        if fesom_lat_hr[n] <= lat0 and bottom_depth_hr[n] <= h0 and not fesom_cavity_hr[n]:
            lon_index = nonzero(lon_bins > fesom_lon_hr[n])[0][0] - 1
            if node_speed_hr[n] > current_fesom_hr[lon_index]:
                current_fesom_hr[lon_index] = node_speed_hr[n]

    print 'Plotting'
    fig = figure(figsize=(12,8))
    plot(lon_centres, current_roms, color='blue', label='MetROMS')
    plot(lon_centres, current_fesom_lr, color='green', label='FESOM low-res')
    plot(lon_centres, current_fesom_hr, color='magenta', label='FESOM high-res')
    grid(True)
    title('Slope current speed', fontsize=20)
    xlabel('Longitude', fontsize=14)
    ylabel('m/s', fontsize=14)
    xlim([-180, 180])
    legend()
    fig.savefig('slope_current.png')

    print 'Mean slope current in MetROMS: ' + str(mean(current_roms)) + ' m/s'
    print 'Mean slope current in low-res FESOM: ' + str(mean(current_fesom_lr)) + ' m/s'
    print 'Mean slope current in high-res FESOM: ' + str(mean(current_fesom_hr)) + ' m/s'


# Command-line interface
if __name__ == "__main__":

    slope_current()
def uv_vectorplot (grid_path, file_path, tstep, depth_key, save=False, fig_name=None):

    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi/180
    # Side length of blocks to average vectors over (can't plot vector at every
    # single point or the plot will be way too crowded)
    block = 15

    # Read angle from grid file
    grid_id = Dataset(grid_path, 'r')
    angle = grid_id.variables['angle'][:-15,:]
    grid_id.close()
    # Read grid and velocity data
    id = Dataset(file_path, 'r')
    lon = id.variables['lon_rho'][:-15,:-1]
    lat = id.variables['lat_rho'][:-15,:-1]
    if depth_key == 1:
        # Surface u and v
        u = id.variables['u'][tstep-1,-1,:-15,:]
        v = id.variables['v'][tstep-1,-1,:-15,:]
    elif depth_key == 2:
        # Bottom u and v
        u = id.variables['u'][tstep-1,0,:-15,:]
        v = id.variables['v'][tstep-1,0,:-15,:]
    elif depth_key == 3:
        # Vertically averaged u and v
        u = id.variables['ubar'][tstep-1,:-15,:]
        v = id.variables['vbar'][tstep-1,:-15,:]
    id.close()

    # Rotate velocities to lat-lon space
    u_lonlat, v_lonlat = rotate_vector_roms(u, v, angle)
    # Throw away the overlapping periodic boundary
    u_rho = u_lonlat[:,:-1]
    v_rho = v_lonlat[:,:-1]
    # Calculate speed for the background filled contour plot
    speed = sqrt(u_rho**2 + v_rho**2)

    # Calculate X and Y coordinates for plotting circumpolar projection
    X = -(lat+90)*cos(lon*deg2rad+pi/2)
    Y  = (lat+90)*sin(lon*deg2rad+pi/2)

    # Calculate velocity components in spherical coordinate space
    # (just differentiate and rearrange spherical coordinate transformations)
    dlon_dt = u_rho/(r*cos(lat*deg2rad)*deg2rad)
    dlat_dt = v_rho/(r*deg2rad)
    # Calculate velocity components in X-Y space (just differentiate and
    # rearrange equations for X and Y above)
    dX_dt = -dlat_dt*cos(lon*deg2rad+pi/2) + (lat+90)*sin(lon*deg2rad+pi/2)*dlon_dt*deg2rad
    dY_dt = dlat_dt*sin(lon*deg2rad+pi/2) + (lat+90)*cos(lon*deg2rad+pi/2)*dlon_dt*deg2rad

    # Average X, Y, dX_dt, and dY_dt over block x block intervals
    # Calculate number of blocks
    size0 = int(ceil(size(X,0)/float(block)))
    size1 = int(ceil((size(X,1)-1)/float(block)))
    # Set up arrays for averaged fields
    X_block = ma.empty([size0, size1])
    Y_block = ma.empty([size0, size1])
    dX_dt_block = ma.empty([size0, size1])
    dY_dt_block = ma.empty([size0, size1])
    # Set up arrays containing boundary indices
    posn0 = range(0, size(X,0), block)
    posn0.append(size(X,0))
    posn1 = range(0, size(X,1), block)
    posn1.append(size(X,1))
    # Double loop to average each block (a vectorised alternative that drops
    # the ragged edge blocks is sketched after this function)
    for j in range(size0):
        for i in range(size1):
            start0 = posn0[j]
            end0 = posn0[j+1]
            start1 = posn1[i]
            end1 = posn1[i+1]
            X_block[j,i] = mean(X[start0:end0, start1:end1])
            Y_block[j,i] = mean(Y[start0:end0, start1:end1])
            dX_dt_block[j,i] = mean(dX_dt[start0:end0, start1:end1])
            dY_dt_block[j,i] = mean(dY_dt[start0:end0, start1:end1])

    # Make the plot
    fig = figure(figsize=(16,12))
    fig.add_subplot(1,1,1, aspect='equal')
    # Contour speed values at every point
    # Use pastel colour map so overlaid vectors will show up
    contourf(X, Y, speed, 50, cmap='Paired')
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    # Add vectors for each block
    quiver(X_block, Y_block, dX_dt_block, dY_dt_block, color='black')
    if depth_key == 1:
        title('Surface velocity (m/s)', fontsize=30)
    elif depth_key == 2:
        title('Bottom velocity (m/s)', fontsize=30)
    elif depth_key == 3:
        title('Vertically averaged velocity (m/s)', fontsize=30)
    axis('off')

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
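
For arrays whose dimensions are exact multiples of the block size, the block averaging above can be done with a single reshape instead of a double loop; a sketch for plain (unmasked) arrays, which drops the ragged blocks at the edges that the loop keeps:

import numpy as np

def block_average(field, block):
    # Trim so both dimensions divide evenly by the block size, then average
    # each block x block tile in one reshape
    ny = (field.shape[0] // block) * block
    nx = (field.shape[1] // block) * block
    trimmed = field[:ny, :nx]
    return trimmed.reshape(ny // block, block, nx // block, block).mean(axis=(1, 3))
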
def circumpolar_plot(
    file_path,
    var_name,
    tstep,
    depth_key,
    depth,
    depth_bounds,
    colour_bounds=None,
    save=False,
    fig_name=None,
    grid_path=None,
):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31
    deg2rad = pi / 180

    # Read the variable and figure out if 2D or 3D (not including time)
    id = Dataset(file_path, "r")
    if len(id.variables[var_name].shape) == 4:
        # 3D variable; will have to choose depth later
        data_full = id.variables[var_name][tstep - 1, :, :-15, :]
        choose_depth = True
    elif len(id.variables[var_name].shape) == 3:
        # 2D variable
        data = id.variables[var_name][tstep - 1, :-15, :]
        choose_depth = False
    if var_name == "salt":
        units = "psu"
    elif var_name == "m":
        # Convert ice shelf melt rate from m/s to m/yr
        units = "m/year"
        data = data * 60.0 * 60.0 * 24.0 * 365.25
    else:
        units = id.variables[var_name].units
    long_name = id.variables[var_name].long_name

    # Check for vector variables that need to be rotated
    if var_name in ["ubar", "vbar", "u", "v", "sustr", "svstr", "bustr", "bvstr"]:
        grid_id = Dataset(grid_path, "r")
        angle = grid_id.variables["angle"][:-15, :]
        grid_id.close()
        if var_name in ["ubar", "sustr", "bustr"]:
            # 2D u-variable
            u_data = data[:, :]
            v_data = id.variables[var_name.replace("u", "v")][tstep - 1, :-15, :]
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
            data = u_data_lonlat
        elif var_name in ["vbar", "svstr", "bvstr"]:
            # 2D v-variable
            v_data = data[:, :]
            u_data = id.variables[var_name.replace("v", "u")][tstep - 1, :-15, :]
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
            data = v_data_lonlat
        elif var_name in ["u"]:
            # 3D u-variable
            data_full_ugrid = data_full[:, :, :]
            data_full = ma.empty([data_full_ugrid.shape[0], data_full_ugrid.shape[1], data_full_ugrid.shape[2] + 1])
            for k in range(N):
                u_data = data_full_ugrid[k, :, :]
                v_data = id.variables[var_name.replace("u", "v")][tstep - 1, k, :-15, :]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
                data_full[k, :, :] = u_data_lonlat
        elif var_name in ["v"]:
            # 3D v-variable
            data_full_vgrid = data_full[:, :, :]
            data_full = ma.empty([data_full_vgrid.shape[0], data_full_vgrid.shape[1] + 1, data_full_vgrid.shape[2]])
            for k in range(N):
                v_data = data_full_vgrid[k, :, :]
                u_data = id.variables[var_name.replace("v", "u")][tstep - 1, k, :-15, :]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
                data_full[k, :, :] = v_data_lonlat

    # Read grid variables
    h = id.variables["h"][:-15, :]
    zice = id.variables["zice"][:-15, :]
    lon = id.variables["lon_rho"][:-15, :]
    lat = id.variables["lat_rho"][:-15, :]
    id.close()

    # Throw away the overlapping periodic boundary
    if choose_depth:
        data_full = data_full[:, :, :-1]
    else:
        data = data[:, :-1]
    lon = lon[:, :-1]
    lat = lat[:, :-1]
    h = h[:, :-1]
    zice = zice[:, :-1]

    # Convert to spherical coordinates
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)

    # Choose what to write on the title about depth
    if choose_depth:
        if depth_key == 0:
            depth_string = "at surface"
        elif depth_key == 1:
            depth_string = "at bottom"
        elif depth_key == 2:
            depth_string = "at " + str(int(round(-depth))) + " m"
        elif depth_key == 3:
            depth_string = "vertically averaged"
        elif depth_key == 4:
            depth_string = (
                "vertically averaged between "
                + str(int(round(-depth_bounds[0])))
                + " and "
                + str(int(round(-depth_bounds[1])))
                + " m"
            )
    else:
        depth_string = ""

    if choose_depth:
        # For 3D variables, select data corresponding to depth choice
        if depth_key == 0:
            # Surface layer
            data = data_full[-1, :, :]
        elif depth_key == 1:
            # Bottom layer
            data = data_full[0, :, :]
        else:
            # We will need z-coordinates and possibly dz
            dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N)
            if depth_key == 2:
                # Interpolate to given depth
                data = interp_depth(data_full, z, depth)
            elif depth_key == 3:
                # Vertically average entire water column
                data = sum(data_full * dz, axis=0) / sum(dz, axis=0)
            elif depth_key == 4:
                # Vertically average between given depths
                data = average_btw_depths(data_full, z, dz, depth_bounds)

    if colour_bounds is not None:
        # User has set bounds on colour scale
        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
        if colour_bounds[0] == -colour_bounds[1]:
            # Bounds are centered on zero, so choose a blue-to-red colourmap
            # centered on yellow
            colour_map = "RdYlBu_r"
        else:
            colour_map = "jet"
    else:
        # Determine bounds automatically
        if var_name in ["u", "v", "ubar", "vbar", "m", "shflux", "ssflux", "sustr", "svstr", "bustr", "bvstr"]:
            # Center levels on 0 for certain variables, with a blue-to-red
            # colourmap
            max_val = amax(abs(data))
            lev = linspace(-max_val, max_val, num=40)
            colour_map = "RdYlBu_r"
        else:
            lev = linspace(amin(data), amax(data), num=40)
            colour_map = "jet"

    # Plot
    fig = figure(figsize=(16, 12))
    fig.add_subplot(1, 1, 1, aspect="equal")
    contourf(x, y, data, lev, cmap=colour_map, extend="both")
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    title(long_name + " (" + units + ")\n" + depth_string, fontsize=30)
    axis("off")

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
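
The same polar transformation, x = -(lat+90)*cos(lon+90 degrees) and y = (lat+90)*sin(lon+90 degrees), is repeated in most of these plotting scripts; pulled out as a small helper (lon and lat in degrees) it is simply:

from numpy import pi, sin, cos

def circumpolar_xy(lon, lat):
    # Radial distance is degrees from the South Pole; the +pi/2 offset puts
    # 0 degrees E along the positive y-axis
    deg2rad = pi / 180.0
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)
    return x, y
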
Beispiel #11
def bugs_acc_fig(grid_path, laplacian_file, biharmonic_file, tstep):

    # Month names for title
    month_names = [
        'January', 'February', 'March', 'April', 'May', 'June', 'July',
        'August', 'September', 'October', 'November', 'December'
    ]
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0
    # Bounds on plot (in polar coordinate transformation)
    x_min = -40
    x_max = 0
    y_min = 0
    y_max = 40
    # Minimum speed to plot (mask out values below)
    threshold = 0.12
    # Maximum speed to plot
    bound = 1

    # Read angle and grid
    id = Dataset(grid_path, 'r')
    angle = id.variables['angle'][:-15, :]
    lon = id.variables['lon_rho'][:-15, :-1]
    lat = id.variables['lat_rho'][:-15, :-1]
    mask = id.variables['mask_rho'][:-15, :-1]
    id.close()

    # Set up grey shading of land
    mask = ma.masked_where(mask == 1, mask)
    grey_cmap = ListedColormap([(0.6, 0.6, 0.6)])
    x_reg, y_reg = meshgrid(linspace(x_min, x_max, num=1000),
                            linspace(y_min, y_max, num=1000))
    land_circle = zeros(shape(x_reg))
    lon_c = 50
    lat_c = -83
    radius = 10.1
    x_c = -(lat_c + 90) * cos(lon_c * deg2rad + pi / 2)
    y_c = (lat_c + 90) * sin(lon_c * deg2rad + pi / 2)
    land_circle = ma.masked_where(
        sqrt((x_reg - x_c)**2 + (y_reg - y_c)**2) > radius, land_circle)
    # Truncate colourmap
    min_colour = threshold / bound
    max_colour = 1
    trunc_cmap = truncate_colormap(get_cmap('jet'), min_colour, max_colour)

    # Read surface velocity
    id = Dataset(laplacian_file, 'r')
    ur_lap = id.variables['u'][tstep - 1, -1, :-15, :]
    vr_lap = id.variables['v'][tstep - 1, -1, :-15, :]
    # Rotate to lon-lat space
    u_lap, v_lap = rotate_vector_roms(ur_lap, vr_lap, angle)
    # Now that they're on the same grid, get the speed
    speed_laplacian = sqrt(u_lap**2 + v_lap**2)
    # Also read time and convert to Date object
    time_id = id.variables['ocean_time']
    time = num2date(time_id[tstep - 1],
                    units=time_id.units,
                    calendar=time_id.calendar.lower())
    # Get the date for the title
    date_string = str(
        time.day) + ' ' + month_names[time.month - 1] + ' ' + str(time.year)
    id.close()
    # Repeat velocity for biharmonic simulation
    id = Dataset(biharmonic_file, 'r')
    ur_bih = id.variables['u'][tstep - 1, -1, :-15, :]
    vr_bih = id.variables['v'][tstep - 1, -1, :-15, :]
    u_bih, v_bih = rotate_vector_roms(ur_bih, vr_bih, angle)
    speed_biharmonic = sqrt(u_bih**2 + v_bih**2)
    id.close()

    # Calculate x and y coordinates for plotting circumpolar projection
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)
    speed_laplacian = ma.masked_where(speed_laplacian < threshold,
                                      speed_laplacian)
    speed_biharmonic = ma.masked_where(speed_biharmonic < threshold,
                                       speed_biharmonic)

    # Make the plot
    fig = figure(figsize=(20, 10))
    # Laplacian
    ax = fig.add_subplot(1, 2, 1, aspect='equal')
    # First shade land in grey
    pcolor(x, y, mask, cmap=grey_cmap)
    pcolor(x_reg, y_reg, land_circle, cmap=grey_cmap)
    pcolor(x, y, speed_laplacian, vmin=threshold, vmax=bound, cmap=trunc_cmap)
    title('Laplacian viscosity', fontsize=24)
    xlim([x_min, x_max])
    ylim([y_min, y_max])
    ax.set_xticks([])
    ax.set_yticks([])
    # Biharmonic
    ax = fig.add_subplot(1, 2, 2, aspect='equal')
    pcolor(x, y, mask, cmap=grey_cmap)
    pcolor(x_reg, y_reg, land_circle, cmap=grey_cmap)
    img = pcolor(x,
                 y,
                 speed_biharmonic,
                 vmin=threshold,
                 vmax=bound,
                 cmap=trunc_cmap)
    title('Biharmonic viscosity', fontsize=24)
    xlim([x_min, x_max])
    ylim([y_min, y_max])
    ax.set_xticks([])
    ax.set_yticks([])
    # Colourbar on the right
    cbaxes = fig.add_axes([0.94, 0.3, 0.02, 0.4])
    cbar = colorbar(img,
                    cax=cbaxes,
                    extend='max',
                    ticks=arange(0.2, 1 + 0.2, 0.2))
    cbar.ax.tick_params(labelsize=16)
    suptitle('Surface speed (m/s): snapshot on ' + date_string, fontsize=30)
    subplots_adjust(wspace=0.05)
    fig.show()
    fig.savefig('bugs_acc.png')
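
truncate_colormap is called above but never defined in this listing; a common way to build it with matplotlib (a sketch, not necessarily the original implementation):

from numpy import linspace
from matplotlib.colors import LinearSegmentedColormap

def truncate_colormap(cmap, minval=0.0, maxval=1.0, n=256):
    # Sample the parent colourmap between minval and maxval and rebuild a new
    # colourmap from those colours
    colours = cmap(linspace(minval, maxval, n))
    return LinearSegmentedColormap.from_list('trunc_' + cmap.name, colours, N=n)
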
Beispiel #12
def uv_vectorplot(grid_path,
                  file_path,
                  tstep,
                  depth_key,
                  save=False,
                  fig_name=None):

    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180
    # Side length of blocks to average vectors over (can't plot vector at every
    # single point or the plot will be way too crowded)
    block = 15

    # Read angle from grid file
    grid_id = Dataset(grid_path, 'r')
    angle = grid_id.variables['angle'][:-15, :]
    grid_id.close()
    # Read grid and velocity data
    id = Dataset(file_path, 'r')
    lon = id.variables['lon_rho'][:-15, :-1]
    lat = id.variables['lat_rho'][:-15, :-1]
    if depth_key == 1:
        # Surface u and v
        u = id.variables['u'][tstep - 1, -1, :-15, :]
        v = id.variables['v'][tstep - 1, -1, :-15, :]
    elif depth_key == 2:
        # Bottom u and v
        u = id.variables['u'][tstep - 1, 0, :-15, :]
        v = id.variables['v'][tstep - 1, 0, :-15, :]
    elif depth_key == 3:
        # Vertically averaged u and v
        u = id.variables['ubar'][tstep - 1, :-15, :]
        v = id.variables['vbar'][tstep - 1, :-15, :]
    id.close()

    # Rotate velocities to lat-lon space
    u_lonlat, v_lonlat = rotate_vector_roms(u, v, angle)
    # Throw away the overlapping periodic boundary
    u_rho = u_lonlat[:, :-1]
    v_rho = v_lonlat[:, :-1]
    # Calculate speed
    speed = sqrt(u_rho**2 + v_rho**2)
    # Convert velocity to polar coordinates, rotate to account for longitude in
    # circumpolar projection, and convert back to vector components
    theta = arctan2(v_rho, u_rho)
    theta_circ = theta - lon * deg2rad
    u_circ = speed * cos(theta_circ)
    v_circ = speed * sin(theta_circ)

    # Calculate x and y coordinates for plotting circumpolar projection
    x = -(lat + 90) * cos(lon * deg2rad + pi / 2)
    y = (lat + 90) * sin(lon * deg2rad + pi / 2)

    # Average x, y, u_circ, and v_circ over block x block intervals
    # Calculate number of blocks
    size0 = int(ceil(size(x, 0) / float(block)))
    size1 = int(ceil((size(x, 1) - 1) / float(block)))
    # Set up arrays for averaged fields
    x_block = ma.empty([size0, size1])
    y_block = ma.empty([size0, size1])
    u_circ_block = ma.empty([size0, size1])
    v_circ_block = ma.empty([size0, size1])
    # Set up arrays containing boundary indices
    posn0 = range(0, size(x, 0), block)
    posn0.append(size(x, 0))
    posn1 = range(0, size(x, 1), block)
    posn1.append(size(x, 1))
    # Double loop to average each block (can't find a more efficient way to do
    # this)
    for j in range(size0):
        for i in range(size1):
            start0 = posn0[j]
            end0 = posn0[j + 1]
            start1 = posn1[i]
            end1 = posn1[i + 1]
            x_block[j, i] = mean(x[start0:end0, start1:end1])
            y_block[j, i] = mean(y[start0:end0, start1:end1])
            u_circ_block[j, i] = mean(u_circ[start0:end0, start1:end1])
            v_circ_block[j, i] = mean(v_circ[start0:end0, start1:end1])

    # Make the plot
    fig = figure(figsize=(16, 12))
    fig.add_subplot(1, 1, 1, aspect='equal')
    # Contour speed values at every point
    # Use pastel colour map so overlaid vectors will show up
    contourf(x, y, speed, 50, cmap='Paired')
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    # Add vectors for each block
    quiver(x_block, y_block, u_circ_block, v_circ_block, color='black')
    if depth_key == 1:
        title('Surface velocity (m/s)', fontsize=30)
    elif depth_key == 2:
        title('Bottom velocity (m/s)', fontsize=30)
    elif depth_key == 3:
        title('Vertically averaged velocity (m/s)', fontsize=30)
    axis('off')

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
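
The rotation above (convert (u, v) to speed and angle, subtract the longitude, convert back) is equivalent to one complex multiplication; a compact sketch using the same convention:

from numpy import exp, pi, real, imag

def rotate_to_circumpolar(u, v, lon):
    # Multiplying by exp(-i*lon) subtracts the longitude from the vector angle
    deg2rad = pi / 180.0
    w = (u + 1j * v) * exp(-1j * lon * deg2rad)
    return real(w), imag(w)
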
def overturning_plot (grid_path, file_path, fig_name):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31

    # Read angle from the grid file
    grid_id = Dataset(grid_path, 'r')
    angle = grid_id.variables['angle'][:-15,:]
    grid_id.close()
    # Read grid variables
    id = Dataset(file_path, 'r')
    h = id.variables['h'][:-15,1:-1]
    zice = id.variables['zice'][:-15,1:-1]
    zeta = id.variables['zeta'][-1,:-15,1:-1]
    lon = id.variables['lon_rho'][:-15,1:-1]
    lat = id.variables['lat_rho'][:-15,1:-1]
    # Read both velocities in x-y space
    u_xy = id.variables['u'][-1,:,:-15,:]
    v_xy = id.variables['v'][-1,:,:-15,:]    
    id.close()

    # Rotate velocities to lat-lon space
    v = ma.empty([N,v_xy.shape[1]+1,v_xy.shape[2]])
    for k in range(N):
        u_lonlat, v_lonlat = rotate_vector_roms(u_xy[k,:,:], v_xy[k,:,:], angle)
        v[k,:,:] = v_lonlat[:,:]
    # Throw away the periodic boundary overlap
    v = v[:,:,1:-1]

    # Calculate Cartesian integrands and z-coordinates
    dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta)

    # Calculate transport in each cell
    transport = v*dx*dz
    # Definite integral over longitude
    transport = sum(transport, axis=2)
    # Indefinite integral over depth; flip before and after so the integral
    # starts at the surface, not the bottom. Also convert to Sv.
    transport = flipud(cumsum(flipud(transport), axis=0))*1e-6

    # Calculate latitude and z coordinates, averaged over longitude,
    # for plotting
    avg_lat = mean(lat, axis=1)
    avg_lat = tile(avg_lat, (N,1))
    avg_z = mean(z, axis=2)

    # Centre colour scale on 0
    max_val = amax(abs(transport))
    lev = linspace(-max_val, max_val, num=40)

    # Make the plot
    figure(figsize=(16,8))
    contourf(avg_lat, avg_z, transport, lev, cmap='RdBu_r')
    colorbar()
    xlabel('Latitude')
    ylabel('Depth (m)')
    title('Meridional Overturning Streamfunction (Sv)')

    #savefig(fig_name)
    show()
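
The streamfunction calculation above, isolated as a reusable helper (a sketch; v, dx and dz are [N, lat, lon] arrays on the same grid, and the result is in Sv with the integration starting at the surface):

from numpy import cumsum, flipud
from numpy import sum as np_sum

def overturning_streamfunction(v, dx, dz):
    # Zonal integral of the meridional transport in each grid cell (m^3/s)
    transport = np_sum(v * dx * dz, axis=2)
    # Indefinite integral over depth, flipped so it starts at the surface,
    # then converted to Sv
    return flipud(cumsum(flipud(transport), axis=0)) * 1e-6
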
Beispiel #14
def timeseries_3D (grid_path, file_path, log_path):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31
    rho0 = 1000.0    # Reference density (kg/m^3)
    Cp = 3974        # Specific heat of polar seawater (J/K/kg)
    C2K = 273.15     # Celsius to Kelvin conversion

    time = []
    ohc = []
    avgsalt = []
    tke = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                ohc.append(float(line))
            except(ValueError):
                break
        for line in f:
            try:
                avgsalt.append(float(line))
            except(ValueError):
                break
        for line in f:
            tke.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(grid_path, 'r')
    h = id.variables['h'][:-15,1:-1]
    zice = id.variables['zice'][:-15,1:-1]    
    lon = id.variables['lon_rho'][:-15,1:-1]
    lat = id.variables['lat_rho'][:-15,1:-1]
    mask = id.variables['mask_rho'][:-15,1:-1]
    # Keep the overlapping periodic boundary on "angle" for now
    angle = id.variables['angle'][:-15,:]
    id.close()

    id = Dataset(file_path, 'r')
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:]/(60*60*24*365.25)
    num_time = size(new_time)
    # Concatenate with time values from log file
    for t in range(num_time):        
        time.append(new_time[t])

    # Process 10 time indices at a time so we don't use too much memory
    start_t = 0
    while True:
        end_t = min(start_t+10, num_time)
        print 'Processing time indices ' + str(start_t+1) + ' to ' + str(end_t)
        num_time_curr = end_t-start_t

        print 'Calculating time-dependent dV'
        # Read time-dependent sea surface height
        zeta = id.variables['zeta'][start_t:end_t,:-15,1:-1]
        # Calculate time-dependent dz
        dz = ma.empty([num_time_curr, N, size(lon,0), size(lon,1)])
        for t in range(num_time_curr):
            # dx and dy will be recomputed unnecessarily each timestep
            # but that's ok
            dx, dy, dz_tmp, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta[t,:,:])
            dz[t,:,:,:] = dz_tmp
        # Calculate time-dependent dV and mask with land mask
        # Here mask, dx, dy are all copied into arrays of dimension
        # time x depth x lat x lon
        dV = ma.masked_where(tile(mask, (num_time_curr,N,1,1))==0, tile(dx, (num_time_curr,1,1,1))*tile(dy, (num_time_curr,1,1,1))*dz)

        print 'Reading data'
        temp = id.variables['temp'][start_t:end_t,:,:-15,1:-1]
        salt = id.variables['salt'][start_t:end_t,:,:-15,1:-1]
        rho = id.variables['rho'][start_t:end_t,:,:-15,1:-1] + rho0
        # Keep overlapping periodic boundary for u and v
        u_xy = id.variables['u'][start_t:end_t,:,:-15,:]
        v_xy = id.variables['v'][start_t:end_t,:,:-15,:]

        print 'Interpolating velocities onto rho-grid'
        # We are actually rotating them at the same time as interpolating
        # which is a bit of unnecessary work (sum of squares won't change with
        # rotation) but not much extra work, and it's convenient
        u = ma.empty(shape(temp))
        v = ma.empty(shape(temp))
        for t in range(num_time_curr):
            for k in range(N):
                u_tmp, v_tmp = rotate_vector_roms(u_xy[t,k,:,:], v_xy[t,k,:,:], angle)
                u[t,k,:,:] = u_tmp[:,1:-1]
                v[t,k,:,:] = v_tmp[:,1:-1]

        print 'Building timeseries'
        for t in range(num_time_curr):
            # Integrate temp*rho*Cp*dV to get OHC
            ohc.append(sum((temp[t,:,:,:]+C2K)*rho[t,:,:,:]*Cp*dV[t,:,:,:]))
            # Average salinity (weighted with rho*dV)
            avgsalt.append(sum(salt[t,:,:,:]*rho[t,:,:,:]*dV[t,:,:,:])/sum(rho[t,:,:,:]*dV[t,:,:,:]))
            # Integrate 0.5*rho*speed^2*dV to get TKE
            tke.append(sum(0.5*rho[t,:,:,:]*(u[t,:,:,:]**2 + v[t,:,:,:]**2)*dV[t,:,:,:]))

        # Get ready for next 10 time indices
        if end_t == num_time:
            break
        start_t = end_t

    id.close()

    print 'Plotting ocean heat content'
    clf()
    plot(time, ohc)
    xlabel('Years')
    ylabel('Southern Ocean Heat Content (J)')
    grid(True)
    savefig('ohc.png')

    print 'Plotting average salinity'
    clf()
    plot(time, avgsalt)
    xlabel('Years')
    ylabel('Southern Ocean Average Salinity (psu)')
    grid(True)
    savefig('avgsalt.png')

    print 'Plotting total kinetic energy'
    clf()
    plot(time, tke)
    xlabel('Years')
    ylabel('Southern Ocean Total Kinetic Energy (J)')
    grid(True)
    savefig('tke.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Heat Content (J):\n')
    for elm in ohc:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Average Salinity (psu):\n')
    for elm in avgsalt:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Total Kinetic Energy (J):\n')
    for elm in tke:
        f.write(str(elm) + '\n')
    f.close()
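
The log files written by these timeseries scripts share the same layout: one header line per variable followed by one value per line. A compact reader for that format (a sketch, assuming the number of variables is known in advance):

def read_log_sections(log_path, num_sections):
    # Returns one list of floats per variable, in the order they were written
    sections = [[] for _ in range(num_sections)]
    with open(log_path, 'r') as f:
        f.readline()  # skip the header of the first (time) section
        i = 0
        for line in f:
            try:
                sections[i].append(float(line))
            except ValueError:
                # Reached the header of the next variable
                i += 1
    return sections

# e.g. time, ohc, avgsalt, tke = read_log_sections(log_path, 4)
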
def circumpolar_plot(file_path,
                     var_name,
                     depth_key,
                     depth,
                     depth_bounds,
                     colour_bounds=None,
                     save=False,
                     fig_name=None,
                     grid_path=None):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 50
    N = 31
    deg2rad = pi / 180

    # Read the variable and figure out if 2D or 3D (not including time)
    id = Dataset(file_path, 'r')
    if len(id.variables[var_name].shape) == 4:
        # 3D variable; will have to choose depth later
        data_full = mean(id.variables[var_name][:, :, :, :], axis=0)
        choose_depth = True
    elif len(id.variables[var_name].shape) == 3:
        # 2D variable
        data = mean(id.variables[var_name][:, :, :], axis=0)
        choose_depth = False
    if var_name == 'salt':
        units = 'psu'
    elif var_name == 'm':
        # Convert ice shelf melt rate from m/s to m/yr
        units = 'm/year'
        data = data * 60. * 60. * 24. * 365.25
    else:
        units = id.variables[var_name].units
    long_name = id.variables[var_name].long_name

    # Check for vector variables that need to be rotated
    if var_name in [
            'ubar', 'vbar', 'u', 'v', 'sustr', 'svstr', 'bustr', 'bvstr'
    ]:
        grid_id = Dataset(grid_path, 'r')
        angle = grid_id.variables['angle'][:, :]
        grid_id.close()
        if var_name in ['ubar', 'sustr', 'bustr']:
            # 2D u-variable
            u_data = data[:, :]
            v_data = mean(id.variables[var_name.replace('u', 'v')][:, :, :],
                          axis=0)
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                u_data, v_data, angle)
            data = u_data_lonlat
        elif var_name in ['vbar', 'svstr', 'bvstr']:
            # 2D v-variable
            v_data = data[:, :]
            u_data = mean(id.variables[var_name.replace('v', 'u')][:, :, :],
                          axis=0)
            u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                u_data, v_data, angle)
            data = v_data_lonlat
        elif var_name in ['u']:
            # 3D u-variable
            data_full_ugrid = data_full[:, :, :]
            data_full = ma.empty([
                data_full_ugrid.shape[0], data_full_ugrid.shape[1],
                data_full_ugrid.shape[2] + 1
            ])
            for k in range(N):
                u_data = data_full_ugrid[k, :, :]
                v_data = mean(id.variables[var_name.replace('u',
                                                            'v')][:, k, :, :],
                              axis=0)
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                    u_data, v_data, angle)
                data_full[k, :, :] = u_data_lonlat
        elif var_name in ['v']:
            # 3D v-variable
            data_full_vgrid = data_full[:, :, :]
            data_full = ma.empty([
                data_full_vgrid.shape[0], data_full_vgrid.shape[1] + 1,
                data_full_vgrid.shape[2]
            ])
            for k in range(N):
                v_data = data_full_vgrid[k, :, :]
                u_data = mean(id.variables[var_name.replace('v',
                                                            'u')][:, k, :, :],
                              axis=0)
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(
                    u_data, v_data, angle)
                data_full[k, :, :] = v_data_lonlat

    # Read grid variables
    h = id.variables['h'][:, :]
    zice = id.variables['zice'][:, :]
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    mask = id.variables['mask_rho'][:, :]
    id.close()

    # Throw away the overlapping periodic boundary
    #if choose_depth:
    #    data_full = data_full[:,:,:-1]
    #else:
    #    data = data[:,:-1]
    #lon = lon[:,:-1]
    #lat = lat[:,:-1]
    #h = h[:,:-1]
    #zice = zice[:,:-1]

    # Convert to spherical coordinates
    #x = -(lat+90)*cos(lon*deg2rad+pi/2)
    #y = (lat+90)*sin(lon*deg2rad+pi/2)

    # Choose what to write on the title about depth
    if choose_depth:
        if depth_key == 0:
            depth_string = 'at surface'
        elif depth_key == 1:
            depth_string = 'at bottom'
        elif depth_key == 2:
            depth_string = 'at ' + str(int(round(-depth))) + ' m'
        elif depth_key == 3:
            depth_string = 'vertically averaged'
        elif depth_key == 4:
            depth_string = 'vertically averaged between ' + str(
                int(round(-depth_bounds[0]))) + ' and ' + str(
                    int(round(-depth_bounds[1]))) + ' m'
    else:
        depth_string = ''

    if choose_depth:
        # For 3D variables, select data corresponding to depth choice
        if depth_key == 0:
            # Surface layer
            data = data_full[-1, :, :]
        elif depth_key == 1:
            # Bottom layer
            data = data_full[0, :, :]
        else:
            # We will need z-coordinates and possibly dz
            dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s,
                                              theta_b, hc, N)
            if depth_key == 2:
                # Interpolate to given depth
                data = interp_depth(data_full, z, depth)
            elif depth_key == 3:
                # Vertically average entire water column
                data = sum(data_full * dz, axis=0) / sum(dz, axis=0)
            elif depth_key == 4:
                # Vertically average between given depths
                data = average_btw_depths(data_full, z, dz, depth_bounds)

    if colour_bounds is not None:
        # User has set bounds on colour scale
        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
        if colour_bounds[0] == -colour_bounds[1]:
            # Bounds are centered on zero, so choose a blue-to-red colourmap
            # centered on yellow
            colour_map = 'RdYlBu_r'
        else:
            colour_map = 'jet'
    else:
        # Determine bounds automatically
        if var_name in [
                'u', 'v', 'ubar', 'vbar', 'm', 'shflux', 'ssflux', 'sustr',
                'svstr', 'bustr', 'bvstr', 'ssflux_restoring'
        ]:
            # Center levels on 0 for certain variables, with a blue-to-red
            # colourmap
            max_val = amax(abs(data))
            lev = linspace(-max_val, max_val, num=40)
            colour_map = 'RdYlBu_r'
        else:
            lev = linspace(amin(data), amax(data), num=40)
            colour_map = 'jet'

    data = ma.masked_where(mask == 0, data)

    # Plot
    fig = figure(figsize=(16, 12))
    fig.add_subplot(1, 1, 1, aspect='equal')
    pcolormesh(mask, cmap='gray_r')
    # Use the colour levels computed above so this works whether or not
    # colour_bounds was supplied
    pcolormesh(data, cmap=colour_map, vmin=lev[0], vmax=lev[-1])
    ylim(0, len(data[:, 0]))
    xlim(0, len(data[0, :]))

    #contourf(x, y, data, lev, cmap=colour_map, extend='both')
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    title('average ' + long_name + ' (' + units + ')\n' + depth_string,
          fontsize=30)
    axis('off')

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
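
interp_depth is used here and in the earlier circumpolar_plot but is not defined in this listing; a simple linear interpolation to one target depth could look like this (a sketch, not necessarily the original: data and z are plain [N, lat, lon] arrays with k=0 at the bottom, z increasing towards the surface, and depth negative below the surface):

from numpy import ma, errstate

def interp_depth(data, z, depth):
    # Start fully masked; columns that never bracket the target depth stay masked
    result = ma.masked_all(data.shape[1:])
    for k in range(data.shape[0] - 1):
        # Columns where the target depth lies between levels k and k+1
        between = (z[k, :, :] <= depth) & (z[k + 1, :, :] >= depth)
        with errstate(divide='ignore', invalid='ignore'):
            coeff = (depth - z[k, :, :]) / (z[k + 1, :, :] - z[k, :, :])
            layer = data[k, :, :] + coeff * (data[k + 1, :, :] - data[k, :, :])
        result[between] = layer[between]
    return result
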
Beispiel #16
def timeseries_3D(grid_path, file_path, log_path):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 50
    N = 31
    rho0 = 1027.0  # Reference density (kg/m^3)
    Cp = 3974  # Specific heat of polar seawater (J/K/kg)
    C2K = 273.15  # Celsius to Kelvin conversion

    time = []
    ohc = []
    ohc_deep = []
    avgsalt = []
    tke = []
    # Check if the log file exists
    if exists(log_path):
        print('Reading previously calculated values')
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except (ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                ohc.append(float(line))
            except (ValueError):
                break
        for line in f:
            try:
                ohc_deep.append(float(line))
            except (ValueError):
                break
        for line in f:
            try:
                avgsalt.append(float(line))
            except (ValueError):
                break
        for line in f:
            tke.append(float(line))
        f.close()

    print('Analysing grid')
    id = Dataset(grid_path, 'r')
    h = id.variables['h'][:, :]
    zice = id.variables['zice'][:, :]
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    lon_u = id.variables['lon_u'][:, :]
    lat_u = id.variables['lat_u'][:, :]
    lon_v = id.variables['lon_v'][:, :]
    lat_v = id.variables['lat_v'][:, :]
    mask = id.variables['mask_rho'][:, :]
    # Keep the overlapping periodic boundary on "angle" for now
    angle = id.variables['angle'][:, :]
    id.close()

    id = Dataset(file_path, 'r')
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:] / (60 * 60 * 24 * 365.25)
    num_time = size(new_time)
    # Concatenate with time values from log file
    for t in range(num_time):
        time.append(new_time[t])

    # Process 10 time indices at a time so we don't use too much memory
    start_t = 0
    while True:
        end_t = min(start_t + 10, num_time)
        print('Processing time indices ' + str(start_t + 1) + ' to ' +
              str(end_t))
        num_time_curr = end_t - start_t

        print('Calculating time-dependent dV')
        # Read time-dependent sea surface height
        zeta = id.variables['zeta'][start_t:end_t, :, :]
        # Calculate time-dependent dz
        dz = ma.empty([num_time_curr, N, size(lon, 0), size(lon, 1)])
        for t in range(num_time_curr):
            # dx and dy will be recomputed unnecessarily each timestep
            # but that's ok
            dx, dy, dz_tmp, z = cartesian_grid_3d(lon, lat, lon_u, lat_u,
                                                  lon_v, lat_v, h, zice,
                                                  theta_s, theta_b, hc, N,
                                                  zeta[t, :, :])
            dz[t, :, :, :] = dz_tmp
        # Calculate time-dependent dV and mask with land mask
        # Here mask, dx, dy are all copied into arrays of dimension
        # time x depth x lat x lon
        dV = ma.masked_where(
            tile(mask, (num_time_curr, N, 1, 1)) == 0,
            tile(dx, (num_time_curr, 1, 1, 1)) *
            tile(dy, (num_time_curr, 1, 1, 1)) * dz)

        print('Reading data')
        temp = id.variables['temp'][start_t:end_t, :, :, :]
        salt = id.variables['salt'][start_t:end_t, :, :, :]
        rho = id.variables['rho'][start_t:end_t, :, :, :] + rho0
        # Keep overlapping periodic boundary for u and v
        u_xy = id.variables['u'][start_t:end_t, :, :, :]
        v_xy = id.variables['v'][start_t:end_t, :, :, :]

        print('Interpolating velocities onto rho-grid')
        # We are actually rotating them at the same time as interpolating
        # which is a bit of unnecessary work (sum of squares won't change with
        # rotation) but not much extra work, and it's convenient
        u = ma.empty(shape(temp))
        v = ma.empty(shape(temp))
        for t in range(num_time_curr):
            for k in range(N):
                u_tmp, v_tmp = rotate_vector_roms(u_xy[t, k, :, :],
                                                  v_xy[t, k, :, :], angle)
                u[t, k, :, :] = u_tmp[:, :]
                v[t, k, :, :] = v_tmp[:, :]

        print('Building timeseries')
        index = z < -2000.0
        for t in range(num_time_curr):
            # Integrate temp*rho*Cp*dV to get OHC
            ohc.append(
                sum((temp[t, :, :, :] + C2K) * rho[t, :, :, :] * Cp *
                    dV[t, :, :, :]))
            # Integrate temp*rho*Cp*dV, where deeper than 2000m to get OHC deep
            ohc_deep.append(
                sum((temp[t, index] + C2K) * rho[t, index] * Cp *
                    dV[t, index]))
            # Average salinity (weighted with rho*dV)
            avgsalt.append(
                sum(salt[t, :, :, :] * rho[t, :, :, :] * dV[t, :, :, :]) /
                sum(rho[t, :, :, :] * dV[t, :, :, :]))
            # Integrate 0.5*rho*speed^2*dV to get TKE
            tke.append(
                sum(0.5 * rho[t, :, :, :] *
                    (u[t, :, :, :]**2 + v[t, :, :, :]**2) * dV[t, :, :, :]))

        # Get ready for next 10 time indices
        if end_t == num_time:
            break
        start_t = end_t

    id.close()

    print('Plotting ocean heat content')
    clf()
    plot(time, ohc)
    title('Southern Ocean Heat Content')
    xlabel('Years')
    ylabel('Heat Content (J)')
    grid(True)
    savefig('ohc.png')

    print('Plotting ocean heat content below 2000m')
    clf()
    plot(time, ohc_deep)
    xlabel('Years')
    ylabel('Southern Ocean Heat Content (J)')
    grid(True)
    savefig('ohc_deep.png')

    print('Plotting average salinity')
    clf()
    plot(time, avgsalt)
    xlabel('Years')
    ylabel('Southern Ocean Average Salinity (psu)')
    grid(True)
    savefig('avgsalt.png')

    print('Plotting total kinetic energy')
    clf()
    plot(time, tke)
    xlabel('Years')
    ylabel('Southern Ocean Total Kinetic Energy (J)')
    grid(True)
    savefig('tke.png')

    print('Saving results to log file')
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Heat Content (J):\n')
    for elm in ohc:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Heat Content below 2000 m (J):\n')
    for elm in ohc_deep:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Average Salinity (psu):\n')
    for elm in avgsalt:
        f.write(str(elm) + '\n')
    f.write('Southern Ocean Total Kinetic Energy (J):\n')
    for elm in tke:
        f.write(str(elm) + '\n')
    f.close()
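
The dV construction above copies mask, dx and dy across the time and depth axes with tile; broadcasting gives the same masked volumes without the copies (a sketch; dx and dy are [N, lat, lon], dz is [time, N, lat, lon] and mask is [lat, lon], as above):

from numpy import ma, newaxis, broadcast_to

def cell_volumes(dx, dy, dz, mask):
    # Broadcast dx and dy over the time axis instead of tiling them
    dV = dx[newaxis, :, :, :] * dy[newaxis, :, :, :] * dz
    # Broadcast the 2-D land mask over the time and depth axes
    land = broadcast_to(mask[newaxis, newaxis, :, :] == 0, dV.shape)
    return ma.masked_where(land, dV)
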
Beispiel #17
def timeseries_dpt(grid_path, file_path, log_path, add_years=0):

    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0
    # Longitude of Drake Passage zonal slice (convert to ROMS bounds 0-360)
    lon0 = -67 + 360
    # Latitude bounds on Drake Passage zonal slice
    lat_min = -68
    lat_max = -54.5

    time = []
    dpt = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except (ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            dpt.append(float(line))
        f.close()

    print 'Reading grid'
    id = Dataset(grid_path, 'r')
    h = id.variables['h'][:-15, 1:-1]
    zice = id.variables['zice'][:-15, 1:-1]
    lon = id.variables['lon_rho'][:-15, 1:-1]
    lat = id.variables['lat_rho'][:-15, 1:-1]
    # Keep the overlapping periodic boundary on "angle" for now
    angle = id.variables['angle'][:-15, :]
    id.close()

    print 'Reading data'
    id = Dataset(file_path, 'r')
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:] / (60 * 60 * 24 *
                                                365.25) + add_years
    num_time = size(new_time)
    # Concatenate with time values from log file
    for t in range(num_time):
        time.append(new_time[t])
    # Calculate time-dependent water column thickness: h + zice + zeta
    zeta = id.variables['zeta'][:, :-15, 1:-1]
    wct = tile(h, (num_time, 1, 1)) + tile(zice, (num_time, 1, 1)) + zeta
    # Read barotropic velocities in x-y space
    ubar_xy = id.variables['ubar'][:, :-15, :]
    vbar_xy = id.variables['vbar'][:, :-15, :]
    id.close()

    print 'Rotating velocity vector'
    ubar = ma.empty([num_time, size(lon, 0), size(lon, 1)])
    # Rotate one time index at a time
    for t in range(num_time):
        ubar_tmp, vbar_tmp = rotate_vector_roms(ubar_xy[t, :, :],
                                                vbar_xy[t, :, :], angle)
        # Throw away the overlapping periodic boundary before saving
        ubar[t, :, :] = ubar_tmp[:, 1:-1]

    print 'Extracting zonal slice through Drake Passage'
    num_lat = size(lat, 0)
    # Set up arrays for zonal slices of ubar, water column thickness, latitude
    ubar_DP = ma.empty([num_time, num_lat])
    wct_DP = ma.empty([num_time, num_lat])
    lat_DP = empty([num_lat])
    # Loop over latitudes, interpolating to lon0 at each one
    for j in range(num_lat):
        lon_tmp = lon[j, :]
        # Find indices and coefficients to interpolate to lon0
        ie, iw, coeffe, coeffw = interp_lon_helper(lon_tmp, lon0)
        # Use these to interpolate all 3 variables we care about
        ubar_DP[:, j] = coeffe * ubar[:, j, ie] + coeffw * ubar[:, j, iw]
        wct_DP[:, j] = coeffe * wct[:, j, ie] + coeffw * wct[:, j, iw]
        lat_DP[j] = coeffe * lat[j, ie] + coeffw * lat[j, iw]
    # Find indices for latitude bounds
    jS = nonzero(lat_DP > lat_min)[0][0]
    jN = nonzero(lat_DP > lat_max)[0][0]
    # Trim everything to these bounds
    ubar_DP = ubar_DP[:, jS:jN]
    wct_DP = wct_DP[:, jS:jN]
    lat_DP = lat_DP[jS:jN]
    # Calculate dy
    # First calculate latitude on edges of each cell
    middle_lat = 0.5 * (lat_DP[:-1] + lat_DP[1:])
    s_bdry = 2 * lat_DP[0] - middle_lat[0]
    n_bdry = 2 * lat_DP[-1] - middle_lat[-1]
    lat_edges = zeros(size(lat_DP) + 1)
    lat_edges[0] = s_bdry
    lat_edges[1:-1] = middle_lat
    lat_edges[-1] = n_bdry
    # Now calculate difference in latitude across each cell
    dlat_DP = lat_edges[1:] - lat_edges[:-1]
    # Convert to Cartesian space for dy in metres
    dy_DP = r * dlat_DP * deg2rad

    for t in range(num_time):
        # Integrate ubar*wct*dy and convert to Sv
        dpt.append(sum(ubar_DP[t, :] * wct_DP[t, :] * dy_DP) * 1e-6)

    print 'Plotting'
    clf()
    plot(time, dpt)
    xlabel('Years')
    ylabel('Drake Passage Transport (Sv)')
    grid(True)
    savefig('drakepsgtrans.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Drake Passage Transport (Sv):\n')
    for elm in dpt:
        f.write(str(elm) + '\n')
    f.close()
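
interp_lon_helper is called above but not defined in this listing; for a row of longitudes that increases monotonically past lon0 it could look like this (a sketch; the original presumably also handles wraparound across the dateline, and the naming assumes ie/coeffe refer to the first grid point east of lon0):

from numpy import nonzero

def interp_lon_helper(lon_row, lon0):
    # First grid point east of lon0 and its western neighbour
    ie = nonzero(lon_row > lon0)[0][0]
    iw = ie - 1
    # Linear interpolation weights; coeffe applies to the eastern point
    coeffe = (lon0 - lon_row[iw]) / (lon_row[ie] - lon_row[iw])
    coeffw = 1.0 - coeffe
    return ie, iw, coeffe, coeffw
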
Beispiel #18
def zonal_plot (file_path, var_name, tstep, lon_key, lon0, lon_bounds,
                depth_min, colour_bounds=None, save=False, fig_name=None,
                grid_path=None):

    # Grid parameters
    theta_s = 4.0
    theta_b = 0.9
    hc = 40
    N = 31

    # Read the variable
    id = Dataset(file_path, 'r')
    data_3d = id.variables[var_name][tstep-1,:,:-15,:]
    # Also read sea surface height
    zeta = id.variables['zeta'][tstep-1,:-15,:]
    if var_name == 'salt':
        units = 'psu'
    else:
        units = id.variables[var_name].units
    long_name = id.variables[var_name].long_name

    # Rotate velocity if necessary
    if var_name in ['u', 'v']:
        grid_id = Dataset(grid_path, 'r')
        angle = grid_id.variables['angle'][:-15,:]
        grid_id.close()
        if var_name == 'u':
            data_3d_ugrid = data_3d[:,:,:]
            data_3d = ma.empty([data_3d_ugrid.shape[0], data_3d_ugrid.shape[1], data_3d_ugrid.shape[2]+1])
            for k in range(N):
                u_data = data_3d_ugrid[k,:,:]
                v_data = id.variables['v'][tstep-1,k,:-15,:]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
                data_3d[k,:,:] = u_data_lonlat
        elif var_name == 'v':
            data_3d_vgrid = data_3d[:,:,:]
            data_3d = ma.empty([data_3d_vgrid.shape[0], data_3d_vgrid.shape[1]+1, data_3d_vgrid.shape[2]])
            for k in range(N):
                v_data = data_3d_vgrid[k,:,:]
                u_data = id.variables['u'][tstep-1,k,:-15,:]
                u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle)
                data_3d[k,:,:] = v_data_lonlat

    # Read grid variables
    h = id.variables['h'][:-15,:]
    zice = id.variables['zice'][:-15,:]
    lon_2d = id.variables['lon_rho'][:-15,:]
    lat_2d = id.variables['lat_rho'][:-15,:]
    id.close()

    # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script
    z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta)

    # Choose what to write on the title about longitude
    if lon_key == 0:
        if lon0 < 0:
            lon_string = 'at ' + str(int(round(-lon0))) + r'$^{\circ}$W'
        else:
            lon_string = 'at ' + str(int(round(lon0))) + r'$^{\circ}$E'
    elif lon_key == 1:
        lon_string = 'zonally averaged'
    elif lon_key == 2:
        lon_string = 'zonally averaged between '
        if lon_bounds[0] < 0:
            lon_string += str(int(round(-lon_bounds[0]))) + r'$^{\circ}$W and '
        else:
            lon_string += str(int(round(lon_bounds[0]))) + r'$^{\circ}$E and '
        if lon_bounds[1] < 0:
            lon_string += str(int(round(-lon_bounds[1]))) + r'$^{\circ}$W'
        else:
            lon_string += str(int(round(lon_bounds[1]))) + r'$^{\circ}$E'

    # Edit longitude bounds to be from 0 to 360, to fit with ROMS convention
    if lon_key == 0:
        if lon0 < 0:
            lon0 += 360
    elif lon_key == 2:
        if lon_bounds[0] < 0:
            lon_bounds[0] += 360
        if lon_bounds[1] < 0:
            lon_bounds[1] += 360

    # Interpolate or average data
    if lon_key == 0:
        # Interpolate to lon0
        data, z, lat = interp_lon(data_3d, z_3d, lat_2d, lon_2d, lon0)
    elif lon_key == 1:
        # Zonally average over all longitudes
        # dlon is constant on this grid (0.25 degrees) so this is easy
        data = mean(data_3d, axis=2)
        z = mean(z_3d, axis=2)
        # Zonally average latitude, and copy into N depth levels
        lat = tile(mean(lat_2d, axis=1), (N,1))
    elif lon_key == 2:
        # Zonally average between lon_bounds
        data, z, lat = average_btw_lons(data_3d, z_3d, lat_2d, lon_2d, lon_bounds)

    if colour_bounds is not None:
        # User has set bounds on colour scale
        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
        if colour_bounds[0] == -colour_bounds[1]:
            # Bounds are centered on zero, so choose a blue-to-red colourmap
            # centered on yellow
            colour_map = 'RdYlBu_r'
        else:
            colour_map = 'jet'
    else:
        # Determine bounds automatically
        if var_name in ['u', 'v']:
            # Center levels on 0 for certain variables, with a blue-to-red
            # colourmap
            max_val = amax(abs(data))
            lev = linspace(-max_val, max_val, num=40)
            colour_map = 'RdYlBu_r'
        else:
            lev = linspace(amin(data), amax(data), num=40)
            colour_map = 'jet'

    # Plot
    fig = figure(figsize=(18,6))
    contourf(lat, z, data, lev, cmap=colour_map, extend='both')
    colorbar()

    title(long_name + ' (' + units + ')\n' + lon_string)
    xlabel('Latitude')
    ylabel('Depth (m)')

    # Choose latitude bounds based on land mask
    data_sum = sum(data, axis=0)    
    # Find southernmost and northernmost unmasked j-indices
    edges = ma.flatnotmasked_edges(data_sum)
    j_min = edges[0]
    j_max = edges[1]
    if j_min == 0:
        # There are ocean points right up to the southern boundary
        # Don't do anything special
        lat_min = min(lat[:,j_min])
    else:
        # There is land everywhere at the southern boundary
        # Show the last 2 degrees of this land mask
        lat_min = min(lat[:,j_min]) - 2
    if j_max == size(data_sum) - 1:
        # There are ocean points right up to the northern boundary
        # Don't do anything special
        lat_max = max(lat[:,j_max])
    else:
        # There is land everywhere at the northern boundary
        # Show the first 2 degrees of this land mask
        lat_max = max(lat[:,j_max]) + 2
#    lat_max = -65
    xlim([lat_min, lat_max])
    ylim([depth_min, 0])

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()

    # Reset lon0 or lon_bounds to (-180, 180) range in case we
    # use them again for the next plot
    if lon_key == 0:
        if lon0 > 180:
            lon0 -= 360
    elif lon_key == 2:
        if lon_bounds[0] > 180:
            lon_bounds[0] -= 360
        if lon_bounds[1] > 180:
            lon_bounds[1] -= 360
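
The longitude bookkeeping at the start and end of zonal_plot (shifting bounds into the ROMS 0-360 convention and back to -180..180 afterwards) can be kept in two small helpers (a sketch):

def to_roms_lon(lon):
    # Shift a longitude in [-180, 180] into the ROMS [0, 360) convention
    return lon + 360 if lon < 0 else lon

def from_roms_lon(lon):
    # Shift a ROMS longitude back into the [-180, 180] convention
    return lon - 360 if lon > 180 else lon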