Example 1
def interp_reg_xyz(source_lon,
                   source_lat,
                   source_z,
                   source_data,
                   target_lon,
                   target_lat,
                   target_z,
                   fill_value=-9999):

    from scipy.interpolate import RegularGridInterpolator

    # Build an interpolant
    # Make depth positive so it's strictly increasing
    interpolant = RegularGridInterpolator((-source_z, source_lat, source_lon),
                                          source_data,
                                          bounds_error=False,
                                          fill_value=fill_value)
    # Make target axes 3D
    if len(target_lon.shape) == 1:
        target_lon, target_lat = np.meshgrid(target_lon, target_lat)
    dimensions = [target_lon.shape[1], target_lon.shape[0], target_z.size]
    target_lon = xy_to_xyz(target_lon, dimensions)
    target_lat = xy_to_xyz(target_lat, dimensions)
    target_z = z_to_xyz(target_z, dimensions)
    # Interpolate
    data_interp = interpolant((-target_z, target_lat, target_lon))
    return data_interp
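
A minimal, self-contained sketch of the same RegularGridInterpolator pattern on a tiny synthetic grid; all values are illustrative, and np.tile stands in for the package's xy_to_xyz/z_to_xyz helpers, which are not shown here.

import numpy as np
from scipy.interpolate import RegularGridInterpolator

# Hypothetical source grid: depths are negative (MITgcm convention), so they
# are negated to give the interpolant a strictly increasing first axis
source_lon = np.array([0., 1., 2., 3.])
source_lat = np.array([-75., -74., -73.])
source_z = np.array([-10., -50., -100.])
source_data = np.random.rand(source_z.size, source_lat.size, source_lon.size)

interpolant = RegularGridInterpolator((-source_z, source_lat, source_lon),
                                      source_data,
                                      bounds_error=False,
                                      fill_value=-9999)

# Target points, tiled to a common 3D (z, lat, lon) shape before evaluation
target_lon, target_lat = np.meshgrid(np.array([0.5, 1.5]), np.array([-74.5, -73.5]))
target_z = np.array([-20., -60.])
lon_3d = np.tile(target_lon, (target_z.size, 1, 1))
lat_3d = np.tile(target_lat, (target_z.size, 1, 1))
z_3d = np.tile(target_z[:, None, None], (1,) + target_lon.shape)
data_interp = interpolant((-z_3d, lat_3d, lon_3d))
print(data_interp.shape)  # (2, 2, 2)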
Example 2
def pierre_obs_grid(f, xy_dim=2, z_dim=1, dA_dim=2):

    lon = np.squeeze(f['lonvec'])
    lat = np.squeeze(f['latvec'])
    depth = np.squeeze(f['depthvec'])

    nx = lon.size
    ny = lat.size
    nz = depth.size
    shape = [ny, nx, nz]

    # Calculate differentials
    dA = np.transpose(dA_from_latlon(lon, lat))
    z_edges = np.concatenate((np.array([0]), 0.5 * (depth[:-1] + depth[1:]),
                              np.array([2 * depth[-1] - depth[-2]])))
    dz = z_edges[:-1] - z_edges[1:]
    dV = xy_to_xyz(dA, shape) * z_to_xyz(dz, shape)

    if xy_dim > 1:
        lat, lon = np.meshgrid(lat, lon)
    if xy_dim == 3:
        lon = xy_to_xyz(lon, shape)
        lat = xy_to_xyz(lat, shape)
    if dA_dim == 3:
        dA = xy_to_xyz(dA, shape)
    if z_dim == 3:
        depth = z_to_xyz(depth, shape)

    return lon, lat, depth, dA, dV
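
For clarity, here is what the layer-edge construction above produces for a tiny hypothetical depth axis, assuming depthvec follows the MITgcm convention of negative depths (which is what makes dz come out positive here):

import numpy as np

depth = np.array([-5., -15., -30.])   # hypothetical cell-centre depths (negative)
# Edges: the surface, midpoints between centres, and an extrapolated bottom edge
z_edges = np.concatenate((np.array([0]), 0.5 * (depth[:-1] + depth[1:]),
                          np.array([2 * depth[-1] - depth[-2]])))
print(z_edges)   # [  0.  -10.  -22.5 -45. ]
dz = z_edges[:-1] - z_edges[1:]
print(dz)        # [10.  12.5 22.5], positive layer thicknesses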
Example 3
def prepare_integrand_mask(option,
                           data,
                           grid,
                           gtype='t',
                           time_dependent=False):

    if option in ['dA', 'dV'] and gtype != 't':
        print 'Error (prepare_integrand_mask): non-tracer grids not yet supported'
        sys.exit()
    elif option == 'dx' and gtype == 'u':
        print 'Error (prepare_integrand_mask): u-grid not yet supported for dx'
        sys.exit()
    elif option == 'dy' and gtype == 'v':
        print 'Error (prepare_integrand_mask): v-grid not yet supported for dy'
        sys.exit()

    # Get the mask as 1s and 0s
    if isinstance(data, np.ma.MaskedArray):
        mask = np.invert(data.mask).astype(float)
    else:
        # No mask, just use 1s everywhere
        mask = np.ones(data.shape)
    # Get the integrand
    if option == 'dA':
        integrand = grid.dA
    elif option == 'dV':
        integrand = grid.dV
    elif option == 'dx':
        integrand = grid.dx_s
    elif option == 'dy':
        integrand = grid.dy_w
    else:
        print 'Error (prepare_integrand_mask): invalid option ' + option
        sys.exit()
    if (len(integrand.shape) == 2
            and ((time_dependent and len(data.shape) == 4)
                 or (not time_dependent and len(data.shape) == 3))):
        # There's also a depth dimension; tile in z
        integrand = xy_to_xyz(integrand, grid)
    if time_dependent:
        # Tile in time
        integrand = add_time_dim(integrand, data.shape[0])
    return integrand, mask
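
The returned integrand and mask are meant to be multiplied with the data and summed; a tiny self-contained sketch of that pattern with hypothetical arrays (not using the package's Grid object):

import numpy as np

data = np.ma.masked_invalid(np.array([[1., 2.], [np.nan, 4.]]))  # hypothetical field, one masked cell
dA = np.array([[10., 10.], [20., 20.]])                          # hypothetical cell areas
mask = np.invert(data.mask).astype(float)                        # 1 where valid, 0 where masked
area_integral = np.sum(data * dA * mask)                         # 1*10 + 2*10 + 4*20 = 110
area_mean = area_integral / np.sum(dA * mask)                    # 110 / 40 = 2.75
print(area_mean)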
Example 4
def prepare_coord(shape, grid, option, gtype='t', time_dependent=False):

    if option == 'lon':
        # Get 2D lon
        coordinates = grid.get_lon_lat(gtype=gtype)[0]
    elif option == 'lat':
        # Get 2D lat
        coordinates = grid.get_lon_lat(gtype=gtype)[1]
    elif option == 'depth':
        # Get 3D z
        if gtype == 'w':
            print 'Error (prepare_coord): w-grid not yet supported for depth derivatives'
            sys.exit()
        coordinates = z_to_xyz(grid.z, grid)
    if option in ['lon', 'lat'] and ((len(shape) == 3 and not time_dependent)
                                     or (len(shape) == 4 and time_dependent)):
        # Add depth dimension
        coordinates = xy_to_xyz(coordinates, grid)
    if time_dependent:
        # Add time dimension
        coordinates = add_time_dim(coordinates, shape[0])
    return coordinates
Example 5
def balance_obcs(grid_path, option='balance', obcs_file_w_u=None,
                 obcs_file_e_u=None, obcs_file_s_v=None, obcs_file_n_v=None,
                 d_eta=None, d_t=None, max_deta_dt=0.5, prec=32):

    if option == 'correct' and (d_eta is None or d_t is None):
        print 'Error (balance_obcs): must set d_eta and d_t for option="correct"'
        sys.exit()
    
    print 'Building grid'
    grid = Grid(grid_path)

    # Calculate integrands of area, scaled by hFacC
    # Note that dx and dy are only available on western and southern edges of
    # cells respectively; for the eastern and northern boundaries, will just
    # have to use the values 1 cell in. Not perfect, but this correction
    # wouldn't perfectly conserve anyway.
    # Area of western face = dy*dz*hfac
    dA_w = xy_to_xyz(grid.dy_w, grid)*z_to_xyz(grid.dz, grid)*grid.hfac
    # Area of southern face = dx*dz*hfac
    dA_s = xy_to_xyz(grid.dx_s, grid)*z_to_xyz(grid.dz, grid)*grid.hfac

    # Now extract the area array at each boundary, and wrap up into a list for easy iteration later
    dA_bdry = [dA_w[:,:,0], dA_w[:,:,-1], dA_s[:,0,:], dA_s[:,-1,:]]
    # Some more lists:
    bdry_key = ['W', 'E', 'S', 'N']
    files = [obcs_file_w_u, obcs_file_e_u, obcs_file_s_v, obcs_file_n_v]
    dimensions = ['yzt', 'yzt', 'xzt', 'xzt']
    sign = [1, -1, 1, -1]  # Multiply velocity variable by this to get incoming transport
    # Initialise number of timesteps
    num_time = None

    # Integrate the total area of ocean cells on boundaries
    total_area = 0
    for i in range(len(files)):
        if files[i] is not None:
            print 'Calculating area of ' + bdry_key[i] + ' boundary'
            total_area += np.sum(dA_bdry[i])

    # Calculate the net transport into the domain
    if option in ['balance', 'dampen']:
        # Transport based on OBCS normal velocities
        if option == 'balance':
            net_transport = 0
        elif option == 'dampen':
            net_transport = None
        for i in range(len(files)):
            if files[i] is not None:
                print 'Processing ' + bdry_key[i] + ' boundary from ' + files[i]
                # Read data
                vel = read_binary(files[i], [grid.nx, grid.ny, grid.nz], dimensions[i], prec=prec)
                if num_time is None:
                    # Find number of time indices
                    num_time = vel.shape[0]
                elif num_time != vel.shape[0]:
                    print 'Error (balance_obcs): inconsistent number of time indices between OBCS files'
                    sys.exit()
                if option == 'dampen' and net_transport is None:
                    # Initialise transport per month
                    net_transport = np.zeros(num_time)
                if option == 'balance':
                    # Time-average velocity (this is equivalent to calculating the transport at each month and then time-averaging at the end - it's all just sums)
                    vel = np.mean(vel, axis=0)
                    # Integrate net transport through this boundary into the domain, and add to global sum
                    net_transport += np.sum(sign[i]*vel*dA_bdry[i])
                elif option == 'dampen':
                    # Integrate net transport at each month
                    for t in range(num_time):
                        net_transport[t] += np.sum(sign[i]*vel[t,:]*dA_bdry[i])
    elif option == 'correct':
        # Transport based on simulated changes in sea surface height
        # Need area of sea surface
        dA_sfc = np.sum(grid.dA*np.invert(grid.land_mask).astype(float))
        # Calculate transport in m^3/s
        net_transport = d_eta*dA_sfc/(d_t*sec_per_year)        

    # Inner function to nicely print the net transport to the user
    def print_net_transport (transport):
        if transport < 0:
            direction = 'out of the domain'
        else:
            direction = 'into the domain'
        print 'Net transport is ' + str(abs(transport*1e-6)) + ' Sv ' + direction

    if option == 'dampen':
        for t in range(num_time):
            print 'Month ' + str(t+1)
            print_net_transport(net_transport[t])
    else:
        print_net_transport(net_transport)

    if option == 'dampen':
        # Calculate the acceptable maximum absolute transport
        # First need total area of sea surface (including cavities) in domain
        surface_area = np.sum(mask_land(grid.dA, grid))
        max_transport = max_deta_dt*surface_area/(sec_per_day*30)
        print 'Maximum allowable transport is ' + str(max_transport*1e-6) + ' Sv'
        if np.max(np.abs(net_transport)) <= max_transport:
            print 'OBCS satisfy this; nothing to do'
            return
        # Work out by what factor to dampen the transports
        scale_factor = max_transport/np.max(np.abs(net_transport))
        print 'Will scale transports by ' + str(scale_factor)
        # Calculate corresponding velocity correction at each month
        correction = np.zeros(num_time)
        for t in range(num_time):
            correction[t] = (scale_factor-1)*net_transport[t]/total_area
            print 'Month ' + str(t+1) + ': will apply correction of ' + str(correction[t]) + ' m/s to normal velocity at each boundary'
    else:
        # Calculate single correction in m/s
        correction = -1*net_transport/total_area
        print 'Will apply correction of ' + str(correction) + ' m/s to normal velocity at each boundary'

    # Now apply the correction
    for i in range(len(files)):
        if files[i] is not None:
            print 'Correcting ' + files[i]
            # Read all the data again
            vel = read_binary(files[i], [grid.nx, grid.ny, grid.nz], dimensions[i], prec=prec)
            # Apply the correction
            if option == 'dampen':
                for t in range(num_time):
                    vel[t,:] += sign[i]*correction[t]
            else:
                vel += sign[i]*correction
            # Overwrite the file
            write_binary(vel, files[i], prec=prec)

    if option in ['balance', 'dampen']:
        # Recalculate the transport to make sure it worked
        if option == 'balance':
            net_transport_new = 0
        elif option == 'dampen':
            net_transport_new = np.zeros(num_time)
        for i in range(len(files)):
            if files[i] is not None:
                vel = read_binary(files[i], [grid.nx, grid.ny, grid.nz], dimensions[i], prec=prec)
                if option == 'balance':
                    vel = np.mean(vel, axis=0)
                    net_transport_new += np.sum(sign[i]*vel*dA_bdry[i])
                elif option == 'dampen':
                    for t in range(num_time):
                        net_transport_new[t] += np.sum(sign[i]*vel[t,:]*dA_bdry[i])
        if option == 'balance':
            print_net_transport(net_transport_new)
        elif option == 'dampen':
            for t in range(num_time):
                print 'Month ' + str(t+1)
                print_net_transport(net_transport_new[t])
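
The correction applied above is simply the net transport divided by the total open-boundary face area; a worked example with made-up numbers:

# Hypothetical numbers, not from any real configuration
net_transport = 2.5e5     # m^3/s of net inflow summed over all open boundaries
total_area = 5.0e9        # m^2 of open-ocean boundary face area
correction = -net_transport / total_area
print(correction)         # -5e-05 m/s added to the incoming normal velocity at each boundary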
Example 6
def sose_ics(grid_path, sose_dir, output_dir, nc_out=None, constant_t=-1.9,
             constant_s=34.4, split=180, prec=64):

    from grid import SOSEGrid
    from file_io import NCfile
    from interpolation import interp_reg

    sose_dir = real_dir(sose_dir)
    output_dir = real_dir(output_dir)

    # Fields to interpolate
    fields = ['THETA', 'SALT', 'SIarea', 'SIheff']
    # Flag for 2D or 3D
    dim = [3, 3, 2, 2]
    # Constant values for ice shelf cavities
    constant_value = [constant_t, constant_s, 0, 0]
    # End of filenames for input
    infile_tail = '_climatology.data'
    # End of filenames for output
    outfile_tail = '_SOSE.ini'
    
    print 'Building grids'
    # First build the model grid and check that we have the right value for split
    model_grid = grid_check_split(grid_path, split)
    # Now build the SOSE grid
    sose_grid = SOSEGrid(sose_dir+'grid/', model_grid=model_grid, split=split)
    # Extract land mask
    sose_mask = sose_grid.hfac == 0
    
    print 'Building mask for SOSE points to fill'
    # Figure out which points we need for interpolation
    # Find open cells according to the model, interpolated to SOSE grid
    model_open = np.ceil(interp_reg(model_grid, sose_grid, np.ceil(model_grid.hfac), fill_value=1))
    # Find ice shelf cavity points according to model, interpolated to SOSE grid
    model_cavity = np.ceil(interp_reg(model_grid, sose_grid, xy_to_xyz(model_grid.ice_mask, model_grid), fill_value=0)).astype(bool)
    # Select open, non-cavity cells
    fill = model_open*np.invert(model_cavity)
    # Extend into the mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, use_3d=True, num_iters=3)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyz')

    # Process fields
    for n in range(len(fields)):
        print 'Processing ' + fields[n]
        in_file = sose_dir + fields[n] + infile_tail
        out_file = output_dir + fields[n] + outfile_tail
        print '...reading ' + in_file
        # Just keep the January climatology
        if dim[n] == 3:
            sose_data = sose_grid.read_field(in_file, 'xyzt')[0,:]
        else:
            # Fill any missing regions with zero sea ice, as we won't be extrapolating them later
            sose_data = sose_grid.read_field(in_file, 'xyt', fill_value=0)[0,:]
        # Discard the land mask, and extrapolate slightly into missing regions so the interpolation doesn't get messed up.
        print '...extrapolating into missing regions'
        if dim[n] == 3:
            sose_data = discard_and_fill(sose_data, sose_mask, fill)
            # Fill cavity points with constant values
            sose_data[model_cavity] = constant_value[n]
        else:
            # Just care about surface layer
            sose_data = discard_and_fill(sose_data, sose_mask[0,:], fill[0,:], use_3d=False)
        print '...interpolating to model grid'
        data_interp = interp_reg(sose_grid, model_grid, sose_data, dim=dim[n])
        # Fill the land mask with zeros
        if dim[n] == 3:
            data_interp[model_grid.hfac==0] = 0
        else:
            data_interp[model_grid.hfac[0,:]==0] = 0
        write_binary(data_interp, out_file, prec=prec)
        if nc_out is not None:
            print '...adding to ' + nc_out
            if dim[n] == 3:
                ncfile.add_variable(fields[n], data_interp, 'xyz')
            else:
                ncfile.add_variable(fields[n], data_interp, 'xy')

    if nc_out is not None:
        ncfile.close()
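
A small illustration, with made-up values, of the np.ceil trick used above: interpolating a 0/1 mask gives fractional values near its edges, and np.ceil flags any cell with a nonzero contribution, which is the conservative choice when building the "open" and "cavity" masks.

import numpy as np

frac = np.array([0.0, 0.2, 0.9, 1.0])   # hypothetical interpolated mask values
print(np.ceil(frac))                     # [0. 1. 1. 1.]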
Example 7
def calc_load_anomaly(grid, out_file, option='constant', ini_temp_file=None,
                      ini_salt_file=None, ini_temp=None, ini_salt=None,
                      constant_t=-1.9, constant_s=34.4, eosType='MDJWF',
                      rhoConst=1035, tAlpha=None, sBeta=None, Tref=None,
                      Sref=None, hfac=None, prec=64, check_grid=True):

    errorTol = 1e-13  # convergence criteria

    # Build the grid if needed
    if check_grid:
        grid = choose_grid(grid, None)
    # Decide which hfac to use
    if hfac is None:
        hfac = grid.hfac

    # Set temperature and salinity
    if ini_temp is not None and ini_salt is not None:
        # Deep copy of the arrays
        temp = np.copy(ini_temp)
        salt = np.copy(ini_salt)
    elif ini_temp_file is not None and ini_salt_file is not None:
        # Read from file
        temp = read_binary(ini_temp_file, [grid.nx, grid.ny, grid.nz], 'xyz', prec=prec)
        salt = read_binary(ini_salt_file, [grid.nx, grid.ny, grid.nz], 'xyz', prec=prec)
    else:
        print 'Error (calc_load_anomaly): Must either specify ini_temp and ini_salt OR ini_temp_file and ini_salt_file'
        sys.exit()

    # Fill in the ice shelves
    # The bathymetry will get filled too, but that doesn't matter because pressure is integrated from the top down
    closed = hfac==0
    if option == 'constant':
        # Fill with constant values
        temp[closed] = constant_t
        salt[closed] = constant_s
    elif option == 'nearest':
        # Select the layer immediately below the ice shelves and tile to make it 3D
        temp_top = xy_to_xyz(select_top(np.ma.masked_where(closed, temp), return_masked=False), grid)
        salt_top = xy_to_xyz(select_top(np.ma.masked_where(closed, salt), return_masked=False), grid)
        # Fill the mask with these values
        temp[closed] = temp_top[closed]
        salt[closed] = salt_top[closed]    
    elif option == 'precomputed':
        for data in [temp, salt]:
            # Make sure there are no missing values
            if (data[~closed]==0).any():
                print 'Error (calc_load_anomaly): you selected the precomputed option, but there appear to be missing values in the land mask.'
                sys.exit()
            # Make sure it's not a masked array as this will break the rms
            if isinstance(data, np.ma.MaskedArray):
                # Fill the mask with zeros
                data[data.mask] = 0
                data = data.data
    else:
        print 'Error (calc_load_anomaly): invalid option ' + option
        sys.exit()

    # Get vertical integrands considering z at both centres and edges of layers
    dz_merged = np.zeros(2*grid.nz)
    dz_merged[::2] = abs(grid.z - grid.z_edges[:-1])  # dz of top half of each cell
    dz_merged[1::2] = abs(grid.z_edges[1:] - grid.z)  # dz of bottom half of each cell
    # Tile to make 3D
    z = z_to_xyz(grid.z, grid)
    dz_merged = z_to_xyz(dz_merged, grid)

    # Initial guess for pressure (dbar) at centres of cells
    press = abs(z)*gravity*rhoConst*1e-4

    # Iteratively calculate pressure load anomaly until it converges
    press_old = np.zeros(press.shape)  # Dummy initial value for pressure from last iteration
    rms_error = 0
    while True:
        rms_old = rms_error
        rms_error = rms(press, press_old)
        print 'RMS error = ' + str(rms_error)
        if rms_error < errorTol or np.abs(rms_error-rms_old) < 0.1*errorTol:
            print 'Converged'
            break
        # Save old pressure
        press_old = np.copy(press)
        # Calculate density anomaly at centres of cells
        drho_c = density(eosType, salt, temp, press, rhoConst=rhoConst, Tref=Tref, Sref=Sref, tAlpha=tAlpha, sBeta=sBeta) - rhoConst
        # Use this for both centres and edges of cells
        drho = np.zeros(dz_merged.shape)
        drho[::2,...] = drho_c
        drho[1::2,...] = drho_c
        # Integrate pressure load anomaly (Pa)
        pload_full = np.cumsum(drho*gravity*dz_merged, axis=0)
        # Update estimate of pressure
        press = (abs(z)*gravity*rhoConst + pload_full[1::2,...])*1e-4

    # Extract pload at each level edge (don't care about centres anymore)
    pload_edges = pload_full[::2,...]

    # Now find pload at the ice shelf base
    # For each xy point, calculate three variables:
    # (1) pload at the base of the last fully dry ice shelf cell
    # (2) pload at the base of the cell beneath that
    # (3) hFacC for that cell
    # To calculate (1) we have to shift pload_3d_edges upward by 1 cell
    pload_edges_above = neighbours_z(pload_edges)[0]
    pload_above = select_top(np.ma.masked_where(closed, pload_edges_above), return_masked=False)
    pload_below = select_top(np.ma.masked_where(closed, pload_edges), return_masked=False)
    hfac_below = select_top(np.ma.masked_where(closed, hfac), return_masked=False)
    # Now we can interpolate to the ice base
    pload = pload_above + (1-hfac_below)*(pload_below - pload_above)

    # Write to file
    write_binary(pload, out_file, prec=prec)
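
For orientation, a single-column sketch of the fixed-point iteration above, with a hypothetical linear equation of state standing in for the package's density() helper; all constants are illustrative only.

import numpy as np

gravity, rhoConst = 9.81, 1035.
z = -np.arange(5., 500., 10.)                 # hypothetical cell-centre depths (m)
dz = 10. * np.ones(z.size)                    # uniform layer thicknesses (m)
temp = -1.9 * np.ones(z.size)
salt = 34.4 * np.ones(z.size)

def drho_sketch(temp, salt, press):
    # Hypothetical linear EOS anomaly, standing in for density(...) - rhoConst
    return rhoConst * (7.4e-4 * (salt - 34.) - 3.9e-5 * temp + 4.4e-6 * press)

press = abs(z) * gravity * rhoConst * 1e-4    # first guess for pressure (dbar)
for _ in range(50):
    press_old = press
    pload = np.cumsum(drho_sketch(temp, salt, press) * gravity * dz)
    press = (abs(z) * gravity * rhoConst + pload) * 1e-4
    if np.max(np.abs(press - press_old)) < 1e-10:
        break
print(press[-1])   # pressure (dbar) at the deepest cell centre once converged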
Example 8
def ts_distribution_plot(file_path,
                         option='fris',
                         grid=None,
                         time_index=None,
                         t_start=None,
                         t_end=None,
                         time_average=False,
                         second_file_path=None,
                         tmin=None,
                         tmax=None,
                         smin=None,
                         smax=None,
                         num_bins=1000,
                         date_string=None,
                         figsize=(8, 6),
                         fig_name=None):

    # Build the grid if needed
    grid = choose_grid(grid, file_path)
    # Make sure we'll end up with a single record in time
    check_single_time(time_index, time_average)
    # Determine what to write about the date
    date_string = check_date_string(date_string, file_path, time_index)

    # Quick inner function to read data (THETA or SALT)
    def read_data(var_name):
        # First choose the right file
        if second_file_path is not None:
            file_path_use = find_variable(file_path, second_file_path)
        else:
            file_path_use = file_path
        data = read_netcdf(file_path_use,
                           var_name,
                           time_index=time_index,
                           t_start=t_start,
                           t_end=t_end,
                           time_average=time_average)
        return data

    # Call this function for each variable
    temp = read_data('THETA')
    salt = read_data('SALT')

    # Select the points we care about
    if option == 'fris':
        # Select all points in the FRIS cavity
        loc_index = (grid.hfac > 0) * xy_to_xyz(grid.fris_mask, grid)
    elif option == 'cavities':
        # Select all points in ice shelf cavities
        loc_index = (grid.hfac > 0) * xy_to_xyz(grid.ice_mask, grid)
    elif option == 'all':
        # Select all unmasked points
        loc_index = grid.hfac > 0
    else:
        print 'Error (ts_distribution_plot): invalid option ' + option
        sys.exit()

    # Inner function to set up bins for a given variable (temp or salt)
    def set_bins(data):
        # Find the bounds on the data at the points we care about
        vmin = np.amin(data[loc_index])
        vmax = np.amax(data[loc_index])
        # Choose a small epsilon to add/subtract from the boundaries
        # This way nothing will be at the edge of a beginning/end bin
        eps = (vmax - vmin) * 1e-3
        # Calculate boundaries of bins
        bins = np.linspace(vmin - eps, vmax + eps, num=num_bins)
        # Now calculate the centres of bins for plotting
        centres = 0.5 * (bins[:-1] + bins[1:])
        return bins, centres

    # Call this function for each variable
    temp_bins, temp_centres = set_bins(temp)
    salt_bins, salt_centres = set_bins(salt)
    # Now set up a 2D array to increment with volume of water masses
    volume = np.zeros([temp_centres.size, salt_centres.size])

    # Loop over all cells to increment volume
    # This can't really be vectorised unfortunately
    for i in range(grid.nx):
        for j in range(grid.ny):
            if option == 'fris' and not grid.fris_mask[j, i]:
                # Disregard all points not in FRIS cavity
                continue
            if option == 'cavities' and not grid.ice_mask[j, i]:
                # Disregard all points not in ice shelf cavities
                continue
            for k in range(grid.nz):
                if grid.hfac[k, j, i] == 0:
                    # Disregard all masked points
                    continue
                # If we're still here, it's a point we care about
                # Figure out which bins it falls into
                temp_index = np.nonzero(temp_bins > temp[k, j, i])[0][0] - 1
                salt_index = np.nonzero(salt_bins > salt[k, j, i])[0][0] - 1
                # Increment volume array
                volume[temp_index, salt_index] += grid.dV[k, j, i]
    # Mask bins with zero volume
    volume = np.ma.masked_where(volume == 0, volume)

    # Find the volume bounds for plotting
    min_vol = np.log(np.amin(volume))
    max_vol = np.log(np.amax(volume))
    # Calculate the surface freezing point for plotting
    tfreeze_sfc = tfreeze(salt_centres, 0)
    # Choose the plotting bounds if not set
    if tmin is None:
        tmin = temp_bins[0]
    if tmax is None:
        tmax = temp_bins[-1]
    if smin is None:
        smin = salt_bins[0]
    if smax is None:
        smax = salt_bins[-1]
    # Construct the title
    title = 'Water masses'
    if option == 'fris':
        title += ' in FRIS cavity'
    elif option == 'cavities':
        title += ' in ice shelf cavities'
    if date_string != '':
        title += ', ' + date_string

    # Plot
    fig, ax = plt.subplots(figsize=figsize)
    # Use a log scale for visibility
    img = plt.pcolor(salt_centres,
                     temp_centres,
                     np.log(volume),
                     vmin=min_vol,
                     vmax=max_vol)
    # Add the surface freezing point
    plt.plot(salt_centres,
             tfreeze_sfc,
             color='black',
             linestyle='dashed',
             linewidth=2)
    ax.grid(True)
    ax.set_xlim([smin, smax])
    ax.set_ylim([tmin, tmax])
    plt.xlabel('Salinity (psu)')
    plt.ylabel('Temperature (' + deg_string + 'C)')
    plt.colorbar(img)
    plt.text(.9,
             .6,
             'log of volume',
             ha='center',
             rotation=-90,
             transform=fig.transFigure)
    plt.title(title)
    finished_plot(fig, fig_name=fig_name)
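
As an aside, the volume-binning loop above can usually be reproduced in a single call to np.histogram2d with the cell volumes as weights; a sketch with synthetic flattened arrays (hypothetical data, not the package's Grid object):

import numpy as np

temp = np.random.uniform(-2., 1., 1000)    # hypothetical flattened temperatures
salt = np.random.uniform(34., 34.8, 1000)  # hypothetical flattened salinities
dV = np.random.uniform(1e6, 1e8, 1000)     # hypothetical cell volumes

temp_bins = np.linspace(temp.min() - 1e-3, temp.max() + 1e-3, 100)
salt_bins = np.linspace(salt.min() - 1e-3, salt.max() + 1e-3, 100)
# volume[i, j] = total volume of water with T in bin i and S in bin j
volume, _, _ = np.histogram2d(temp, salt, bins=[temp_bins, salt_bins], weights=dV)
volume = np.ma.masked_where(volume == 0, volume)
print(volume.shape)   # (99, 99)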
Example 9
    def __init__(self, path, x_is_lon=True, max_lon=None):

        if path.endswith('.nc'):
            use_netcdf = True
        elif os.path.isdir(path):
            use_netcdf = False
            path = real_dir(path)
            from MITgcmutils import rdmds
        else:
            print 'Error (Grid): ' + path + ' is neither a NetCDF file nor a directory'
            sys.exit()

        # Read variables
        # Note that some variables are capitalised differently in NetCDF versus binary, so can't make this more efficient...
        if use_netcdf:
            self.lon_2d = read_netcdf(path, 'XC')
            self.lat_2d = read_netcdf(path, 'YC')
            self.lon_corners_2d = read_netcdf(path, 'XG')
            self.lat_corners_2d = read_netcdf(path, 'YG')
            self.dx_s = read_netcdf(path, 'dxG')
            self.dy_w = read_netcdf(path, 'dyG')
            # I have no idea why this requires .data but it does, otherwise WSS breaks (?!?!)
            self.dA = read_netcdf(path, 'rA').data
            self.z = read_netcdf(path, 'Z')
            self.z_edges = read_netcdf(path, 'Zp1')
            self.dz = read_netcdf(path, 'drF')
            self.dz_t = read_netcdf(path, 'drC')
            self.hfac = read_netcdf(path, 'hFacC')
            self.hfac_w = read_netcdf(path, 'hFacW')
            self.hfac_s = read_netcdf(path, 'hFacS')
        else:
            self.lon_2d = rdmds(path + 'XC')
            self.lat_2d = rdmds(path + 'YC')
            self.lon_corners_2d = rdmds(path + 'XG')
            self.lat_corners_2d = rdmds(path + 'YG')
            self.dx_s = rdmds(path + 'DXG')
            self.dy_w = rdmds(path + 'DYG')
            self.dA = rdmds(path + 'RAC')
            # Remove singleton dimensions from 1D depth variables
            self.z = rdmds(path + 'RC').squeeze()
            self.z_edges = rdmds(path + 'RF').squeeze()
            self.dz = rdmds(path + 'DRF').squeeze()
            self.dz_t = rdmds(path + 'DRC').squeeze()
            self.hfac = rdmds(path + 'hFacC')
            self.hfac_w = rdmds(path + 'hFacW')
            self.hfac_s = rdmds(path + 'hFacS')

        # Make 1D versions of latitude and longitude arrays (only useful for regular lat-lon grids)
        if len(self.lon_2d.shape) == 2:
            self.lon_1d = self.lon_2d[0, :]
            self.lat_1d = self.lat_2d[:, 0]
            self.lon_corners_1d = self.lon_corners_2d[0, :]
            self.lat_corners_1d = self.lat_corners_2d[:, 0]
        elif len(self.lon_2d.shape) == 1:
            # xmitgcm output has these variables as 1D already. So make 2D ones.
            self.lon_1d = np.copy(self.lon_2d)
            self.lat_1d = np.copy(self.lat_2d)
            self.lon_corners_1d = np.copy(self.lon_corners_2d)
            self.lat_corners_1d = np.copy(self.lat_corners_2d)
            self.lon_2d, self.lat_2d = np.meshgrid(self.lon_1d, self.lat_1d)
            self.lon_corners_2d, self.lat_corners_2d = np.meshgrid(
                self.lon_corners_1d, self.lat_corners_1d)

        # Decide on longitude range
        if max_lon is None and x_is_lon:
            # Choose range automatically
            if np.amin(self.lon_1d) < 180 and np.amax(self.lon_1d) > 180:
                # Domain crosses 180E, so use the range (0, 360)
                max_lon = 360
            else:
                # Use the range (-180, 180)
                max_lon = 180
            # Do one array to test
            self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
            # Make sure it's strictly increasing now
            if not np.all(np.diff(self.lon_1d) > 0):
                print 'Error (Grid): Longitude is not strictly increasing either in the range (0, 360) or (-180, 180).'
                sys.exit()
        if max_lon == 360:
            self.split = 0
        elif max_lon == 180:
            self.split = 180
        self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
        self.lon_corners_1d = fix_lon_range(self.lon_corners_1d,
                                            max_lon=max_lon)
        self.lon_2d = fix_lon_range(self.lon_2d, max_lon=max_lon)
        self.lon_corners_2d = fix_lon_range(self.lon_corners_2d,
                                            max_lon=max_lon)

        # Save dimensions
        self.nx = self.lon_1d.size
        self.ny = self.lat_1d.size
        self.nz = self.z.size

        # Calculate volume
        self.dV = xy_to_xyz(self.dA, [self.nx, self.ny, self.nz]) * z_to_xyz(
            self.dz, [self.nx, self.ny, self.nz]) * self.hfac

        # Calculate bathymetry and ice shelf draft
        self.bathy = bdry_from_hfac('bathy', self.hfac, self.z_edges)
        self.draft = bdry_from_hfac('draft', self.hfac, self.z_edges)

        # Create masks on the t, u, and v grids
        # Land masks
        self.land_mask = self.build_land_mask(self.hfac)
        self.land_mask_u = self.build_land_mask(self.hfac_w)
        self.land_mask_v = self.build_land_mask(self.hfac_s)
        # Ice shelf masks
        self.ice_mask = self.build_ice_mask(self.hfac)
        self.ice_mask_u = self.build_ice_mask(self.hfac_w)
        self.ice_mask_v = self.build_ice_mask(self.hfac_s)
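
fix_lon_range is not shown in this listing; the sketch below is a guess at its behaviour (an assumption, not the package's actual implementation): it simply wraps longitudes into (-180, 180] or [0, 360).

import numpy as np

def fix_lon_range_sketch(lon, max_lon=180):
    # Assumed behaviour only: wrap into (-180, 180] or [0, 360)
    lon = np.asarray(lon, dtype=float).copy()
    if max_lon == 180:
        lon[lon > 180] -= 360    # e.g. 270 -> -90
    elif max_lon == 360:
        lon[lon < 0] += 360      # e.g. -90 -> 270
    return lon

print(fix_lon_range_sketch(np.array([10., 190., 350.])))   # [  10. -170.  -10.]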
Example 10
    def __init__(self, path, model_grid=None, split=0):

        self.split = split

        if path.endswith('.nc'):
            use_netcdf = True
        elif os.path.isdir(path):
            use_netcdf = False
            path = real_dir(path)
            from MITgcmutils import rdmds
        else:
            print 'Error (SOSEGrid): ' + path + ' is neither a NetCDF file nor a directory'
            sys.exit()

        self.trim_extend = True
        if model_grid is None:
            self.trim_extend = False

        if self.trim_extend:
            # Error checking for which longitude range we're in
            if split == 180:
                max_lon = 180
                if np.amax(model_grid.lon_2d) > max_lon:
                    print 'Error (SOSEGrid): split=180 does not match model grid'
                    sys.exit()
            elif split == 0:
                max_lon = 360
                if np.amin(model_grid.lon_2d) < 0:
                    print 'Error (SOSEGrid): split=0 does not match model grid'
                    sys.exit()
            else:
                print 'Error (SOSEGrid): split must be 180 or 0'
                sys.exit()
        else:
            max_lon = 360

        # Read variables
        if use_netcdf:
            # Make the 2D grid 1D so it's regular
            self.lon_1d = read_netcdf(path, 'XC')[0, :]
            self.lon_corners_1d = read_netcdf(path, 'XG')[0, :]
            self.lat_1d = read_netcdf(path, 'YC')[:, 0]
            self.lat_corners_1d = read_netcdf(path, 'YG')[:, 0]
            self.z = read_netcdf(path, 'Z')
            self.z_edges = read_netcdf(path, 'RF')
        else:
            self.lon_1d = rdmds(path + 'XC')[0, :]
            self.lon_corners_1d = rdmds(path + 'XG')[0, :]
            self.lat_1d = rdmds(path + 'YC')[:, 0]
            self.lat_corners_1d = rdmds(path + 'YG')[:, 0]
            self.z = rdmds(path + 'RC').squeeze()
            self.z_edges = rdmds(path + 'RF').squeeze()

        # Fix longitude range
        self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
        self.lon_corners_1d = fix_lon_range(self.lon_corners_1d,
                                            max_lon=max_lon)
        if split == 180:
            # Split the domain at 180E=180W and rearrange the two halves so longitude is strictly ascending
            self.i_split = np.nonzero(self.lon_1d < 0)[0][0]
        else:
            # Set i_split to 0 which won't actually do anything
            self.i_split = 0
        self.lon_1d = split_longitude(self.lon_1d, self.i_split)
        self.lon_corners_1d = split_longitude(self.lon_corners_1d,
                                              self.i_split)
        if self.lon_corners_1d[0] > 0:
            # The split happened between lon_corners[i_split] and lon[i_split].
            # Take mod 360 on this index of lon_corners to make sure it's strictly increasing.
            self.lon_corners_1d[0] -= 360
        # Make sure the longitude axes are strictly increasing after the splitting
        if not np.all(np.diff(self.lon_1d) > 0) or not np.all(
                np.diff(self.lon_corners_1d) > 0):
            print 'Error (SOSEGrid): longitude is not strictly increasing'
            sys.exit()

        # Save original dimensions
        sose_nx = self.lon_1d.size
        sose_ny = self.lat_1d.size
        sose_nz = self.z.size

        if self.trim_extend:

            # Trim and/or extend the axes
            # Notes about this:
            # Longitude can only be trimmed as SOSE considers all longitudes
            # (someone doing a high-resolution circumpolar model with points in
            # the gap might need to write a patch to wrap the SOSE grid around)
            # Latitude can be trimmed in both directions, or extended to the
            # south (not extended to the north - if you need to do this, SOSE
            # is not the right product for you!)
            # Depth can be extended by one level in both directions, and the deeper bound can also be trimmed
            # The indices i, j, and k will be kept track of with 4 variables each. For example, with longitude:
            # i0_before = first index we care about
            #           = how many cells to trim at beginning
            # i0_after = i0_before's position in the new grid
            #          = how many cells to extend at beginning
            # i1_before = first index we don't care about
            #           sose_nx - i1_before = how many cells to trim at end
            # i1_after = i1_before's position in the new grid
            #          = i1_before - i0_before + i0_after
            # nx = length of new grid
            #      nx - i1_after = how many cells to extend at end
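            # A worked example with made-up numbers: if the model's western edge
            # lies 2 SOSE cells east of the SOSE western edge, and its eastern
            # edge falls 3 cells short of the SOSE eastern edge, then
            # i0_before = 2, i0_after = 0, i1_before = sose_nx - 3,
            # i1_after = i1_before - 2, and nx = i1_after
            # (trimmed on both sides, nothing extended).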

            # Find bounds on model grid
            xmin = np.amin(model_grid.lon_corners_2d)
            xmax = np.amax(model_grid.lon_2d)
            ymin = np.amin(model_grid.lat_corners_2d)
            ymax = np.amax(model_grid.lat_2d)
            z_shallow = model_grid.z[0]
            z_deep = model_grid.z[-1]

            # Western bound (use longitude at cell centres to make sure all grid types clear the bound)
            if xmin == self.lon_1d[0]:
                # Nothing to do
                self.i0_before = 0
            elif xmin > self.lon_1d[0]:
                # Trim
                self.i0_before = np.nonzero(self.lon_1d > xmin)[0][0] - 1
            else:
                print 'Error (SOSEGrid): not allowed to extend westward'
                sys.exit()
            self.i0_after = 0

            # Eastern bound (use longitude at cell corners, i.e. western edge)
            if xmax == self.lon_corners_1d[-1]:
                # Nothing to do
                self.i1_before = sose_nx
            elif xmax < self.lon_corners_1d[-1]:
                # Trim
                self.i1_before = np.nonzero(
                    self.lon_corners_1d > xmax)[0][0] + 1
            else:
                print 'Error (SOSEGrid): not allowed to extend eastward'
                sys.exit()
            self.i1_after = self.i1_before - self.i0_before + self.i0_after
            self.nx = self.i1_after

            # Southern bound (use latitude at cell centres)
            if ymin == self.lat_1d[0]:
                # Nothing to do
                self.j0_before = 0
                self.j0_after = 0
            elif ymin > self.lat_1d[0]:
                # Trim
                self.j0_before = np.nonzero(self.lat_1d > ymin)[0][0] - 1
                self.j0_after = 0
            elif ymin < self.lat_1d[0]:
                # Extend
                self.j0_after = int(np.ceil(
                    (self.lat_1d[0] - ymin) / sose_res))
                self.j0_before = 0

            # Northern bound (use latitude at cell corners, i.e. southern edge)
            if ymax == self.lat_corners_1d[-1]:
                # Nothing to do
                self.j1_before = sose_ny
            elif ymax < self.lat_corners_1d[-1]:
                # Trim
                self.j1_before = np.nonzero(
                    self.lat_corners_1d > ymax)[0][0] + 1
            else:
                print 'Error (SOSEGrid): not allowed to extend northward'
                sys.exit()
            self.j1_after = self.j1_before - self.j0_before + self.j0_after
            self.ny = self.j1_after

            # Depth
            self.k0_before = 0
            if z_shallow <= self.z[0]:
                # Nothing to do
                self.k0_after = 0
            else:
                # Extend
                self.k0_after = 1
            if z_deep > self.z[-1]:
                # Trim
                self.k1_before = np.nonzero(self.z < z_deep)[0][0] + 1
            else:
                # Either extend or do nothing
                self.k1_before = sose_nz
            self.k1_after = self.k1_before + self.k0_after
            if z_deep < self.z[-1]:
                # Extend
                self.nz = self.k1_after + 1
            else:
                self.nz = self.k1_after

            # Now we have the indices we need, so trim/extend the axes as needed
            # Longitude: can only trim
            self.lon_1d = self.lon_1d[self.i0_before:self.i1_before]
            self.lon_corners_1d = self.lon_corners_1d[self.i0_before:self.i1_before]
            # Latitude: can extend on south side, trim on both sides
            lat_extend = np.flipud(-1 * (np.arange(self.j0_after) + 1) * sose_res
                                   + self.lat_1d[self.j0_before])
            lat_trim = self.lat_1d[self.j0_before:self.j1_before]
            self.lat_1d = np.concatenate((lat_extend, lat_trim))
            lat_corners_extend = np.flipud(-1 * (np.arange(self.j0_after) + 1) * sose_res
                                           + self.lat_corners_1d[self.j0_before])
            lat_corners_trim = self.lat_corners_1d[self.j0_before:self.j1_before]
            self.lat_corners_1d = np.concatenate((lat_corners_extend, lat_corners_trim))
            # Depth: can extend on both sides (depth 0 at top and extrapolated
            # at bottom to clear the deepest model depth), trim on deep side
            z_above = np.zeros(self.k0_after)  # Will either be [0] or empty
            z_middle = self.z[self.k0_before:self.k1_before]
            z_edges_middle = self.z_edges[self.k0_before:self.k1_before + 1]
            # Will either be [something deeper than z_deep] or empty
            z_below = (2 * model_grid.z[-1] - model_grid.z[-2]) * np.ones(self.nz - self.k1_after)
            self.z = np.concatenate((z_above, z_middle, z_below))
            self.z_edges = np.concatenate((z_above, z_edges_middle, z_below))

            # Make sure we cleared those bounds
            if self.lon_corners_1d[0] > xmin:
                print 'Error (SOSEGrid): western bound not cleared'
                sys.exit()
            if self.lon_corners_1d[-1] < xmax:
                print 'Error (SOSEGrid): eastern bound not cleared'
                sys.exit()
            if self.lat_corners_1d[0] > ymin:
                print 'Error (SOSEGrid): southern bound not cleared'
                sys.exit()
            if self.lat_corners_1d[-1] < ymax:
                print 'Error (SOSEGrid): northern bound not cleared'
                sys.exit()
            if self.z[0] < z_shallow:
                print 'Error (SOSEGrid): shallow bound not cleared'
                sys.exit()
            if self.z[-1] > z_deep:
                print 'Error (SOSEGrid): deep bound not cleared'
                sys.exit()

        else:

            # Nothing fancy to do
            self.nx = sose_nx
            self.ny = sose_ny
            self.nz = sose_nz

        # Now read the rest of the variables we need, splitting/trimming/extending them if needed
        if use_netcdf:
            self.hfac = self.read_field(path,
                                        'xyz',
                                        var_name='hFacC',
                                        fill_value=0)
            self.hfac_w = self.read_field(path,
                                          'xyz',
                                          var_name='hFacW',
                                          fill_value=0)
            self.hfac_s = self.read_field(path,
                                          'xyz',
                                          var_name='hFacS',
                                          fill_value=0)
            self.dA = self.read_field(path, 'xy', var_name='rA', fill_value=0)
            self.dz = self.read_field(path, 'z', var_name='DRF', fill_value=0)
        else:
            self.hfac = self.read_field(path + 'hFacC', 'xyz', fill_value=0)
            self.hfac_w = self.read_field(path + 'hFacW', 'xyz', fill_value=0)
            self.hfac_s = self.read_field(path + 'hFacS', 'xyz', fill_value=0)
            self.dA = self.read_field(path + 'RAC', 'xy', fill_value=0)
            self.dz = self.read_field(path + 'DRF', 'z', fill_value=0)
        # Calculate volume
        self.dV = xy_to_xyz(self.dA, [self.nx, self.ny, self.nz]) * z_to_xyz(
            self.dz, [self.nx, self.ny, self.nz]) * self.hfac

        # Mesh lat and lon
        self.lon_2d, self.lat_2d = np.meshgrid(self.lon_1d, self.lat_1d)
        self.lon_corners_2d, self.lat_corners_2d = np.meshgrid(
            self.lon_corners_1d, self.lat_corners_1d)

        # Calculate bathymetry
        self.bathy = bdry_from_hfac('bathy', self.hfac, self.z_edges)

        # Create land masks
        self.land_mask = self.build_land_mask(self.hfac)
        self.land_mask_u = self.build_land_mask(self.hfac_w)
        self.land_mask_v = self.build_land_mask(self.hfac_s)
        # Dummy ice mask with all False
        self.ice_mask = np.zeros(self.land_mask.shape).astype(bool)