Example #1
def merge_bedmap2_changes (orig_file, updated_files, out_file):

    # Read all the files
    data_orig = np.fromfile(orig_file, dtype='<f4')
    num_files = len(updated_files)
    data_new = np.empty([num_files, data_orig.size])
    for i in range(num_files):
        data_new[i,:] = np.fromfile(updated_files[i], dtype='<f4')

    # Make sure none of the changes overlap
    changes = (data_new!=data_orig).astype(float)
    if np.amax(np.sum(changes, axis=0)) > 1:
        # Some changes overlap, but maybe they are the same changes.
        stop = False
        for i in range(num_files):
            for j in range(i+1, num_files):
                index = (changes[i,:]==1)*(changes[j,:]==1)
                if (data_new[i,:][index] != data_new[j,:][index]).any():
                    stop = True
                    print updated_files[i] + ' contradicts ' + updated_files[j]
        if stop:
            print 'Error (merge_bedmap2_changes): some changes are contradictory'
            sys.exit()

    # Apply the changes
    data_final = np.copy(data_orig)
    for i in range(num_files):
        data_tmp = data_new[i,:]
        index = data_tmp != data_orig
        data_final[index] = data_tmp[index]

    # Write to file
    write_binary(data_final, out_file, prec=32, endian='little')
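
A hypothetical call, merging two regional edits of the Bedmap2 bathymetry back into a single file (file names are illustrative, not from the original code):

merge_bedmap2_changes('bedmap2_bed.flt',
                      ['bedmap2_bed_ronne_edit.flt', 'bedmap2_bed_filchner_edit.flt'],
                      'bedmap2_bed_merged.flt')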
Example #2
def fix_eraint_humidity (in_dir, out_dir, prec=32):

    in_dir = real_dir(in_dir)
    out_dir = real_dir(out_dir)

    # File paths
    in_head = in_dir + 'era_a_'
    in_tail = '_075.nc'
    out_head = out_dir + 'ERAinterim_spfh2m_'
    start_year = 1979
    end_year = 2017

    for year in range(start_year, end_year+1):
        in_file = in_head + str(year) + in_tail
        print 'Reading ' + in_file
        # Need temperature, pressure, and dew point
        temp = read_netcdf(in_file, 't2m')
        press = read_netcdf(in_file, 'msl')
        dewpoint = read_netcdf(in_file, 'd2m')
        # Calculate vapour pressure
        e = es0*np.exp(Lv/Rv*(1/temp - 1/dewpoint))
        # Calculate specific humidity
        spf = sh_coeff*e/(press - (1-sh_coeff)*e)
        # Now flip in latitude to match Matlab-generated files
        spf = spf[:,::-1,:]
        out_file = out_head + str(year)
        write_binary(spf, out_file, prec=prec)
        if year == end_year:
            # Copy the last timestep as in era_dummy_year
            spf_last = spf[-1,:]
            out_file = out_head + str(year+1)
            write_binary(spf_last, out_file, prec=prec)
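
The vapour pressure and specific humidity formulas above assume module-level physical constants (es0, Lv, Rv, sh_coeff; temp_C2K appears in later examples). A minimal sketch with standard textbook values, which may differ slightly from the package's actual definitions:

es0 = 611.          # saturation vapour pressure at 0 degrees C (Pa); assumed value
Lv = 2.5e6          # latent heat of vapourisation of water (J/kg); assumed value
Rv = 461.           # specific gas constant for water vapour (J/K/kg); assumed value
sh_coeff = 0.62198  # ratio of gas constants for dry air and water vapour; assumed value
temp_C2K = 273.15   # offset between degrees Celsius and Kelvin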
Example #3
def mit_ics (grid_path, source_file, output_dir, nc_out=None, prec=64):

    from file_io import NCfile, read_netcdf
    from interpolation import interp_reg

    output_dir = real_dir(output_dir)

    # Fields to interpolate
    fields = ['THETA', 'SALT', 'SIarea', 'SIheff', 'SIhsnow']
    # Flag for 2D or 3D
    dim = [3, 3, 2, 2, 2]
    # End of filenames for output
    outfile_tail = '_MIT.ini'

    print 'Building grids'
    source_grid = Grid(source_file)
    model_grid = Grid(grid_path)
    # Extract land mask of source grid
    source_mask = source_grid.hfac==0

    print 'Building mask for points to fill'
    # Select open cells according to the model, interpolated to the source grid
    fill = np.ceil(interp_reg(model_grid, source_grid, np.ceil(model_grid.hfac), fill_value=0)).astype(bool)
    # Extend into mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, use_3d=True, num_iters=3)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyz')

    # Process fields
    for n in range(len(fields)):
        print 'Processing ' + fields[n]
        out_file = output_dir + fields[n] + outfile_tail
        # Read the January climatology
        source_data = read_netcdf(source_file, fields[n], time_index=0)
        # Discard the land mask, and extrapolate slightly into missing regions so the interpolation doesn't get messed up.
        print '...extrapolating into missing regions'
        if dim[n] == 3:
            source_data = discard_and_fill(source_data, source_mask, fill)
        else:
            # Just care about the surface layer
            source_data = discard_and_fill(source_data, source_mask[0,:], fill[0,:], use_3d=False)
        print '...interpolating to model grid'
        data_interp = interp_reg(source_grid, model_grid, source_data, dim=dim[n])
        # Fill the land mask with zeros
        if dim[n] == 3:
            data_interp[model_grid.hfac==0] = 0
        else:
            data_interp[model_grid.hfac[0,:]==0] = 0
        write_binary(data_interp, out_file, prec=prec)
        if nc_out is not None:
            print '...adding to ' + nc_out
            if dim[n] == 3:
                ncfile.add_variable(fields[n], data_interp, 'xyz')
            else:
                ncfile.add_variable(fields[n], data_interp, 'xy')

    if nc_out is not None:
        ncfile.close()    
Example #4
def seaice_drag_scaling(grid_path,
                        output_file,
                        rd_scale=1,
                        bb_scale=1,
                        ft_scale=1,
                        prec=64):

    # Longitude bounds on each region
    rd_bounds = [-80, -58]  # Western bound is well into land
    bb_bounds = [-49, -45]
    ft_bounds = [-42, -38]
    # Max distance from the ice front (km)
    max_dist = 100

    print 'Building grid'
    grid = Grid(grid_path)
    lon, lat = grid.get_lon_lat()

    print 'Selecting regions'
    # First find ice shelf front points
    front_points = ice_shelf_front_points(grid, ice_mask=grid.fris_mask)
    # Also coastal points for Berkner Island
    bi_front_points = ice_shelf_front_points(grid, ice_mask=grid.get_bi_mask())
    # Combine the two arrays
    front_points = np.maximum(front_points, bi_front_points)
    # Now get i and j indices of these points
    i_vals, j_vals = np.meshgrid(range(grid.nx), range(grid.ny))
    i_front = i_vals[front_points]
    j_front = j_vals[front_points]
    num_points = len(i_front)
    # Find the distance from each point in the domain to the closest ice shelf front point, by looping over all the ice shelf front points.
    # Start with an array of infinity, and update it with any smaller values each iteration. So the first iteration will fully overwrite it.
    dist_to_front = np.zeros([grid.ny, grid.nx]) + np.inf
    for posn in range(num_points):
        # Calculate the distance of each point to this point, and convert to km
        dist_to_point = dist_btw_points(
            (grid.lon_1d[i_front[posn]], grid.lat_1d[j_front[posn]]),
            (lon, lat)) * 1e-3
        dist_to_front = np.minimum(dist_to_front, dist_to_point)

    # Now select the three regions (once the distances are complete, i.e. after the loop)
    # Must be between the given longitude bounds and not more than max_dist km away from the ice shelf front
    rd_mask = (lon >= rd_bounds[0]) * (lon <= rd_bounds[1]) * (
        dist_to_front <= max_dist)
    bb_mask = (lon >= bb_bounds[0]) * (lon <= bb_bounds[1]) * (
        dist_to_front <= max_dist)
    ft_mask = (lon >= ft_bounds[0]) * (lon <= ft_bounds[1]) * (
        dist_to_front <= max_dist)

    print 'Setting scaling factors'
    scale = np.ones([grid.ny, grid.nx])
    scale[rd_mask] = rd_scale
    scale[bb_mask] = bb_scale
    scale[ft_mask] = ft_scale
    # Smooth
    scale = smooth_xy(scale, sigma=2)
    # Reset ice shelf points
    scale[grid.ice_mask] = 1

    # Write to file
    write_binary(scale, output_file, prec=prec)
Example #5
def era_dummy_year (bin_dir, last_year, option='era5', nlon=None, nlat=None, out_dir=None, prec=32):

    bin_dir = real_dir(bin_dir)
    if out_dir is None:
        out_dir = bin_dir
        
    if nlon is None:
        if option == 'era5':
            nlon = 1440
        elif option == 'eraint':
            nlon = 480
        else:
            print 'Error (era_dummy_year): invalid option ' + option
            sys.exit()
    if nlat is None:
        # The same for both cases, assuming ERA5 was cut off at 30S
        nlat = 241

    # Figure out the file paths
    if option == 'era5':
        var_names = ['apressure', 'atemp', 'aqh', 'uwind', 'vwind', 'precip', 'swdown', 'lwdown', 'evap']
        file_head = 'ERA5_'
    elif option == 'eraint':
        var_names = ['msl', 'tmp2m_degC', 'spfh2m', 'u10m', 'v10m', 'rain', 'dsw', 'dlw']
        file_head = 'ERAinterim_'            

    for var in var_names:
        file_in = bin_dir + file_head + var + '_' + str(last_year)
        # Select the last time index
        data = read_binary(file_in, [nlon, nlat], 'xyt', prec=prec)[-1,:]
        file_out = out_dir + file_head + var + '_' + str(last_year+1)
        write_binary(data, file_out, prec=prec)
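
A hypothetical call, padding a dummy year 2019 after the 2018 ERA5 fields (path is illustrative):

era_dummy_year('../era5_bin/', 2018, option='era5')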
Example #6
def write_topo_files (nc_grid, bathy_file, draft_file, prec=64):

    bathy = read_netcdf(nc_grid, 'bathy')
    draft = read_netcdf(nc_grid, 'draft')
    write_binary(bathy, bathy_file, prec=prec)
    write_binary(draft, draft_file, prec=prec)
    print 'Files written successfully. Now go try them out! Make sure you update all the necessary variables in data, data.shelfice, SIZE.h, job scripts, etc.'
Example #7
def iceberg_meltwater(grid_path, input_dir, output_file, nc_out=None, prec=32):

    from plot_latlon import latlon_plot

    input_dir = real_dir(input_dir)
    file_head = 'icebergs_'
    file_tail = '.nc'

    print 'Building grids'
    # Read the NEMO grid from the first file
    # It has longitude in the range -180 to 180
    file_path = input_dir + file_head + '01' + file_tail
    nemo_lon = read_netcdf(file_path, 'nav_lon')
    nemo_lat = read_netcdf(file_path, 'nav_lat')
    # Build the model grid
    model_grid = Grid(grid_path, max_lon=180)

    print 'Interpolating'
    icebergs_interp = np.zeros([12, model_grid.ny, model_grid.nx])
    for month in range(12):
        print '...month ' + str(month + 1)
        # Read the data
        file_path = input_dir + file_head + '{0:02d}'.format(month+1) + file_tail
        icebergs = read_netcdf(file_path, 'berg_total_melt', time_index=0)
        # Interpolate
        icebergs_interp_tmp = interp_nonreg_xy(nemo_lon,
                                               nemo_lat,
                                               icebergs,
                                               model_grid.lon_1d,
                                               model_grid.lat_1d,
                                               fill_value=0)
        # Make sure the land and ice shelf cavities don't get any iceberg melt
        icebergs_interp_tmp[model_grid.land_mask + model_grid.ice_mask] = 0
        # Save to the master array
        icebergs_interp[month, :] = icebergs_interp_tmp

    write_binary(icebergs_interp, output_file, prec=prec)

    print 'Plotting'
    # Make a nice plot of the annual mean
    latlon_plot(mask_land_ice(np.mean(icebergs_interp, axis=0), model_grid),
                model_grid,
                include_shelf=False,
                vmin=0,
                title=r'Annual mean iceberg melt (kg/m$^2$/s)')
    if nc_out is not None:
        # Also write to NetCDF file
        print 'Writing ' + nc_out
        ncfile = NCfile(nc_out, model_grid, 'xyt')
        ncfile.add_time(np.arange(12) + 1, units='months')
        ncfile.add_variable('iceberg_melt',
                            icebergs_interp,
                            'xyt',
                            units='kg/m^2/s')
        ncfile.close()
Example #8
def make_sose_climatology (in_file, out_file):

    from MITgcmutils import rdmds

    # Strip .data from filename before reading
    data = rdmds(in_file.replace('.data', ''))
    climatology = np.zeros(tuple([12]) + data.shape[1:])
    for month in range(12):
        climatology[month,:] = np.mean(data[month::12,:], axis=0)
    write_binary(climatology, out_file)
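
Since the input is a multi-year monthly time series, the stride data[month::12] gathers every January, every February, and so on before averaging. A hypothetical call (file names are illustrative):

make_sose_climatology('THETA_mnthlyBar.data', 'THETA_climatology.data')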
Example #9
def thermo_correction(grid_dir,
                      var_name,
                      cmip_file,
                      era5_file,
                      out_file,
                      prec=64):

    from plot_latlon import latlon_plot

    grid = Grid(grid_dir)
    data = []
    for fname in [cmip_file, era5_file]:
        data.append(read_netcdf(fname, var_name))
    data_diff = data[1] - data[0]
    latlon_plot(data_diff, grid, ctype='plusminus', figsize=(10, 6))
    write_binary(data_diff, out_file, prec=prec)
Example #10
def polynya_mask(grid_path, polynya, mask_file, prec=64):

    from plot_latlon import latlon_plot

    # Define the centre and radii of the ellipse bounding the polynya
    if polynya == 'maud_rise':  # Area 2.6 x 10^5 km^2
        lon0 = 0.
        lat0 = -65.
        rlon = 8.
        rlat = 2.
    elif polynya == 'near_shelf':  # Area 2.6 x 10^5 km^2
        lon0 = -30.
        lat0 = -70.
        rlon = 9.
        rlat = 2.2
    elif polynya == 'maud_rise_big':  # Area 6.2 x 10^5 km^2
        lon0 = 0.
        lat0 = -65.
        rlon = 15.
        rlat = 2.5
    elif polynya == 'maud_rise_small':  # Area 0.34 x 10^5 km^2
        lon0 = 0
        lat0 = -65.
        rlon = 2.8
        rlat = 0.75
    else:
        print 'Error (polynya_mask): invalid polynya option ' + polynya
        sys.exit()

    # Build the grid
    grid = Grid(grid_path)
    # Set up the mask
    mask = np.zeros([grid.ny, grid.nx])
    # Select the polynya region
    index = (grid.lon_2d - lon0)**2/rlon**2 + (grid.lat_2d - lat0)**2/rlat**2 <= 1
    mask[index] = 1

    # Print the area of the polynya
    print 'Polynya area is ' + str(area_integral(mask, grid) * 1e-6) + ' km^2'
    # Plot the mask
    latlon_plot(mask_land_ice(mask, grid),
                grid,
                include_shelf=False,
                title='Polynya mask',
                figsize=(10, 6))

    # Write to file
    write_binary(mask, mask_file, prec=prec)
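
A hypothetical call, building the standard Maud Rise mask (paths are illustrative):

polynya_mask('../grid/', 'maud_rise', 'polynya_mask_maud_rise')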
Example #11
def monthly_era5_files (file_head_in, start_year, end_year, file_head_out):

    grid = ERA5Grid()
    per_day = 24/6  # 4 time indices per day of 6-hourly data

    for year in range(start_year, end_year+1):
        print 'Processing year ' + str(year)
        data = read_binary(file_head_in+'_'+str(year), [grid.nx, grid.ny], 'xyt')
        data_monthly = np.empty([12, grid.ny, grid.nx])
        t = 0
        for month in range(12):
            nt = days_per_month(month+1, year)*per_day
            print 'Indices ' + str(t) + ' to ' + str(t+nt-1)
            data_monthly[month,:] = np.mean(data[t:t+nt,:], axis=0)
            t += nt
        write_binary(data_monthly, file_head_out+'_'+str(year))
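
This relies on a days_per_month helper assumed to be defined elsewhere in the package; a minimal sketch using the standard library (ignoring any non-Gregorian calendar support the package might provide):

import calendar

def days_per_month (month, year):
    # Number of days in the given month (1-12) of the given year,
    # accounting for leap years
    return calendar.monthrange(year, month)[1]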
Example #12
def calc_load_anomaly (grid, out_file, option='constant', ini_temp_file=None, ini_salt_file=None, ini_temp=None, ini_salt=None, constant_t=-1.9, constant_s=34.4, eosType='MDJWF', rhoConst=1035, tAlpha=None, sBeta=None, Tref=None, Sref=None, hfac=None, prec=64, check_grid=True):

    errorTol = 1e-13  # convergence criterion

    # Build the grid if needed
    if check_grid:
        grid = choose_grid(grid, None)
    # Decide which hfac to use
    if hfac is None:
        hfac = grid.hfac

    # Set temperature and salinity
    if ini_temp is not None and ini_salt is not None:
        # Deep copy of the arrays
        temp = np.copy(ini_temp)
        salt = np.copy(ini_salt)
    elif ini_temp_file is not None and ini_salt_file is not None:
        # Read from file
        temp = read_binary(ini_temp_file, [grid.nx, grid.ny, grid.nz], 'xyz', prec=prec)
        salt = read_binary(ini_salt_file, [grid.nx, grid.ny, grid.nz], 'xyz', prec=prec)
    else:
        print 'Error (calc_load_anomaly): Must either specify ini_temp and ini_salt OR ini_temp_file and ini_salt_file'
        sys.exit()

    # Fill in the ice shelves
    # The bathymetry will get filled too, but that doesn't matter because pressure is integrated from the top down
    closed = hfac==0
    if option == 'constant':
        # Fill with constant values
        temp[closed] = constant_t
        salt[closed] = constant_s
    elif option == 'nearest':
        # Select the layer immediately below the ice shelves and tile to make it 3D
        temp_top = xy_to_xyz(select_top(np.ma.masked_where(closed, temp), return_masked=False), grid)
        salt_top = xy_to_xyz(select_top(np.ma.masked_where(closed, salt), return_masked=False), grid)
        # Fill the mask with these values
        temp[closed] = temp_top[closed]
        salt[closed] = salt_top[closed]    
    elif option == 'precomputed':
        for data in [temp, salt]:
            # Make sure there are no missing values
            if (data[~closed]==0).any():
                print 'Error (calc_load_anomaly): you selected the precomputed option, but there appear to be missing values in the land mask.'
                sys.exit()
            # Make sure it's not a masked array as this will break the rms
            if isinstance(data, np.ma.MaskedArray):
                # Fill the mask with zeros
                data[data.mask] = 0
                data = data.data
    else:
        print 'Error (calc_load_anomaly): invalid option ' + option
        sys.exit()

    # Get vertical integrands considering z at both centres and edges of layers
    dz_merged = np.zeros(2*grid.nz)
    dz_merged[::2] = abs(grid.z - grid.z_edges[:-1])  # dz of top half of each cell
    dz_merged[1::2] = abs(grid.z_edges[1:] - grid.z)  # dz of bottom half of each cell
    # Tile to make 3D
    z = z_to_xyz(grid.z, grid)
    dz_merged = z_to_xyz(dz_merged, grid)

    # Initial guess for pressure (dbar) at centres of cells
    press = abs(z)*gravity*rhoConst*1e-4

    # Iteratively calculate pressure load anomaly until it converges
    press_old = np.zeros(press.shape)  # Dummy initial value for pressure from last iteration
    rms_error = 0
    while True:
        rms_old = rms_error
        rms_error = rms(press, press_old)
        print 'RMS error = ' + str(rms_error)
        if rms_error < errorTol or np.abs(rms_error-rms_old) < 0.1*errorTol:
            print 'Converged'
            break
        # Save old pressure
        press_old = np.copy(press)
        # Calculate density anomaly at centres of cells
        drho_c = density(eosType, salt, temp, press, rhoConst=rhoConst, Tref=Tref, Sref=Sref, tAlpha=tAlpha, sBeta=sBeta) - rhoConst
        # Use this for both centres and edges of cells
        drho = np.zeros(dz_merged.shape)
        drho[::2,...] = drho_c
        drho[1::2,...] = drho_c
        # Integrate pressure load anomaly (Pa)
        pload_full = np.cumsum(drho*gravity*dz_merged, axis=0)
        # Update estimate of pressure
        press = (abs(z)*gravity*rhoConst + pload_full[1::2,...])*1e-4

    # Extract pload at each level edge (don't care about centres anymore)
    pload_edges = pload_full[::2,...]

    # Now find pload at the ice shelf base
    # For each xy point, calculate three variables:
    # (1) pload at the base of the last fully dry ice shelf cell
    # (2) pload at the base of the cell beneath that
    # (3) hFacC for that cell
    # To calculate (1) we have to shift pload_edges upward by 1 cell
    pload_edges_above = neighbours_z(pload_edges)[0]
    pload_above = select_top(np.ma.masked_where(closed, pload_edges_above), return_masked=False)
    pload_below = select_top(np.ma.masked_where(closed, pload_edges), return_masked=False)
    hfac_below = select_top(np.ma.masked_where(closed, hfac), return_masked=False)
    # Now we can interpolate to the ice base
    pload = pload_above + (1-hfac_below)*(pload_below - pload_above)

    # Write to file
    write_binary(pload, out_file, prec=prec)
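
The convergence test calls an rms helper assumed to come from the package's utilities; a minimal sketch consistent with how it is used here:

def rms (array1, array2):
    # Root-mean-square difference between two arrays of the same shape
    return np.sqrt(np.mean((array1 - array2)**2))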
Example #13
def make_obcs (location, grid_path, input_path, output_dir, source='SOSE', use_seaice=True, nc_out=None, prec=32, split=180):

    from grid import SOSEGrid
    from file_io import NCfile, read_netcdf
    from interpolation import interp_bdry

    if source == 'SOSE':
        input_path = real_dir(input_path)
    output_dir = real_dir(output_dir)

    # Fields to interpolate
    # Important: SIarea has to be before SIuice and SIvice so it can be used for masking
    fields = ['THETA', 'SALT', 'UVEL', 'VVEL', 'SIarea', 'SIheff', 'SIuice', 'SIvice', 'ETAN']  
    # Flag for 2D or 3D
    dim = [3, 3, 3, 3, 2, 2, 2, 2, 2]
    # Flag for grid type
    gtype = ['t', 't', 'u', 'v', 't', 't', 'u', 'v', 't']
    if source == 'MIT':
        # Also consider snow thickness
        fields += ['SIhsnow']
        dim += [2]
        gtype += ['t']
    # End of filenames for input
    infile_tail = '_climatology.data'
    # End of filenames for output
    outfile_tail = '_'+source+'.OBCS_'+location

    print 'Building MITgcm grid'
    if source == 'SOSE':
        model_grid = grid_check_split(grid_path, split)
    elif source == 'MIT':
        model_grid = Grid(grid_path)
    # Figure out what the latitude or longitude is on the boundary, both on the centres and outside edges of those cells
    if location == 'S':
        lat0 = model_grid.lat_1d[0]
        lat0_e = model_grid.lat_corners_1d[0]
        print 'Southern boundary at ' + str(lat0) + ' (cell centre), ' + str(lat0_e) + ' (cell edge)'
    elif location == 'N':
        lat0 = model_grid.lat_1d[-1]
        lat0_e = 2*model_grid.lat_corners_1d[-1] - model_grid.lat_corners_1d[-2]
        print 'Northern boundary at ' + str(lat0) + ' (cell centre), ' + str(lat0_e) + ' (cell edge)'
    elif location == 'W':
        lon0 = model_grid.lon_1d[0]
        lon0_e = model_grid.lon_corners_1d[0]
        print 'Western boundary at ' + str(lon0) + ' (cell centre), ' + str(lon0_e) + ' (cell edge)'
    elif location == 'E':
        lon0 = model_grid.lon_1d[-1]
        lon0_e = 2*model_grid.lon_corners_1d[-1] - model_grid.lon_corners_1d[-2]
        print 'Eastern boundary at ' + str(lon0) + ' (cell centre), ' + str(lon0_e) + ' (cell edge)'
    else:
        print 'Error (make_obcs): invalid location ' + str(location)
        sys.exit()

    if source == 'SOSE':
        print 'Building SOSE grid'
        source_grid = SOSEGrid(input_path+'grid/', model_grid=model_grid, split=split)
    elif source == 'MIT':
        print 'Building grid from source model'
        source_grid = Grid(input_path)
    else:
        print 'Error (make_obcs): invalid source ' + source
        sys.exit()
    # Calculate interpolation indices and coefficients to the boundary latitude or longitude
    if location in ['N', 'S']:
        # Cell centre
        j1, j2, c1, c2 = interp_slice_helper(source_grid.lat_1d, lat0)
        # Cell edge
        j1_e, j2_e, c1_e, c2_e = interp_slice_helper(source_grid.lat_corners_1d, lat0_e)
    else:
        # Pass lon=True to consider the possibility of boundary near 0E
        i1, i2, c1, c2 = interp_slice_helper(source_grid.lon_1d, lon0, lon=True)
        i1_e, i2_e, c1_e, c2_e = interp_slice_helper(source_grid.lon_corners_1d, lon0_e, lon=True)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyzt')
        ncfile.add_time(np.arange(12)+1, units='months')  

    # Process fields
    for n in range(len(fields)):
        if fields[n].startswith('SI') and not use_seaice:
            continue

        print 'Processing ' + fields[n]
        if source == 'SOSE':
            in_file = input_path + fields[n] + infile_tail
        out_file = output_dir + fields[n] + outfile_tail
        # Read the monthly climatology at all points
        if source == 'SOSE':
            if dim[n] == 3:
                source_data = source_grid.read_field(in_file, 'xyzt')
            else:
                source_data = source_grid.read_field(in_file, 'xyt')
        else:
            source_data = read_netcdf(input_path, fields[n])

        if fields[n] == 'SIarea' and source == 'SOSE':
            # We'll need this field later for SIuice and SIvice, as SOSE didn't mask those variables properly
            print 'Interpolating sea ice area to u and v grids for masking of sea ice velocity'
            source_aice_u = interp_grid(source_data, source_grid, 't', 'u', time_dependent=True, mask_with_zeros=True, periodic=True)
            source_aice_v = interp_grid(source_data, source_grid, 't', 'v', time_dependent=True, mask_with_zeros=True, periodic=True)
        # Set sea ice velocity to zero wherever sea ice area is zero
        if fields[n] in ['SIuice', 'SIvice'] and source == 'SOSE':
            print 'Masking sea ice velocity with sea ice area'
            if fields[n] == 'SIuice':
                index = source_aice_u==0
            else:
                index = source_aice_v==0
            source_data[index] = 0            

        # Choose the correct grid for lat, lon, hfac
        source_lon, source_lat = source_grid.get_lon_lat(gtype=gtype[n], dim=1)
        source_hfac = source_grid.get_hfac(gtype=gtype[n])
        model_lon, model_lat = model_grid.get_lon_lat(gtype=gtype[n], dim=1)
        model_hfac = model_grid.get_hfac(gtype=gtype[n])
        # Interpolate to the correct grid and choose the correct horizontal axis
        if location in ['N', 'S']:
            if gtype[n] == 'v':
                source_data = c1_e*source_data[...,j1_e,:] + c2_e*source_data[...,j2_e,:]
                # Multiply hfac by the ceiling of hfac on each side, to make sure we're not averaging over land
                source_hfac = (c1_e*source_hfac[...,j1_e,:] + c2_e*source_hfac[...,j2_e,:])*np.ceil(source_hfac[...,j1_e,:])*np.ceil(source_hfac[...,j2_e,:])
            else:
                source_data = c1*source_data[...,j1,:] + c2*source_data[...,j2,:]
                source_hfac = (c1*source_hfac[...,j1,:] + c2*source_hfac[...,j2,:])*np.ceil(source_hfac[...,j1,:])*np.ceil(source_hfac[...,j2,:])
            source_haxis = source_lon
            model_haxis = model_lon
            if location == 'S':
                model_hfac = model_hfac[:,0,:]
            else:
                model_hfac = model_hfac[:,-1,:]
        else:
            if gtype[n] == 'u':
                source_data = c1_e*source_data[...,i1_e] + c2_e*source_data[...,i2_e]
                source_hfac = (c1_e*source_hfac[...,i1_e] + c2_e*source_hfac[...,i2_e])*np.ceil(source_hfac[...,i1_e])*np.ceil(source_hfac[...,i2_e])
            else:
                source_data = c1*source_data[...,i1] + c2*source_data[...,i2]
                source_hfac = (c1*source_hfac[...,i1] + c2*source_hfac[...,i2])*np.ceil(source_hfac[...,i1])*np.ceil(source_hfac[...,i2])
            source_haxis = source_lat
            model_haxis = model_lat
            if location == 'W':
                model_hfac = model_hfac[...,0]
            else:
                model_hfac = model_hfac[...,-1]
        if source == 'MIT' and model_haxis[0] < source_haxis[0]:
            # Need to extend source data to the west or south. Just add one row.
            source_haxis = np.concatenate(([model_haxis[0]-0.1], source_haxis))
            source_data = np.concatenate((np.expand_dims(source_data[:,...,0], -1), source_data), axis=-1)
            source_hfac = np.concatenate((np.expand_dims(source_hfac[:,0], 1), source_hfac), axis=1)
        # For 2D variables, just need surface hfac
        if dim[n] == 2:
            source_hfac = source_hfac[0,:]
            model_hfac = model_hfac[0,:]

        # Now interpolate each month to the model grid
        if dim[n] == 3:
            data_interp = np.zeros([12, model_grid.nz, model_haxis.size])
        else:
            data_interp = np.zeros([12, model_haxis.size])
        for month in range(12):
            print '...interpolating month ' + str(month+1)
            data_interp_tmp = interp_bdry(source_haxis, source_grid.z, source_data[month,:], source_hfac, model_haxis, model_grid.z, model_hfac, depth_dependent=(dim[n]==3))
            if fields[n] not in ['THETA', 'SALT']:
                # Zero in land mask is more physical than extrapolated data
                index = model_hfac==0
                data_interp_tmp[index] = 0
            data_interp[month,:] = data_interp_tmp

        write_binary(data_interp, out_file, prec=prec)
        
        if nc_out is not None:
            print '...adding to ' + nc_out
            # Construct the dimension code
            if location in ['S', 'N']:
                dimension = 'x'
            else:
                dimension = 'y'
            if dim[n] == 3:
                dimension += 'z'
            dimension += 't'
            ncfile.add_variable(fields[n] + '_' + location, data_interp, dimension)

    if nc_out is not None:
        ncfile.close()
Example #14
def seaice_drag_scaling (grid_path, output_file, rd_scale=1, bb_scale=1, ft_scale=1, prec=64):

    from plot_latlon import latlon_plot

    # Cutoff latitude
    max_lat = -74
    # Longitude bounds on each region
    rd_bounds = [-62, -58]
    bb_bounds = [-57, -49]
    ft_bounds = [-48, -35]
    bounds = [rd_bounds, bb_bounds, ft_bounds]
    scale_factors = [rd_scale, bb_scale, ft_scale]
    # Max distance from the coast (km)
    scale_dist = 150
    # Sigma for smoothing
    sigma = 2

    print 'Building grid'
    grid = Grid(grid_path)
    print 'Selecting coastal points'
    coast_mask = grid.get_coast_mask(ignore_iceberg=True)
    lon_coast = grid.lon_2d[coast_mask].ravel()
    lat_coast = grid.lat_2d[coast_mask].ravel()

    print 'Selecting regions'
    scale_coast = np.ones(lon_coast.shape)
    for n in range(3):
        index = (lon_coast >= bounds[n][0])*(lon_coast <= bounds[n][1])*(lat_coast <= max_lat)
        scale_coast[index] = scale_factors[n]

    print 'Calculating distance from the coast'
    min_dist = None
    nearest_scale = None
    # Loop over all the coastal points
    for i in range(lon_coast.size):
        # Calculate distance of every point in the model grid to this specific coastal point, in km
        dist_to_pt = dist_btw_points([lon_coast[i], lat_coast[i]], [grid.lon_2d, grid.lat_2d])*1e-3
        if min_dist is None:
            # Initialise the arrays
            min_dist = dist_to_pt
            nearest_scale = np.zeros(min_dist.shape) + scale_coast[i]
        else:
            # Figure out which cells have this coastal point as the closest one yet, and update the arrays
            index = dist_to_pt < min_dist
            min_dist[index] = dist_to_pt[index]
            nearest_scale[index] = scale_coast[i]
    # Smooth the result, and mask out the land and ice shelves
    min_dist = mask_land_ice(min_dist, grid)
    nearest_scale = mask_land_ice(smooth_xy(nearest_scale, sigma=sigma), grid)

    print 'Extending scale factors offshore'
    # Cosine function moving from the scaling factor to 1 over a distance of scale_dist km offshore
    scale_extend = (min_dist < scale_dist)*(nearest_scale - 1)*np.cos(np.pi/2*min_dist/scale_dist) + 1

    print 'Plotting'
    latlon_plot(scale_extend, grid, ctype='ratio', include_shelf=False, title='Scaling factor', figsize=(10,6))
    latlon_plot(scale_extend, grid, ctype='ratio', include_shelf=False, title='Scaling factor', zoom_fris=True)

    print 'Writing to file'
    # Replace mask with zeros
    mask = scale_extend.mask
    scale_extend = scale_extend.data
    scale_extend[mask] = 0
    write_binary(scale_extend, output_file, prec=prec)
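
The taper blends smoothly from the coastal value to 1: at min_dist=0 the cosine is 1 and the expression reduces to nearest_scale, while beyond scale_dist the boolean factor zeroes the correction term. A quick illustrative check:

import numpy as np
d = np.array([0., 75., 150., 300.])  # hypothetical distances from the coast (km)
s = 2.  # hypothetical coastal scale factor
taper = (d < 150.)*(s - 1)*np.cos(np.pi/2*d/150.) + 1
print taper  # approximately [2., 1.71, 1., 1.]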
Example #15
def process_era5 (in_dir, out_dir, year, six_hourly=True, first_year=False, last_year=False, prec=32):

    in_dir = real_dir(in_dir)
    out_dir = real_dir(out_dir)

    if year == 1979 and not first_year:
        print 'Warning (process_era5): last we checked, 1979 was the first year of ERA5. Unless this has changed, you need to set first_year=True.'
    if year == 2018 and not last_year:
        print 'Warning (process_era5): last we checked, 2018 was the last year of ERA5. Unless this has changed, you need to set last_year=True.'

    # Construct file paths for input and output files
    in_head = in_dir + 'era5_'
    var_in = ['msl', 't2m', 'd2m', 'u10', 'v10', 'tp', 'ssrd', 'strd', 'e']
    if six_hourly:
        accum_flag = '_2'
    in_tail = '_' + str(year) + '.nc'
    out_head = out_dir + 'ERA5_'
    var_out = ['apressure', 'atemp', 'aqh', 'uwind', 'vwind', 'precip', 'swdown', 'lwdown', 'evap']
    out_tail = '_' + str(year)

    # Northernmost latitude to keep
    lat0 = -30
    # Length of ERA5 time interval in seconds
    dt = 3600.

    # Read the grid from the first file
    first_file = in_head + var_in[0] + in_tail
    lon = read_netcdf(first_file, 'longitude')
    lat = read_netcdf(first_file, 'latitude')
    # Find the index of the last latitude we don't care about (remember that latitude goes from north to south in ERA files!)
    j_bound = np.nonzero(lat < lat0)[0][0] - 2
    # Trim and flip latitude
    lat = lat[:j_bound:-1]
    # Also read the first time index for the starting date
    start_date = netcdf_time(first_file, monthly=False)[0]

    if first_year:
        # Print grid information to the reader
        print '\n'
        print 'For var in ' + str(var_out) + ', make these changes in input/data.exf:\n'
        print 'varstartdate1 = ' + start_date.strftime('%Y%m%d')
        if six_hourly:
            print 'varperiod = ' + str(6*dt)
        else:
            print 'varperiod = ' + str(dt)
        print 'varfile = ' + 'ERA5_var'
        print 'var_lon0 = ' + str(lon[0])
        print 'var_lon_inc = ' + str(lon[1]-lon[0])
        print 'var_lat0 = ' + str(lat[0])
        print 'var_lat_inc = ' + str(lat.size-1) + '*' + str(lat[1]-lat[0])
        print 'var_nlon = ' + str(lon.size)
        print 'var_nlat = ' + str(lat.size)
        print '\n'

    # Loop over variables
    for i in range(len(var_in)):
        
        in_file = in_head + var_in[i] + in_tail
        print 'Reading ' + in_file
        data = read_netcdf(in_file, var_in[i])
        
        print 'Processing'
        # Trim and flip over latitude
        data = data[:,:j_bound:-1,:]
        
        if var_in[i] == 'msl':
            # Save pressure for later conversions
            press = np.copy(data)

        elif var_in[i] == 't2m':
            # Convert from Kelvin to Celsius
            data -= temp_C2K

        elif var_in[i] == 'd2m':
            # Calculate specific humidity from dew point temperature and pressure
            # Start with vapour pressure
            e = es0*np.exp(Lv/Rv*(1/temp_C2K - 1/data))
            data = sh_coeff*e/(press - (1-sh_coeff)*e)
            
        elif var_in[i] in ['tp', 'ssrd', 'strd', 'e']:
            # Accumulated variables
            # This is more complicated
            
            if six_hourly:
                # Need to read data from the following hour to interpolate to this hour. This was downloaded into separate files.
                in_file_2 = in_head + var_in[i] + accum_flag + in_tail
                print 'Reading ' + in_file_2
                data_2 = read_netcdf(in_file_2, var_in[i])
                data_2 = data_2[:,:j_bound:-1,:]
            # The case where six_hourly is False is dealt with after the first_year check
            
            if first_year:
                # The first 7 hours of the accumulated variables are missing during the first year of ERA5. Fill this missing period with data from the next available time indices.
                if six_hourly:
                    # The first file is missing two indices (hours 0 and 6)
                    data = np.concatenate((data[:2,:], data), axis=0)
                    # The second file is missing one index (hour 1)
                    data_2 = np.concatenate((data_2[:1,:], data_2), axis=0)
                else:
                    # The first file is missing 7 indices (hours 0 to 6)
                    data = np.concatenate((data[:7,:], data), axis=0)
                    
            if not six_hourly:
                # Now get data from the following hour. Just shift one timestep ahead.
                # First need data from the first hour of next year
                if last_year:
                    # There is no such data; just copy the last hour of this year
                    data_next = data[-1,:]
                else:
                    in_file_2 = in_head + var_in[i] + '_' + str(year+1) + '.nc'
                    data_next = read_netcdf(in_file_2, var_in[i], time_index=0)
                    data_next = data_next[:j_bound:-1,:]  
                data_2 = np.concatenate((data[1:,:], np.expand_dims(data_next,0)), axis=0)
                
            # Now we can interpolate to the given hour: just the mean of either side
            data = 0.5*(data + data_2)
            # Convert from integrals to time-averages
            data /= dt
            if var_in[i] in ['ssrd', 'strd', 'e']:
                # Swap sign on fluxes
                data *= -1

        out_file = out_head + var_out[i] + out_tail
        write_binary(data, out_file, prec=prec)
Example #16
def cmip6_atm_forcing (var, expt, mit_start_year=None, mit_end_year=None, model_path='/badc/cmip6/data/CMIP6/CMIP/MOHC/UKESM1-0-LL/', ensemble_member='r1i1p1f2', out_dir='./', out_file_head=None):

    import netCDF4 as nc

    # Days per year (assumes 30-day months)
    days_per_year = 12*30

    # Make sure it's a real variable
    if var not in ['tas', 'huss', 'uas', 'vas', 'psl', 'pr', 'rsds', 'rlds']:
        print 'Error (cmip6_atm_forcing): unknown variable ' + var
        sys.exit()

    # Construct out_file_head if needed
    if out_file_head is None:
        out_file_head = expt+'_'+var+'_'
    elif out_file_head[-1] != '_':
        # Add an underscore if it's not already there
        out_file_head += '_'
    out_dir = real_dir(out_dir)

    # Figure out where all the files are, and which years they cover
    in_files, start_years, end_years = find_cmip6_files(model_path, expt, ensemble_member, var, 'day')
    if mit_start_year is None:
        mit_start_year = start_years[0]
    if mit_end_year is None:
        mit_end_year = end_years[-1]

    # Tell the user what to write about the grid
    lat = read_netcdf(in_files[0], 'lat')
    lon = read_netcdf(in_files[0], 'lon') 
    print '\nChanges to make in data.exf:'
    print '*_lon0='+str(lon[0])
    print '*_lon_inc='+str(lon[1]-lon[0])
    print '*_lat0='+str(lat[0])
    print '*_lat_inc='+str(lat[1]-lat[0])
    print '*_nlon='+str(lon.size)
    print '*_nlat='+str(lat.size)

    # Loop over each file
    for t in range(len(in_files)):

        file_path = in_files[t]
        print 'Processing ' + file_path        
        print 'Covers years '+str(start_years[t])+' to '+str(end_years[t])
        
        # Loop over years
        t_start = 0  # Time index in file
        t_end = t_start+days_per_year
        for year in range(start_years[t], end_years[t]+1):
            if year >= mit_start_year and year <= mit_end_year:
                print 'Processing ' + str(year)

                # Read data
                print 'Reading ' + str(year) + ' from indices ' + str(t_start) + '-' + str(t_end)
                data = read_netcdf(file_path, var, t_start=t_start, t_end=t_end)
                # Conversions if necessary
                if var == 'tas':
                    # Kelvin to Celsius
                    data -= temp_C2K
                elif var == 'pr':
                    # kg/m^2/s to m/s
                    data /= rho_fw
                elif var in ['rsds', 'rlds']:
                    # Swap sign on radiation fluxes
                    data *= -1
                # Write data
                write_binary(data, out_dir+out_file_head+str(year))
            # Update time range for next time
            t_start = t_end
            t_end = t_start + days_per_year
Example #17
def sose_ics (grid_path, sose_dir, output_dir, nc_out=None, constant_t=-1.9, constant_s=34.4, split=180, prec=64):

    from grid import SOSEGrid
    from file_io import NCfile
    from interpolation import interp_reg

    sose_dir = real_dir(sose_dir)
    output_dir = real_dir(output_dir)

    # Fields to interpolate
    fields = ['THETA', 'SALT', 'SIarea', 'SIheff']
    # Flag for 2D or 3D
    dim = [3, 3, 2, 2]
    # Constant values for ice shelf cavities
    constant_value = [constant_t, constant_s, 0, 0]
    # End of filenames for input
    infile_tail = '_climatology.data'
    # End of filenames for output
    outfile_tail = '_SOSE.ini'
    
    print 'Building grids'
    # First build the model grid and check that we have the right value for split
    model_grid = grid_check_split(grid_path, split)
    # Now build the SOSE grid
    sose_grid = SOSEGrid(sose_dir+'grid/', model_grid=model_grid, split=split)
    # Extract land mask
    sose_mask = sose_grid.hfac == 0
    
    print 'Building mask for SOSE points to fill'
    # Figure out which points we need for interpolation
    # Find open cells according to the model, interpolated to SOSE grid
    model_open = np.ceil(interp_reg(model_grid, sose_grid, np.ceil(model_grid.hfac), fill_value=1))
    # Find ice shelf cavity points according to model, interpolated to SOSE grid
    model_cavity = np.ceil(interp_reg(model_grid, sose_grid, xy_to_xyz(model_grid.ice_mask, model_grid), fill_value=0)).astype(bool)
    # Select open, non-cavity cells
    fill = model_open*np.invert(model_cavity)
    # Extend into the mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, use_3d=True, num_iters=3)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyz')

    # Process fields
    for n in range(len(fields)):
        print 'Processing ' + fields[n]
        in_file = sose_dir + fields[n] + infile_tail
        out_file = output_dir + fields[n] + outfile_tail
        print '...reading ' + in_file
        # Just keep the January climatology
        if dim[n] == 3:
            sose_data = sose_grid.read_field(in_file, 'xyzt')[0,:]
        else:
            # Fill any missing regions with zero sea ice, as we won't be extrapolating them later
            sose_data = sose_grid.read_field(in_file, 'xyt', fill_value=0)[0,:]
        # Discard the land mask, and extrapolate slightly into missing regions so the interpolation doesn't get messed up.
        print '...extrapolating into missing regions'
        if dim[n] == 3:
            sose_data = discard_and_fill(sose_data, sose_mask, fill)
            # Fill cavity points with constant values
            sose_data[model_cavity] = constant_value[n]
        else:
            # Just care about surface layer
            sose_data = discard_and_fill(sose_data, sose_mask[0,:], fill[0,:], use_3d=False)
        print '...interpolating to model grid'
        data_interp = interp_reg(sose_grid, model_grid, sose_data, dim=dim[n])
        # Fill the land mask with zeros
        if dim[n] == 3:
            data_interp[model_grid.hfac==0] = 0
        else:
            data_interp[model_grid.hfac[0,:]==0] = 0
        write_binary(data_interp, out_file, prec=prec)
        if nc_out is not None:
            print '...adding to ' + nc_out
            if dim[n] == 3:
                ncfile.add_variable(fields[n], data_interp, 'xyz')
            else:
                ncfile.add_variable(fields[n], data_interp, 'xy')

    if nc_out is not None:
        ncfile.close()
Example #18
def latlon_points (xmin, xmax, ymin, ymax, res, dlat_file, prec=64):

    # Number of iterations for latitude convergence
    num_lat_iter = 10

    if xmin > xmax:
        print "Error (latlon_points): looks like your domain crosses 180E. Try again with your longitude in the range (0, 360) instead of (-180, 180)."
        sys.exit()

    # Build longitude values
    lon = np.arange(xmin, xmax+res, res)
    # Update xmax if the range doesn't evenly divide by res
    if xmax != lon[-1]:
        xmax = lon[-1]
        print 'Eastern boundary moved to ' + str(xmax)
    # Put xmin in the range (0, 360) for namelist
    if xmin < 0:
        xmin += 360

    # First guess for latitude: resolution scaled by cos(latitude) of the southern edge
    lat = [ymin]
    while lat[-1] < ymax:
        lat.append(lat[-1] + res*np.cos(lat[-1]*deg2rad))
    lat = np.array(lat)
    # Now iterate to converge on resolution scaled by cos(latitude) at cell centres
    for iter in range(num_lat_iter):
        lat_old = np.copy(lat)
        # Latitude at centres    
        lat_c = 0.5*(lat[:-1] + lat[1:])
        j = 0
        lat = [ymin]
        while lat[-1] < ymax and j < lat_c.size:
            lat.append(lat[-1] + res*np.cos(lat_c[j]*deg2rad))
            j += 1
        lat = np.array(lat)
    # Update ymax
    ymax = lat[-1]
    print 'Northern boundary moved to ' + str(ymax)

    # Write latitude resolutions to file
    dlat = lat[1:] - lat[:-1]
    write_binary(dlat, dlat_file, prec=prec)

    # Remind the user what to do in their namelist
    print '\nChanges to make to input/data:'
    print 'xgOrigin=' + str(xmin)
    print 'ygOrigin=' + str(ymin)
    print 'dxSpacing=' + str(res)
    print "delYfile='" + dlat_file + "' (and copy this file into input/)"

    # Find dimensions of tracer grid
    Nx = lon.size-1
    Ny = lat.size-1
    # Find all the factors
    factors_x = factors(Nx)
    factors_y = factors(Ny)
    print '\nNx = ' + str(Nx) + ' which has the factors ' + str(factors_x)
    print 'Ny = ' + str(Ny) + ' which has the factors ' + str(factors_y)
    print 'If you are happy with this, proceed with interp_bedmap2. At some point, choose your tile size based on the factors and update code/SIZE.h.'
    print 'Otherwise, tweak the boundaries and try again.'

    return lon, lat
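
A hypothetical call for a Weddell Sea domain spanning 80W-20E and 80S-60S at a nominal quarter-degree resolution (the dlat file name is illustrative):

lon, lat = latlon_points(-80, 20, -80, -60, 0.25, 'delY_quarter_degree')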
Example #19
def sose_sss_restoring (grid_path, sose_dir, output_salt_file, output_mask_file, nc_out=None, h0=-1250, obcs_sponge=0, split=180, prec=64):

    sose_dir = real_dir(sose_dir)

    print 'Building grids'
    # First build the model grid and check that we have the right value for split
    model_grid = grid_check_split(grid_path, split)
    # Now build the SOSE grid
    sose_grid = SOSEGrid(sose_dir+'grid/', model_grid=model_grid, split=split)
    # Extract surface land mask
    sose_mask = sose_grid.hfac[0,:] == 0

    print 'Building mask'
    mask_surface = np.ones([model_grid.ny, model_grid.nx])
    # Mask out land and ice shelves
    mask_surface[model_grid.hfac[0,:]==0] = 0
    # Save this for later
    mask_land_ice = np.copy(mask_surface)
    # Mask out continental shelf
    mask_surface[model_grid.bathy > h0] = 0
    # Smooth, and remask the land and ice shelves
    mask_surface = smooth_xy(mask_surface, sigma=2)*mask_land_ice
    if obcs_sponge > 0:
        # Also mask the cells affected by OBCS and/or its sponge
        mask_surface[:obcs_sponge,:] = 0
        mask_surface[-obcs_sponge:,:] = 0
        mask_surface[:,:obcs_sponge] = 0
        mask_surface[:,-obcs_sponge:] = 0
    # Make a 3D version with zeros in deeper layers
    mask_3d = np.zeros([model_grid.nz, model_grid.ny, model_grid.nx])
    mask_3d[0,:] = mask_surface
    
    print 'Reading SOSE salinity'
    # Just keep the surface layer
    sose_sss = sose_grid.read_field(sose_dir+'SALT_climatology.data', 'xyzt')[:,0,:,:]
    
    # Figure out which SOSE points we need for interpolation
    # Restoring mask interpolated to the SOSE grid
    fill = np.ceil(interp_reg(model_grid, sose_grid, mask_3d[0,:], dim=2, fill_value=1))
    # Extend into the mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, num_iters=3)

    # Process one month at a time
    sss_interp = np.zeros([12, model_grid.nz, model_grid.ny, model_grid.nx])
    for month in range(12):
        print 'Month ' + str(month+1)
        print '...filling missing values'
        sose_sss_filled = discard_and_fill(sose_sss[month,:], sose_mask, fill, use_3d=False)
        print '...interpolating'
        # Mask out land and ice shelves
        sss_interp[month,0,:] = interp_reg(sose_grid, model_grid, sose_sss_filled, dim=2)*mask_land_ice

    write_binary(sss_interp, output_salt_file, prec=prec)
    write_binary(mask_3d, output_mask_file, prec=prec)

    if nc_out is not None:
        print 'Writing ' + nc_out
        ncfile = NCfile(nc_out, model_grid, 'xyzt')
        ncfile.add_time(np.arange(12)+1, units='months')
        ncfile.add_variable('salinity', sss_interp, 'xyzt', units='psu')
        ncfile.add_variable('restoring_mask', mask_3d, 'xyz')
        ncfile.close()
Example #20
def katabatic_correction(grid_dir,
                         cmip_file,
                         era5_file,
                         out_file_scale,
                         out_file_rotate,
                         scale_cap=3,
                         xmin=None,
                         xmax=None,
                         ymin=None,
                         ymax=None,
                         prec=64):

    from plot_latlon import latlon_plot

    var_names = ['uwind', 'vwind']
    # Max distance from the coast for the taper (km)
    scale_dist = 150.
    # Radius for smoothing
    sigma = 2

    print 'Building grid'
    grid = Grid(grid_dir)
    print 'Selecting coastal points'
    coast_mask = grid.get_coast_mask(ignore_iceberg=True)
    lon_coast = grid.lon_2d[coast_mask].ravel()
    lat_coast = grid.lat_2d[coast_mask].ravel()
    if xmin is None:
        xmin = np.amin(grid.lon_2d)
    if xmax is None:
        xmax = np.amax(grid.lon_2d)
    if ymin is None:
        ymin = np.amin(grid.lat_2d)
    if ymax is None:
        ymax = np.amax(grid.lat_2d)

    print 'Calculating winds in polar coordinates'
    magnitudes = []
    angles = []
    for fname in [cmip_file, era5_file]:
        u = read_netcdf(fname, var_names[0])
        v = read_netcdf(fname, var_names[1])
        magnitudes.append(np.sqrt(u**2 + v**2))
        angle = np.arctan2(v, u)
        angles.append(angle)

    print 'Calculating corrections'
    # Take minimum of the ratio of ERA5 to CMIP wind magnitude, and the scale cap
    scale = np.minimum(magnitudes[1] / magnitudes[0], scale_cap)
    # Smooth and mask the land and ice shelf
    scale = mask_land_ice(smooth_xy(scale, sigma=sigma), grid)
    # Take difference in angles
    rotate = angles[1] - angles[0]
    # Take mod 2pi when necessary
    index = rotate < -np.pi
    rotate[index] += 2 * np.pi
    index = rotate > np.pi
    rotate[index] -= 2 * np.pi
    # Smoothing would be weird with the periodic angle, so just mask
    rotate = mask_land_ice(rotate, grid)

    print 'Calculating distance from the coast'
    min_dist = None
    # Loop over all the coastal points
    for i in range(lon_coast.size):
        # Skip over any points that are out of bounds
        if lon_coast[i] < xmin or lon_coast[i] > xmax or lat_coast[i] < ymin or lat_coast[i] > ymax:
            continue
        # Calculate distance of every point in the model grid to this specific coastal point, in km
        dist_to_pt = dist_btw_points([lon_coast[i], lat_coast[i]],
                                     [grid.lon_2d, grid.lat_2d]) * 1e-3
        if min_dist is None:
            # Initialise the array
            min_dist = dist_to_pt
        else:
            # Figure out which cells have this coastal point as the closest one yet, and update the array
            index = dist_to_pt < min_dist
            min_dist[index] = dist_to_pt[index]

    print 'Tapering function offshore'
    # Cosine function moving from scaling factor to 1 over distance of scale_dist km offshore
    scale_tapered = (min_dist < scale_dist) * (scale - 1) * np.cos(
        np.pi / 2 * min_dist / scale_dist) + 1
    # For the rotation, move from scaling factor to 0
    rotate_tapered = (min_dist < scale_dist) * rotate * np.cos(
        np.pi / 2 * min_dist / scale_dist)

    print 'Plotting'
    data_to_plot = [min_dist, scale_tapered, rotate_tapered]
    titles = ['Distance to coast (km)', 'Scaling factor', 'Rotation factor']
    ctype = ['basic', 'ratio', 'plusminus']
    fig_names = ['min_dist.png', 'scale.png', 'rotate.png']
    for i in range(len(data_to_plot)):
        for fig_name in [None, fig_names[i]]:
            latlon_plot(data_to_plot[i],
                        grid,
                        ctype=ctype[i],
                        include_shelf=False,
                        title=titles[i],
                        figsize=(10, 6),
                        fig_name=fig_name)

    print 'Writing to file'
    fields = [scale_tapered, rotate_tapered]
    out_files = [out_file_scale, out_file_rotate]
    for n in range(len(fields)):
        # Replace mask with zeros
        mask = fields[n].mask
        data = fields[n].data
        data[mask] = 0
        write_binary(data, out_files[n], prec=prec)
Example #21
def balance_obcs (grid_path, option='balance', obcs_file_w_u=None, obcs_file_e_u=None, obcs_file_s_v=None, obcs_file_n_v=None, d_eta=None, d_t=None, max_deta_dt=0.5, prec=32):

    if option == 'correct' and (d_eta is None or d_t is None):
        print 'Error (balance_obcs): must set d_eta and d_t for option="correct"'
        sys.exit()
    
    print 'Building grid'
    grid = Grid(grid_path)

    # Calculate integrands of area, scaled by hFacC
    # Note that dx and dy are only available on western and southern edges of cells respectively; for the eastern and northern boundary, will just have to use 1 cell in. Not perfect, but this correction wouldn't perfectly conserve anyway.
    # Area of western face = dy*dz*hfac
    dA_w = xy_to_xyz(grid.dy_w, grid)*z_to_xyz(grid.dz, grid)*grid.hfac
    # Area of southern face = dx*dz*hfac
    dA_s = xy_to_xyz(grid.dx_s, grid)*z_to_xyz(grid.dz, grid)*grid.hfac

    # Now extract the area array at each boundary, and wrap up into a list for easy iteration later
    dA_bdry = [dA_w[:,:,0], dA_w[:,:,-1], dA_s[:,0,:], dA_s[:,-1,:]]
    # Some more lists:
    bdry_key = ['W', 'E', 'S', 'N']
    files = [obcs_file_w_u, obcs_file_e_u, obcs_file_s_v, obcs_file_n_v]
    dimensions = ['yzt', 'yzt', 'xzt', 'xzt']
    sign = [1, -1, 1, -1]  # Multiply velocity variable by this to get incoming transport
    # Initialise number of timesteps
    num_time = None

    # Integrate the total area of ocean cells on boundaries
    total_area = 0
    for i in range(len(files)):
        if files[i] is not None:
            print 'Calculating area of ' + bdry_key[i] + ' boundary'
            total_area += np.sum(dA_bdry[i])

    # Calculate the net transport into the domain
    if option in ['balance', 'dampen']:
        # Transport based on OBCS normal velocities
        if option == 'balance':
            net_transport = 0
        elif option == 'dampen':
            net_transport = None
        for i in range(len(files)):
            if files[i] is not None:
                print 'Processing ' + bdry_key[i] + ' boundary from ' + files[i]
                # Read data
                vel = read_binary(files[i], [grid.nx, grid.ny, grid.nz], dimensions[i], prec=prec)
                if num_time is None:
                    # Find number of time indices
                    num_time = vel.shape[0]
                elif num_time != vel.shape[0]:
                    print 'Error (balance_obcs): inconsistent number of time indices between OBCS files'
                    sys.exit()
                if option == 'dampen' and net_transport is None:
                    # Initialise transport per month
                    net_transport = np.zeros(num_time)
                if option == 'balance':
                    # Time-average velocity (this is equivalent to calculating the transport at each month and then time-averaging at the end - it's all just sums)
                    vel = np.mean(vel, axis=0)
                    # Integrate net transport through this boundary into the domain, and add to global sum
                    net_transport += np.sum(sign[i]*vel*dA_bdry[i])
                elif option == 'dampen':
                    # Integrate net transport at each month
                    for t in range(num_time):
                        net_transport[t] += np.sum(sign[i]*vel[t,:]*dA_bdry[i])
    elif option == 'correct':
        # Transport based on simulated changes in sea surface height
        # Need area of sea surface
        dA_sfc = np.sum(grid.dA*np.invert(grid.land_mask).astype(float))
        # Calculate transport in m^3/s
        net_transport = d_eta*dA_sfc/(d_t*sec_per_year)        

    # Inner function to nicely print the net transport to the user
    def print_net_transport (transport):
        if transport < 0:
            direction = 'out of the domain'
        else:
            direction = 'into the domain'
        print 'Net transport is ' + str(abs(transport*1e-6)) + ' Sv ' + direction

    if option == 'dampen':
        for t in range(num_time):
            print 'Month ' + str(t+1)
            print_net_transport(net_transport[t])
    else:
        print_net_transport(net_transport)

    if option == 'dampen':
        # Calculate the acceptable maximum absolute transport
        # First need total area of sea surface (including cavities) in domain
        surface_area = np.sum(mask_land(grid.dA, grid))
        max_transport = max_deta_dt*surface_area/(sec_per_day*30)
        print 'Maximum allowable transport is ' + str(max_transport*1e-6) + ' Sv'
        if np.max(np.abs(net_transport)) <= max_transport:
            print 'OBCS satisfy this; nothing to do'
            return
        # Work out by what factor to dampen the transports
        scale_factor = max_transport/np.max(np.abs(net_transport))
        print 'Will scale transports by ' + str(scale_factor)
        # Calculate corresponding velocity correction at each month
        correction = np.zeros(num_time)
        for t in range(num_time):
            correction[t] = (scale_factor-1)*net_transport[t]/total_area
            print 'Month ' + str(t+1) + ': will apply correction of ' + str(correction[t]) + ' m/s to normal velocity at each boundary'
    else:
        # Calculate single correction in m/s
        correction = -1*net_transport/total_area
        print 'Will apply correction of ' + str(correction) + ' m/s to normal velocity at each boundary'

    # Now apply the correction
    for i in range(len(files)):
        if files[i] is not None:
            print 'Correcting ' + files[i]
            # Read all the data again
            vel = read_binary(files[i], [grid.nx, grid.ny, grid.nz], dimensions[i], prec=prec)
            # Apply the correction
            if option == 'dampen':
                for t in range(num_time):
                    vel[t,:] += sign[i]*correction[t]
            else:
                vel += sign[i]*correction
            # Overwrite the file
            write_binary(vel, files[i], prec=prec)

    if option in ['balance', 'dampen']:
        # Recalculate the transport to make sure it worked
        if option == 'balance':
            net_transport_new = 0
        elif option == 'dampen':
            net_transport_new = np.zeros(num_time)
        for i in range(len(files)):
            if files[i] is not None:
                vel = read_binary(files[i], [grid.nx, grid.ny, grid.nz], dimensions[i], prec=prec)
                if option == 'balance':
                    vel = np.mean(vel, axis=0)
                    net_transport_new += np.sum(sign[i]*vel*dA_bdry[i])
                elif option == 'dampen':
                    for t in range(num_time):
                        net_transport_new[t] += np.sum(sign[i]*vel[t,:]*dA_bdry[i])
        if option == 'balance':
            print_net_transport(net_transport_new)
        elif option == 'dampen':
            for t in range(num_time):
                print 'Month ' + str(t+1)
                print_net_transport(net_transport_new[t])
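
A hypothetical call, balancing the net transport through western and northern boundaries (file names are illustrative):

balance_obcs('../grid/', option='balance',
             obcs_file_w_u='UVEL_SOSE.OBCS_W',
             obcs_file_n_v='VVEL_SOSE.OBCS_N')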
Example #22
def pace_atm_forcing (var, ens, in_dir, out_dir):

    import netCDF4 as nc
    start_year = 1920
    end_year = 2013
    days_per_year = 365
    months_per_year = 12
    ens_str = str(ens).zfill(2)

    if var not in ['TREFHT', 'QBOT', 'PSL', 'UBOT', 'VBOT', 'PRECT', 'FLDS', 'FSDS']:
        print 'Error (pace_atm_forcing): Invalid variable ' + var
        sys.exit()

    path = real_dir(in_dir)
    # Decide if monthly or daily data
    monthly = var in ['FLDS', 'FSDS']
    if monthly:        
        path += 'monthly/'
    else:
        path += 'daily/'
    path += var + '/'

    for year in range(start_year, end_year+1):
        print 'Processing ' + str(year)
        # Construct the file based on the year (after 2006 use RCP 8.5) and whether it's monthly or daily
        if year < 2006:
            file_head = 'b.e11.B20TRLENS.f09_g16.SST.restoring.ens'
            if monthly:
                file_tail = '.192001-200512.nc'
            else:
                file_tail = '.19200101-20051231.nc'
        else:
            file_head = 'b.e11.BRCP85LENS.f09_g16.SST.restoring.ens'
            if monthly:
                file_tail = '.200601-201312.nc'
            else:
                file_tail = '.20060101-20131231.nc'
        if monthly:
            file_mid = '.cam.h0.'
        else:
            file_mid = '.cam.h1.'
        file_path = path + file_head + ens_str + file_mid + var + file_tail
        # Choose time indices
        if monthly:
            per_year = months_per_year
        else:
            per_year = days_per_year
        t_start = (year-start_year)*per_year
        t_end = t_start + per_year
        print 'Reading indices ' + str(t_start) + '-' + str(t_end-1)
        # Read data
        data = read_netcdf(file_path, var, t_start=t_start, t_end=t_end)
        # Unit conversions
        if var in ['FLDS', 'FSDS']:
            # Swap sign
            data *= -1
        elif var == 'TREFHT':
            # Convert from K to C
            data -= temp_C2K
        elif var == 'QBOT':
            # Convert from mixing ratio (w) to specific humidity: q = w/(1+w)
            data = data/(1.0 + data)
        # Write data
        out_file = real_dir(out_dir) + 'PACE_ens' + ens_str + '_' + var + '_' + str(year)
        write_binary(data, out_file)