Example #1
def fix_eraint_humidity (in_dir, out_dir, prec=32):

    in_dir = real_dir(in_dir)
    out_dir = real_dir(out_dir)

    # File paths
    in_head = in_dir + 'era_a_'
    in_tail = '_075.nc'
    out_head = out_dir + 'ERAinterim_spfh2m_'
    start_year = 1979
    end_year = 2017

    for year in range(start_year, end_year+1):
        in_file = in_head + str(year) + in_tail
        print 'Reading ' + in_file
        # Need temperature, pressure, and dew point
        temp = read_netcdf(in_file, 't2m')
        press = read_netcdf(in_file, 'msl')
        dewpoint = read_netcdf(in_file, 'd2m')
        # Calculate vapour pressure
        e = es0*np.exp(Lv/Rv*(1/temp - 1/dewpoint))
        # Calculate specific humidity
        spf = sh_coeff*e/(press - (1-sh_coeff)*e)
        # Now flip in latitude to match Matlab-generated files
        spf = spf[:,::-1,:]
        out_file = out_head + str(year)
        write_binary(spf, out_file, prec=prec)
        if year == end_year:
            # Copy the last timestep as in era_dummy_year
            spf_last = spf[-1,:]
            out_file = out_head + str(year+1)
            write_binary(spf_last, out_file, prec=prec)
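The specific-humidity step uses the standard relation q = εe/(p − (1 − ε)e), where ε = Rd/Rv ≈ 0.622 is presumably the value of sh_coeff; the constants and helpers (es0, Lv, Rv, real_dir, read_netcdf, write_binary) are assumed to come from the same codebase. A hypothetical invocation with placeholder directories:

fix_eraint_humidity('era_interim/', 'mitgcm_forcing/')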
Example #2
def make_climatology (start_year, end_year, output_file, directory='./'):
    
    directory = real_dir(directory)

    # Copy the first file
    # This will make a skeleton file with 12 time records and all the right metadata; later we will overwrite the values of all the time-dependent variables.
    print 'Setting up ' + output_file
    shutil.copyfile(directory+str(start_year)+'.nc', output_file)

    # Find all the time-dependent variables
    var_names = time_dependent_variables(output_file)

    # Calculate the monthly climatology for each variable
    id_out = nc.Dataset(output_file, 'a')
    for var in var_names:
        print 'Processing ' + var

        # Start with the first year
        print '...' + str(start_year)
        data = id_out.variables[var][:]
        
        # Add subsequent years
        for year in range(start_year+1, end_year+1):
            print '...' + str(year)
            data += read_netcdf(directory+str(year)+'.nc', var)

        # Divide by number of years to get average
        data /= (end_year-start_year+1)
        # Overwrite in output_file
        id_out.variables[var][:] = data

    id_out.close()
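A hypothetical call, assuming the directory holds one file per year (1979.nc, 1980.nc, ...), each with the 12 monthly records the skeleton-copy step requires:

make_climatology(1979, 2017, 'climatology.nc', directory='annual_means/')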
Example #3
def mit_ics (grid_path, source_file, output_dir, nc_out=None, prec=64):

    from file_io import NCfile, read_netcdf
    from interpolation import interp_reg

    output_dir = real_dir(output_dir)

    # Fields to interpolate
    fields = ['THETA', 'SALT', 'SIarea', 'SIheff', 'SIhsnow']
    # Flag for 2D or 3D
    dim = [3, 3, 2, 2, 2]
    # End of filenames for output
    outfile_tail = '_MIT.ini'

    print 'Building grids'
    source_grid = Grid(source_file)
    model_grid = Grid(grid_path)
    # Extract land mask of source grid
    source_mask = source_grid.hfac==0

    print 'Building mask for points to fill'
    # Select open cells according to the model, interpolated to the source grid
    fill = np.ceil(interp_reg(model_grid, source_grid, np.ceil(model_grid.hfac), fill_value=0)).astype(bool)
    # Extend into mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, use_3d=True, num_iters=3)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyz')

    # Process fields
    for n in range(len(fields)):
        print 'Processing ' + fields[n]
        out_file = output_dir + fields[n] + outfile_tail
        # Read the January climatology
        source_data = read_netcdf(source_file, fields[n], time_index=0)
        # Discard the land mask, and extrapolate slightly into missing regions so the interpolation doesn't get messed up.
        print '...extrapolating into missing regions'
        if dim[n] == 3:
            source_data = discard_and_fill(source_data, source_mask, fill)
        else:
            # Just care about the surface layer
            source_data = discard_and_fill(source_data, source_mask[0,:], fill[0,:], use_3d=False)
        print '...interpolating to model grid'
        data_interp = interp_reg(source_grid, model_grid, source_data, dim=dim[n])
        # Fill the land mask with zeros
        if dim[n] == 3:
            data_interp[model_grid.hfac==0] = 0
        else:
            data_interp[model_grid.hfac[0,:]==0] = 0
        write_binary(data_interp, out_file, prec=prec)
        if nc_out is not None:
            print '...adding to ' + nc_out
            if dim[n] == 3:
                ncfile.add_variable(fields[n], data_interp, 'xyz')
            else:
                ncfile.add_variable(fields[n], data_interp, 'xy')

    if nc_out is not None:
        ncfile.close()    
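A sketch of a call with placeholder paths; Grid, interp_reg, discard_and_fill, extend_into_mask and NCfile are helpers from the same codebase:

mit_ics('../grid/', 'sose_climatology.nc', 'ics/', nc_out='ics_check.nc')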
Example #4
def era_dummy_year (bin_dir, last_year, option='era5', nlon=None, nlat=None, out_dir=None, prec=32):

    bin_dir = real_dir(bin_dir)
    if out_dir is None:
        out_dir = bin_dir
        
    if nlon is None:
        if option == 'era5':
            nlon = 1440
        elif option == 'eraint':
            nlon = 480
        else:
            print 'Error (era_dummy_year): invalid option ' + option
            sys.exit()
    if nlat is None:
        # The same for both cases, assuming ERA5 was cut off at 30S
        nlat = 241

    # Figure out the file paths
    if option == 'era5':
        var_names = ['apressure', 'atemp', 'aqh', 'uwind', 'vwind', 'precip', 'swdown', 'lwdown', 'evap']
        file_head = 'ERA5_'
    elif option == 'eraint':
        var_names = ['msl', 'tmp2m_degC', 'spfh2m', 'u10m', 'v10m', 'rain', 'dsw', 'dlw']
        file_head = 'ERAinterim_'            

    for var in var_names:
        file_in = bin_dir + file_head + var + '_' + str(last_year)
        # Select the last time index
        data = read_binary(file_in, [nlon, nlat], 'xyt', prec=prec)[-1,:]
        file_out = out_dir + file_head + var + '_' + str(last_year+1)
        write_binary(data, file_out, prec=prec)
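For instance, to generate a dummy year 2018 from ERA5 files ending in 2017 (placeholder directory):

era_dummy_year('era5_bin/', 2017, option='era5')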
Example #5
def crash_to_netcdf (crash_dir, grid_path):

    from MITgcmutils import rdmds

    # Make sure crash_dir is a proper directory
    crash_dir = real_dir(crash_dir)

    # Read the grid
    grid = Grid(grid_path)
    # Initialise the NetCDF file
    ncfile = NCfile(crash_dir+'crash.nc', grid, 'xyz')

    # Find all the crash files
    for file in os.listdir(crash_dir):
        if file.startswith('stateThetacrash') and file.endswith('.data'):
            # Found temperature
            # Read it from binary
            temp = rdmds(crash_dir + file.replace('.data', ''))
            # Write it to NetCDF
            ncfile.add_variable('THETA', temp, 'xyz', units='C')
        if file.startswith('stateSaltcrash') and file.endswith('.data'):
            salt = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SALT', salt, 'xyz', units='psu')
        if file.startswith('stateUvelcrash') and file.endswith('.data'):
            u = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('UVEL', u, 'xyz', gtype='u', units='m/s')
        if file.startswith('stateVvelcrash') and file.endswith('.data'):
            v = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('VVEL', v, 'xyz', gtype='v', units='m/s')
        if file.startswith('stateWvelcrash') and file.endswith('.data'):
            w = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('WVEL', w, 'xyz', gtype='w', units='m/s')
        if file.startswith('stateEtacrash') and file.endswith('.data'):
            eta = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('ETAN', eta, 'xy', units='m')
        if file.startswith('stateAreacrash') and file.endswith('.data'):
            area = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIarea', area, 'xy', units='fraction')
        if file.startswith('stateHeffcrash') and file.endswith('.data'):
            heff = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIheff', heff, 'xy', units='m')
        if file.startswith('stateUicecrash') and file.endswith('.data'):
            uice = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIuice', uice, 'xy', gtype='u', units='m/s')
        if file.startswith('stateVicecrash') and file.endswith('.data'):
            vice = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('SIvice', vice, 'xy', gtype='v', units='m/s')
        if file.startswith('stateQnetcrash') and file.endswith('.data'):
            qnet = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('Qnet', qnet, 'xy', units='W/m^2')
        if file.startswith('stateMxlcrash') and file.endswith('.data'):
            mld = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('MXLDEPTH', mld, 'xy', units='m')
        if file.startswith('stateEmpmrcrash') and file.endswith('.data'):
            empmr = rdmds(crash_dir + file.replace('.data', ''))
            ncfile.add_variable('Empmr', empmr, 'xy', units='kg/m^2/s')

    ncfile.close()
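Hypothetical usage, pointing at a run directory containing the state*crash*.data dumps and at the grid directory:

crash_to_netcdf('../run_crashed/', '../grid/')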
Example #6
def iceberg_meltwater(grid_path, input_dir, output_file, nc_out=None, prec=32):

    from plot_latlon import latlon_plot

    input_dir = real_dir(input_dir)
    file_head = 'icebergs_'
    file_tail = '.nc'

    print 'Building grids'
    # Read the NEMO grid from the first file
    # It has longitude in the range -180 to 180
    file_path = input_dir + file_head + '01' + file_tail
    nemo_lon = read_netcdf(file_path, 'nav_lon')
    nemo_lat = read_netcdf(file_path, 'nav_lat')
    # Build the model grid
    model_grid = Grid(grid_path, max_lon=180)

    print 'Interpolating'
    icebergs_interp = np.zeros([12, model_grid.ny, model_grid.nx])
    for month in range(12):
        print '...month ' + str(month + 1)
        # Read the data
        file_path = input_dir + file_head + '{0:02d}'.format(month+1) + file_tail
        icebergs = read_netcdf(file_path, 'berg_total_melt', time_index=0)
        # Interpolate
        icebergs_interp_tmp = interp_nonreg_xy(nemo_lon,
                                               nemo_lat,
                                               icebergs,
                                               model_grid.lon_1d,
                                               model_grid.lat_1d,
                                               fill_value=0)
        # Make sure the land and ice shelf cavities don't get any iceberg melt
        icebergs_interp_tmp[model_grid.land_mask + model_grid.ice_mask] = 0
        # Save to the master array
        icebergs_interp[month, :] = icebergs_interp_tmp

    write_binary(icebergs_interp, output_file, prec=prec)

    print 'Plotting'
    # Make a nice plot of the annual mean
    latlon_plot(mask_land_ice(np.mean(icebergs_interp, axis=0), model_grid),
                model_grid,
                include_shelf=False,
                vmin=0,
                title=r'Annual mean iceberg melt (kg/m$^2$/s)')
    if nc_out is not None:
        # Also write to NetCDF file
        print 'Writing ' + nc_out
        ncfile = NCfile(nc_out, model_grid, 'xyt')
        ncfile.add_time(np.arange(12) + 1, units='months')
        ncfile.add_variable('iceberg_melt',
                            icebergs_interp,
                            'xyt',
                            units='kg/m^2/s')
        ncfile.close()
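A hypothetical call; the input directory must contain icebergs_01.nc through icebergs_12.nc, following the file_head/file_tail convention above:

iceberg_meltwater('../grid/', 'nemo_icebergs/', 'iceberg_meltwater.bin', nc_out='iceberg_meltwater.nc')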
Example #7
def find_cmip6_files(model_path, expt, ensemble_member, var, time_code):

    # Construct the path to the directory containing all the data files, and make sure it exists
    in_dir = real_dir(model_path) + expt + '/' + ensemble_member + '/' + time_code + '/' + var + '/gn/latest/'
    if not os.path.isdir(in_dir):
        print 'Error (find_cmip6_files): no such directory ' + in_dir
        sys.exit()

    # Get the names of all the data files in this directory, in chronological order
    in_files = []
    for fname in os.listdir(in_dir):
        if fname.endswith('.nc'):
            in_files.append(in_dir + fname)
    in_files.sort()

    # Work out the start and end years for each file
    start_years = []
    end_years = []
    for file_path in in_files:
        # Dates encoded in file names
        if time_code.endswith('day'):
            start_date = file_path[-20:-12]
            end_date = file_path[-11:-3]
        elif time_code.endswith('mon'):
            start_date = file_path[-16:-10]
            end_date = file_path[-9:-3]
        start_year = start_date[:4]
        end_year = end_date[:4]
        # Make sure they are 30-day months and complete years
        if (time_code.endswith('day')
                and start_date[4:] != '0101') or (time_code.endswith('mon')
                                                  and start_date[4:] != '01'):
            print 'Error (find_cmip6_files): ' + file_path + ' does not start at the beginning of January'
            sys.exit()
        if (time_code.endswith('day')
                and end_date[4:] != '1230') or (time_code.endswith('mon')
                                                and end_date[4:] != '12'):
            print 'Error (find_cmip6_files): ' + file_path + ' does not end at the end of December'
            sys.exit()
        # Save the start and end years
        start_years.append(int(start_year))
        end_years.append(int(end_year))
    # Now make sure there are no missing years
    for t in range(1, len(in_files)):
        if start_years[t] != end_years[t - 1] + 1:
            print 'Error (find_cmip6_files): there are missing years in ' + in_dir
            sys.exit()

    return in_files, start_years, end_years
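Example #12 below calls this helper with time_code='day'; a standalone sketch with a hypothetical experiment name:

in_files, start_years, end_years = find_cmip6_files(
    '/badc/cmip6/data/CMIP6/CMIP/MOHC/UKESM1-0-LL/', 'historical', 'r1i1p1f2', 'tas', 'day')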
Example #8
def plot_seaice_annual (file_path, grid_path='../grid/', fig_dir='.', monthly=True):

    fig_dir = real_dir(fig_dir)

    grid = Grid(grid_path)

    time = netcdf_time(file_path, monthly=monthly)
    first_year = time[0].year
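    # Only plot years that contain both the summer sea ice minimum (February) and the winter maximum (~August)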
    if time[0].month > 2:
        first_year += 1
    last_year = time[-1].year
    if time[-1].month < 8:
        last_year -= 1
    for year in range(first_year, last_year+1):
        plot_aice_minmax(file_path, year, grid=grid, fig_name=fig_dir+'aice_minmax_'+str(year)+'.png')
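Hypothetical usage, with placeholder paths:

plot_seaice_annual('output/output.nc', grid_path='../grid/', fig_dir='figures/')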
Example #9
def precompute_timeseries_coupled (output_dir='./', timeseries_file='timeseries.nc', file_name='output.nc', segment_dir=None, timeseries_types=None, key='WSFRIS'):

    if timeseries_types is None:
        if key == 'WSFRIS':
            timeseries_types = ['fris_mass_balance', 'hice_corner', 'mld_ewed', 'fris_temp', 'fris_salt', 'ocean_vol', 'eta_avg', 'seaice_area']
        elif key == 'FRIS':
            timeseries_types = ['fris_mass_balance', 'fris_temp', 'fris_salt', 'ocean_vol', 'eta_avg', 'seaice_area']

    output_dir = real_dir(output_dir)

    if segment_dir is None and os.path.isfile(timeseries_file):
        print 'Error (precompute_timeseries_coupled): since ' + timeseries_file + ' exists, you must specify segment_dir'
        sys.exit()
    segment_dir = check_segment_dir(output_dir, segment_dir)
    file_paths = segment_file_paths(output_dir, segment_dir, file_name)

    # Call precompute_timeseries for each segment
    for file_path in file_paths:
        print 'Processing ' + file_path
        precompute_timeseries(file_path, timeseries_file, timeseries_types=timeseries_types, monthly=True)
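A hypothetical first call; since no timeseries.nc exists yet, segment_dir may be omitted (only the combination of an existing timeseries file and no segment_dir triggers the error above):

precompute_timeseries_coupled(output_dir='../output/', key='FRIS')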
Example #10
def sose_sss_restoring (grid_path, sose_dir, output_salt_file, output_mask_file, nc_out=None, h0=-1250, obcs_sponge=0, split=180, prec=64):

    sose_dir = real_dir(sose_dir)

    print 'Building grids'
    # First build the model grid and check that we have the right value for split
    model_grid = grid_check_split(grid_path, split)
    # Now build the SOSE grid
    sose_grid = SOSEGrid(sose_dir+'grid/', model_grid=model_grid, split=split)
    # Extract surface land mask
    sose_mask = sose_grid.hfac[0,:] == 0

    print 'Building mask'
    mask_surface = np.ones([model_grid.ny, model_grid.nx])
    # Mask out land and ice shelves
    mask_surface[model_grid.hfac[0,:]==0] = 0
    # Save this for later
    mask_land_ice = np.copy(mask_surface)
    # Mask out continental shelf
    mask_surface[model_grid.bathy > h0] = 0
    # Smooth, and remask the land and ice shelves
    mask_surface = smooth_xy(mask_surface, sigma=2)*mask_land_ice
    if obcs_sponge > 0:
        # Also mask the cells affected by OBCS and/or its sponge
        mask_surface[:obcs_sponge,:] = 0
        mask_surface[-obcs_sponge:,:] = 0
        mask_surface[:,:obcs_sponge] = 0
        mask_surface[:,-obcs_sponge:] = 0
    # Make a 3D version with zeros in deeper layers
    mask_3d = np.zeros([model_grid.nz, model_grid.ny, model_grid.nx])
    mask_3d[0,:] = mask_surface
    
    print 'Reading SOSE salinity'
    # Just keep the surface layer
    sose_sss = sose_grid.read_field(sose_dir+'SALT_climatology.data', 'xyzt')[:,0,:,:]
    
    # Figure out which SOSE points we need for interpolation
    # Restoring mask interpolated to the SOSE grid
    fill = np.ceil(interp_reg(model_grid, sose_grid, mask_3d[0,:], dim=2, fill_value=1))
    # Extend into the mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, num_iters=3)

    # Process one month at a time
    sss_interp = np.zeros([12, model_grid.nz, model_grid.ny, model_grid.nx])
    for month in range(12):
        print 'Month ' + str(month+1)
        print '...filling missing values'
        sose_sss_filled = discard_and_fill(sose_sss[month,:], sose_mask, fill, use_3d=False)
        print '...interpolating'
        # Mask out land and ice shelves
        sss_interp[month,0,:] = interp_reg(sose_grid, model_grid, sose_sss_filled, dim=2)*mask_land_ice

    write_binary(sss_interp, output_salt_file, prec=prec)
    write_binary(mask_3d, output_mask_file, prec=prec)

    if nc_out is not None:
        print 'Writing ' + nc_out
        ncfile = NCfile(nc_out, model_grid, 'xyzt')
        ncfile.add_time(np.arange(12)+1, units='months')
        ncfile.add_variable('salinity', sss_interp, 'xyzt', units='psu')
        ncfile.add_variable('restoring_mask', mask_3d, 'xyz')
        ncfile.close()
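A hypothetical call; sose_dir must contain grid/ and SALT_climatology.data as read above:

sose_sss_restoring('../grid/', 'sose/', 'sss_restoring.bin', 'restoring_mask.bin', nc_out='restoring_check.nc')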
Example #11
def pace_atm_forcing (var, ens, in_dir, out_dir):

    import netCDF4 as nc
    start_year = 1920
    end_year = 2013
    days_per_year = 365
    months_per_year = 12
    ens_str = str(ens).zfill(2)

    if var not in ['TREFHT', 'QBOT', 'PSL', 'UBOT', 'VBOT', 'PRECT', 'FLDS', 'FSDS']:
        print 'Error (pace_atm_forcing): Invalid variable ' + var
        sys.exit()

    path = real_dir(in_dir)
    # Decide if monthly or daily data
    monthly = var in ['FLDS', 'FSDS']
    if monthly:        
        path += 'monthly/'
    else:
        path += 'daily/'
    path += var + '/'

    for year in range(start_year, end_year+1):
        print 'Processing ' + str(year)
        # Construct the file based on the year (after 2006 use RCP 8.5) and whether it's monthly or daily
        if year < 2006:
            file_head = 'b.e11.B20TRLENS.f09_g16.SST.restoring.ens'
            if monthly:
                file_tail = '.192001-200512.nc'
            else:
                file_tail = '.19200101-20051231.nc'
        else:
            file_head = 'b.e11.BRCP85LENS.f09_g16.SST.restoring.ens'
            if monthly:
                file_tail = '.200601-201312.nc'
            else:
                file_tail = '.20060101-20131231.nc'
        if monthly:
            file_mid = '.cam.h0.'
        else:
            file_mid = '.cam.h1.'
        file_path = path + file_head + ens_str + file_mid + var + file_tail
        # Choose time indices
        if monthly:
            per_year = months_per_year
        else:
            per_year = days_per_year
        t_start = (year-start_year)*per_year
        t_end = t_start + per_year
        print 'Reading indices ' + str(t_start) + '-' + str(t_end-1)
        # Read data
        data = read_netcdf(file_path, var, t_start=t_start, t_end=t_end)
        # Unit conversions
        if var in ['FLDS', 'FSDS']:
            # Swap sign
            data *= -1
        elif var == 'TREFHT':
            # Convert from K to C
            data -= temp_C2K
        elif var == 'QBOT':
            # Convert from mixing ratio to specific humidity
            data = data/(1.0 + data)
        # Write data
        out_file = real_dir(out_dir) + 'PACE_ens' + ens_str + '_' + var + '_' + str(year)
        write_binary(data, out_file)    
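Hypothetical usage for ensemble member 2 (placeholder directories):

pace_atm_forcing('TREFHT', 2, 'pace_raw/', 'pace_forcing/')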
Example #12
def cmip6_atm_forcing (var, expt, mit_start_year=None, mit_end_year=None, model_path='/badc/cmip6/data/CMIP6/CMIP/MOHC/UKESM1-0-LL/', ensemble_member='r1i1p1f2', out_dir='./', out_file_head=None):

    import netCDF4 as nc

    # Days per year (assumes 30-day months)
    days_per_year = 12*30

    # Make sure it's a real variable
    if var not in ['tas', 'huss', 'uas', 'vas', 'psl', 'pr', 'rsds', 'rlds']:
        print 'Error (cmip6_atm_forcing): unknown variable ' + var
        sys.exit()

    # Construct out_file_head if needed
    if out_file_head is None:
        out_file_head = expt+'_'+var+'_'
    elif out_file_head[-1] != '_':
        # Add an underscore if it's not already there
        out_file_head += '_'
    out_dir = real_dir(out_dir)

    # Figure out where all the files are, and which years they cover
    in_files, start_years, end_years = find_cmip6_files(model_path, expt, ensemble_member, var, 'day')
    if mit_start_year is None:
        mit_start_year = start_years[0]
    if mit_end_year is None:
        mit_end_year = end_years[-1]

    # Tell the user what to write about the grid
    lat = read_netcdf(in_files[0], 'lat')
    lon = read_netcdf(in_files[0], 'lon') 
    print '\nChanges to make in data.exf:'
    print '*_lon0='+str(lon[0])
    print '*_lon_inc='+str(lon[1]-lon[0])
    print '*_lat0='+str(lat[0])
    print '*_lat_inc='+str(lat[1]-lat[0])
    print '*_nlon='+str(lon.size)
    print '*_nlat='+str(lat.size)

    # Loop over each file
    for t in range(len(in_files)):

        file_path = in_files[t]
        print 'Processing ' + file_path        
        print 'Covers years '+str(start_years[t])+' to '+str(end_years[t])
        
        # Loop over years
        t_start = 0  # Time index in file
        t_end = t_start+days_per_year
        for year in range(start_years[t], end_years[t]+1):
            if year >= mit_start_year and year <= mit_end_year:
                print 'Processing ' + str(year)

                # Read data
                print 'Reading ' + str(year) + ' from indices ' + str(t_start) + '-' + str(t_end-1)
                data = read_netcdf(file_path, var, t_start=t_start, t_end=t_end)
                # Conversions if necessary
                if var == 'tas':
                    # Kelvin to Celsius
                    data -= temp_C2K
                elif var == 'pr':
                    # kg/m^2/s to m/s
                    data /= rho_fw
                elif var in ['rsds', 'rlds']:
                    # Swap sign on radiation fluxes
                    data *= -1
                # Write data
                write_binary(data, out_dir+out_file_head+str(year))
            # Update time range for next time
            t_start = t_end
            t_end = t_start + days_per_year
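A hypothetical invocation relying on the default UKESM1-0-LL path and ensemble member:

cmip6_atm_forcing('tas', 'historical', mit_start_year=1979, mit_end_year=2014, out_dir='cmip6_forcing/')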
Example #13
def plot_everything (output_dir='./', timeseries_file='timeseries.nc', grid_path=None, fig_dir='.', file_path=None, monthly=True, date_string=None, time_index=-1, time_average=True, unravelled=False, key='WSFRIS', hovmoller_file='hovmoller.nc'):

    if time_average:
        time_index = None

    # Make sure proper directories
    output_dir = real_dir(output_dir)
    fig_dir = real_dir(fig_dir)
    
    # Build the list of output files in this directory (use them all for timeseries)
    if key in ['WSFRIS', 'FRIS']:
        # Coupled
        segment_dir = get_segment_dir(output_dir)
        output_files = segment_file_paths(output_dir, segment_dir, 'output.nc')
    else:
        # Uncoupled
        output_files = build_file_list(output_dir, unravelled=unravelled)
    if file_path is None:
        # Select the last file for single-timestep analysis
        file_path = output_files[-1]        

    # Build the grid
    if grid_path is None:
        grid_path = file_path
    grid = Grid(grid_path)

    # Timeseries
    if key == 'WSS':
        var_names = ['fris_mass_balance', 'eta_avg', 'seaice_area', 'fris_temp', 'fris_salt', 'fris_age']
    elif key == 'WSK':
        var_names = ['fris_mass_balance', 'hice_corner', 'mld_ewed', 'eta_avg', 'seaice_area', 'fris_temp', 'fris_salt']
    elif key == 'WSFRIS':
        var_names = ['fris_mass_balance', 'hice_corner', 'mld_ewed', 'fris_temp', 'fris_salt', 'ocean_vol', 'eta_avg', 'seaice_area']
    elif key == 'FRIS':
        var_names = ['fris_mass_balance', 'fris_temp', 'fris_salt', 'ocean_vol', 'eta_avg', 'seaice_area']
    elif key == 'PAS':
        var_names = ['all_massloss', 'eta_avg', 'seaice_area']
    for var in var_names:
        read_plot_timeseries(var, output_dir+timeseries_file, precomputed=True, fig_name=fig_dir+'timeseries_'+var+'.png', monthly=monthly)

    # Hovmoller plots
    if key == 'PAS':
        for loc in ['PIB', 'Dot']:
            read_plot_hovmoller_ts(hovmoller_file, loc, grid, tmax=1.5, smin=34, t_contours=[0,1], s_contours=[34.5, 34.7], fig_name=fig_dir+'hovmoller_ts_'+loc+'.png', monthly=monthly)

    # Lat-lon plots
    var_names = ['ismr', 'bwtemp', 'bwsalt', 'sst', 'sss', 'aice', 'hice', 'eta', 'vel', 'velice']
    if key in ['WSS', 'WSK', 'FRIS', 'WSFRIS']:
        var_names += ['hsnow', 'mld', 'saltflx', 'psi', 'iceprod']
        if key in ['WSS', 'WSK']:
            var_names += ['bwage']
    for var in var_names:
        # Customise bounds and zooming
        vmin = None
        vmax = None
        zoom_fris = False
        ymax = None
        chunk = None
        fig_name = fig_dir + var + '.png'
        if key == 'PAS' and var in ['bwsalt', 'bwtemp', 'hice', 'ismr', 'vel', 'velice']:
            ymax = -70
        if var == 'bwtemp':
            if key == 'WSS':
                vmin = -2.5
                vmax = -1.5
            elif key in ['WSK', 'WSFRIS']:
                vmax = 1
        if var == 'bwsalt':
            if key == 'PAS':
                vmin = 34.1
            else:
                vmin = 34.3
        if var == 'bwage':
            vmin = 0
            if key == 'WSS':
                vmax = 12
        if var == 'eta':
            vmin = -2.5
        if var == 'hice':
            if key == 'PAS':
                vmax = 2
            else:
                vmax = 4
        if var == 'saltflx':
            vmin = -0.001
            vmax = 0.001
        if var == 'iceprod':
            vmin = 0
            vmax = 5
        if var == 'psi' and key in ['WSS', 'FRIS']:
            vmin = -0.5
            vmax = 0.5
        if var in ['vel', 'velice'] and key=='WSS':
            chunk = 6
        if not zoom_fris and key in ['WSK', 'WSFRIS']:
            figsize = (10,6)
        elif key == 'PAS':
            if ymax == -70:
                figsize = (14,5)
            else:
                figsize = (12,6)
        else:
            figsize = (8,6)
        # Plot
        read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, vmin=vmin, vmax=vmax, zoom_fris=zoom_fris, ymax=ymax, fig_name=fig_name, date_string=date_string, figsize=figsize, chunk=chunk)
        # Make additional plots if needed
        if key in ['WSK', 'WSFRIS'] and var in ['ismr', 'vel', 'bwtemp', 'bwsalt', 'psi', 'bwage']:
            # Make another plot zoomed into FRIS
            figsize = (8,6)
            # First adjust bounds
            if var == 'bwtemp':
                vmax = -1.5
            if var == 'bwage':
                vmax = 10
            if var == 'psi':
                vmax = 0.5
            read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, vmin=vmin, vmax=vmax, zoom_fris=True, fig_name=fig_dir+var+'_zoom.png', date_string=date_string, figsize=figsize)
        if var == 'vel':
            # Call the other options for vertical transformations
            if key in ['WSK', 'WSFRIS']:
                figsize = (10,6)
            for vel_option in ['sfc', 'bottom']:
                read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, vel_option=vel_option, vmin=vmin, vmax=vmax, zoom_fris=zoom_fris, ymax=ymax, fig_name=fig_dir+var+'_'+vel_option+'.png', date_string=date_string, figsize=figsize, chunk=chunk)
        if var in ['eta', 'hice']:
            # Make another plot with unbounded colour bar
            read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, zoom_fris=zoom_fris, ymax=ymax, fig_name=fig_dir + var + '_unbound.png', date_string=date_string, figsize=figsize)

    # Slice plots
    if key in ['WSK', 'WSS', 'WSFRIS', 'FRIS']:
        read_plot_ts_slice(file_path, grid=grid, lon0=-40, hmax=-75, zmin=-1450, time_index=time_index, time_average=time_average, fig_name=fig_dir+'ts_slice_filchner.png', date_string=date_string)
        read_plot_ts_slice(file_path, grid=grid, lon0=-55, hmax=-72, time_index=time_index, time_average=time_average, fig_name=fig_dir+'ts_slice_ronne.png', date_string=date_string)
    if key in ['WSK', 'WSFRIS']:
        read_plot_ts_slice(file_path, grid=grid, lon0=0, time_index=time_index, time_average=time_average, fig_name=fig_dir+'ts_slice_eweddell.png', date_string=date_string)
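Hypothetical usage for a coupled FRIS simulation, plotting the time-average of the last output file:

plot_everything(output_dir='../output/', fig_dir='figures/', key='FRIS')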
Example #14
    def __init__(self, path, x_is_lon=True, max_lon=None):

        if path.endswith('.nc'):
            use_netcdf = True
        elif os.path.isdir(path):
            use_netcdf = False
            path = real_dir(path)
            from MITgcmutils import rdmds
        else:
            print 'Error (Grid): ' + path + ' is neither a NetCDF file nor a directory'
            sys.exit()

        # Read variables
        # Note that some variables are capitalised differently in NetCDF versus binary, so can't make this more efficient...
        if use_netcdf:
            self.lon_2d = read_netcdf(path, 'XC')
            self.lat_2d = read_netcdf(path, 'YC')
            self.lon_corners_2d = read_netcdf(path, 'XG')
            self.lat_corners_2d = read_netcdf(path, 'YG')
            self.dx_s = read_netcdf(path, 'dxG')
            self.dy_w = read_netcdf(path, 'dyG')
            # I have no idea why this requires .data but it does, otherwise WSS breaks (?!?!)
            self.dA = read_netcdf(path, 'rA').data
            self.z = read_netcdf(path, 'Z')
            self.z_edges = read_netcdf(path, 'Zp1')
            self.dz = read_netcdf(path, 'drF')
            self.dz_t = read_netcdf(path, 'drC')
            self.hfac = read_netcdf(path, 'hFacC')
            self.hfac_w = read_netcdf(path, 'hFacW')
            self.hfac_s = read_netcdf(path, 'hFacS')
        else:
            self.lon_2d = rdmds(path + 'XC')
            self.lat_2d = rdmds(path + 'YC')
            self.lon_corners_2d = rdmds(path + 'XG')
            self.lat_corners_2d = rdmds(path + 'YG')
            self.dx_s = rdmds(path + 'DXG')
            self.dy_w = rdmds(path + 'DYG')
            self.dA = rdmds(path + 'RAC')
            # Remove singleton dimensions from 1D depth variables
            self.z = rdmds(path + 'RC').squeeze()
            self.z_edges = rdmds(path + 'RF').squeeze()
            self.dz = rdmds(path + 'DRF').squeeze()
            self.dz_t = rdmds(path + 'DRC').squeeze()
            self.hfac = rdmds(path + 'hFacC')
            self.hfac_w = rdmds(path + 'hFacW')
            self.hfac_s = rdmds(path + 'hFacS')

        # Make 1D versions of latitude and longitude arrays (only useful for regular lat-lon grids)
        if len(self.lon_2d.shape) == 2:
            self.lon_1d = self.lon_2d[0, :]
            self.lat_1d = self.lat_2d[:, 0]
            self.lon_corners_1d = self.lon_corners_2d[0, :]
            self.lat_corners_1d = self.lat_corners_2d[:, 0]
        elif len(self.lon_2d.shape) == 1:
            # xmitgcm output has these variables as 1D already. So make 2D ones.
            self.lon_1d = np.copy(self.lon_2d)
            self.lat_1d = np.copy(self.lat_2d)
            self.lon_corners_1d = np.copy(self.lon_corners_2d)
            self.lat_corners_1d = np.copy(self.lat_corners_2d)
            self.lon_2d, self.lat_2d = np.meshgrid(self.lon_1d, self.lat_1d)
            self.lon_corners_2d, self.lat_corners_2d = np.meshgrid(
                self.lon_corners_1d, self.lat_corners_1d)

        # Decide on longitude range
        if max_lon is None and x_is_lon:
            # Choose range automatically
            if np.amin(self.lon_1d) < 180 and np.amax(self.lon_1d) > 180:
                # Domain crosses 180E, so use the range (0, 360)
                max_lon = 360
            else:
                # Use the range (-180, 180)
                max_lon = 180
            # Do one array to test
            self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
            # Make sure it's strictly increasing now
            if not np.all(np.diff(self.lon_1d) > 0):
                print 'Error (Grid): Longitude is not strictly increasing either in the range (0, 360) or (-180, 180).'
                sys.exit()
        if max_lon == 360:
            self.split = 0
        elif max_lon == 180:
            self.split = 180
        self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
        self.lon_corners_1d = fix_lon_range(self.lon_corners_1d,
                                            max_lon=max_lon)
        self.lon_2d = fix_lon_range(self.lon_2d, max_lon=max_lon)
        self.lon_corners_2d = fix_lon_range(self.lon_corners_2d,
                                            max_lon=max_lon)

        # Save dimensions
        self.nx = self.lon_1d.size
        self.ny = self.lat_1d.size
        self.nz = self.z.size

        # Calculate volume
        self.dV = xy_to_xyz(self.dA, [self.nx, self.ny, self.nz]) * z_to_xyz(
            self.dz, [self.nx, self.ny, self.nz]) * self.hfac

        # Calculate bathymetry and ice shelf draft
        self.bathy = bdry_from_hfac('bathy', self.hfac, self.z_edges)
        self.draft = bdry_from_hfac('draft', self.hfac, self.z_edges)

        # Create masks on the t, u, and v grids
        # Land masks
        self.land_mask = self.build_land_mask(self.hfac)
        self.land_mask_u = self.build_land_mask(self.hfac_w)
        self.land_mask_v = self.build_land_mask(self.hfac_s)
        # Ice shelf masks
        self.ice_mask = self.build_ice_mask(self.hfac)
        self.ice_mask_u = self.build_ice_mask(self.hfac_w)
        self.ice_mask_v = self.build_ice_mask(self.hfac_s)
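Construction as used throughout these examples, from either an MDS grid directory or a NetCDF grid file (names are placeholders):

grid = Grid('../grid/')    # directory of binary XC, YC, hFacC, ... files
grid = Grid('grid.nc')     # or a NetCDF grid file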
Example #15
def make_annual_averages (in_dir='./', out_dir='./'):

    in_dir = real_dir(in_dir)
    out_dir = real_dir(out_dir)

    # Find all the files of the form output_*.nc
    file_names = build_file_list(in_dir)
    num_files = len(file_names)
    # Make sure their names go from 1 to n, where n is the number of files
    if '001' not in file_names[0] or '{0:03d}'.format(num_files) not in file_names[-1]:
        print 'Error (make_annual_averages): based on filenames, you seem to be missing some files.'
        sys.exit()

    # Get the starting date
    time0 = netcdf_time(file_names[0])[0]
    if time0.month != 1:
        print "Error (make_annual_average): this simulation doesn't start in January."
        sys.exit()
    year0 = time0.year

    # Save the number of months in each file
    num_months = []
    for file in file_names:
        id = nc.Dataset(file)
        num_months.append(id.variables['time'].size)
        id.close()

    # Now the work starts
    year = year0
    i = 0  # file number
    t = 0  # the next time index that needs to be dealt with
    files_to_average = [] # list of files containing timesteps from the current year
    t_start = None  # time index of files_to_average[0] to start the averaging from

    # New iteration of loop each time we process a chunk of time from a file.
    while True:

        if len(files_to_average)==0 and t+12 <= num_months[i]:
            # Option 1: Average a full year
            files_to_average.append(file_names[i])
            t_start = t
            t_end = t+12
            print 'Processing all of ' + str(year) + ' from ' + file_names[i] + ', indices ' + str(t_start) + ' to ' + str(t_end-1)
            average_monthly_files(files_to_average, out_dir+str(year)+'_avg.nc', t_start=t_start, t_end=t_end)
            files_to_average = []
            t_start = None
            t += 12
            year += 1

        elif len(files_to_average)==0 and t+12 > num_months[i]:
            # Option 2: Start a new year
            files_to_average.append(file_names[i])
            t_start = t
            print 'Processing beginning of ' + str(year) + ' from ' + file_names[i] + ', indices ' + str(t_start) + ' to ' + str(num_months[i]-1)
            tmp_months = num_months[i] - t_start
            print '(have processed ' + str(tmp_months) + ' months of ' + str(year) + ')'
            t = num_months[i]

        elif len(files_to_average)>0 and t+12-tmp_months > num_months[i]:
            # Option 3: Add onto an existing year, but can't complete it
            files_to_average.append(file_names[i])
            if t != 0:
                print 'Error (make_annual_averages): something weird happened with Option 3'
                sys.exit()
            print 'Processing middle of ' + str(year) + ' from ' + file_names[i] + ', indices ' + str(t) + ' to ' + str(num_months[i]-1)
            tmp_months += num_months[i] - t
            print '(have processed ' + str(tmp_months) + ' months of ' + str(year) + ')'
            t = num_months[i]

        elif len(files_to_average)>0 and t+12-tmp_months <= num_months[i]:
            # Option 4: Add onto an existing year and complete it
            files_to_average.append(file_names[i])
            if t != 0:
                print 'Error (make_annual_averages): something weird happened with Option 4'
                sys.exit()
            t_end = t+12-tmp_months
            print 'Processing end of ' + str(year) + ' from ' + file_names[i] + ', indices ' + str(t) + ' to ' + str(t_end-1)
            average_monthly_files(files_to_average, out_dir+str(year)+'_avg.nc', t_start=t_start, t_end=t_end)
            files_to_average = []
            t_start = None
            t += 12-tmp_months
            year += 1

        if t == num_months[i]:
            print 'Reached the end of ' + file_names[i]
            # Prepare for the next file
            i += 1
            t = 0
            if i == num_files:
                # No more files
                if len(files_to_average)>0:
                    print 'Warning: ' + str(year) + ' is incomplete. Ignoring it.'
                break
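Hypothetical usage, averaging the monthly files output_001.nc, output_002.nc, ... into one <year>_avg.nc per complete year:

make_annual_averages(in_dir='../output/', out_dir='../output/annual/')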
Example #16
def plot_everything (output_dir='.', timeseries_file='timeseries.nc', grid_path='../grid/', fig_dir='.', file_path=None, monthly=True, date_string=None, time_index=-1, time_average=False, unravelled=False, key='WSS'):

    if time_average:
        time_index = None

    # Make sure proper directories
    output_dir = real_dir(output_dir)
    fig_dir = real_dir(fig_dir)
    
    # Build the list of output files in this directory (use them all for timeseries)
    output_files = build_file_list(output_dir, unravelled=unravelled)
    if file_path is None:
        # Select the last file for single-timestep analysis
        file_path = output_files[-1]        

    # Build the grid
    grid = Grid(grid_path)

    # Timeseries
    var_names = ['fris_mass_balance', 'eta_avg', 'seaice_area', 'fris_temp', 'fris_salt', 'fris_age']
    for var in var_names:
        read_plot_timeseries(var, output_dir+timeseries_file, precomputed=True, fig_name=fig_dir+'timeseries_'+var+'.png', monthly=monthly)

    # Lat-lon plots
    var_names = ['ismr', 'bwtemp', 'bwsalt', 'sst', 'sss', 'aice', 'hice', 'hsnow', 'mld', 'eta', 'saltflx', 'vel', 'velice', 'psi', 'bwage', 'iceprod']
    for var in var_names:
        # Customise bounds and zooming
        vmin = None
        vmax = None
        zoom_fris = False
        chunk = None
        fig_name = fig_dir + var + '.png'
        if var == 'bwtemp':
            if key == 'WSS':
                vmin = -2.5
                vmax = -1.5
            elif key == 'WSK':
                vmax = 1
        if var == 'bwsalt':
            vmin = 34.3
        if var == 'bwage':
            vmin = 0
            if key == 'WSS':
                vmax = 12
        if var == 'eta':
            vmin = -2.5
        if var == 'hice':
            vmax = 4
        if var == 'saltflx':
            vmin = -0.001
            vmax = 0.001
        if var == 'iceprod':
            vmin = 0
            vmax = 5
        if var == 'psi' and key=='WSS':
            vmin = -0.5
            vmax = 0.5
        if var in ['vel', 'velice'] and key=='WSS':
            chunk = 6
        if not zoom_fris and key=='WSK':
            figsize = (10,6)
        else:
            figsize = (8,6)
        # Plot
        read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, vmin=vmin, vmax=vmax, zoom_fris=zoom_fris, fig_name=fig_name, date_string=date_string, figsize=figsize, chunk=chunk)
        # Make additional plots if needed
        if key=='WSK' and var in ['ismr', 'vel', 'bwtemp', 'bwsalt', 'psi', 'bwage']:
            # Make another plot zoomed into FRIS
            figsize = (8,6)
            # First adjust bounds
            if var == 'bwtemp':
                vmax = -1.5
            if var == 'bwage':
                vmax = 10
            if var == 'psi':
                vmax = 0.5
            read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, vmin=vmin, vmax=vmax, zoom_fris=True, fig_name=fig_dir+var+'_zoom.png', date_string=date_string, figsize=figsize)
        if var == 'vel':
            # Call the other options for vertical transformations
            if key=='WSK':
                figsize = (10,6)
            for vel_option in ['sfc', 'bottom']:
                read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, vel_option=vel_option, vmin=vmin, vmax=vmax, zoom_fris=zoom_fris, fig_name=fig_dir+var+'_'+vel_option+'.png', date_string=date_string, figsize=figsize, chunk=chunk)
        if var in ['eta', 'hice']:
            # Make another plot with unbounded colour bar
            read_plot_latlon(var, file_path, grid=grid, time_index=time_index, time_average=time_average, zoom_fris=zoom_fris, fig_name=fig_dir + var + '_unbound.png', date_string=date_string, figsize=figsize)

    # Slice plots
    read_plot_ts_slice(file_path, grid=grid, lon0=-40, hmax=-75, zmin=-1450, time_index=time_index, time_average=time_average, fig_name=fig_dir+'ts_slice_filchner.png', date_string=date_string)
    read_plot_ts_slice(file_path, grid=grid, lon0=-55, hmax=-72, time_index=time_index, time_average=time_average, fig_name=fig_dir+'ts_slice_ronne.png', date_string=date_string)
    if key == 'WSK':
        read_plot_ts_slice(file_path, grid=grid, lon0=0, hmax=-71, time_index=time_index, time_average=time_average, fig_name=fig_dir+'ts_slice_eweddell.png', date_string=date_string)
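Hypothetical usage for an uncoupled Weddell Sea run, using the default key='WSS':

plot_everything(output_dir='../output/', fig_dir='figures/', time_average=True)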
Example #17
def plot_everything_diff (output_dir='./', baseline_dir=None, timeseries_file='timeseries.nc', grid_path='../grid/', fig_dir='.', option='last_year', unravelled=False, monthly=True, key='WSS'):

    # Check that baseline_dir is set
    # It's a keyword argument on purpose so that the user can't mix up which simulation is which.
    if baseline_dir is None:
        print 'Error (plot_everything_diff): must set baseline_dir'
        sys.exit()

    # Make sure proper directories, and rename so 1=baseline, 2=current
    output_dir_1 = real_dir(baseline_dir)
    output_dir_2 = real_dir(output_dir)    
    fig_dir = real_dir(fig_dir)

    # Build lists of output files in each directory
    output_files_1 = build_file_list(output_dir_1, unravelled=unravelled)
    output_files_2 = build_file_list(output_dir_2, unravelled=unravelled)

    # Build the grid
    grid = Grid(grid_path)

    # Timeseries through the entire simulation
    var_names = ['fris_mass_balance', 'eta_avg', 'seaice_area', 'fris_temp', 'fris_salt', 'fris_age']
    for var in var_names:
        read_plot_timeseries_diff(var, output_dir_1+timeseries_file, output_dir_2+timeseries_file, precomputed=True, fig_name=fig_dir+'timeseries_'+var+'_diff.png', monthly=monthly)

    # Now figure out which time indices to use for plots with no time dependence
    file_path_1, file_path_2, time_index_1, time_index_2, t_start_1, t_start_2, t_end_1, t_end_2, time_average = select_common_time(output_files_1, output_files_2, option=option, monthly=monthly)
    # Set date string
    if option == 'last_year':
        date_string = 'year beginning ' + parse_date(file_path=file_path_1, time_index=t_start_1)
    elif option == 'last_month':
        date_string = parse_date(file_path=file_path_1, time_index=time_index_1)

    # Now make lat-lon plots
    var_names = ['ismr', 'bwtemp', 'bwsalt', 'sst', 'sss', 'aice', 'hice', 'hsnow', 'mld', 'eta', 'vel', 'velice', 'bwage', 'iceprod']
    if key == 'WSK':
        figsize = (10,6)
    else:
        figsize = (8,6)
    for var in var_names:
        if var == 'iceprod':
            vmin = -2
            vmax = 2            
        else:
            vmin = None
            vmax = None
        read_plot_latlon_diff(var, file_path_1, file_path_2, grid=grid, time_index=time_index_1, t_start=t_start_1, t_end=t_end_1, time_average=time_average, time_index_2=time_index_2, t_start_2=t_start_2, t_end_2=t_end_2, date_string=date_string, fig_name=fig_dir+var+'_diff.png', figsize=figsize, vmin=vmin, vmax=vmax)
        # Zoom into some variables
        if key=='WSK' and var in ['ismr', 'bwtemp', 'bwsalt', 'vel', 'bwage']:
            if var == 'bwage':
                vmin = -5
                vmax = None
            else:
                vmin = None
                vmax = None
            read_plot_latlon_diff(var, file_path_1, file_path_2, grid=grid, time_index=time_index_1, t_start=t_start_1, t_end=t_end_1, time_average=time_average, time_index_2=time_index_2, t_start_2=t_start_2, t_end_2=t_end_2, zoom_fris=True, date_string=date_string, fig_name=fig_dir+var+'_zoom_diff.png', vmin=vmin, vmax=vmax)
        if var == 'vel':
            # Call the other options for vertical transformations
            for vel_option in ['sfc', 'bottom']:
                read_plot_latlon_diff(var, file_path_1, file_path_2, grid=grid, time_index=time_index_1, t_start=t_start_1, t_end=t_end_1, time_average=time_average, time_index_2=time_index_2, t_start_2=t_start_2, t_end_2=t_end_2, vel_option=vel_option, date_string=date_string, fig_name=fig_dir+var+'_'+vel_option+'_diff.png')

    # Slice plots
    read_plot_ts_slice_diff(file_path_1, file_path_2, grid=grid, lon0=-40, hmax=-75, zmin=-1450, time_index=time_index_1, t_start=t_start_1, t_end=t_end_1, time_average=time_average, time_index_2=time_index_2, t_start_2=t_start_2, t_end_2=t_end_2, date_string=date_string, fig_name=fig_dir+'ts_slice_filchner_diff.png')
    read_plot_ts_slice_diff(file_path_1, file_path_2, grid=grid, lon0=-55, hmax=-72, time_index=time_index_1, t_start=t_start_1, t_end=t_end_1, time_average=time_average, time_index_2=time_index_2, t_start_2=t_start_2, t_end_2=t_end_2, date_string=date_string, fig_name=fig_dir+'ts_slice_ronne_diff.png')
    if key == 'WSK':
        read_plot_ts_slice_diff(file_path_1, file_path_2, grid=grid, lon0=-25, zmin=-2000, time_index=time_index_1, t_start=t_start_1, t_end=t_end_1, time_average=time_average, time_index_2=time_index_2, t_start_2=t_start_2, t_end_2=t_end_2, date_string=date_string, fig_name=fig_dir+'ts_slice_eweddell_diff.png')    
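Hypothetical usage comparing a perturbation experiment against its baseline (baseline_dir is deliberately a keyword argument, as noted above):

plot_everything_diff(output_dir='../perturbed/output/', baseline_dir='../baseline/output/', fig_dir='figures/')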
Example #18
def interp_bedmap2 (lon, lat, topo_dir, nc_out, bed_file=None, grounded_iceberg=False, rtopo_file=None):

    import netCDF4 as nc
    from plot_latlon import plot_tmp_domain

    topo_dir = real_dir(topo_dir)

    # BEDMAP2 file names
    surface_file = topo_dir+'bedmap2_surface.flt'
    thickness_file = topo_dir+'bedmap2_thickness.flt'
    mask_file = topo_dir+'bedmap2_icemask_grounded_and_shelves.flt'
    if bed_file is None:
        bed_file = topo_dir+'bedmap2_bed.flt'
    # GEBCO file name
    gebco_file = topo_dir+'GEBCO_2014_2D.nc'
    if grounded_iceberg and (rtopo_file is None):
        # RTopo-2 file name (auxiliary file including masks)        
        rtopo_file = topo_dir+'RTopo-2.0.1_30sec_aux.nc'

    if np.amin(lat) > -60:
        print "Error (interp_bedmap2): this domain doesn't go south of 60S, so it's not covered by BEDMAP2."
        sys.exit()
    if np.amax(lat) > -60:
        use_gebco = True
        # Find the first index north of 60S
        j_split = np.nonzero(lat >= -60)[0][0]
        # Split grid into a BEDMAP2 section and a GEBCO section (remembering lat is edges, not centres, so lat[j_split-1] is in both sections)
        lat_b = lat[:j_split]
        lat_g = lat[j_split-1:]
    else:
        use_gebco = False
        lat_b = lat

    # Set up BEDMAP grid (polar stereographic)
    x = np.arange(-bedmap_bdry, bedmap_bdry+bedmap_res, bedmap_res)
    y = np.arange(-bedmap_bdry, bedmap_bdry+bedmap_res, bedmap_res)

    print 'Reading data'
    # Have to flip it vertically so lon0=0 in polar stereographic projection
    # Otherwise, lon0=180 which makes x_interp and y_interp strictly decreasing when we call polar_stereo later, and the interpolation chokes
    bathy = np.flipud(np.fromfile(bed_file, dtype='<f4').reshape([bedmap_dim, bedmap_dim]))
    surf = np.flipud(np.fromfile(surface_file, dtype='<f4').reshape([bedmap_dim, bedmap_dim]))
    thick = np.flipud(np.fromfile(thickness_file, dtype='<f4').reshape([bedmap_dim, bedmap_dim]))
    mask = np.flipud(np.fromfile(mask_file, dtype='<f4').reshape([bedmap_dim, bedmap_dim]))

    if np.amax(lat_b) > -61:
        print 'Extending bathymetry past 60S'
        # Bathymetry has missing values north of 60S. Extend into that mask so there are no artifacts in the splines near 60S.
        bathy = extend_into_mask(bathy, missing_val=bedmap_missing_val, num_iters=5)

    print 'Calculating ice shelf draft'
    # Calculate ice shelf draft from ice surface and ice thickness
    draft = surf - thick

    print 'Calculating ocean and ice masks'
    # Mask: -9999 is open ocean, 0 is grounded ice, 1 is ice shelf
    # Make an ocean mask and an ice mask. Ice shelves are in both.
    omask = (mask!=0).astype(float)
    imask = (mask!=-9999).astype(float)

    # Convert lon and lat to polar stereographic coordinates
    lon_2d, lat_2d = np.meshgrid(lon, lat_b)
    x_interp, y_interp = polar_stereo(lon_2d, lat_2d)

    # Interpolate fields
    print 'Interpolating bathymetry'
    bathy_interp = interp_topo(x, y, bathy, x_interp, y_interp)
    print 'Interpolating ice shelf draft'
    draft_interp = interp_topo(x, y, draft, x_interp, y_interp)
    print 'Interpolating ocean mask'
    omask_interp = interp_topo(x, y, omask, x_interp, y_interp)
    print 'Interpolating ice mask'
    imask_interp = interp_topo(x, y, imask, x_interp, y_interp)

    if use_gebco:
        print 'Filling in section north of 60S with GEBCO data'

        print 'Reading data'
        id = nc.Dataset(gebco_file, 'r')
        lat_gebco_grid = id.variables['lat'][:]
        lon_gebco_grid = id.variables['lon'][:]
        # Figure out which indices we actually care about - buffer zone of 5 cells so the splines have room to breathe
        j_start = max(np.nonzero(lat_gebco_grid >= lat_g[0])[0][0] - 1 - 5, 0)
        j_end = min(np.nonzero(lat_gebco_grid >= lat_g[-1])[0][0] + 5, lat_gebco_grid.size-1)
        i_start = max(np.nonzero(lon_gebco_grid >= lon[0])[0][0] - 1 - 5, 0)
        i_end = min(np.nonzero(lon_gebco_grid >= lon[-1])[0][0] + 5, lon_gebco_grid.size-1)
        # Read GEBCO bathymetry just from this section
        bathy_gebco = id.variables['elevation'][j_start:j_end, i_start:i_end]
        id.close()
        # Trim the grid too
        lat_gebco_grid = lat_gebco_grid[j_start:j_end]
        lon_gebco_grid = lon_gebco_grid[i_start:i_end]

        print 'Interpolating bathymetry'
        lon_2d, lat_2d = np.meshgrid(lon, lat_g)
        bathy_gebco_interp = interp_topo(lon_gebco_grid, lat_gebco_grid, bathy_gebco, lon_2d, lat_2d)

        print 'Combining BEDMAP2 and GEBCO sections'
        # Deep copy the BEDMAP2 section of each field
        bathy_bedmap_interp = np.copy(bathy_interp)
        draft_bedmap_interp = np.copy(draft_interp)
        omask_bedmap_interp = np.copy(omask_interp)
        imask_bedmap_interp = np.copy(imask_interp)
        # Now combine them (remember we interpolated to the centres of grid cells, but lat and lon arrays define the edges, so minus 1 in each dimension)
        bathy_interp = np.empty([lat.size-1, lon.size-1])
        bathy_interp[:j_split-1,:] = bathy_bedmap_interp
        bathy_interp[j_split-1:,:] = bathy_gebco_interp
        # Ice shelf draft will be 0 in GEBCO region
        draft_interp = np.zeros([lat.size-1, lon.size-1])
        draft_interp[:j_split-1,:] = draft_bedmap_interp
        # Set ocean mask to 1 in GEBCO region; any land points will be updated later based on bathymetry > 0
        omask_interp = np.ones([lat.size-1, lon.size-1])
        omask_interp[:j_split-1,:] = omask_bedmap_interp
        # Ice mask will be 0 in GEBCO region
        imask_interp = np.zeros([lat.size-1, lon.size-1])
        imask_interp[:j_split-1,:] = imask_bedmap_interp

    print 'Processing masks'
    # Deal with values interpolated between 0 and 1
    omask_interp[omask_interp < 0.5] = 0
    omask_interp[omask_interp >= 0.5] = 1
    imask_interp[imask_interp < 0.5] = 0
    imask_interp[imask_interp >= 0.5] = 1
    # Zero out bathymetry and ice shelf draft on land    
    bathy_interp[omask_interp==0] = 0
    draft_interp[omask_interp==0] = 0
    # Zero out ice shelf draft in the open ocean
    draft_interp[imask_interp==0] = 0
    
    # Update masks due to interpolation changing their boundaries
    # Anything with positive bathymetry should be land
    index = bathy_interp > 0
    omask_interp[index] = 0
    bathy_interp[index] = 0
    draft_interp[index] = 0
    # Anything with negative or zero water column thickness should be land
    index = draft_interp - bathy_interp <= 0
    omask_interp[index] = 0
    bathy_interp[index] = 0
    draft_interp[index] = 0
    # Anything with positive ice shelf draft should be land
    index = draft_interp > 0
    omask_interp[index] = 0
    bathy_interp[index] = 0
    draft_interp[index] = 0
    # Any points with zero ice shelf draft should not be in the ice mask
    # (This will also remove grounded ice, and ice shelves with total thickness (draft + freeboard) thinner than firn_air)
    index = draft_interp == 0
    imask_interp[index] = 0

    print 'Removing isolated ocean cells'
    omask_interp = remove_isolated_cells(omask_interp)
    bathy_interp[omask_interp==0] = 0
    draft_interp[omask_interp==0] = 0
    imask_interp[omask_interp==0] = 0
    print 'Removing isolated ice shelf cells'
    imask_interp = remove_isolated_cells(imask_interp)
    draft_interp[imask_interp==0] = 0
    
    if grounded_iceberg:
        bathy_interp, omask_interp = add_grounded_iceberg(rtopo_file, lon, lat, bathy_interp, omask_interp)
        
    print 'Plotting'
    if use_gebco:
        # Remesh the grid, using the full latitude array
        lon_2d, lat_2d = np.meshgrid(lon, lat)
    plot_tmp_domain(lon_2d, lat_2d, bathy_interp, title='Bathymetry (m)')
    plot_tmp_domain(lon_2d, lat_2d, draft_interp, title='Ice shelf draft (m)')
    plot_tmp_domain(lon_2d, lat_2d, draft_interp - bathy_interp, title='Water column thickness (m)')
    plot_tmp_domain(lon_2d, lat_2d, omask_interp, title='Ocean mask')
    plot_tmp_domain(lon_2d, lat_2d, imask_interp, title='Ice mask')

    # Write to NetCDF file (at cell centres not edges!)
    ncfile = NCfile_basiclatlon(nc_out, 0.5*(lon[1:] + lon[:-1]), 0.5*(lat[1:] + lat[:-1]))
    ncfile.add_variable('bathy', bathy_interp, units='m')
    ncfile.add_variable('draft', draft_interp, units='m')
    ncfile.add_variable('omask', omask_interp)
    ncfile.add_variable('imask', imask_interp)
    ncfile.close()

    print 'The results have been written into ' + nc_out
    print 'Take a look at this file and make whatever edits you would like to the mask (eg removing everything west of the peninsula; you can use edit_mask if you like)'
    print "Then set your vertical layer thicknesses in a plain-text file, one value per line (make sure they clear the deepest bathymetry of " + str(abs(np.amin(bathy_interp))) + " m), and run remove_grid_problems"
Example No. 19
    def __init__(self, path, model_grid=None, split=0):

        self.split = split

        if path.endswith('.nc'):
            use_netcdf = True
        elif os.path.isdir(path):
            use_netcdf = False
            path = real_dir(path)
            from MITgcmutils import rdmds
        else:
            print 'Error (SOSEGrid): ' + path + ' is neither a NetCDF file nor a directory'
            sys.exit()

        self.trim_extend = True
        if model_grid is None:
            self.trim_extend = False

        if self.trim_extend:
            # Error checking for which longitude range we're in
            if split == 180:
                max_lon = 180
                if np.amax(model_grid.lon_2d) > max_lon:
                    print 'Error (SOSEGrid): split=180 does not match model grid'
                    sys.exit()
            elif split == 0:
                max_lon = 360
                if np.amin(model_grid.lon_2d) < 0:
                    print 'Error (SOSEGrid): split=0 does not match model grid'
                    sys.exit()
            else:
                print 'Error (SOSEGrid): split must be 180 or 0'
                sys.exit()
        else:
            max_lon = 360

        # Read variables
        if use_netcdf:
            # Make the 2D grid 1D so it's regular
            self.lon_1d = read_netcdf(path, 'XC')[0, :]
            self.lon_corners_1d = read_netcdf(path, 'XG')[0, :]
            self.lat_1d = read_netcdf(path, 'YC')[:, 0]
            self.lat_corners_1d = read_netcdf(path, 'YG')[:, 0]
            self.z = read_netcdf(path, 'Z')
            self.z_edges = read_netcdf(path, 'RF')
        else:
            self.lon_1d = rdmds(path + 'XC')[0, :]
            self.lon_corners_1d = rdmds(path + 'XG')[0, :]
            self.lat_1d = rdmds(path + 'YC')[:, 0]
            self.lat_corners_1d = rdmds(path + 'YG')[:, 0]
            self.z = rdmds(path + 'RC').squeeze()
            self.z_edges = rdmds(path + 'RF').squeeze()

        # Fix longitude range
        self.lon_1d = fix_lon_range(self.lon_1d, max_lon=max_lon)
        self.lon_corners_1d = fix_lon_range(self.lon_corners_1d, max_lon=max_lon)
        if split == 180:
            # Split the domain at 180E=180W and rearrange the two halves so longitude is strictly ascending
            self.i_split = np.nonzero(self.lon_1d < 0)[0][0]
        else:
            # Set i_split to 0 which won't actually do anything
            self.i_split = 0
        self.lon_1d = split_longitude(self.lon_1d, self.i_split)
        self.lon_corners_1d = split_longitude(self.lon_corners_1d, self.i_split)
        if self.lon_corners_1d[0] > 0:
            # The split happened between lon_corners[i_split] and lon[i_split].
            # Take mod 360 on this index of lon_corners to make sure it's strictly increasing.
            self.lon_corners_1d[0] -= 360
        # Make sure the longitude axes are strictly increasing after the splitting
        if not np.all(np.diff(self.lon_1d) > 0) or not np.all(np.diff(self.lon_corners_1d) > 0):
            print 'Error (SOSEGrid): longitude is not strictly increasing'
            sys.exit()

        # Save original dimensions
        sose_nx = self.lon_1d.size
        sose_ny = self.lat_1d.size
        sose_nz = self.z.size

        if self.trim_extend:

            # Trim and/or extend the axes
            # Notes about this:
            # Longitude can only be trimmed as SOSE considers all longitudes (someone doing a high-resolution circumpolar model with points in the gap might need to write a patch to wrap the SOSE grid around)
            # Latitude can be trimmed in both directions, or extended to the south (not extended to the north - if you need to do this, SOSE is not the right product for you!)
            # Depth can be extended by one level in both directions, and the deeper bound can also be trimmed
            # The indices i, j, and k will be kept track of with 4 variables each. For example, with longitude:
            # i0_before = first index we care about
            #           = how many cells to trim at beginning
            # i0_after = i0_before's position in the new grid
            #          = how many cells to extend at beginning
            # i1_before = first index we don't care about
            #           sose_nx - i1_before = how many cells to trim at end
            # i1_after = i1_before's position in the new grid
            #          = i1_before - i0_before + i0_after
            # nx = length of new grid
            #      nx - i1_after = how many cells to extend at end
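            # Worked example with made-up numbers: trimming 10 cells at the western edge gives i0_before=10, i0_after=0 (longitude is never extended), so i1_after = i1_before - 10; extending 2 cells at the southern edge gives j0_before=0, j0_after=2, so j1_after = j1_before + 2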

            # Find bounds on model grid
            xmin = np.amin(model_grid.lon_corners_2d)
            xmax = np.amax(model_grid.lon_2d)
            ymin = np.amin(model_grid.lat_corners_2d)
            ymax = np.amax(model_grid.lat_2d)
            z_shallow = model_grid.z[0]
            z_deep = model_grid.z[-1]

            # Western bound (use longitude at cell centres to make sure all grid types clear the bound)
            if xmin == self.lon_1d[0]:
                # Nothing to do
                self.i0_before = 0
            elif xmin > self.lon_1d[0]:
                # Trim
                self.i0_before = np.nonzero(self.lon_1d > xmin)[0][0] - 1
            else:
                print 'Error (SOSEGrid): not allowed to extend westward'
                sys.exit()
            self.i0_after = 0

            # Eastern bound (use longitude at cell corners, i.e. western edge)
            if xmax == self.lon_corners_1d[-1]:
                # Nothing to do
                self.i1_before = sose_nx
            elif xmax < self.lon_corners_1d[-1]:
                # Trim
                self.i1_before = np.nonzero(self.lon_corners_1d > xmax)[0][0] + 1
            else:
                print 'Error (SOSEGrid): not allowed to extend eastward'
                sys.exit()
            self.i1_after = self.i1_before - self.i0_before + self.i0_after
            self.nx = self.i1_after

            # Southern bound (use latitude at cell centres)
            if ymin == self.lat_1d[0]:
                # Nothing to do
                self.j0_before = 0
                self.j0_after = 0
            elif ymin > self.lat_1d[0]:
                # Trim
                self.j0_before = np.nonzero(self.lat_1d > ymin)[0][0] - 1
                self.j0_after = 0
            elif ymin < self.lat_1d[0]:
                # Extend
                self.j0_after = int(np.ceil((self.lat_1d[0] - ymin)/sose_res))
                self.j0_before = 0

            # Northern bound (use latitude at cell corners, i.e. southern edge)
            if ymax == self.lat_corners_1d[-1]:
                # Nothing to do
                self.j1_before = sose_ny
            elif ymax < self.lat_corners_1d[-1]:
                # Trim
                self.j1_before = np.nonzero(self.lat_corners_1d > ymax)[0][0] + 1
            else:
                print 'Error (SOSEGrid): not allowed to extend northward'
                sys.exit()
            self.j1_after = self.j1_before - self.j0_before + self.j0_after
            self.ny = self.j1_after

            # Depth
            self.k0_before = 0
            if z_shallow <= self.z[0]:
                # Nothing to do
                self.k0_after = 0
            else:
                # Extend
                self.k0_after = 1
            if z_deep > self.z[-1]:
                # Trim
                self.k1_before = np.nonzero(self.z < z_deep)[0][0] + 1
            else:
                # Either extend or do nothing
                self.k1_before = sose_nz
            self.k1_after = self.k1_before + self.k0_after
            if z_deep < self.z[-1]:
                # Extend
                self.nz = self.k1_after + 1
            else:
                self.nz = self.k1_after

            # Now we have the indices we need, so trim/extend the axes as needed
            # Longitude: can only trim
            self.lon_1d = self.lon_1d[self.i0_before:self.i1_before]
            self.lon_corners_1d = self.lon_corners_1d[self.i0_before:self.i1_before]
            # Latitude: can extend on south side, trim on both sides
            lat_extend = np.flipud(-1*(np.arange(self.j0_after)+1)*sose_res + self.lat_1d[self.j0_before])
            lat_trim = self.lat_1d[self.j0_before:self.j1_before]
            self.lat_1d = np.concatenate((lat_extend, lat_trim))
            lat_corners_extend = np.flipud(-1*(np.arange(self.j0_after)+1)*sose_res + self.lat_corners_1d[self.j0_before])
            lat_corners_trim = self.lat_corners_1d[self.j0_before:self.j1_before]
            self.lat_corners_1d = np.concatenate((lat_corners_extend, lat_corners_trim))
            # Depth: can extend on both sides (depth 0 at top and extrapolated at bottom to clear the deepest model depth), trim on deep side
            z_above = np.zeros(self.k0_after)  # Will either be [0] or empty
            z_middle = self.z[self.k0_before:self.k1_before]
            z_edges_middle = self.z_edges[self.k0_before:self.k1_before+1]
            z_below = (2*model_grid.z[-1] - model_grid.z[-2])*np.ones(self.nz - self.k1_after)  # Will either be [something deeper than z_deep] or empty
            self.z = np.concatenate((z_above, z_middle, z_below))
            self.z_edges = np.concatenate((z_above, z_edges_middle, z_below))

            # Make sure we cleared those bounds
            if self.lon_corners_1d[0] > xmin:
                print 'Error (SOSEGrid): western bound not cleared'
                sys.exit()
            if self.lon_corners_1d[-1] < xmax:
                print 'Error (SOSEGrid): eastern bound not cleared'
                sys.exit()
            if self.lat_corners_1d[0] > ymin:
                print 'Error (SOSEGrid): southern bound not cleared'
                sys.exit()
            if self.lat_corners_1d[-1] < ymax:
                print 'Error (SOSEGrid): northern bound not cleared'
                sys.exit()
            if self.z[0] < z_shallow:
                print 'Error (SOSEGrid): shallow bound not cleared'
                sys.exit()
            if self.z[-1] > z_deep:
                print 'Error (SOSEGrid): deep bound not cleared'
                sys.exit()

        else:

            # Nothing fancy to do
            self.nx = sose_nx
            self.ny = sose_ny
            self.nz = sose_nz

        # Now read the rest of the variables we need, splitting/trimming/extending them if needed
        if use_netcdf:
            self.hfac = self.read_field(path, 'xyz', var_name='hFacC', fill_value=0)
            self.hfac_w = self.read_field(path, 'xyz', var_name='hFacW', fill_value=0)
            self.hfac_s = self.read_field(path, 'xyz', var_name='hFacS', fill_value=0)
            self.dA = self.read_field(path, 'xy', var_name='rA', fill_value=0)
            self.dz = self.read_field(path, 'z', var_name='DRF', fill_value=0)
        else:
            self.hfac = self.read_field(path + 'hFacC', 'xyz', fill_value=0)
            self.hfac_w = self.read_field(path + 'hFacW', 'xyz', fill_value=0)
            self.hfac_s = self.read_field(path + 'hFacS', 'xyz', fill_value=0)
            self.dA = self.read_field(path + 'RAC', 'xy', fill_value=0)
            self.dz = self.read_field(path + 'DRF', 'z', fill_value=0)
        # Calculate volume
        self.dV = xy_to_xyz(self.dA, [self.nx, self.ny, self.nz])*z_to_xyz(self.dz, [self.nx, self.ny, self.nz])*self.hfac

        # Mesh lat and lon
        self.lon_2d, self.lat_2d = np.meshgrid(self.lon_1d, self.lat_1d)
        self.lon_corners_2d, self.lat_corners_2d = np.meshgrid(self.lon_corners_1d, self.lat_corners_1d)

        # Calculate bathymetry
        self.bathy = bdry_from_hfac('bathy', self.hfac, self.z_edges)

        # Create land masks
        self.land_mask = self.build_land_mask(self.hfac)
        self.land_mask_u = self.build_land_mask(self.hfac_w)
        self.land_mask_v = self.build_land_mask(self.hfac_s)
        # Dummy ice mask with all False
        self.ice_mask = np.zeros(self.land_mask.shape).astype(bool)
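# A minimal usage sketch for SOSEGrid (hypothetical paths; the functions below build the grid the same way):
#   model_grid = Grid('model_grid_dir/')
#   sose_grid = SOSEGrid('sose_dir/grid/', model_grid=model_grid, split=180)
#   theta = sose_grid.read_field('sose_dir/THETA_climatology.data', 'xyzt')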
Example No. 20
def make_obcs (location, grid_path, input_path, output_dir, source='SOSE', use_seaice=True, nc_out=None, prec=32, split=180):

    from grid import SOSEGrid
    from file_io import NCfile, read_netcdf
    from interpolation import interp_bdry

    if source == 'SOSE':
        input_path = real_dir(input_path)
    output_dir = real_dir(output_dir)

    # Fields to interpolate
    # Important: SIarea has to be before SIuice and SIvice so it can be used for masking
    fields = ['THETA', 'SALT', 'UVEL', 'VVEL', 'SIarea', 'SIheff', 'SIuice', 'SIvice', 'ETAN']  
    # Flag for 2D or 3D
    dim = [3, 3, 3, 3, 2, 2, 2, 2, 2]
    # Flag for grid type
    gtype = ['t', 't', 'u', 'v', 't', 't', 'u', 'v', 't']
    if source == 'MIT':
        # Also consider snow thickness
        fields += ['SIhsnow']
        dim += [2]
        gtype += ['t']
    # End of filenames for input
    infile_tail = '_climatology.data'
    # End of filenames for output
    outfile_tail = '_'+source+'.OBCS_'+location

    print 'Building MITgcm grid'
    if source == 'SOSE':
        model_grid = grid_check_split(grid_path, split)
    elif source == 'MIT':
        model_grid = Grid(grid_path)
    # Figure out what the latitude or longitude is on the boundary, both on the centres and outside edges of those cells
    if location == 'S':
        lat0 = model_grid.lat_1d[0]
        lat0_e = model_grid.lat_corners_1d[0]
        print 'Southern boundary at ' + str(lat0) + ' (cell centre), ' + str(lat0_e) + ' (cell edge)'
    elif location == 'N':
        lat0 = model_grid.lat_1d[-1]
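        # The corners array holds the southern edge of each cell, so the outer edge of the northernmost cell must be linearly extrapolated from the last two corners (same idea for the eastern boundary below)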
        lat0_e = 2*model_grid.lat_corners_1d[-1] - model_grid.lat_corners_1d[-2]
        print 'Northern boundary at ' + str(lat0) + ' (cell centre), ' + str(lat0_e) + ' (cell edge)'
    elif location == 'W':
        lon0 = model_grid.lon_1d[0]
        lon0_e = model_grid.lon_corners_1d[0]
        print 'Western boundary at ' + str(lon0) + ' (cell centre), ' + str(lon0_e) + ' (cell edge)'
    elif location == 'E':
        lon0 = model_grid.lon_1d[-1]
        lon0_e = 2*model_grid.lon_corners_1d[-1] - model_grid.lon_corners_1d[-2]
        print 'Eastern boundary at ' + str(lon0) + ' (cell centre), ' + str(lon0_e) + ' (cell edge)'
    else:
        print 'Error (make_obcs): invalid location ' + str(location)
        sys.exit()

    if source == 'SOSE':
        print 'Building SOSE grid'
        source_grid = SOSEGrid(input_path+'grid/', model_grid=model_grid, split=split)
    elif source == 'MIT':
        print 'Building grid from source model'
        source_grid = Grid(input_path)
    else:
        print 'Error (make_obcs): invalid source ' + source
        sys.exit()
    # Calculate interpolation indices and coefficients to the boundary latitude or longitude
    if location in ['N', 'S']:
        # Cell centre
        j1, j2, c1, c2 = interp_slice_helper(source_grid.lat_1d, lat0)
        # Cell edge
        j1_e, j2_e, c1_e, c2_e = interp_slice_helper(source_grid.lat_corners_1d, lat0_e)
    else:
        # Pass lon=True to consider the possibility of boundary near 0E
        i1, i2, c1, c2 = interp_slice_helper(source_grid.lon_1d, lon0, lon=True)
        i1_e, i2_e, c1_e, c2_e = interp_slice_helper(source_grid.lon_corners_1d, lon0_e, lon=True)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyzt')
        ncfile.add_time(np.arange(12)+1, units='months')  

    # Process fields
    for n in range(len(fields)):
        if fields[n].startswith('SI') and not use_seaice:
            continue

        print 'Processing ' + fields[n]
        if source == 'SOSE':
            in_file = input_path + fields[n] + infile_tail
        out_file = output_dir + fields[n] + outfile_tail
        # Read the monthly climatology at all points
        if source == 'SOSE':
            if dim[n] == 3:
                source_data = source_grid.read_field(in_file, 'xyzt')
            else:
                source_data = source_grid.read_field(in_file, 'xyt')
        else:
            source_data = read_netcdf(input_path, fields[n])

        if fields[n] == 'SIarea' and source == 'SOSE':
            # We'll need this field later for SIuice and SIvice, as SOSE didn't mask those variables properly
            print 'Interpolating sea ice area to u and v grids for masking of sea ice velocity'
            source_aice_u = interp_grid(source_data, source_grid, 't', 'u', time_dependent=True, mask_with_zeros=True, periodic=True)
            source_aice_v = interp_grid(source_data, source_grid, 't', 'v', time_dependent=True, mask_with_zeros=True, periodic=True)
        # Set sea ice velocity to zero wherever sea ice area is zero
        if fields[n] in ['SIuice', 'SIvice'] and source == 'SOSE':
            print 'Masking sea ice velocity with sea ice area'
            if fields[n] == 'SIuice':
                index = source_aice_u==0
            else:
                index = source_aice_v==0
            source_data[index] = 0            

        # Choose the correct grid for lat, lon, hfac
        source_lon, source_lat = source_grid.get_lon_lat(gtype=gtype[n], dim=1)
        source_hfac = source_grid.get_hfac(gtype=gtype[n])
        model_lon, model_lat = model_grid.get_lon_lat(gtype=gtype[n], dim=1)
        model_hfac = model_grid.get_hfac(gtype=gtype[n])
        # Interpolate to the correct grid and choose the correct horizontal axis
        if location in ['N', 'S']:
            if gtype[n] == 'v':
                source_data = c1_e*source_data[...,j1_e,:] + c2_e*source_data[...,j2_e,:]
                # Multiply hfac by the ceiling of hfac on each side, to make sure we're not averaging over land
                source_hfac = (c1_e*source_hfac[...,j1_e,:] + c2_e*source_hfac[...,j2_e,:])*np.ceil(source_hfac[...,j1_e,:])*np.ceil(source_hfac[...,j2_e,:])
            else:
                source_data = c1*source_data[...,j1,:] + c2*source_data[...,j2,:]
                source_hfac = (c1*source_hfac[...,j1,:] + c2*source_hfac[...,j2,:])*np.ceil(source_hfac[...,j1,:])*np.ceil(source_hfac[...,j2,:])
            source_haxis = source_lon
            model_haxis = model_lon
            if location == 'S':
                model_hfac = model_hfac[:,0,:]
            else:
                model_hfac = model_hfac[:,-1,:]
        else:
            if gtype[n] == 'u':
                source_data = c1_e*source_data[...,i1_e] + c2_e*source_data[...,i2_e]
                source_hfac = (c1_e*source_hfac[...,i1_e] + c2_e*source_hfac[...,i2_e])*np.ceil(source_hfac[...,i1_e])*np.ceil(source_hfac[...,i2_e])
            else:
                source_data = c1*source_data[...,i1] + c2*source_data[...,i2]
                source_hfac = (c1*source_hfac[...,i1] + c2*source_hfac[...,i2])*np.ceil(source_hfac[...,i1])*np.ceil(source_hfac[...,i2])
            source_haxis = source_lat
            model_haxis = model_lat
            if location == 'W':
                model_hfac = model_hfac[...,0]
            else:
                model_hfac = model_hfac[...,-1]
        if source == 'MIT' and model_haxis[0] < source_haxis[0]:
            # Need to extend source data to the west or south. Just add one row.
            source_haxis = np.concatenate(([model_haxis[0]-0.1], source_haxis))
            source_data = np.concatenate((np.expand_dims(source_data[:,...,0], -1), source_data), axis=-1)
            source_hfac = np.concatenate((np.expand_dims(source_hfac[:,0], 1), source_hfac), axis=1)
        # For 2D variables, just need surface hfac
        if dim[n] == 2:
            source_hfac = source_hfac[0,:]
            model_hfac = model_hfac[0,:]

        # Now interpolate each month to the model grid
        if dim[n] == 3:
            data_interp = np.zeros([12, model_grid.nz, model_haxis.size])
        else:
            data_interp = np.zeros([12, model_haxis.size])
        for month in range(12):
            print '...interpolating month ' + str(month+1)
            data_interp_tmp = interp_bdry(source_haxis, source_grid.z, source_data[month,:], source_hfac, model_haxis, model_grid.z, model_hfac, depth_dependent=(dim[n]==3))
            if fields[n] not in ['THETA', 'SALT']:
                # Zero in land mask is more physical than extrapolated data
                index = model_hfac==0
                data_interp_tmp[index] = 0
            data_interp[month,:] = data_interp_tmp

        write_binary(data_interp, out_file, prec=prec)
        
        if nc_out is not None:
            print '...adding to ' + nc_out
            # Construct the dimension code
            if location in ['S', 'N']:
                dimension = 'x'
            else:
                dimension = 'y'
            if dim[n] == 3:
                dimension += 'z'
            dimension += 't'
            ncfile.add_variable(fields[n] + '_' + location, data_interp, dimension)

    if nc_out is not None:
        ncfile.close()
Example No. 21
def process_era5 (in_dir, out_dir, year, six_hourly=True, first_year=False, last_year=False, prec=32):

    in_dir = real_dir(in_dir)
    out_dir = real_dir(out_dir)

    if year == 1979 and not first_year:
        print 'Warning (process_era5): last we checked, 1979 was the first year of ERA5. Unless this has changed, you need to set first_year=True.'
    if year == 2018 and not last_year:
        print 'Warning (process_era5): last we checked, 2018 was the last year of ERA5. Unless this has changed, you need to set last_year=True.'

    # Construct file paths for input and output files
    in_head = in_dir + 'era5_'
    var_in = ['msl', 't2m', 'd2m', 'u10', 'v10', 'tp', 'ssrd', 'strd', 'e']
    if six_hourly:
        accum_flag = '_2'
    in_tail = '_' + str(year) + '.nc'
    out_head = out_dir + 'ERA5_'
    var_out = ['apressure', 'atemp', 'aqh', 'uwind', 'vwind', 'precip', 'swdown', 'lwdown', 'evap']
    out_tail = '_' + str(year)

    # Northernmost latitude to keep
    lat0 = -30
    # Length of ERA5 time interval in seconds
    dt = 3600.

    # Read the grid from the first file
    first_file = in_head + var_in[0] + in_tail
    lon = read_netcdf(first_file, 'longitude')
    lat = read_netcdf(first_file, 'latitude')
    # Find the index of the last latitude we don't care about (remember that latitude goes from north to south in ERA files!)
    j_bound = np.nonzero(lat < lat0)[0][0] - 2
    # Trim and flip latitude
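    # (the reversed slice keeps indices j_bound+1 through the end and flips them, so latitude now runs south to north)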
    lat = lat[:j_bound:-1]
    # Also read the first time index for the starting date
    start_date = netcdf_time(first_file, monthly=False)[0]

    if first_year:
        # Print grid information to the reader
        print '\n'
        print 'For var in ' + str(var_out) + ', make these changes in input/data.exf:\n'
        print 'varstartdate1 = ' + start_date.strftime('%Y%m%d')
        if six_hourly:
            print 'varperiod = ' + str(6*dt)
        else:
            print 'varperiod = ' + str(dt)
        print 'varfile = ' + 'ERA5_var'
        print 'var_lon0 = ' + str(lon[0])
        print 'var_lon_inc = ' + str(lon[1]-lon[0])
        print 'var_lat0 = ' + str(lat[0])
        print 'var_lat_inc = ' + str(lat.size-1) + '*' + str(lat[1]-lat[0])
        print 'var_nlon = ' + str(lon.size)
        print 'var_nlat = ' + str(lat.size)
        print '\n'

    # Loop over variables
    for i in range(len(var_in)):
        
        in_file = in_head + var_in[i] + in_tail
        print 'Reading ' + in_file
        data = read_netcdf(in_file, var_in[i])
        
        print 'Processing'
        # Trim and flip over latitude
        data = data[:,:j_bound:-1,:]
        
        if var_in[i] == 'msl':
            # Save pressure for later conversions
            press = np.copy(data)

        elif var_in[i] == 't2m':
            # Convert from Kelvin to Celsius
            data -= temp_C2K

        elif var_in[i] == 'd2m':
            # Calculate specific humidity from dew point temperature and pressure
            # Start with vapour pressure
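            # (integrated Clausius-Clapeyron with constant Lv: e = es0*exp(Lv/Rv*(1/T0 - 1/Td)), where T0 = temp_C2K and Td is the dew point)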
            e = es0*np.exp(Lv/Rv*(1/temp_C2K - 1/data))
            data = sh_coeff*e/(press - (1-sh_coeff)*e)
            
        elif var_in[i] in ['tp', 'ssrd', 'strd', 'e']:
            # Accumulated variables
            # This is more complicated
            
            if six_hourly:
                # Need to read data from the following hour to interpolate to this hour. This was downloaded into separate files.
                in_file_2 = in_head + var_in[i] + accum_flag + in_tail
                print 'Reading ' + in_file_2
                data_2 = read_netcdf(in_file_2, var_in[i])
                data_2 = data_2[:,:j_bound:-1,:]
            # The hourly case (not six_hourly) is dealt with after the first_year check below
            
            if first_year:
                # The first 7 hours of the accumulated variables are missing during the first year of ERA5. Fill this missing period with data from the next available time indices.
                if six_hourly:
                    # The first file is missing two indices (hours 0 and 6)
                    data = np.concatenate((data[:2,:], data), axis=0)
                    # The second file is missing one index (hour 1)
                    data_2 = np.concatenate((data_2[:1,:], data_2), axis=0)
                else:
                    # The first file is missing 7 indices (hours 0 to 6)
                    data = np.concatenate((data[:7,:], data), axis=0)
                    
            if not six_hourly:
                # Now get data from the following hour. Just shift one timestep ahead.
                # First need data from the first hour of next year
                if last_year:
                    # There is no such data; just copy the last hour of this year
                    data_next = data[-1,:]
                else:
                    in_file_2 = in_head + var_in[i] + '_' + str(year+1) + '.nc'
                    data_next = read_netcdf(in_file_2, var_in[i], time_index=0)
                    data_next = data_next[:j_bound:-1,:]  
                data_2 = np.concatenate((data[1:,:], np.expand_dims(data_next,0)), axis=0)
                
            # Now we can interpolate to the given hour: just the mean of either side
            data = 0.5*(data + data_2)
            # Convert from integrals to time-averages
            data /= dt
            if var_in[i] in ['ssrd', 'strd', 'e']:
                # Swap sign on fluxes
                data *= -1

        out_file = out_head + var_out[i] + out_tail
        write_binary(data, out_file, prec=prec)
Example No. 22
def animate_latlon_coupled (var, output_dir='./', file_name='output.nc', segment_dir=None, vmin=None, vmax=None, change_points=None, mov_name=None, fig_name_beg=None, fig_name_end=None, figsize=(8,6)):

    import matplotlib.animation as animation

    output_dir = real_dir(output_dir)
    segment_dir = check_segment_dir(output_dir, segment_dir)
    file_paths = segment_file_paths(output_dir, segment_dir, file_name)

    # Inner function to read and process data from a single file
    def read_process_data (file_path, var_name, grid, mask_option='3d', gtype='t', lev_option=None, ismr=False, psi=False):
        data = read_netcdf(file_path, var_name)
        if mask_option == '3d':
            data = mask_3d(data, grid, gtype=gtype, time_dependent=True)
        elif mask_option == 'except_ice':
            data = mask_except_ice(data, grid, gtype=gtype, time_dependent=True)
        elif mask_option == 'land':
            data = mask_land(data, grid, gtype=gtype, time_dependent=True)
        elif mask_option == 'land_ice':
            data = mask_land_ice(data, grid, gtype=gtype, time_dependent=True)
        else:
            print 'Error (read_process_data): invalid mask_option ' + mask_option
            sys.exit()
        if lev_option is not None:
            if lev_option == 'top':
                data = select_top(data)
            elif lev_option == 'bottom':
                data = select_bottom(data)
            else:
                print 'Error (read_process_data): invalid lev_option ' + lev_option
                sys.exit()
        if ismr:
            data = convert_ismr(data)
        if psi:
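            # Sum over depth and convert to Sv (1 Sv = 10^6 m^3/s), assuming PsiVEL is in m^3/s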
            data = np.sum(data, axis=-3)*1e-6
        return data

    all_data = []
    all_grids = []
    all_dates = []
    # Loop over segments
    for file_path in file_paths:
        print 'Processing ' + file_path
        # Build the grid
        grid = Grid(file_path)
        # Read and process the variable we need
        ctype = 'basic'
        gtype = 't'
        include_shelf = var not in ['aice', 'hice', 'mld']
        if var == 'ismr':
            data = read_process_data(file_path, 'SHIfwFlx', grid, mask_option='except_ice', ismr=True)
            title = 'Ice shelf melt rate (m/y)'
            ctype = 'ismr'
        elif var == 'bwtemp':
            data = read_process_data(file_path, 'THETA', grid, lev_option='bottom')
            title = 'Bottom water temperature ('+deg_string+'C)'
        elif var == 'bwsalt':
            data = read_process_data(file_path, 'SALT', grid, lev_option='bottom')
            title = 'Bottom water salinity (psu)'
        elif var == 'draft':
            data = mask_except_ice(grid.draft, grid)
            title = 'Ice shelf draft (m)'
        elif var == 'aice':
            data = read_process_data(file_path, 'SIarea', grid, mask_option='land_ice')
            title = 'Sea ice concentration'
        elif var == 'hice':
            data = read_process_data(file_path, 'SIheff', grid, mask_option='land_ice')
            title = 'Sea ice thickness (m)'
        elif var == 'mld':
            data = read_process_data(file_path, 'MXLDEPTH', grid, mask_option='land_ice')
            title = 'Mixed layer depth (m)'
        elif var == 'eta':
            data = read_process_data(file_path, 'ETAN', grid, mask_option='land')
            title = 'Free surface (m)'
        elif var == 'psi':
            data = read_process_data(file_path, 'PsiVEL', grid, psi=True)
            title = 'Vertically integrated streamfunction (Sv)'
            ctype = 'plusminus'
        else:
            print 'Error (animate_latlon_coupled): invalid var ' + var
            sys.exit()
        # Loop over timesteps
        if var == 'draft':
            # Just one timestep
            all_data.append(data)
            all_grids.append(grid)
            all_dates.append(parse_date(file_path=file_path, time_index=0))
        else:
            for t in range(data.shape[0]):
                # Extract the data from this timestep
                # Save it and the grid to the long lists
                all_data.append(data[t,:])
                all_grids.append(grid)
                all_dates.append(parse_date(file_path=file_path, time_index=t))

    extend = get_extend(vmin=vmin, vmax=vmax)
    if vmin is None:
        vmin = np.amax(data)
        for elm in all_data:
            vmin = min(vmin, np.amin(elm))
    if vmax is None:
        vmax = np.amin(data)
        for elm in all_data:
            vmax = max(vmax, np.amax(elm))

    num_frames = len(all_data)

    # Make the first and last frames as stills
    tsteps = [0, -1]
    fig_names = [fig_name_beg, fig_name_end]
    for t in range(2):
        latlon_plot(all_data[tsteps[t]], all_grids[tsteps[t]], gtype=gtype, ctype=ctype, vmin=vmin, vmax=vmax, change_points=change_points, title=title, date_string=all_dates[tsteps[t]], figsize=figsize, fig_name=fig_names[t])

    # Now make the animation

    fig, ax = plt.subplots(figsize=figsize)

    # Inner function to plot a frame
    def plot_one_frame (t):
        img = latlon_plot(all_data[t], all_grids[t], ax=ax, gtype=gtype, ctype=ctype, vmin=vmin, vmax=vmax, change_points=change_points, title=title+'\n'+all_dates[t], make_cbar=False)
        if t==0:
            return img

    # First frame
    img = plot_one_frame(0)        
    plt.colorbar(img, extend=extend)

    # Function to update figure with the given frame
    def animate(t):
        ax.cla()
        plot_one_frame(t)

    # Call this for each frame
    anim = animation.FuncAnimation(fig, func=animate, frames=range(num_frames))
    writer = animation.FFMpegWriter(bitrate=500, fps=10)
    if mov_name is not None:
        print 'Saving ' + mov_name
        anim.save(mov_name, writer=writer)
    else:
        plt.show()
Example No. 23
def sose_ics (grid_path, sose_dir, output_dir, nc_out=None, constant_t=-1.9, constant_s=34.4, split=180, prec=64):

    from grid import SOSEGrid
    from file_io import NCfile
    from interpolation import interp_reg

    sose_dir = real_dir(sose_dir)
    output_dir = real_dir(output_dir)

    # Fields to interpolate
    fields = ['THETA', 'SALT', 'SIarea', 'SIheff']
    # Flag for 2D or 3D
    dim = [3, 3, 2, 2]
    # Constant values for ice shelf cavities
    constant_value = [constant_t, constant_s, 0, 0]
    # End of filenames for input
    infile_tail = '_climatology.data'
    # End of filenames for output
    outfile_tail = '_SOSE.ini'
    
    print 'Building grids'
    # First build the model grid and check that we have the right value for split
    model_grid = grid_check_split(grid_path, split)
    # Now build the SOSE grid
    sose_grid = SOSEGrid(sose_dir+'grid/', model_grid=model_grid, split=split)
    # Extract land mask
    sose_mask = sose_grid.hfac == 0
    
    print 'Building mask for SOSE points to fill'
    # Figure out which points we need for interpolation
    # Find open cells according to the model, interpolated to SOSE grid
    model_open = np.ceil(interp_reg(model_grid, sose_grid, np.ceil(model_grid.hfac), fill_value=1))
    # Find ice shelf cavity points according to model, interpolated to SOSE grid
    model_cavity = np.ceil(interp_reg(model_grid, sose_grid, xy_to_xyz(model_grid.ice_mask, model_grid), fill_value=0)).astype(bool)
    # Select open, non-cavity cells
    fill = model_open*np.invert(model_cavity)
    # Extend into the mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, use_3d=True, num_iters=3)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyz')

    # Process fields
    for n in range(len(fields)):
        print 'Processing ' + fields[n]
        in_file = sose_dir + fields[n] + infile_tail
        out_file = output_dir + fields[n] + outfile_tail
        print '...reading ' + in_file
        # Just keep the January climatology
        if dim[n] == 3:
            sose_data = sose_grid.read_field(in_file, 'xyzt')[0,:]
        else:
            # Fill any missing regions with zero sea ice, as we won't be extrapolating them later
            sose_data = sose_grid.read_field(in_file, 'xyt', fill_value=0)[0,:]
        # Discard the land mask, and extrapolate slightly into missing regions so the interpolation doesn't get messed up.
        print '...extrapolating into missing regions'
        if dim[n] == 3:
            sose_data = discard_and_fill(sose_data, sose_mask, fill)
            # Fill cavity points with constant values
            sose_data[model_cavity] = constant_value[n]
        else:
            # Just care about surface layer
            sose_data = discard_and_fill(sose_data, sose_mask[0,:], fill[0,:], use_3d=False)
        print '...interpolating to model grid'
        data_interp = interp_reg(sose_grid, model_grid, sose_data, dim=dim[n])
        # Fill the land mask with zeros
        if dim[n] == 3:
            data_interp[model_grid.hfac==0] = 0
        else:
            data_interp[model_grid.hfac[0,:]==0] = 0
        write_binary(data_interp, out_file, prec=prec)
        if nc_out is not None:
            print '...adding to ' + nc_out
            if dim[n] == 3:
                ncfile.add_variable(fields[n], data_interp, 'xyz')
            else:
                ncfile.add_variable(fields[n], data_interp, 'xy')

    if nc_out is not None:
        ncfile.close()
Example No. 24
def process_forcing_for_correction (source, var, mit_grid_dir, out_file, in_dir=None, start_year=1979, end_year=None):

    # Set parameters based on source dataset
    if source == 'ERA5':
        if in_dir is None:
            # Path on BAS servers
            in_dir = '/data/oceans_input/processed_input_data/ERA5/'
        file_head = 'ERA5_'
        gtype = ['t', 't', 't', 't', 't']
    elif source == 'UKESM':
        if in_dir is None:
            # Path on JASMIN
            in_dir = '/badc/cmip6/data/CMIP6/CMIP/MOHC/UKESM1-0-LL/'
        expt = 'historical'
        ensemble_member = 'r1i1p1f2'
        if var == 'wind':
            var_names_in = ['uas', 'vas']
            gtype = ['u', 'v']
        elif var == 'thermo':
            var_names_in = ['tas', 'huss', 'pr', 'ssrd', 'strd']
            gtype = ['t', 't', 't', 't', 't']
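        # UKESM runs on a 360-day calendar: twelve 30-day months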
        days_per_year = 12 * 30
    elif source == 'PACE':
        if in_dir is None:
            # Path on BAS servers
            in_dir = '/data/oceans_input/processed_input_data/CESM/PACE_new/'
        file_head = 'PACE_ens'
        num_ens = 20
        missing_ens = 13
        if var == 'wind':
            var_names_in = ['UBOT', 'VBOT']
            monthly = [False, False]
        elif var == 'thermo':
            var_names_in = ['TREFHT', 'QBOT', 'PRECT', 'FSDS', 'FLDS']
            monthly = [False, False, False, True, True]
        gtype = ['t', 't', 't', 't', 't']
    else:
        print 'Error (process_forcing_for_correction): invalid source ' + source
        sys.exit()
    # Set parameters based on variable type
    if var == 'wind':
        var_names = ['uwind', 'vwind']
        units = ['m/s', 'm/s']
    elif var == 'thermo':
        var_names = ['atemp', 'aqh', 'precip', 'swdown', 'lwdown']
        units = ['degC', '1', 'm/s', 'W/m^2', 'W/m^2']
    else:
        print 'Error (process_forcing_for_correction): invalid var ' + var
        sys.exit()
    # Check end_year is defined
    if end_year is None:
        print 'Error (process_forcing_for_correction): must set end_year. Typically use 2014 for WSFRIS and 2013 for PACE.'
        sys.exit()

    mit_grid_dir = real_dir(mit_grid_dir)
    in_dir = real_dir(in_dir)

    print 'Building grids'
    if source == 'ERA5':
        forcing_grid = ERA5Grid()
    elif source == 'UKESM':
        forcing_grid = UKESMGrid()
    elif source == 'PACE':
        forcing_grid = PACEGrid()
    mit_grid = Grid(mit_grid_dir)

    ncfile = NCfile(out_file, mit_grid, 'xy')

    # Loop over variables
    for n in range(len(var_names)):
        print 'Processing variable ' + var_names[n]
        # Read the data, time-integrating as we go
        data = None
        num_time = 0

        if source == 'ERA5':
            # Loop over years
            for year in range(start_year, end_year + 1):
                file_path = in_dir + file_head + var_names[n] + '_' + str(year)
                data_tmp = read_binary(file_path, [forcing_grid.nx, forcing_grid.ny], 'xyt')
                if data is None:
                    data = np.sum(data_tmp, axis=0)
                else:
                    data += np.sum(data_tmp, axis=0)
                num_time += data_tmp.shape[0]

        elif source == 'UKESM':
            in_files, start_years, end_years = find_cmip6_files(in_dir, expt, ensemble_member, var_names_in[n], 'day')
            # Loop over each file
            for t in range(len(in_files)):
                file_path = in_files[t]
                print 'Processing ' + file_path
                print 'Covers years ' + str(start_years[t]) + ' to ' + str(end_years[t])
                # Loop over years
                t_start = 0  # Time index in file
                t_end = t_start + days_per_year
                for year in range(start_years[t], end_years[t] + 1):
                    if year >= start_year and year <= end_year:
                        print 'Processing ' + str(year)
                        # Read data
                        print 'Reading ' + str(year) + ' from indices ' + str(t_start) + '-' + str(t_end)
                        data_tmp = read_netcdf(file_path, var_names_in[n], t_start=t_start, t_end=t_end)
                        if data is None:
                            data = np.sum(data_tmp, axis=0)
                        else:
                            data += np.sum(data_tmp, axis=0)
                        num_time += days_per_year
                    # Update time range for next time
                    t_start = t_end
                    t_end = t_start + days_per_year
            if var_names[n] == 'atemp':
                # Convert from K to C
                data -= temp_C2K
            elif var_names[n] == 'precip':
                # Convert from kg/m^2/s to m/s
                data /= rho_fw
            elif var_names[n] in ['swdown', 'lwdown']:
                # Swap sign on radiation fluxes
                data *= -1

        elif source == 'PACE':
            # Loop over years
            for year in range(start_year, end_year + 1):
                # Loop over ensemble members
                data_tmp = None
                num_ens_tmp = 0
                for ens in range(1, num_ens + 1):
                    if ens == missing_ens:
                        continue
                    file_path = in_dir + file_head + str(ens).zfill(2) + '_' + var_names_in[n] + '_' + str(year)
                    data_tmp_ens = read_binary(file_path, [forcing_grid.nx, forcing_grid.ny], 'xyt')
                    if data_tmp is None:
                        data_tmp = data_tmp_ens
                    else:
                        data_tmp += data_tmp_ens
                    num_ens_tmp += 1
                # Ensemble mean for this year
                data_tmp /= num_ens_tmp
                # Now accumulate time integral
                if monthly[n]:
                    # Weighting for different number of days per month
                    for month in range(data_tmp.shape[0]):
                        # Get number of days per month with no leap years
                        ndays = days_per_month(month + 1, 1979)
                        data_tmp[month, :] *= ndays
                        num_time += ndays
                else:
                    num_time += data_tmp.shape[0]
                if data is None:
                    data = np.sum(data_tmp, axis=0)
                else:
                    data += np.sum(data_tmp, axis=0)

        # Now convert from time-integral to time-average
        data /= num_time

        forcing_lon, forcing_lat = forcing_grid.get_lon_lat(gtype=gtype[n], dim=1)
        # Get longitude in the range -180 to 180, then split and rearrange so it's monotonically increasing
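        # (e.g. a 1-degree axis [0, 1, ..., 359] becomes [0, ..., 179, -180, ..., -1] after fix_lon_range, and splitting at the first negative value rearranges it to [-180, ..., 179])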
        forcing_lon = fix_lon_range(forcing_lon)
        i_split = np.nonzero(forcing_lon < 0)[0][0]
        forcing_lon = split_longitude(forcing_lon, i_split)
        data = split_longitude(data, i_split)
        # Now interpolate to MITgcm tracer grid
        mit_lon, mit_lat = mit_grid.get_lon_lat(gtype='t', dim=1)
        print 'Interpolating'
        data_interp = interp_reg_xy(forcing_lon, forcing_lat, data, mit_lon, mit_lat)
        print 'Saving to ' + out_file
        ncfile.add_variable(var_names[n], data_interp, 'xy', units=units[n])

    ncfile.close()