Example #1
def main():

    proc = psutil.Process()

    # Define data directory
    datadir = os.environ.get('UFSOUTPUT_DIR')
    postdir = os.environ.get('UFSPOST_DIR')
    gridfile = os.environ.get('MOM6_GRID_INFO')
    # Open the MOM6 static grid file; parse_mom6static() below expects a dataset
    dsmgrid = xr.open_dataset(gridfile)
    flist = get_filelist(datadir, "*mom6.r*")
    rlist = get_fileroot(flist)
    dsets = open_filelist(flist, __file__)
    print(flist)

    # Get target grid
    ds_out = xr.open_dataset(os.environ.get('MOM6_TARGET_GRID'))

    # Get various coordinate combinations
    xhyh, xhyq, xqyh, xqyq = parse_mom6static(dsmgrid)
    print(xhyh, xhyq, xqyh, xqyq)

    # Create regridders
    xhyhregridder = create_regridder(xhyh, ds_out, "xhyh")
    xhyqregridder = create_regridder(xhyq, ds_out, "xhyq")
    xqyhregridder = create_regridder(xqyh, ds_out, "xqyh")
    xqyqregridder = create_regridder(xqyq, ds_out, "xqyq")

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(flist[i])
        #        print(rlist[i])
        vlist = list(dsin.data_vars.keys())
        print(vlist)
        glist = {vname: parse_mom6rgrid(dsin, vname) for vname in vlist}
        print(glist)

        # Create list of different variable types
        xhyhlist = [
            vname for vname in vlist
            if "lonh" in glist[vname] and "lath" in glist[vname]
        ]
        xqyhlist = [
            vname for vname in vlist
            if "lonq" in glist[vname] and "lath" in glist[vname]
        ]
        xhyqlist = [
            vname for vname in vlist
            if "lonh" in glist[vname] and "latq" in glist[vname]
        ]
        xqyqlist = [
            vname for vname in vlist
            if "lonq" in glist[vname] and "latq" in glist[vname]
        ]
        olist = [vname for vname in vlist if "NA" in glist[vname]]
        print("xhyh list is:", xhyhlist)
        print("xhyq list is:", xhyqlist)
        print("xqyh list is:", xqyhlist)
        print("xqyq list is:", xqyqlist)
        print("Non-grid vars:", olist)

        # Regrid each variable type separately
        rgflag = True
        xhyhreg = [xhyhregridder(dsin[vname]) for vname in xhyhlist]
        xhyqreg = [xhyqregridder(dsin[vname]) for vname in xhyqlist]
        xqyhreg = [xqyhregridder(dsin[vname]) for vname in xqyhlist]
        xqyqreg = [xqyqregridder(dsin[vname]) for vname in xqyqlist]
        ngvars = [dsin[vname] for vname in olist]

        # Merge variables in single xarray object
        dout = xr.merge(xhyhreg + xhyqreg + xqyhreg + xqyqreg + ngvars)

        dout.attrs['Conventions'] = "CF-1.7"
        print(dout)

        # Set data and metadata for output grid
        x = dout['lon'][0, :]
        y = dout['lat'][:, 0]

        dout['y'] = y
        dout['y'].attrs['long_name'] = "latitude"
        dout['y'].attrs['units'] = "degrees_N"
        dout['y'].attrs['cartesian_axis'] = "Y"
        dout['x'] = x
        dout['x'].attrs['long_name'] = "longitude"
        dout['x'].attrs['units'] = "degrees_E"
        dout['x'].attrs['cartesian_axis'] = "X"

        # Copy over variable attributes

        dout = copyattrs(dsin, dout, xhyhlist)
        dout = copyattrs(dsin, dout, xhyqlist)
        dout = copyattrs(dsin, dout, xqyhlist)
        dout = copyattrs(dsin, dout, xqyqlist)
        dout = copyattrs(dsin, dout, olist)

        # Write output
        if not os.path.exists(postdir):
            os.makedirs(postdir)
        dout.to_netcdf(postdir + "/" + rlist[i] + "_regrid_0p25x0p25.nc")

        # Delete xarray objects to avoid memory leak?
        print('memory=', proc.memory_info().rss)
        del dout
        print('memory=', proc.memory_info().rss)
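
# Note: create_regridder() is not defined in these listings. The sketch below
# is only an illustration of what such a helper might look like, assuming it
# wraps xesmf.Regridder with bilinear interpolation (periodic in longitude, as
# in Example #6) and, when a weights directory is given, caches and reuses the
# ESMF weight file under a name derived from the label. The signature and the
# weight-file naming are assumptions, not the original code.
import os

import xesmf as xe


def create_regridder(grid_in, grid_out, label, weightdir=None,
                     method='bilinear'):
    """Build an xESMF regridder, optionally reusing a cached weight file."""
    if weightdir is None:
        return xe.Regridder(grid_in, grid_out, method, periodic=True)
    wfile = os.path.join(weightdir, label + "_" + method + "_weights.nc")
    return xe.Regridder(grid_in, grid_out, method, periodic=True,
                        filename=wfile, reuse_weights=os.path.exists(wfile))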
Example #2
def main():

    # Define some constants
    # Number of input ice layers
    nilyr1 = 3

    # Number of CICE5 ice layers
    nilyr2 = 7

    # Number of snow layers in each ice category
    nslyr = 1

    # Number of ice categories in CICE5
    ncat = 5

    # Missing value
    missing = 9.96920996838687e+36

    # Category boundaries
    c1 = 0.6445
    c2 = 1.3914
    c3 = 2.4702
    c4 = 4.5673
    cvals = [c1, c2, c3, c4]

    # Salinity profile constants
    saltmax = 3.2  # Maximum salinity at ice base
    nsal = 0.407  # Profile constant
    msal = 0.573  # Profile constant

    # Density values
    rhoi = 917.0  # Density of ice
    rhos = 330.0  # Density of snow
    cp_ice = 2106.0  # Specific heat of fresh ice
    cp_ocn = 4218.0  # Specific heat of sea water
    Lfresh = 3.34e5  # Latent heat of melting fresh ice

    # Define intervals for interpolation to CICE5
    rstart = 0.5 * (1 / nilyr2)
    rend = 1 - rstart
    tlevs = np.linspace(rstart, rend, nilyr2)
    nilyr = tlevs

    # Define data directory
    print("Get environment variables.")
    datadir = os.environ.get('UFSDATA_DIR')
    postdir = os.environ.get('UFSPOST_DIR')
    ora_file = os.environ.get('ORAS5_FILE')
    print("End get environment variables.")

    # Get list of files to process
    print("Begin open files.")
    flist = get_filelist(datadir, "*" + ora_file + ".nc")
    rlist = get_fileroot(flist)
    dsets = open_filelist(flist, __file__)
    print("End open files.")
    print(flist)
    print(dsets)

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(flist[i])

        # Rename input variables for consistency
        part_size = 1. - dsin.frld.squeeze()
        h_ice = dsin.hicif.squeeze()
        h_sno = dsin.hsnif.squeeze()
        t_surf = dsin.sist.squeeze()
        t1 = dsin.tbif1.squeeze()
        t2 = dsin.tbif2.squeeze()
        t3 = dsin.tbif3.squeeze()
        #
        part_size.name = "part_size"
        h_ice.name = "h_ice"
        h_sno.name = "h_sno"
        t_surf.name = "t_surf"
        t1.name = "t1"
        t2.name = "t2"
        t3.name = "t3"

        # Get dimensions of input data
        ndims1 = part_size.shape
        nj = part_size.nj.size
        ni = part_size.ni.size

        # Initialize DataArrays
        dummy = xr.DataArray(np.zeros((nj, ni), dtype=np.double),
                             coords={
                                 'nj': part_size.nj,
                                 "ni": part_size.ni
                             },
                             dims=['nj', 'ni'],
                             name="dummy var")
        iceumask = xr.DataArray(np.full((nj, ni), 1, dtype=np.double),
                                coords={
                                    'nj': part_size.nj,
                                    "ni": part_size.ni
                                },
                                dims=['nj', 'ni'],
                                name="iceumask")
        aicen = xr.DataArray(np.zeros((ncat, nj, ni), dtype=np.double),
                             coords={
                                 'ncat': range(0, ncat),
                                 'nj': part_size.nj,
                                 "ni": part_size.ni
                             },
                             dims=['ncat', 'nj', 'ni'],
                             name="aicen")
        vicen = xr.DataArray(np.zeros((ncat, nj, ni), dtype=np.double),
                             coords={
                                 'ncat': range(0, ncat),
                                 'nj': part_size.nj,
                                 "ni": part_size.ni
                             },
                             dims=['ncat', 'nj', 'ni'],
                             name="vicen")
        vsnon = xr.DataArray(np.zeros((ncat, nj, ni), dtype=np.double),
                             coords={
                                 'ncat': range(0, ncat),
                                 'nj': part_size.nj,
                                 "ni": part_size.ni
                             },
                             dims=['ncat', 'nj', 'ni'],
                             name="vsnon")
        Tsfcn = xr.DataArray(np.zeros((ncat, nj, ni), dtype=np.double),
                             coords={
                                 'ncat': range(0, ncat),
                                 'nj': part_size.nj,
                                 "ni": part_size.ni
                             },
                             dims=['ncat', 'nj', 'ni'],
                             name="Tsfcn")
        tice = xr.DataArray(np.zeros((nilyr1, ncat, nj, ni), dtype=np.double),
                            coords={
                                'nilyr': np.linspace(0, 1, 3),
                                'ncat': range(0, ncat),
                                'nj': part_size.nj,
                                "ni": part_size.ni
                            },
                            dims=['nilyr', 'ncat', 'nj', 'ni'],
                            name="tice")
        Tin = xr.DataArray(np.zeros((nilyr2, ncat, nj, ni), dtype=np.double),
                           coords={
                               'nilyr': tlevs,
                               'ncat': range(0, ncat),
                               'nj': part_size.nj,
                               "ni": part_size.ni
                           },
                           dims=['nilyr', 'ncat', 'nj', 'ni'],
                           name="Tin")
        sice = xr.DataArray(np.zeros((nilyr2, ncat, nj, ni), dtype=np.double),
                            coords={
                                'nilyr': tlevs,
                                'ncat': range(0, ncat),
                                'nj': part_size.nj,
                                "ni": part_size.ni
                            },
                            dims=['nilyr', 'ncat', 'nj', 'ni'],
                            name="sice")
        qice = xr.DataArray(np.zeros((nilyr2, ncat, nj, ni), dtype=np.double),
                            coords={
                                'nilyr': tlevs,
                                'ncat': range(0, ncat),
                                'nj': part_size.nj,
                                "ni": part_size.ni
                            },
                            dims=['nilyr', 'ncat', 'nj', 'ni'],
                            name="qice")
        qsno = xr.DataArray(np.zeros((nslyr, ncat, nj, ni), dtype=np.double),
                            coords={
                                'nslyr': range(nslyr),
                                'ncat': range(0, ncat),
                                'nj': part_size.nj,
                                "ni": part_size.ni
                            },
                            dims=['nslyr', 'ncat', 'nj', 'ni'],
                            name="qsno")
        print(vicen.shape)
        print(h_ice.shape)

        # Set ice fraction to zero where the ice thickness is missing or zero
        ice_frac = part_size.where(h_ice > 0., other=0)
        ice_frac.name = "ice_frac"

        # Calculate ice fraction per category
        aicen = ice_category_brdcst(ice_frac, h_ice, cvals)
        aicen.name = 'aicen'

        # Restore missing metadata
        aicen['nj'] = part_size.nj
        aicen['ni'] = part_size.ni
        aicen['ncat'] = range(0, ncat)

        # Calculate ice mask
        iceumask = iceumask.where(aicen.sum(dim='ncat') > 1e-11, other=0)

        # Calculate ice volume per category
        for k in range(ncat):
            vicen[k, :, :] = h_ice.where(h_ice > 0, other=0) * aicen[k, :, :]
        vicen.name = "vicen"

        # Calculate snow volume per category
        for k in range(ncat):
            vsnon[k, :, :] = h_sno.where(h_sno > 0, other=0) * aicen[k, :, :]
        vsnon.name = "vsnon"

        # Calculate surface temperature per category
        # The missing value for t_surf is 0 K; replace it with 273.15 K so the
        # conversion to Celsius below does not produce large negative values
        t_surf = t_surf.where(t_surf != 0, other=273.15)
        Tsfcn = ice_category_brdcst(t_surf - 273.15, h_ice, cvals)
        Tsfcn = Tsfcn.where(Tsfcn < 0, other=0)
        Tsfcn.name = "Tsfcn"

        # Calculate ice layer temperature per category and combine
        tice[0, :, :, :] = ice_category_brdcst(
            t1.where(t1 != 0, other=273.15) - 273.15, h_ice, cvals)
        tice[1, :, :, :] = ice_category_brdcst(
            t2.where(t2 != 0, other=273.15) - 273.15, h_ice, cvals)
        tice[2, :, :, :] = ice_category_brdcst(
            t3.where(t3 != 0, other=273.15) - 273.15, h_ice, cvals)

        # Linearly interpolate from ORAS5 layers to CICE5
        Tin = tice.interp(nilyr=tlevs)
        Tin.name = "Tin"
        Tin = Tin.where(Tin < 0, other=0)
        print(Tin)

        # Create salinity profile
        zn = np.asarray([(k + 1 - 0.5) / nilyr2 for k in range(nilyr2)])
        print((np.pi * zn**(nsal / (msal + zn))))
        salinz = 0.5 * saltmax * (1 - np.cos(np.pi * zn**(nsal / (msal + zn))))
        print(salinz)
        for k in range(nilyr2):
            sice[k, :, :, :] = salinz[k]

        # Determine freezing point depression
        Tmltz = salinz / (-18.48 + (0.01848 * salinz))

        # Calculate ice layer enthalpy
        # Don't allow ice temperature to exceed melting temperature
        for k in range(nilyr2):
            Tin[k, :, :, :] = Tin[k, :, :, :].where(Tin[k, :, :, :] < Tmltz[k],
                                                    other=Tmltz[k])
            qice[k, :, :, :] = rhoi * cp_ice * Tin[k, :, :, :] - rhoi * Lfresh
            qice[k, :, :, :] = qice[k, :, :, :].where(vicen > 0, other=0)


        # Calculate snow layer enthalpy
        qsno[0, :, :, :] = -rhos * (Lfresh - cp_ice * Tsfcn)
        qsno[0, :, :, :] = qsno[0, :, :, :].where(vsnon > 0,
                                                  other=-rhos * Lfresh)
        Trecon = (Lfresh + qsno / rhos) / cp_ice
        Trecon = Trecon.where(vsnon > 0, 0)
        Trecon.name = 'Trecon'
        Trecon.to_netcdf("test.nc")

        # Write output in expected format
        qsno.to_netcdf("qsno_test.nc")

        # Create list of variables initialized to zero
        dlist = [
            'uvel', 'vvel', 'scale_factor', 'coszen', 'swvdr', 'swvdf',
            'swidr', 'swidf', 'strocnxT', 'strocnyT', 'stressp_1', 'stressp_2',
            'stressp_3', 'stressp_4', 'stressm_1', 'stressm_2', 'stressm_3',
            'stressm_4', 'stress12_1', 'stress12_2', 'stress12_3',
            'stress12_4', 'frz_onset'
        ]
        dout = xr.merge([aicen, vicen, vsnon, Tsfcn, iceumask, Tin])
        dout['qsno001'] = qsno[0, :, :, :].squeeze()
        print(np.linspace(0, 4, 1))
        dout['ncat'] = np.linspace(0, 4, 5)
        dout['nilyr'] = tlevs
        for vname in dlist:
            dout[vname] = dummy
        for k in range(nilyr2):
            dout['qice00' + str(k + 1)] = qice[k, :, :, :]
            dout['sice00' + str(k + 1)] = sice[k, :, :, :]
        dout = dout.fillna(missing)
        for vname in dout.data_vars:
            dout[vname].encoding['_FillValue'] = missing
        dout.to_netcdf("cice_20120101_test.qsno.nc", format='NETCDF3_CLASSIC')
Example #3
def main():

    # Define data directory
    print("Get environment variables.")
    datadir = os.environ.get('UFSDATA_DIR')
    postdir = os.environ.get('UFSPOST_DIR')
    regriddir = os.environ.get('REGRID_DIR')
    cice_grid = os.environ.get('CICE5_GRID')
    ora_grid = os.environ.get('ORAS5_GRID')
    print("End get environment variables.")

    # Get grid information
    print("Begin open files.")
    flist = get_filelist(datadir, "*restart_ice.nc")
    rlist = get_fileroot(flist)
    dsets = open_filelist(flist, __file__)
    dscice = xr.open_dataset(regriddir + cice_grid + ".nc")
    dsora = xr.open_dataset(regriddir + ora_grid + ".nc")
    print("End open files.")

    # Define ORAS5 grid
    print("Begin define grids.")
    txy = {
        'lat': dsora['nav_lat'].squeeze(),
        'lon': dsora['nav_lon'].squeeze()
    }

    # Define MOM6 tracer grid
    cicexy = {'lon': dscice['TLON'], 'lat': dscice['TLAT']}
    print("End define grids.")

    # Create regridders
    print("Begin regridder creation.")
    tregrid = create_regridder(txy, cicexy, 'ORAS5ice.to.' + cice_grid,
                               regriddir)
    print("End regridder creation.")

    # Set variable list
    tlist = ['frld', 'hicif', 'hsnif', 'sist', 'tbif1', 'tbif2', 'tbif3']

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(flist[i])

        # Regrid each variable type separately to the latlon grid
        print("Begin regridding.")
        tvars = [tregrid(dsin[vname]) for vname in tlist]
        print("End regridding.")

        # Merge variables in single xarray object
        dout = xr.merge(tvars)

        # Add CF convention information
        dout.attrs['Conventions'] = "CF-1.7"

        # Fix NaN value for temperature
        #dout['tmp'].attrs['_FillValue']=0.
        # Write output
        if not os.path.exists(postdir):
            os.makedirs(postdir)
        dout.to_netcdf(postdir + "/" + rlist[i] + "_regrid_" + cice_grid +
                       ".nc")

        # Delete xarray objects to avoid memory leak?
        del dout
Example #4
def main():

    # Define data directory
    print("Get environment variables.")
    datadir = os.environ.get('UFSDATA_DIR')
    postdir = os.environ.get('UFSPOST_DIR')
    regriddir = os.environ.get('REGRID_DIR')
    mom_grid = os.environ.get('MOM6_GRID')
    ora_grid = os.environ.get('ORAS5_GRID')
    print("End get environment variables.")

    # Get grid information
    print("Begin open files.")
    flist = get_filelist(datadir, "*restart.nc")
    rlist = get_fileroot(flist)
    dsets = open_filelist(flist, __file__)
    dsmom = xr.open_dataset(regriddir + mom_grid + ".nc")
    dsora = xr.open_dataset(regriddir + ora_grid + ".nc")
    print("End open files.")

    # Define ORAS5 grid
    print("Begin define grids.")
    txy = {'lat': dsora['gphit'].squeeze(), 'lon': dsora['glamt'].squeeze()}

    # Define MOM6 tracer grid
    momxy = {'lon': dsmom['geolon'], 'lat': dsmom['geolat']}
    print("End define grids.")

    # Create regridders
    print("Begin regridder creation.")
    tregrid = create_regridder(txy, momxy, 'ORAS5T.to.' + mom_grid, regriddir)
    print("End regridder creation.")

    # Set variable list
    tlist = ['tn', 'sn']

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(flist[i])

        # Regrid each variable type separately to the latlon grid
        print("Begin regridding.")
        tvars = [tregrid(dsin[vname]) for vname in tlist]
        print("End regridding.")

        # Merge variables in single xarray object
        dout = xr.merge(tvars)

        dout.attrs['Conventions'] = "CF-1.7"

        # Set data and metadata for output grid

        dout['geolat'] = dsmom['geolat']
        dout['geolon'] = dsmom['geolon']

        # Fix NaN value for temperature
        #dout['tmp'].attrs['_FillValue']=0.
        # Write output
        if not os.path.exists(postdir):
            os.makedirs(postdir)
        dout.to_netcdf(postdir + "/" + rlist[i] + "_regrid_0p25x0p25.nc")

        # Delete xarray objects to avoid memory leak?
        del dout
Example #5
def main():

    # Define data directory
    print("Get environment variables.")
    datadir = os.environ.get('UFSDATA_DIR')
    postdir = os.environ.get('UFSPOST_DIR')
    regriddir = os.environ.get('REGRID_DIR')
    ll_grid = os.environ.get('LL_GRID')
    ora_grid = os.environ.get('ORAS5_GRID')
    print("End get environment variables.")

    # Get grid information
    print("Begin open files.")
    flist = get_filelist(datadir, "*restart.nc")
    rlist = get_fileroot(flist)
    dsets = open_filelist(flist, __file__)
    dsll = xr.open_dataset(regriddir + ll_grid + ".nc")
    dsora = xr.open_dataset(regriddir + ora_grid)
    print("End open files.")

    # Define ORAS5 grids
    print("Begin define grids.")
    txy = {'lat': dsora['gphit'].squeeze(), 'lon': dsora['glamt'].squeeze()}
    uxy = {'lat': dsora['gphiu'].squeeze(), 'lon': dsora['glamu'].squeeze()}
    vxy = {'lat': dsora['gphiv'].squeeze(), 'lon': dsora['glamv'].squeeze()}
    fxy = {'lat': dsora['gphif'].squeeze(), 'lon': dsora['glamf'].squeeze()}

    # Define latlon grid
    llxy = {'lon': dsll['lon'], 'lat': dsll['lat']}
    print("End define grids.")

    # Create regridders
    print("Begin regridder creation.")
    tregrid = create_regridder(txy, llxy, 'ORAS5T.to.' + ll_grid, regriddir)
    uregrid = create_regridder(uxy, llxy, 'ORAS5U.to.' + ll_grid, regriddir)
    vregrid = create_regridder(vxy, llxy, 'ORAS5V.to.' + ll_grid, regriddir)
    fregrid = create_regridder(fxy, llxy, 'ORAS5F.to.' + ll_grid, regriddir)
    print("End regridder creation.")

    # Get angles between tripole and N-S grids
    print("Begin read grid angles.")
    cosu = dsora['cosu']
    sinu = dsora['sinu']
    cost = dsora['cost']
    sint = dsora['sint']
    cosv = dsora['cosv']
    sinv = dsora['sinv']
    cosf = dsora['cosf']
    sinf = dsora['sinf']
    print("End read grid angles.")

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(flist[i])

        # Create list of variable pairs
        vpair = ['un', 'vn']
        tlist = ['tn', 'sn']

        # Rotate vector pairs
        print("Begin vector rotation.")
        print(dsin[vpair[0]], dsin[vpair[1]])
        xrot, yrot = rotate_vector_oras5(dsin[vpair[0]], dsin[vpair[1]],
                                         "tripole", sinu, cosu)
        xrot.name = vpair[0]
        yrot.name = vpair[1]
        print("End vector rotation.")

        # Regrid each variable type separately to the latlon grid
        print("Begin regridding.")
        tvars = [tregrid(dsin[vname]) for vname in tlist]
        uvars = uregrid(xrot)
        vvars = uregrid(yrot)
        print("End regridding.")

        # Merge variables in single xarray object
        # (uvars and vvars are DataArrays, so wrap them in a list for xr.merge)
        dout = xr.merge(tvars + [uvars, vvars])

        dout.attrs['Conventions'] = "CF-1.7"

        # Set data and metadata for output grid
        x = llxy['lon'][0, :]
        y = llxy['lat'][:, 0]

        dout['y'] = y
        dout['y'].attrs['long_name'] = "latitude"
        dout['y'].attrs['units'] = "degrees_N"
        dout['y'].attrs['cartesian_axis'] = "Y"
        dout['x'] = x
        dout['x'].attrs['long_name'] = "longitude"
        dout['x'].attrs['units'] = "degrees_E"
        dout['x'].attrs['cartesian_axis'] = "X"

        # Fix NaN value for temperature
        #dout['tmp'].attrs['_FillValue']=0.
        # Write output
        if not os.path.exists(postdir):
            os.makedirs(postdir)
        dout.to_netcdf(postdir + "/" + rlist[i] + "_regrid_0p25x0p25.nc")

        # Delete xarray objects to avoid memory leak?
        del dout
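
# Note: rotate_vector_oras5() is not shown in these listings (nor are the
# rotate_vector_mom6 / rotate_vector_cice5 variants used elsewhere, which take
# the grid angle in slightly different forms). A minimal sketch, assuming the
# string argument names the orientation of the input components and that a
# standard planar rotation by the grid angle is applied; the sign convention
# here is an assumption and should be checked against the real helper.
def rotate_vector_oras5(u, v, src, sin_ang, cos_ang):
    """Rotate a vector pair between grid-relative and geographic orientations."""
    if src == "tripole":
        # grid-relative components -> geographic east/north components
        xrot = u * cos_ang - v * sin_ang
        yrot = u * sin_ang + v * cos_ang
    else:
        # geographic east/north components -> grid-relative components
        xrot = u * cos_ang + v * sin_ang
        yrot = -u * sin_ang + v * cos_ang
    return xrot, yrot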
Example #6
def main():

    # Set model output and postprocessing directories
    datadir = os.environ.get('UFSOUTPUT_DIR')
    postdir = os.environ.get('UFSPOST_DIR')

    # Read in target grid
    ds_out = xr.open_dataset(os.environ.get('DYNF_TARGET_GRID'))

    # Get list of files to process
    flist = get_filelist(datadir, "dynf???.nc")

    # NetCDF output from the model has poorly defined dimensions that can
    # cause an xarray error.
    check_xrgrid(flist)

    # Get list of file roots for naming output file later
    rlist = get_fileroot(flist)

    # Open the files as xarray datasets
    dsets = open_filelist(flist, __file__)

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(dsin)
        print(rlist[i])
        vlist = list(dsin.data_vars.keys())

        # Drop grid_xt and grid_yt from the list of variables to process
        vlist.remove("grid_xt")
        vlist.remove("grid_yt")

        # Rename grid_yt->lat and grid_xt->lon for xESMF regridding
        ds = dsin.rename({'grid_xt': 'lon', 'grid_yt': 'lat'})

        # Convert lat-lon from radians to degrees
        ds['lon'] = ds['lon'] * 180 / pi
        ds['lat'] = ds['lat'] * 180 / pi

        # Read in list of variables to process
        din = [ds[vname] for vname in vlist]

        # Generate weights for regridding. Only needs to be done once.
        if i == 0:
            regridder = xe.Regridder(ds,
                                     ds_out,
                                     'bilinear',
                                     reuse_weights=True,
                                     periodic=True)

        # Regrid over variable list
        rgrd = [regridder(dx) for dx in din]

        # Merge regridded variables into single dataset
        dout = xr.merge(rgrd)

        # Add global attribute for CF compliance
        dout.attrs['Conventions'] = "CF-1.7"

        # Set data and metadata for output grid
        x = dout['lon'][0, :]
        y = dout['lat'][:, 0]

        dout['y'] = y
        dout['y'].attrs['long_name'] = "latitude"
        dout['y'].attrs['units'] = "degrees_N"
        dout['y'].attrs['cartesian_axis'] = "Y"
        dout['x'] = x
        dout['x'].attrs['long_name'] = "longitude"
        dout['x'].attrs['units'] = "degrees_E"
        dout['x'].attrs['cartesian_axis'] = "X"
        dout['lat'].attrs = dsin['grid_yt'].attrs
        dout['lon'].attrs = dsin['grid_xt'].attrs

        # Copy over variable attributes
        for vname in vlist:
            dout[vname].attrs = ds[vname].attrs
            dout[vname].encoding['missing_value'] = ds[vname].encoding[
                'missing_value']
            dout[vname].encoding['_FillValue'] = ds[vname].encoding[
                '_FillValue']


        # Set unlimited dimension
        dout.encoding['unlimited_dims'] = "time"

        # Write output
        if not os.path.exists(postdir):
            os.makedirs(postdir)
        dout.to_netcdf(postdir + "/" + rlist[i] + "_regrid_0p25x0p25.nc")
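
# Note: get_filelist(), get_fileroot() and open_filelist() are not defined in
# these listings. A minimal sketch of what such helpers might look like,
# assuming glob matching, basename/extension stripping, and one xarray dataset
# per file; the second argument of open_filelist (__file__ in the callers) is
# assumed to be used only for logging.
import glob
import os

import xarray as xr


def get_filelist(datadir, pattern):
    """Return a sorted list of files in datadir matching a glob pattern."""
    return sorted(glob.glob(os.path.join(datadir, pattern)))


def get_fileroot(flist):
    """Return the file names with directory and extension stripped."""
    return [os.path.splitext(os.path.basename(f))[0] for f in flist]


def open_filelist(flist, caller):
    """Open each file as an xarray dataset; caller is only echoed for logging."""
    print("Opening files for", caller)
    return [xr.open_dataset(f) for f in flist]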
Example #7
def main():

    # Define data directory
    datadir = os.environ.get('INPUT_DIR')
    postdir = os.environ.get('INPUT_DIR')
    regriddir = os.environ.get('REGRID_DIR')
    gspec = os.environ.get('MOM6_TARGET_GRID')
    flist = get_filelist(datadir, "oras5_MOM6_IC_UV_v1.nc")
    dsets = open_filelist(flist, __file__)
    uname = 'u'
    vname = 'v'
    vlist = ['u', 'v']

    # Get target grid
    ds_stat = xr.open_dataset(regriddir + gspec)

    # Get various coordinate combinations
    xhyh, xhyq, xqyh, xqyq = parse_mom6static(ds_stat)
    print(xhyh, xhyq, xqyh, xqyq)

    # Create input latlon grid
    llxy = {'lon': dsets[0]['xt_ocean'], 'lat': dsets[0]['yt_ocean']}

    # Create regridders
    # ORAS5 latlon to MOM6 Ct grid
    xhyhregrid = create_regridder(llxy, xhyh, "0p25x0p25.to.xhyh", regriddir)

    # MOM6 Ct grid to U
    xhyh_to_xqyh = create_regridder(xhyh, xqyh, "xhyh.to.xqyh", regriddir)

    # MOM6 Ct grid to V
    xhyh_to_xhyq = create_regridder(xhyh, xhyq, "xhyh.to.xhyq", regriddir)

    # Get angles between MOM6 tripole and N-S grids
    sin_rot = ds_stat['sin_rot']
    cos_rot = ds_stat['cos_rot']

    # Regrid from LL to xhyh grid where rotation angle is defined
    #plt.contourf(usets[0][uname][0,0,:,:])
    #plt.colorbar()
    #plt.show()

    uct = xhyhregrid(dsets[0][uname])
    vct = xhyhregrid(dsets[0][vname])
    #plt.contourf(uct[0,0,:,:])
    #plt.colorbar()
    #plt.show()

    # Rotate vector pairs
    xrot, yrot = rotate_vector_mom6(uct, vct, "latlon", sin_rot, cos_rot)
    xrot.name = uname
    yrot.name = vname

    # Regrid from Ct to staggered U and V
    xout = xhyh_to_xqyh(xrot)
    yout = xhyh_to_xhyq(yrot)

    # Merge variables in single xarray object
    dout = xout.to_dataset(name=uname)
    dout[vname] = yout

    dout.attrs['Conventions'] = "CF-1.7"

    # Set data and metadata for output grid
    x = ds_stat['geolon']
    y = ds_stat['geolat']

    dout['y'] = y
    dout['y'].attrs['long_name'] = "latitude"
    dout['y'].attrs['units'] = "degrees_N"
    dout['y'].attrs['cartesian_axis'] = "Y"
    dout['x'] = x
    dout['x'].attrs['long_name'] = "longitude"
    dout['x'].attrs['units'] = "degrees_E"
    dout['x'].attrs['cartesian_axis'] = "X"

    # Copy over variable attributes

    #dout = copyattrs(usets[0], dout, ulist)
    #dout = copyattrs(vsets[0], dout, vlist)

    # Write output
    if not os.path.exists(postdir):
        os.makedirs(postdir)
    dout.to_netcdf(postdir + "/ORAS5_" + uname + "_" + vname +
                   "_to_MOM6_tripolar.nc")

    # Delete xarray objects to avoid memory leak?
    del dout
Example #8
def main():

    # Define data directory
    datadir = os.environ.get('UFSOUTPUT_DIR')
    postdir = os.environ.get('UFSPOST_DIR')
    #flist = get_filelist(datadir,"*cice*nc")
    flist = get_filelist(
        datadir,
        "ufs.s2s.C384_t025.20120701.cmeps_v0.5.1.cice.h2_06h.2012-07-01-21600.nc"
    )
    rlist = get_fileroot(flist)
    dsets = open_filelist(flist, __file__)

    # Get target grid
    ds_out = xr.open_dataset(os.environ.get('CICE_TARGET_GRID'))

    # Create T-grid and U-grid coordinate dictionaries
    txy = {'lon': dsets[0]['TLON'], 'lat': dsets[0]['TLAT']}
    uxy = {'lon': dsets[0]['ULON'], 'lat': dsets[0]['ULAT']}

    # Create regridders
    tregrid = create_regridder(txy, ds_out, 'TGRID')
    uregrid = create_regridder(uxy, ds_out, 'UGRID')

    # Get angle between tripole and N-S grids
    angle = dsets[0]['ANGLE']
    anglet = dsets[0]['ANGLET']

    # Define pole types
    #    poles = {}
    #    poles['ds_in'] = np.array([ESMF.PoleKind.MONOPOLE, ESMF.PoleKind.MONOPOLE], np.int32)
    #    poles['ds_out'] = np.array([ESMF.PoleKind.MONOPOLE, ESMF.PoleKind.MONOPOLE], np.int32)

    # Iterate over open files
    for i, dsin in enumerate(dsets):
        print(flist[i])
        vlist = list(dsin.data_vars.keys())
        glist = {vname: parse_utgrid(dsin, vname) for vname in vlist}

        # Create list of different variable types
        tlist = [vname for vname in vlist if "TLAT" in glist[vname]]
        ulist = [vname for vname in vlist if "ULAT" in glist[vname]]
        olist = [vname for vname in vlist if "NA" in glist[vname]]
        print(ulist)

        # Test rotation
        uvel_h_rot, vvel_h_rot = rotate_vector_cice5(dsin['uvel_h'],
                                                     dsin['vvel_h'], "tripole",
                                                     angle)
        utest = uregrid(uvel_h_rot)
        vtest = uregrid(vvel_h_rot)
        print(utest.shape)
        #plt.contourf(utest[0,:,:])
        #plt.show()
        # Regrid each variable type separately
        #tvars = [esmf_regrid(dsin[vname],txy,ds_out,poles,vname) for vname in tlist]
        #uvars = [esmf_regrid(dsin[vname],uxy,ds_out,poles,vname) for vname in ulist]
        tvars = [tregrid(dsin[vname]) for vname in tlist]
        uvars = [uregrid(dsin[vname]) for vname in ulist]
        ngvars = [dsin[vname] for vname in olist]

        # Merge variables in single xarray object
        # (utest and vtest are DataArrays, so wrap them in a list for xr.merge)
        dout = xr.merge(tvars + uvars + ngvars + [utest, vtest])

        dout.attrs['Conventions'] = "CF-1.7"

        # Set data and metadata for output grid
        x = dout['lon'][0, :]
        y = dout['lat'][:, 0]

        dout['y'] = y
        dout['y'].attrs['long_name'] = "latitude"
        dout['y'].attrs['units'] = "degrees_N"
        dout['y'].attrs['cartesian_axis'] = "Y"
        dout['x'] = x
        dout['x'].attrs['long_name'] = "longitude"
        dout['x'].attrs['units'] = "degrees_E"
        dout['x'].attrs['cartesian_axis'] = "X"

        # Copy over variable attributes

        dout = copyattrs(dsin, dout, tlist)
        dout = copyattrs(dsin, dout, ulist)
        dout = copyattrs(dsin, dout, olist)

        # Fix NaN value for temperature
        #dout['tmp'].attrs['_FillValue']=0.
        # Write output
        if not os.path.exists(postdir):
            os.makedirs(postdir)
        dout.to_netcdf(postdir + "/" + rlist[i] + "_regrid_0p25x0p25.nc")

        # Delete xarray objects to avoid memory leak?
        del dout
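
# Note: parse_utgrid() and copyattrs() are not shown in these listings (the
# parse_mom6rgrid() helper in Example #1 plays the same role for the MOM6
# coordinate names). The sketches below are illustrations only: returning a
# plain string of coordinate names is an assumption that happens to satisfy the
# "in" tests above, and copyattrs is assumed to do a simple attribute copy as
# Example #6 does explicitly.
def parse_utgrid(ds, vname):
    """Return 'TLAT', 'ULAT' or 'NA' depending on a variable's coordinates."""
    coords = set(ds[vname].coords)
    if "TLAT" in coords:
        return "TLAT"
    if "ULAT" in coords:
        return "ULAT"
    return "NA"


def copyattrs(ds_in, ds_out, vnames):
    """Copy per-variable attributes from the input to the output dataset."""
    for vname in vnames:
        ds_out[vname].attrs = ds_in[vname].attrs
    return ds_out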