def _perform_interpolation(self,
                               input_dataset,
                               target_grid,
                               list_variables,
                               method='bilinear',
                               blend_missing=True,
                               periodic=True,
                               reuse_weights=False):

        # create interpolators
        regridder = xe.Regridder(input_dataset,
                                 target_grid,
                                 method,
                                 periodic=periodic,
                                 reuse_weights=reuse_weights)
        if blend_missing:
            backup_regridder = xe.Regridder(input_dataset,
                                            target_grid,
                                            'nearest_s2d',
                                            periodic=periodic,
                                            reuse_weights=reuse_weights)

        hremapped = xr.Dataset()
        hremapped.update({'XC': target_grid['lon']})
        hremapped.update({'YC': target_grid['lat']})

        for variable in list_variables:
            out_da = regridder(input_dataset[variable])
            if blend_missing:
                backup_da = backup_regridder(input_dataset[variable])
                out_da = self._blend(out_da, backup_da, 0)

            hremapped.update({variable: out_da})

        return hremapped
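
The `_blend` helper used above is not shown; a minimal stand-alone sketch of the same bilinear-plus-nearest-neighbour fallback pattern, with hypothetical `ds_src`/`ds_dst` datasets, could look like this:

import xarray as xr
import xesmf as xe

def blend_regrid(ds_src, ds_dst, varname):
    # primary interpolation: bilinear, which leaves missing values next to masked source cells
    primary = xe.Regridder(ds_src, ds_dst, 'bilinear', periodic=True)
    # fallback interpolation: nearest source-to-destination has no such gaps
    fallback = xe.Regridder(ds_src, ds_dst, 'nearest_s2d', periodic=True)
    out = primary(ds_src[varname])
    backup = fallback(ds_src[varname])
    # treat NaNs in the primary result as missing and fill them from the fallback
    return xr.where(out.notnull(), out, backup)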
Example #2
def process_weights(ds,weights=None,target='ds'):
    """ Process weights - including regridding
    
    If target == 'ds', regrid weights to ds. If target == 'weights',
    regrid ds to weights. 
    
    Also needs an output that will go into gdf_out, with a flag for
    'something was regridded, y'all'
    
    ohhh... wait what if the pixel polygons have the weight in the 
    geodataframe... so this actually goes in the get_pixel_polygons
    
    """
    
    if weights is None:
        # (for robustness against running this without an extra if statement
        # in a wrapper function)
        weights_info = 'nowghts'
    else:
        # Check types
        if not isinstance(weights, xr.DataArray):
            raise TypeError('[weights] must be an xarray DataArray.')
        if not isinstance(ds, (xr.DataArray, xr.Dataset)):
            raise TypeError('[ds] must be an xarray structure (DataArray or Dataset).')
            
        # Stick weights into the same supported input format as ds
        weights = fix_ds(weights)
        
        # Set regridding info
        weights_info = {'target':target,
                        'ds_grid':{'lat':ds.lat,'lon':ds.lon},
                        'weights_grid':{'lat':weights.lat,'lon':weights.lon}}

        # Regrid, if necessary (do nothing if the grids match up to within
        # floating-point precision)
        if not ((ds.sizes['lat'] == weights.sizes['lat']) and (ds.sizes['lon'] == weights.sizes['lon'])
                and np.allclose(ds.lat,weights.lat) and np.allclose(ds.lon,weights.lon)):
            if target == 'ds':
                print('regridding weights to data grid...')
                # Create regridder to the [ds] coordinates
                rgrd = xe.Regridder(weights,ds,'bilinear')
                # Regrid [weights] to the [ds] grid
                weights = rgrd(weights)

            elif target == 'weights':
                print('regridding data to weights grid...')
                # Create regridder to the [weights] coordinates
                rgrd = xe.Regridder(ds,weights,'bilinear')
                # Regrid [ds] to the [weights] grid
                ds = rgrd(ds)

            else:
                raise KeyError(target+' is not a supported target for regridding. Choose "weights" or "ds".')
            
        # Add weights to ds
        ds['weights'] = weights
            
    # Return
    return ds,weights_info
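
A minimal usage sketch (not part of the original snippet), assuming `fix_ds` from the same module is importable and using small synthetic grids:

import numpy as np
import xarray as xr

# hypothetical 1-degree data grid and 2-degree area-weight grid
ds = xr.Dataset({'tas': (('lat', 'lon'), np.random.rand(180, 360))},
                coords={'lat': np.arange(-89.5, 90, 1.0),
                        'lon': np.arange(0.5, 360, 1.0)})
area_weights = xr.DataArray(np.cos(np.deg2rad(np.arange(-89.0, 90, 2.0)))[:, None]
                            * np.ones((90, 180)),
                            coords={'lat': np.arange(-89.0, 90, 2.0),
                                    'lon': np.arange(1.0, 360, 2.0)},
                            dims=('lat', 'lon'))

# regrid the weights onto the data grid and attach them as ds['weights']
ds, weights_info = process_weights(ds, weights=area_weights, target='ds')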
Example #3
def test_non_cf_latlon():
    ds_in_noncf = ds_in.copy()
    ds_in_noncf.lon.attrs = {}
    ds_in_noncf.lat.attrs = {}
    # Test non-CF lat/lon extraction for both DataArray and Dataset
    xe.Regridder(ds_in_noncf['data'], ds_out, 'bilinear')
    xe.Regridder(ds_in_noncf, ds_out, 'bilinear')
Example #4
def _rect_regrid(dr, ds_in=None, _var=None):
    if ds_in is None:
        try:
            ds_in = dr.rename({'longitude': 'lon', 'latitude': 'lat'})
        except Exception:
            try:
                ds_in = dr.rename({'nav_lon': 'lon', 'nav_lat': 'lat'})
            except Exception:
                ds_in = dr
    else:
        try:
            ds_in = ds_in.rename({'longitude': 'lon', 'latitude': 'lat'})
        except Exception:
            try:
                ds_in = ds_in.rename({'nav_lon': 'lon', 'nav_lat': 'lat'})
            except Exception:
                pass
    # 1x1 degree global rectilinear target grid
    ds_out = xr.Dataset({'lat': (['lat'], np.arange(-89.5, 90.0, 1.0)),
                         'lon': (['lon'], np.arange(0, 360, 1.0))})
    try:
        regridder = xe.Regridder(ds_in, ds_out, 'bilinear', periodic=True)
    except Exception:
        # retry while ignoring degenerate (zero-area) source cells
        regridder = xe.Regridder(ds_in,
                                 ds_out,
                                 'bilinear',
                                 periodic=True,
                                 ignore_degenerate=True)
    ds = regridder(dr)
    regridder.clean_weight_file()
    return ds
Example #5
def _curv_regrid(dr, ds_in=None):
    if ds_in is None:
        try:
            ds_in = dr.rename({'longitude': 'lon', 'latitude': 'lat'})
        except Exception:
            try:
                ds_in = dr.rename({'nav_lon': 'lon', 'nav_lat': 'lat'})
            except Exception:
                ds_in = dr
    else:
        try:
            ds_in = ds_in.rename({'longitude': 'lon', 'latitude': 'lat'})
        except Exception:
            try:
                ds_in = ds_in.rename({'nav_lon': 'lon', 'nav_lat': 'lat'})
            except Exception:
                pass
    # 1x1 degree global curvilinear target grid with bounds
    ds_out = xe.util.grid_global(1, 1)
    try:
        regridder = xe.Regridder(ds_in, ds_out, 'bilinear', periodic=True)
    except Exception:
        # retry while ignoring degenerate (zero-area) source cells
        regridder = xe.Regridder(ds_in,
                                 ds_out,
                                 'bilinear',
                                 periodic=True,
                                 ignore_degenerate=True)
    ds = regridder(dr)
    regridder.clean_weight_file()
    return ds
Example #6
    def regrid_variable(self,
                        varname,
                        ds_in,
                        ds_out,
                        interpolation_method="bilinear",
                        use_esmf_v801=True):

        if "lat_bounds" and "lon_bounds" in list(ds_in.coords):
            ds_in = ds_in.drop({"lat_bounds", "lon_bounds"})
        if "yTe" and "xTe" in list(ds_in.coords):
            ds_in = ds_in.drop({"yTe", "xTe"})
        if "vertices_latitude" and "vertices_longitude" in list(ds_in.coords):
            ds_in = ds_in.drop({"vertices_latitude", "vertices_longitude"})

        if use_esmf_v801:
            regridder = xe.Regridder(ds_in,
                                     ds_out,
                                     interpolation_method,
                                     periodic=True,
                                     extrap_method='inverse_dist',
                                     extrap_num_src_pnts=10,
                                     extrap_dist_exponent=1,
                                     ignore_degenerate=False)
        else:
            regridder = xe.Regridder(ds_in,
                                     ds_out,
                                     interpolation_method,
                                     periodic=True,
                                     ignore_degenerate=True)

        print("[CMIP6_regrid] regridding {}".format(varname))

        return regridder(ds_in[varname]).to_dataset(name=varname)
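
A hedged usage sketch: `cmip6_regridder` stands for an instance of the (not shown) class this method belongs to, and `ds_model` for a hypothetical CMIP6 dataset with lat/lon coordinates:

import numpy as np
import xarray as xr

# hypothetical regular 1x1 degree target grid
ds_target = xr.Dataset(coords={'lat': (['lat'], np.arange(-89.5, 90, 1.0)),
                               'lon': (['lon'], np.arange(0.5, 360, 1.0))})

ds_tos = cmip6_regridder.regrid_variable('tos', ds_model, ds_target,
                                         interpolation_method='bilinear',
                                         use_esmf_v801=True)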
Example #7
def regrid_data(data, topo_coarse, topo_fine, variable, regrid_method='patch'):

    if variable in ('tasmax', 'tasmin', 'rsds', 'sfcWind', 'hurs'):
        # remove height dependency before regridding and add afterwards
        try:
            data.coords['month'] = data['time.month']
        except (AttributeError):
            data.coords['month'] = data['time.dt.month']

        grad = np.zeros((12, 1)) * np.nan
        c = np.zeros((12, 1)) * np.nan

        topo_1d = topo_coarse['height'].data.reshape(
            np.size(topo_coarse['height']), 1)

        data_det = data.copy(deep=True)

        for month in range(0, 12):

            # get data for month
            month_data = data[variable][data['month'] == month + 1, :, :]
            month_mean = month_data.mean(dim='time', skipna=True)
            data_1d = month_mean.data.reshape(np.size(month_mean), 1)

            grad[month], c[month] = linreg(data_1d, topo_1d)

            data_det[variable][data['month'] == month +
                               1, :, :] = month_data - (grad[month] *
                                                        topo_coarse['height'])

        regridder = xe.Regridder(data_det, topo_fine, regrid_method)
        data_regrid_tmp = regridder(data_det[variable])

        data_masked = correct_coast(data_regrid_tmp.data, topo_fine)

        data_regrid = np.ones_like(data_regrid_tmp) * np.nan
        for month in range(0, 12):
            data_regrid[
                data['month'] == month +
                1, :, :] = data_masked[data['month'] == month + 1, :, :] + (
                    grad[month] * topo_fine['height'].data)

    else:
        regridder = xe.Regridder(data, topo_fine, regrid_method)
        data_regrid_tmp = regridder(data[variable])

        data_regrid = correct_coast(data_regrid_tmp.data, topo_fine)

    if variable == 'pr':
        # set eventual negative values to 0
        data_regrid[data_regrid < 0] = 0

    regridder.clean_weight_file()

    return data_regrid
Example #8
def regrid(ds_in, target_ds, method='bilinear', reuse_weight=True):
    """Convenience function for one-time regridding"""
    import xesmf as xe
    regridder = xe.Regridder(ds_in, target_ds, method, periodic=True,
                             reuse_weights=reuse_weight)
    ds_out = regridder(ds_in)
    if not reuse_weight:
        regridder.clean_weight_file()
    return ds_out
Example #9
def test_existing_weights():
    # the first run
    method = 'bilinear'
    regridder = xe.Regridder(ds_in, ds_out, method)
    fn = regridder.to_netcdf()

    # make sure we can reuse weights
    assert os.path.exists(fn)
    regridder_reuse = xe.Regridder(ds_in, ds_out, method, weights=fn)
    assert regridder_reuse.A.shape == regridder.A.shape

    # or can also overwrite it
    xe.Regridder(ds_in, ds_out, method)
Example #10
File: data.py  Project: dfulu/UNIT
def construct_regridders(ds_a,
                         ds_b,
                         resolution_match='downscale',
                         scale_method='bilinear',
                         periodic=True):

    if resolution_match == 'downscale':
        ds_out = xr.Dataset({
            'lat':
            min([ds_a.lat, ds_b.lat], key=lambda x: len(x)),
            'lon':
            min([ds_a.lon, ds_b.lon], key=lambda x: len(x))
        })
    elif resolution_match == 'upscale':
        ds_out = xr.Dataset({
            'lat':
            max([ds_a.lat, ds_b.lat], key=lambda x: len(x)),
            'lon':
            max([ds_a.lon, ds_b.lon], key=lambda x: len(x))
        })
    else:
        raise ValueError(
            "resolution_match must be one of ['upscale', 'downscale']")

    _quick_add_bounds(ds_out)
    _quick_add_bounds(ds_a)
    _quick_add_bounds(ds_b)

    if not ds_out[['lat', 'lon']].equals(ds_a[['lat', 'lon']]):
        regridder_a = xe.Regridder(ds_a,
                                   ds_out,
                                   scale_method,
                                   periodic=periodic)
        regridder_a.clean_weight_file()
    else:
        regridder_a = None

    if not ds_out[['lat', 'lon']].equals(ds_b[['lat', 'lon']]):
        regridder_b = xe.Regridder(ds_b,
                                   ds_out,
                                   scale_method,
                                   periodic=periodic)
        regridder_b.clean_weight_file()
    else:
        regridder_b = None

    _quick_remove_bounds(ds_a)
    _quick_remove_bounds(ds_b)

    return regridder_a, regridder_b
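
A short usage sketch with hypothetical datasets `ds_hires` and `ds_lores`; a returned value of None means that dataset is already on the common grid:

regridder_a, regridder_b = construct_regridders(ds_hires, ds_lores,
                                                resolution_match='downscale',
                                                scale_method='bilinear')
if regridder_a is not None:
    ds_hires = regridder_a(ds_hires)
if regridder_b is not None:
    ds_lores = regridder_b(ds_lores)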
Example #11
def test_regrid_cfbounds():
    # Test regridding when bounds are given in cf format with a custom "bounds" name.
    ds = ds_in.copy().drop_vars(['lat_b', 'lon_b'])
    ds['lon_bounds'] = cfxr.vertices_to_bounds(ds_in.lon_b, ('bnds', 'y', 'x'))
    ds['lat_bounds'] = cfxr.vertices_to_bounds(ds_in.lat_b, ('bnds', 'y', 'x'))
    ds.lat.attrs['bounds'] = 'lat_bounds'
    ds.lon.attrs['bounds'] = 'lon_bounds'

    regridder = xe.Regridder(ds, ds_out, 'conservative', periodic=True)
    dr_out = regridder(ds['data'])
    # compare with provided-bounds solution
    dr_exp = xe.Regridder(ds_in, ds_out, 'conservative',
                          periodic=True)(ds_in['data'])
    assert_allclose(dr_out, dr_exp)
Example #12
def gen_regridder(grid_in,grid_out,method='conservative',grid_dir='.',make_obj=True):
    # What kind of grids are these?
    cs_in  = len(grid_in['lat'])  == 6
    cs_out = len(grid_out['lat']) == 6
    if cs_in and cs_out:
       # CS -> CS
       n_in  = grid_in['lat'][0].shape[0]
       n_out = grid_out['lat'][0].shape[0]
       if n_in == n_out: 
          # Grids are identical
          regrid_obj = None
       else:
          regrid_obj=[]
          with warnings.catch_warnings():
              warnings.filterwarnings("ignore", message="Input array is not F_CONTIGUOUS. Will affect performance.")
              # Assume the faces align
              for i_face in range(6):
                 sub_grid_in  = {'lat':   grid_in['lat'][i_face], 
                                 'lon':   grid_in['lon'][i_face],
                                 'lat_b': grid_in['lat_b'][i_face], 
                                 'lon_b': grid_in['lon_b'][i_face]}
                 sub_grid_out = {'lat':   grid_out['lat'][i_face], 
                                 'lon':   grid_out['lon'][i_face],
                                 'lat_b': grid_out['lat_b'][i_face], 
                                 'lon_b': grid_out['lon_b'][i_face]}
                 fname = os.path.join(grid_dir,'{:s}_c{:d}f{:d}_c{:d}f{:d}'.format(method,n_in,i_face,n_out,i_face))
                 regrid_obj.append(xesmf.Regridder(sub_grid_in,sub_grid_out,method=method,reuse_weights=True,filename=fname))
    elif cs_in:
       # CS -> LL
       regrid_obj = gen_c2l_regridder(cs_grid=grid_in,ll_grid=grid_out,method=method,grid_dir=grid_dir)
    elif cs_out:
       # LL -> CS
       regrid_obj = gen_l2c_regridder(cs_grid=grid_out,ll_grid=grid_in,method=method,grid_dir=grid_dir)
    else:
       # LL -> LL
       n_lon_in  = grid_in['lon'].size
       n_lat_in  = grid_in['lat'].size
       n_lon_out = grid_out['lon'].size
       n_lat_out = grid_out['lat'].size
       fname = os.path.join(grid_dir,'{:s}_{:d}x{:d}_{:d}x{:d}'.format(
                              method,n_lat_in,n_lon_in,n_lat_out,n_lon_out))
       regrid_obj = xesmf.Regridder(grid_in,grid_out,method=method,reuse_weights=True,
                                    filename=fname)

    if make_obj:
       # Make it a little fancier...
       return regridder(regrid_obj)
    else:
       return regrid_obj
Example #13
def apply_regrid(ds,
                 method='conservative',
                 grid_spacing=[5, 5],
                 latname='latitude',
                 lonname='longitude',
                 copy=True):
    if copy:
        ds = ds.copy()
    if ds.name is None:
        ds.name = 'placeholder_name'
    ds = ds.sortby(lonname).sortby(latname)
    lon0_b = ds[lonname].values[0]
    lon1_b = ds[lonname].values[-1]
    dlon = grid_spacing[0]
    lat0_b = ds[latname].values[0]
    lat1_b = ds[latname].values[-1]
    dlat = grid_spacing[1]
    ds_out = xe.util.grid_2d(lon0_b, lon1_b, dlon, lat0_b, lat1_b, dlat)
    if not isinstance(ds, xr.Dataset):
        is_array = True
        ds = ds.to_dataset()
    else:
        is_array = False

    regridder = xe.Regridder(ds, ds_out, method=method)
    ds_regridded = regridder(ds)
    ds_regridded = ds_regridded.assign_coords(
        x=ds_out['lon'].values[0, :],
        y=ds_out['lat'].values[:, 0]).rename(x=lonname, y=latname)
    if is_array:
        ds_regridded = ds_regridded.to_array().isel(
            variable=0).drop('variable')
    return ds_regridded
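
A minimal usage sketch, with `da` a hypothetical DataArray carrying `latitude`/`longitude` coordinates:

# conservatively regrid to a 5x5 degree grid, keeping the original coordinate names
da_5deg = apply_regrid(da, method='conservative', grid_spacing=[5, 5],
                       latname='latitude', lonname='longitude')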
Example #14
def regrid_to_pop(custom_output):
    with xr.open_dataset(custom_output) as ds:
        ds = ds[output]

    regridder = xe.Regridder(ds, pop_grid, "bilinear", reuse_weights=True)
    ds_regrid = regridder(ds)
    ds_regrid.to_netcdf(custom_output[0:-3] + "_popgrid_0.25deg.nc")
Example #15
def test_existing_weights():
    # the first run
    method = 'bilinear'
    regridder = xe.Regridder(ds_in, ds_out, method)

    # make sure we can reuse weights
    assert os.path.exists(regridder.filename)
    regridder_reuse = xe.Regridder(ds_in, ds_out, method, reuse_weights=True)
    assert regridder_reuse.A.shape == regridder.A.shape

    # or can also overwrite it
    xe.Regridder(ds_in, ds_out, method)

    # clean-up
    regridder.clean_weight_file()
    assert not os.path.exists(regridder.filename)
Example #16
def regrid(ds_in, target_ds, method='bilinear'):
    """Convenience function for one-time regridding"""
    import xesmf as xe
    regridder = xe.Regridder(ds_in, target_ds, method, periodic=True)
    ds_out = regridder(ds_in)
    regridder.clean_weight_file()
    return ds_out
Example #17
def test_regrid_dataset_from_locstream():
    # xarray.Dataset containing in-memory numpy array

    regridder = xe.Regridder(ds_locs, ds_in, 'nearest_s2d', locstream_in=True)
    outdata = regridder(ds_locs)
    # clean-up
    regridder.clean_weight_file()
Example #18
def monthly_average(ds_in,
                    variable,
                    start_year=1990,
                    end_year=1999,
                    month_num=5):
    """
    Takes the input xarray dataset, converts the times from cftimes to
    datetime objects. It then subsets the time based on the given years,
    and calculates the monthly averages
    """

    # Subsets times based on given years
    ds_in = ds_in[variable].sel(time=slice(
        str(start_year) + '-' + str('%.2i' % month_num),
        str(end_year) + '-' + str('%.2i' % month_num), 12), )

    # Find the monthly average
    mon_avg = ds_in.mean('time', skipna=True)

    mon_avg_ds = mon_avg.to_dataset()

    # Set up the regridder - keep reuse_weights=True to reuse a weight file already in the directory
    regridder = xe.Regridder(mon_avg_ds,
                             ds_out,
                             'bilinear',
                             reuse_weights=True)

    return regridder(mon_avg_ds)
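
A hedged usage sketch; it assumes the module-level target grid `ds_out` used above is defined and that `ds_model` is a hypothetical CMIP-style dataset:

# May climatology of 'tas' over 1990-1999, regridded onto ds_out
tas_may = monthly_average(ds_model, 'tas', start_year=1990, end_year=1999, month_num=5)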
Example #19
def test_regrid_dataset():
    # xarray.Dataset containing in-memory numpy array

    regridder = xe.Regridder(ds_in, ds_out, 'conservative')

    # `ds_out` already refers to output grid object
    # TODO: use more consistent variable namings across tests
    ds_result = regridder(ds_in)

    # output should contain all data variables
    assert set(ds_result.data_vars.keys()) == set(ds_in.data_vars.keys())

    # compare with analytical solution
    rel_err = (ds_out['data_ref'] - ds_result['data']) / ds_out['data_ref']
    assert np.max(np.abs(rel_err)) < 0.05

    # data over broadcasting dimensions should agree
    assert_almost_equal(
        ds_in['data4D'].values.mean(axis=(2, 3)),
        ds_result['data4D'].values.mean(axis=(2, 3)),
        decimal=10,
    )

    # check metadata
    xr.testing.assert_identical(ds_result['time'], ds_in['time'])
    xr.testing.assert_identical(ds_result['lev'], ds_in['lev'])
    assert_equal(ds_result['lat'].values, ds_out['lat'].values)
    assert_equal(ds_result['lon'].values, ds_out['lon'].values)
Example #20
def regrid_model(ds,
                 reference_grid,
                 latvariable='lat',
                 lonvariable='lon',
                 regrid_method='nearest_s2d'):
    """Regrids model output to a reference grid.

    Args:
        ds: The dataset of the model output.
        reference_grid: The dataset containing the reference grid.
        latvariable: The string name for the latitude variable.
        lonvariable: The string name for the longitude variable.
        regrid_method: The string name of the method to use for regridding.
    Returns:
        data_series_regridded: The regridded model dataset.
    """
    data_series = ds[THIS_VARIABLE_ID]
    ds_in = xr.Dataset({
        'lat': data_series[latvariable],
        'lon': data_series[lonvariable],
        'time': data_series['time'],
        THIS_VARIABLE_ID: data_series
    })
    regridder = xe.Regridder(ds_in,
                             reference_grid,
                             regrid_method,
                             periodic=True)
    data_series_regridded = regridder(ds_in)
    data_series_regridded.attrs.update(data_series.attrs)

    return data_series_regridded
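
A short usage sketch, assuming the module-level `THIS_VARIABLE_ID` is set (e.g. to 'tos') and `ds_ref` is a hypothetical dataset defining the reference grid:

ds_on_ref = regrid_model(ds_model, ds_ref,
                         latvariable='lat', lonvariable='lon',
                         regrid_method='nearest_s2d')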
Example #21
def regrid(data_array, lon_out, lat_out, method='conservative'):
    """
    Regrid data. The given DataArray is regridded onto a rectilinear grid built from lon_out
    and lat_out.
    The output longitudes must not wrap around (no duplicated end point), e.g. for LMDz: [-180, -176.25, ..., 176.25].

    :param data_array: DataArray (xarray) to regrid. Not a Dataset. It has to contain at least 'lat' and 'lon' dimensions.
                 If they are under different names, the function renames them.
    :param lon_out: output centered longitude array
    :param lat_out: output centered latitude array
    :param method: type of interpolation ('bilinear', 'conservative', 'nearest_s2d', 'nearest_d2s', 'patch')
    :return: data_out: DataArray (xarray)
    """
    data_array = change_dim_name(data_array)
    lon, lat = data_array.lon.values, data_array.lat.values
    lon_b, lat_b = get_grid_corners(lon, lat)
    lon_out_b, lat_out_b = get_grid_corners(lon_out, lat_out)

    grid_in = {'lon': lon, 'lat': lat, 'lon_b': lon_b, 'lat_b': lat_b}
    grid_out = {
        'lon': lon_out,
        'lat': lat_out,
        'lon_b': lon_out_b,
        'lat_b': lat_out_b
    }

    regridder = xe.Regridder(grid_in, grid_out, method, reuse_weights=True)
    data_out = regridder(data_array)

    return data_out
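
A usage sketch, assuming `change_dim_name` and `get_grid_corners` from the same module are importable and `da` is a hypothetical global DataArray; the 2-degree target longitudes below do not wrap past the end point:

import numpy as np

lon_out = np.arange(-180.0, 180.0, 2.0)   # centered output longitudes, no duplicated end point
lat_out = np.arange(-89.0, 90.0, 2.0)     # centered output latitudes
da_2deg = regrid(da, lon_out, lat_out, method='conservative')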
Example #22
def test_regrid_dataarray_dask_to_locstream():
    # xarray.DataArray containing chunked dask array

    regridder = xe.Regridder(ds_in, ds_locs, 'bilinear', locstream_out=True)

    dr_in = ds_in_chunked['data4D']
    dr_out = regridder(dr_in)
Example #23
def test_regrid_dask_to_locstream():
    # chunked dask array (no xarray metadata)

    regridder = xe.Regridder(ds_in, ds_locs, 'bilinear', locstream_out=True)

    indata = ds_in_chunked['data4D'].data
    outdata = regridder(indata)
Example #24
def test_regrid_dataarray_to_locstream():
    # xarray.DataArray containing in-memory numpy array

    regridder = xe.Regridder(ds_in, ds_locs, 'bilinear', locstream_out=True)

    outdata = regridder(ds_in['data'].values)  # pure numpy array
    dr_out = regridder(ds_in['data'])  # xarray DataArray

    # DataArray and numpy array should lead to the same result
    assert_equal(outdata.squeeze(), dr_out.values)

    with pytest.raises(ValueError):
        regridder = xe.Regridder(ds_in,
                                 ds_locs,
                                 'conservative',
                                 locstream_out=True)
Example #25
def _clean_regridder(ds_source, ds_target, method, **xesmf_kwargs):
    def _clean(ds):
        # remove all unnecessary stuff for the regridding
        ds = ds.isel(time=0, lev=0, rho=0, missing_dims="ignore")
        # possibly also exclude "lat_bounds" / "lon_bounds" here
        for coord in [co for co in ds.coords if co not in ["lon", "lat"]]:
            ds = ds.drop_vars(coord)

        # Ugly hack to eliminate the 'seam' when regridding from gr to native grids:
        # something in xesmf causes problems with broadcasted regular lon/lat values
        if "gr" in ds.attrs["grid_label"]:
            # revert the convention and return the 1d coordinates
            # (should that behavior be changed in general?)
            ds = ds.assign(lon=ds.lon.isel(y=0))
            ds = ds.assign(lat=ds.lat.isel(x=0))

        # for now just eliminate the attrs here;
        # this can be solved more elegantly once proper cf-attributes are parsed
        for coord in ds.coords:
            ds[coord].attrs = {}
        #         ds = ds.rename({'lon_bounds':'lon_b', 'lat_bounds':'lat_b'})

        # TODO: Make this work out of the box with lon/lat bounds and method='conservative'
        # TODO: Maybe erase the need for this completely with cf-xarray
        return ds

    ds_source = _clean(ds_source)
    ds_target = _clean(ds_target)
    return xesmf.Regridder(ds_source, ds_target, method, **xesmf_kwargs)
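
A minimal usage sketch, assuming `ds_gr` and `ds_native` are hypothetical CMIP6 datasets that carry a `grid_label` attribute:

regridder = _clean_regridder(ds_gr, ds_native, 'bilinear', periodic=True)
ds_on_native = regridder(ds_gr)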
Example #26
def regridhorizontal(infolder, variablename, inputtimesteps, outputgridfile,
                     outputfolder):
    """
	Regrid the output of the interpolate function for one variable to a different domain. Regridding will be performed using xesmf.

	Input:
	infolder: folder where the data of the target variable is located (the output of the interpolate.py)
	variablename: name of the variable to be regridded
	inputtimesteps: how many timesteps need to be regridded (corresponds to the number of files)?
	outputgridfile: a netcdf file that is in the target grid --> all input will be regridded to the grid defined in this netcdf file.
	outputfolder: path to a folder to write output. Overwriting files seems to cause problems, so it is recommended to use a new folder.

	Output: One netcdf file per timestep for the selected variable regirdded to the defined target grid.
	"""

    targetgrid = xr.open_dataset(outputgridfile)

    infile = xr.open_dataset(f"{infolder}/{variablename}00000.nc")

    regridder = xe.Regridder(infile,
                             targetgrid,
                             'bilinear',
                             reuse_weights=True,
                             filename=variablename + 'regridder.nc')

    Path(outputfolder).mkdir(parents=True, exist_ok=True)

    for stepnum in range(inputtimesteps):
        infile = xr.open_dataset(f"{infolder}/{variablename}{stepnum:05d}.nc")

        outfile = regridder(infile)
        infile.close()

        outfile.to_netcdf(f"{outputfolder}/{variablename}{stepnum:05d}.nc")
        outfile.close()
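
A hedged usage sketch with hypothetical paths, assuming the interpolate step has already written files named like ta00000.nc into ./interpolated:

regridhorizontal('./interpolated', 'ta', inputtimesteps=24,
                 outputgridfile='target_grid.nc',
                 outputfolder='./regridded')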
Example #27
def test_regrid_with_1d_grid_infer_bounds():
    ds_in_1d = ds_2d_to_1d(ds_in).rename(x='lon', y='lat')
    ds_out_1d = ds_2d_to_1d(ds_out).rename(x='lon', y='lat')

    regridder = xe.Regridder(ds_in_1d,
                             ds_out_1d,
                             'conservative',
                             periodic=True)

    dr_out = regridder(ds_in['data'])

    # compare with provided-bounds solution
    dr_exp = xe.Regridder(ds_in, ds_out, 'conservative',
                          periodic=True)(ds_in['data'])

    assert_allclose(dr_out, dr_exp)
Example #28
def test_regrid_dataarray():
    # xarray.DataArray containing in-memory numpy array

    regridder = xe.Regridder(ds_in, ds_out, 'conservative')

    outdata = regridder(ds_in['data'].values)  # pure numpy array
    dr_out = regridder(ds_in['data'])  # xarray DataArray

    # DataArray and numpy array should lead to the same result
    assert_equal(outdata, dr_out.values)

    # compare with analytical solution
    rel_err = (ds_out['data_ref'] - dr_out) / ds_out['data_ref']
    assert np.max(np.abs(rel_err)) < 0.05

    # check metadata
    assert_equal(dr_out['lat'].values, ds_out['lat'].values)
    assert_equal(dr_out['lon'].values, ds_out['lon'].values)

    # test broadcasting
    dr_out_4D = regridder(ds_in['data4D'])

    # data over broadcasting dimensions should agree
    assert_almost_equal(ds_in['data4D'].values.mean(axis=(2, 3)),
                        dr_out_4D.values.mean(axis=(2, 3)),
                        decimal=10)

    # check metadata
    xr.testing.assert_identical(dr_out_4D['time'], ds_in['time'])
    xr.testing.assert_identical(dr_out_4D['lev'], ds_in['lev'])

    # clean-up
    regridder.clean_weight_file()
Example #29
def test_regrid_dataset_to_locstream():
    # xarray.Dataset containing in-memory numpy array

    regridder = xe.Regridder(ds_in, ds_locs, 'bilinear', locstream_out=True)
    ds_result = regridder(ds_in)
    # clean-up
    regridder.clean_weight_file()
Example #30
    def __init__(self, xds_obs, xds_mod, **kwargs):
        """
        Parameters
        ----------
        xds_obs : :obj:`xarray.Dataset`
            The observation dataset containing the observation history of a single variable
        xds_mod : :obj:`xarray.Dataset`
            The model dataset containing the model history of a single variable
        """
        # UERRA
        # Extract projection variable
        self.obs_proj = xds_obs["Lambert_Conformal"]
        xds_obs = xds_obs.drop_vars(["Lambert_Conformal"])

        # Extract variable attributes
        self.obs_var_attrs = {k: v.attrs for k, v in xds_obs.variables.items()}

        xds_obs = xds_obs.sel(time=slice("1979-01-01", "2018-12-31"))
        xds_mod = xds_mod.sel(time=slice("1979-01-01", "2018-12-31"))

        #        xds_mod = xds_mod.drop_vars(["crs"])
        #        xds_mod = xds_mod.reindex_like(xds_obs)

        self.regridder = xe.Regridder(xds_mod, xds_obs, 'bilinear')

        # Regrid model data and set x and y accordingly
        xds_mod = self.regridder(xds_mod)
        xds_mod["x"] = xds_obs.x
        xds_mod["y"] = xds_obs.y

        self.xds_obs = xds_obs
        self.xds_mod = xds_mod

        logging.info('New downscaler object initialized')