Example #1
    def test_padding_mixed(self, boundary_width):
        ds, coords, _ = datasets_grid_metric("C")
        grid = Grid(ds, coords=coords)
        data = ds.tracer

        axis_padding_mapping = {"X": "periodic", "Y": "extend"}
        method_mapping = {
            "periodic": "wrap",
            "extend": "edge",
        }

        # iterate over all axes
        expected = data.copy(deep=True)
        for ax, widths in boundary_width.items():
            dim = grid._get_dims_from_axis(
                data, ax)[0]  # ? not entirely sure why this is a list
            expected = expected.pad({dim: widths},
                                    method_mapping[axis_padding_mapping[ax]])

        # we intentionally strip all coords from padded arrays
        expected = _strip_all_coords(expected)

        result = pad(
            data,
            grid,
            boundary=axis_padding_mapping,
            boundary_width=boundary_width,
            fill_value=None,
            other_component=None,
        )
        xr.testing.assert_allclose(expected, result)
Example #2
    def test_padding_fill(self, boundary_width, fill_value):

        ds, coords, _ = datasets_grid_metric("C")
        grid = Grid(ds, coords=coords)
        data = ds.tracer

        # iterate over all axes
        expected = data.copy(deep=True)
        for ax, widths in boundary_width.items():
            dim = grid._get_dims_from_axis(
                data, ax)[0]  # ? not entirely sure why this is a list
            expected = expected.pad({dim: widths},
                                    "constant",
                                    constant_values=fill_value)

        # we intentionally strip all coords from padded arrays
        expected = _strip_all_coords(expected)

        result = pad(
            data,
            grid,
            boundary="fill",
            boundary_width=boundary_width,
            fill_value=fill_value,
            other_component=None,
        )
        xr.testing.assert_allclose(expected, result)
Example #3
def test_interp_c_to_g_periodic(periodic_1d):
    """Interpolate from c grid to g grid."""
    ds = periodic_1d

    data_c = np.sin(ds['XC'])
    # np.roll(np.arange(5), 1) --> [4, 0, 1, 2, 3]
    # a positive shift rolls values to the right, so each point picks up its left neighbor
    data_expected = 0.5 * (data_c.values + np.roll(data_c.values, 1))

    grid = Grid(ds)
    data_g = grid.interp(data_c, 'X')

    # check that the dimensions are right
    assert data_g.dims == ('XG', )
    xr.testing.assert_equal(data_g.XG, ds.XG)
    assert len(data_g.XG) == len(data_g)

    # check that the values are right
    np.testing.assert_allclose(data_g.values, data_expected)

    # try the same with chunks
    data_c = np.sin(ds['XC'])
    data_c = data_c.chunk(10)
    data_g = grid.interp(data_c, 'X')
    np.testing.assert_allclose(data_g.values, data_expected)
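
A tiny worked check of the roll identity used for the expected values above (purely illustrative, not part of the test):

import numpy as np

c = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
# average each cell-center value with its left (periodic) neighbor
g = 0.5 * (c + np.roll(c, 1))
print(g)  # [2.  0.5 1.5 2.5 3.5]
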
Example #4
def grid_calculations(grid: xgcm.Grid, ds_full: xr.core.dataset.Dataset):

    # Compute difference (in degrees) along longitude and latitude for
    # both cell center and left
    dlong = grid.diff(ds_full.xt_ocean, 'X', boundary_discontinuity=360)
    dlonc = grid.diff(ds_full.xt_ocean_left, 'X', boundary_discontinuity=360)

    dlatg = grid.diff(ds_full.yt_ocean,
                      'Y',
                      boundary='fill',
                      fill_value=np.nan)
    dlatc = grid.diff(ds_full.yt_ocean_left,
                      'Y',
                      boundary='fill',
                      fill_value=np.nan)

    # Convert degrees to approximate Cartesian distances on the Earth
    # and add the distances as coordinates to the data
    ds_full.coords['dxg'], ds_full.coords['dyg'] = dll_dist(
        dlong, dlatg, ds_full.xt_ocean, ds_full.yt_ocean)
    ds_full.coords['dxc'], ds_full.coords['dyc'] = dll_dist(
        dlonc, dlatc, ds_full.xt_ocean, ds_full.yt_ocean)

    # Calculate area of each gridcell
    ds_full.coords['area_c'] = ds_full.dxc * ds_full.dyc

    # Fill nan values
    dyg = ds_full.dyg.fillna(111000)
    dyc = ds_full.dyc.fillna(111000)
    ds_full.coords['dyg'] = dyg
    ds_full.coords['dyc'] = dyc
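
The dll_dist helper used above is not shown in this snippet; here is a minimal sketch of one plausible implementation, assuming roughly 111 km per degree at the equator on a spherical Earth (the constant and the broadcasting are assumptions, not taken from the original code):

import numpy as np

def dll_dist(dlon, dlat, lon, lat):
    """Convert lon/lat spacings in degrees to approximate distances in meters."""
    distance_1deg_equator = 111000.0  # assumed meters per degree of latitude
    dx = dlon * np.cos(np.deg2rad(lat)) * distance_1deg_equator
    dy = ((lon * 0) + 1) * dlat * distance_1deg_equator  # broadcast dy over lon
    return dx, dy
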
Example #5
def test_interp_all():
    datadict = datasets()
    coords = datadict["coords"]
    ds_b = datadict["B"]
    grid_b = Grid(ds_b, coords=coords)

    ds_c = datadict["C"]
    grid_c = Grid(ds_c, coords=coords)

    for var in ["u", "v", "tracer"]:
        for ds, grid in zip([ds_b, ds_c], [grid_b, grid_c]):
            for target, control_dims in zip(
                ["center", "right"],
                [["xt", "yt", "time"], ["xu", "yu", "time"]],
            ):
                print(ds)
                print(grid)
                ds_interp = interp_all(grid, ds, target=target)
                assert set(ds_interp[var].dims) == set(control_dims)
                assert set(ds_interp.coords) == set(ds.coords)
                ds_interp_nocoords = interp_all(grid,
                                                ds,
                                                target=target,
                                                keep_coords=False)
                assert set(ds_interp_nocoords.coords) != set(ds.coords)
Example #6
def load_gos_data(gos_filenames):
    #import xgcm
    from xgcm import Grid
    from xgcm.autogenerate import generate_grid_ds
    # ====== load in all .nc files and combine into one xarray dataset
    gos_map = xr.open_mfdataset(gos_filenames)
    gos_map = gos_map.rename({'latitude': 'lat'}).rename({'longitude': 'lon'})
    gos_select = gos_map  #.sel(time='2016-11-19',lon=slice(10,16),lat=slice(-28,-24))
    #gos_map.ugos
    #dx = gos_map.lon.diff('lon')
    #gos_map['rel_vort'] = gos_map.vgos.diff('lon')/gos_map.lon.diff('lon')

    #gos_select = gos_map #gos_map.sel(time='2016-11-19',lon=slice(10,16),lat=slice(-28,-24))
    # create grid for interpolation, differencing
    #grid = xgcm.Grid(gos_select)
    # for Satellite data:
    # https://xgcm.readthedocs.io/en/latest/autogenerate_examples.html
    ds_full = generate_grid_ds(gos_select, {'X': 'lon', 'Y': 'lat'})
    ds_full.vgos

    grid = Grid(ds_full, periodic=['X'])

    # compute the difference (in degrees) along the longitude and latitude for both the cell center and the cell face
    # need to specify boundary_discontinuity in order to avoid introducing artefacts at the boundary
    dlong = grid.diff(ds_full.lon, 'X', boundary_discontinuity=360)
    dlonc = grid.diff(ds_full.lon_left, 'X', boundary_discontinuity=360)
    #dlonc_wo_discontinuity = grid.diff(ds_full.lon_left, 'X')
    dlatg = grid.diff(ds_full.lat, 'Y', boundary='fill', fill_value=np.nan)
    dlatc = grid.diff(ds_full.lat_left,
                      'Y',
                      boundary='fill',
                      fill_value=np.nan)

    # convert into approximate Cartesian distances on the globe
    ds_full.coords['dxg'], ds_full.coords['dyg'] = dll_dist(
        dlong, dlatg, ds_full.lon, ds_full.lat)
    ds_full.coords['dxc'], ds_full.coords['dyc'] = dll_dist(
        dlonc, dlatc, ds_full.lon, ds_full.lat)

    # Relative vorticity: ζ = ∂v/∂x - ∂u/∂y
    ds_full['dv_dx'] = grid.diff(ds_full.vgos, 'X') / ds_full.dxg
    ds_full['du_dy'] = grid.diff(
        ds_full.ugos, 'Y', boundary='fill', fill_value=np.nan) / ds_full.dyg
    dv_dx = grid.interp(ds_full['dv_dx'],
                        'Y',
                        boundary='fill',
                        fill_value=np.nan)  # get dv_dx and du_dy on same grid
    du_dy = grid.interp(ds_full['du_dy'],
                        'X',
                        boundary='fill',
                        fill_value=np.nan)
    ds_full['Rel_Vort'] = dv_dx - du_dy

    # Vorticity Rossby Number = ζ / f
    ds_full['Ro'] = ds_full.Rel_Vort / coriolis(ds_full.Rel_Vort.lat_left)

    return ds_full
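
The coriolis helper used for the Rossby number above is not defined in this snippet; a minimal sketch under the standard definition f = 2 * Omega * sin(lat), with lat in degrees:

import numpy as np

def coriolis(lat):
    """Coriolis parameter f = 2 * Omega * sin(lat) in 1/s; lat in degrees."""
    omega = 7.2921e-5  # Earth's rotation rate in rad/s
    return 2 * omega * np.sin(np.deg2rad(lat))
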
Example #7
def add_MITgcm_missing_metrics(dset, periodic=None):
    """
    Infer missing metrics from MITgcm output files.

    Parameters
    ----------
    dset : xarray.Dataset
        A dataset open from a file

    Returns
    -------
    dset : xarray.Dataset
        Input dataset with the appropriate metrics added
    grid : xgcm.Grid
        The grid with the appropriate metrics
    """
    coords = dset.coords
    grid = Grid(dset, periodic=periodic)

    BCx = 'periodic' if grid.axes['X']._periodic else 'fill'
    BCy = 'periodic' if grid.axes['Y']._periodic else 'fill'

    if 'drW' not in coords:  # vertical cell size at u point
        coords['drW'] = dset.hFacW * dset.drF
    if 'drS' not in coords:  # vertical cell size at v point
        coords['drS'] = dset.hFacS * dset.drF
    if 'drC' not in coords:  # vertical cell size at tracer point
        coords['drC'] = dset.hFacC * dset.drF

    if 'dxF' not in coords:
        coords['dxF'] = grid.interp(dset.dxC, 'X', boundary=BCx)
    if 'dyF' not in coords:
        coords['dyF'] = grid.interp(dset.dyC, 'Y', boundary=BCy)
    if 'dxV' not in coords:
        coords['dxV'] = grid.interp(dset.dxG, 'X', boundary=BCx)
    if 'dyU' not in coords:
        coords['dyU'] = grid.interp(dset.dyG, 'Y', boundary=BCy)

    if 'hFacZ' not in coords:
        coords['hFacZ'] = grid.interp(dset.hFacS, 'X', boundary=BCx)
    if 'maskZ' not in coords:
        coords['maskZ'] = coords['hFacZ']

    # Calculate vertical distances located on the cell boundary
    # ds.coords['dzC'] = grid.diff(ds.depth, 'Z', boundary='extrapolate')
    # Calculate vertical distances located on the cell center
    # ds.coords['dzT'] = grid.diff(ds.depth_left, 'Z', boundary='extrapolate')

    metrics = {
        ('X', ): ['dxG', 'dxF', 'dxC', 'dxV'],  # X distances
        ('Y', ): ['dyG', 'dyF', 'dyC', 'dyU'],  # Y distances
        ('Z', ): ['drW', 'drS', 'drC', 'drF'],  # Z distances
        ('X', 'Y'): ['rAw', 'rAs', 'rA', 'rAz']
    }  # Areas

    grid._assign_metrics(metrics)

    return dset, grid
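
A hypothetical usage sketch (the xmitgcm call, path, and geometry below are illustrative assumptions, not part of the function above):

import xmitgcm

dset = xmitgcm.open_mdsdataset("./run", geometry="sphericalpolar")  # assumed path
dset, grid = add_MITgcm_missing_metrics(dset, periodic=["X"])
# metric-aware operations (e.g. weighted averages) now have the distances and areas they need
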
Example #8
def recreate_full_grid(ds, lon_name="lon", lat_name="lat"):
    ds_full = generate_grid_ds(ds, {
        "X": "x",
        "Y": "y"
    },
                               position=("center", "right"))
    grid = Grid(ds_full, periodic=['X'])

    # infer dx at eastern bound from tracer points
    lon0, lon1 = grid.axes["X"]._get_neighbor_data_pairs(
        ds_full.lon.load(), "right")
    lat0 = lat1 = ds_full.lat.load().data
    dx = distance(lon0, lat0, lon1, lat1)
    ds_full.coords["dxe"] = xr.DataArray(dx,
                                         coords=grid.interp(ds_full.lon,
                                                            "X").coords)

    # infer dy at northern bound from tracer points
    lat0, lat1 = grid.axes["Y"]._get_neighbor_data_pairs(
        ds_full.lat.load(), "right", boundary="extrapolate")

    lon0 = lon1 = ds_full.lon.load().data
    dy = distance(lon0, lat0, lon1, lat1)
    ds_full.coords["dyn"] = xr.DataArray(dy,
                                         coords=grid.interp(
                                             ds_full.lat,
                                             "Y",
                                             boundary="extrapolate").coords)

    # now simply interpolate all the other metrics
    ds_full.coords['dxt'] = grid.interp(ds_full.coords['dxe'], 'X')
    ds_full.coords['dxne'] = grid.interp(ds_full.coords['dxe'],
                                         'Y',
                                         boundary="extrapolate")
    ds_full.coords['dxn'] = grid.interp(ds_full.coords['dxt'],
                                        'Y',
                                        boundary="extrapolate")

    ds_full.coords['dyt'] = grid.interp(ds_full.coords['dyn'],
                                        'Y',
                                        boundary="extrapolate")
    ds_full.coords['dyne'] = grid.interp(ds_full.coords['dyn'], 'X')
    ds_full.coords['dye'] = grid.interp(ds_full.coords['dyt'], 'X')

    ds_full.coords['area_t'] = ds_full.coords['dxt'] * ds_full.coords['dyt']
    ds_full.coords['area_e'] = ds_full.coords['dxe'] * ds_full.coords['dye']
    ds_full.coords['area_ne'] = ds_full.coords['dxne'] * ds_full.coords['dyne']
    ds_full.coords['area_n'] = ds_full.coords['dxn'] * ds_full.coords['dyn']

    # should I return the coords to dask?
    return ds_full
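
The distance helper assumed above is not included in the snippet; a sketch of one plausible implementation using the haversine formula (the Earth radius value is an assumption):

import numpy as np

def distance(lon0, lat0, lon1, lat1, radius=6.371e6):
    """Approximate great-circle distance in meters between points given in degrees."""
    lon0, lat0, lon1, lat1 = map(np.deg2rad, (lon0, lat0, lon1, lat1))
    a = (np.sin((lat1 - lat0) / 2) ** 2
         + np.cos(lat0) * np.cos(lat1) * np.sin((lon1 - lon0) / 2) ** 2)
    return 2 * radius * np.arcsin(np.sqrt(a))
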
Example #9
def add_vertical_spacing(ds):
    grid = Grid(ds)
    ds.coords['dst'] = calculate_ds(ds, dim='st')
    ds.coords['dswt'] = calculate_ds(ds, dim='sw')
    ds.coords['dzt'] = calculate_dz(ds['eta_t'], ds['ht'], ds['dst'])
    ds.coords['dzu'] = grid.min(grid.min(ds['dzt'], 'X'), 'Y')
    #     # Avoids suspected computation midway during the dask graph creation...Should probably raise an xarray issue about this.
    #     ds['dzt'] = calculate_dz(ds['eta_t'], ds['ht'], ds['dst'])
    #     ds['dzu'] = grid.min(grid.min(ds['dzt'], 'X'),'Y')
    #     # Let's try this as a workaround...
    #     for vv in ['dzt', 'dzu']:
    #         ds.coords[vv] = ds[vv]
    # the dzwt value depends on the model version (finite volume vs. energetically consistent?; see MOM5 manual section 10.4.2)
    return ds
Example #10
def split_adv_budget(ds):
    print('I think this is outdated....')
    ds = ds.copy()
    if 'o2_xflux_adv' in list(ds.data_vars):
        grid = Grid(ds)
        area = ds.area_t
        div_x = -grid.diff(ds.o2_xflux_adv, 'X', boundary='fill') / area
        div_y = -grid.diff(ds.o2_yflux_adv, 'Y', boundary='fill') / area
        div_z = grid.diff(ds.o2_zflux_adv, 'Z', boundary='fill') / area

        for data, name in zip([div_x, div_y, div_z],
                              ['o2_advection_%s' % a
                               for a in ['x', 'y', 'z']]):
            ds[name] = data
    return ds
Example #11
def define_grid(ds, names):
    """build a xgcm.Grid object

    Args:
        ds (xarray.Dataset): dataset with model's grid
        names (dict): dictionary containing dimension/coordinate names

    Returns:
        xgcm.Grid: grid object
    """

    x_center, y_center = names["x_center"], names["y_center"]
    x_corner, y_corner = names["x_corner"], names["y_corner"]

    qcoord = "outer" if is_symetric(ds, names) else "right"

    grid = Grid(
        ds,
        coords={
            "X": {"center": x_center, qcoord: x_corner},
            "Y": {"center": y_center, qcoord: y_corner},
        },
        periodic=["X"],
    )
    return grid
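
A hypothetical call, assuming NEMO-style dimension names (only the keys of the dictionary are required by define_grid; the values here are placeholders):

names = {
    "x_center": "x_c", "y_center": "y_c",  # cell-center dimensions (assumed names)
    "x_corner": "x_f", "y_corner": "y_f",  # cell-corner dimensions (assumed names)
}
grid = define_grid(ds, names)  # ds is an xarray.Dataset that carries these dimensions
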
Example #12
def full_preprocessing(dat_dict, modelname,
                       tracer_ref="thetao",
                       u_ref="uo",
                       v_ref="vo",
                       plot=True,
                       verbose=False):
    """Fully preprocess data for one model ensemble .
    The input needs to be a dictionary in the form:
    {'<source_id>':{'<varname_a>':'<uri_a>', '<varname_b>':'<uri_b>', ...}}
    """
    renaming_dict = cmip6_renaming_dict()
    # homogenize the naming
    dat_dict = {
        var: cmip6_homogenization(data, renaming_dict[modelname])
        for var, data in dat_dict.items()
    }

    # broadcast lon and lat values if they are 1d
    if renaming_dict[modelname]["lon"] is None:
        dat_dict = {var: broadcast_lonlat(data) for var, data in dat_dict.items()}
    
    # merge all variables together on the correct staggered grid
    ds = merge_variables_on_staggered_grid(
        dat_dict, modelname, u_ref=u_ref, v_ref=v_ref, plot=plot, verbose=verbose
    )

    grid_temp = Grid(ds)
    ds = recreate_metrics(ds, grid_temp)
    return ds
Example #13
def test_grid_repr(all_datasets):
    grid = Grid(all_datasets)
    print(grid)
    r = repr(grid).split('\n')
    assert r[0] == "<xgcm.Grid>"
    # all datasets should have at least an X axis
    assert r[1].startswith('X-axis:')
Example #14
def lat_slice(ds, ref, model_processing=True):
    ds = ds.copy()
    if model_processing:
        grid = Grid(ds)
        ds = interp_all(grid, ds)
    ds = ds.interp(xt_ocean=ref.xt_ocean)
    return ds
Example #15
def eq_mean(
    ds,
    roi=dict(yt_ocean=slice(-1, 1)),
    model_processing=True,
    coord_include=["dzt"],
):
    """Calculate the equatorial slices, defined by the weighted mean around the equator.
    `model_processing` will add budget decompositions and interpolate all fields onto tracer grid."""
    ds = ds.copy()
    for co in coord_include:
        if co in list(ds.variables):
            ds[co + "_temp"] = ds[co]
            ds = ds.drop(co)
        # This works, but it seems clunky to go to these lengths just to treat a coordinate as a data variable

    if model_processing:
        grid = Grid(ds)
        ds = interp_all(grid, ds)
        # This step is not conservative. Just a linear interpolation. But that only has a small effect on the velocities, since the budget terms are already on the tracer grid. Same for the other functions.
    ds = ds.sel(**roi)

    ds_mean = weighted_mean(ds, ds.dyt, dim="yt_ocean")

    for co in coord_include:
        tempname = co + "_temp"
        if tempname in list(ds_mean.data_vars):
            ds_mean.coords[co] = ds_mean[tempname]
            ds_mean = ds_mean.drop(tempname)

    ds_mean.attrs["averaged region"] = str(roi)

    return ds_mean
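
The weighted_mean helper is not shown here; a minimal sketch of the usual weighted average along one dimension (xarray's built-in ds.weighted(weights).mean(dim) is an equivalent alternative):

def weighted_mean(ds, weights, dim):
    """Weighted mean of ds along `dim`, with the weights broadcast against ds."""
    return (ds * weights).sum(dim) / weights.sum(dim)
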
Example #16
def interpolated_velocities(u, v):
    """Interpolate the staggered horizontal velocities to the cell centers

    Uses Ryan Abernathey's xgcm package.

    Parameters
    ----------
    u, v : xr.DataArray
        staggered velocities.

    Returns
    -------
    uint, vint : xr.DataArray
        interpolated velocities on the (xc, yc) grid
    """
    # setup grid
    xl = u.x.values
    xc = xl + (xl[1] - xl[0]) / 2

    yl = u.y.values
    yc = yl + (yl[1] - yl[0]) / 2

    u = u.rename({'x': 'xl', 'y': 'yc'}).assign_coords(xl=xl, yc=yc)
    v = v.rename({'x': 'xc', 'y': 'yl'}).assign_coords(yl=yl, xc=xc)

    # combine the data
    ds = xr.Dataset({'u': u, 'v': v})

    # create grid object
    coords = {
        'x': {
            'center': 'xc',
            'left': 'xl'
        },
        'y': {
            'center': 'yc',
            'left': 'yl'
        }
    }
    grid = Grid(ds, periodic=['x'], coords=coords)

    # use grid to interpolate
    uint = grid.interp(ds.u, axis='x')
    vint = grid.interp(ds.v, axis='y', boundary='extend')

    return uint, vint
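
A self-contained usage sketch with synthetic data (the field values and grid spacing are made up for illustration):

import numpy as np
import xarray as xr

x = np.arange(0.0, 360.0, 45.0)   # staggered x positions
y = np.arange(-60.0, 61.0, 30.0)  # staggered y positions
u = xr.DataArray(np.random.rand(len(y), len(x)), dims=("y", "x"), coords={"x": x, "y": y})
v = xr.DataArray(np.random.rand(len(y), len(x)), dims=("y", "x"), coords={"x": x, "y": y})

uint, vint = interpolated_velocities(u, v)
print(uint.dims, vint.dims)  # both now live on the ('yc', 'xc') cell centers
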
Example #17
def test_get_axis_pos():
    datadict = datasets()
    ds = datadict["C"]
    coords = datadict["coords"]
    grid = Grid(ds, coords=coords)
    assert _get_axis_pos(grid, "X", ds.u) == "right"
    assert _get_axis_pos(grid, "X", ds.tracer) == "center"
    assert _get_axis_pos(grid, "Z", ds.u) is None
Example #18
def calculate_momentum_budget(ds):
    grid = Grid(ds)

    combo = xr.Dataset()

    combo["u"] = ds.u
    combo["v"] = ds.v

    combo["du_dx"] = grid.diff(ds.u, "X") / ds.dxtn
    combo["du_dy"] = grid.diff(ds.u, "Y") / ds.dyte

    combo["u_du_dx"] = grid.interp(-combo["du_dx"] * grid.interp(ds.u, "X"),
                                   "Y")
    combo["v_du_dy"] = grid.interp(-combo["du_dy"] * grid.interp(ds.v, "Y"),
                                   "X")

    combo["hor"] = combo["u_du_dx"] + combo["v_du_dy"]
    combo["hor"].attrs[
        "long_name"] = "Zonal Velocity tendency due to hor divergence of momentum"
    combo["hor"].attrs["units"] = "m/s^(-2)"

    # Add tracer and vertical velocity to pick up all relevant coordinates, then drop them again
    combo["wt"] = ds.wt  # for now just to include 'sw_ocean'
    combo["temp"] = ds.temp
    combo = combo.drop(["wt", "temp"])
    return combo
Example #19
    def test_face_connections_right_left_same_axis(self, boundary_width,
                                                   ds_faces, fill_value):
        face_connections = {
            "face": {
                0: {
                    "X": (None, (1, "X", False))
                },
                1: {
                    "X": ((0, "X", False), None)
                },
            }
        }
        grid = Grid(ds_faces, face_connections=face_connections)
        data = ds_faces.data_c

        # fill in zeros for y boundary width if not given
        boundary_width["Y"] = boundary_width.get("Y", (0, 0))

        # # restrict data here, so it's easier to see the output
        # data = data.isel(y=slice(0, 2), x=slice(0, 2))
        data = data.reset_coords(drop=True).reset_index(data.dims, drop=True)

        # prepad the arrays
        face_0_padded, face_1_padded = _prepad_right_left_same_axis(
            data, boundary_width, fill_value)

        # then simply add the corresponding slice to each face according to the connection
        face_0_expected = xr.concat(
            [
                face_0_padded,
                face_1_padded.isel(x=slice(0, boundary_width["X"][1]))
            ],
            dim="x",
        )
        face_1_expected = xr.concat(
            [
                face_0_padded.isel(x=slice(
                    -boundary_width["X"][0],
                    None if boundary_width["X"][0] > 0 else 0,
                    # This is a bit annoying: if the boundary width on this side is
                    # 0, nothing should be padded, but slice(0, None) would select the whole array...
                )),
                face_1_padded,
            ],
            dim="x",
        )

        expected = xr.concat([face_0_expected, face_1_expected], dim="face")
        result = pad(
            data,
            grid,
            boundary_width=boundary_width,
            boundary="fill",
            fill_value=fill_value,
            other_component=None,
        )
        xr.testing.assert_allclose(result, expected)
Example #20
def test_interp_g_to_c_periodic(periodic_1d):
    """Interpolate from c grid to g grid."""
    ds = periodic_1d

    # a sinusoidal signal on the XG points
    data_g = np.sin(ds['XG'])
    data_expected = 0.5 * (data_g.values + np.roll(data_g.values, -1))

    grid = Grid(ds)
    data_c = grid.interp(data_g, 'X')

    # check that the dimensions are right
    assert data_c.dims == ('XC', )
    xr.testing.assert_equal(data_c.XC, ds.XC)
    assert len(data_c.XC) == len(data_c)

    # check that the values are right
    np.testing.assert_allclose(data_c.values, data_expected)
Example #21
def recreate_grid_simple(ds, lon_name="lon", lat_name="lat"):
    ds_full = generate_grid_ds(ds, {
        "X": "x",
        "Y": "y"
    },
                               position=("center", "right"))
    grid = Grid(ds_full, periodic=["X"])
    ds_full = recreate_metrics(ds, grid)
    return ds_full
Example #22
def test_find_dim():
    datadict = datasets()
    ds = datadict["C"]
    grid = Grid(ds)
    assert _find_dim(grid, ds, "X") == ["xt", "xu"]
    assert _find_dim(grid, ds, "Z") is None
    assert _find_dim(grid, ds["timeseries"], "X") is None
    assert _find_dim(grid, ds["timeseries"], "X") is None
    assert _find_dim(grid, ds["tracer"], "X") == ["xt"]
    assert _find_dim(grid, ds["u"], "X") == ["xu"]
Example #23
def depth_slice(ds, st_ocean=250, model_processing=True):
    """Calculate a depth slice via interpolation.
    `model_processing` will interpolate all fields onto tracer grid."""
    ds = ds.copy()
    if model_processing:
        grid = Grid(ds)
        ds = interp_all(grid, ds)
    ds = ds.interp(st_ocean=st_ocean)
    ds.attrs["depth_slice"] = str(st_ocean)
    return ds
Example #24
def test_diff_c_to_g_nonperiodic(nonperiodic_1d):
    ds = nonperiodic_1d

    # a linear gradient in the ni direction
    grad = 0.24
    data_c = grad * ds['ni']
    data_expected = data_c.values[1:] - data_c.values[:-1]

    grid = Grid(ds, x_periodic=False)
    data_u = grid.diff(data_c, 'X')

    # check that the dimensions are right
    assert data_u.dims == ('ni_u', )
    xr.testing.assert_equal(data_u.ni_u, ds.ni_u[1:-1])
    assert len(data_u.ni_u) == len(data_u)

    # check that the values are right
    np.testing.assert_allclose(data_u.values, data_expected)
    np.testing.assert_allclose(data_u.values, grad)
Example #25
def test_interp_g_to_c_nonperiodic(nonperiodic_1d):
    """Interpolate from g grid to c grid."""

    ds = nonperiodic_1d

    # a linear gradient in the ni direction
    grad = 0.43
    data_u = grad * ds['ni_u']
    data_expected = 0.5 * (data_u.values[1:] + data_u.values[:-1])

    grid = Grid(ds, x_periodic=False)
    data_c = grid.interp(data_u, 'X')

    # check that the dimensions are right
    assert data_c.dims == ('ni', )
    xr.testing.assert_equal(data_c.ni, ds.ni)
    assert len(data_c.ni) == len(data_c)

    # check that the values are right
    np.testing.assert_allclose(data_c.values, data_expected)
Example #26
def test_diff_g_to_c_periodic(periodic_1d):
    ds = periodic_1d

    # a sinusoidal signal on the XG points
    data_g = np.sin(ds['XG'])
    # np.roll(np.arange(5), -1) --> [1, 2, 3, 4, 0]
    # a negative shift rolls values to the left, so each point picks up its right neighbor
    data_expected = np.roll(data_g.values, -1) - data_g.values
    #data_expected = np.cos(ds['XC']).values * (2*np.pi) / 100.

    grid = Grid(ds)
    data_c = grid.diff(data_g, 'X')

    # check that the dimensions are right
    assert data_c.dims == ('XC', )
    xr.testing.assert_equal(data_c.XC, ds.XC)
    assert len(data_c.XC) == len(data_c)

    # check that the values are right
    np.testing.assert_allclose(data_c.values, data_expected)
Example #27
def test_calculate_rel_vorticity():
    datadict = datasets()
    coords = datadict["coords"]
    ds_b = datadict["B"]
    grid_b = Grid(ds_b, coords=coords)

    ds_c = datadict["C"]
    grid_c = Grid(ds_c, coords=coords)

    test_b = (grid_b.diff(grid_b.interp(ds_b.v * ds_b.dy_ne, "Y"), "X") -
              grid_b.diff(grid_b.interp(ds_b.u * ds_b.dx_ne, "X"),
                          "Y")) / ds_b.area_t

    zeta_b = calculate_rel_vorticity(
        grid_b,
        ds_b.u,
        ds_b.v,
        ds_b.dx_ne,
        ds_b.dy_ne,
        ds_b.area_t,
        gridtype=None,
    )

    test_c = (grid_c.diff(ds_c.v * ds_c.dy_n, "X") -
              grid_c.diff(ds_c.u * ds_c.dx_e, "Y")) / ds_c.area_ne

    zeta_c = calculate_rel_vorticity(
        grid_c,
        ds_c.u,
        ds_c.v,
        ds_c.dx_e,
        ds_c.dy_n,
        ds_c.area_ne,
        gridtype=None,
    )

    assert_allclose(test_b, zeta_b)
    assert_allclose(test_c, zeta_c)
    with pytest.raises(RuntimeError):
        zeta_c = calculate_rel_vorticity(
            grid_b,
            ds_c.u,
            ds_c.v,
            ds_c.dx_n,  # wrong coordinate
            ds_c.dy_n,
            ds_c.area_ne,
            gridtype=None,
        )
Example #28
def test_diff_c_to_g_periodic(periodic_1d):
    ds = periodic_1d

    # a sinusoidal signal on the XC points
    data_c = np.sin(ds['XC'])
    data_expected = data_c.values - np.roll(data_c.values, 1)

    grid = Grid(ds)
    data_g = grid.diff(data_c, 'X')

    # check that the dimensions are right
    assert data_g.dims == ('XG', )
    xr.testing.assert_equal(data_g.XG, ds.XG)
    assert len(data_g.XG) == len(data_g)

    # check that the values are right
    np.testing.assert_allclose(data_g.values, data_expected)

    # try the same with chunks
    data_c = np.sin(ds['XC'])
    data_c = data_c.chunk(10)
    data_g = grid.diff(data_c, 'X')
    np.testing.assert_allclose(data_g.values, data_expected)
Example #29
def test_infer_gridtype():
    datadict = datasets()
    coords = datadict["coords"]
    ds_b = datadict["B"]
    grid_b = Grid(ds_b, coords=coords)

    ds_c = datadict["C"]
    grid_c = Grid(ds_c, coords=coords)

    # This should fail (unknown gridtype)
    ds_fail = datadict["fail_gridtype"]
    grid_fail = Grid(ds_fail, coords=coords)

    # This is not supported yet ('inner' and 'outer' dims)
    coords2 = datadict["fail_coords"]
    ds_fail2 = datadict["fail_dimtype"]
    grid_fail2 = Grid(ds_fail2, coords=coords2)

    assert _infer_gridtype(grid_b, ds_b.u, ds_b.v) == "B"
    assert _infer_gridtype(grid_c, ds_c.u, ds_c.v) == "C"
    with pytest.raises(RuntimeError, match=r"Gridtype not recognized *"):
        _infer_gridtype(grid_fail, ds_fail.u, ds_fail.v)
    with pytest.raises(RuntimeError):  # , match=r'`inner` or `outer` *'
        _infer_gridtype(grid_fail2, ds_fail2.u, ds_fail2.v)
Example #30
def test_xgcm_weighted_mean(axis, metric_list, gridtype):
    ds = datasets()[gridtype]
    grid = Grid(ds)
    a = xgcm_weighted_mean(grid, ds, axis, metric_list)
    for var in ["tracer", "u", "v"]:
        b = w_mean(grid, ds[var], axis, metric_list)
        c = xgcm_weighted_mean(grid, ds[var], axis, metric_list)

        assert_allclose(a[var], b)
        assert_allclose(b, c)

    for var in ["timeseries"]:
        b = ds[var]
        c = xgcm_weighted_mean(grid, ds[var], axis, metric_list)

        assert_allclose(a[var], b)
        assert_allclose(b, c)