Example #1
0
def test_region_datashade():
    """
    Check that datashading a pandas.DataFrame within a Region's bounds
    produces a grid with the expected shape and height statistics.
    """
    region = Region(
        "Kitaa, Greenland", -1_600_000, -1_520_000, -1_360_000, -1_300_000
    )

    # Load the test dataset lazily and project lon/lat into EPSG:3995 x/y
    dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask()
    dataset["x"], dataset["y"] = lonlat_to_xy(
        longitude=dataset.longitude, latitude=dataset.latitude, epsg=3995
    )
    dataset = dataset.set_coords(["x", "y"])
    dataframe: pd.DataFrame = dataset.h_corr.to_dataframe()

    grid: xr.DataArray = region.datashade(
        df=dataframe, z_dim="h_corr", plot_width=100
    )

    # The region's aspect ratio should be preserved in the rasterized grid
    assert grid.shape == (75, 100)
    npt.assert_allclose(grid.min(), 1426.336637)
    npt.assert_allclose(grid.mean(), 1668.94741)
    npt.assert_allclose(grid.max(), 1798.066285)
Example #2
0
def test_lonlat_to_xy_dask_series():
    """
    Check that longitude/latitude to EPSG:3031 x/y conversion works when the
    inputs are dask.dataframe.core.Series objects.
    """
    dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask()
    dataframe: dask.dataframe.core.DataFrame = dataset.to_dask_dataframe()

    x, y = lonlat_to_xy(
        longitude=dataframe.longitude, latitude=dataframe.latitude
    )

    # Means of the projected coordinates pin the expected transform output
    npt.assert_equal(actual=x.mean(), desired=-56900105.00307033)
    npt.assert_equal(actual=y.mean(), desired=48141607.48486084)
Example #3
0
def test_lonlat_to_xy_xarray_dataarray():
    """
    Check that longitude/latitude to EPSG:3031 x/y conversion works when the
    inputs are xarray.DataArray objects, and that the xarray dimensions
    survive the round trip.
    """
    dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask()

    x, y = lonlat_to_xy(
        longitude=dataset.longitude, latitude=dataset.latitude
    )

    # Dimension name and length must be carried over from the inputs
    assert x.dims == y.dims == ("ref_pt",)
    assert x.shape == y.shape == (1404,)
    npt.assert_equal(actual=x.mean().data, desired=-56900105.00307034)
    npt.assert_equal(actual=y.mean().data, desired=48141607.48486084)
Example #4
0
    engine="zarr",
    combine="nested",
    concat_dim="ref_pt",
    parallel="True",
    backend_kwargs={"consolidated": True},
)

# %% [markdown]
# ## Light pre-processing
#
# - Reproject longitude/latitude to EPSG:3031 x/y
# - Mask out low quality height data

# %%
# Calculate the EPSG:3031 x/y projection coordinates
# Calculate the EPSG:3031 x/y projection coordinates from longitude/latitude
ds["x"], ds["y"] = deepicedrain.lonlat_to_xy(longitude=ds.longitude,
                                             latitude=ds.latitude)
# Set x, y, x_atc and y_atc as coordinates of the xarray.Dataset
ds: xr.Dataset = ds.set_coords(names=["x", "y", "x_atc", "y_atc"])

# %%
# Mask out low quality height data: where() keeps h_corr values whose
# quality_summary_ref_surf flag is 0 and replaces the rest with NaN
# (presumably 0 means best quality per the ATL11 product spec — TODO confirm)
ds["h_corr"]: xr.DataArray = ds.h_corr.where(
    cond=ds.quality_summary_ref_surf == 0)

# %% [markdown]
# ## Trim out unnecessary values (optional)
#
# There's ~220 million ATL11 points for the whole of Antarctica,
# and not all of them will be needed depending on what you want to do.
# To cut down on the number of data points the computer needs to work on,
# we can: