예제 #1
0
def test_opening_and_saving(name, catalog_url):
    """Open a catalog entry, sanity-check the resulting dataset, then
    round-trip it through netCDF.

    Parameters
    ----------
    name: str
        Catalog entry name ("error" exercises the failure path).
    catalog_url: str
        Path or URL of the catalog to read.
    """
    if name == "error":
        # Open oceandataset: unknown entries must raise, not return.
        with pytest.raises(ValueError):
            from_catalog(name, catalog_url)
        return

    # Open oceandataset
    od1 = from_catalog(name, catalog_url)

    # Check dimensions (raw xarray/HYCOM entries skip the staggered-grid checks)
    if name not in ["xarray", "HYCOM"]:
        dimsList = ["X", "Y", "Xp1", "Yp1"]
        assert set(dimsList).issubset(set(od1.dataset.dims))

        # Check coordinates.
        # NOTE: "HYCOM" could never reach this point (it is excluded by
        # the enclosing condition), so the original `elif name == "HYCOM"`
        # arm was unreachable dead code and has been removed.
        if name == "LLC":
            coordsList = ["XC", "YC", "XG", "YG"]
        else:
            coordsList = ["XC", "YC", "XG", "YG", "XU", "YU", "XV", "YV"]
        assert set(coordsList).issubset(set(od1.dataset.coords))

        # Check NaNs: every horizontal coordinate must be fully defined.
        assert all(
            not np.isnan(od1.dataset[coord].values).any() for coord in coordsList
        )

    if name == "LLC":
        # LLC grids expose face connectivity and a "face" dimension.
        assert isinstance(od1.face_connections["face"], dict)
        assert "face" in od1.dataset.dims

    # Check shift: averaged variables must sit on time midpoints.
    if name == "xmitgcm_iters":
        sizes = od1.dataset.sizes
        assert sizes["time"] - sizes["time_midp"] == 1
        assert all(
            "time_midp" in od1.dataset[var].dims
            for var in od1.dataset.data_vars
            if "ave" in var
        )

    # Save to netcdf
    filename = "tmp.nc"
    od1.to_netcdf(filename)

    # Reopen (LLC time coordinates are opened without CF time decoding)
    if name == "LLC":
        args = {"decode_times": False}
    else:
        args = {}
    from_netcdf(filename, **args)

    # Clean up: argument list instead of shell=True string concatenation
    # (no shell interpolation of the filename).
    subprocess.call(["rm", "-f", filename])
예제 #2
0
def test_opening_and_saving(name, catalog_url):
    """Open a catalog entry, sanity-check the dataset, and round-trip it
    through netCDF.

    Parameters
    ----------
    name: str
        Catalog entry name; "error" exercises the failure path.
    catalog_url: str
        Path or URL of the catalog to read.
    """
    if name == "error":
        # Open oceandataset: unknown entries must raise.
        with pytest.raises(ValueError):
            from_catalog(name, catalog_url)
    else:
        # Open oceandataset
        od1 = from_catalog(name, catalog_url)

        # Check dimensions (raw xarray entries carry no staggered grid)
        if name != "xarray":
            dimsList = ["X", "Y", "Xp1", "Yp1"]
            assert set(dimsList).issubset(set(od1.dataset.dims))

            # Check coordinates
            coordsList = ["XC", "YC", "XG", "YG", "XU", "YU", "XV", "YV"]
            assert set(coordsList).issubset(set(od1.dataset.coords))

            # Check NaNs: generator form avoids building a throwaway list.
            assert all(
                not np.isnan(od1.dataset[coord].values).any()
                for coord in coordsList
            )

        # Check shift: averaged variables must sit on time midpoints.
        if name == "xmitgcm_iters":
            sizes = od1.dataset.sizes
            assert sizes["time"] - sizes["time_midp"] == 1
            assert all(
                "time_midp" in od1.dataset[var].dims
                for var in od1.dataset.data_vars
                if "ave" in var
            )

        # Save to netcdf
        filename = "tmp.nc"
        od1.to_netcdf(filename)

        # Reopen
        from_netcdf(filename)

        # Clean up: pass an argument list instead of a shell=True string
        # (avoids shell interpolation of the filename).
        subprocess.call(["rm", "-f", filename])
예제 #3
0
def test_opening_and_saving(name, catalog_url):
    """Round-trip test: open a catalog entry, validate it, save, reopen.

    A name of 'error' must make ``from_catalog`` raise ``ValueError``;
    any other name is opened, checked, written to netCDF, and reopened.
    """
    if name == 'error':
        # Failure path: opening an unknown entry must raise.
        with pytest.raises(ValueError):
            from_catalog(name, catalog_url)
        return

    # Open the oceandataset from the catalog.
    opened = from_catalog(name, catalog_url)

    if name != 'xarray':
        # All staggered horizontal dimensions must be present.
        expected_dims = {'X', 'Y', 'Xp1', 'Yp1'}
        assert expected_dims.issubset(set(opened.dataset.dims))

        # All horizontal coordinates must exist...
        expected_coords = ['XC', 'YC', 'XG', 'YG', 'XU', 'YU', 'XV', 'YV']
        assert set(expected_coords).issubset(set(opened.dataset.coords))

        # ...and contain no NaN values.
        for coord in expected_coords:
            assert not np.isnan(opened.dataset[coord].values).any()

    if name == 'xmitgcm_iters':
        # Snapshots outnumber midpoints by exactly one...
        sizes = opened.dataset.sizes
        assert sizes['time'] - sizes['time_midp'] == 1
        # ...and every averaged variable lives on the midpoint axis.
        for var in opened.dataset.data_vars:
            if 'ave' in var:
                assert 'time_midp' in opened.dataset[var].dims

    # Write to disk and read back.
    filename = 'tmp.nc'
    opened.to_netcdf(filename)
    from_netcdf(filename)

    # Remove the temporary file.
    subprocess.call('rm -f ' + filename, shell=True)
예제 #4
0
import xarray as xr

# Oceanspy modules
from oceanspy import (
    AVAILABLE_PARAMETERS,
    DEFAULT_PARAMETERS,
    OCEANSPY_AXES,
    OceanDataset,
    open_oceandataset,
)

# Directory
# NOTE(review): path is relative — these fixtures presumably assume the
# test suite is launched from the repository root; confirm against CI.
Datadir = "./oceanspy/tests/Data/"
# Reference oceandataset from a rectilinear MITgcm netCDF file.
od = open_oceandataset.from_netcdf("{}MITgcm_rect_nc.nc" "".format(Datadir))
ECCO_url = "{}catalog_ECCO.yaml".format(Datadir)
# LLC-grid oceandataset opened via the ECCO catalog entry.
ECCOod = open_oceandataset.from_catalog("LLC", ECCO_url)

# Remove global attributes
# `od` with dataset-level attrs stripped, for tests that must not rely
# on global metadata.
ds = od.dataset
ds.attrs = {}
clean_od = OceanDataset(ds)

# Aliased od
# Every data variable renamed to "<var>_alias" and registered as an
# alias, so alias resolution can be exercised.
ds = od.dataset
aliases = {var: var + "_alias" for var in ds.data_vars}
ds = ds.rename(aliases)
alias_od = OceanDataset(ds).set_aliases(aliases)

# Grid only without time_midp
# Drop all data variables, leaving only coordinates; the time_midp
# handling mentioned here presumably continues past this excerpt.
ds = clean_od.dataset
ds = ds.drop_vars(ds.data_vars)
예제 #5
0
# From OceanSpy
from oceanspy import open_oceandataset, OceanDataset

# Directory
# NOTE(review): relative path — presumably run from the repository root.
Datadir = "./oceanspy/tests/Data/"

# Test oceandataset
# Three small fixture datasets: curvilinear netCDF, rectilinear binary,
# and rectilinear netCDF MITgcm outputs.
MITgcm_curv_nc = open_oceandataset.from_netcdf("{}MITgcm_curv_nc.nc" "".format(Datadir))
MITgcm_rect_bin = open_oceandataset.from_netcdf(
    "{}MITgcm_rect_bin.nc" "".format(Datadir)
)
MITgcm_rect_nc = open_oceandataset.from_netcdf("{}MITgcm_rect_nc.nc" "".format(Datadir))

# LLC-grid oceandataset opened via the ECCO catalog entry.
ECCO_url = "{}catalog_ECCO.yaml".format(Datadir)
ECCOod = open_oceandataset.from_catalog('LLC', ECCO_url)


# =======
# CUTOUT
# =======
od = MITgcm_curv_nc
# Same dataset with a singleton "mooring" dimension added.
moor_od = OceanDataset(od.dataset.expand_dims("mooring"))
# Values one unit below each axis minimum — presumably used to trigger
# out-of-range warnings in cutout tests (hence the *warn names).
# NOTE(review): "time" min - 1 assumes numeric (non-datetime) time
# values — confirm against the fixture file.
Ywarn = od.dataset["YG"].min() - 1
Xwarn = od.dataset["XG"].min() - 1
Zwarn = od.dataset["Zp1"].min() - 1
Twarn = od.dataset["time"].min() - 1
# Full-extent ranges covering the whole spatial domain and time axis.
YRange = [od.dataset["YG"].min(), od.dataset["YG"].max()]
XRange = [od.dataset["XG"].min(), od.dataset["XG"].max()]
ZRange = [od.dataset["Zp1"].min(), od.dataset["Zp1"].max()]
timeRange = [od.dataset["time"].values[0], od.dataset["time"].values[-1]]