def test_opening_and_saving(name, catalog_url):
    """Open a catalog entry, validate the resulting oceandataset, and
    round-trip it through netCDF.

    Parameters
    ----------
    name: str
        Catalog entry name; "error" is expected to raise ValueError.
    catalog_url: str
        URL/path of the catalog to open.
    """
    if name == "error":
        # Open oceandataset: unknown entries must raise.
        with pytest.raises(ValueError):
            from_catalog(name, catalog_url)
    else:
        # Open oceandataset
        od1 = from_catalog(name, catalog_url)

        # Check dimensions (HYCOM and plain-xarray entries lack MITgcm grid dims)
        if name not in ["xarray", "HYCOM"]:
            dimsList = ["X", "Y", "Xp1", "Yp1"]
            assert set(dimsList).issubset(set(od1.dataset.dims))

        # Check coordinates (each catalog flavor exposes a different subset)
        if name == "LLC":
            coordsList = ["XC", "YC", "XG", "YG"]
        elif name == "HYCOM":
            coordsList = ["XC", "YC"]
        else:
            coordsList = ["XC", "YC", "XG", "YG", "XU", "YU", "XV", "YV"]
        assert set(coordsList).issubset(set(od1.dataset.coords))

        # Check NaNs: grid coordinates must be fully populated
        assert all(
            [not np.isnan(od1.dataset[coord].values).any() for coord in coordsList]
        )

        if name == "LLC":
            # isinstance is the idiomatic (and subclass-safe) type check,
            # preferred over `type(...) == dict`.
            assert isinstance(od1.face_connections["face"], dict)
            assert set(["face"]).issubset(set(od1.dataset.dims))

        # Check shift: time_midp is the midpoint grid, one element shorter
        if name == "xmitgcm_iters":
            sizes = od1.dataset.sizes
            assert sizes["time"] - sizes["time_midp"] == 1
            assert all(
                [
                    "time_midp" in od1.dataset[var].dims
                    for var in od1.dataset.data_vars
                    if "ave" in var
                ]
            )

        # Save to netcdf
        filename = "tmp.nc"
        od1.to_netcdf(filename)

        # Reopen (LLC times are not CF-decodable, so skip decoding there)
        if name == "LLC":
            args = {"decode_times": False}
        else:
            args = {}
        from_netcdf(filename, **args)

        # Clean up: pass an argument list instead of a shell string
        # (no shell involved, no injection/quoting hazards).
        subprocess.call(["rm", "-f", filename])
def test_save_load(od, compute):
    """Round-trip an oceandataset through both netCDF and zarr stores,
    reopening each to confirm it is readable, then remove the artifacts."""
    base = "test_path"
    nc_file = base + ".nc"

    # netCDF round trip
    od.to_netcdf(path=nc_file, compute=compute)
    open_oceandataset.from_netcdf(nc_file)
    os.remove(nc_file)

    # zarr round trip
    od.to_zarr(path=base, compute=compute)
    open_oceandataset.from_zarr(base)
    shutil.rmtree(base, ignore_errors=True)
def test_save_load(od, compute):
    """Save an oceandataset to netCDF and zarr, reopen each store to
    confirm readability, and clean up the files afterwards.

    Fix: the reopened datasets were bound to an unused local (`new_od`,
    flake8 F841); the bindings are dropped — only the smoke-open matters.
    """
    path = 'test_path'

    # netCDF round trip
    od.to_netcdf(path=path + '.nc', compute=compute)
    open_oceandataset.from_netcdf(path + '.nc')
    os.remove(path + '.nc')

    # zarr round trip
    od.to_zarr(path=path, compute=compute)
    open_oceandataset.from_zarr(path)
    shutil.rmtree(path, ignore_errors=True)
def test_opening_and_saving(name, catalog_url):
    """Open a catalog entry, validate dims/coords, and round-trip it
    through netCDF.

    Parameters
    ----------
    name: str
        Catalog entry name; "error" is expected to raise ValueError.
    catalog_url: str
        URL/path of the catalog to open.
    """
    if name == "error":
        # Open oceandataset: unknown entries must raise.
        with pytest.raises(ValueError):
            from_catalog(name, catalog_url)
    else:
        # Open oceandataset
        od1 = from_catalog(name, catalog_url)

        # Check dimensions (plain-xarray entries lack MITgcm grid dims)
        if name != "xarray":
            dimsList = ["X", "Y", "Xp1", "Yp1"]
            assert set(dimsList).issubset(set(od1.dataset.dims))

        # Check coordinates
        coordsList = ["XC", "YC", "XG", "YG", "XU", "YU", "XV", "YV"]
        assert set(coordsList).issubset(set(od1.dataset.coords))

        # Check NaNs: grid coordinates must be fully populated
        assert all([
            not np.isnan(od1.dataset[coord].values).any()
            for coord in coordsList
        ])

        # Check shift: time_midp is the midpoint grid, one element shorter
        if name == "xmitgcm_iters":
            sizes = od1.dataset.sizes
            assert sizes["time"] - sizes["time_midp"] == 1
            assert all([
                "time_midp" in od1.dataset[var].dims
                for var in od1.dataset.data_vars
                if "ave" in var
            ])

        # Save to netcdf
        filename = "tmp.nc"
        od1.to_netcdf(filename)

        # Reopen
        from_netcdf(filename)

        # Clean up: pass an argument list instead of a shell string
        # (no shell involved, no injection/quoting hazards).
        subprocess.call(["rm", "-f", filename])
def test_opening_and_saving(name, catalog_url):
    """Open a catalog entry, validate dims/coords, and round-trip it
    through netCDF.

    Parameters
    ----------
    name: str
        Catalog entry name; 'error' is expected to raise ValueError.
    catalog_url: str
        URL/path of the catalog to open.
    """
    if name == 'error':
        # Open oceandataset: unknown entries must raise.
        with pytest.raises(ValueError):
            from_catalog(name, catalog_url)
    else:
        # Open oceandataset
        od1 = from_catalog(name, catalog_url)

        # Check dimensions (plain-xarray entries lack MITgcm grid dims)
        if name != 'xarray':
            dimsList = ['X', 'Y', 'Xp1', 'Yp1']
            assert set(dimsList).issubset(set(od1.dataset.dims))

        # Check coordinates
        coordsList = ['XC', 'YC', 'XG', 'YG', 'XU', 'YU', 'XV', 'YV']
        assert set(coordsList).issubset(set(od1.dataset.coords))

        # Check NaNs: grid coordinates must be fully populated
        assert all([
            not np.isnan(od1.dataset[coord].values).any()
            for coord in coordsList
        ])

        # Check shift: time_midp is the midpoint grid, one element shorter
        if name == 'xmitgcm_iters':
            sizes = od1.dataset.sizes
            assert sizes['time'] - sizes['time_midp'] == 1
            assert all([
                'time_midp' in od1.dataset[var].dims
                for var in od1.dataset.data_vars
                if 'ave' in var
            ])

        # Save to netcdf
        filename = 'tmp.nc'
        od1.to_netcdf(filename)

        # Reopen
        from_netcdf(filename)

        # Clean up: pass an argument list instead of a shell string
        # (no shell involved, no injection/quoting hazards).
        subprocess.call(['rm', '-f', filename])
def test_from_netcdf():
    """Save a sample oceandataset to netCDF, reopen it, and verify the
    round trip preserves both the public dataset and the internal one.

    Fix: the temp file was removed only after the assertions, so a failed
    assertion leaked `tmp_oceandataset.nc`; cleanup now runs in `finally`.
    """
    import os

    # Dataset
    orig_od = oceandatasets['MITgcm_rect_nc']

    # Save
    filename = 'tmp_oceandataset.nc'
    orig_od.to_netcdf(filename)

    try:
        # Open and compare: public dataset and internal _ds must match
        open_od = from_netcdf(filename)
        assert orig_od.dataset.equals(open_od.dataset)
        assert orig_od._ds.equals(open_od._ds)
    finally:
        # Clean up even if an assertion fails, so reruns start clean
        os.remove(filename)
import pytest import xarray as xr import numpy as np # From OceanSpy from oceanspy import open_oceandataset, OceanDataset from oceanspy.plot import TS_diagram, time_series, horizontal_section, vertical_section # Matplotlib (keep it below oceanspy) import matplotlib.pyplot as plt # Directory Datadir = "./oceanspy/tests/Data/" # Test oceandataset od = open_oceandataset.from_netcdf("{}MITgcm_rect_nc.nc" "".format(Datadir)) # Create mooring, sruvey, and particles Xmoor = [od.dataset["XC"].min().values, od.dataset["XC"].max().values] Ymoor = [od.dataset["YC"].min().values, od.dataset["YC"].max().values] od_moor = od.subsample.mooring_array(Xmoor=Xmoor, Ymoor=Ymoor) Xsurv = [ od.dataset["XC"].min().values, od.dataset["XC"].mean().values, od.dataset["XC"].max().values, ] Ysurv = [ od.dataset["YC"].min().values, od.dataset["YC"].mean().values, od.dataset["YC"].max().values,
mooring_volume_transport, normal_strain, potential_density_anomaly, relative_vorticity, salt_budget, shear_strain, survey_aligned_velocities, velocity_magnitude, vertical_relative_vorticity, ) # Directory Datadir = "./oceanspy/tests/Data/" # Create an oceandataset for testing calculus functions od = open_oceandataset.from_netcdf("{}MITgcm_rect_nc.nc" "".format(Datadir)) # Create an oceandataset for testing calculus functions od_curv = open_oceandataset.from_netcdf("{}MITgcm_curv_nc.nc" "".format(Datadir)) # Aliased od ds = od.dataset aliases = {var: var + "_alias" for var in ds.data_vars} ds = ds.rename(aliases) alias_od = ospy.OceanDataset(ds).set_aliases(aliases) # Budgets od_bdg = open_oceandataset.from_netcdf("{}budgets.nc" "".format(Datadir))
import xarray as xr
import numpy as np

# From OceanSpy
from oceanspy import open_oceandataset, OceanDataset
from oceanspy.plot import (TS_diagram, time_series,
                           horizontal_section, vertical_section)

# Matplotlib (keep it below oceanspy)
import matplotlib.pyplot as plt

# Directory with the small test datasets shipped with the package
Datadir = './oceanspy/tests/Data/'

# Test oceandataset shared by every test in this module
od = open_oceandataset.from_netcdf('{}MITgcm_rect_nc.nc'
                                   ''.format(Datadir))

# Create mooring, survey, and particles fixtures.
# Mooring: straight line between opposite corners of the domain.
Xmoor = [od.dataset['XC'].min().values,
         od.dataset['XC'].max().values]
Ymoor = [od.dataset['YC'].min().values,
         od.dataset['YC'].max().values]
od_moor = od.subsample.mooring_array(Xmoor=Xmoor, Ymoor=Ymoor)

# Survey: three stations along the min/mean/max diagonal
Xsurv = [
    od.dataset['XC'].min().values,
    od.dataset['XC'].mean().values,
    od.dataset['XC'].max().values
]
Ysurv = [
    od.dataset['YC'].min().values,
    od.dataset['YC'].mean().values,
    od.dataset['YC'].max().values
]
od_surv = od.subsample.survey_stations(Xsurv=Xsurv, Ysurv=Ysurv)
# TODO: cartesian, and Xp1 Yp1 right are not tested. import pytest import numpy as np import xarray as xr from numpy.testing import assert_array_equal # From OceanSpy from oceanspy import open_oceandataset, OceanDataset # Directory Datadir = "./oceanspy/tests/Data/" # Test oceandataset MITgcm_curv_nc = open_oceandataset.from_netcdf("{}MITgcm_curv_nc.nc" "".format(Datadir)) MITgcm_rect_bin = open_oceandataset.from_netcdf("{}MITgcm_rect_bin.nc" "".format(Datadir)) MITgcm_rect_nc = open_oceandataset.from_netcdf("{}MITgcm_rect_nc.nc" "".format(Datadir)) # ======= # CUTOUT # ======= od = MITgcm_curv_nc moor_od = OceanDataset(od.dataset.expand_dims("mooring")) Ywarn = od.dataset["YG"].min() - 1 Xwarn = od.dataset["XG"].min() - 1 Zwarn = od.dataset["Zp1"].min() - 1 Twarn = od.dataset["time"].min() - 1 YRange = [od.dataset["YG"].min(), od.dataset["YG"].max()] XRange = [od.dataset["XG"].min(), od.dataset["XG"].max()]
import pytest
import xarray as xr
import copy

# Wildcard import brings heat_budget (and the other compute functions)
# into the module namespace.
from oceanspy.compute import *
from oceanspy import utils, open_oceandataset
from numpy.testing import assert_array_equal, assert_allclose
import numpy as np

from .test_compute_static import check_params, ds_out_IN_od_out

# Add variables: oceandataset with precomputed budget terms
od_in = open_oceandataset.from_netcdf('./oceanspy/tests/Data/budgets.nc')


def test_heat_budget():
    """Check heat-budget terms: units, parameters, and closure of the
    tendency against the sum of the convergence/forcing terms."""
    # Compute heat_budget
    ds_out = heat_budget(od_in)
    for var in ds_out.data_vars:
        assert ds_out[var].attrs['units'] == 'degC/s'
        check_params(ds_out, var, ['rho0', 'c_p'])

    # Budget closure: tendency must equal the sum of all RHS terms
    # (tolerance 1e-17 — essentially machine precision for these fields)
    tendH = ds_out['tendH']
    check = ds_out['adv_hConvH'] + ds_out['adv_vConvH'] + ds_out[
        'dif_vConvH'] + ds_out['kpp_vConvH'] + ds_out['forcH']
    assert np.fabs(tendH - check).max().values < 1.e-17

    # Test shortcut: accessor form must merge the same variables
    od_out = od_in.compute.heat_budget()
    ds_out_IN_od_out(ds_out, od_out)