def test_gauss_seidel():
    """Check the Gauss-Seidel filling of undefined grid values."""
    grid = load_data()
    _, filled0 = pyinterp.fill.gauss_seidel(grid, num_threads=0)
    _, filled1 = pyinterp.fill.gauss_seidel(grid, num_threads=1)
    _, filled2 = pyinterp.fill.gauss_seidel(grid,
                                            first_guess='zero',
                                            num_threads=0)
    data = np.copy(grid.array)
    # Zero out the undefined values so the arrays can be compared
    # arithmetically.
    for array in (data, filled0, filled1, filled2):
        array[np.isnan(array)] = 0
    # Multi-threaded and single-threaded runs must agree exactly.
    assert (filled0 - filled1).mean() == 0
    # Originally defined values are left untouched by the filling.
    assert np.ma.fix_invalid(grid.array - filled1).mean() == 0
    # The undefined values were actually modified.
    assert (data - filled1).mean() != 0
    # A different first guess yields a different result.
    assert (filled2 - filled1).mean() != 0
    with pytest.raises(ValueError):
        pyinterp.fill.gauss_seidel(grid, '_')
    # The filling also works on a grid built from raw axes and values.
    x_axis = pyinterp.Axis(np.linspace(-180, 180, 10), is_circle=True)
    y_axis = pyinterp.Axis(np.linspace(-90, 90, 10), is_circle=False)
    random_field = np.random.rand(len(x_axis), len(y_axis))
    grid = pyinterp.Grid2D(x_axis, y_axis, random_field)
    _, filled0 = pyinterp.fill.gauss_seidel(grid, num_threads=0)
    assert isinstance(filled0, np.ndarray)
def init(self, dtype):
    """Run the Binning2D workflow checks for the given accumulator dtype.

    Args:
        dtype: floating-point type used to accumulate the statistics.
    """
    ds = xr.load_dataset(self.GRID)
    x_axis = pyinterp.Axis(np.arange(-180, 180, 5), is_circle=True)
    y_axis = pyinterp.Axis(np.arange(-90, 95, 5))
    binning = pyinterp.Binning2D(x_axis, y_axis,
                                 pyinterp.geodetic.System(),
                                 dtype=dtype)
    self.assertEqual(x_axis, binning.x)
    self.assertEqual(y_axis, binning.y)
    self.assertIsInstance(str(binning), str)
    lon, lat = np.meshgrid(ds.lon, ds.lat)
    # Simple binning.
    binning.push(lon, lat, ds.mss, simple=True)
    simple_mean = binning.variable('mean')
    self.assertIsInstance(simple_mean, np.ndarray)
    binning.clear()
    # Linear binning.
    binning.push(lon, lat, ds.mss, simple=False)
    linear_mean = binning.variable('mean')
    # BUG FIX: the original re-checked ``simple_mean`` here (copy-paste);
    # the intent is to validate the linear-binning result just computed.
    self.assertIsInstance(linear_mean, np.ndarray)
    # The two binning strategies must not give identical fields.
    self.assertFalse(np.all(linear_mean == simple_mean))
    self.assertIsInstance(binning.variable("sum"), np.ndarray)
    self.assertIsInstance(binning.variable("count"), np.ndarray)
    with self.assertRaises(ValueError):
        binning.variable("_")
def load_dataset(
        self, first_date: np.datetime64,
        last_date: np.datetime64) -> pyinterp.backends.xarray.Grid3D:
    """Loads the 3D cube describing the SSH in time and space."""
    # Reject periods outside the known time series.
    if first_date < self.ts["date"][0] or last_date > self.ts["date"][-1]:
        raise IndexError(
            f"period [{first_date}, {last_date}] is out of range: "
            f"[{self.ts['date'][0]}, {self.ts['date'][-1]}]")
    # Widen the requested period by one time step on each side.
    first_date -= self.dt
    last_date += self.dt
    # NOTE(review): the widened dates are never used below — every path of
    # the time series is loaded regardless of the requested period.  Confirm
    # whether a date-based selection (as done in the other load_dataset
    # variant in this project) was intended here.
    selected = self.ts["path"][:]
    ds = xr.open_mfdataset(selected,
                           concat_dim="time",
                           combine="nested",
                           decode_times=False)
    # Keep only the surface level.
    ds = ds.sel(depth=0)
    x_axis = pyinterp.Axis(ds.variables["lon"][:], is_circle=True)
    y_axis = pyinterp.Axis(ds.variables["lat"][:])
    # Times are stored as hours since 2000-01-01: one hour is
    # 3_600_000_000 microseconds.
    hours = (ds.variables['time'][:].data *
             3600000000).astype('timedelta64[us]')
    time = np.datetime64('2000') + hours
    z_axis = pyinterp.TemporalAxis(time)
    # Transpose so the array layout matches the (x, y, t) axis order.
    var = ds.surf_el[:].T
    return pyinterp.Grid3D(x_axis, y_axis, z_axis, var)
def test_bicubic():
    """Check the bicubic interpolation of a 2D geodetic grid."""
    grid = pyinterp.backends.xarray.Grid2D(xr.load_dataset(GRID).mss)
    # Query points shifted off the grid nodes to force real interpolation.
    lon = np.arange(-180, 180, 1) + 1 / 3.0
    lat = np.arange(-90, 90, 1) + 1 / 3.0
    x, y = np.meshgrid(lon, lat, indexing="ij")
    z = grid.bicubic(collections.OrderedDict(lon=x.flatten(),
                                             lat=y.flatten()))
    assert isinstance(z, np.ndarray)
    # Each fitting model must give a result different from the default call.
    for fitting_model in [
            'linear', 'bicubic', 'polynomial', 'c_spline',
            'c_spline_periodic', 'akima', 'akima_periodic', 'steffen'
    ]:
        other = grid.bicubic(collections.OrderedDict(lon=x.flatten(),
                                                     lat=y.flatten()),
                             fitting_model=fitting_model)
        assert (z - other).mean() != 0
    # Out-of-bounds queries raise when bounds_error is set.
    with pytest.raises(ValueError):
        grid.bicubic(collections.OrderedDict(lon=x.flatten(),
                                             lat=y.flatten()),
                     bounds_error=True)
    with pytest.raises(ValueError):
        grid.bicubic(collections.OrderedDict(lon=x.flatten(),
                                             lat=y.flatten()),
                     bounds_error=True,
                     boundary="sym")
    x_axis = pyinterp.Axis(np.linspace(-180, 179, 360), is_circle=True)
    y_axis = pyinterp.Axis(np.linspace(-90, 90, 181), is_circle=False)
    z_axis = pyinterp.Axis(np.linspace(0, 10, 10), is_circle=False)
    matrix, _ = np.meshgrid(x_axis[:], y_axis[:])
    grid = pyinterp.Grid2D(x_axis, y_axis, matrix.T)
    assert isinstance(grid, pyinterp.Grid2D)
    # Unknown fitting model / boundary names are rejected.
    with pytest.raises(ValueError):
        pyinterp.bicubic(grid, x.flatten(), y.flatten(), fitting_model='_')
    with pytest.raises(ValueError):
        pyinterp.bicubic(grid, x.flatten(), y.flatten(), boundary='_')
    # Flipped (descending) axes are rejected.
    grid = pyinterp.Grid2D(x_axis.flip(inplace=False), y_axis, matrix.T)
    with pytest.raises(ValueError):
        pyinterp.bicubic(grid, x.flatten(), y.flatten())
    grid = pyinterp.Grid2D(x_axis, y_axis.flip(), matrix.T)
    with pytest.raises(ValueError):
        pyinterp.bicubic(grid, x.flatten(), y.flatten())
    # A 3D grid cannot be fed to the 2D bicubic interface.
    matrix, _, _ = np.meshgrid(x_axis[:], y_axis[:], z_axis[:])
    grid = pyinterp.Grid3D(x_axis, y_axis, z_axis,
                           matrix.transpose(1, 0, 2))
    with pytest.raises(ValueError):
        pyinterp.bicubic(grid, x.flatten(), y.flatten())
    # The generic interpolator front-end also exposes bicubic.
    grid = pyinterp.backends.xarray.RegularGridInterpolator(
        xr.load_dataset(GRID).mss)
    assert grid.ndim == 2
    assert isinstance(grid.grid, pyinterp.backends.xarray.Grid2D)
    z = grid(collections.OrderedDict(lon=x.flatten(), lat=y.flatten()),
             method="bicubic",
             bicubic_kwargs=dict(nx=3, ny=3))
    assert isinstance(z, np.ndarray)
def test_gauss_seidel(self):
    """Check the Gauss-Seidel filling of undefined grid values."""
    grid = self._load()
    _, filled0 = pyinterp.fill.gauss_seidel(grid, num_threads=0)
    _, filled1 = pyinterp.fill.gauss_seidel(grid, num_threads=1)
    _, filled2 = pyinterp.fill.gauss_seidel(grid,
                                            first_guess='zero',
                                            num_threads=0)
    data = np.copy(grid.array)
    # Zero out NaNs so the arrays can be compared arithmetically.
    for array in (data, filled0, filled1, filled2):
        array[np.isnan(array)] = 0
    # Multi-threaded and single-threaded runs must agree exactly.
    self.assertEqual((filled0 - filled1).mean(), 0)
    # Originally defined values are left untouched by the filling.
    self.assertEqual(np.ma.fix_invalid(grid.array - filled1).mean(), 0)
    # The undefined values were actually modified.
    self.assertNotEqual((data - filled1).mean(), 0)
    # A different first guess yields a different result.
    self.assertNotEqual((filled2 - filled1).mean(), 0)
    with self.assertRaises(ValueError):
        pyinterp.fill.gauss_seidel(grid, '_')
    # The filling also works on a grid built from raw axes and values.
    x_axis = pyinterp.Axis(np.linspace(-180, 180, 10), is_circle=True)
    y_axis = pyinterp.Axis(np.linspace(-90, 90, 10), is_circle=False)
    random_field = np.random.rand(len(x_axis), len(y_axis))
    grid = pyinterp.Grid2D(x_axis, y_axis, random_field)
    _, filled0 = pyinterp.fill.gauss_seidel(grid, num_threads=0)
    self.assertIsInstance(filled0, np.ndarray)
def _build_interpolator(self):
    """Build the 2D interpolation grid from the stored coordinates."""
    lon_axis = pyinterp.Axis(self._x, is_circle=True)
    lat_axis = pyinterp.Axis(self._y)
    # Masked values must become NaN before the grid is constructed.
    self._z[self._z.mask] = float("nan")
    self._grid = pyinterp.Grid2D(lon_axis, lat_axis, self._z.data)
def _load(cls, cube=False):
    """Build a test grid from the reference dataset.

    The axes and field are subsampled by a factor of five and masked
    values are replaced with NaN.  When ``cube`` is true, the 2D field is
    duplicated along a third axis and a Grid3D is returned instead of a
    Grid2D.
    """
    ds = netCDF4.Dataset(cls.GRID)
    x_axis = pyinterp.Axis(ds.variables["lon"][::5], is_circle=True)
    y_axis = pyinterp.Axis(ds.variables["lat"][::5])
    field = ds.variables["mss"][::5, ::5].T
    field[field.mask] = float("nan")
    if not cube:
        return pyinterp.grid.Grid2D(x_axis, y_axis, field.data)
    z_axis = pyinterp.Axis(np.arange(2))
    stacked = np.stack([field.data] * len(z_axis)).transpose(1, 2, 0)
    return pyinterp.grid.Grid3D(x_axis, y_axis, z_axis, stacked)
def test_core_variate_interpolator(self):
    """Invalid arguments to _core_variate_interpolator must be rejected."""
    lat = pyinterp.Axis(np.arange(-80, 80, 1), is_circle=False)
    lon = pyinterp.Axis(np.arange(0, 360, 1), is_circle=True)
    values, _ = np.meshgrid(lon[:], lat[:])
    grid = pyinterp.Grid2D(lon, lat, values.T)
    # ``None`` is not a grid object.
    with self.assertRaises(TypeError):
        pyinterp.grid._core_variate_interpolator(None, "_")
    # "_" is not a known interpolator name.
    with self.assertRaises(ValueError):
        pyinterp.grid._core_variate_interpolator(grid, '_')
def reproj_with_manual_grid(da, x_coords, y_coords, new_grid):
    """Re-project *da* onto the target coordinates with bivariate
    interpolation.

    Args:
        da: source data array exposing ``x``/``y`` coordinates and ``data``.
        x_coords, y_coords: flattened target coordinates to interpolate at.
        new_grid: mapping holding the target ``x_coords``/``y_coords`` axes,
            used only to reshape the flat result into a 2D field.

    Returns:
        The interpolated values, shaped (len(x axis), len(y axis)).
    """
    grid = pyinterp.Grid2D(pyinterp.Axis(da.x.values),
                           pyinterp.Axis(da.y.values),
                           da.data.T)
    flat = pyinterp.bivariate(grid, x_coords, y_coords)
    target_shape = (len(new_grid['x_coords']), len(new_grid['y_coords']))
    return flat.reshape(target_shape)
def test_core_class_suffix(self):
    """A Grid2D can be built from any supported numeric dtype."""
    lon = pyinterp.Axis(np.arange(0, 360, 1), is_circle=True)
    lat = pyinterp.Axis(np.arange(-80, 80, 1), is_circle=False)
    # The meshgrid is loop-invariant; build it once instead of per dtype.
    matrix, _ = np.meshgrid(lon[:], lat[:])
    for dtype in [
            "float64", "float32", "int64", "uint64", "int32", "uint32",
            "int16", "uint16", "int8", "uint8"
    ]:
        self.assertIsInstance(
            pyinterp.Grid2D(lon, lat,
                            matrix.T.astype(dtype=getattr(np, dtype))),
            pyinterp.Grid2D)
    # Complex values are not supported.  BUG FIX: ``np.complex`` was
    # deprecated in NumPy 1.20 and removed in 1.24; use ``np.complex128``.
    with self.assertRaises(ValueError):
        pyinterp.Grid2D(lon, lat, matrix.astype(np.complex128))
def compute_stats(time_alongtrack, lat_alongtrack, lon_alongtrack,
                  ssh_alongtrack, ssh_map_interp, bin_lon_step, bin_lat_step,
                  bin_time_step, output_filename,
                  output_filename_timeseries):
    """Compute binned SSH statistics and write them to NetCDF files.

    Bins the along-track SSH, the interpolated map SSH, their difference
    and the RMSE of the difference on a regular lon/lat grid, writing each
    group to ``output_filename`` via ``write_stat``; then delegates the
    time-series statistics to ``write_timeserie_stat``.

    Returns:
        Tuple ``(leaderboard_nrmse, leaderboard_nrmse_std)`` as produced by
        ``write_timeserie_stat``.
    """
    ncfile = netCDF4.Dataset(output_filename, 'w')
    # Global regular grid; the longitude axis wraps around.
    binning = pyinterp.Binning2D(
        pyinterp.Axis(np.arange(0, 360, bin_lon_step), is_circle=True),
        pyinterp.Axis(np.arange(-90, 90 + bin_lat_step, bin_lat_step)))
    # binning alongtrack
    binning.push(lon_alongtrack, lat_alongtrack, ssh_alongtrack, simple=True)
    write_stat(ncfile, 'alongtrack', binning)
    binning.clear()
    # binning map interp
    binning.push(lon_alongtrack, lat_alongtrack, ssh_map_interp, simple=True)
    write_stat(ncfile, 'maps', binning)
    binning.clear()
    # binning diff sla-msla
    binning.push(lon_alongtrack, lat_alongtrack,
                 ssh_alongtrack - ssh_map_interp, simple=True)
    write_stat(ncfile, 'diff', binning)
    binning.clear()
    # add rmse: bin the squared differences, then take the square root of
    # the per-bin mean.
    diff2 = (ssh_alongtrack - ssh_map_interp)**2
    binning.push(lon_alongtrack, lat_alongtrack, diff2, simple=True)
    var = ncfile.groups['diff'].createVariable(
        'rmse', binning.variable('mean').dtype, ('lat', 'lon'), zlib=True)
    var[:, :] = np.sqrt(binning.variable('mean')).T
    ncfile.close()
    logging.info(f' Results saved in: {output_filename}')
    # write time series statistics
    leaderboard_nrmse, leaderboard_nrmse_std = write_timeserie_stat(
        ssh_alongtrack, ssh_map_interp, time_alongtrack, bin_time_step,
        output_filename_timeseries)
    return leaderboard_nrmse, leaderboard_nrmse_std
def bfn_grid_dataset(list_of_file, var2add, var2sub, lon_min=0.,
                     lon_max=360., lat_min=-90, lat_max=90.,
                     time_min='1900-10-01', time_max='2100-01-01',
                     is_circle=True):
    """Load BFN map files and build a 3D (lon, lat, time) interpolation grid.

    Args:
        list_of_file: NetCDF files concatenated along ``time``.
        var2add: variable names summed together to build the field.
        var2sub: variable names subtracted from the field.
        lon_min, lon_max, lat_min, lat_max: spatial selection (longitudes
            are compared modulo 360).
        time_min, time_max: temporal selection.
        is_circle: whether the longitude axis wraps around.

    Returns:
        Tuple ``(x_axis, y_axis, z_axis, grid)``.
    """
    ds = xr.open_mfdataset(list_of_file, concat_dim='time', combine='nested',
                           parallel=True)
    ds = ds.sel(time=slice(time_min, time_max))
    ds = ds.where((ds["lon"] % 360. >= lon_min) &
                  (ds["lon"] % 360. <= lon_max), drop=True)
    ds = ds.where((ds["lat"] >= lat_min) & (ds["lat"] <= lat_max), drop=True)
    x_axis = pyinterp.Axis(ds["lon"][:] % 360., is_circle=is_circle)
    y_axis = pyinterp.Axis(ds["lat"][:])
    z_axis = pyinterp.TemporalAxis(ds["time"][:])
    # Accumulate the field: the first variable initializes ``var`` (the
    # UnboundLocalError branch); the following ones are added/subtracted.
    for variable_name in var2add:
        try:
            var += ds[variable_name][:]
        except UnboundLocalError:
            var = ds[variable_name][:]
    for variable_name in var2sub:
        try:
            var -= ds[variable_name][:]
        except UnboundLocalError:
            var = ds[variable_name][:]
    # MB clean boundary for file OSE_GULFSTREAM_FPGENN.nc
    #var.values[:, 0:3, :] = np.nan
    #var.values[:, :, 0:3] = np.nan
    # ds['time'] = (ds['time'] - np.datetime64('1950-01-01T00:00:00Z')) / np.timedelta64(1, 'D')
    # Match the array layout to the (x, y, t) axis order.
    var = var.transpose('lon', 'lat', 'time')
    # The undefined values must be set to nan.
    try:
        var[var.mask] = float("nan")
    except AttributeError:
        pass
    grid = pyinterp.Grid3D(x_axis, y_axis, z_axis, var.data)
    del ds
    return x_axis, y_axis, z_axis, grid
def dymost_grid_dataset(list_of_file, var2add, var2sub, lon_min=0.,
                        lon_max=360., lat_min=-90, lat_max=90.,
                        time_min='1900-10-01', time_max='2100-01-01',
                        is_circle=True):
    """Load DYMOST map files and build a 3D (x, y, time) interpolation grid.

    Unlike the regular-grid readers, lon/lat are stored as 2D arrays: the
    longitude axis is read from the first row and the latitude axis from
    the first column (assumes a separable grid — TODO confirm).

    Args:
        list_of_file: NetCDF files concatenated along ``time``.
        var2add: variable names summed together to build the field.
        var2sub: variable names subtracted from the field.
        lon_min, lon_max, lat_min, lat_max: spatial selection.
        time_min, time_max: temporal selection.
        is_circle: whether the longitude axis wraps around.

    Returns:
        Tuple ``(x_axis, y_axis, z_axis, grid)``.
    """
    ds = xr.open_mfdataset(list_of_file, concat_dim='time', combine='nested',
                           parallel=True)
    ds = ds.sel(time=slice(time_min, time_max))
    ds = ds.where((ds["lon"] >= lon_min) & (ds["lon"] <= lon_max), drop=True)
    ds = ds.where((ds["lat"] >= lat_min) & (ds["lat"] <= lat_max), drop=True)
    # print(ds)
    x_axis = pyinterp.Axis(ds["lon"][0, :], is_circle=is_circle)
    y_axis = pyinterp.Axis(ds["lat"][:, 0])
    z_axis = pyinterp.TemporalAxis(ds["time"][:])
    # Accumulate the field: the first variable initializes ``var`` (the
    # UnboundLocalError branch); the following ones are added/subtracted.
    for variable_name in var2add:
        try:
            var += ds[variable_name][:]  #ds['Ha'][:]
        except UnboundLocalError:
            var = ds[variable_name][:]
    for variable_name in var2sub:
        try:
            var -= ds[variable_name][:]
        except UnboundLocalError:
            var = ds[variable_name][:]
    # Match the array layout to the (x, y, t) axis order.
    var = var.transpose('x', 'y', 'time')
    # The undefined values must be set to nan.
    try:
        var[var.mask] = float("nan")
    except AttributeError:
        pass
    grid = pyinterp.Grid3D(x_axis, y_axis, z_axis, var.data)
    del ds
    return x_axis, y_axis, z_axis, grid
def duacs_grid_dataset(list_of_file, variable_name='Grid_0001', lon_min=0.,
                       lon_max=360., lat_min=-90, lat_max=90.,
                       time_min='1900-10-01', time_max='2100-01-01',
                       is_circle=True):
    """Load DUACS map files and build a 3D (lon, lat, time) grid.

    Each file carries its date in the ``Date_CNES_JD`` attribute of the
    data variable; the preprocessing step turns it into a ``time``
    coordinate and converts centimetre fields to metres.

    Returns:
        Tuple ``(x_axis, y_axis, z_axis, grid)``.
    """

    def preprocess_duacs_maps(ds):
        # Decode the CNES Julian day stored as a variable attribute.
        vtime = ds[variable_name].attrs['Date_CNES_JD']
        ds.coords['time'] = np.datetime64(
            netCDF4.num2date(vtime, units='days since 1950-01-01'))
        # Normalize units to metres.
        if ds[variable_name].units == 'cm':
            ds[variable_name] = ds[variable_name] / 100.
            ds[variable_name].attrs = {'units': 'm'}
        return ds

    ds = xr.open_mfdataset(list_of_file, concat_dim='time', combine='nested',
                           parallel=True, preprocess=preprocess_duacs_maps)
    ds = ds.sel(time=slice(time_min, time_max))
    ds = ds.where((ds["NbLongitudes"] >= lon_min) &
                  (ds["NbLongitudes"] <= lon_max), drop=True)
    ds = ds.where((ds["NbLatitudes"] >= lat_min) &
                  (ds["NbLatitudes"] <= lat_max), drop=True)
    x_axis = pyinterp.Axis(ds["NbLongitudes"][:], is_circle=is_circle)
    y_axis = pyinterp.Axis(ds["NbLatitudes"][:])
    z_axis = pyinterp.TemporalAxis(ds["time"][:])
    # Match the array layout to the (x, y, t) axis order.
    var = ds[variable_name][:].transpose('NbLongitudes', 'NbLatitudes',
                                         'time')
    # The undefined values must be set to nan.
    try:
        var[var.mask] = float("nan")
    except AttributeError:
        pass
    grid = pyinterp.Grid3D(x_axis, y_axis, z_axis, var.data)
    del ds
    return x_axis, y_axis, z_axis, grid
def read_l4_dataset(list_of_file, lon_min=0., lon_max=360., lat_min=-90,
                    lat_max=90., time_min='1900-10-01',
                    time_max='2100-01-01', is_circle=True):
    """Load L4 SSH map files and build a 3D (lon, lat, time) grid.

    Args:
        list_of_file: NetCDF files concatenated along ``time``.
        lon_min, lon_max, lat_min, lat_max: spatial selection (longitudes
            are compared modulo 360, so both [-180, 180] and [0, 360]
            conventions are accepted).
        time_min, time_max: temporal selection.
        is_circle: whether the longitude axis wraps around.

    Returns:
        Tuple ``(x_axis, y_axis, z_axis, grid)``.
    """
    ds = xr.open_mfdataset(list_of_file, concat_dim='time', combine='nested',
                           parallel=True)
    ds = ds.sel(time=slice(time_min, time_max), drop=True)
    ds = ds.where(
        (ds["lon"] % 360. >= lon_min) & (ds["lon"] % 360. <= lon_max),
        drop=True)
    ds = ds.where((ds["lat"] >= lat_min) & (ds["lat"] <= lat_max), drop=True)
    x_axis = pyinterp.Axis(ds["lon"][:] % 360., is_circle=is_circle)
    y_axis = pyinterp.Axis(ds["lat"][:])
    z_axis = pyinterp.TemporalAxis(ds["time"][:])
    var = ds['ssh'][:]
    # Match the array layout to the (x, y, t) axis order.
    var = var.transpose('lon', 'lat', 'time')
    # The undefined values must be set to nan.
    try:
        var[var.mask] = float("nan")
    except AttributeError:
        pass
    grid = pyinterp.Grid3D(x_axis, y_axis, z_axis, var.data)
    del ds
    return x_axis, y_axis, z_axis, grid
def load_dataset(
        self, first_date: np.datetime64,
        last_date: np.datetime64) -> pyinterp.backends.xarray.Grid3D:
    """Loads the 3D cube describing the SSH in time and space."""
    # Reject periods outside the known time series.
    if first_date < self.ts["date"][0] or last_date > self.ts["date"][-1]:
        raise IndexError(
            f"period [{first_date}, {last_date}] is out of range: "
            f"[{self.ts['date'][0]}, {self.ts['date'][-1]}]")
    # Widen the requested period by one time step on each side so the
    # selection below has data around the boundaries.
    first_date -= self.dt
    last_date += self.dt
    # Keep only the files covering the widened period.
    selected = self.ts["path"][(self.ts["date"] >= first_date)
                               & (self.ts["date"] < last_date)]
    ds = xr.open_mfdataset(selected,
                           concat_dim="time",
                           combine="nested",
                           decode_times=True)
    x_axis = pyinterp.Axis(ds.variables["longitude"][:], is_circle=True)
    y_axis = pyinterp.Axis(ds.variables["latitude"][:])
    z_axis = pyinterp.TemporalAxis(ds.time)
    # Transpose so the array layout matches the (x, y, t) axis order.
    var = ds.wlv[:].T
    return pyinterp.Grid3D(x_axis, y_axis, z_axis, var)
def test__core_function_suffix(self):
    """_core_function must map a grid instance to the right dtype suffix."""
    # Only grid objects are accepted.
    with self.assertRaises(TypeError):
        pyinterp.interface._core_function(1)
    lon = pyinterp.Axis(np.arange(0, 360, 1), is_circle=True)
    lat = pyinterp.Axis(np.arange(-80, 80, 1), is_circle=False)
    values, _ = np.meshgrid(lon[:], lat[:])
    grid64 = pyinterp.core.Grid2DFloat64(lon, lat, values.T)
    self.assertEqual(pyinterp.interface._core_function("foo", grid64),
                     "foo_float64")
    grid32 = pyinterp.core.Grid2DFloat32(lon, lat, values.T)
    self.assertEqual(pyinterp.interface._core_function("foo", grid32),
                     "foo_float32")
    # Temporal 3D grids resolve to the same dtype-based suffix.
    time = pyinterp.TemporalAxis(
        np.array(['2000-01-01'], dtype="datetime64"))
    values, _, _ = np.meshgrid(lon[:], lat[:], time[:], indexing='ij')
    grid3d = pyinterp.core.TemporalGrid3DFloat64(lon, lat, time, values)
    self.assertEqual(pyinterp.interface._core_function("foo", grid3d),
                     "foo_float64")
def interpolate(self, lon: np.ndarray, lat: np.ndarray,
                dates: np.ndarray) -> np.ndarray:
    """Interpolate the SSH to the required coordinates."""
    ds = self._select_ds(
        dates.min(),  # type: ignore
        dates.max())  # type: ignore
    # Sanity checks: the model must be regularly sampled in time, and the
    # 2D lon/lat coordinates must describe a separable grid (lon constant
    # along axis 0, lat constant along axis 1).
    assert np.all(np.diff(ds.ocean_time.values) == self._dt)
    assert np.all(np.diff(ds.lon_rho.values, axis=0) < 1e-10)
    assert np.all(np.diff(ds.lat_rho.values, axis=1) < 1e-10)
    t_axis = pyinterp.TemporalAxis(ds.ocean_time.values)
    # Build the 3D grid from the first row/column of the 2D coordinates.
    grid3d = pyinterp.Grid3D(
        pyinterp.Axis(ds.lon_rho.values[0, :], is_circle=True),
        pyinterp.Axis(ds.lat_rho.values[:, 0]), t_axis,
        ds[self.ssh].values.T)
    # ``safe_cast`` presumably converts the dates to the time axis'
    # internal encoding — TODO confirm against pyinterp docs.
    ssh = pyinterp.trivariate(grid3d,
                              lon.ravel(),
                              lat.ravel(),
                              t_axis.safe_cast(dates.ravel()),
                              num_threads=1).reshape(lon.shape)
    return ssh
def oi_regrid(ds_source, ds_target): logging.info(' Regridding...') # Define source grid x_source_axis = pyinterp.Axis(ds_source["lon"][:], is_circle=False) y_source_axis = pyinterp.Axis(ds_source["lat"][:]) z_source_axis = pyinterp.TemporalAxis(ds_source["time"][:]) ssh_source = ds_source["gssh"][:].T grid_source = pyinterp.Grid3D(x_source_axis, y_source_axis, z_source_axis, ssh_source.data) # Define target grid mx_target, my_target, mz_target = numpy.meshgrid( ds_target['lon'].values, ds_target['lat'].values, z_source_axis.safe_cast(ds_target['time'].values), indexing="ij") # Spatio-temporal Interpolation ssh_interp = pyinterp.trivariate(grid_source, mx_target.flatten(), my_target.flatten(), mz_target.flatten(), bounds_error=True).reshape( mx_target.shape).T # Save to dataset ds_ssh_interp = xr.Dataset( {'sossheig': (('time', 'lat', 'lon'), ssh_interp)}, coords={ 'time': ds_target['time'].values, 'lon': ds_target['lon'].values, 'lat': ds_target['lat'].values, }) return ds_ssh_interp
lat:standard_name = "latitude" ; float lon(lon) ; lon:long_name = "longitude" ; lon:units = "degrees_east" ; lon:standard_name = "longitude" ; Regular axis ============ For example, let's construct an axis representing a regular axis. """ #%% import numpy import pyinterp axis = pyinterp.Axis(numpy.arange(-90, 90, 0.25)) axis #%% # This object can be queried to obtain its properties. print(f"is ascending ? {axis.is_ascending()}") print(f"is regular ? {axis.is_regular()}") print(f"is circle ? {axis.is_circle}") #%% # The most useful interfaces allow you to search for the index of the closest # value. axis.find_index([1e-3]) #%% # It is also possible to find the indices around a value.
AOML = DATASET.joinpath("aoml_v2019.nc")

#%%
# The first step is to load the data into memory and create the interpolator
# object:
ds = xarray.open_dataset(AOML)

#%%
# Let's start by calculating the norm of the vectors u and v.
norm = (ds.ud**2 + ds.vd**2)**0.5

#%%
# Now, we will describe the grid used to calculate our :py:class:`binned
# <pyinterp.Binning2D>` statistics.
binning = pyinterp.Binning2D(
    pyinterp.Axis(numpy.arange(27, 42, 0.3), is_circle=True),
    pyinterp.Axis(numpy.arange(40, 47, 0.3)))
binning

#%%
# We push the loaded data into the different defined bins using :ref:`simple
# binning <bilinear_binning>`.
binning.clear()
binning.push(ds.lon, ds.lat, norm, True)

#%%
# .. note ::
#
# If the processed data is larger than the available RAM, it's possible to use
# Dask to parallelize the calculation. To do this, an instance must be built,
# then the data must be added using the :py:meth:`push_delayed
def test_biavariate():
    """Check the bivariate interpolation of a 2D geodetic grid."""
    grid = pyinterp.backends.xarray.Grid2D(xr.load_dataset(GRID).mss)
    assert isinstance(grid, pyinterp.backends.xarray.Grid2D)
    assert isinstance(grid, pyinterp.Grid2D)
    # The grid must survive a pickle round-trip.
    other = pickle.loads(pickle.dumps(grid))
    assert isinstance(other, pyinterp.backends.xarray.Grid2D)
    assert isinstance(grid, pyinterp.Grid2D)
    assert isinstance(grid.x, pyinterp.Axis)
    assert isinstance(grid.y, pyinterp.Axis)
    assert isinstance(grid.array, np.ndarray)
    # Query points shifted off the grid nodes to force real interpolation.
    lon = np.arange(-180, 180, 1) + 1 / 3.0
    lat = np.arange(-90, 90, 1) + 1 / 3.0
    x, y = np.meshgrid(lon, lat, indexing="ij")
    z = grid.bivariate(
        collections.OrderedDict(lon=x.flatten(), lat=y.flatten()))
    assert isinstance(z, np.ndarray)
    z = grid.bivariate(collections.OrderedDict(lon=x.flatten(),
                                               lat=y.flatten()),
                       interpolator="nearest")
    assert isinstance(z, np.ndarray)
    z = grid.bivariate(collections.OrderedDict(lon=x.flatten(),
                                               lat=y.flatten()),
                       interpolator="inverse_distance_weighting")
    assert isinstance(z, np.ndarray)
    # Cartesian (non-geodetic) coordinates must give a different result.
    grid = pyinterp.backends.xarray.Grid2D(xr.load_dataset(GRID).mss,
                                           geodetic=False)
    assert isinstance(grid, pyinterp.backends.xarray.Grid2D)
    w = grid.bivariate(collections.OrderedDict(lon=x.flatten(),
                                               lat=y.flatten()),
                       interpolator="inverse_distance_weighting")
    assert np.ma.fix_invalid(z).mean() != np.ma.fix_invalid(w).mean()
    # Coordinates must be given as a mapping, not a tuple.
    with pytest.raises(TypeError):
        grid.bivariate((x.flatten(), y.flatten()))
    # Too many coordinate axes.
    with pytest.raises(IndexError):
        grid.bivariate(
            collections.OrderedDict(lon=x.flatten(),
                                    lat=y.flatten(),
                                    time=np.arange(3)))
    # Unknown axis name.
    with pytest.raises(IndexError):
        grid.bivariate(
            collections.OrderedDict(longitude=x.flatten(), lat=y.flatten()))
    # Out-of-bounds queries raise when bounds_error is set.
    with pytest.raises(ValueError):
        grid.bivariate(collections.OrderedDict(lon=x.flatten(),
                                               lat=y.flatten()),
                       bounds_error=True)
    lon = pyinterp.Axis(np.linspace(0, 360, 100), is_circle=True)
    lat = pyinterp.Axis(np.linspace(-80, 80, 50), is_circle=False)
    array, _ = np.meshgrid(lon[:], lat[:])
    # The matrix layout must match the axes (hence the transpose below).
    with pytest.raises(ValueError):
        pyinterp.Grid2D(lon, lat, array)
    grid = pyinterp.Grid2D(lon, lat, array.T)
    assert isinstance(grid, pyinterp.Grid2D)
    assert isinstance(str(grid), str)
    # Unknown increasing_axes option is rejected.
    with pytest.raises(ValueError):
        pyinterp.Grid2D(lon, lat, array, increasing_axes='_')
    # The generic interpolator front-end also exposes bilinear.
    grid = pyinterp.backends.xarray.RegularGridInterpolator(
        xr.load_dataset(GRID).mss)
    z = grid(collections.OrderedDict(lon=x.flatten(), lat=y.flatten()),
             method="bilinear")
    assert isinstance(z, np.ndarray)
"longitude"][:], ds.variables["latitude"][:], ds.variables[ "time"][:], ds.variables["time"].units, ds.variables["tcw"][:] time = numpy.array(netCDF4.num2date(time, time_units), dtype="datetime64[us]") #%% # This regular 3-dimensional grid is associated with three axes: # # * longitudes, # * latitudes and # * time. # # To perform the calculations quickly, we will build three objects that will be # used by the interpolator to search for the data to be used. Let's start with # the y-axis representing the latitude axis. y_axis = pyinterp.Axis(lat) y_axis #%% # For example, you can search for the closest point to 0.12 degrees north latitude. y_axis.find_index([0.12]) #%% # Then, the x-axis representing the longitudinal axis. In this case, the axis is # an axis representing a 360 degree circle. x_axis = pyinterp.Axis(lon, is_circle=True) x_axis #%% # The values -180 and 180 degrees represent the same point on the axis. x_axis.find_index([-180]) == x_axis.find_index([180])