def test_convert(self):
    """PointLike.convert maps None/empty input to None, parses pairs, rejects junk."""
    # None and the empty string are both treated as "no point".
    self.assertIsNone(PointLike.convert(None))
    self.assertIsNone(PointLike.convert(''))
    # A comma-separated coordinate pair parses into a Point.
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('0.0,1.0'))
    # Non-numeric coordinates raise a descriptive ValueError.
    with self.assertRaises(ValueError) as err:
        PointLike.convert('0.0,abc')
    self.assertEqual(str(err.exception),
                     'cannot convert value <0.0,abc> to PointLike')
def test_convert(self):
    """PointLike.convert parses coordinate strings and rejects bad input."""
    expected = Point(0.0, 1.0)
    actual = PointLike.convert('0.0,1.0')
    # BUG FIX: the original used assertTrue(expected, actual), which only
    # checks that `expected` is truthy (`actual` is taken as the failure
    # message) — the comparison never happened. assertEqual actually
    # compares the converted point against the expected one.
    self.assertEqual(expected, actual)
    with self.assertRaises(ValueError) as err:
        PointLike.convert('0.0,abc')
    # assertIn gives a clearer failure report than assertTrue(x in y).
    self.assertIn('cannot convert', str(err.exception))
    self.assertEqual(None, PointLike.convert(None))
def test_accepts(self):
    """PointLike.accepts recognizes strings, tuples, lists, Points, and WKT."""
    accepted_inputs = [
        "",
        "\t\n ",
        "2.4, 4.8\n",
        (2.4, 4.8),
        [2.4, 4.8],
        Point(2.4, 4.8),
        Point(2.4, 4.8).wkt,
    ]
    for value in accepted_inputs:
        self.assertTrue(PointLike.accepts(value))
    # Non-numeric coordinate text and bare scalars are rejected.
    self.assertFalse(PointLike.accepts("A, 4.8"))
    self.assertFalse(PointLike.accepts(25.1))
def test_accepts(self):
    """Check which value kinds PointLike.accepts treats as point-like."""
    # Blank / whitespace-only strings are accepted (empty point).
    self.assertTrue(PointLike.accepts(""))
    self.assertTrue(PointLike.accepts("\t\n "))
    # Coordinate strings, sequences, Point objects, and WKT are accepted.
    for candidate in ("2.4, 4.8\n",
                      (2.4, 4.8),
                      [2.4, 4.8],
                      Point(2.4, 4.8),
                      Point(2.4, 4.8).wkt):
        self.assertTrue(PointLike.accepts(candidate))
    # Rejected: non-numeric coordinates and a plain float.
    for candidate in ("A, 4.8", 25.1):
        self.assertFalse(PointLike.accepts(candidate))
def tseries_point(ds: xr.Dataset,
                  point: PointLike.TYPE,
                  var: VarNamesLike.TYPE = None,
                  method: str = 'nearest') -> xr.Dataset:
    """
    Extract a point time-series from *ds* at the given *lat*, *lon* position,
    using interpolation *method*, for each *var* given as a comma separated
    list of variables.

    A new timeseries dataset is returned that contains the point timeseries
    for all requested variables, with the original variable meta-information
    preserved. Variables with more than three dimensions keep all dimensions
    other than lat/lon.

    :param ds: The dataset from which to perform timeseries extraction.
    :param point: Point to extract
    :param var: Variable(s) for which to perform the timeseries selection
                if none is given, all variables in the dataset will be used.
    :param method: Interpolation method to use.
    :return: A timeseries dataset
    """
    point = PointLike.convert(point)
    # An empty/None variable spec means "all variables".
    selected = select_var(ds, var=var if var else '*')
    # Index the lat/lon dimensions at the point location.
    return selected.sel(lat=point.y, lon=point.x, method=method)
def extract_point(ds: DatasetLike.TYPE,
                  point: PointLike.TYPE,
                  indexers: DictLike.TYPE = None,
                  tolerance_default: float = 0.01) -> Dict:
    """
    Extract data at the given point location. The returned dict will contain scalar
    values for all variables for which all dimensions have been given in ``indexers``.
    For the dimensions *lon* and *lat* a nearest neighbour lookup is performed.
    All other dimensions must match exactly.

    :param ds: Dataset or dataframe to subset
    :param point: Geographic point given by longitude and latitude
    :param indexers: Optional indexers into data array of *var*. The *indexers* is
           a dictionary or a comma-separated string of key-value pairs that maps
           the variable's dimension names to constant labels. e.g. "layer=4".
    :param tolerance_default: The default longitude and latitude tolerance for the
           nearest neighbour lookup. It will only be used, if it is not possible
           to deduce the resolution of the dataset.
    :return: A dict with the scalar values of all variables and the variable
             names as keys.
    """
    ds = DatasetLike.convert(ds)
    point = PointLike.convert(point)
    indexers = DictLike.convert(indexers) or {}
    lon_lat_indexers = {'lon': point.x, 'lat': point.y}
    # Lon/lat tolerance for the nearest-neighbour lookup; derived from the
    # dataset resolution when possible, falling back to *tolerance_default*.
    tolerance = _get_tolerance(ds, tolerance_default)
    variable_values = {}
    # Sort so the resulting dict has a deterministic key order.
    var_names = sorted(ds.data_vars.keys())
    for var_name in var_names:
        # Bounds variables ('*_bnds') carry no point data of their own.
        if not var_name.endswith('_bnds'):
            variable = ds.data_vars[var_name]
            effective_indexers = {}
            used_dims = {'lat', 'lon'}
            # Keep only the indexers that apply to this variable's dims.
            for dim_name, dim_value in indexers.items():
                if dim_name in variable.dims:
                    effective_indexers[dim_name] = dim_value
                    used_dims.add(dim_name)
            # Only variables whose dims are fully covered by lat/lon plus the
            # given indexers can be reduced to a scalar value.
            if set(variable.dims) == used_dims:
                try:
                    lon_lat_data = variable.sel(**effective_indexers)
                except KeyError:
                    # if there is no exact match for the "additional" dims, skip this variable
                    continue
                try:
                    point_data = lon_lat_data.sel(method='nearest',
                                                  tolerance=tolerance,
                                                  **lon_lat_indexers)
                except KeyError:
                    # if there is no point within the given tolerance, return an empty dict
                    return {}
                if not variable_values:
                    # Record the grid-cell coordinates actually selected, once.
                    variable_values['lat'] = float(point_data.lat)
                    variable_values['lon'] = float(point_data.lon)
                value = to_scalar(point_data.values, ndigits=3)
                if value is not UNDEFINED:
                    variable_values[var_name] = value
    return variable_values
def extract_point(ds: DatasetLike.TYPE,
                  point: PointLike.TYPE,
                  indexers: DictLike.TYPE = None,
                  tolerance_default: float = 0.01) -> Dict:
    """
    Extract data at the given point location. The returned dict will contain scalar
    values for all variables for which all dimensions have been given in ``indexers``.
    For the dimensions *lon* and *lat* a nearest neighbour lookup is performed.
    All other dimensions must match exactly.

    :param ds: Dataset or dataframe to subset
    :param point: Geographic point given by longitude and latitude
    :param indexers: Optional indexers into data array of *var*. The *indexers* is
           a dictionary or a comma-separated string of key-value pairs that maps
           the variable's dimension names to constant labels. e.g. "layer=4".
    :param tolerance_default: The default longitude and latitude tolerance for the
           nearest neighbour lookup. It will only be used, if it is not possible
           to deduce the resolution of the dataset.
    :return: A dict with the scalar values of all variables and the variable
             names as keys.
    """
    ds = DatasetLike.convert(ds)
    point = PointLike.convert(point)
    indexers = DictLike.convert(indexers) or {}
    lon_lat_indexers = {'lon': point.x, 'lat': point.y}
    # Lon/lat tolerance for the nearest-neighbour lookup; derived from the
    # dataset resolution when possible, falling back to *tolerance_default*.
    tolerance = _get_tolerance(ds, tolerance_default)
    variable_values = {}
    # Sort so the resulting dict has a deterministic key order.
    var_names = sorted(ds.data_vars.keys())
    for var_name in var_names:
        # Bounds variables ('*_bnds') carry no point data of their own.
        if not var_name.endswith('_bnds'):
            variable = ds.data_vars[var_name]
            effective_indexers = {}
            used_dims = {'lat', 'lon'}
            # Keep only the indexers that apply to this variable's dims.
            for dim_name, dim_value in indexers.items():
                if dim_name in variable.dims:
                    effective_indexers[dim_name] = dim_value
                    used_dims.add(dim_name)
            # Only variables whose dims are fully covered by lat/lon plus the
            # given indexers can be reduced to a scalar value.
            if set(variable.dims) == used_dims:
                try:
                    lon_lat_data = variable.sel(**effective_indexers)
                except KeyError:
                    # if there is no exact match for the "additional" dims, skip this variable
                    continue
                try:
                    point_data = lon_lat_data.sel(method='nearest',
                                                  tolerance=tolerance,
                                                  **lon_lat_indexers)
                except KeyError:
                    # if there is no point within the given tolerance, return an empty dict
                    return {}
                if not variable_values:
                    # Record the grid-cell coordinates actually selected, once.
                    variable_values['lat'] = float(point_data.lat)
                    variable_values['lon'] = float(point_data.lon)
                value = to_scalar(point_data.values, ndigits=3)
                if value is not UNDEFINED:
                    variable_values[var_name] = value
    return variable_values
def test_convert(self):
    """PointLike.convert handles None/empty input, coordinate pairs, and WKT."""
    self.assertIsNone(PointLike.convert(None))
    self.assertIsNone(PointLike.convert(''))
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('0.0,1.0'))
    # Bad coordinate text raises a ValidationError with a fixed message.
    with self.assertRaises(ValidationError) as err:
        PointLike.convert('0.0,abc')
    expected_message = ("Value cannot be converted into a 'PointLike': "
                        "Invalid geometry WKT format.")
    self.assertEqual(expected_message, str(err.exception))
    # WKT point notation is also accepted.
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('POINT(0.0 1.0)'))
def tseries_point(ds: xr.Dataset,
                  point: PointLike.TYPE,
                  var: VarNamesLike.TYPE = None,
                  method: str = 'nearest') -> xr.Dataset:
    """
    Extract a point time-series from *ds* at the given *lon*, *lat* position,
    using interpolation *method*, for each *var* given as a comma separated
    list of variables.

    A new timeseries dataset is returned that contains the point timeseries
    for all requested variables, with the original variable meta-information
    preserved. Variables with more than three dimensions keep all dimensions
    other than lon/lat.

    :param ds: The dataset from which to perform timeseries extraction.
    :param point: Point to extract, e.g. (lon,lat)
    :param var: Variable(s) for which to perform the timeseries selection
                if none is given, all variables in the dataset will be used.
    :param method: Interpolation method to use.
    :return: A timeseries dataset
    """
    point = PointLike.convert(point)
    # An empty/None variable spec means "all variables".
    selected = select_var(ds, var=var if var else '*')
    result = selected.sel(lon=point.x, lat=point.y, method=method)
    # The result is a point timeseries, no longer a spatial dataset, so the
    # global attributes describing spatial/vertical extent no longer apply.
    obsolete_attrs = (
        'geospatial_bounds_crs',
        'geospatial_bounds_vertical_crs',
        'geospatial_vertical_min',
        'geospatial_vertical_max',
        'geospatial_vertical_positive',
        'geospatial_vertical_units',
        'geospatial_vertical_resolution',
        'geospatial_lon_min',
        'geospatial_lat_min',
        'geospatial_lon_max',
        'geospatial_lat_max',
    )
    for attr_name in obsolete_attrs:
        result.attrs.pop(attr_name, None)
    return result
def test_convert(self):
    """PointLike.convert: None/empty to None, pair and WKT parsing, bad input."""
    self.assertIsNone(PointLike.convert(None))
    self.assertIsNone(PointLike.convert(''))
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('0.0,1.0'))
    # Non-numeric coordinates raise ValueError with a fixed message.
    with self.assertRaises(ValueError) as err:
        PointLike.convert('0.0,abc')
    expected_message = ('cannot convert value to PointLike: '
                        'invalid geometry WKT format')
    self.assertEqual(expected_message, str(err.exception))
    # WKT point notation is also accepted.
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('POINT(0.0 1.0)'))
def test_convert(self):
    """Round-trips of PointLike.convert, including the ValidationError path."""
    # "No value" inputs map to None.
    for empty in (None, ''):
        self.assertIsNone(PointLike.convert(empty))
    # Both the pair notation and WKT parse to the same Point.
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('0.0,1.0'))
    self.assertEqual(Point(0.0, 1.0), PointLike.convert('POINT(0.0 1.0)'))
    # Non-numeric coordinates raise ValidationError with a fixed message.
    with self.assertRaises(ValidationError) as err:
        PointLike.convert('0.0,abc')
    self.assertEqual(
        str(err.exception),
        "Value cannot be converted into a 'PointLike': "
        "Invalid geometry WKT format.")
def sel(ds: DatasetLike.TYPE,
        point: PointLike.TYPE = None,
        time: TimeLike.TYPE = None,
        indexers: DictLike.TYPE = None,
        method: str = 'nearest') -> xr.Dataset:
    """
    Return a new dataset with each array indexed by tick labels along the
    specified dimension(s).

    This is a wrapper for the ``xarray.sel()`` function.

    For documentation refer to xarray documentation at
    http://xarray.pydata.org/en/stable/generated/xarray.Dataset.sel.html#xarray.Dataset.sel

    :param ds: The dataset from which to select.
    :param point: Optional geographic point given by longitude and latitude
    :param time: Optional time
    :param indexers: Keyword arguments with names matching dimensions and
           values given by scalars, slices or arrays of tick labels. For
           dimensions with multi-index, the indexer may also be a dict-like
           object with keys matching index level names.
    :param method: Method to use for inexact matches:
           * None: only exact matches
           * ``pad`` / ``ffill``: propagate last valid index value forward
           * ``backfill`` / ``bfill``: propagate next valid index value backward
           * ``nearest`` (default): use nearest valid index value
    :return: A new Dataset with the same contents as this dataset, except each
             variable and dimension is indexed by the appropriate indexers.
             In general, each variable's data will be a view of the variable's
             data in this dataset.
    """
    ds = DatasetLike.convert(ds)
    point = PointLike.convert(point)
    time = TimeLike.convert(time)
    effective = dict(DictLike.convert(indexers) or {})
    # Explicit point/time arguments only apply where the caller did not
    # already supply the corresponding indexer.
    if point is not None:
        effective.setdefault('lon', point.x)
        effective.setdefault('lat', point.y)
    if time is not None:
        effective.setdefault('time', time)
    # Drop indexers that do not match any coordinate of the dataset.
    effective = {name: value
                 for name, value in effective.items()
                 if name in ds.coords}
    return ds.sel(method=method, **effective)
def sel(ds: DatasetLike.TYPE,
        point: PointLike.TYPE = None,
        time: TimeLike.TYPE = None,
        indexers: DictLike.TYPE = None,
        method: str = 'nearest') -> xr.Dataset:
    """
    Return a new dataset with each array indexed by tick labels along the
    specified dimension(s).

    This is a wrapper for the ``xarray.sel()`` function.

    For documentation refer to xarray documentation at
    http://xarray.pydata.org/en/stable/generated/xarray.Dataset.sel.html#xarray.Dataset.sel

    :param ds: The dataset from which to select.
    :param point: Optional geographic point given by longitude and latitude
    :param time: Optional time
    :param indexers: Keyword arguments with names matching dimensions and
           values given by scalars, slices or arrays of tick labels. For
           dimensions with multi-index, the indexer may also be a dict-like
           object with keys matching index level names.
    :param method: Method to use for inexact matches:
           * None: only exact matches
           * ``pad`` / ``ffill``: propagate last valid index value forward
           * ``backfill`` / ``bfill``: propagate next valid index value backward
           * ``nearest`` (default): use nearest valid index value
    :return: A new Dataset with the same contents as this dataset, except each
             variable and dimension is indexed by the appropriate indexers.
             In general, each variable's data will be a view of the variable's
             data in this dataset.
    """
    ds = DatasetLike.convert(ds)
    converted_point = PointLike.convert(point)
    converted_time = TimeLike.convert(time)
    selection = dict(DictLike.convert(indexers) or {})
    # Caller-supplied indexers win; point/time fill in only missing keys.
    if converted_point is not None:
        selection.setdefault('lon', converted_point.x)
        selection.setdefault('lat', converted_point.y)
    if converted_time is not None:
        selection.setdefault('time', converted_time)
    # Silently ignore indexers naming coordinates the dataset does not have.
    selection = {dim: label for dim, label in selection.items()
                 if dim in ds.coords}
    return ds.sel(method=method, **selection)
def test_format(self):
    """A Point is formatted as '<x>, <y>'."""
    formatted = PointLike.format(Point(2.4, 4.8))
    self.assertEqual("2.4, 4.8", formatted)
def test_format(self):
    """PointLike.format renders x and y separated by a comma and space."""
    point = Point(2.4, 4.8)
    self.assertEqual("2.4, 4.8", PointLike.format(point))