def test_debug_udf():
    """
    Show how to run a UDF locally for testing.

    ``execute_local_udf`` runs the same code path as the backend, so it can be
    used to check the validity of a UDF before submitting it.
    See: https://open-eo.github.io/openeo-python-client/udf.html#example-downloading-a-datacube-and-executing-an-udf-locally

    Depends on a NetCDF file produced by an earlier function.
    NOTE(review): the original docstring mentioned ``composite.nc`` but the
    code reads ``masked.nc`` — confirm which file is intended.
    """
    DataCube.execute_local_udf(compositing_udf, 'masked.nc', fmt='netcdf')
def load_collection(
        self,
        collection_id: str,
        spatial_extent: Optional[Dict[str, float]] = None,
        temporal_extent: Optional[List[Union[str, datetime.datetime, datetime.date]]] = None,
        bands: Optional[List[str]] = None,
        properties: Optional[Dict[str, Union[str, PGNode, Callable]]] = None
) -> DataCube:
    """
    Load a DataCube by collection id.

    :param collection_id: image collection identifier
    :param spatial_extent: limit data to specified bounding box or polygons
    :param temporal_extent: limit data to specified temporal interval
    :param bands: only add the specified bands
    :param properties: limit data by metadata property predicates
        (only supported on API version 1.0.0 and up)
    :return: a datacube containing the requested data
    """
    # Pre-1.0.0 backends use the legacy ImageCollectionClient path,
    # which has no support for property-based filtering.
    if not self._api_version.at_least("1.0.0"):
        return ImageCollectionClient.load_collection(
            collection_id=collection_id,
            session=self,
            spatial_extent=spatial_extent,
            temporal_extent=temporal_extent,
            bands=bands,
        )
    return DataCube.load_collection(
        collection_id=collection_id,
        connection=self,
        spatial_extent=spatial_extent,
        temporal_extent=temporal_extent,
        bands=bands,
        properties=properties,
    )
def test_mask_polygon_and_load_collection_spatial_extent(dry_run_env, dry_run_tracer):
    """An explicit load_collection spatial_extent must survive a later mask_polygon."""
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    load_node = PGNode(
        "load_collection",
        id="S2_FOOBAR",
        spatial_extent={"west": -1, "south": -1, "east": 10, "north": 10},
    )
    cube = DataCube(load_node, connection=None).mask_polygon(mask=polygon)
    evaluate(cube.flat_graph(), env=dry_run_env)
    constraints_by_source = dry_run_tracer.get_source_constraints(merge=True)
    assert len(constraints_by_source) == 1
    source_id, constraints = constraints_by_source[0]
    assert source_id == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": -1, "south": -1, "east": 10, "north": 10, "crs": "EPSG:4326"}
    }
def test_run_local_udf_frommemory(self):
    # Check that executing a UDF locally on an in-memory datacube matches
    # the result of calling the UDF's apply_datacube() function directly.
    from openeo_udf.api.datacube import DataCube
    dc = self.buildData()
    # Run the UDF through the client's local-execution entry point.
    r = rest_DataCube.execute_local_udf(udfcode, dc)
    result = r.get_datacube_list()[0].get_array()
    # Build the reference: exec the UDF source so apply_datacube lands in
    # this function's namespace, then call it directly.
    # NOTE(review): looking up exec()-created names via locals() is a
    # CPython-specific behavior — fragile, but deliberate here.
    exec(udfcode)
    # Cast to float64 and drop x/y coordinate labels to mirror what the
    # local-execution path apparently feeds the UDF — confirm against
    # execute_local_udf's implementation.
    ref = locals()["apply_datacube"](DataCube(dc.get_array().astype(numpy.float64).drop(labels='x').drop(labels='y')), {}).get_array()
    xarray.testing.assert_allclose(result, ref)
def test_mask_polygon_only(dry_run_env, dry_run_tracer, inside, replacement, expect_spatial_extent):
    """mask_polygon on a bare load_collection: the polygon bbox is only pushed down as a spatial extent when expected."""
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    cube = DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
    cube = cube.mask_polygon(mask=polygon, inside=inside, replacement=replacement)
    evaluate(cube.flat_graph(), env=dry_run_env)
    constraints_by_source = dry_run_tracer.get_source_constraints(merge=True)
    assert len(constraints_by_source) == 1
    source_id, constraints = constraints_by_source[0]
    assert source_id == ("load_collection", ("S2_FOOBAR", ()))
    if expect_spatial_extent:
        # Bounding box of the polygon, in lon/lat.
        assert constraints == {
            "spatial_extent": {"west": 0.0, "south": 0.0, "east": 8.0, "north": 5.0, "crs": "EPSG:4326"}
        }
    else:
        assert constraints == {}
def test_mask_polygon_and_filter_bbox(dry_run_env, dry_run_tracer, bbox_first):
    """Combining mask_polygon and filter_bbox (in either order) keeps the explicit bbox as the spatial extent."""
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    bbox = {"west": -1, "south": -1, "east": 9, "north": 9, "crs": "EPSG:4326"}
    # Use client lib to build process graph in flexible way
    cube = DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
    if bbox_first:
        cube = cube.filter_bbox(bbox=bbox).mask_polygon(mask=polygon)
    else:
        cube = cube.mask_polygon(mask=polygon).filter_bbox(bbox=bbox)
    evaluate(cube.flat_graph(), env=dry_run_env)
    constraints_by_source = dry_run_tracer.get_source_constraints(merge=True)
    assert len(constraints_by_source) == 1
    source_id, constraints = constraints_by_source[0]
    assert source_id == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": -1, "south": -1, "east": 9, "north": 9, "crs": "EPSG:4326"}
    }
def test_run_local_udf_fromfile(self):
    # Check that executing a UDF locally on a datacube loaded from a file
    # matches the result of calling the UDF's apply_datacube() directly.
    from openeo_udf.api.datacube import DataCube
    with TemporaryDirectory() as td:
        dc = self.buildData()
        # Round-trip the datacube through disk, then run the UDF on the file.
        tmpfile = os.path.join(td, 'test_data')
        dc.to_file(tmpfile)
        r = rest_DataCube.execute_local_udf(udfcode, tmpfile)
        result = r.get_datacube_list()[0].get_array()
        # Build the reference: exec the UDF source so apply_datacube lands in
        # this function's namespace, then call it directly.
        # NOTE(review): looking up exec()-created names via locals() is a
        # CPython-specific behavior — fragile, but deliberate here.
        exec(udfcode)
        # Cast to float64 and drop x/y coordinate labels to mirror what the
        # local-execution path apparently feeds the UDF — confirm against
        # execute_local_udf's implementation.
        ref = locals()["apply_datacube"](DataCube(dc.get_array().astype(numpy.float64).drop(labels='x').drop(labels='y')), {}).get_array()
        xarray.testing.assert_allclose(result, ref)
def compute_indices(datacube: DataCube, index_list: list) -> DataCube:
    """
    Compute a list of spectral indices from a datacube.

    :param datacube: an instance of openeo.rest.DataCube
    :param index_list: a list of index names. The following indices are
        currently implemented: NDVI, NDMI, NDGI, ANIR, NDRE1, NDRE2 and NDRE5
    :return: the datacube with the indices attached as extra bands
    """
    # apply_dimension over the band dimension delegates the per-pixel math to
    # _callback; rename_labels then extends the band labels so the new index
    # bands are addressable by name alongside the original bands.
    return datacube.apply_dimension(
        dimension="bands", process=lambda x: _callback(x, index_list, datacube)).rename_labels(
        'bands', target=datacube.metadata.band_names + index_list)
def load_disk_collection(
        self, format: str, glob_pattern: str, options: Optional[dict] = None
) -> Union[ImageCollectionClient, DataCube]:
    """
    Load image data from disk as an ImageCollection.

    :param format: the file format, e.g. 'GTiff'
        (parameter name shadows the `format` builtin, but is kept for API compatibility)
    :param glob_pattern: a glob pattern that matches the files to load from disk
    :param options: options specific to the file format
    :return: the data as an image collection (a DataCube on API version 1.0.0 and up)
    """
    # Fix: the original used a mutable default argument (`options: dict = {}`),
    # which is shared across calls. `None` + normalization is equivalent and safe.
    options = options or {}
    if self._api_version.at_least("1.0.0"):
        return DataCube.load_disk_collection(self, format, glob_pattern, **options)
    return ImageCollectionClient.load_disk_collection(self, format, glob_pattern, **options)
def load_collection(self, collection_id: str, **kwargs) -> Union[ImageCollectionClient, DataCube]:
    """
    Load an image collection by collection id.

    See :py:meth:`openeo.rest.imagecollectionclient.ImageCollectionClient.load_collection`
    for available arguments.

    :param collection_id: image collection identifier (string)
    :return: ImageCollectionClient
    """
    # Legacy (pre-1.0.0) backends go through the old ImageCollectionClient API.
    if not self._api_version.at_least("1.0.0"):
        return ImageCollectionClient.load_collection(collection_id=collection_id, session=self, **kwargs)
    return DataCube.load_collection(collection_id=collection_id, connection=self, **kwargs)
def datacube_from_process(self, process_id: str, **kwargs) -> DataCube:
    """
    Load a raster datacube from a custom process.

    :param process_id: The process id of the custom process.
    :param kwargs: The arguments of the custom process
    :return: A DataCube, without valid metadata, as the client is not aware of this custom process.
    :raises OpenEoClientException: when the backend API version is below 1.0.0
    """
    if not self._api_version.at_least("1.0.0"):
        raise OpenEoClientException(
            "This method requires support for at least version 1.0.0 in the openEO backend.")
    return DataCube(PGNode(process_id, kwargs), self)
def test_resample_filter_spatial(dry_run_env, dry_run_tracer):
    """filter_spatial followed by resample_spatial: both constraints are traced on the source."""
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    cube = (
        DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
        .filter_spatial(geometries=polygon)
        .resample_spatial(projection=4326, resolution=0.25)
    )
    evaluate(cube.flat_graph(), env=dry_run_env)
    constraints_by_source = dry_run_tracer.get_source_constraints(merge=True)
    assert len(constraints_by_source) == 1
    source_id, constraints = constraints_by_source[0]
    assert source_id == ("load_collection", ("S2_FOOBAR", ()))
    traced_geometries, = dry_run_tracer.get_geometries(operation="filter_spatial")
    assert constraints == {
        "spatial_extent": {"west": 0.0, "south": 0.0, "east": 8.0, "north": 5.0, "crs": "EPSG:4326"},
        "filter_spatial": {"geometries": shapely.geometry.shape(polygon)},
        "resample": {"method": "near", "resolution": [0.25, 0.25], "target_crs": 4326},
    }
    assert isinstance(traced_geometries, shapely.geometry.Polygon)
    assert shapely.geometry.mapping(traced_geometries) == {
        "type": "Polygon",
        "coordinates": (((0.0, 0.0), (3.0, 5.0), (8.0, 2.0), (0.0, 0.0)),),
    }
def test_aggregate_spatial_and_filter_bbox(dry_run_env, dry_run_tracer):
    """filter_bbox followed by aggregate_spatial: bbox and geometries are both traced on the source."""
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    bbox = {"west": -1, "south": -1, "east": 9, "north": 9, "crs": "EPSG:4326"}
    cube = (
        DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
        .filter_bbox(bbox=bbox)
        .aggregate_spatial(geometries=polygon, reducer="mean")
    )
    evaluate(cube.flat_graph(), env=dry_run_env)
    constraints_by_source = dry_run_tracer.get_source_constraints(merge=True)
    assert len(constraints_by_source) == 1
    source_id, constraints = constraints_by_source[0]
    assert source_id == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": bbox,
        "aggregate_spatial": {"geometries": shapely.geometry.shape(polygon)},
    }
    traced_geometries, = dry_run_tracer.get_geometries()
    assert isinstance(traced_geometries, shapely.geometry.Polygon)
    assert shapely.geometry.mapping(traced_geometries) == {
        "type": "Polygon",
        "coordinates": (((0.0, 0.0), (3.0, 5.0), (8.0, 2.0), (0.0, 0.0)),),
    }
def test_client_pipeline(self):
    """Build a trivial add(3, 8) graph client-side and evaluate it locally."""
    cube = DataCube(graph=PGNode("add", x=3, y=8), connection=None)
    flat = cube.flat_graph()
    assert SimpleProcessing().evaluate(flat) == 11
def test_run_local_udf_none(self):
    """execute_local_udf without a datacube argument returns None."""
    result = rest_DataCube.execute_local_udf(udfcode)
    self.assertEqual(result, None)
def _get_leaf_node(cube: DataCube) -> dict:
    """Get leaf node (node with result=True), supporting old and new style of graph building."""
    flat_graph = cube.flatten()
    # Exactly one node is expected to carry result=True; the one-element
    # unpacking below raises if that invariant is violated.
    result_nodes = [node for node in flat_graph.values() if node.get("result")]
    (leaf,) = result_nodes
    return leaf