Example #1
def test_extract_result_metadata_aggregate_spatial_delayed_vector():
    tracer = DryRunDataTracer()
    cube = tracer.load_collection(collection_id="Sentinel2",
                                  arguments={
                                      "temporal_extent":
                                      ["2020-02-02", "2020-03-03"],
                                  })
    cube = cube.filter_bbox(west=4, south=51, east=5, north=52)
    geometries = DelayedVector(
        str(get_test_data_file("multipolygon01.geojson")))
    cube = cube.aggregate_spatial(geometries=geometries, reducer="mean")

    metadata = extract_result_metadata(tracer)
    expected = {
        "bbox": (5.0, 5.0, 45.0, 40.0),
        "geometry": {
            'type':
            'Polygon',
            'coordinates': (((5.0, 5.0), (5.0, 40.0), (45.0, 40.0),
                             (45.0, 5.0), (5.0, 5.0)), ),
        },
        "area": {
            "value": approx(6763173869883.0, 1.0),
            "unit": "square meter"
        },
        "start_datetime": "2020-02-02T00:00:00Z",
        "end_datetime": "2020-03-03T00:00:00Z",
        "links": []
    }
    assert metadata == expected
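This test leans on a few names that are not shown. A plausible import block, assuming the usual openeo_driver / openeogeotrellis module layout (get_test_data_file is a local test helper):

# Assumed imports; module paths are a best guess, not verified against this repo.
from pytest import approx
from openeo_driver.delayed_vector import DelayedVector
from openeo_driver.dry_run import DryRunDataTracer
from openeogeotrellis.deploy.batch_job import extract_result_metadata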
Example #2
def test_aggregate_spatial_read_vector(dry_run_env, dry_run_tracer):
    geometry_path = str(get_path("geojson/GeometryCollection01.json"))
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "vector": {"process_id": "read_vector", "arguments": {"filename": geometry_path}},
        "agg": {
            "process_id": "aggregate_spatial",
            "arguments": {
                "data": {"from_node": "lc"},
                "geometries": {"from_node": "vector"},
                "reducer": {
                    "process_graph": {
                        "mean": {
                            "process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True
                        }
                    }
                }
            },
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": 5.05, "south": 51.21, "east": 5.15, "north": 51.3, "crs": "EPSG:4326"},
        "aggregate_spatial": {"geometries": DelayedVector(geometry_path)},
    }
    geometries, = dry_run_tracer.get_geometries()
    assert isinstance(geometries, DelayedVector)
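The dry_run_env and dry_run_tracer fixtures come from the test suite's conftest and are not shown here. A minimal sketch of how they could be wired, assuming EvalEnv takes a plain dict (the exact env key for the tracer and the backend wiring are assumptions):

import pytest
from openeo_driver.dry_run import DryRunDataTracer
from openeo_driver.utils import EvalEnv

@pytest.fixture
def dry_run_tracer() -> DryRunDataTracer:
    return DryRunDataTracer()

@pytest.fixture
def dry_run_env(dry_run_tracer) -> EvalEnv:
    # Hypothetical key name; the real fixture also injects a backend implementation.
    return EvalEnv({"dry_run_tracer": dry_run_tracer, "version": "1.0.0"})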
Example #3
    def _normalize_geometry(
        self, geometries
    ) -> Tuple[Union[DriverVectorCube, DelayedVector, BaseGeometry], dict]:
        """
        Helper to preprocess geometries (as used in aggregate_spatial and mask_polygon)
        and extract bbox (e.g. for filter_bbox)
        """
        # TODO #71 #114 EP-3981 normalize to vector cube instead of GeometryCollection
        if isinstance(geometries, DriverVectorCube):
            bbox = geometries.get_bounding_box()
        elif isinstance(geometries, dict):
            return self._normalize_geometry(geojson_to_geometry(geometries))
        elif isinstance(geometries, str):
            return self._normalize_geometry(DelayedVector(geometries))
        elif isinstance(geometries, DelayedVector):
            bbox = geometries.bounds
        elif isinstance(geometries, shapely.geometry.base.BaseGeometry):
            if isinstance(geometries, Point):
                geometries = buffer_point_approx(geometries, "EPSG:4326")
            elif isinstance(geometries, GeometryCollection):
                geometries = GeometryCollection([
                    buffer_point_approx(geom, "EPSG:4326") if isinstance(geom, Point) else geom
                    for geom in geometries.geoms
                ])

            bbox = geometries.bounds
        else:
            raise ValueError(geometries)
        bbox = dict(west=bbox[0], south=bbox[1], east=bbox[2], north=bbox[3], crs="EPSG:4326")
        return geometries, bbox
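For intuition, here is what the helper yields for a raw GeoJSON dict (hypothetical cube instance; the bbox follows shapely's (minx, miny, maxx, maxy) bounds order):

# Hypothetical usage; `cube` is any object exposing _normalize_geometry.
geojson = {"type": "Polygon", "coordinates": [[[4.0, 51.0], [5.0, 51.0], [5.0, 52.0], [4.0, 51.0]]]}
geometry, bbox = cube._normalize_geometry(geojson)
# The dict branch converts to a shapely geometry, then derives the bbox from its bounds:
assert bbox == {"west": 4.0, "south": 51.0, "east": 5.0, "north": 52.0, "crs": "EPSG:4326"}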
Example #4
def test_feature_collection_bounds():
    dv = DelayedVector(str(get_path("geojson/FeatureCollection01.json")))
    assert dv.bounds == (4.45, 51.1, 4.52, 51.2)
Example #5
def test_geojson_crs_from_ogc_urn():
    dv = DelayedVector(
        str(get_path("geojson/test_geojson_crs_from_ogc_urn.geojson")))
    assert dv.crs == CRS.from_user_input("+init=epsg:4326")
Example #6
def test_geojson_crs_unspecified():
    dv = DelayedVector(
        str(get_path("geojson/test_geojson_crs_unspecified.geojson")))
    assert dv.crs == CRS.from_user_input("+init=epsg:4326")
Example #7
def test_geometry_collection_bounds():
    dv = DelayedVector(str(get_path("geojson/GeometryCollection01.json")))
    assert dv.bounds == (5.05, 51.21, 5.15, 51.3)
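The bounds/CRS tests above all read pre-baked fixture files. A self-contained variant, assuming DelayedVector accepts any local GeoJSON FeatureCollection path and only parses it when an attribute like bounds is accessed:

import json

def test_delayed_vector_bounds_tmp(tmp_path):
    path = tmp_path / "square.geojson"
    path.write_text(json.dumps({
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": {},
            "geometry": {"type": "Polygon",
                         "coordinates": [[[0, 0], [2, 0], [2, 2], [0, 2], [0, 0]]]},
        }],
    }))
    dv = DelayedVector(str(path))
    assert dv.bounds == (0.0, 0.0, 2.0, 2.0)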
Example #8
    def aggregate_spatial(
        self,
        geometries: Union[BaseGeometry, str, DriverVectorCube],
        reducer: dict,
        target_dimension: str = "result",
    ) -> Union[AggregatePolygonResult, AggregatePolygonSpatialResult,
               DriverVectorCube]:

        # TODO: support more advanced reducers too
        assert isinstance(reducer, dict) and len(reducer) == 1
        reducer = next(iter(reducer.values()))["process_id"]
        assert reducer in ("mean", "avg")

        def assert_polygon_sequence(geometries: Union[Sequence, BaseMultipartGeometry]):
            assert len(geometries) > 0
            for g in geometries:
                assert isinstance(g, (Polygon, MultiPolygon))

        # TODO #114 EP-3981 normalize to vector cube and preserve original properties
        if isinstance(geometries, DriverVectorCube):
            # Build dummy aggregation data cube
            dims, coords = geometries.get_xarray_cube_basics()
            if self.metadata.has_temporal_dimension():
                dims += (self.metadata.temporal_dimension.name,)
                coords[self.metadata.temporal_dimension.name] = [
                    "2015-07-06T00:00:00", "2015-08-22T00:00:00"
                ]
            if self.metadata.has_band_dimension():
                dims += (self.metadata.band_dimension.name,)
                coords[self.metadata.band_dimension.name] = self.metadata.band_names
            shape = [len(coords[d]) for d in dims]
            data = numpy.arange(numpy.prod(shape)).reshape(shape)
            cube = xarray.DataArray(data=data, dims=dims, coords=coords, name="aggregate_spatial")
            return geometries.with_cube(cube=cube, flatten_prefix="agg")
        elif isinstance(geometries, str):
            geometries = list(DelayedVector(geometries).geometries)
            assert_polygon_sequence(geometries)
        elif isinstance(geometries, GeometryCollection):
            # TODO #71 #114 EP-3981: GeometryCollection is deprecated
            assert_polygon_sequence(geometries)
        elif isinstance(geometries, BaseGeometry):
            assert_polygon_sequence([geometries])
        else:
            assert_polygon_sequence(geometries)

        if self.metadata.has_temporal_dimension():
            return AggregatePolygonResult(
                timeseries={
                    "2015-07-06T00:00:00": [2.345],
                    "2015-08-22T00:00:00": [float("nan")],
                },
                regions=geometries,
            )
        else:
            return DummyAggregatePolygonSpatialResult(cube=self, geometries=geometries)
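A sketch of how this dummy implementation is typically driven (the cube instance is hypothetical; the reducer must be a single-node process graph whose process_id is "mean" or "avg"):

import shapely.geometry

reducer = {"mean1": {"process_id": "mean",
                     "arguments": {"data": {"from_parameter": "data"}},
                     "result": True}}
polygon = shapely.geometry.box(5.0, 51.0, 6.0, 52.0)
result = cube.aggregate_spatial(geometries=polygon, reducer=reducer)
# With a temporal dimension this returns an AggregatePolygonResult keyed on the two dummy dates.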
Example #9
def read_vector(args: Dict, viewingParameters) -> DelayedVector:
    # Wrap the vector file path lazily: no parsing happens here.
    path = extract_arg(args, "filename")
    return DelayedVector(path)
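read_vector itself does no file I/O; it only wraps the path. A minimal call (assuming extract_arg raises when "filename" is absent):

dv = read_vector({"filename": "/data/geometries.geojson"}, viewingParameters={})
assert isinstance(dv, DelayedVector)  # the file is only opened later, e.g. on dv.bounds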