Example #1
def test_evaluate_predefined_property(backend_implementation):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "TERRASCOPE_S2_FAPAR_V2"}, "result": True},
    }

    env = EvalEnv(dict(backend_implementation=backend_implementation))
    evaluate(pg, do_dry_run=True, env=env)
Example #2
def test_CropSAR_aggregate_spatial_constraint(dry_run_env, dry_run_tracer):
    cropsar_process = load_json("pg/1.0/cropsar_graph.json")
    custom_process_from_process_graph(cropsar_process, namespace="test")

    try:
        geometry_path = str(get_path("geojson/thaipolys_ad.geojson"))

        pg = {
            "CropSAR1": {
                "process_id": "CropSAR",
                "arguments": {
                    "file_polygons": geometry_path,
                    "time_range": [
                        "2019-07-01",
                        "2019-08-31"
                    ]
                },
                "namespace": "test",
                "result": True
            }
        }

        evaluate(pg, env=dry_run_env)
        source_constraints = dry_run_tracer.get_source_constraints(merge=True)

        assert len(source_constraints) > 0

        for _, constraints in source_constraints:
            assert constraints['aggregate_spatial']['geometries'].path == geometry_path
    finally:
        del process_registry_100._processes['test', 'CropSAR']
Example #3
def test_aggregate_spatial_read_vector(dry_run_env, dry_run_tracer):
    geometry_path = str(get_path("geojson/GeometryCollection01.json"))
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "vector": {"process_id": "read_vector", "arguments": {"filename": geometry_path}},
        "agg": {
            "process_id": "aggregate_spatial",
            "arguments": {
                "data": {"from_node": "lc"},
                "geometries": {"from_node": "vector"},
                "reducer": {
                    "process_graph": {
                        "mean": {
                            "process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True
                        }
                    }
                }
            },
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": 5.05, "south": 51.21, "east": 5.15, "north": 51.3, "crs": "EPSG:4326"},
        "aggregate_spatial": {"geometries": DelayedVector(geometry_path)},
    }
    geometries, = dry_run_tracer.get_geometries()
    assert isinstance(geometries, DelayedVector)
Example #4
def test_evaluate_basic_no_load_collection(dry_run_env, dry_run_tracer):
    pg = {
        "add": {"process_id": "add", "arguments": {"x": 1, "y": 2}, "result": True},
    }
    res = evaluate(pg, env=dry_run_env)
    assert res == 3
    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert source_constraints == []
Example #5
def point():
    # TODO: deprecated? do we still need this endpoint? #35
    x = float(request.args.get('x', ''))
    y = float(request.args.get('y', ''))
    srs = request.args.get('srs', None)
    process_graph = _extract_process_graph(request.json)
    image_collection = evaluate(process_graph, viewingParameters={'version': g.api_version})
    return jsonify(image_collection.timeseries(x, y, srs))
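
A hedged client-side sketch of calling an endpoint like this; the route path, host/port and request body shape are assumptions, not taken from the snippet:

import requests

pg = {"process_graph": {"lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}, "result": True}}}
resp = requests.post(
    "http://localhost:8080/timeseries/point",  # assumed route for this view function
    params={"x": 5.1, "y": 51.2, "srs": "EPSG:4326"},  # the point and its reference system, read via request.args
    json=pg,
)
print(resp.json())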
Example #6
def test_evaluate_merge_collections(dry_run_env, dry_run_tracer):
    pg = {
        "load": {
            "process_id": "load_collection",
            "arguments": {
                "id": "S2_FOOBAR",
                "spatial_extent": {"west": 0, "south": 50, "east": 5, "north": 55},
                "temporal_extent": ["2020-01-01", "2020-10-10"],
                "bands": ["red", "green", "blue"]
            },
        },
        "load_s1": {
            "process_id": "load_collection",
            "arguments": {
                "id": "S2_FAPAR_CLOUDCOVER",
                "spatial_extent": {"west": -1, "south": 50, "east": 5, "north": 55},
                "temporal_extent": ["2020-01-01", "2020-10-10"],
                "bands": ["VV"]
            },
        },
        "merge": {
            "process_id": "merge_cubes",
            "arguments": {
                "cube1": {"from_node": "load"},
                "cube2": {"from_node": "load_s1"}
            },
            "result": True

        }
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 2

    source, constraints = source_constraints[0]

    assert source == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "temporal_extent": ("2020-01-01", "2020-10-10"),
        "spatial_extent": {"west": 0, "south": 50, "east": 5, "north": 55, "crs": "EPSG:4326"},
        "bands": ["red", "green", "blue"],
    }

    source, constraints = source_constraints[1]

    assert source == ("load_collection", ("S2_FAPAR_CLOUDCOVER", ()))
    assert constraints == {
        "temporal_extent": ("2020-01-01", "2020-10-10"),
        "spatial_extent": {"west": -1, "south": 50, "east": 5, "north": 55, "crs": "EPSG:4326"},
        "bands": ["VV"],
    }

    dry_run_env = dry_run_env.push({ENV_SOURCE_CONSTRAINTS: source_constraints})
    loadparams = _extract_load_parameters(dry_run_env, ("load_collection", ("S2_FOOBAR", ())))
    assert {"west": -1, "south": 50, "east": 5, "north": 55, "crs": "EPSG:4326"} == loadparams.global_extent
Example #7
def test_aggregate_spatial_get_geometries_feature_collection(dry_run_env, dry_run_tracer):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "vector": {"process_id": "get_geometries", "arguments": {"feature_collection": {
            "type": "FeatureCollection",
            "name": "fields",
            "crs": {
                "type": "name",
                "properties": {
                    "name": "urn:ogc:def:crs:OGC:1.3:CRS84"
                }
            },
            "features": [
                {
                    "type": "Feature",
                    "geometry": {
                        "type": "Polygon",
                        "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]
                    },
                    "properties": {
                        "CODE_OBJ": "0000000000000001"
                    }
                }
            ]
        }}},
        "agg": {
            "process_id": "aggregate_spatial",
            "arguments": {
                "data": {"from_node": "lc"},
                "geometries": {"from_node": "vector"},
                "reducer": {
                    "process_graph": {
                        "mean": {
                            "process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True
                        }
                    }
                }
            },
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    expected_geometry_collection = shapely.geometry.GeometryCollection(
        [shapely.geometry.shape({"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]})]
    )
    assert constraints == {
        "spatial_extent": {'west': 0.0, 'south': 0.0, 'east': 8.0, 'north': 5.0, 'crs': 'EPSG:4326'},
        "aggregate_spatial": {"geometries": expected_geometry_collection}
    }
    geometries, = dry_run_tracer.get_geometries()
    assert isinstance(geometries, shapely.geometry.GeometryCollection)
Example #8
def test_evaluate_basic_load_collection(dry_run_env, dry_run_tracer):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}, "result": True},
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert source_constraints == [
        (("load_collection", ("S2_FOOBAR", ())), {})
    ]
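
Most of these tests rely on dry_run_env and dry_run_tracer pytest fixtures that the snippets do not show. A minimal sketch of what they plausibly look like, assuming the DryRunDataTracer, ENV_DRY_RUN_TRACER and EvalEnv names from openeo_driver:

import pytest
from openeo_driver.dry_run import DryRunDataTracer
from openeo_driver.ProcessGraphDeserializer import ENV_DRY_RUN_TRACER
from openeo_driver.utils import EvalEnv


@pytest.fixture
def dry_run_tracer() -> DryRunDataTracer:
    # Records source constraints (spatial/temporal extent, bands, ...) instead of loading data.
    return DryRunDataTracer()


@pytest.fixture
def dry_run_env(dry_run_tracer, backend_implementation) -> EvalEnv:
    # Passing the tracer through the evaluation environment puts evaluate() in dry-run mode.
    return EvalEnv({
        ENV_DRY_RUN_TRACER: dry_run_tracer,
        "backend_implementation": backend_implementation,
        "version": "1.0.0",
    })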
Example #9
def test_sources_are_subject_to_correct_constraints(dry_run_env, dry_run_tracer):
    pg = {
        'loadcollection1': {
            'process_id': 'load_collection',
            'arguments': {
                'bands': ['VV', 'VH'],
                'id': 'S2_FOOBAR',
                'spatial_extent': {
                    'west': 11.465226, 'east': 11.465435, 'south': 46.343118, 'north': 46.343281, 'crs': 'EPSG:4326'
                },
                'temporal_extent': ['2018-01-01', '2018-01-01'],
            },
        },
        'sarbackscatter1': {
            'process_id': 'sar_backscatter',
            'arguments': {
                'coefficient': 'sigma0-ellipsoid', 'contributing_area': False,
                'data': {'from_node': 'loadcollection1'}, 'elevation_model': None,
                'ellipsoid_incidence_angle': False, 'local_incidence_angle': False,
                'mask': False, 'noise_removal': True,
            },
        },
        'renamelabels1': {
            'process_id': 'rename_labels',
            'arguments': {
                'data': {'from_node': 'sarbackscatter1'}, 'dimension': 'bands',
                'source': ['VV', 'VH'], 'target': ['VV_sigma0', 'VH_sigma0'],
            },
        },
        'sarbackscatter2': {
            'process_id': 'sar_backscatter',
            'arguments': {
                'coefficient': 'gamma0-terrain', 'contributing_area': False,
                'data': {'from_node': 'loadcollection1'}, 'elevation_model': None,
                'ellipsoid_incidence_angle': False, 'local_incidence_angle': False,
                'mask': False, 'noise_removal': True,
            },
        },
        'renamelabels2': {
            'process_id': 'rename_labels',
            'arguments': {
                'data': {'from_node': 'sarbackscatter2'}, 'dimension': 'bands',
                'source': ['VV', 'VH'], 'target': ['VV_gamma0', 'VH_gamma0'],
            },
        },
        'mergecubes1': {
            'process_id': 'merge_cubes',
            'arguments': {'cube1': {'from_node': 'renamelabels1'}, 'cube2': {'from_node': 'renamelabels2'}},
            'result': True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 2

    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "temporal_extent": ('2018-01-01', '2018-01-01'),
        "spatial_extent": {'west': 11.465226, 'east': 11.465435, 'south': 46.343118, 'north': 46.343281,
                           'crs': 'EPSG:4326'},
        'bands': ['VV', 'VH'],
        'sar_backscatter': SarBackscatterArgs(coefficient='sigma0-ellipsoid', elevation_model=None, mask=False,
                                              contributing_area=False, local_incidence_angle=False,
                                              ellipsoid_incidence_angle=False, noise_removal=True, options={})
    }

    src, constraints = source_constraints[1]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "temporal_extent": ('2018-01-01', '2018-01-01'),
        "spatial_extent": {'west': 11.465226, 'east': 11.465435, 'south': 46.343118, 'north': 46.343281,
                           'crs': 'EPSG:4326'},
        'bands': ['VV', 'VH'],
        'sar_backscatter': SarBackscatterArgs(coefficient='gamma0-terrain', elevation_model=None, mask=False,
                                              contributing_area=False, local_incidence_angle=False,
                                              ellipsoid_incidence_angle=False, noise_removal=True, options={})
    }
Example #10
def test_evaluate_load_collection_and_filter_extents(dry_run_env, dry_run_tracer):
    """temporal/bbox/band extents in load_collection *and* filter_ processes"""
    pg = {
        "load": {
            "process_id": "load_collection",
            "arguments": {
                "id": "S2_FOOBAR",
                "spatial_extent": {"west": 0, "south": 50, "east": 5, "north": 55},
                "temporal_extent": ["2020-01-01", "2020-10-10"],
                "bands": ["red", "green", "blue"]
            },
        },
        "filter_temporal": {
            "process_id": "filter_temporal",
            "arguments": {"data": {"from_node": "load"}, "extent": ["2020-02-02", "2020-03-03"]},
        },
        "filter_bbox": {
            "process_id": "filter_bbox",
            "arguments": {
                "data": {"from_node": "filter_temporal"},
                "extent": {"west": 1, "south": 51, "east": 3, "north": 53}
            },
        },
        "filter_bands": {
            "process_id": "filter_bands",
            "arguments": {"data": {"from_node": "filter_bbox"}, "bands": ["red"]},
            "result": True,
        }
    }
    cube = evaluate(pg, env=dry_run_env)

    """
    source_constraints = dry_run_tracer.get_source_constraints(merge=False)
    assert len(source_constraints) == 1
    src, constraints = source_constraints.popitem()
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == [{
        "temporal_extent": [("2020-01-01", "2020-10-10"), ("2020-02-02", "2020-03-03"), ],
        "spatial_extent": [
            {"west": 0, "south": 50, "east": 5, "north": 55, "crs": "EPSG:4326"},
            {"west": 1, "south": 51, "east": 3, "north": 53, "crs": "EPSG:4326"}
        ],
        "bands": [["red", "green", "blue"], ["red"]],
    }]
    """

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "temporal_extent": ("2020-01-01", "2020-10-10"),
        "spatial_extent": {"west": 0, "south": 50, "east": 5, "north": 55, "crs": "EPSG:4326"},
        "bands": ["red", "green", "blue"],
    }
Example #11
def test_evaluate_drop_dimension(dry_run_env, dry_run_tracer, dimension_name, expected):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "drop": {
            "process_id": "drop_dimension",
            "arguments": {"data": {"from_node": "lc"}, "name": dimension_name},
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)
    assert cube.metadata.dimension_names() == expected
Example #12
def download():
    # TODO: deprecated?
    if request.method == 'POST':
        outputformat = request.args.get('outputformat', 'geotiff')

        process_graph = request.get_json()
        image_collection = evaluate(process_graph)
        # TODO Unify with execute?
        filename = image_collection.download(None, outputformat=outputformat)

        return send_from_directory(os.path.dirname(filename), os.path.basename(filename))
    else:
        return 'Usage: Download image using POST.'
Example #13
def test_evaluate_graph_diamond(dry_run_env, dry_run_tracer):
    """
    Diamond graph:
    load -> band red -> mask -> bbox
        `-> band grass -^
    """
    pg = {
        "load": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "band_red": {
            "process_id": "filter_bands",
            "arguments": {"data": {"from_node": "load"}, "bands": ["red"]},
        },
        "band_grass": {
            "process_id": "filter_bands",
            "arguments": {"data": {"from_node": "load"}, "bands": ["grass"]},

        },
        "mask": {
            "process_id": "mask",
            "arguments": {"data": {"from_node": "band_red"}, "mask": {"from_node": "band_grass"}},
        },
        "bbox": {
            "process_id": "filter_bbox",
            "arguments": {"data": {"from_node": "mask"}, "extent": {"west": 1, "east": 2, "south": 51, "north": 52}},
            "result": True,
        }
    }
    cube = evaluate(pg, env=dry_run_env)

    """
    source_constraints = dry_run_tracer.get_source_constraints(merge=False)
    assert len(source_constraints) == 1
    src, constraints = source_constraints.popitem()
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert sorted(constraints, key=str) == [
    """

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 2
    assert source_constraints == [(
        ("load_collection", ("S2_FOOBAR", ())),
        {
            "bands": ["grass"],
            "spatial_extent": {"west": 1, "east": 2, "south": 51, "north": 52, "crs": "EPSG:4326"}
        }), (
        ("load_collection", ("S2_FOOBAR", ())),
        {
            "bands": ["red"],
            "spatial_extent": {"west": 1, "east": 2, "south": 51, "north": 52, "crs": "EPSG:4326"}
        }),
    ]
Example #14
def test_evaluate_sar_backscatter(dry_run_env, dry_run_tracer, arguments, expected):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "sar": {
            "process_id": "sar_backscatter",
            "arguments": dict(data={"from_node": "lc"}, **arguments),
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {"sar_backscatter": expected}
Example #15
def test_evaluate_atmospheric_correction(dry_run_env, dry_run_tracer, arguments, expected):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "sar": {
            "process_id": "atmospheric_correction",
            "arguments": dict(data={"from_node": "lc"}, **arguments),
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    metadata_links = dry_run_tracer.get_metadata_links()
    assert len(metadata_links) == 1
    src, links = metadata_links.popitem()
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert links == expected
Example #16
def test_evaluate_temporal_extent_dynamic_item(dry_run_env, dry_run_tracer):
    pg = {
        "load": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "start": {"process_id": "constant", "arguments": {"x": "2020-01-01"}},
        "filtertemporal": {
            "process_id": "filter_temporal",
            "arguments": {"data": {"from_node": "load"}, "extent": [{"from_node": "start"}, "2020-02-02"]},
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)
    source_constraints = dry_run_tracer.get_source_constraints()
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {"temporal_extent": ("2020-01-01", "2020-02-02")}
Example #17
def test_mask_polygon_and_load_collection_spatial_extent(dry_run_env, dry_run_tracer):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    cube = DataCube(PGNode(
        "load_collection", id="S2_FOOBAR",
        spatial_extent={"west": -1, "south": -1, "east": 10, "north": 10}
    ), connection=None)
    cube = cube.mask_polygon(mask=polygon)
    pg = cube.flat_graph()
    res = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": -1, "south": -1, "east": 10, "north": 10, "crs": "EPSG:4326"}
    }
Example #18
def main(argv=None):
    logging.basicConfig(level=logging.INFO)
    process_graph, args = handle_cli(argv)
    _log.info("Evaluating process graph: {pg}".format(pg=safe_repr(process_graph)))

    _setup_local_spark(print=_log.info)

    # Local imports to workaround the pyspark import issues.
    from openeo_driver.ProcessGraphDeserializer import evaluate
    from openeogeotrellis.backend import GeoPySparkBackendImplementation

    env = EvalEnv({
        "version": args.api_version,
        "pyramid_levels": "highest",
        "user": None,  # TODO
        "require_bounds": True,
        "correlation_id": f"cli-pid{os.getpid()}",
        "backend_implementation": GeoPySparkBackendImplementation(use_zookeeper=False),
    })

    with TimingLogger(title="Evaluate process graph", logger=_log):
        result = evaluate(process_graph, env=env)

    if isinstance(result, ImageCollectionResult):
        filename = args.output or f"result.{result.format}"
        with TimingLogger(title=f"Saving result to {filename!r}", logger=_log):
            result.save_result(filename)
    elif isinstance(result, JSONResult):
        if args.output:
            with open(args.output, "w") as f:
                json.dump(result.prepare_for_json(), f)
        else:
            print(result.prepare_for_json())
    elif isinstance(result, dict):
        # TODO: support storing JSON result to file
        print(result)
    else:
        # TODO: support more result types
        raise ValueError(result)
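
A hedged invocation example for this entry point; the exact flag names depend on handle_cli, which is not shown here:

# Assumed command line, based on the args attributes used above (api_version, output):
#   python cli.py process_graph.json --api-version 1.0.0 --output result.tiff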
Example #19
def execute():
    # TODO: This is not an official endpoint; does this "/execute" still have to be exposed as a route?
    post_data = request.get_json()
    process_graph = _extract_process_graph(post_data)
    result = evaluate(process_graph, viewingParameters={'version': g.api_version})

    # TODO unify all this output handling within SaveResult logic?
    if isinstance(result, ImageCollection):
        format_options = post_data.get('output', {})
        filename = result.download(None, bbox="", time="", **format_options)
        return send_from_directory(os.path.dirname(filename), os.path.basename(filename))
    elif result is None:
        abort(500, "Process graph evaluation gave no result")
    elif isinstance(result, SaveResult):
        return result.create_flask_response()
    else:
        return jsonify(replace_nan_values(result))
Example #20
def test_mask_polygon_only(dry_run_env, dry_run_tracer, inside, replacement, expect_spatial_extent):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    cube = DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
    cube = cube.mask_polygon(mask=polygon, inside=inside, replacement=replacement)
    pg = cube.flat_graph()
    res = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    if expect_spatial_extent:
        expected = {
            "spatial_extent": {"west": 0.0, "south": 0.0, "east": 8.0, "north": 5.0, "crs": "EPSG:4326"}
        }
    else:
        expected = {}
    assert constraints == expected
Example #21
    def create_service(self, process_graph: dict, service_type: str, api_version: str, post_data: dict) -> ServiceMetadata:
        """
        https://open-eo.github.io/openeo-api/apireference/#tag/Secondary-Services-Management/paths/~1services/post
        :return: ServiceMetadata of the newly created service
        """
        from openeo_driver.ProcessGraphDeserializer import evaluate
        # TODO require auth/user handle?
        if service_type.lower() not in set(st.lower() for st in self.service_types()):
            raise ServiceUnsupportedException(
                message="Secondary service type {t!r} is not supported.".format(t=service_type),
            )

        image_collection = evaluate(process_graph, viewingParameters={'version': api_version})
        service_metadata = image_collection.tiled_viewing_service(
            service_type=service_type,
            process_graph=process_graph,
            post_data=post_data
        )
        return service_metadata
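
A minimal usage sketch for create_service, assuming services is an instance of the class above; the service_type value and post_data shape are illustrative assumptions:

pg = {"lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}, "result": True}}
service_metadata = services.create_service(
    process_graph=pg,
    service_type="WMTS",  # must match one of services.service_types(), compared case-insensitively
    api_version="1.0.0",
    post_data={"type": "WMTS", "process_graph": pg},
)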
Example #22
def test_mask_polygon_and_filter_bbox(dry_run_env, dry_run_tracer, bbox_first):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    bbox = {"west": -1, "south": -1, "east": 9, "north": 9, "crs": "EPSG:4326"}
    # Use client lib to build process graph in flexible way
    cube = DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
    if bbox_first:
        cube = cube.filter_bbox(bbox=bbox).mask_polygon(mask=polygon)
    else:
        cube = cube.mask_polygon(mask=polygon).filter_bbox(bbox=bbox)

    pg = cube.flat_graph()
    res = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": -1, "south": -1, "east": 9, "north": 9, "crs": "EPSG:4326"}
    }
Example #23
def test_load_collection_properties(dry_run_env, dry_run_tracer):
    def get_props(direction="DESCENDING"):
        return {
            "orbitDirection": {
                "process_graph": {
                    "od": {
                        "process_id": "eq",
                        "arguments": {
                            "x": {"from_parameter": "value"},
                            "y": direction
                        },
                        "result": True
                    }
                }
            }
        }

    properties = get_props()
    asc_props = get_props("ASCENDING")
    pg = {
        "lc": {
            "process_id": "load_collection", "arguments": {"id": "S2_FOOBAR", "properties": properties},
        },
        "lc2": {
            "process_id": "load_collection", "arguments": {"id": "S2_FOOBAR", "properties": asc_props},
        },
        "merge": {
            "process_id": "merge_cubes",
            "arguments": {"cube1": {"from_node": "lc"}, "cube2": {"from_node": "lc2"}},
            "result": True
        }
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)

    assert source_constraints == [
        (("load_collection", ("S2_FOOBAR", (("orbitDirection", (("eq", "DESCENDING"),),),))), {"properties": properties}),
        (("load_collection", ("S2_FOOBAR", (("orbitDirection", (("eq", "ASCENDING"),)),),)), {"properties": asc_props})
    ]
Example #24
def test_aggregate_spatial_and_filter_bbox(dry_run_env, dry_run_tracer):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    bbox = {"west": -1, "south": -1, "east": 9, "north": 9, "crs": "EPSG:4326"}
    cube = DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
    cube = cube.filter_bbox(bbox=bbox)
    cube = cube.aggregate_spatial(geometries=polygon, reducer="mean")

    pg = cube.flat_graph()
    res = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": bbox,
        "aggregate_spatial": {"geometries": shapely.geometry.shape(polygon)},
    }
    geometries, = dry_run_tracer.get_geometries()
    assert isinstance(geometries, shapely.geometry.Polygon)
    assert shapely.geometry.mapping(geometries) == {
        "type": "Polygon",
        "coordinates": (((0.0, 0.0), (3.0, 5.0), (8.0, 2.0), (0.0, 0.0)),)
    }
Example #25
def test_evaluate_basic_filter_temporal(dry_run_env, dry_run_tracer):
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "ft": {
            "process_id": "filter_temporal",
            "arguments": {"data": {"from_node": "lc"}, "extent": ["2020-02-02", "2020-03-03"]},
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    """
    source_constraints = dry_run_tracer.get_source_constraints(merge=False)
    assert len(source_constraints) == 1
    src, constraints = source_constraints.popitem()
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == [{"temporal_extent": [("2020-02-02", "2020-03-03")]}]
    """

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {"temporal_extent": ("2020-02-02", "2020-03-03")}
Example #26
def test_aggregate_spatial_only(dry_run_env, dry_run_tracer):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    pg = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR"}},
        "agg": {
            "process_id": "aggregate_spatial",
            "arguments": {
                "data": {"from_node": "lc"},
                "geometries": polygon,
                "reducer": {
                    "process_graph": {
                        "mean": {
                            "process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True
                        }
                    }
                }
            },
            "result": True,
        },
    }
    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": 0.0, "south": 0.0, "east": 8.0, "north": 5.0, "crs": "EPSG:4326"},
        "aggregate_spatial": {"geometries": shapely.geometry.shape(polygon)},
    }
    geometries, = dry_run_tracer.get_geometries()
    assert isinstance(geometries, shapely.geometry.Polygon)
    assert shapely.geometry.mapping(geometries) == {
        "type": "Polygon",
        "coordinates": (((0.0, 0.0), (3.0, 5.0), (8.0, 2.0), (0.0, 0.0)),)
    }
Example #27
def test_resample_filter_spatial(dry_run_env, dry_run_tracer):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    cube = DataCube(PGNode("load_collection", id="S2_FOOBAR"), connection=None)
    cube = cube.filter_spatial(geometries=polygon)
    cube = cube.resample_spatial(projection=4326, resolution=0.25)

    pg = cube.flat_graph()
    res = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    geometries, = dry_run_tracer.get_geometries(operation="filter_spatial")
    assert constraints == {
        "spatial_extent": {'crs': 'EPSG:4326','east': 8.0,'north': 5.0,'south': 0.0,'west': 0.0},
        "filter_spatial": {"geometries": shapely.geometry.shape(polygon)},
        "resample": {'method': 'near', 'resolution': [0.25, 0.25], 'target_crs': 4326},
    }
    assert isinstance(geometries, shapely.geometry.Polygon)
    assert shapely.geometry.mapping(geometries) == {
        "type": "Polygon",
        "coordinates": (((0.0, 0.0), (3.0, 5.0), (8.0, 2.0), (0.0, 0.0)),)
    }
Example #28
def test_filter_after_merge_cubes(dry_run_env, dry_run_tracer):
    """based on use case of https://jira.vito.be/browse/EP-3747"""
    pg = {
        "loadcollection1": {
            "process_id": "load_collection",
            "arguments": {"id": "S2_FOOBAR", "bands": ["B04", "B08"]}
        },
        "reducedimension1": {
            "process_id": "reduce_dimension",
            "arguments": {
                "data": {"from_node": "loadcollection1"},
                "dimension": "bands",
                "reducer": {
                    "process_graph": {
                        "arrayelement1": {
                            "process_id": "array_element",
                            "arguments": {"data": {"from_parameter": "data"}, "index": 1}
                        },
                        "arrayelement2": {
                            "process_id": "array_element",
                            "arguments": {"data": {"from_parameter": "data"}, "index": 0}
                        },
                        "subtract1": {
                            "process_id": "subtract",
                            "arguments": {"x": {"from_node": "arrayelement1"}, "y": {"from_node": "arrayelement2"}}
                        },
                        "add1": {
                            "process_id": "add",
                            "arguments": {"x": {"from_node": "arrayelement1"}, "y": {"from_node": "arrayelement2"}}
                        },
                        "divide1": {
                            "process_id": "divide",
                            "arguments": {"x": {"from_node": "subtract1"}, "y": {"from_node": "add1"}},
                            "result": True
                        }
                    }
                }
            }
        },
        "adddimension1": {
            "process_id": "add_dimension",
            "arguments": {
                "data": {"from_node": "reducedimension1"}, "label": "s2_ndvi", "name": "bands",
                "type": "bands"}
        },
        "loadcollection2": {
            "process_id": "load_collection",
            "arguments": {"id": "PROBAV_L3_S10_TOC_NDVI_333M_V2", "bands": ["ndvi"], }
        },
        "resamplecubespatial1": {
            "process_id": "resample_cube_spatial",
            "arguments": {
                "data": {"from_node": "loadcollection2"},
                "method": "near",
                "target": {"from_node": "adddimension1"}
            }
        },
        "maskpolygon1": {
            "process_id": "mask_polygon",
            "arguments": {
                "data": {"from_node": "resamplecubespatial1"},
                "mask": {
                    "type": "Polygon",
                    "coordinates": [[
                        [5.03536, 51.219], [5.03586, 51.230], [5.01754, 51.231], [5.01704, 51.219], [5.03536, 51.219]
                    ]]
                }
            }
        },
        "mergecubes1": {
            "process_id": "merge_cubes",
            "arguments": {
                "cube1": {"from_node": "adddimension1"},
                "cube2": {"from_node": "maskpolygon1"}
            }
        },
        "filtertemporal1": {
            "process_id": "filter_temporal",
            "arguments": {
                "data": {"from_node": "mergecubes1"},
                "extent": ["2019-03-01", "2019-04-01"]
            }
        },
        "filterbbox1": {
            "process_id": "filter_bbox",
            "arguments": {
                "data": {"from_node": "filtertemporal1"},
                "extent": {
                    "west": 640860.0, "east": 642140.0, "north": 5677450.0, "south": 5676170.0, "crs": "EPSG:32631"
                }
            },
            "result": True
        }
    }

    cube = evaluate(pg, env=dry_run_env)
    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert source_constraints == [
        (
            ('load_collection', ('S2_FOOBAR', ())),
            {
                'bands': ['B04', 'B08'],
                'spatial_extent': {
                    'crs': 'EPSG:32631', 'east': 642140.0, 'north': 5677450.0, 'south': 5676170.0, 'west': 640860.0,
                },
                'temporal_extent': ('2019-03-01', '2019-04-01')}
        ),
        (
            ('load_collection', ('PROBAV_L3_S10_TOC_NDVI_333M_V2', ())),
            {
                'bands': ['ndvi'],
                'process_type': [ProcessType.FOCAL_SPACE],
                'resample': {'resolution': [10, 10], 'target_crs': 'AUTO:42001'},
                'spatial_extent': {
                    'crs': 'EPSG:32631', 'east': 642140.0, 'north': 5677450.0, 'south': 5676170.0, 'west': 640860.0,
                },
                'temporal_extent': ('2019-03-01', '2019-04-01')}
        ),
        (
            ('load_collection', ('S2_FOOBAR', ())),
            {
                'bands': ['B04', 'B08'],
                'spatial_extent': {
                    'crs': 'EPSG:32631', 'east': 642140.0, 'north': 5677450.0, 'south': 5676170.0, 'west': 640860.0,
                },
                'temporal_extent': ('2019-03-01', '2019-04-01')}
        )
    ]
Example #29
def test_aggregate_spatial_apply_dimension(dry_run_env, dry_run_tracer):
    polygon = {"type": "Polygon", "coordinates": [[(0, 0), (3, 5), (8, 2), (0, 0)]]}
    pg = {
        'loadcollection1': {
            'process_id': 'load_collection',
            'arguments': {
                'bands': ['B04', 'B08', 'B11', 'SCL'],
                'id': 'S2_FOOBAR',
                'spatial_extent': None,
                'temporal_extent': ['2018-11-01', '2020-02-01'],
            },
        },
        'maskscldilation1': {
            'process_id': 'mask_scl_dilation',
            'arguments': {'data': {'from_node': 'loadcollection1'}, 'scl_band_name': 'SCL'},
        },
        'aggregatetemporalperiod1': {
            'process_id': 'aggregate_temporal_period',
            'arguments': {
                'data': {'from_node': 'maskscldilation1'},
                'period': 'month',
                'reducer': {'process_graph': {
                    'mean1': {'process_id': 'mean', 'arguments': {'data': {'from_parameter': 'data'}}, 'result': True}
                }},
            },
        },
        'applydimension1': {
            'process_id': 'apply_dimension',
            'arguments': {
                'data': {'from_node': 'aggregatetemporalperiod1'},
                'dimension': 't',
                'process': {'process_graph': {
                    'arrayinterpolatelinear1': {
                        'process_id': 'array_interpolate_linear',
                        'arguments': {'data': {'from_parameter': 'data'}},
                        'result': True,
                    }
                }},
            },
        },
        'filtertemporal1': {
            'process_id': 'filter_temporal',
            'arguments': {'data': {'from_node': 'applydimension1'}, 'extent': ['2019-01-01', '2020-01-01']},
        },
        'applydimension2': {
            'process_id': 'apply_dimension',
            'arguments': {
                'data': {'from_node': 'filtertemporal1'},
                'dimension': 'bands',
                'process': {'process_graph': {
                    'arrayelement1': {
                        'process_id': 'array_element',
                        'arguments': {'data': {'from_parameter': 'data'}, 'index': 1},
                    },
                    'arrayelement2': {
                        'process_id': 'array_element',
                        'arguments': {'data': {'from_parameter': 'data'}, 'index': 0},
                    },
                    'normalizeddifference1': {
                        'process_id': 'normalized_difference',
                        'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': {'from_node': 'arrayelement2'}},
                    },
                    'arraymodify1': {
                        'process_id': 'array_modify',
                        'arguments': {
                            'data': {'from_parameter': 'data'},
                            'index': 0,
                            'values': {'from_node': 'normalizeddifference1'},
                        },
                        'result': True,
                    },
                }},
            },
        },
        'renamelabels1': {
            'process_id': 'rename_labels',
            'arguments': {
                'data': {'from_node': 'applydimension2'},
                'dimension': 'bands',
                'target': ['NDVI', 'B04', 'B08'],
            },
        },
        'aggregatespatial1': {
            'process_id': 'aggregate_spatial',
            'arguments': {
                'data': {'from_node': 'renamelabels1'},
                'geometries': polygon,
                'reducer': {'process_graph': {
                    'mean2': {'process_id': 'mean', 'arguments': {'data': {'from_parameter': 'data'}}, 'result': True}
                }},
            },
            'result': True,
        },
    }

    cube = evaluate(pg, env=dry_run_env)

    source_constraints = dry_run_tracer.get_source_constraints(merge=True)
    assert len(source_constraints) == 1
    src, constraints = source_constraints[0]
    assert src == ("load_collection", ("S2_FOOBAR", ()))
    assert constraints == {
        "spatial_extent": {"west": 0.0, "south": 0.0, "east": 8.0, "north": 5.0, "crs": "EPSG:4326"},
        "process_type": [ProcessType.GLOBAL_TIME],
        "bands": ["B04", "B08", "B11", "SCL"],
        "custom_cloud_mask": {"method": "mask_scl_dilation", 'scl_band_name': 'SCL'},
        "aggregate_spatial": {"geometries": shapely.geometry.shape(polygon)},
        "temporal_extent": ("2018-11-01", "2020-02-01")
    }
    geometries, = dry_run_tracer.get_geometries()
    assert isinstance(geometries, shapely.geometry.Polygon)
    assert shapely.geometry.mapping(geometries) == {
        "type": "Polygon",
        "coordinates": (((0.0, 0.0), (3.0, 5.0), (8.0, 2.0), (0.0, 0.0)),)
    }