def test_pgnode_normalize_pgnode_args():
    """Bare PGNode args and explicit {"from_node": ...} wrappers serialize identically."""
    bar_node = PGNode("bar", color="red")
    xev_node = PGNode("xev", color="green")
    graph = PGNode("foo", x=bar_node, y={"from_node": xev_node})
    expected = {
        "process_id": "foo",
        "arguments": {
            "x": {"from_node": {"process_id": "bar", "arguments": {"color": "red"}}},
            "y": {"from_node": {"process_id": "xev", "arguments": {"color": "green"}}},
        },
    }
    assert graph.to_dict() == expected
Esempio n. 2
0
    def _polygonal_timeseries(self, polygon: Union[Polygon, MultiPolygon, str], func: str) -> 'DataCube':
        """Build an `aggregate_spatial` node reducing pixels inside the given geometry with `func`."""
        if isinstance(polygon, str):
            # A string is treated as a backend-side path to a vector file.
            # TODO this is non-standard process: check capabilities? #104 #40
            geometries = PGNode(process_id="read_vector", arguments={"filename": polygon})
        else:
            geometries = shapely.geometry.mapping(polygon)
            geometries['crs'] = {
                'type': 'name',  # TODO: name?
                'properties': {'name': 'EPSG:4326'}
            }

        reducer = PGNode(
            process_id=func,
            arguments={"data": {"from_parameter": "data"}},
        )
        aggregate = PGNode(
            process_id="aggregate_spatial",
            arguments={
                "data": self._pg,
                "geometries": geometries,
                "reducer": {"process_graph": reducer},
                # TODO #125 target dimension, context
            },
        )
        return self.process_with_node(aggregate)
def test_reduce_node_process_graph():
    """ReduceNode serializes its reducer under 'reducer' -> 'process_graph'."""
    reducer = PGNode("array_element", data={"from_argument": "data"}, index=3)
    source = PGNode("load_collection", collection_id="S2")
    graph = ReduceNode(source, reducer=reducer, dimension="time")
    expected = {
        'process_id': 'reduce_dimension',
        'arguments': {
            'data': {'from_node': {
                'process_id': 'load_collection',
                'arguments': {'collection_id': 'S2'},
            }},
            'reducer': {'process_graph': {
                'process_id': 'array_element',
                'arguments': {'data': {'from_argument': 'data'}, 'index': 3},
            }},
            'dimension': 'time',
        },
    }
    assert graph.to_dict() == expected
Esempio n. 4
0
def test_apply_callback_chain_pgnode(con100):
    """A chained PGNode callback in `apply` matches the reference process graph."""
    cube = con100.load_collection("S2")
    callback = PGNode(
        "add",
        x=PGNode("cos", x=PGNode("absolute", x={"from_parameter": "x"})),
        y=1.23,
    )
    result = cube.apply(callback)
    assert result.graph == load_json_resource('data/1.0.0/apply_chain.json')
 def test_parameter_substitution_defined(self):
     """Unflattening substitutes the provided value for a `from_parameter` reference."""
     flat_graph = {
         "add": {"process_id": "add", "arguments": {"x": 1, "y": {"from_parameter": "increment"}}},
         "mul": {"process_id": "multiply", "arguments": {"x": {"from_node": "add"}, "y": 3}, "result": True},
     }
     result: PGNode = PGNodeGraphUnflattener.unflatten(flat_graph, parameters={"increment": 100})
     # Fixed: dropped the stray `x =` from the original chained assignment (`x` was never used).
     expected = PGNode("multiply", x=PGNode("add", x=1, y=100), y=3)
     assert result == expected
 def test_basic(self):
     """A linear three-node flat graph unflattens into nested PGNodes."""
     flat_graph = {
         "add12": {"process_id": "add", "arguments": {"x": 1, "y": 2}},
         "mul3": {"process_id": "multiply", "arguments": {"x": {"from_node": "add12"}, "y": 3}},
         "div4": {"process_id": "divide", "arguments": {"x": {"from_node": "mul3"}, "y": 4}, "result": True},
     }
     unflattened: PGNode = PGNodeGraphUnflattener.unflatten(flat_graph)
     add = PGNode("add", x=1, y=2)
     mul = PGNode("multiply", x=add, y=3)
     assert unflattened == PGNode("divide", x=mul, y=4)
def test_pgnode_to_dict_subprocess_graphs():
    """Sub-process graphs nest under `process_graph` in to_dict() and flatten with generated ids."""
    source = PGNode("load_collection", collection_id="S2")
    element = PGNode("array_element", data={"from_argument": "data"}, index=2)
    reducer = PGNode("add", x={"from_node": element}, y=2)
    graph = ReduceNode(data=source, reducer=reducer, dimension='bands')

    expected_nested = {
        'process_id': 'reduce_dimension',
        'arguments': {
            'data': {'from_node': {
                'process_id': 'load_collection',
                'arguments': {'collection_id': 'S2'},
            }},
            'dimension': 'bands',
            'reducer': {'process_graph': {
                'process_id': 'add',
                'arguments': {
                    'x': {"from_node": {
                        'process_id': 'array_element',
                        'arguments': {'data': {'from_argument': 'data'}, 'index': 2},
                    }},
                    'y': 2,
                },
            }},
        },
    }
    assert graph.to_dict() == expected_nested

    expected_flat = {
        "loadcollection1": {
            'process_id': 'load_collection',
            'arguments': {'collection_id': 'S2'},
        },
        "reducedimension1": {
            'process_id': 'reduce_dimension',
            'arguments': {
                'data': {'from_node': "loadcollection1"},
                'dimension': 'bands',
                'reducer': {'process_graph': {
                    "arrayelement1": {
                        'process_id': 'array_element',
                        'arguments': {'data': {'from_argument': 'data'}, 'index': 2},
                    },
                    "add1": {
                        'process_id': 'add',
                        'arguments': {'x': {"from_node": "arrayelement1"}, 'y': 2},
                        'result': True,
                    },
                }},
            },
            "result": True,
        },
    }
    assert graph.flat_graph() == expected_flat
 def test_pgnode_reuse(self):
     """A node referenced twice in a flat graph unflattens to one shared PGNode instance."""
     flat_graph = {
         "value1": {"process_id": "constant", "arguments": {"x": 1}},
         "add1": {
             "process_id": "add",
             "arguments": {"x": {"from_node": "value1"}, "y": {"from_node": "value1"}},
             "result": True
         },
     }
     unflattened: PGNode = PGNodeGraphUnflattener.unflatten(flat_graph)
     assert unflattened == PGNode("add", x=PGNode("constant", x=1), y=PGNode("constant", x=1))
     # Both arguments must reference the very same object, not merely equal copies.
     assert unflattened.arguments["x"]["from_node"] is unflattened.arguments["y"]["from_node"]
Esempio n. 9
0
    def apply_dimension(
            self, code: str, runtime=None, version="latest", dimension='t', target_dimension=None
    ) -> 'DataCube':
        """
        Apply a user defined process to all pixel values along a dimension of this raster data cube:
        e.g. along the temporal dimension the process operates on whole time series of pixel values.

        Related processes: ``reduce_dimension`` also applies a process along a dimension but drops
        the dimension afterwards, while ``apply`` works on each pixel value individually.

        The target dimension defaults to the source dimension. Pixel values in the target dimension
        are replaced by the computed values; name, type and reference system are preserved.
        Dimension labels are kept when the target equals the source dimension and the number of
        computed values matches the source; otherwise labels become incrementing integers from zero
        (changeable with ``rename_labels``).

        :param code: UDF code or process identifier
        :param runtime: UDF runtime to use (when falsy, `code` is interpreted as a process id)
        :param version: version of the UDF runtime to use
        :param dimension: name of the source dimension to apply the process on.
            Fails with a DimensionNotAvailable error if the dimension does not exist.
        :param target_dimension: name of the target dimension, or None (default) to reuse the
            source dimension. When given, the source dimension is removed and a target dimension
            of type "other" is created if it doesn't exist yet.
        :return: a DataCube with the UDF applied to the given dimension.
        :raises: DimensionNotAvailable
        """
        if runtime:
            # UDF path: wrap the code in a run_udf callback.
            process = self._create_run_udf(code, runtime, version)
        else:
            # Plain process id: build a single-process callback on the "data" parameter.
            process = PGNode(process_id=code, arguments={"data": {"from_parameter": "data"}})
        arguments = {
            "data": self._pg,
            "process": PGNode.to_process_graph_argument(process),
            "dimension": self.metadata.assert_valid_dimension(dimension),
            # TODO #125 arguments: context
        }
        if target_dimension is not None:
            arguments["target_dimension"] = target_dimension
        node = PGNode(process_id="apply_dimension", arguments=arguments)
        return self.process_with_node(node)
Esempio n. 10
0
 def apply(self, process: Union[str, PGNode], data_argument='x') -> 'DataCube':
     """Apply a unary process to each pixel (process id string or prebuilt PGNode callback)."""
     if isinstance(process, str):
         # A plain string is shorthand for a single-process callback.
         process = PGNode(
             process_id=process,
             arguments={data_argument: {"from_parameter": "x"}}
         )
     node = PGNode(
         process_id='apply',
         arguments={
             "data": self._pg,
             "process": {"process_graph": process},
             # TODO #125 context
         },
     )
     return self.process_with_node(node)
Esempio n. 11
0
 def save_ml_model(self, options: Optional[dict] = None):
     """Wrap this graph in a `save_ml_model` node and return it as an MlModel."""
     arguments = {"data": self, "options": options or {}}
     node = PGNode(process_id="save_ml_model", arguments=arguments)
     return MlModel(graph=node, connection=self._connection)
def test_pgnode_to_dict_namespace():
    """A non-default namespace is included in the to_dict() serialization."""
    node = PGNode(process_id="load_collection", arguments={"collection_id": "S2"}, namespace="bar")
    expected = {
        "process_id": "load_collection",
        "namespace": "bar",
        "arguments": {"collection_id": "S2"},
    }
    assert node.to_dict() == expected
def test_reduce_dimension_binary(con100):
    """reduce_dimension with an overridden process_id emits that id in the flat graph."""
    cube = con100.load_collection("S2")
    reducer = PGNode(
        process_id="add",
        arguments={"x": {"from_parameter": "x"}, "y": {"from_parameter": "y"}},
    )
    reduced = cube.reduce_dimension(dimension="bands", reducer=reducer, process_id="reduce_dimension_binary")
    expected = {
        'loadcollection1': {
            'process_id': 'load_collection',
            'arguments': {'id': 'S2', 'spatial_extent': None, 'temporal_extent': None},
        },
        'reducedimensionbinary1': {
            'process_id': 'reduce_dimension_binary',
            'arguments': {
                'data': {'from_node': 'loadcollection1'},
                'dimension': 'bands',
                'reducer': {'process_graph': {
                    'add1': {
                        'process_id': 'add',
                        'arguments': {'x': {'from_parameter': 'x'}, 'y': {'from_parameter': 'y'}},
                        'result': True,
                    },
                }},
            },
            'result': True,
        },
    }
    assert reduced.graph == expected
Esempio n. 14
0
 def merge(self, other: 'DataCube', overlap_resolver: Union[str, PGNode] = None) -> 'DataCube':
     """Merge this cube with ``other``, optionally resolving overlapping values with a process."""
     if overlap_resolver and isinstance(overlap_resolver, str):
         # A bare process id becomes a binary callback on parameters x and y.
         overlap_resolver = PGNode(
             process_id=overlap_resolver,
             arguments={"data": [{"from_parameter": "x"}, {"from_parameter": "y"}]}
         )
     arguments = {
         'cube1': {'from_node': self._pg},
         'cube2': {'from_node': other._pg},
     }
     if overlap_resolver:
         arguments["overlap_resolver"] = {"process_graph": overlap_resolver}
     # TODO #125 context
     # TODO: set metadata of reduced cube?
     merge_node = PGNode(process_id="merge_cubes", arguments=arguments)
     return self.process_with_node(merge_node)
Esempio n. 15
0
def test_reduce_dimension_max_pgnode(con100):
    """Reducing the bands dimension with a prebuilt `max` PGNode matches the reference graph."""
    cube = con100.load_collection("S2")
    reducer = PGNode("max", data={"from_parameter": "data"})
    result = cube.reduce_dimension(reducer=reducer, dimension="bands")
    assert result.graph == load_json_resource('data/1.0.0/reduce_dimension_max.json')
Esempio n. 16
0
def test_apply_neighborhood_trim_pgnode(con100):
    """apply_neighborhood with a `trim_cube` PGNode callback matches the reference graph."""
    cube = con100.load_collection("S2")
    size = [
        {'dimension': 'x', 'value': 128, 'unit': 'px'},
        {'dimension': 'y', 'value': 128, 'unit': 'px'},
    ]
    result = cube.apply_neighborhood(
        process=PGNode("trim_cube", data={"from_parameter": "data"}),
        size=size,
    )
    assert result.graph == load_json_resource('data/1.0.0/apply_neighborhood_trim.json')
Esempio n. 17
0
    def process(cls,
                process_id: str,
                arguments: dict = None,
                namespace: Union[str, None] = None,
                **kwargs):
        """
        Apply process, using given arguments

        :param process_id: process id of the process.
        :param arguments: argument dictionary for the process.
        :param namespace: process namespace (only necessary to specify for non-predefined or non-user-defined processes)
        :return: new ProcessBuilder instance
        """
        arguments = {**(arguments or {}), **kwargs}
        # Unwrap ProcessBuilderBase values to their underlying PGNode, also inside list
        # arguments, and drop UNSET values in the same pass.
        # Fixed: build new lists instead of mutating the caller's list objects in place
        # (the original wrote converted elements back into the list passed by the caller).
        converted = {}
        for arg, value in arguments.items():
            if value is UNSET:
                continue
            if isinstance(value, ProcessBuilderBase):
                value = value.pgnode
            elif isinstance(value, list):
                value = [v.pgnode if isinstance(v, ProcessBuilderBase) else v for v in value]
            converted[arg] = value
        return cls(
            PGNode(process_id=process_id,
                   arguments=converted,
                   namespace=namespace))
Esempio n. 18
0
def test_merge_cubes_max_pgnode(con100):
    """merge_cubes with a `max` overlap resolver matches the reference graph."""
    cube = con100.load_collection("S2")
    mask = con100.load_collection("MASK")
    resolver = PGNode("max", data=[{"from_parameter": "x"}, {"from_parameter": "y"}])
    merged = cube.merge_cubes(other=mask, overlap_resolver=resolver)
    assert merged.flat_graph() == load_json_resource('data/1.0.0/merge_cubes_max.json')
def test_apply_absolute_pgnode(con100):
    """apply with an `absolute` PGNode callback matches the reference graph."""
    cube = con100.load_collection("S2")
    callback = PGNode(process_id="absolute", arguments={"x": {"from_parameter": "x"}})
    result = cube.apply(callback)
    assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json')
Esempio n. 20
0
def test_merge_cubes_add_pgnode(con100):
    """merge_cubes with an `add` overlap resolver matches the reference graph."""
    cube = con100.load_collection("S2")
    mask = con100.load_collection("MASK")
    resolver = PGNode("add", x={"from_parameter": "x"}, y={"from_parameter": "y"})
    merged = cube.merge_cubes(other=mask, overlap_resolver=resolver)
    assert merged.graph == load_json_resource('data/1.0.0/merge_cubes_add.json')
def test_build_and_flatten_simple():
    """A single argument-less node flattens to one entry marked as the result."""
    expected = {
        "foo1": {"process_id": "foo", "arguments": {}, "result": True},
    }
    assert PGNode("foo").flatten() == expected
def test_pgnode_to_dict():
    """Basic to_dict() serialization of a single node."""
    node = PGNode(process_id="load_collection", arguments={"collection_id": "S2"})
    expected = {
        "process_id": "load_collection",
        "arguments": {"collection_id": "S2"},
    }
    assert node.to_dict() == expected
 def between(min, max) -> PGNode:
     """Build a `between` predicate node over the `value` parameter."""
     arguments = {
         "x": {"from_parameter": "value"},
         "min": min,
         "max": max,
     }
     return PGNode(process_id="between", arguments=arguments)
 def eq(value, case_sensitive=True) -> PGNode:
     """Build an `eq` predicate node comparing the `value` parameter to the given value."""
     arguments = {
         "x": {"from_parameter": "value"},
         "y": value,
         "case_sensitive": case_sensitive,
     }
     return PGNode(process_id="eq", arguments=arguments)
def test_filter_spatial_callbak(con100):
    """
    Experiment test showing how to introduce a callback for preprocessing process arguments
    https://github.com/Open-EO/openeo-processes/issues/156
    @param con100:
    @return:
    """
    collection = con100.load_collection("S2")

    feature_collection = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [125.6, 10.1]},
        }],
    }
    callback = PGNode(process_id="run_udf", arguments={
        "data": feature_collection,
        "runtime": "Python",
        "udf": "def transform_point_into_bbox(data:UdfData): blabla"
    })

    filtered = collection.process("filter_spatial", {
        "data": collection._pg,
        "geometries": callback,
    })

    expected = {
        'filterspatial1': {
            'arguments': {
                'data': {'from_node': 'loadcollection1'},
                'geometries': {'from_node': 'runudf1'},
            },
            'process_id': 'filter_spatial',
            'result': True,
        },
        'loadcollection1': {
            'arguments': {
                'id': 'S2',
                'spatial_extent': None,
                'temporal_extent': None,
            },
            'process_id': 'load_collection',
        },
        'runudf1': {
            'arguments': {
                'data': {
                    'features': [{'geometry': {'coordinates': [125.6, 10.1], 'type': 'Point'}, 'type': 'Feature'}],
                    'type': 'FeatureCollection',
                },
                'runtime': 'Python',
                'udf': 'def transform_point_into_bbox(data:UdfData): blabla',
            },
            'process_id': 'run_udf',
        },
    }
    assert filtered.graph == expected
Esempio n. 26
0
 def _bandmath_operator_binary_scalar(self, operator: str, other: Union[int, float], reverse=False) -> 'DataCube':
     """Band math binary operator with scalar value (int or float) as right hand side argument"""
     node = self._get_bandmath_node()
     left = {'from_node': node.reducer_process_graph()}
     right = other
     if reverse:
         left, right = right, left
     new_reducer = PGNode(operator, x=left, y=right)
     return self.process_with_node(node.clone_with_new_reducer(new_reducer))
Esempio n. 27
0
def test_build_and_flatten_namespace():
    """flat_graph() keeps the node's namespace in the flattened entry."""
    expected = {
        "foo1": {
            "process_id": "foo",
            "namespace": "bar",
            "arguments": {},
            "result": True,
        },
    }
    assert PGNode("foo", namespace="bar").flat_graph() == expected
Esempio n. 28
0
 def power(self, other, reverse=False):
     """
     Band math ``power`` operator: rebuild the reducer as `power(base, p)`.

     :param other: right hand side operand (exponent, or base when `reverse` is set).
     :param reverse: when truthy, swap operands so `other` becomes the base.
         Fixed: now defaults to False, consistent with `_bandmath_operator_binary_scalar`.
     """
     node = self._get_bandmath_node()
     base = {'from_node': node.reducer_process_graph()}
     exponent = other
     if reverse:
         base, exponent = exponent, base
     # `power` takes `base`/`p` arguments rather than the usual `x`/`y`.
     new_reducer = PGNode("power", base=base, p=exponent)
     return self.process_with_node(node.clone_with_new_reducer(new_reducer))
    def test_minimal(self):
        """A single-node flat graph round-trips through unflatten/flat_graph."""
        flat_graph = {
            "add12": {"process_id": "add", "arguments": {"x": 1, "y": 2}, "result": True},
        }
        node: PGNode = PGNodeGraphUnflattener.unflatten(flat_graph)
        assert node.process_id == "add"
        assert node.arguments == {"x": 1, "y": 2}
        assert node.namespace is None
        assert node == PGNode("add", {"x": 1, "y": 2})

        # Round-tripping back to flat form preserves the node payloads.
        assert list(node.flat_graph().values()) == list(flat_graph.values())
0
 def _build_pgnode(self, process_id: str, arguments: dict, namespace: Optional[str], **kwargs) -> PGNode:
     """
     Helper to build a PGNode from given argument dict and/or kwargs,
     and possibly resolving the `THIS` reference.
     """
     merged = {**(arguments or {}), **kwargs}
     # Substitute the THIS sentinel with the current object.
     # TODO: also necessary to traverse lists/dictionaries?
     resolved = {k: (self if v is THIS else v) for k, v in merged.items()}
     return PGNode(process_id=process_id, arguments=resolved, namespace=namespace)