def test_reduce_node_process_graph():
    reduce_pg = PGNode("array_element", data={"from_argument": "data"}, index=3)
    a = PGNode("load_collection", collection_id="S2")
    graph = ReduceNode(a, reducer=reduce_pg, dimension="time")
    assert graph.to_dict() == {
        'process_id': 'reduce_dimension',
        'arguments': {
            'data': {'from_node': {
                'process_id': 'load_collection',
                'arguments': {'collection_id': 'S2'},
            }},
            'reducer': {"process_graph": {
                "process_id": "array_element",
                "arguments": {"data": {"from_argument": "data"}, "index": 3},
            }},
            'dimension': 'time',
        },
    }
def test_pgnode_to_dict_subprocess_graphs():
    load_collection = PGNode("load_collection", collection_id="S2")
    band2 = PGNode("array_element", data={"from_argument": "data"}, index=2)
    band2_plus3 = PGNode("add", x={"from_node": band2}, y=2)
    graph = ReduceNode(data=load_collection, reducer=band2_plus3, dimension='bands')
    assert graph.to_dict() == {
        'process_id': 'reduce_dimension',
        'arguments': {
            'data': {'from_node': {
                'process_id': 'load_collection',
                'arguments': {'collection_id': 'S2'},
            }},
            'dimension': 'bands',
            'reducer': {'process_graph': {
                'process_id': 'add',
                'arguments': {
                    'x': {"from_node": {
                        'process_id': 'array_element',
                        'arguments': {'data': {'from_argument': 'data'}, 'index': 2},
                    }},
                    'y': 2,
                },
            }},
        },
    }
    assert graph.flat_graph() == {
        "loadcollection1": {
            'process_id': 'load_collection',
            'arguments': {'collection_id': 'S2'},
        },
        "reducedimension1": {
            'process_id': 'reduce_dimension',
            'arguments': {
                'data': {'from_node': "loadcollection1"},
                'dimension': 'bands',
                'reducer': {'process_graph': {
                    "arrayelement1": {
                        'process_id': 'array_element',
                        'arguments': {'data': {'from_argument': 'data'}, 'index': 2},
                    },
                    "add1": {
                        'process_id': 'add',
                        'arguments': {
                            'x': {"from_node": "arrayelement1"},
                            'y': 2,
                        },
                        'result': True,
                    },
                }},
            },
            "result": True,
        },
    }
def test_reduce_node():
    a = PGNode("load_collection", collection_id="S2")
    graph = ReduceNode(a, reducer="mean", dimension="time")
    assert graph.to_dict() == {
        'process_id': 'reduce_dimension',
        'arguments': {
            'data': {'from_node': {
                'process_id': 'load_collection',
                'arguments': {'collection_id': 'S2'},
            }},
            'reducer': 'mean',
            'dimension': 'time',
        },
    }
def reduce_dimension(
        self, dimension: str, reducer: Union[PGNode, str],
        process_id="reduce_dimension", band_math_mode: bool = False
) -> 'DataCube':
    """Add a reduce process with the given reducer callback along the given dimension."""
    # TODO: check if dimension is valid according to metadata? #116
    # TODO: #125 use/test case for `reduce_dimension_binary`?
    if isinstance(reducer, str):
        # Assume the given reducer is a simple predefined reduce process_id
        reducer = PGNode(process_id=reducer, arguments={"data": {"from_parameter": "data"}})
    return self.process_with_node(ReduceNode(
        process_id=process_id,
        data=self._pg,
        reducer=reducer,
        dimension=self.metadata.assert_valid_dimension(dimension),
        # TODO #123 is it (still) necessary to make "band" math a special case?
        band_math_mode=band_math_mode,
        # TODO: add `context` argument #125
    ), metadata=self.metadata.reduce_dimension(dimension_name=dimension))
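# Hedged sketch (not part of the original suite): assuming PGNode and ReduceNode
# behave as in the tests above, the string-reducer branch of `reduce_dimension`
# builds a callback PGNode equivalent to the one constructed explicitly here,
# and the reducer ends up nested under "process_graph" in the serialized node.
def test_reduce_node_string_reducer_expansion_sketch():
    data = PGNode("load_collection", collection_id="S2")
    # Explicit counterpart of what reduce_dimension() builds for reducer="mean"
    reducer = PGNode("mean", arguments={"data": {"from_parameter": "data"}})
    node = ReduceNode(data, reducer=reducer, dimension="time")
    result = node.to_dict()
    assert result["arguments"]["reducer"]["process_graph"]["process_id"] == "mean"
    assert result["arguments"]["dimension"] == "time"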