Example #1
def test_extract_result_metadata_aggregate_spatial():
    tracer = DryRunDataTracer()
    cube = tracer.load_collection(
        collection_id="Sentinel2",
        arguments={"temporal_extent": ["2020-02-02", "2020-03-03"]},
    )
    cube = cube.filter_bbox(west=4, south=51, east=5, north=52)

    geometries = shape(read_json(get_test_data_file("multipolygon01.geojson")))
    cube = cube.aggregate_spatial(geometries=geometries, reducer="mean")

    metadata = extract_result_metadata(tracer)
    expected = {
        "bbox": (5.0, 5.0, 45.0, 40.0),
        "geometry": {
            'type':
            'MultiPolygon',
            'coordinates':
            [(((30.0, 20.0), (45.0, 40.0), (10.0, 40.0), (30.0, 20.0)), ),
             (((15.0, 5.0), (40.0, 10.0), (10.0, 20.0), (5.0, 10.0),
               (15.0, 5.0)), )],
        },
        "area": {
            "value": approx(6797677574525.158, 0.01),
            "unit": "square meter"
        },
        "start_datetime": "2020-02-02T00:00:00Z",
        "end_datetime": "2020-03-03T00:00:00Z",
        "links": []
    }
    assert metadata == expected
Example #2
def handle_cli(argv=None) -> Tuple[dict, argparse.Namespace]:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "process_graph",
        nargs="?",
        default=None,
        help="Process graph to evaluate. Can be given as path to JSON file or directly as JSON string."
             " If nothing is given, the process graph should be given on standard input."
    )
    parser.add_argument("-o",
                        "--output",
                        default=None,
                        help="Output file name.")
    parser.add_argument(
        "-e",
        "--edit",
        metavar="PATH=VALUE",
        action="append",
        default=[],
        help="Preprocess the process graph before executing it."
        " Specify as `path=value`, with `path` the period separated path in JSON tree"
        " and `value` the new/updated value (in JSON format)."
        " For example, to change the 'west' border of load_collection bbox:"
        " `--edit loadcollection.arguments.spatial_extent.west=3.3`")
    parser.add_argument("--api-version",
                        default="1.0.0",
                        help="openEO API version to evaluate against.")

    args = parser.parse_args(argv)

    if args.process_graph is None:
        # Read process graph from standard input
        _log.info("Reading process graph from STDIN ...")
        process_graph = json.load(sys.stdin)
    elif args.process_graph.strip().startswith("{"):
        # Process graph is given directly as JSON blob
        process_graph = json.loads(args.process_graph)
    elif args.process_graph.lower().endswith(".json"):
        # Process graph is given as JSON file
        process_graph = read_json(args.process_graph)
    else:
        raise ValueError(args.process_graph)

    if len(process_graph) == 1 and set(process_graph.keys()) == {"process_graph"}:
        process_graph = process_graph["process_graph"]

    # Edit process graph in-place
    for path, value in (e.split("=", 1) for e in args.edit):
        steps = path.split(".")
        cursor = process_graph
        for step in steps[:-1]:
            if step not in cursor:
                cursor[step] = {}
            cursor = cursor[step]
        cursor[steps[-1]] = json.loads(value)

    return process_graph, args
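
A minimal, hypothetical usage sketch of this function follows; the process graph content and the edit path below are illustrative assumptions, not taken from the project:

# Hypothetical usage sketch: parse a graph given directly as a JSON string
# and patch a single value through the --edit mechanism described above.
graph, args = handle_cli([
    '{"lc": {"process_id": "load_collection", "arguments": {"spatial_extent": {"west": 1.0}}}}',
    "--edit", "lc.arguments.spatial_extent.west=3.3",
])
assert graph["lc"]["arguments"]["spatial_extent"]["west"] == 3.3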
Example #3
def test_collect_unique_process_ids():
    process_graph = read_json("tests/graphs/process_ids.json")["process_graph"]

    collector = CollectUniqueProcessIdsVisitor()
    collector.accept_process_graph(process_graph)

    assert collector.process_ids == {
        'load_collection', 'ndvi', 'reduce_dimension', 'array_element', 'lt',
        'gt', 'or', 'mask', 'filter_bands', 'save_result'
    }
Example #4
def test_get_layer_catalog_opensearch_enrich_creodias(requests_mock):
    with mock.patch("openeogeotrellis.layercatalog.ConfigParams") as ConfigParams:
        ConfigParams.return_value.layer_catalog_metadata_files = [
            "tests/data/layercatalog01.json",
            "tests/data/layercatalog04_creodias.json"
        ]
        collections_response = read_json("tests/data/collections_creodias01.json")
        requests_mock.get(
            "https://finder.creodias.test/resto/collections.json",
            json=collections_response)

        all_metadata = get_layer_catalog(opensearch_enrich=True).get_all_metadata()

    assert all_metadata == [{
        "id": "WUQ",
        "title": "Sentinel-1 Collection",
        "description": "Sentinel-1 Collection",
        "keywords": ["esa", "sentinel", "sentinel1", "s1", "radar"],
        "_vito": {
            "data_source": {
                "opensearch_collection_id": "Sentinel1",
                "opensearch_endpoint": "https://finder.creodias.test"
            }
        },
        "cube:dimensions": {
            "t": {
                "type": "temporal"
            },
            "x": {
                "axis": "x",
                "type": "spatial"
            },
            "y": {
                "axis": "y",
                "type": "spatial"
            }
        },
    }, {
        "id": "FOO",
        "license": "mit"
    }, {
        "id": "BAR",
        "description": "bar",
        "links": ["example.com/bar"]
    }, {
        "id": "BZZ"
    }]
Example #5
def load_predefined_spec(self, name: str) -> dict:
    """Get predefined process specification (dict) based on process name."""
    try:
        spec = read_json(self._processes_spec_root / '{n}.json'.format(n=name))
        # Health check: required fields for predefined processes
        assert all(k in spec for k in ['id', 'description', 'parameters', 'returns'])
        return spec
    except Exception:
        raise ProcessRegistryException(
            "Failed to load predefined spec of process {n!r}".format(n=name)
        )
Example #6
def get_layer_catalog(
    service_registry: AbstractServiceRegistry = None
) -> GeoPySparkLayerCatalog:
    """
    Get layer catalog (from JSON files)
    """
    catalog_files = ConfigParams().layer_catalog_metadata_files
    logger.info(
        "Reading layer catalog metadata from {f!r}".format(f=catalog_files[0]))
    metadata = read_json(catalog_files[0])
    if len(catalog_files) > 1:
        # Merge metadata recursively
        metadata = {l["id"]: l for l in metadata}
        for path in catalog_files[1:]:
            logger.info(
                "Updating layer catalog metadata from {f!r}".format(f=path))
            updates = {l["id"]: l for l in read_json(path)}
            metadata = dict_merge_recursive(metadata, updates, overwrite=True)
        metadata = list(metadata.values())

    return GeoPySparkLayerCatalog(
        all_metadata=metadata,
        service_registry=service_registry or InMemoryServiceRegistry()
    )
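
The merging step above relies on a dict_merge_recursive helper. For reference, a minimal sketch of such a recursive merge (an assumption for illustration, not the project's actual implementation) could look like this:

def dict_merge_recursive(a: dict, b: dict, overwrite: bool = False) -> dict:
    # Sketch only: recursively merge dict b into dict a, returning a new dict.
    result = dict(a)
    for key, value in b.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            # Both sides are dicts: merge them key by key.
            result[key] = dict_merge_recursive(result[key], value, overwrite=overwrite)
        elif key not in result or overwrite:
            result[key] = value
    return result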
Example #7
def test_run_job(evaluate, tmp_path):
    cube_mock = MagicMock()
    asset_meta = {
        "openEO01-01.tif": {
            "href": "tmp/openEO01-01.tif",
            "roles": "data"
        },
        "openEO01-05.tif": {
            "href": "tmp/openEO01-05.tif",
            "roles": "data"
        }
    }
    cube_mock.write_assets.return_value = asset_meta
    evaluate.return_value = ImageCollectionResult(cube=cube_mock,
                                                  format="GTiff",
                                                  options={"multidate": True})
    run_job(
        job_specification={
            'process_graph': {
                'nop': {'process_id': 'discard_result', 'result': True}
            }
        },
        output_file=tmp_path / "out",
        metadata_file=tmp_path / "metadata.json",
        api_version="1.0.0",
        job_dir="./",
        dependencies={},
        user_id="jenkins",
    )
    cube_mock.write_assets.assert_called_once()
    metadata_result = read_json(tmp_path / "metadata.json")
    assert {
        'assets': asset_meta,
        'bbox': None,
        'end_datetime': None,
        'epsg': None,
        'geometry': None,
        'area': None,
        'unique_process_ids': ['discard_result'],
        'instruments': [],
        'links': [],
        'processing:facility': 'VITO - SPARK',
        'processing:software': 'openeo-geotrellis-' + __version__,
        'start_datetime': None
    } == metadata_result
Example #8
def from_json_file(cls, filename: Union[str, Path] = "layercatalog.json", *args, **kwargs):
    """Factory to read catalog from a JSON file"""
    return cls(read_json(filename), *args, **kwargs)
Example #9
def read_spec(path: Union[str, Path]) -> dict:
    """Read specification JSON file (given by relative path)"""
    return read_json(SPECS_ROOT / path)
Example #10
def read_catalog_file(catalog_file) -> Dict[str, dict]:
    return {coll["id"]: coll for coll in read_json(catalog_file)}
Example #11
def test_get_layer_catalog_opensearch_enrich_oscars(requests_mock):
    test_root = Path(__file__).parent / "data"
    with mock.patch("openeogeotrellis.layercatalog.ConfigParams") as ConfigParams:
        ConfigParams.return_value.layer_catalog_metadata_files = [
            test_root / "layercatalog01.json",
            test_root / "layercatalog02.json",
            test_root / "layercatalog03_oscars.json"
        ]

        collections_response = read_json(test_root / "collections_oscars01.json")
        requests_mock.get(
            "https://services.terrascope.test/catalogue/collections",
            json=collections_response)

        all_metadata = get_layer_catalog(opensearch_enrich=True).get_all_metadata()

    assert all_metadata == [{
        "id": "XIP",
        "_vito": {
            "data_source": {
                "opensearch_endpoint": "https://services.terrascope.test/catalogue",
                "opensearch_collection_id": "urn:eop:VITO:CGS_S1_GRD_SIGMA0_L1"
            }
        },
        "title": "Sentinel 1 GRD Sigma0 product, VH, VV and angle.",
        "description":
            "The Sigma0 product describes how much of the radar signal that was sent out by Sentinel-1 "
            "is reflected back to the sensor...",
        "extent": {
            "spatial": {
                "bbox": [[-1.05893, 47.66031, 11.6781, 53.67487]]
            },
            "temporal": {
                "interval": [["2014-10-23", None]]
            }
        },
        'keywords': [
            'VITO', 'C-SAR', 'Orthoimagery', 'SENTINEL-1A', 'SENTINEL-1',
            'SENTINEL-1B', 'RADAR BACKSCATTER', 'RADAR'
        ],
        "links": [{
            "rel": "alternate",
            "href":
            "https://docs.terrascope.be/#/DataProducts/Sentinel-1/ProductsOverview",
            "title": "Online User Documentation"
        }, {
            "rel":
            "alternate",
            "href":
            "https://www.vito-eodata.be/collections/srv/eng/main.home?uuid=urn:eop:VITO:CGS_S1_GRD_SIGMA0_L1"
        }, {
            "rel": "alternate",
            "href":
            "https://services.terrascope.be/catalogue/description.geojson?collection=urn:eop:VITO:CGS_S1_GRD_SIGMA0_L1",
            "title": "OpenSearch entry point"
        }],
        "cube:dimensions": {
            "x": {
                "type": "spatial",
                "axis": "x"
            },
            "y": {
                "type": "spatial",
                "axis": "y"
            },
            "t": {
                "type": "temporal"
            },
            "bands": {
                "type": "bands",
                "values": ["VH"]
            }
        },
        "summaries": {
            "eo:bands": [{
                "description":
                "Calibrated radar backscattering coefficient (unitless), describing the returned radar signal strength in the cross-polarized channel (V transmit, H receive). Values are stored as floats.",
                "type": "VH",
                "title": "VH",
                "resolution": 10,
                "bitPerValue": 32,
                "name": "VH"
            }],
            "platform": [],
            "instruments": ["MSI"]
        },
        "assets": {}
    }, {
        "id": "FOO",
        "license": "apache",
        "links": ["example.com/foo"]
    }, {
        "id": "BAR",
        "description": "The BAR layer",
        "links": ["example.com/bar"]
    }, {
        "id": "BZZ"
    }, {
        "id": "QUU"
    }]
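
All of the snippets above funnel through the same read_json utility. As a closing reference, here is a minimal sketch of what such a helper might look like, assuming it is a thin wrapper around json.load (which these snippets do not confirm):

import json
from pathlib import Path
from typing import Union


def read_json(path: Union[str, Path]) -> Union[dict, list]:
    # Sketch only: read and parse a JSON document from the given file path.
    with Path(path).open("r", encoding="utf-8") as f:
        return json.load(f)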