Example #1
    def region(self, request):
        """Return region shape file."""
        extensions = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        region_vector = request.inputs.pop("region_vector")[0].file
        return single_file_check(
            archive_sniffer(region_vector,
                            working_dir=self.workdir,
                            extensions=extensions))
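Every handler below validates its vector input with the same single_file_check(archive_sniffer(...)) idiom. The real helpers ship with the raven/ravenpy utilities; the following is a minimal self-contained sketch of the contract they appear to implement (unpack an archive into the working directory, filter by extension, insist on exactly one match). The function bodies here are assumptions; only the call signatures come from the examples themselves.

import zipfile
from pathlib import Path


def archive_sniffer(path, working_dir, extensions):
    """Sketch: unpack zip archives into working_dir and return the members
    whose suffix is in extensions; pass plain files straight through."""
    path = Path(path)
    if path.suffix == ".zip":
        with zipfile.ZipFile(path) as z:
            z.extractall(working_dir)
        candidates = [p for p in Path(working_dir).rglob("*") if p.suffix in extensions]
    else:
        candidates = [path] if path.suffix in extensions else []
    return [str(p) for p in candidates]


def single_file_check(paths):
    """Sketch: insist that exactly one candidate file was found."""
    if len(paths) != 1:
        raise ValueError(f"Expected exactly one file, found {len(paths)}: {paths}")
    return paths[0]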
Example #2
    def _handler(self, request, response):

        shape_url = request.inputs["shape"][0].file
        projected_crs = request.inputs["projected_crs"][0].data

        extensions = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=extensions))
        shape_crs = crs_sniffer(vector_file)

        try:
            projection = CRS.from_epsg(projected_crs)
            if projection.is_geographic:
                msg = (
                    f"Desired CRS {projection.to_epsg()} is geographic. "
                    "Areal analysis values will be in decimal-degree units.")
                LOGGER.warning(msg)
        except Exception as e:
            msg = f"{e}: Failed to parse CRS definition. Exiting."
            LOGGER.error(msg)
            raise Exception(msg) from e

        # TODO: It would be good to one day refactor this to make use of RavenPy utils and gis utilities
        properties = list()
        try:
            for i, layer_name in enumerate(fiona.listlayers(vector_file)):
                with fiona.open(vector_file, "r", crs=shape_crs,
                                layer=i) as src:
                    for feature in src:
                        geom = shape(feature["geometry"])

                        multipolygon_check(geom)

                        transformed = geom_transform(geom,
                                                     source_crs=shape_crs,
                                                     target_crs=projection)
                        prop = {"id": feature["id"]}
                        prop.update(feature["properties"])
                        prop.update(geom_prop(transformed))

                        # Recompute the centroid location using the original projection
                        prop["centroid"] = geom_prop(geom)["centroid"]

                        properties.append(prop)

        except Exception as e:
            msg = f"{e}: Failed to extract features from shape {vector_file}."
            LOGGER.error(msg)
            raise Exception(msg) from e

        response.outputs["properties"].data = json.dumps(properties)

        return response
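The is_geographic guard above is easy to sanity-check with pyproj directly (CRS here is presumably pyproj's CRS class, given from_epsg, to_epsg and is_geographic); the EPSG codes below are only illustrative.

from pyproj import CRS

print(CRS.from_epsg(4326).is_geographic)   # True  -- lat/lon in degrees; areas would come out in square degrees
print(CRS.from_epsg(32618).is_geographic)  # False -- UTM zone 18N in metres; areal stats are meaningful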
Example #3
    def _handler(self, request, response):

        shape_url = request.inputs["shape"][0].file
        simple_categories = request.inputs["simple_categories"][0].data
        band = request.inputs["band"][0].data
        touches = request.inputs["select_all_touching"][0].data

        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        vec_crs = crs_sniffer(vector_file)

        response.update_status("Accessed vector", status_percentage=5)

        # For raster files using the UNFAO Land Cover Classification System (19 types)
        if "raster" in request.inputs:
            rasters = [".tiff", ".tif"]
            raster_url = request.inputs["raster"][0].file
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
            ras_crs = crs_sniffer(raster_file)

            if vec_crs != ras_crs:
                msg = f"CRS for files {vector_file} and {raster_file} are not the same. Reprojecting..."
                LOGGER.warning(msg)

                # Reproject full vector to preserve feature attributes
                projected = tempfile.NamedTemporaryFile(
                    prefix="reprojected_",
                    suffix=".json",
                    delete=False,
                    dir=self.workdir,
                ).name
                generic_vector_reproject(vector_file,
                                         projected,
                                         source_crs=vec_crs,
                                         target_crs=ras_crs)
            else:
                projected = vector_file

        else:
            raster_url = None
            # using the NALCMS data from GeoServer
            projected = tempfile.NamedTemporaryFile(prefix="reprojected_",
                                                    suffix=".json",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_vector_reproject(vector_file,
                                     projected,
                                     source_crs=vec_crs,
                                     target_crs=NALCMS_PROJ4)
            raster_file = gather_dem_tile(
                vector_file,
                self.workdir,
                geographic=False,
                raster="public:CEC_NALCMS_LandUse_2010",
            )

        data_type = raster_datatype_sniffer(raster_file)
        response.update_status("Accessed raster", status_percentage=10)

        categories = SIMPLE_CATEGORIES if simple_categories else TRUE_CATEGORIES
        summary_stats = SUMMARY_ZONAL_STATS

        try:

            # Use zonalstats to produce a GeoJSON
            stats = zonal_stats(
                projected,
                raster_file,
                stats=summary_stats,
                band=band,
                categorical=True,
                all_touched=touches,
                geojson_out=True,
                raster_out=False,
            )

            land_use = list()
            for stat in stats:
                lu = defaultdict(lambda: 0)
                prop = stat["properties"]

                # Rename/aggregate land-use categories
                for k, v in categories.items():
                    lu[v] += prop.get(k, 0)

                prop.update(lu)
                land_use.append(lu)
                # prop['mini_raster_array'] = pickle.dumps(prop['mini_raster_array'], protocol=0).decode()

            # Use zonalstats to produce sets of raster grids
            raster_subset = zonal_stats(
                projected,
                raster_file,
                stats=summary_stats,
                band=band,
                categorical=True,
                all_touched=touches,
                geojson_out=False,
                raster_out=True,
            )

            raster_out = zonalstats_raster_file(
                raster_subset,
                working_dir=self.workdir,
                data_type=data_type,
                crs=NALCMS_PROJ4,
                zip_archive=False,
            )

            ml = MetaLink4(
                "rasters_out",
                "Metalink to series of GeoTIFF raster files",
                workdir=self.workdir,
            )
            for r in raster_out:
                mf = MetaFile(Path(r).name,
                              "Raster subset",
                              fmt=FORMATS.GEOTIFF)
                mf.file = r
                ml.append(mf)

            feature_collect = {"type": "FeatureCollection", "features": stats}
            response.outputs["features"].data = json.dumps(feature_collect)
            response.outputs["statistics"].data = json.dumps(land_use)
            response.outputs["raster"].data = ml.xml

        except Exception as e:
            msg = f"Failed to perform raster subset using {shape_url}{f' and {raster_url} ' if raster_url else ''}: {e}"
            LOGGER.error(msg)
            raise Exception(msg) from e

        return response
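The rename/aggregate loop is worth isolating: several raw land-cover codes can map to the same simplified label, and the defaultdict sums their pixel counts. A self-contained sketch with made-up codes and counts:

from collections import defaultdict

# Hypothetical mapping from raw land-cover codes to simplified labels,
# plus one feature's per-code pixel counts, mirroring the handler's loop.
categories = {"1": "Forest", "2": "Forest", "17": "Urban"}
prop = {"1": 40, "2": 10, "17": 5}

lu = defaultdict(lambda: 0)
for k, v in categories.items():
    lu[v] += prop.get(k, 0)

print(dict(lu))  # {'Forest': 50, 'Urban': 5}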
Example #4
    def _handler(self, request, response):

        shape_url = request.inputs["shape"][0].file
        simple_categories = request.inputs["simple_categories"][0].data
        band = request.inputs["band"][0].data
        touches = request.inputs["select_all_touching"][0].data

        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        vec_crs = crs_sniffer(vector_file)

        response.update_status("Accessed vector", status_percentage=5)

        # For raster files using the UNFAO Land Cover Classification System (19 types)
        if "raster" in request.inputs:
            rasters = [".tiff", ".tif"]
            raster_url = request.inputs["raster"][0].file
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
            ras_crs = crs_sniffer(raster_file)

            if vec_crs != ras_crs:
                msg = f"CRS for files {vector_file} and {raster_file} are not the same. Reprojecting..."
                LOGGER.warning(msg)

                # Reproject full vector to preserve feature attributes
                projected = tempfile.NamedTemporaryFile(
                    prefix="reprojected_",
                    suffix=".json",
                    delete=False,
                    dir=self.workdir,
                ).name
                generic_vector_reproject(vector_file,
                                         projected,
                                         source_crs=vec_crs,
                                         target_crs=ras_crs)
            else:
                projected = vector_file

        else:  # using the NALCMS data from GeoServer
            projected = tempfile.NamedTemporaryFile(prefix="reprojected_",
                                                    suffix=".json",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_vector_reproject(vector_file,
                                     projected,
                                     source_crs=vec_crs,
                                     target_crs=NALCMS_PROJ4)

            bbox = get_bbox(projected)
            raster_url = "public:CEC_NALCMS_LandUse_2010"
            raster_bytes = geoserver.get_raster_wcs(bbox,
                                                    geographic=False,
                                                    layer=raster_url)
            raster_file = tempfile.NamedTemporaryFile(prefix="wcs_",
                                                      suffix=".tiff",
                                                      delete=False,
                                                      dir=self.workdir).name
            with open(raster_file, "wb") as f:
                f.write(raster_bytes)

        response.update_status("Accessed raster", status_percentage=10)

        categories = SIMPLE_CATEGORIES if simple_categories else TRUE_CATEGORIES
        summary_stats = SUMMARY_ZONAL_STATS

        try:
            stats = zonal_stats(
                projected,
                raster_file,
                stats=summary_stats,
                band=band,
                categorical=True,
                all_touched=touches,
                geojson_out=True,
                raster_out=False,
            )

            land_use = list()
            for stat in stats:
                lu = defaultdict(lambda: 0)
                prop = stat["properties"]

                # Rename/aggregate land-use categories
                for k, v in categories.items():
                    lu[v] += prop.get(k, 0)

                prop.update(lu)
                land_use.append(lu)
                # prop['mini_raster_array'] = pickle.dumps(prop['mini_raster_array'], protocol=0).decode()

            feature_collect = {"type": "FeatureCollection", "features": stats}
            response.outputs["features"].data = json.dumps(feature_collect)
            response.outputs["statistics"].data = json.dumps(land_use)

        except Exception as e:
            msg = f"Failed to perform zonal statistics using {shape_url} and {raster_url}: {e}"
            LOGGER.error(msg)
            raise Exception(msg) from e

        return response
Example #5
    def _handler(self, request, response):

        level = 12  # request.inputs['level'][0].data
        lakes = True  # request.inputs['lakes'][0].data
        collect_upstream = request.inputs["aggregate_upstream"][0].data
        lon, lat = parse_lonlat(request.inputs["location"][0].data)

        bbox = (lon, lat, lon, lat)

        shape_url = tempfile.NamedTemporaryFile(
            prefix="hybas_", suffix=".gml", delete=False, dir=self.workdir
        ).name

        domain = geoserver.select_hybas_domain(bbox)
        hybas_gml = geoserver.get_hydrobasins_location_wfs(
            bbox, lakes=lakes, level=level, domain=domain
        )

        if isinstance(hybas_gml, str):
            write_flags = "w"
        else:
            write_flags = "wb"

        with open(shape_url, write_flags) as f:
            f.write(hybas_gml)

        response.update_status("Found downstream watershed", status_percentage=10)

        extensions = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        shp = single_file_check(
            archive_sniffer(shape_url, working_dir=self.workdir, extensions=extensions)
        )

        shape_crs = crs_sniffer(shp)

        # Find HYBAS_ID
        src = fiona.open(shp, "r", crs=shape_crs)
        feat = next(iter(src))
        hybas_id = feat["properties"]["HYBAS_ID"]
        gml_id = feat["properties"]["gml_id"]

        if collect_upstream:

            main_bas = feat["properties"]["MAIN_BAS"]

            if lakes is False or level != 12:
                raise InvalidParameterValue("Set lakes to True and level to 12.")

            # Collect features from GeoServer
            response.update_status("Collecting relevant features", status_percentage=70)

            region_url = geoserver.get_hydrobasins_attributes_wfs(
                attribute="MAIN_BAS",
                value=main_bas,
                lakes=lakes,
                level=level,
                domain=domain,
            )

            # Read table of relevant features sharing main basin
            df = gpd.read_file(region_url)

            # TODO: Load and keep this data in memory; Figure out how to better handle encoding and column names.
            # Identify upstream sub-basins and write to a new file
            up = geoserver.hydrobasins_upstream_ids(hybas_id, df)
            upfile = tempfile.NamedTemporaryFile(
                prefix="hybas_", suffix=".json", delete=False, dir=self.workdir
            ).name
            up.to_file(upfile, driver="GeoJSON")

            # Aggregate upstream features into a single geometry.
            gdf = gpd.read_file(upfile)
            agg = geoserver.hydrobasins_aggregate(gdf)

            # The aggregation returns a FeatureCollection with one feature. We select the first feature so that the
            # output is a Feature whether aggregate is True or False.
            afeat = json.loads(agg.to_json())["features"][0]
            response.outputs["feature"].data = json.dumps(afeat)
            response.outputs["upstream_ids"].data = json.dumps(up["id"].tolist())

        else:
            response.outputs["feature"].data = json.dumps(feat)
            response.outputs["upstream_ids"].data = json.dumps([gml_id])

        src.close()

        return response
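The str/bytes branching before the file write is a recurring WFS detail: the GML payload may arrive as text or as raw bytes depending on the client. The same logic condensed into a helper (a sketch; payload stands for whatever get_hydrobasins_location_wfs returned):

def write_payload(path, payload):
    """Write a WFS response to disk whether it arrived as str or bytes."""
    mode = "w" if isinstance(payload, str) else "wb"
    with open(path, mode) as f:
        f.write(payload)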
Example #6
    def _handler(self, request, response):

        # Process inputs
        # ---------------
        shape_url = request.inputs["shape"][0].file
        destination_crs = request.inputs["projected_crs"][0].data
        touches = request.inputs["select_all_touching"][0].data

        # Checks for valid CRS and that CRS is projected
        # -----------------------------------------------
        projection = CRS.from_user_input(destination_crs)
        if not projection.is_projected:
            msg = f"Destination CRS {projection.to_epsg()} is not projected. Terrain analysis values will not be valid."
            LOGGER.error(ValueError(msg))
            raise ValueError(msg)

        # Collect and process the shape
        # -----------------------------
        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        vec_crs = crs_sniffer(vector_file)

        # Check that boundaries within 60N and 60S
        boundary_check(vector_file)

        if "raster" in request.inputs:
            raster_url = request.inputs["raster"][0].file
            rasters = [".tiff", ".tif"]
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))

        else:
            # Assuming that the shape coordinates are in WGS84
            raster_file = gather_dem_tile(vector_file, self.workdir)

        ras_crs = crs_sniffer(raster_file)

        # Reproject raster
        # ----------------
        if ras_crs != projection.to_epsg():
            msg = f"CRS for {raster_file} is not {projection}. Reprojecting raster..."
            LOGGER.warning(msg)
            warped_fn = tempfile.NamedTemporaryFile(prefix="warped_",
                                                    suffix=".tiff",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_raster_warp(raster_file, warped_fn, projection)

        else:
            warped_fn = raster_file

        # Perform the terrain analysis
        # ----------------------------
        rpj = tempfile.NamedTemporaryFile(prefix="reproj_",
                                          suffix=".json",
                                          delete=False,
                                          dir=self.workdir).name
        generic_vector_reproject(vector_file,
                                 rpj,
                                 source_crs=vec_crs,
                                 target_crs=projection.to_epsg())
        with open(rpj) as src:
            geo = json.load(src)

        features = [sgeo.shape(feat["geometry"]) for feat in geo["features"]]
        union = ops.unary_union(features)

        clipped_fn = tempfile.NamedTemporaryFile(prefix="clipped_",
                                                 suffix=".tiff",
                                                 delete=False,
                                                 dir=self.workdir).name
        # Ensure that values for regions outside of clip are kept
        generic_raster_clip(
            raster=warped_fn,
            output=clipped_fn,
            geometry=union,
            touches=touches,
            fill_with_nodata=True,
            padded=True,
        )

        # Compute DEM properties for each feature.
        properties = []
        for feature in features:
            properties.append(
                dem_prop(clipped_fn, geom=feature, directory=self.workdir))
        properties.append(dem_prop(clipped_fn, directory=self.workdir))

        response.outputs["properties"].data = json.dumps(properties)
        response.outputs["dem"].file = clipped_fn

        return response
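The dissolve step (sgeo.shape plus ops.unary_union) is plain shapely and works on any GeoJSON-like features; here are the same two calls on made-up test squares:

from shapely import geometry as sgeo
from shapely import ops

# Two adjacent unit squares as GeoJSON-like dicts (made-up test data).
geo = {"features": [
    {"geometry": {"type": "Polygon",
                  "coordinates": [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]]}},
    {"geometry": {"type": "Polygon",
                  "coordinates": [[(1, 0), (2, 0), (2, 1), (1, 1), (1, 0)]]}},
]}

features = [sgeo.shape(feat["geometry"]) for feat in geo["features"]]
union = ops.unary_union(features)
print(union.area)  # 2.0 -- the two squares dissolve into a single polygon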
Example #7
    def _handler(self, request, response):

        shape_url = request.inputs["shape"][0].file
        band = request.inputs["band"][0].data
        touches = request.inputs["select_all_touching"][0].data

        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))

        if "raster" in request.inputs:
            raster_url = request.inputs["raster"][0].file
            rasters = [".tiff", ".tif"]
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
        else:
            raster_url = None
            # Assuming that the shape coordinates are in WGS84
            raster_file = gather_dem_tile(vector_file,
                                          self.workdir,
                                          geographic=True)

        vec_crs, ras_crs = crs_sniffer(vector_file), crs_sniffer(raster_file)

        if ras_crs != vec_crs:
            msg = f"CRS for files {vector_file} and {raster_file} are not the same. Reprojecting raster..."
            LOGGER.warning(msg)

            warped_fn = tempfile.NamedTemporaryFile(prefix="warped_",
                                                    suffix=".tiff",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_raster_warp(raster_file, warped_fn, target_crs=vec_crs)
            raster_file = warped_fn

        data_type = raster_datatype_sniffer(raster_file)

        try:
            stats = zonal_stats(
                vector_file,
                raster_file,
                band=band,
                all_touched=touches,
                raster_out=True,
            )

            raster_files = zonalstats_raster_file(
                stats,
                working_dir=self.workdir,
                data_type=data_type,
                crs=vec_crs or ras_crs,
            )

            if len(raster_files) > 1:
                ml = MetaLink4(
                    "test-ml-1",
                    "MetaLink with links to raster files.",
                    workdir=self.workdir,
                )
                for i, file in enumerate(raster_files):
                    # Create a MetaFile instance, which instantiates a ComplexOutput object.
                    mf = MetaFile(file.name,
                                  description="Raster file",
                                  fmt=FORMATS.GEOTIFF)
                    # or mf.file = <path to file> or mf.url = <url>
                    mf.file = file.as_posix()
                    ml.append(mf)

                response.outputs["raster"].data = ml.xml
            else:
                response.outputs["raster"].file = raster_files[0]

        except Exception as e:
            msg = f"Failed to perform raster subset using {shape_url}{f' and {raster_url} ' if raster_url else ''}: {e}"
            LOGGER.error(msg)
            raise Exception(msg)

        return response
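The MetaLink assembly appears in both Example #3 and Example #7; here it is condensed into one helper for reference. MetaLink4, MetaFile and FORMATS are the same pywps classes used above; the file list is hypothetical.

from pathlib import Path

from pywps import FORMATS
from pywps.inout.outputs import MetaFile, MetaLink4


def build_metalink(files, workdir):
    """Bundle a list of GeoTIFF paths into MetaLink4 XML (sketch)."""
    ml = MetaLink4("rasters_out", "Links to raster subset files", workdir=workdir)
    for f in files:
        mf = MetaFile(Path(f).name, "Raster subset", fmt=FORMATS.GEOTIFF)
        mf.file = str(f)
        ml.append(mf)
    return ml.xml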
Example #8
    def _handler(self, request, response):

        shape_url = request.inputs["shape"][0].file
        band = request.inputs["band"][0].data
        categorical = request.inputs["categorical"][0].data
        touches = request.inputs["select_all_touching"][0].data

        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        rasters = [".tiff", ".tif"]

        if "raster" in request.inputs:
            raster_url = request.inputs["raster"][0].file
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
        else:
            raster_url = None
            # Assuming that the shape coordinates are in WGS84
            raster_file = gather_dem_tile(vector_file,
                                          self.workdir,
                                          geographic=True)

        vec_crs, ras_crs = crs_sniffer(vector_file), crs_sniffer(raster_file)

        if ras_crs != vec_crs:
            msg = f"CRS for files {vector_file} and {raster_file} are not the same. Reprojecting vector..."
            LOGGER.warning(msg)

            # Reproject full vector to preserve feature attributes
            projected = tempfile.NamedTemporaryFile(prefix="reprojected_",
                                                    suffix=".json",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_vector_reproject(vector_file,
                                     projected,
                                     source_crs=vec_crs,
                                     target_crs=ras_crs)
            vector_file = projected

        summary_stats = SUMMARY_ZONAL_STATS

        try:
            stats = zonal_stats(
                vector_file,
                raster_file,
                stats=summary_stats,
                band=band,
                categorical=categorical,
                all_touched=touches,
                geojson_out=True,
                raster_out=False,
            )

            feature_collect = {"type": "FeatureCollection", "features": stats}
            response.outputs["statistics"].data = json.dumps(feature_collect)

        except Exception as e:
            msg = f"Failed to perform raster subset using {shape_url}{f' and {raster_url} ' if raster_url else ''}: {e}"
            LOGGER.error(msg)
            raise Exception(msg) from e

        return response
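The zonal_stats call signature used throughout (stats, band, categorical, all_touched, geojson_out, raster_out) matches the rasterstats package. A minimal standalone call with placeholder file paths, showing the flags these handlers toggle:

from rasterstats import zonal_stats

# "watershed.geojson" and "dem.tif" are placeholder paths.
stats = zonal_stats(
    "watershed.geojson",
    "dem.tif",
    stats=["min", "max", "mean", "median"],
    band=1,
    all_touched=True,   # count cells the geometry merely touches
    geojson_out=True,   # return GeoJSON features with stats merged into properties
)
print(stats[0]["properties"]["mean"])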