def _handler(self, request, response):

        shape_url = request.inputs['shape'][0].file
        projected_crs = request.inputs['projected_crs'][0].data

        extensions = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=extensions))
        shape_crs = crs_sniffer(vector_file)

        try:
            projection = CRS.from_epsg(projected_crs)
            if projection.is_geographic:
                msg = 'Desired CRS {} is geographic. ' \
                      'Areal analysis values will be in decimal-degree units.'.format(projection.to_epsg())
                LOGGER.warning(msg)
        except Exception as e:
            msg = '{}: Failed to parse CRS definition. Exiting.'.format(e)
            LOGGER.error(msg)
            raise Exception(msg) from e

        properties = []
        try:
            for layer_name in fiona.listlayers(vector_file):
                with fiona.open(vector_file, 'r', crs=shape_crs,
                                layer=layer_name) as src:
                    for feature in src:
                        geom = shape(feature['geometry'])

                        multipolygon_check(geom)

                        transformed = geom_transform(geom,
                                                     source_crs=shape_crs,
                                                     target_crs=projection)
                        prop = {'id': feature['id']}
                        prop.update(feature['properties'])
                        prop.update(geom_prop(transformed))

                        # Recompute the centroid location using the original projection
                        prop['centroid'] = geom_prop(geom)['centroid']

                        properties.append(prop)

        except Exception as e:
            msg = '{}: Failed to extract features from shape {}'.format(
                e, vector_file)
            LOGGER.error(msg)
            raise Exception(msg) from e

        response.outputs['properties'].data = json.dumps(properties)

        return response
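A minimal sketch of the reprojection step above, reimplemented with pyproj and shapely directly; the project's geom_transform helper is not shown in this excerpt, so treating this as its equivalent is an assumption:

# Sketch only: an assumed equivalent of geom_transform.
from pyproj import CRS, Transformer
from shapely.ops import transform as shapely_transform

def reproject_geom(geom, source_crs, target_crs):
    """Return `geom` reprojected from `source_crs` to `target_crs`."""
    transformer = Transformer.from_crs(CRS.from_user_input(source_crs),
                                       CRS.from_user_input(target_crs),
                                       always_xy=True)
    return shapely_transform(transformer.transform, geom)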
Example 2

    def _handler(self, request, response):

        level = 12  # request.inputs['level'][0].data
        lakes = True  # request.inputs['lakes'][0].data
        collect_upstream = request.inputs['aggregate_upstream'][0].data
        lonlat = request.inputs['location'][0].data

        # shape_description = 'hydrobasins_{}na_lev{}'.format('lake_' if lakes else '', level)
        # table = DATA / 'hybas_{}na_lev{:02}.csv'.format('lake_' if lakes else '', level)
        # shape_url = TESTDATA[shape_description]

        # extensions = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        # shp = single_file_check(archive_sniffer(shape_url, working_dir=self.workdir, extensions=extensions))

        lon, lat = parse_lonlat(lonlat)
        bbox = (lon, lat, lon, lat)

        shape_url = tempfile.NamedTemporaryFile(prefix='hybas_',
                                                suffix='.gml',
                                                delete=False,
                                                dir=self.workdir).name

        hybas_gml = gis.get_hydrobasins_location_wfs(bbox,
                                                     lakes=lakes,
                                                     level=level)

        with open(shape_url, 'w') as f:
            f.write(hybas_gml)

        response.update_status('Found downstream watershed',
                               status_percentage=10)

        extensions = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        shp = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=extensions))

        shape_crs = crs_sniffer(shp)

        with fiona.open(shp, 'r', crs=shape_crs) as src:

            # Find HYBAS_ID
            feat = next(iter(src))
            hybas_id = feat['properties']['HYBAS_ID']
            gml_id = feat['properties']['gml_id']
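            # Per the HydroBASINS documentation: HYBAS_ID uniquely identifies
            # this sub-basin, while MAIN_BAS holds the id of the most
            # downstream sub-basin, i.e. the drainage basin as a whole.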

            if collect_upstream:

                main_bas = feat['properties']['MAIN_BAS']

                if lakes is False or level != 12:
                    raise InvalidParameterValue(
                        "Set lakes to True and level to 12.")

                # Collect features from GeoServer
                response.update_status('Collecting relevant features',
                                       status_percentage=70)

                region = tempfile.NamedTemporaryFile(prefix='hybas_',
                                                     suffix='.json',
                                                     delete=False,
                                                     dir=self.workdir).name
                region_url = gis.get_hydrobasins_attributes_wfs(
                    attribute='MAIN_BAS',
                    value=main_bas,
                    lakes=lakes,
                    level=level)

                # Read table of relevant features sharing main basin
                df = gpd.read_file(region_url)
                df.to_file(region, driver='GeoJSON')

                # TODO: Load and keep this data in memory; Figure out how to better handle encoding and column names.
                # Identify upstream sub-basins and write to a new file
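                # hydrobasins_upstream_ids is assumed to follow the HydroBASINS
                # NEXT_DOWN pointers to select every sub-basin draining into
                # hybas_id; the helper itself is not shown in this excerpt.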
                up = gis.hydrobasins_upstream_ids(hybas_id, df)
                upfile = tempfile.NamedTemporaryFile(prefix='hybas_',
                                                     suffix='.json',
                                                     delete=False,
                                                     dir=self.workdir).name
                up.to_file(upfile, driver='GeoJSON')

                # Aggregate upstream features into a single geometry.
                gdf = gpd.read_file(upfile)
                agg = gis.hydrobasins_aggregate(gdf)

                feat = json.loads(agg.to_json())['features'][0]
                response.outputs['feature'].data = json.dumps(feat)
                response.outputs['upstream_ids'].data = json.dumps(
                    up['id'].tolist())

            else:
                response.outputs['feature'].data = json.dumps(feat)
                response.outputs['upstream_ids'].data = json.dumps([
                    gml_id,
                ])

        return response
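The parse_lonlat helper used above is not shown; here is a minimal sketch of the behaviour inferred from the call site (the implementation is an assumption):

import re

def parse_lonlat(lonlat):
    """Extract a (lon, lat) pair of floats from a free-form string."""
    lon, lat = (float(v) for v in re.findall(r'-?\d+(?:\.\d+)?', lonlat)[:2])
    return lon, lat

# e.g. parse_lonlat('-68.724444, 50.646667') -> (-68.724444, 50.646667)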
Example 3
    def _handler(self, request, response):

        shape_url = request.inputs['shape'][0].file
        band = request.inputs['band'][0].data
        touches = request.inputs['select_all_touching'][0].data

        vectors = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))

        if 'raster' in request.inputs:
            raster_url = request.inputs['raster'][0].file
            rasters = ['.tiff', '.tif']
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
        else:
            bbox = gis.get_bbox(vector_file)
            raster_url = 'public:EarthEnv_DEM90_NorthAmerica'
            raster_bytes = gis.get_raster_wcs(bbox,
                                              geographic=True,
                                              layer=raster_url)
            raster_file = tempfile.NamedTemporaryFile(prefix='wcs_',
                                                      suffix='.tiff',
                                                      delete=False,
                                                      dir=self.workdir).name
            with open(raster_file, 'wb') as f:
                f.write(raster_bytes)

        vec_crs, ras_crs = crs_sniffer(vector_file), crs_sniffer(raster_file)

        if ras_crs != vec_crs:
            msg = 'CRS for files {} and {} are not the same. Reprojecting raster...'.format(
                vector_file, raster_file)
            LOGGER.warning(msg)

            projected = tempfile.NamedTemporaryFile(prefix='reprojected_',
                                                    suffix='.tiff',
                                                    delete=False,
                                                    dir=self.workdir).name
            # Warp the raster (not the vector) onto the vector's CRS.
            generic_raster_warp(raster_file, projected, target_crs=vec_crs)
            raster_file = projected

        data_type = raster_datatype_sniffer(raster_file)
        raster_compression = 'lzw'

        out_dir = os.path.join(self.workdir, 'output')
        os.makedirs(out_dir)

        try:
            stats = zonal_stats(vector_file,
                                raster_file,
                                band=band,
                                all_touched=touches,
                                raster_out=True)

            for i, raster_location in enumerate(stats):

                file = 'subset_{}.tiff'.format(i + 1)
                raster_subset = os.path.join(out_dir, file)

                try:
                    raster = raster_location['mini_raster_array']
                    grid_properties = raster_location['mini_raster_affine'][:6]
                    nodata = raster_location['mini_raster_nodata']

                    aff = Affine(*grid_properties)

                    LOGGER.info(
                        'Writing raster data to {}'.format(raster_subset))

                    masked_array = np.ma.masked_values(raster, nodata)
                    if masked_array.mask.all():
                        msg = 'Subset {} is empty, continuing...'.format(i + 1)
                        LOGGER.warning(msg)

                    normal_array = np.asarray(masked_array, dtype=data_type)

                    # Write to GeoTIFF
                    with rio.open(raster_subset,
                                  'w',
                                  driver='GTiff',
                                  count=1,
                                  compress=raster_compression,
                                  height=raster.shape[0],
                                  width=raster.shape[1],
                                  dtype=data_type,
                                  transform=aff,
                                  crs=vec_crs or ras_crs,
                                  nodata=nodata) as f:
                        f.write(normal_array, 1)

                except Exception as e:
                    msg = 'Failed to write raster outputs: {}'.format(e)
                    LOGGER.error(msg)
                    raise Exception(msg)

            # `shutil.make_archive` could potentially cause problems in multi-threaded use; worth investigating later.
            out_fn = os.path.join(self.workdir, self.identifier)
            shutil.make_archive(base_name=out_fn,
                                format='zip',
                                root_dir=out_dir,
                                logger=LOGGER)

            response.outputs['raster'].file = '{}.zip'.format(out_fn)

        except Exception as e:
            msg = 'Failed to perform raster subset using {} and {}: {}'.format(
                shape_url, raster_url, e)
            LOGGER.error(msg)
            raise Exception(msg)

        return response
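The mini_raster_* keys consumed above are what rasterstats returns when raster_out=True; a minimal usage sketch (file paths are placeholders):

from rasterstats import zonal_stats

# raster_out=True attaches the clipped array, its affine transform and the
# nodata value to each feature's result dict.
stats = zonal_stats('features.geojson', 'dem.tif', band=1, raster_out=True)
first = stats[0]
clipped = first['mini_raster_array']   # numpy masked array for the feature
affine = first['mini_raster_affine']   # affine.Affine georeferencing transform
nodata = first['mini_raster_nodata']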
Example 4
    def _handler(self, request, response):

        # Process inputs
        # ---------------
        shape_url = request.inputs['shape'][0].file
        destination_crs = request.inputs['projected_crs'][0].data
        touches = request.inputs['select_all_touching'][0].data

        # Checks for valid CRS and that CRS is projected
        # -----------------------------------------------
        projection = CRS.from_user_input(destination_crs)
        if not projection.is_projected:
            msg = 'Destination CRS {} is not projected.' \
                  ' Terrain analysis values will not be valid.'.format(projection.to_epsg())
            LOGGER.error(msg)
            raise ValueError(msg)

        # Collect and process the shape
        # -----------------------------
        vectors = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        vec_crs = crs_sniffer(vector_file)

        # Check that boundaries within 60N and 60S
        boundary_check(vector_file)

        if 'raster' in request.inputs:
            raster_url = request.inputs['raster'][0].file
            rasters = ['.tiff', '.tif']
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))

        else:
            # Assuming that the shape coordinates are in WGS84
            bbox = gis.get_bbox(vector_file)
            raster_url = 'public:EarthEnv_DEM90_NorthAmerica'
            raster_bytes = gis.get_raster_wcs(bbox,
                                              geographic=True,
                                              layer=raster_url)
            raster_file = tempfile.NamedTemporaryFile(prefix='wcs_',
                                                      suffix='.tiff',
                                                      delete=False,
                                                      dir=self.workdir).name
            with open(raster_file, 'wb') as f:
                f.write(raster_bytes)

        ras_crs = crs_sniffer(raster_file)

        # Reproject raster
        # ----------------
        if ras_crs != projection.to_proj4():
            msg = 'CRS for {} is not {}. Reprojecting raster...'.format(
                raster_file, projection)
            LOGGER.warning(msg)
            warped_fn = tempfile.NamedTemporaryFile(prefix='warped_',
                                                    suffix='.tiff',
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_raster_warp(raster_file, warped_fn, projection.to_proj4())

        else:
            warped_fn = raster_file

        # Perform the terrain analysis
        # ----------------------------
        rpj = tempfile.NamedTemporaryFile(prefix='reproj_',
                                          suffix='.json',
                                          delete=False,
                                          dir=self.workdir).name
        generic_vector_reproject(vector_file,
                                 rpj,
                                 source_crs=vec_crs,
                                 target_crs=projection.to_proj4())
        with open(rpj) as src:
            geo = json.load(src)

        features = [sgeo.shape(feat['geometry']) for feat in geo['features']]
        union = ops.unary_union(features)

        clipped_fn = tempfile.NamedTemporaryFile(prefix='clipped_',
                                                 suffix='.tiff',
                                                 delete=False,
                                                 dir=self.workdir).name
        # Ensure that values for regions outside of clip are kept
        generic_raster_clip(raster=warped_fn,
                            output=clipped_fn,
                            geometry=union,
                            touches=touches,
                            fill_with_nodata=True,
                            padded=True)

        # Compute DEM properties for each feature.
        properties = []
        for feature in features:
            properties.append(
                dem_prop(clipped_fn, geom=feature, directory=self.workdir))
        properties.append(dem_prop(clipped_fn, directory=self.workdir))

        response.outputs['properties'].data = json.dumps(properties)
        response.outputs['dem'].file = clipped_fn

        return response
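generic_raster_clip is a project helper; below is a rough sketch of an equivalent clip built on rasterio.mask, with the padding behaviour assumed from the call site:

import rasterio
from rasterio.mask import mask as rio_mask

def clip_raster(raster, output, geometry, touches=False):
    """Clip `raster` to `geometry`, padding the edges by half a pixel."""
    with rasterio.open(raster) as src:
        data, transform = rio_mask(src, [geometry], all_touched=touches,
                                   crop=True, pad=True)
        meta = src.meta.copy()
        meta.update(height=data.shape[1], width=data.shape[2],
                    transform=transform)
    with rasterio.open(output, 'w', **meta) as dst:
        dst.write(data)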
Example 5
    def _handler(self, request, response):

        shape_url = request.inputs['shape'][0].file
        band = request.inputs['band'][0].data
        categorical = request.inputs['categorical'][0].data
        touches = request.inputs['select_all_touching'][0].data

        vectors = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        rasters = ['.tiff', '.tif']

        if 'raster' in request.inputs:
            raster_url = request.inputs['raster'][0].file
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
        else:
            bbox = gis.get_bbox(vector_file)
            raster_url = 'public:EarthEnv_DEM90_NorthAmerica'
            raster_bytes = gis.get_raster_wcs(bbox,
                                              geographic=True,
                                              layer=raster_url)
            raster_file = tempfile.NamedTemporaryFile(prefix='wcs_',
                                                      suffix='.tiff',
                                                      delete=False,
                                                      dir=self.workdir).name
            with open(raster_file, 'wb') as f:
                f.write(raster_bytes)

        vec_crs, ras_crs = crs_sniffer(vector_file), crs_sniffer(raster_file)

        if ras_crs != vec_crs:
            msg = 'CRS for files {} and {} are not the same. Reprojecting vector...'.format(
                vector_file, raster_file)
            LOGGER.warning(msg)

            # Reproject full vector to preserve feature attributes
            projected = tempfile.NamedTemporaryFile(prefix='reprojected_',
                                                    suffix='.json',
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_vector_reproject(vector_file,
                                     projected,
                                     source_crs=vec_crs,
                                     target_crs=ras_crs)
            vector_file = projected

        summary_stats = SUMMARY_ZONAL_STATS

        try:
            stats = zonal_stats(vector_file,
                                raster_file,
                                stats=summary_stats,
                                band=band,
                                categorical=categorical,
                                all_touched=touches,
                                geojson_out=True,
                                raster_out=False)

            feature_collect = {'type': 'FeatureCollection', 'features': stats}
            response.outputs['statistics'].data = json.dumps(feature_collect)

        except Exception as e:
            msg = 'Failed to perform zonal statistics using {} and {}: {}'.format(
                shape_url, raster_url, e)
            LOGGER.error(msg)
            raise Exception(msg) from e

        return response
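SUMMARY_ZONAL_STATS is a project constant not shown in this excerpt; a plausible definition (an assumption, using statistic names rasterstats actually accepts):

# Assumed definition; the real constant lives elsewhere in the project.
SUMMARY_ZONAL_STATS = ['count', 'min', 'max', 'mean', 'median', 'sum', 'nodata']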
Example 6
    def _handler(self, request, response):

        shape_url = request.inputs['shape'][0].file
        simple_categories = request.inputs['simple_categories'][0].data
        band = request.inputs['band'][0].data
        touches = request.inputs['select_all_touching'][0].data

        vectors = ['.gml', '.shp', '.gpkg', '.geojson', '.json']
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        vec_crs = crs_sniffer(vector_file)

        response.update_status('Accessed vector', status_percentage=5)

        if 'raster' in request.inputs:  # For raster files using the UNFAO Land Cover Classification System (19 types)
            rasters = ['.tiff', '.tif']
            raster_url = request.inputs['raster'][0].file
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
            ras_crs = crs_sniffer(raster_file)

            if vec_crs != ras_crs:
                msg = 'CRS for files {} and {} are not the same. Reprojecting...'.format(
                    vector_file, raster_file)
                LOGGER.warning(msg)

                # Reproject full vector to preserve feature attributes
                projected = tempfile.NamedTemporaryFile(prefix='reprojected_',
                                                        suffix='.json',
                                                        delete=False,
                                                        dir=self.workdir).name
                generic_vector_reproject(vector_file,
                                         projected,
                                         source_crs=vec_crs,
                                         target_crs=ras_crs)
            else:
                projected = vector_file

        else:  # using the NALCMS data from GeoServer
            projected = tempfile.NamedTemporaryFile(prefix='reprojected_',
                                                    suffix='.json',
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_vector_reproject(vector_file,
                                     projected,
                                     source_crs=vec_crs,
                                     target_crs=NALCMS_PROJ4)

            bbox = gis.get_bbox(projected)
            raster_url = 'public:CEC_NALCMS_LandUse_2010'
            raster_bytes = gis.get_raster_wcs(bbox,
                                              geographic=False,
                                              layer=raster_url)
            raster_file = tempfile.NamedTemporaryFile(prefix='wcs_',
                                                      suffix='.tiff',
                                                      delete=False,
                                                      dir=self.workdir).name
            with open(raster_file, 'wb') as f:
                f.write(raster_bytes)

        response.update_status('Accessed raster', status_percentage=10)

        if simple_categories:
            categories = SIMPLE_CATEGORIES
        else:
            categories = TRUE_CATEGORIES
        summary_stats = SUMMARY_ZONAL_STATS

        try:
            stats = zonal_stats(projected,
                                raster_file,
                                stats=summary_stats,
                                band=band,
                                categorical=True,
                                all_touched=touches,
                                geojson_out=True,
                                raster_out=False)

            land_use = list()
            for stat in stats:
                lu = defaultdict(int)
                prop = stat['properties']

                # Rename/aggregate land-use categories
                for k, v in categories.items():
                    lu[v] += prop.get(k, 0)

                prop.update(lu)
                land_use.append(lu)
                # prop['mini_raster_array'] = pickle.dumps(prop['mini_raster_array'], protocol=0).decode()

            feature_collect = {'type': 'FeatureCollection', 'features': stats}
            response.outputs['features'].data = json.dumps(feature_collect)
            response.outputs['statistics'].data = json.dumps(land_use)

        except Exception as e:
            msg = 'Failed to perform zonal statistics using {} and {}: {}'.format(
                shape_url, raster_url, e)
            LOGGER.error(msg)
            raise Exception(msg) from e

        return response
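SIMPLE_CATEGORIES and TRUE_CATEGORIES map NALCMS class codes to labels for the aggregation loop above; an illustrative fragment (the groupings shown are assumptions, not the project's actual table):

# Illustrative only: maps raw NALCMS class ids to coarse land-use labels.
SIMPLE_CATEGORIES = {
    1: 'Forest',     # temperate or sub-polar needleleaf forest
    5: 'Forest',     # deciduous broadleaf forest
    8: 'Shrubland',
    10: 'Grassland',
    14: 'Wetland',
    15: 'Cropland',
    17: 'Urban',
    18: 'Water',
}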