def test_nonintersecting_window_index():
    """A window entirely outside the raster selects an empty array (gh-2378)."""
    transform = from_origin(0, 0, 1, 1)
    # Bounds lie wholly to the upper-left of the dataset extent.
    window = from_bounds(-3, -3, -1, -1, transform)
    source = np.arange(25).reshape(5, 5)
    selected = source[window_index(window, height=5, width=5)]
    # The clipped index yields a degenerate (2, 0) slice — no data selected.
    assert selected.shape == (2, 0)
    assert selected.flatten().tolist() == []
def test_window_index():
    """window_index turns ((row_start, row_stop), (col_start, col_stop)) into slices."""
    idx = window_index(((0, 4), (1, 12)))
    assert len(idx) == 2
    rows, cols = idx
    assert (rows.start, rows.stop) == (0, 4)
    assert (cols.start, cols.stop) == (1, 12)
    # The slice pair indexes a 2-D array directly.
    assert np.ones((20, 20))[idx].shape == (4, 11)
def test_window_index():
    """Check the (row slice, col slice) pair produced by window_index."""
    # NOTE(review): this test duplicates an identical one earlier in the file;
    # consider removing one copy.
    index_pair = window_index(((0, 4), (1, 12)))
    assert len(index_pair) == 2
    row_slice, col_slice = index_pair
    assert row_slice.start == 0
    assert row_slice.stop == 4
    assert col_slice.start == 1
    assert col_slice.stop == 12
    data = np.ones((20, 20))
    assert data[index_pair].shape == (4, 11)
def main(cadastre_shp_filepath, agglom_extent_filepath, dst_filepath, dst_res,
         dst_nodata, dst_dtype):
    """Rasterize a cadastre LULC shapefile, mask it to an agglomeration
    extent, and dump the cropped result as a GeoTIFF.

    Parameters
    ----------
    cadastre_shp_filepath : str
        Path to the cadastre vector (shapefile) dataset.
    agglom_extent_filepath : str
        Path to a vector file whose first geometry is the agglomeration extent.
    dst_filepath : str
        Path of the output GeoTIFF.
    dst_res :
        Target raster resolution (forwarded to ``rasterize_cadastre``).
    dst_nodata :
        Nodata value written to the output and used to fill outside the extent.
    dst_dtype :
        Data type of the output raster.
    """
    logger = logging.getLogger(__name__)

    # Only read features intersecting the hardcoded module-level bounding box.
    cadastre_gdf = gpd.read_file(cadastre_shp_filepath,
                                 bbox=(WEST, SOUTH, EAST, NORTH))

    # rasterize the cadastre
    cadastre_arr, cadastre_transform = rasterize_cadastre(
        cadastre_gdf, dst_res, dst_nodata, dst_dtype)
    logger.info("rasterized cadastre vector LULC dataset to shape %s",
                str(cadastre_arr.shape))

    # TODO: crop it to the extent
    # Burn the agglomeration extent geometry into a mask aligned with the
    # cadastre raster; cells outside the geometry get the 0 fill value.
    agglom_extent_geom_nodata = 0
    agglom_extent_geom = gpd.read_file(
        agglom_extent_filepath)['geometry'].iloc[:1]
    agglom_extent_mask = features.rasterize(agglom_extent_geom,
                                            out_shape=cadastre_arr.shape,
                                            fill=agglom_extent_geom_nodata,
                                            transform=cadastre_transform)

    # get window and transform of valid data points, i.e., the computed extent
    extent_window = windows.get_data_window(agglom_extent_mask,
                                            nodata=agglom_extent_geom_nodata)
    extent_transform = windows.transform(extent_window, cadastre_transform)
    # Fill cells outside the extent with nodata, then crop to the data window.
    dst_arr = np.where(agglom_extent_mask, cadastre_arr,
                       dst_nodata)[windows.window_index(extent_window)]

    # dump it
    with rio.open(
            dst_filepath,
            'w',
            driver='GTiff',
            width=extent_window.width,
            height=extent_window.height,
            count=1,
            crs=CRS,  # cadastre_gdf.crs
            transform=extent_transform,
            dtype=dst_dtype,
            nodata=dst_nodata) as dst:
        dst.write(dst_arr, 1)
    logger.info("dumped rasterized dataset to %s", dst_filepath)
def window_index(*args, **kwargs):
    """Deprecated alias for :func:`rasterio.windows.window_index`.

    Emits a ``FutureWarning`` and forwards all arguments unchanged.
    """
    from rasterio.windows import window_index
    # stacklevel=2 makes the warning point at the deprecated caller's line
    # rather than at this shim.
    warnings.warn("Deprecated; Use rasterio.windows instead", FutureWarning,
                  stacklevel=2)
    return window_index(*args, **kwargs)
def window_index(*args, **kwargs):
    """Deprecated shim; delegates to :func:`rasterio.windows.window_index`."""
    from rasterio.windows import window_index as _window_index
    warnings.warn("Deprecated; Use rasterio.windows instead", FutureWarning)
    return _window_index(*args, **kwargs)
def main(cadastre_filepath, dst_tif_filepath, dst_shp_filepath, dst_res,
         num_patches, kernel_radius, urban_threshold, buffer_dist, dst_nodata):
    """Build an agglomeration LULC raster (and optionally its extent
    geometries) from a cadastre dataset.

    Rasterizes the cadastre, computes the urban footprint mask, excludes the
    largest water body (presumably a lake — TODO confirm), crops to the valid
    data window and writes a GeoTIFF; if ``dst_shp_filepath`` is given, also
    dumps the urban and lake extent polygons to a shapefile.

    Parameters
    ----------
    cadastre_filepath : str
        Path to the cadastre dataset.
    dst_tif_filepath : str
        Path of the output GeoTIFF.
    dst_shp_filepath : str or falsy
        Path of the output extent shapefile; skipped when falsy.
    dst_res :
        Target raster resolution.
    num_patches : int
        Number of largest urban clusters to keep.
    kernel_radius, urban_threshold, buffer_dist :
        Urban footprint parameters (forwarded to ``UrbanFootprinter``).
    dst_nodata :
        Nodata value of the output raster.
    """
    logger = logging.getLogger(__name__)
    logger.info("preparing raster agglomeration LULC from %s",
                cadastre_filepath)
    cadastre_arr, cadastre_transform = utils.rasterize_cadastre(
        cadastre_filepath, dst_res, dst_nodata)
    logger.info("rasterized cadastre vector LULC dataset to shape %s",
                str(cadastre_arr.shape))

    # get the urban extent mask according to the criteria used in the "Atlas
    # of Urban Expansion, The 2016 Edition" by Angel, S. et al.
    uf = ufp.UrbanFootprinter(cadastre_arr,
                              urban_classes=utils.URBAN_CLASSES,
                              res=dst_res)
    urban_mask = uf.compute_footprint_mask(kernel_radius,
                                           urban_threshold,
                                           num_patches=num_patches,
                                           buffer_dist=buffer_dist)
    logger.info(
        "obtained extent of the %d largest urban cluster(s) (%d pixels)",
        num_patches, np.sum(urban_mask))

    # exclude lake
    # TODO: arguments to customize `LULC_WATER_VAL` and `SIEVE_SIZE`
    # Label connected water components (8-connectivity via a 2x2 structure).
    label_arr = ndi.label(cadastre_arr == utils.LULC_WATER_VAL,
                          ndi.generate_binary_structure(2, 2))[0]
    # Largest non-background component ([1:] skips label 0); +1 restores the
    # label value after the slice offset.
    cluster_label = np.argmax(np.unique(label_arr,
                                        return_counts=True)[1][1:]) + 1
    largest_cluster = np.array(label_arr == cluster_label, dtype=np.uint8)
    # Remove the largest water body from the urban mask, then sieve out
    # speckles smaller than SIEVE_SIZE.
    urban_mask = features.sieve(
        np.array(urban_mask.astype(bool) & ~largest_cluster.astype(bool),
                 dtype=urban_mask.dtype), SIEVE_SIZE)

    # get window and transform of valid data points, i.e., the computed extent
    extent_window = windows.get_data_window(urban_mask, nodata=0)
    extent_transform = windows.transform(extent_window, cadastre_transform)
    # Fill outside the urban mask with nodata, crop to the data window.
    dst_arr = np.where(urban_mask, cadastre_arr,
                       dst_nodata)[windows.window_index(extent_window)]

    # dump it
    # ACHTUNG: use hardcoded CRS string (for the same CRS) to avoid issues
    with rio.open(
            dst_tif_filepath,
            'w',
            driver='GTiff',
            width=extent_window.width,
            height=extent_window.height,
            count=1,
            crs=utils.CRS,  # cadastre_gdf.crs
            transform=extent_transform,
            dtype=np.uint8,
            nodata=dst_nodata) as dst:
        dst.write(dst_arr, 1)
    logger.info("dumped rasterized dataset to %s", dst_tif_filepath)

    if dst_shp_filepath:
        # save the geometry extent
        # get the urban mask geometry
        # urban_mask_geom = uf.compute_footprint_mask_shp(
        #     kernel_radius,
        #     urban_threshold,
        #     largest_patch_only=largest_patch_only,
        #     buffer_dist=buffer_dist,
        #     transform=extent_transform)
        # Vectorize the valid-data mask and keep the polygon with the most
        # exterior coordinates as the urban extent geometry.
        urban_mask_geom = geometry.shape(
            max([(geom, val) for geom, val in features.shapes(
                np.array(dst_arr != dst_nodata, dtype=np.uint8),
                transform=extent_transform) if val == 1],
                key=lambda geom: len(geom[0]['coordinates']))[0])

        # get the window and transform of the lake extent
        lake_mask = features.sieve(largest_cluster, SIEVE_SIZE)
        extent_window = windows.get_data_window(lake_mask, nodata=0)
        extent_transform = windows.transform(extent_window, cadastre_transform)
        lake_mask = lake_mask[windows.window_index(extent_window)]
        # get the lake mask geometry (same largest-polygon heuristic as above)
        lake_mask_geom = geometry.shape(
            max([(geom, val) for geom, val in features.shapes(
                lake_mask, transform=extent_transform) if val == 1],
                key=lambda geom: len(geom[0]['coordinates']))[0])

        # ACHTUNG: use hardcoded CRS string (for the same CRS) to avoid issues
        gpd.GeoSeries([urban_mask_geom, lake_mask_geom],
                      crs=utils.CRS).to_file(dst_shp_filepath)
        logger.info("dumped extent geometry to %s", dst_shp_filepath)