def test_compare_fill_no_flats_python_and_optimized(dtmdata):
    speedups.enable()
    assert speedups.enabled
    short, diag = fill.minimum_safe_short_and_diag(dtmdata)
    filled_optimized = fill.fill_terrain_no_flats(dtmdata, short, diag)

    speedups.disable()
    assert not speedups.enabled
    filled_python = fill.fill_terrain_no_flats(dtmdata, short, diag)

    assert np.all(filled_optimized == filled_python)
def test_negative_dem_values():
    # Very negative values (such as the common -9999 nodata value) must still be
    # raised by the fill rather than passed through unchanged
    negative_value = -9999
    dtm = np.empty((10, 10), dtype=np.float32)
    dtm[:, :] = negative_value
    dtm[4:6, 4:6] = 0
    short, diag = fill.minimum_safe_short_and_diag(dtm)
    filled = fill.fill_terrain_no_flats(dtm, short, diag)
    assert filled[1, 1] != negative_value
def test_optimized_fill_no_flats(dtmdata, fillednoflatsdata):
    speedups.enable()
    assert speedups.enabled
    short, diag = fill.minimum_safe_short_and_diag(dtmdata)
    filled = fill.fill_terrain_no_flats(dtmdata, short, diag)
    assert np.sum(filled > dtmdata) > 0
    assert np.all(filled <= np.max(dtmdata) + (dtmdata.shape[0] + dtmdata.shape[1]) * diag)
    assert np.all(filled == fillednoflatsdata)
def test_python_fill_no_flats(dtmdata, fillednoflatsdata):
    speedups.disable()
    assert not speedups.enabled
    short, diag = fill.minimum_safe_short_and_diag(dtmdata)
    filled = fill.fill_terrain_no_flats(dtmdata, short, diag)
    assert np.all(filled >= np.min(dtmdata))
    assert np.all(filled <= np.max(dtmdata) + (dtmdata.shape[0] + dtmdata.shape[1]) * diag)
    assert np.all(filled == fillednoflatsdata)
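# The tests above rely on two pytest fixtures: dtmdata (a DTM as a 2D numpy array)
# and fillednoflatsdata (the expected filled-no-flats reference for it). A minimal
# sketch of such fixtures, assuming the reference data is loaded from rasters next
# to the tests; the paths and the GDAL-based loading below are hypothetical and the
# real fixtures (e.g. in conftest.py) may be defined differently:

import numpy as np
import pytest
from osgeo import gdal


@pytest.fixture
def dtmdata():
    # Hypothetical path to the reference DTM used by the tests
    return gdal.Open("tests/data/dtm.tif").ReadAsArray().astype(np.float32)


@pytest.fixture
def fillednoflatsdata():
    # Hypothetical path to the precomputed filled-no-flats reference raster
    return gdal.Open("tests/data/filled_no_flats.tif").ReadAsArray().astype(np.float32)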
def process(self):
    """Fill the DEM and derive bluespot depths, flow directions and, optionally, accumulated flow."""
    dem = self.input_dem.read().astype(dtypes.DTYPE_DTM, casting='same_kind', copy=False)
    transform = self.input_dem.transform

    # Input cells must be square
    assert abs(abs(transform[1]) - abs(transform[5])) < 0.01 * abs(transform[1]), \
        "Input cells must be square"

    if not speedups.enabled:
        self.logger.warning(
            'Speedups are not available. If you have more than toy data you will want them!')

    self.logger.info("Calculating filled DEM")
    # Filled DEM and rasters derived from it
    filled = fill.fill_terrain(dem)
    self.output_filled.write(filled)

    self.logger.info("Calculating bluespot depths")
    depths = filled - dem
    self.output_depths.write(depths)
    del filled
    del depths

    self.logger.info("Calculating flow directions")
    # Filled-no-flats DEM and rasters derived from it
    short, diag = fill.minimum_safe_short_and_diag(dem)
    filled_no_flats = fill.fill_terrain_no_flats(dem, short=short, diag=diag)
    del dem

    flowdir = flow.terrain_flowdirection(filled_no_flats, edges_flow_outward=True)
    self.output_flowdir.write(flowdir)
    del filled_no_flats

    if self.output_accum:
        self.logger.info("Calculating flow accumulation")
        accum = flow.accumulated_flow(flowdir)
        self.output_accum.write(accum)
        del accum

    self.logger.info("Done")
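# A minimal standalone sketch of the same sequence of calls on an in-memory array,
# with no raster IO or logging. The random DEM is made up purely for illustration,
# and the package path in the import is an assumption; it should refer to the same
# fill and flow modules used by the process() step above:

import numpy as np

from malstroem import fill, flow

dem = (np.random.default_rng(0).random((100, 100)) * 10.0).astype(np.float32)

filled = fill.fill_terrain(dem)        # depressions filled
depths = filled - dem                  # bluespot depth in each cell

short, diag = fill.minimum_safe_short_and_diag(dem)
filled_no_flats = fill.fill_terrain_no_flats(dem, short=short, diag=diag)

flowdir = flow.terrain_flowdirection(filled_no_flats, edges_flow_outward=True)
accum = flow.accumulated_flow(flowdir)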
def process_pourpoints(bluespots, depths, watersheds, dem, accum, out, format, layername, dsco, lco):
    """Determine pour points.

    \b
    Determines a pour point for each bluespot using one of two methods:
    * Random candidate. Requires the DEM only.
    * Maximum accumulated flow candidate. Requires accumulated flow.

    The output of the two methods differs only when a bluespot has more than one
    pour point candidate (i.e. multiple threshold cells with identical Z).

    For documentation of OGR features (format, dsco and lco) see
    http://www.gdal.org/ogr_formats.html
    """
    bspot_reader = io.RasterReader(bluespots)
    depths_reader = io.RasterReader(depths)
    wsheds_reader = io.RasterReader(watersheds)

    data = accum if accum else dem
    if not data:
        raise Exception('Either accum or dem must be specified')
    data_reader = io.RasterReader(data)

    format = str(format)
    layername = str(layername)
    pourpnt_writer = io.VectorWriter(format, out, layername, [], ogr.wkbPoint, depths_reader.crs, dsco, lco)

    # Recalculate stats on filtered bluespots
    labeled_data = bspot_reader.read()
    depths_data = depths_reader.read()
    bluespot_stats = label.label_stats(depths_data, labeled_data)
    del depths_data

    if accum:
        pp_pix = label.label_max_index(data_reader.read(), labeled_data)
    elif dem:
        dem_data = data_reader.read()
        short, diag = fill.minimum_safe_short_and_diag(dem_data)
        filled_no_flats = fill.fill_terrain_no_flats(dem_data, short, diag)
        pp_pix = label.label_min_index(filled_no_flats, labeled_data)
        del dem_data

    watershed_stats = label.label_count(wsheds_reader.read())

    pour_points = assemble_pourpoints(depths_reader.transform, pp_pix, bluespot_stats, watershed_stats)

    feature_collection = dict(type="FeatureCollection", features=pour_points)
    pourpnt_writer.write_geojson_features(feature_collection)
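# A minimal sketch of driving this step directly from Python, assuming the rasters
# produced by the previous steps are on disk. The file names, the "GeoJSON" driver
# name and the empty dsco/lco lists are illustrative only, and if this function is
# registered as a CLI command the corresponding command-line options would be used
# instead of a direct call:

process_pourpoints(
    bluespots="bluespots.tif",
    depths="depths.tif",
    watersheds="watersheds.tif",
    dem="dem.tif",
    accum=None,                  # or e.g. "accum.tif" to use the max accumulated flow method
    out="pourpoints.geojson",
    format="GeoJSON",
    layername="pourpoints",
    dsco=[],
    lco=[],
)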