def subcatch_order_a(ldd, oorder):
    """
    Determines subcatchments using the catchment order

    This version uses the last cell BELOW order to derive the catchments.
    In general you want the _b version.

    Input:
        - ldd
        - oorder - order to use

    Output:
        - map with catchments for the given streamorder
    """
    outl = find_outlet(ldd)
    large = pcr.subcatchment(ldd, pcr.boolean(outl))
    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    dif = pcr.upstream(
        ldd,
        pcr.cover(
            pcr.ifthen(
                large,
                pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts))),
            ),
            0,
        ),
    )
    dif = pcr.cover(pcr.scalar(outl), dif)  # Add catchment outlet
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(dif)))
    sc = pcr.subcatchment(ldd, dif)

    return sc, dif, stt
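# Usage sketch for subcatch_order_a (not part of the original source): assumes
# an LDD map named "wflow_ldd.map" (hypothetical) and the find_outlet helper
# from this module.
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")

# split the basin at the last cells below stream order 4
sc, outlets, strahler = subcatch_order_a(ldd, 4)
pcr.report(sc, "subcatch_order4.map")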
def subcatch_stream(ldd, stream, threshold):
    """
    Derive catchments based upon strahler threshold

    Input:
        ldd -- pcraster object direction, local drain directions
        stream -- pcraster object ordinal, stream order map
        threshold -- integer, strahler threshold, subcatchments ge threshold
            are derived

    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold
    """
    # derive stream order
    # stream = pcr.streamorder(ldd)
    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge,
        pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5,
            pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) > pcr.scalar(stream_ge),
                pcr.boolean(1),
                pcr.boolean(0),
            ),
        ),
    )
    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))
    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))

    return stream_ge, subcatch
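# A minimal sketch of driving subcatch_stream (assumption: the stream order
# map is derived on the fly from a full-catchment LDD; file names are
# hypothetical).
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
stream = pcr.streamorder(ldd)

# subcatchments draining to streams of Strahler order >= 3
stream_ge, subcatch = subcatch_stream(ldd, stream, 3)
pcr.report(subcatch, "subcatch_ge3.map")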
def ReverseMap(MAP):
    MAX = int(np.max(pcr.pcr2numpy(MAP, np.nan)))
    REV_MAP = pcr.ordinal(
        pcr.ifthen(pcr.scalar(MAP) == pcr.scalar(-9999), pcr.scalar(0))
    )
    for i in range(MAX + 1):
        if i > 0:
            print(i)
            # mirror class i onto class MAX + 1 - i
            REV_MAP = pcr.cover(
                pcr.ifthen(
                    pcr.ordinal(MAP) == pcr.ordinal(i),
                    pcr.ordinal(pcr.scalar(MAX + 1) - pcr.scalar(i)),
                ),
                REV_MAP,
            )
    REV_MAP = pcr.cover(REV_MAP, pcr.ordinal(MAP))

    return REV_MAP
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None, up_area=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X

    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
            identified as river should have a value > 0, other pixels a value
            of zero.
        basin=None -- set a boolean pcraster map where areas with True are
            estimated using the nearest drain in ldd distance and areas with
            False by means of the nearest friction distance. Friction distance
            is estimated using the upstream area as weight (i.e. drains with a
            bigger upstream area have a lower friction); the spreadzone
            operator is used in this case.
        up_area=None -- provide the upstream area (if not assigned, a
            guesstimate is prepared, assuming the LDD covers a full catchment
            area)

    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths according
            to D8 directions
    """
    if rivers is None:
        # prepare stream from a strahler threshold
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0)
        )
    else:
        # convert stream network to boolean
        stream = pcr.boolean(pcr.cover(rivers, 0))
    # determine height in river (in DEM*100 unit as ordinal)
    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem * 100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        # use basin to allocate areas outside basin to the nearest stream.
        # Nearest is weighted by upstream area.
        if up_area is None:
            up_area = pcr.accuflux(ldd, 1)
        up_area = pcr.ifthen(stream, up_area)  # mask areas outside streams
        friction = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0)
        )
        # if basin, use nearest river within subcatchment, if outside basin,
        # use weighted-nearest river
        up_elevation = pcr.ifthenelse(
            basin,
            pcr.scalar(pcr.subcatchment(ldd, height_river)),
            pcr.scalar(pcr.spreadzone(height_river, 0, friction)),
        )  # replace areas outside of basin by a spread zone calculation.
    # convert back to float in DEM units
    hand = pcr.max(pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation, 0) / 100
    # hand = (pcr.scalar(pcr.ordinal(dem*100))-up_elevation)/100  # variant allowing negative values
    dist = pcr.ldddist(ldd, stream, 1)  # compute horizontal distance estimate

    return hand, dist
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None, up_area=None,
                neg_HAND=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X

    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
            identified as river should have a value > 0, other pixels a value
            of zero.
        basin=None -- set a boolean pcraster map where areas with True are
            estimated using the nearest drain in ldd distance and areas with
            False by means of the nearest friction distance. Friction distance
            is estimated using the upstream area as weight (i.e. drains with a
            bigger upstream area have a lower friction); the spreadzone
            operator is used in this case.
        up_area=None -- provide the upstream area (if not assigned, a
            guesstimate is prepared, assuming the LDD covers a full catchment
            area)
        neg_HAND=None -- if set to 1, HAND maps can have negative values when
            elevation outside of stream is lower than stream (for example when
            there are natural embankments)

    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths according
            to D8 directions
    """
    if rivers is None:
        # prepare stream from a strahler threshold
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0)
        )
    else:
        # convert stream network to boolean
        stream = pcr.boolean(pcr.cover(rivers, 0))
    # determine height in river (in DEM*100 unit as ordinal)
    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem * 100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        # use basin to allocate areas outside basin to the nearest stream.
        # Nearest is weighted by upstream area.
        if up_area is None:
            up_area = pcr.accuflux(ldd, 1)
        up_area = pcr.ifthen(stream, up_area)  # mask areas outside streams
        friction = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0)
        )
        # if basin, use nearest river within subcatchment, if outside basin,
        # use weighted-nearest river
        up_elevation = pcr.ifthenelse(
            basin,
            pcr.scalar(pcr.subcatchment(ldd, height_river)),
            pcr.scalar(pcr.spreadzone(height_river, 0, friction)),
        )  # replace areas outside of basin by a spread zone calculation.
    # make negative HANDs also possible
    if neg_HAND == 1:
        # convert back to float in DEM units
        hand = (pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation) / 100
    else:
        # convert back to float in DEM units
        hand = pcr.max(pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation, 0) / 100
    dist = pcr.ldddist(ldd, stream, 1)  # compute horizontal distance estimate

    return hand, dist
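# A minimal sketch of calling derive_HAND (file names hypothetical), using a
# 1000-cell flow accumulation threshold to delineate the drainage network and
# allowing negative HAND values, e.g. behind natural embankments.
import pcraster as pcr

pcr.setclone("dem.map")
dem = pcr.readmap("dem.map")
ldd = pcr.readmap("ldd.map")

hand, dist = derive_HAND(dem, ldd, accuThreshold=1000, neg_HAND=1)
pcr.report(hand, "hand.map")
pcr.report(dist, "dist_to_stream.map")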
def classify(
    inmap, lower=[0, 10, 20, 30], upper=[10, 20, 30, 40], classes=[2, 2, 3, 4]
):
    """
    Classify a scalar map according to the class boundaries given in lower
    and upper.
    """
    result = pcr.ordinal(pcr.cover(-1))
    for l, u, c in zip(lower, upper, classes):
        result = pcr.cover(
            pcr.ifthen(inmap >= l, pcr.ifthen(inmap < u, pcr.ordinal(c))), result
        )

    return pcr.ifthen(result >= 0, result)
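# Example (map name hypothetical): classify a scalar slope map into the
# default four classes; values outside all ranges become missing.
import pcraster as pcr

pcr.setclone("slope.map")
slope = pcr.readmap("slope.map")
slope_class = classify(slope)
pcr.report(slope_class, "slope_class.map")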
def testNonSpatialConversions(self):
    nonSpatialValue = pcraster.mapmaximum(pcraster.readmap("map2asc_PCRmap.map"))
    # Ordinal.
    nonSpatial = pcraster.ordinal(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 124)
    self.assertEqual(float(nonSpatial), 124.0)
    # Nominal.
    nonSpatial = pcraster.nominal(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 124)
    self.assertEqual(float(nonSpatial), 124)
    # Boolean.
    nonSpatial = pcraster.boolean(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 1)
    self.assertEqual(float(nonSpatial), 1.0)
    # Scalar.
    nonSpatial = pcraster.scalar(pcraster.mapmaximum("abs_Expr.map"))
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 14)
    self.assertEqual(float(nonSpatial), 14.0)
def checkerboard(mapin, fcc):
    """
    checkerboard create a checkerboard map with unique id's in a
    fcc*fcc cells area. The resulting map can be used to derive statistics for
    (later) upscaling of maps (using the fcc factor)

    .. warning: use with unitcell to get most reliable results!

    Input:
        - map (used to determine coordinates)
        - fcc (size of the areas in cells)

    Output:
        - checkerboard type map
    """
    msker = pcr.defined(mapin)
    ymin = pcr.mapminimum(pcr.ycoordinate(msker))
    yc = (pcr.ycoordinate(msker) - ymin) / pcr.celllength()
    yc = pcr.rounddown(yc / fcc)
    # yc = yc/fcc
    xmin = pcr.mapminimum(pcr.xcoordinate(msker))
    xc = (pcr.xcoordinate(msker) - xmin) / pcr.celllength()
    xc = pcr.rounddown(xc / fcc)
    # xc = xc/fcc

    yc = yc * (pcr.mapmaximum(xc) + 1.0)

    xy = pcr.ordinal(xc + yc)

    return xy
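# Sketch: build 10x10-cell blocks for later upscaling statistics (map name
# hypothetical). The unitcell option follows the warning in the docstring.
import pcraster as pcr

pcr.setglobaloption("unitcell")
pcr.setclone("dem.map")
dem = pcr.readmap("dem.map")

blocks = checkerboard(dem, 10)
dem_10 = pcr.areaaverage(dem, blocks)  # block-average the DEM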
def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict):
    line = re.sub("\n", "", line)
    line = re.sub("\t", " ", line)
    result = None
    # read until first comment
    content = ""
    content, sep, comment = line.partition("#")
    if len(content) > 1:
        collectionVariableName, sep, tail = content.partition(" ")
        if collectionVariableName == self._varName:
            tail = tail.strip()
            key, sep, variableValue = tail.rpartition(" ")
            if len(key.split()) != nrColumns:
                tmp = re.sub(r"\(|\)|,", "", str(key))
                msg = (
                    "Error reading %s line %d, order of columns given (%s columns) "
                    "does not match expected order of %s columns"
                    % (self._fileName, lineNumber, len(key.split()) + 2,
                       int(nrColumns) + 2)
                )
                raise ValueError(msg)
            variableValue = re.sub('"', "", variableValue)
            tmp = None
            try:
                tmp = int(variableValue)
                if self._dataType == pcraster.Boolean:
                    tmp = pcraster.boolean(tmp)
                elif self._dataType == pcraster.Nominal:
                    tmp = pcraster.nominal(tmp)
                elif self._dataType == pcraster.Ordinal:
                    tmp = pcraster.ordinal(tmp)
                elif self._dataType == pcraster.Ldd:
                    tmp = pcraster.ldd(tmp)
                else:
                    msg = "Conversion to %s failed" % (self._dataType)
                    raise Exception(msg)
            except ValueError:
                try:
                    tmp = float(variableValue)
                    if self._dataType == pcraster.Scalar:
                        tmp = pcraster.scalar(tmp)
                    elif self._dataType == pcraster.Directional:
                        tmp = pcraster.directional(tmp)
                    else:
                        msg = "Conversion to %s failed" % (self._dataType)
                        raise Exception(msg)
                except ValueError:
                    # not a number: interpret the value as a map file path
                    variableValue = re.sub("\\\\", "/", variableValue)
                    variableValue = variableValue.strip()
                    path = os.path.normpath(variableValue)
                    try:
                        tmp = pcraster.readmap(path)
                    except RuntimeError as e:
                        msg = "Error reading %s line %d, %s" % (
                            self._fileName,
                            lineNumber,
                            e,
                        )
                        raise ValueError(msg)
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X

    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
            identified as river should have a value > 0, other pixels a value
            of zero.
        basin=None -- set a boolean pcraster map where areas with True are
            estimated using the nearest drain in ldd distance and areas with
            False by means of the nearest friction distance. Friction distance
            is estimated using the upstream area as weight (i.e. drains with a
            bigger upstream area have a lower friction); the spreadzone
            operator is used in this case.

    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths according
            to D8 directions
    """
    if rivers is None:
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0)
        )
    else:
        stream = pcr.boolean(pcr.cover(rivers, 0))

    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem * 100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        # mask with the derived boolean stream (masking with the possibly-None
        # rivers argument would fail when the stream is derived on the fly)
        drainage_surf = pcr.ifthen(stream, pcr.accuflux(ldd, 1))
        weight = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(drainage_surf), 0), 0, 0)
        )
        up_elevation = pcr.ifthenelse(
            basin,
            pcr.scalar(pcr.subcatchment(ldd, height_river)),
            pcr.scalar(pcr.spreadzone(height_river, 0, weight)),
        )  # replace areas outside of basin by a spread zone calculation.
    hand = pcr.max(pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation, 0) / 100
    dist = pcr.ldddist(ldd, stream, 1)

    return hand, dist
def __init__(self, tssFilename, model, idMap=None, noHeader=False):
    """ """
    if not isinstance(tssFilename, str):
        raise Exception("timeseries output filename must be of type string")
    self._outputFilename = tssFilename
    self._maxId = 1
    self._spatialId = None
    self._spatialDatatype = None
    self._spatialIdGiven = False
    self._userModel = model
    self._writeHeader = not noHeader
    # array to store the timestep values
    self._sampleValues = None
    _idMap = False
    if isinstance(idMap, str) or isinstance(idMap, pcraster._pcraster.Field):
        _idMap = True
    nrRows = self._userModel.nrTimeSteps() - self._userModel.firstTimeStep() + 1
    if _idMap:
        self._spatialId = idMap
        if isinstance(idMap, str):
            self._spatialId = pcraster.readmap(idMap)
        _allowedDataTypes = [pcraster.Nominal, pcraster.Ordinal, pcraster.Boolean]
        if self._spatialId.dataType() not in _allowedDataTypes:
            raise Exception("idMap must be of type Nominal, Ordinal or Boolean")
        if self._spatialId.isSpatial():
            self._maxId, valid = pcraster.cellvalue(
                pcraster.mapmaximum(pcraster.ordinal(self._spatialId)), 1
            )
        else:
            self._maxId = 1
        # cell indices of the sample locations
        self._sampleAddresses = []
        for cellId in range(1, self._maxId + 1):
            self._sampleAddresses.append(self._getIndex(cellId))
        self._spatialIdGiven = True
        nrCols = self._maxId
        self._sampleValues = [[Decimal("NaN")] * nrCols for _ in [0] * nrRows]
    else:
        self._sampleValues = [[Decimal("NaN")] * 1 for _ in [0] * nrRows]
def testCellValueOrdinal(self):
    raster = pcraster.ordinal(self._read_set_clone("areaarea_Class.map"))
    value, isValid = pcraster.cellvalue(raster, 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, int))
    self.assertEqual(value, 2)
    value, isValid = pcraster.cellvalue(raster, 2)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, int))
    self.assertEqual(value, 6)
    value, isValid = pcraster.cellvalue(raster, 5)
    self.assertEqual(isValid, False)
def testCellValueOrdinal(self):
    raster = pcraster.ordinal(pcraster.readmap("areaarea_Class.map"))
    value, isValid = pcraster.cellvalue(raster, 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, int))
    self.assertEqual(value, 2)
    value, isValid = pcraster.cellvalue(raster, 2)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, int))
    self.assertEqual(value, 6)
    value, isValid = pcraster.cellvalue(raster, 5)
    self.assertEqual(isValid, False)
def test_03(self):
    """ cellvalue_by_index Ordinal """
    raster = pcraster.ordinal(self._read_set_clone("areaarea_Class.map"))
    value, isValid = pcraster.cellvalue_by_index(raster, 0)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, int))
    self.assertEqual(value, 2)
    value, isValid = pcraster.cellvalue_by_index(raster, 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, int))
    self.assertEqual(value, 6)
    value, isValid = pcraster.cellvalue_by_index(raster, 4)
    self.assertEqual(isValid, False)
def interpolategauges(inputmap, method):
    """
    Interpolate time series gauge data onto a grid using different methods

    inputmap: map with points data for a single timestep
    method: string indicating the method: "inv" (inverse distance) or
        "pol" (nearest-gauge polygons, which is also the fallback)

    input: inputmap, method
    returns: interpolated map
    """

    if method == "inv":
        result = pcr.inversedistance(1, inputmap, 3, 0, 0)
    elif method == "pol":
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)
    else:
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)

    return result
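# Usage sketch (hypothetical file name): interpolate one timestep of gauge
# values, stored as point values with missing data elsewhere, onto the grid.
import pcraster as pcr

pcr.setclone("gauges.map")
gauge_values = pcr.readmap("gauges.map")

precip = interpolategauges(gauge_values, "inv")  # inverse distance
pcr.report(precip, "precip_grid.map")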
def subcatch(ldd, outlet):
    """
    Determines a subcatchment map using LDD and outlet(s). In the resulting
    subcatchment map the ids of the subcatchments are determined by the ids
    of the outlets.

    Input:
        - ldd
        - outlet - map with points for each outlet.

    Output:
        - map of subcatchments
    """
    subcatch = pcr.subcatchment(ldd, pcr.ordinal(outlet))

    return subcatch
def riverlength(ldd, order):
    """
    Determines the length of a river using the ldd.
    only determined for order and higher.

    Input:
        - ldd, order (streamorder)

    Returns:
        - totallength, lengthpercell, streamorder
    """
    strorder = pcr.streamorder(ldd)
    strorder = pcr.ifthen(strorder >= pcr.ordinal(order), strorder)
    dist = pcr.max(
        pcr.celllength(), pcr.ifthen(pcr.boolean(strorder), pcr.downstreamdist(ldd))
    )

    return pcr.catchmenttotal(pcr.cover(dist, 0), ldd), dist, strorder
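# For example, total upstream river length for streams of order 3 and higher
# (LDD file name hypothetical):
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")

totallength, lengthpercell, strorder = riverlength(ldd, 3)
pcr.report(totallength, "riverlength_total.map")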
def points_to_map(in_map, xcor, ycor, tolerance):
    """
    Returns a map with non zero values at the points defined
    in X, Y pairs. Its goal is to replace the pcraster col2map program.

    tolerance should be 0.5 to select single points
    Performance is not very good and scales linearly with the number of points

    Input:
        - in_map - map to determine coordinates from
        - xcor - x coordinate (array or single value)
        - ycor - y coordinate (array or single value)
        - tolerance - tolerance in cell units. 0.5 selects a single cell,
          10 would select a 10x10 block of cells

    Output:
        - Map with values burned in. 1 for first point, 2 for second and so on
    """
    point = in_map * 0.0

    x = pcr.pcr2numpy(pcr.xcoordinate(pcr.defined(in_map)), np.nan)
    y = pcr.pcr2numpy(pcr.ycoordinate(pcr.defined(in_map)), np.nan)

    cell_length = float(pcr.celllength())

    # simple check to use both floats and numpy arrays
    try:
        c = xcor.ndim
    except AttributeError:
        xcor = np.array([xcor])
        ycor = np.array([ycor])

    # Loop over points and "burn in" map
    for n in range(0, xcor.size):
        if Verbose:
            print(n)
        diffx = x - xcor[n]
        diffy = y - ycor[n]
        col_ = np.absolute(diffx) <= (cell_length * tolerance)  # cellsize
        row_ = np.absolute(diffy) <= (cell_length * tolerance)  # cellsize
        point = point + pcr.numpy2pcr(pcr.Scalar, ((col_ * row_) * (n + 1)), np.nan)

    return pcr.ordinal(point)
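# Sketch: burn two hypothetical gauge locations into the clone geometry;
# tolerance=0.5 selects single cells. Assumes the module-level Verbose flag
# used above is defined.
import numpy as np
import pcraster as pcr

pcr.setclone("dem.map")
dem = pcr.readmap("dem.map")

xs = np.array([5.25, 5.75])  # hypothetical x coordinates
ys = np.array([51.5, 51.9])  # hypothetical y coordinates
gauges = points_to_map(dem, xs, ys, 0.5)
pcr.report(gauges, "gauges.map")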
def testDeepCopyRasterNonSpatial(self):
    pcraster.setclone("validated/boolean_Result.map")
    raster = pcraster.boolean(1)
    tmp = copy.deepcopy(raster)
    self.assertEqual(True, self.arbitraryMapEquals(raster, tmp))
    raster1 = pcraster.nominal(1)
    tmp1 = copy.deepcopy(raster1)
    self.assertEqual(True, self.arbitraryMapEquals(raster1, tmp1))
    raster2 = pcraster.ordinal(1)
    tmp2 = copy.deepcopy(raster2)
    self.assertEqual(True, self.arbitraryMapEquals(raster2, tmp2))
    raster3 = pcraster.scalar(1)
    tmp3 = copy.deepcopy(raster3)
    self.assertEqual(True, self.arbitraryMapEquals(raster3, tmp3))
    raster4 = pcraster.directional(1)
    tmp4 = copy.deepcopy(raster4)
    self.assertEqual(True, self.arbitraryMapEquals(raster4, tmp4))
    raster5 = pcraster.ldd(1)
    tmp5 = copy.deepcopy(raster5)
    self.assertEqual(True, self.arbitraryMapEquals(raster5, tmp5))
def __init__(self, tssFilename, model, idMap=None, noHeader=False,
             save_path=None, period=None, sample_nr=None):
    """
    Method adapted to be able to save to a specific output folder (PAZ)

    save_path=None: folder path in coupled model.
    period=None: periods in between LISEM runs, corresponds to saving folders
    sample_nr=None: Montecarlo sample, coupled Dynamic framework only, use
        for saving
    """
    if not isinstance(tssFilename, str):
        raise Exception("timeseries output filename must be of type string")
    self._outputFilename = tssFilename
    self._maxId = 1
    self._spatialId = None
    self._spatialDatatype = None
    self._spatialIdGiven = False
    self._userModel = model
    self._writeHeader = not noHeader
    # array to store the timestep values
    self._sampleValues = None
    self._save_path = save_path  # PAZ
    self._period = period  # PAZ
    self.sample_nr = sample_nr
    _idMap = False
    if isinstance(idMap, str) or isinstance(idMap, pcraster._pcraster.Field):
        _idMap = True
    nrRows = self._userModel.nrTimeSteps() - self._userModel.firstTimeStep() + 1
    if _idMap:
        self._spatialId = idMap
        if isinstance(idMap, str):
            self._spatialId = pcraster.readmap(idMap)
        _allowedDataTypes = [pcraster.Nominal, pcraster.Ordinal, pcraster.Boolean]
        if self._spatialId.dataType() not in _allowedDataTypes:
            raise Exception("idMap must be of type Nominal, Ordinal or Boolean")
        if self._spatialId.isSpatial():
            self._maxId, valid = pcraster.cellvalue(
                pcraster.mapmaximum(pcraster.ordinal(self._spatialId)), 1
            )
        else:
            self._maxId = 1
        # cell indices of the sample locations
        self._sampleAddresses = []
        for cellId in range(1, self._maxId + 1):
            self._sampleAddresses.append(self._getIndex(cellId))
        self._spatialIdGiven = True
        nrCols = self._maxId
        self._sampleValues = [[Decimal("NaN")] * nrCols for _ in [0] * nrRows]
    else:
        self._sampleValues = [[Decimal("NaN")] * 1 for _ in [0] * nrRows]
def testOrdinal2Nominal(self):
    ordinalMap = pcraster.ordinal(pcraster.readmap("areaarea_Class.map"))
    self.assertEqual(ordinalMap.dataType(), pcraster.VALUESCALE.Ordinal)
    nominalMap = pcraster.nominal(ordinalMap)
    pcraster.report(nominalMap, "nominal.map")
    self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal)
def generate_hydro_datasets(path, output_dir, step):
    print(path)

    file_name = os.path.splitext(os.path.basename(path))[0]
    map_path = output_dir + "/" + file_name + ".map"
    path_prefix = map_path[:-14]

    if step == "ldd":
        cmd = (
            u"gdal_translate -a_nodata -9999 -of PCRaster -ot Float32 "
            + path + " " + map_path
        )
        print(cmd)
        subprocess.call(cmd, shell=True)

    # slope = pcr.slope(dem)
    # pcr.report(slope, path_prefix + '_slope.map')

    # pcr.setglobaloption("lddin")

    if step == "ldd":
        dem = pcr.readmap(map_path)
        print("Computing LDD ...")
        # enable pit filling
        ldd = pcr.lddcreate(dem, 9999999, 9999999, 9999999, 9999999)
        pcr.report(ldd, path_prefix + "_ldd.map")
        return
    elif step == "ldddem":
        dem = pcr.readmap(map_path)
        print("Computing LDD DEM ...")
        dem_pitfilled = pcr.lddcreatedem(dem, 9999999, 9999999, 9999999, 9999999)
        dem_diff = dem_pitfilled - dem
        pcr.report(dem_diff, path_prefix + "_dem_pits_diff.map")
        return

    # print("Computing LDD without pit filling ...")
    # ldd_pits = pcr.lddcreate(dem, 0, 0, 0, 0)
    # pcr.report(ldd_pits, path_prefix + '_ldd_with_pits.map')

    # print("Computing pits ...")
    # pits = pcr.pit(ldd_pits)
    # pcr.report(pits, path_prefix + '_pits.map')

    if step == "fa":
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        print("Computing flow accumulation ...")
        fa = pcr.accuflux(ldd, 1)
        pcr.report(fa, path_prefix + "_fa.map")
        return

    if step == "catchments":
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        print("Delineating catchments ...")
        catchments = pcr.catchment(ldd, pcr.pit(ldd))
        pcr.report(catchments, path_prefix + "_catchments.map")
        return

    if step == "stream_order":
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        print("Computing stream order ...")
        stream_order = pcr.streamorder(ldd)
        pcr.report(stream_order, path_prefix + "_streamorder.map")
        return

    if step == "stream":
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        accuThreshold = 100
        print("Computing stream ...")
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0)
        )
        pcr.report(stream, path_prefix + "_stream.map")
        return

    if step == "height_river":
        print("Computing height_river ...")
        stream = pcr.readmap(path_prefix + "_stream.map")
        dem = pcr.readmap(map_path)
        height_river = pcr.ifthenelse(stream, pcr.ordinal(dem), 0)
        pcr.report(height_river, path_prefix + "_height_river.map")
        return

    if step == "up_elevation":
        print("Computing up_elevation ...")
        height_river = pcr.readmap(path_prefix + "_height_river.map")
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
        pcr.report(up_elevation, path_prefix + "_up_elevation.map")
        return

    if step == "hand":
        print("Computing HAND ...")
        dem = pcr.readmap(map_path)
        up_elevation = pcr.readmap(path_prefix + "_up_elevation.map")
        hand = pcr.max(dem - up_elevation, 0)
        pcr.report(hand, path_prefix + "_hand.map")
        return

    if step == "dand":
        print("Computing DAND ...")
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        stream = pcr.readmap(path_prefix + "_stream.map")
        dist = pcr.ldddist(ldd, stream, 1)
        pcr.report(dist, path_prefix + "_dist.map")
        return

    if step == "fa_river":
        print("Computing FA river ...")
        fa = pcr.readmap(path_prefix + "_fa.map")
        stream = pcr.readmap(path_prefix + "_stream.map")
        fa_river = pcr.ifthenelse(stream, pcr.ordinal(fa), 0)
        pcr.report(fa_river, path_prefix + "_fa_river.map")
        return

    if step == "faand":
        print("Computing FAAND ...")
        fa_river = pcr.readmap(path_prefix + "_fa_river.map")
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        up_fa = pcr.scalar(pcr.subcatchment(ldd, fa_river))
        pcr.report(up_fa, path_prefix + "_faand.map")
        return
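# The steps above build on each other's reported maps, so a full HAND run
# chains them in order (paths hypothetical; the input is a GeoTIFF DEM):
steps = ["ldd", "fa", "stream", "height_river", "up_elevation", "hand", "dand"]
for step in steps:
    generate_hydro_datasets("data/srtm_dem_tile.tif", "output", step)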
def testNominal2Ordinal(self):
    pcraster.setclone("areaarea_Class.map")
    nominalMap = pcraster.readmap("areaarea_Class.map")
    self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal)
    ordinalMap = pcraster.ordinal(nominalMap)
    self.assertEqual(ordinalMap.dataType(), pcraster.VALUESCALE.Ordinal)
def subcatch_stream(ldd, threshold, stream=None, min_strahler=-999, max_strahler=999,
                    assign_edge=False, assign_existing=False, up_area=None,
                    basin=None):
    """
    Derive catchments based upon strahler threshold

    Input:
        ldd -- pcraster object direction, local drain directions
        threshold -- integer, strahler threshold, subcatchments ge threshold
            are derived
        stream=None -- pcraster object ordinal, stream order map (made with
            pcr.streamorder); if provided, the stream order map is not
            generated on the fly but used from this map. Useful when a
            subdomain within a catchment is provided, which would cause edge
            effects in the stream order map.
        min_strahler=-999 -- integer, minimum strahler threshold of river
            catchments to return
        max_strahler=999 -- integer, maximum strahler threshold of river
            catchments to return
        assign_edge=False -- if set to True, unassigned connected areas at the
            edges of the domain are assigned a unique id as well. If set to
            False, edges are not assigned.
        assign_existing=False -- if set to True, unassigned edges are assigned
            to existing basins with an upstream weighting. If set to False,
            edges are assigned to unique IDs, or not assigned.

    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold
    """
    # derive stream order
    if stream is None:
        stream = pcr.streamorder(ldd)

    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge,
        pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5,
            pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) > pcr.scalar(stream_ge),
                pcr.boolean(1),
                pcr.boolean(0),
            ),
        ),
    )
    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))

    # mask out areas outside basin
    if basin is not None:
        subcatch = pcr.ifthen(basin, subcatch)

    if assign_edge:
        # fill unclassified areas (in pcraster equal to zero) with a unique
        # id, above the maximum id assigned so far
        unique_edge = pcr.clump(pcr.ifthen(subcatch == 0, pcr.ordinal(0)))
        subcatch = pcr.ifthenelse(
            subcatch == 0,
            pcr.nominal(pcr.mapmaximum(pcr.scalar(subcatch)) + pcr.scalar(unique_edge)),
            pcr.nominal(subcatch),
        )
    elif assign_existing:
        # unaccounted areas are added to largest nearest draining basin
        if up_area is None:
            up_area = pcr.ifthen(
                pcr.boolean(pcr.cover(stream_ge, 0)), pcr.accuflux(ldd, 1)
            )
        riverid = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)), subcatch)

        friction = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0)
        )  # *(pcr.scalar(ldd)*0+1)
        delta = pcr.ifthen(
            pcr.scalar(ldd) >= 0,
            pcr.ifthen(
                pcr.cover(subcatch, 0) == 0,
                pcr.spreadzone(pcr.cover(riverid, 0), 0, friction),
            ),
        )
        subcatch = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)), subcatch, delta)

    # finally, only keep basins with minimum and maximum river order flowing
    # through them
    strahler_subcatch = pcr.areamaximum(stream, subcatch)
    subcatch = pcr.ifthen(
        pcr.ordinal(strahler_subcatch) >= min_strahler,
        pcr.ifthen(pcr.ordinal(strahler_subcatch) <= max_strahler, subcatch),
    )

    return stream_ge, pcr.ordinal(subcatch)
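# Sketch for the extended variant: assign unclassified edge areas to existing
# basins and keep only basins whose largest stream is order 4-6 (file name
# hypothetical).
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")

stream_ge, subcatch = subcatch_stream(
    ldd, threshold=4, min_strahler=4, max_strahler=6, assign_existing=True
)
pcr.report(subcatch, "subcatch_4_6.map")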
def subcatch_order_b(
    ldd, oorder, sizelimit=0, fill=False, fillcomplete=False, stoporder=0
):
    """
    Determines subcatchments using the catchment order

    This version tries to keep the number of upstream/downstream catchments
    small by first deriving the catchments connected to the major river (the
    given order) and filling up from there.

    Input:
        - ldd
        - oorder - order to use
        - sizelimit - smallest catchments to include, default is all
          (sizelimit=0) in number of cells
        - if fill is set to True the higher order catchments are filled also
        - if fillcomplete is set to True the whole ldd is filled with
          catchments.

    :returns sc, dif, nldd; Subcatchment, Points, subcatchldd
    """
    # outl = find_outlet(ldd)
    # large = pcr.subcatchment(ldd,pcr.boolean(outl))

    if stoporder == 0:
        stoporder = oorder

    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    maxorder = pcraster.framework.getCellValue(pcr.mapmaximum(stt), 1, 1)
    dif = pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts)))

    if fill:
        for order in range(oorder, maxorder):
            m_pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(order)) > 0.0, sttd)
            m_dif = pcr.uniqueid(
                pcr.boolean(pcr.ifthen(stt == pcr.ordinal(order), m_pts))
            )
            dif = pcr.uniqueid(pcr.boolean(pcr.cover(m_dif, dif)))

        for myorder in range(oorder - 1, stoporder, -1):
            sc = pcr.subcatchment(ldd, pcr.nominal(dif))
            m_pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
            m_dif = pcr.uniqueid(
                pcr.boolean(pcr.ifthen(stt == pcr.ordinal(myorder - 1), m_pts))
            )
            dif = pcr.uniqueid(
                pcr.boolean(pcr.cover(pcr.ifthen(pcr.scalar(sc) == 0, m_dif), dif))
            )

        if fillcomplete:
            sc = pcr.subcatchment(ldd, pcr.nominal(dif))
            cs, m_dif, stt = subcatch_order_a(ldd, stoporder)
            dif = pcr.uniqueid(
                pcr.boolean(
                    pcr.cover(
                        pcr.ifthen(pcr.scalar(sc) == 0, pcr.ordinal(m_dif)),
                        pcr.ordinal(dif),
                    )
                )
            )

    scsize = pcr.catchmenttotal(1, ldd)
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(pcr.ifthen(scsize >= sizelimit, dif))))
    sc = pcr.subcatchment(ldd, dif)

    # Make pit ldd
    nldd = pcr.lddrepair(pcr.ifthenelse(pcr.cover(dif, 0) > 0, 5, ldd))

    return sc, dif, nldd
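# Minimal call sketch: split at order-5 confluences, fill the higher-order
# catchments too, and drop catchments smaller than 500 cells (LDD name
# hypothetical).
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")

sc, outlets, nldd = subcatch_order_b(ldd, 5, sizelimit=500, fill=True)
pcr.report(sc, "subcatch_order5.map")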
def testNominal2Ordinal(self):
    nominalMap = pcraster.readmap("areaarea_Class.map")
    self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal)
    ordinalMap = pcraster.ordinal(nominalMap)
    self.assertEqual(ordinalMap.dataType(), pcraster.VALUESCALE.Ordinal)
def main():
    ### Read input arguments #####
    logfilename = 'wtools_static_maps.log'
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q', '--quiet',
                      dest='verbose', default=True, action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i', '--ini', dest='inifile', default=None,
                      help='ini file with settings for static_maps.exe')
    parser.add_option('-s', '--source', dest='source', default='wflow',
                      help='Source folder containing clone (default=./wflow)')
    parser.add_option('-d', '--destination', dest='destination',
                      default='staticmaps',
                      help='Destination folder (default=./staticmaps)')
    parser.add_option('-r', '--river', dest='rivshp', default=None,
                      help='river network polyline layer (ESRI Shapefile)')
    parser.add_option('-c', '--catchment', dest='catchshp', default=None,
                      help='catchment polygon layer (ESRI Shapefile)')
    parser.add_option('-g', '--gauges', dest='gaugeshp', default=None,
                      help='gauge point layer (ESRI Shapefile)')
    parser.add_option('-D', '--dem', dest='dem_in', default=None,
                      help='digital elevation model (GeoTiff)')
    parser.add_option('-L', '--landuse', dest='landuse', default=None,
                      help='land use / land cover layer (GeoTiff)')
    parser.add_option('-S', '--soiltype', dest='soil', default=None,
                      help='soil type layer (GeoTiff)')
    parser.add_option('-V', '--vegetation', dest='lai', default=None,
                      help='vegetation LAI layer location (containing 12 GeoTiffs <LAI00000.XXX.tif>)')
    parser.add_option('-O', '--other_maps', dest='other_maps', default=None,
                      help='bracketed [] comma-separated list of paths to other maps that should be reprojected')
    parser.add_option('-C', '--clean', dest='clean', default=False,
                      action='store_true',
                      help='Clean the .xml files from static maps folder when finished')
    parser.add_option('-A', '--alltouch', dest='alltouch', default=False,
                      action='store_true',
                      help='option to burn catchments "all touching".\nUseful when catchment-size is small compared to cellsize')
    (options, args) = parser.parse_args()
    # parse other maps into an array
    options.other_maps = options.other_maps.replace(
        ' ', '').replace('[', '').replace(']', '').split(',')

    options.source = os.path.abspath(options.source)
    clone_map = os.path.join(options.source, 'mask.map')
    clone_shp = os.path.join(options.source, 'mask.shp')
    clone_prj = os.path.join(options.source, 'mask.prj')

    if None in (options.inifile, options.rivshp, options.catchshp, options.dem_in):
        msg = """The following files are compulsory:
        - ini file
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        parser.print_help()
        sys.exit(1)
    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    if not os.path.exists(options.rivshp):
        print('path to river shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.catchshp):
        print('path to catchment shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.dem_in):
        print('path to DEM cannot be found')
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wtools_lib.setlogger(logfilename, 'WTOOLS', options.verbose)

    # create directories
    # TODO: check if workdir is still necessary, try to keep in memory as
    # much as possible

    # delete old files (when the source and destination folder are different)
    if np.logical_and(os.path.isdir(options.destination),
                      options.destination is not options.source):
        shutil.rmtree(options.destination)
    if options.destination is not options.source:
        os.makedirs(options.destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            'Clone file {:s} not found. Please run create_grid first.'.format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = gis.gdal_readmap(clone_map, 'GTiff')
        trans = wtools_lib.get_geotransform(clone_map)
        extent = wtools_lib.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wtools_lib.get_projection(clone_map)
        unit_clone = srs.GetAttrValue('UNIT').lower()

    ### READ CONFIG FILE
    # open config-file
    config = wtools_lib.OpenConf(options.inifile)

    # read settings
    snapgaugestoriver = wtools_lib.configget(config, 'settings',
                                             'snapgaugestoriver', True,
                                             datatype='boolean')
    burnalltouching = wtools_lib.configget(config, 'settings',
                                           'burncatchalltouching', True,
                                           datatype='boolean')
    burninorder = wtools_lib.configget(config, 'settings',
                                       'burncatchalltouching', False,
                                       datatype='boolean')
    verticetollerance = wtools_lib.configget(config, 'settings',
                                             'vertice_tollerance', 0.0001,
                                             datatype='float')

    ''' read parameters '''
    burn_outlets = wtools_lib.configget(config, 'parameters', 'burn_outlets',
                                        10000, datatype='int')
    burn_rivers = wtools_lib.configget(config, 'parameters', 'burn_rivers',
                                       200, datatype='int')
    burn_connections = wtools_lib.configget(config, 'parameters',
                                            'burn_connections', 100,
                                            datatype='int')
    burn_gauges = wtools_lib.configget(config, 'parameters', 'burn_gauges',
                                       100, datatype='int')
    minorder = wtools_lib.configget(config, 'parameters', 'riverorder_min', 3,
                                    datatype='int')
    percentiles = np.array(
        config.get('parameters', 'statisticmaps',
                   '0, 100').replace(' ', '').split(','),
        dtype='float')

    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == 'degree':
        cellsize_hr = wtools_lib.configget(config, 'parameters',
                                           'highres_degree', 0.0005,
                                           datatype='float')
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = wtools_lib.configget(config, 'parameters',
                                           'highres_metre', 50,
                                           datatype='float')

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0),
                float(ymax), 0, -cellsize_hr)
    clone_hr = os.path.join(options.destination, 'clone_highres.tif')
    # make a highres clone as well!
    wtools_lib.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    catchment_map = wtools_lib.configget(config, 'staticmaps', 'catchment',
                                         'wflow_catchment.map')
    dem_map = wtools_lib.configget(config, 'staticmaps', 'dem',
                                   'wflow_dem.map')
    demmax_map = wtools_lib.configget(config, 'staticmaps', 'demmax',
                                      'wflow_demmax.map')
    demmin_map = wtools_lib.configget(config, 'staticmaps', 'demmin',
                                      'wflow_demmin.map')
    gauges_map = wtools_lib.configget(config, 'staticmaps', 'gauges',
                                      'wflow_gauges.map')
    landuse_map = wtools_lib.configget(config, 'staticmaps', 'landuse',
                                       'wflow_landuse.map')
    ldd_map = wtools_lib.configget(config, 'staticmaps', 'ldd',
                                   'wflow_ldd.map')
    river_map = wtools_lib.configget(config, 'staticmaps', 'river',
                                     'wflow_river.map')
    outlet_map = wtools_lib.configget(config, 'staticmaps', 'outlet',
                                      'wflow_outlet.map')
    riverlength_fact_map = wtools_lib.configget(config, 'staticmaps',
                                                'riverlength_fact',
                                                'wflow_riverlength_fact.map')
    soil_map = wtools_lib.configget(config, 'staticmaps', 'soil',
                                    'wflow_soil.map')
    streamorder_map = wtools_lib.configget(config, 'staticmaps',
                                           'streamorder',
                                           'wflow_streamorder.map')
    subcatch_map = wtools_lib.configget(config, 'staticmaps', 'subcatch',
                                        'wflow_subcatch.map')

    # read mask location (optional)
    masklayer = wtools_lib.configget(config, 'mask', 'masklayer',
                                     options.catchshp)

    # ????
    empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones
    # (l. 123 in old code)
    # first add a missing value to dem_in
    ds = gdal.Open(options.dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()
    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info('Resampling dem from {:s} to {:s}'.format(
        os.path.abspath(options.dem_in),
        os.path.join(options.destination, dem_map)))
    gis.gdal_warp(options.dem_in, clone_map,
                  os.path.join(options.destination, dem_map),
                  format='PCRaster', gdal_interp=gdalconst.GRA_Average)
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different
    # projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wtools_lib.get_projection(options.dem_in)
    except:
        logger.warning('No projection found in DEM, assuming WGS 1984 lat long')
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    # if srs.ExportToProj4() == srs_dem.ExportToProj4():
    for percentile in percentiles:
        if percentile >= 100:
            logger.info('computing window maximum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_max.map')
        elif percentile <= 0:
            logger.info('computing window minimum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_min.map')
        else:
            logger.info('computing window {:d} percentile'.format(
                int(percentile)))
            percentile_dem = os.path.join(
                options.destination,
                'wflow_dem_{:03d}.map'.format(int(percentile)))
        stats = wtools_lib.windowstats(options.dem_in, len(yax), len(xax),
                                       trans, srs, percentile_dem, percentile,
                                       transform=clone2dem_transform,
                                       logger=logger)
    # else:
    #     logger.warning('Projections of DEM and clone are different. DEM
    #     statistics for different projections is not yet implemented')

    """
    # burn in rivers
    # first convert and clip the river shapefile
    # retrieve river shape projection, if not available assume EPSG:4326
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_rivshp = lyr.GetSpatialRef()
        logger.info('Projection in river shapefile is {:s}'.format(srs_rivshp.ExportToProj4()))
    except:
        logger.warning('No projection found in {:s}, assuming WGS 1984 lat-lon'.format(options.rivshp))
        srs_rivshp = osr.SpatialReference()
        srs_rivshp.ImportFromEPSG(4326)
    rivprojshp = os.path.join(options.destination, 'rivshp_proj.shp')
    logger.info('Projecting and clipping {:s} to {:s}'.format(options.rivshp, rivprojshp))
    # TODO: Line below takes a very long time to process, the bigger the shapefile, the more time. How do we deal with this?
    call(('ogr2ogr','-s_srs', srs_rivshp.ExportToProj4(),'-t_srs', srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin), '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax), rivprojshp, options.rivshp))
    """

    # TODO: BURNING!!

    # project catchment layer to projection of clone
    file_att = os.path.splitext(os.path.basename(options.catchshp))[0]
    print(options.catchshp)
    ds = ogr.Open(options.catchshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_catchshp = lyr.GetSpatialRef()
        logger.info('Projection in catchment shapefile is {:s}'.format(
            srs_catchshp.ExportToProj4()))
    except:
        logger.warning(
            'No projection found in {:s}, assuming WGS 1984 lat-lon'.format(
                options.catchshp))
        srs_catchshp = osr.SpatialReference()
        srs_catchshp.ImportFromEPSG(4326)

    catchprojshp = os.path.join(options.destination, 'catchshp_proj.shp')
    logger.info('Projecting {:s} to {:s}'.format(options.catchshp,
                                                 catchprojshp))
    call(('ogr2ogr', '-s_srs', srs_catchshp.ExportToProj4(), '-t_srs',
          srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin),
          '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax),
          catchprojshp, options.catchshp))

    #
    logger.info('Calculating ldd')
    ldddem = pcr.readmap(os.path.join(options.destination, dem_map))
    ldd_select = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_select, os.path.join(options.destination, 'wflow_ldd.map'))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(ldd_select))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river
    # with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(options.destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(options.destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(options.destination, dem_map))
    pcr.report(streamorder, os.path.join(options.destination,
                                         streamorder_map))
    pcr.report(river, os.path.join(options.destination, river_map))

    # deal with your catchments
    if options.gaugeshp == None:
        logger.info('No gauges defined, using outlets instead')
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(ldd_select) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(options.destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see SticMaps.py, line 492-499)

    # report river length
    # make a high resolution empty map
    dem_hr_file = os.path.join(options.destination, 'dem_highres.tif')
    burn_hr_file = os.path.join(options.destination, 'burn_highres.tif')
    demburn_hr_file = os.path.join(options.destination, 'demburn_highres.map')
    riv_hr_file = os.path.join(options.destination, 'riv_highres.map')
    gis.gdal_warp(options.dem_in, clone_hr, dem_hr_file)
    # wtools_lib.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    # open the shape layer
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    gis.ogr_burn(lyr, clone_hr, -100, file_out=burn_hr_file, format='GTiff',
                 gdal_type=gdal.GDT_Float32, fill_value=0)
    # read dem and burn values and add
    xax_hr, yax_hr, burn_hr, fill = gis.gdal_readmap(burn_hr_file, 'GTiff')
    burn_hr[burn_hr == fill] = 0
    xax_hr, yax_hr, dem_hr, fill = gis.gdal_readmap(dem_hr_file, 'GTiff')
    dem_hr[dem_hr == fill] = np.nan
    demburn_hr = dem_hr + burn_hr
    demburn_hr[np.isnan(demburn_hr)] = -9999
    gis.gdal_writemap(demburn_hr_file, 'PCRaster', xax_hr, yax_hr,
                      demburn_hr, -9999.)
    pcr.setclone(demburn_hr_file)
    demburn_hr = pcr.readmap(demburn_hr_file)
    ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_hr, os.path.join(options.destination, 'ldd_hr.map'))
    pcr.setglobaloption('unitcell')
    riv_hr = pcr.scalar(
        pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
    pcr.report(riv_hr, riv_hr_file)
    pcr.setglobaloption('unittrue')
    pcr.setclone(clone_map)
    logger.info('Computing river length')
    # riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wtools_lib.windowstats(
        riv_hr_file, len(yax), len(xax), trans, srs,
        os.path.join(options.destination, riverlength_fact_map),
        stat='fact', logger=logger)
    # TODO: nothing happens with the river lengths yet. Need to decide how to
    # use these

    # report outlet map
    pcr.report(pcr.ifthen(pcr.ordinal(ldd_select) == 5, pcr.ordinal(1)),
               os.path.join(options.destination, outlet_map))

    # report subcatchment map
    subcatchment = pcr.subcatchment(ldd_select, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(options.destination, subcatch_map))

    # Report land use map
    if options.landuse == None:
        logger.info('No land use map used. Preparing {:s} with only ones.'.format(
            os.path.join(options.destination, landuse_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, landuse_map))
    else:
        logger.info('Resampling land use from {:s} to {:s}'.format(
            os.path.abspath(options.landuse),
            os.path.join(options.destination, os.path.abspath(landuse_map))))
        gis.gdal_warp(options.landuse, clone_map,
                      os.path.join(options.destination, landuse_map),
                      format='PCRaster', gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    # report soil map
    if options.soil == None:
        logger.info('No soil map used. Preparing {:s} with only ones.'.format(
            os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        logger.info('Resampling soil from {:s} to {:s}'.format(
            os.path.abspath(options.soil),
            os.path.join(options.destination, os.path.abspath(soil_map))))
        gis.gdal_warp(options.soil, clone_map,
                      os.path.join(options.destination, soil_map),
                      format='PCRaster', gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    if options.lai == None:
        logger.info(
            'No vegetation LAI maps used. Preparing default maps {:s} with only ones.'.format(
                os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        dest_lai = os.path.join(options.destination, 'clim')
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(options.lai,
                                  'LAI00000.{:03d}'.format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   'LAI00000.{:03d}'.format(month + 1))
            logger.info('Resampling vegetation LAI from {:s} to {:s}'.format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            gis.gdal_warp(lai_in, clone_map, lai_out, format='PCRaster',
                          gdal_interp=gdalconst.GRA_Bilinear,
                          gdal_type=gdalconst.GDT_Float32)

    # report other maps
    if options.other_maps == None:
        logger.info('No other maps used. Skipping other maps.')
    else:
        logger.info('Resampling list of other maps...')
        for map_file in options.other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info('Resampling a map from {:s} to {:s}'.format(
                os.path.abspath(map_file),
                os.path.join(options.destination, map_name)))
            gis.gdal_warp(map_file, clone_map,
                          os.path.join(options.destination, map_name),
                          format='PCRaster', gdal_interp=gdalconst.GRA_Mode,
                          gdal_type=gdalconst.GDT_Float32)

    if options.clean:
        wtools_lib.DeleteList(
            glob.glob(os.path.join(options.destination, '*.xml')),
            logger=logger)
        wtools_lib.DeleteList(
            glob.glob(os.path.join(options.destination, 'clim', '*.xml')),
            logger=logger)
        wtools_lib.DeleteList(
            glob.glob(os.path.join(options.destination, '*highres*')),
            logger=logger)
def main( source, destination, inifile, dem_in, rivshp, catchshp, gaugeshp=None, landuse=None, soil=None, lai=None, other_maps=None, logfilename="wtools_static_maps.log", verbose=True, clean=True, alltouch=False, outlets=([], []), ): # parse other maps into an array if other_maps is not None: if isinstance(other_maps, str): print(other_maps) other_maps = (other_maps.replace(" ", "").replace("[", "").replace( "]", "").split(",")) source = os.path.abspath(source) clone_map = os.path.join(source, "mask.map") clone_shp = os.path.join(source, "mask.shp") clone_prj = os.path.join(source, "mask.prj") if None in (rivshp, catchshp, dem_in): msg = """The following files are compulsory: - DEM (raster) - river (shape) - catchment (shape) """ print(msg) parser.print_help() sys.exit(1) if (inifile is not None) and (not os.path.exists(inifile)): print("path to ini file cannot be found") sys.exit(1) if not os.path.exists(rivshp): print("path to river shape cannot be found") sys.exit(1) if not os.path.exists(catchshp): print("path to catchment shape cannot be found") sys.exit(1) if not os.path.exists(dem_in): print("path to DEM cannot be found") sys.exit(1) # open a logger, dependent on verbose print to screen or not logger, ch = wt.setlogger(logfilename, "WTOOLS", verbose) # create directories # TODO: check if workdir is still necessary, try to # keep in memory as much as possible # delete old files (when the source and destination folder are different) if os.path.isdir(destination) and destination != source: shutil.rmtree(destination) if destination != source: os.makedirs(destination) # Read mask if not (os.path.exists(clone_map)): logger.error( "Clone file {:s} not found. Please run create_grid first.".format( clone_map)) sys.exit(1) else: # set clone pcr.setclone(clone_map) # get the extent from clone.tif xax, yax, clone, fill_value = wt.gdal_readmap(clone_map, "GTiff") trans = wt.get_geotransform(clone_map) extent = wt.get_extent(clone_map) xmin, ymin, xmax, ymax = extent zeros = np.zeros(clone.shape) ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999) # get the projection from clone.tif srs = wt.get_projection(clone_map) unit_clone = srs.GetAttrValue("UNIT").lower() # READ CONFIG FILE # open config-file if inifile is None: config = ConfigParser.SafeConfigParser() config.optionxform = str else: config = wt.OpenConf(inifile) # read settings snapgaugestoriver = wt.configget(config, "settings", "snapgaugestoriver", True, datatype="boolean") burnalltouching = wt.configget(config, "settings", "burncatchalltouching", True, datatype="boolean") burninorder = wt.configget(config, "settings", "burncatchalltouching", False, datatype="boolean") verticetollerance = wt.configget(config, "settings", "vertice_tollerance", 0.0001, datatype="float") """ read parameters """ burn_outlets = wt.configget(config, "parameters", "burn_outlets", 10000, datatype="int") burn_rivers = wt.configget(config, "parameters", "burn_rivers", 200, datatype="int") burn_connections = wt.configget(config, "parameters", "burn_connections", 100, datatype="int") burn_gauges = wt.configget(config, "parameters", "burn_gauges", 100, datatype="int") minorder = wt.configget(config, "parameters", "riverorder_min", 3, datatype="int") try: percentiles = np.array( config.get("parameters", "statisticmaps", "0, 100").replace(" ", "").split(","), dtype="float", ) except ConfigParser.NoOptionError: percentiles = [0.0, 100.0] # read the parameters for generating a temporary very high resolution grid if unit_clone == "degree": 
cellsize_hr = wt.configget(config, "parameters", "highres_degree", 0.0005, datatype="float") elif (unit_clone == "metre") or (unit_clone == "meter"): cellsize_hr = wt.configget(config, "parameters", "highres_metre", 50, datatype="float") cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2) rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2) hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0, -cellsize_hr) clone_hr = os.path.join(destination, "clone_highres.tif") # make a highres clone as well! wt.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0) # read staticmap locations catchment_map = wt.configget(config, "staticmaps", "catchment", "wflow_catchment.map") dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map") demmax_map = wt.configget(config, "staticmaps", "demmax", "wflow_demmax.map") demmin_map = wt.configget(config, "staticmaps", "demmin", "wflow_demmin.map") gauges_map = wt.configget(config, "staticmaps", "gauges", "wflow_gauges.map") landuse_map = wt.configget(config, "staticmaps", "landuse", "wflow_landuse.map") ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map") river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map") outlet_map = wt.configget(config, "staticmaps", "outlet", "wflow_outlet.map") riverlength_fact_map = wt.configget(config, "staticmaps", "riverlength_fact", "wflow_riverlength_fact.map") soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map") streamorder_map = wt.configget(config, "staticmaps", "streamorder", "wflow_streamorder.map") subcatch_map = wt.configget(config, "staticmaps", "subcatch", "wflow_subcatch.map") # read mask location (optional) masklayer = wt.configget(config, "mask", "masklayer", catchshp) # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0)) # TODO: check if extents are correct this way # TODO: check what the role of missing values is in zeros and ones (l. 123 # in old code) # first add a missing value to dem_in ds = gdal.Open(dem_in, gdal.GA_Update) RasterBand = ds.GetRasterBand(1) fill_val = RasterBand.GetNoDataValue() if fill_val is None: RasterBand.SetNoDataValue(-9999) ds = None # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python # resample DEM logger.info("Resampling dem from {:s} to {:s}".format( os.path.abspath(dem_in), os.path.join(destination, dem_map))) wt.gdal_warp( dem_in, clone_map, os.path.join(destination, dem_map), format="PCRaster", gdal_interp=gdalconst.GRA_Average, ) # retrieve amount of rows and columns from clone # TODO: make windowstats applicable to source/target with different projections. This does not work yet. 
# retrieve srs from DEM try: srs_dem = wt.get_projection(dem_in) except Exception: logger.warning( "No projection found in DEM, assuming WGS 1984 lat long") srs_dem = osr.SpatialReference() srs_dem.ImportFromEPSG(4326) clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem) # if srs.ExportToProj4() == srs_dem.ExportToProj4(): wt.windowstats( dem_in, len(yax), len(xax), trans, srs, destination, percentiles, transform=clone2dem_transform, logger=logger, ) ## read catchment shape-file to create catchment map src = rasterio.open(clone_map) shapefile = fiona.open(catchshp, "r") catchment_shapes = [feature["geometry"] for feature in shapefile] image = features.rasterize(catchment_shapes, out_shape=src.shape, all_touched=True, transform=src.transform) catchment_domain = pcr.numpy2pcr(pcr.Ordinal, image.copy(), 0) ## read river shape-file and create burn layer shapefile = fiona.open(rivshp, "r") river_shapes = [feature["geometry"] for feature in shapefile] image = features.rasterize(river_shapes, out_shape=src.shape, all_touched=False, transform=src.transform) rivers = pcr.numpy2pcr(pcr.Nominal, image.copy(), 0) riverdem = pcr.scalar(rivers) * pcr.readmap( os.path.join(destination, dem_map)) pcr.setglobaloption("lddin") riverldd = pcr.lddcreate(riverdem, 1e35, 1e35, 1e35, 1e35) riveroutlet = pcr.cover( pcr.ifthen(pcr.scalar(riverldd) == 5, pcr.scalar(1000)), 0) burn_layer = pcr.cover( (pcr.scalar( pcr.ifthen( pcr.streamorder(riverldd) > 1, pcr.streamorder(riverldd))) - 1) * 1000 + riveroutlet, 0, ) outlets_x, outlets_y = outlets n_outlets = len(outlets_x) logger.info("Number of outlets: {}".format(n_outlets)) if n_outlets >= 1: outlets_map_numbered = tr.points_to_map(pcr.scalar(0), outlets_x, outlets_y, 0.5) outlets_map = pcr.boolean(outlets_map_numbered) # snap outlets to closest river (max 1 cell closer to river) outlets_map = pcr.boolean( pcr.cover(tr.snaptomap(pcr.ordinal(outlets_map), rivers), 0)) ## create ldd per catchment logger.info("Calculating ldd") ldddem = pcr.scalar(clone_map) # per subcatchment, burn dem, then create a modified dem that fits the ldd of the subcatchment # this ldd dem is merged over catchments, to create a global ldd that abides by the subcatchment boundaries for idx, shape in enumerate(catchment_shapes): logger.info("Computing ldd for catchment " + str(idx + 1) + "/" + str(len(catchment_shapes))) image = features.rasterize([shape], out_shape=src.shape, all_touched=True, transform=src.transform) catchment = pcr.numpy2pcr(pcr.Scalar, image.copy(), 0) dem_burned_catchment = ( pcr.readmap(os.path.join(destination, dem_map)) * pcr.scalar(catchment_domain) * catchment) - burn_layer # ldddem_catchment = pcr.lddcreatedem( # dem_burned_catchment, 1e35, 1e35, 1e35, 1e35) ldddem = pcr.cover(ldddem, dem_burned_catchment) pcr.report(ldddem, os.path.join(destination, "ldddem.map")) wflow_ldd = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35) if n_outlets >= 1: # set outlets to pit wflow_ldd = pcr.ifthenelse(outlets_map, pcr.ldd(5), wflow_ldd) wflow_ldd = pcr.lddrepair(wflow_ldd) pcr.report(wflow_ldd, os.path.join(destination, "wflow_ldd.map")) # compute stream order, identify river cells streamorder = pcr.ordinal(pcr.streamorder(wflow_ldd)) river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1)) # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out! 
# mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(destination, dem_map)),9999999))) # dem_resample_map = pcr.cover(os.path.join(destination, dem_map), pcr.scalar(river)*0+mindem) # pcr.report(dem_resample_map, os.path.join(destination, dem_map)) pcr.report(streamorder, os.path.join(destination, streamorder_map)) pcr.report(river, os.path.join(destination, river_map)) # deal with your catchments if gaugeshp is None: logger.info("No gauges defined, using outlets instead") gauges = pcr.ordinal( pcr.uniqueid( pcr.boolean( pcr.ifthen(pcr.scalar(wflow_ldd) == 5, pcr.boolean(1))))) pcr.report(gauges, os.path.join(destination, gauges_map)) # TODO: Add the gauge shape code from StaticMaps.py (line 454-489) # TODO: add river length map (see StaticMaps.py, line 492-499) # since the products here (river length fraction) are not yet used # this is disabled for now, as it also takes a lot of computation time if False: # report river length # make a high resolution empty map dem_hr_file = os.path.join(destination, "dem_highres.tif") burn_hr_file = os.path.join(destination, "burn_highres.tif") demburn_hr_file = os.path.join(destination, "demburn_highres.map") riv_hr_file = os.path.join(destination, "riv_highres.map") wt.gdal_warp(dem_in, clone_hr, dem_hr_file) # wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0) # open the shape layer ds = ogr.Open(rivshp) lyr = ds.GetLayer(0) wt.ogr_burn( lyr, clone_hr, -100, file_out=burn_hr_file, format="GTiff", gdal_type=gdal.GDT_Float32, fill_value=0, ) # read dem and burn values and add xax_hr, yax_hr, burn_hr, fill = wt.gdal_readmap(burn_hr_file, "GTiff") burn_hr[burn_hr == fill] = 0 xax_hr, yax_hr, dem_hr, fill = wt.gdal_readmap(dem_hr_file, "GTiff") dem_hr[dem_hr == fill] = np.nan demburn_hr = dem_hr + burn_hr demburn_hr[np.isnan(demburn_hr)] = -9999 wt.gdal_writemap(demburn_hr_file, "PCRaster", xax_hr, yax_hr, demburn_hr, -9999.) pcr.setclone(demburn_hr_file) demburn_hr = pcr.readmap(demburn_hr_file) logger.info("Calculating ldd to determine river length") ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35) pcr.report(ldd_hr, os.path.join(destination, "ldd_hr.map")) pcr.setglobaloption("unitcell") riv_hr = pcr.scalar( pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr) pcr.report(riv_hr, riv_hr_file) pcr.setglobaloption("unittrue") pcr.setclone(clone_map) logger.info("Computing river length") wt.windowstats( riv_hr_file, len(yax), len(xax), trans, srs, destination, stat="fact", transform=False, logger=logger, ) # TODO: nothing happens with the river lengths yet. Need to decide how to use these # report outlet map pcr.report( pcr.ifthen(pcr.ordinal(wflow_ldd) == 5, pcr.ordinal(1)), os.path.join(destination, outlet_map), ) # report subcatchment map subcatchment = pcr.subcatchment(wflow_ldd, gauges) pcr.report(pcr.ordinal(subcatchment), os.path.join(destination, subcatch_map)) # Report land use map if landuse is None: logger.info( "No land use map used. Preparing {:s} with only ones.".format( os.path.join(destination, landuse_map))) pcr.report(pcr.nominal(ones), os.path.join(destination, landuse_map)) else: logger.info("Resampling land use from {:s} to {:s}".format( os.path.abspath(landuse), os.path.join(destination, landuse_map), )) wt.gdal_warp( landuse, clone_map, os.path.join(destination, landuse_map), format="PCRaster", gdal_interp=gdalconst.GRA_Mode, gdal_type=gdalconst.GDT_Int32, ) # report soil map if soil is None: logger.info("No soil map used. Preparing {:s} with only ones.".format( os.path.join(destination, soil_map))) pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map)) else: logger.info("Resampling soil from {:s} to {:s}".format( os.path.abspath(soil), os.path.join(destination, soil_map), )) wt.gdal_warp( soil, clone_map, os.path.join(destination, soil_map), format="PCRaster", gdal_interp=gdalconst.GRA_Mode, gdal_type=gdalconst.GDT_Int32, ) if lai is None: logger.info( "No vegetation LAI maps used. Preparing default maps {:s} with only ones." .format(os.path.join(destination, soil_map))) pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map)) else: dest_lai = os.path.join(destination, "clim") os.makedirs(dest_lai) for month in range(12): lai_in = os.path.join(lai, "LAI00000.{:03d}".format(month + 1)) lai_out = os.path.join(dest_lai, "LAI00000.{:03d}".format(month + 1)) logger.info("Resampling vegetation LAI from {:s} to {:s}".format( os.path.abspath(lai_in), os.path.abspath(lai_out))) wt.gdal_warp( lai_in, clone_map, lai_out, format="PCRaster", gdal_interp=gdalconst.GRA_Bilinear, gdal_type=gdalconst.GDT_Float32, ) # report other maps if other_maps is None: logger.info("No other maps used. Skipping other maps.") else: logger.info("Resampling list of other maps...") for map_file in other_maps: map_name = os.path.split(map_file)[1] logger.info("Resampling a map from {:s} to {:s}".format( os.path.abspath(map_file), os.path.join( destination, os.path.splitext(os.path.basename(map_file))[0] + ".map", ), )) wt.gdal_warp( map_file, clone_map, os.path.join( destination, os.path.splitext(os.path.basename(map_file))[0] + ".map", ), format="PCRaster", gdal_interp=gdalconst.GRA_Mode, gdal_type=gdalconst.GDT_Float32, ) if clean: wt.DeleteList(glob.glob(os.path.join(destination, "*.xml")), logger=logger) wt.DeleteList(glob.glob(os.path.join(destination, "clim", "*.xml")), logger=logger) wt.DeleteList(glob.glob(os.path.join(destination, "*highres*")), logger=logger)
def main(): clone_map = "mask\mask.map" clone_shp = "mask\mask.shp" clone_prj = "mask\mask.prj" workdir = "work\\" resultdir = "staticmaps\\" ''' read commandline arguments ''' argv = sys.argv clone_EPSG = False try: opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA') except getopt.error: print('error parsing command line arguments') Usage() sys.exit(1) inifile = None rivshp = None catchshp = None dem_in = None landuse = None soiltype = None clean = False gaugeshp = None alltouching = False for o, a in opts: if o == '-i': inifile = a if o == '-p': clone_EPSG = 'EPSG:' + a if o == '-r': rivshp = a if o == '-c': catchshp = a if o == '-d': dem_in = a if o == '-l': landuse = a if o == '-s': soiltype = a if o == '-C': clean = True if o == '-g': gaugeshp = a if o == '-A': alltouching = True if inifile is None or rivshp is None or catchshp is None or dem_in is None: print('the following files are compulsory:') print(' - ini-file') print(' - DEM (raster)') print(' - river (shape)') print(' - catchment (shape)') Usage() sys.exit(1) if landuse is None: print('no raster with landuse classifications is specified. 1 class will be applied for the entire domain') if soiltype is None: print('no raster with soil classifications is specified. 1 class will be applied for the entire domain') ''' read mask ''' if not os.path.exists(clone_map): print('Mask not found. Make sure the file mask\mask.map exists') print('This file is usually created with the CreateGrid script') sys.exit(1) else: pcr.setclone(clone_map) ds = gdal.Open(clone_map, GA_ReadOnly) clone_trans = ds.GetGeoTransform() cellsize = clone_trans[1] clone_rows = ds.RasterYSize clone_columns = ds.RasterXSize extent_mask = [ clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize, clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3] ] xmin, ymin, xmax, ymax = map(str, extent_mask) ds = None ones = pcr.scalar(pcr.readmap(clone_map)) zeros = ones * 0 empty = pcr.ifthen(ones == 0, pcr.scalar(0)) ''' read projection from mask.shp ''' # TODO: check how to deal with projections (add .prj to mask.shp in creategrid) if not os.path.exists(clone_prj): print('please add prj-file to mask.shp') sys.exit(1) if os.path.exists(clone_shp): ds = ogr.Open(clone_shp) file_att = os.path.splitext(os.path.basename(clone_shp))[0] lyr = ds.GetLayerByName(file_att) spatialref = lyr.GetSpatialRef() if spatialref is not None: srs_clone = osr.SpatialReference() srs_clone.ImportFromWkt(spatialref.ExportToWkt()) srs_clone.AutoIdentifyEPSG() unit_clone = False unit_clone = srs_clone.GetAttrValue('UNIT').lower() #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1) # TODO: fix hard EPSG code below clone_EPSG = 'EPSG:' + '4167' print('EPSG-code is read from mask.shp: ' + clone_EPSG) spatialref = None if not clone_EPSG: print('EPSG-code cannot be read from mask.shp') print('please add prj-file to mask.shp or specify on command line') print('e.g. -p EPSG:4326 (for WGS84 lat lon projection)') ds = None clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)]) ''' open config-file ''' config = wt.OpenConf(inifile) ''' read settings ''' snapgaugestoriver = bool( int(wt.configget(config, "settings", "snapgaugestoriver", "1"))) burnalltouching = bool( int(wt.configget(config, "settings", "burncatchalltouching", "1"))) burninorder = bool( int(wt.configget(config, "settings", "burncatchalltouching", "0"))) verticetollerance = float( wt.configget(config, "settings", "vertice_tollerance", "0.0001")) ''' read parameters ''' burn_outlets = int( wt.configget(config, "parameters", "burn_outlets", 10000)) burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200)) burn_connections = int( wt.configget(config, "parameters", "burn_connections", 100)) burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100)) minorder = int(wt.configget(config, "parameters", "riverorder_min", 3)) exec("percentile = tr.array(" + wt.configget(config, "parameters", "statisticmaps", [0, 100]) + ")") if not unit_clone: print('failed to read unit (meter or degree) from mask projection') unit_clone = str(wt.configget(config, "settings", "unit", 'meter')) print('unit read from settings: ' + unit_clone) if unit_clone == 'degree': cellsize_hr = float( wt.configget(config, "parameters", "highres_degree", 0.0005)) elif (unit_clone == 'metre') or (unit_clone == 'meter'): cellsize_hr = float( wt.configget(config, "parameters", "highres_metre", 50)) cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2) rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2) hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0, -cellsize_hr) ''' read staticmap locations ''' catchment_map = wt.configget(config, "staticmaps", "catchment", "wflow_catchment.map") dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map") demmax_map = wt.configget(config, "staticmaps", "demmax", "wflow_demmax.map") demmin_map = wt.configget(config, "staticmaps", "demmin", "wflow_demmin.map") gauges_map = wt.configget(config, "staticmaps", "gauges", "wflow_gauges.map") landuse_map = wt.configget(config, "staticmaps", "landuse", "wflow_landuse.map") ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map") river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map") outlet_map = wt.configget(config, "staticmaps", "outlet", "wflow_outlet.map") riverlength_fact_map = wt.configget(config, "staticmaps", "riverlength_fact", "wflow_riverlength_fact.map") soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map") streamorder_map = wt.configget(config, "staticmaps", "streamorder", "wflow_streamorder.map") subcatch_map = wt.configget(config, "staticmaps", "subcatch", "wflow_subcatch.map") ''' read mask location (optional) ''' masklayer = wt.configget(config, "mask", "masklayer", catchshp) ''' create directories ''' if os.path.isdir(workdir): shutil.rmtree(workdir) os.makedirs(workdir) if os.path.isdir(resultdir): shutil.rmtree(resultdir) os.makedirs(resultdir) ''' Preparation steps ''' zero_map = workdir + "zero.map" zero_tif = workdir + "zero.tif" pcr.report(zeros, zero_map) # TODO: replace gdal_translate call call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot', 'Float32', zero_map, zero_tif)) pcr.setglobaloption("lddin") ''' resample DEM ''' dem_resample = workdir + "dem_resampled.tif" ds = gdal.Open(dem_in, GA_ReadOnly) band = ds.GetRasterBand(1) nodata = band.GetNoDataValue() proj = ds.GetGeoTransform() cellsize_dem = proj[1] 
''' read DEM projection ''' spatialref = ds.GetProjection() if spatialref: srs = osr.SpatialReference() srs.ImportFromWkt(spatialref) srs.AutoIdentifyEPSG() dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1) print('EPSG-code is read from ' + os.path.basename( dem_in) + ': ' + dem_EPSG) spatialref = None dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)]) srs_DEM = osr.SpatialReference() srs_DEM.ImportFromEPSG(dem_EPSG_int) clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM) else: dem_EPSG = clone_EPSG print('No projection defined for ' + os.path.basename(dem_in)) print('Assumed to be the same as model projection (' + clone_EPSG + ')') ds = None print('Resampling DEM...') if nodata is None: call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata', str(-9999), '-r', 'cubic', dem_in, dem_resample)) else: call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-srcnodata', str(nodata), '-dstnodata', str(nodata), '-r', 'cubic', dem_in, dem_resample)) ''' create dem.map and statistic maps ''' dem_resample_map = resultdir + dem_map call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot', 'Float32', dem_resample, dem_resample_map)) print('Computing DEM statistics ....') stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans, srs_clone, resultdir, percentile) ''' burn DEM ''' ds = ogr.Open(rivshp) file_att = os.path.splitext(os.path.basename(rivshp))[0] lyr = ds.GetLayerByName(file_att) spatialref = lyr.GetSpatialRef() # if not spatialref == None: # srs = osr.SpatialReference() # srs.ImportFromWkt(spatialref.ExportToWkt()) # srs.AutoIdentifyEPSG() # rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) # spatialref = None # else: rivshp_EPSG = clone_EPSG print('No projection defined for ' + file_att + '.shp') print('Assumed to be the same as model projection (' + clone_EPSG + ')') # strip rivers to nodes xminc = str(float(xmin) + 0.5 * cellsize) yminc = str(float(ymin) + 0.5 * cellsize) xmaxc = str(float(xmax) - 0.5 * cellsize) ymaxc = str(float(ymax) - 0.5 * cellsize) if rivshp_EPSG == clone_EPSG: rivclipshp = workdir + 'rivshape_clip.shp' call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat', xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc, rivclipshp, rivshp)) else: rivprojshp = workdir + 'rivshape_proj.shp' rivclipshp = workdir + 'rivshape_clip.shp' call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat', xmin, ymin, xmax, ymax, rivprojshp, rivshp)) call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat', xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc, rivclipshp, rivprojshp)) rivshp = rivclipshp #### BURNING BELOW #### # TODO: check if extraction can be done within memory and return a burn layer shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int, cellsize * verticetollerance, workdir) outlets = shapes[1] connections = shapes[2] outlets_att = os.path.splitext(os.path.basename(outlets))[0] connections_att = os.path.splitext(os.path.basename(connections))[0] dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0] connections_tif = workdir + connections_att + ".tif" outlets_tif = workdir + outlets_att + ".tif" # TODO: make the burning in memory call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot', 'Float32', zero_map, connections_tif)) call(('gdal_translate', '-of', 'GTiff', '-a_srs', 
clone_EPSG, '-ot', 'Float32', zero_map, outlets_tif)) call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets, outlets_tif)) call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections, connections_tif)) # convert rivers to order rivshp_att = os.path.splitext(os.path.basename(rivshp))[0] rivers_tif = workdir + rivshp_att + ".tif" call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot', 'Float32', zero_map, rivers_tif)) if burninorder: # make river shape with an order attribute OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int, cellsize * verticetollerance, workdir) wt.Burn2Tif(OrderSHPs, 'order', rivers_tif) else: call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp, rivers_tif)) # convert 2 maps connections_map = workdir + connections_att + ".map" rivers_map = workdir + rivshp_att + ".map" outlets_map = workdir + outlets_att + ".map" call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot', 'Float32', connections_tif, connections_map)) call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot', 'Float32', rivers_tif, rivers_map)) call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot', 'Float32', outlets_tif, outlets_map)) # burn the layers in DEM outletsburn = pcr.scalar( pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets) connectionsburn = pcr.scalar( pcr.readmap(connections_map)) * pcr.scalar(burn_connections) riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers) ldddem = pcr.cover(dem_resample_map, pcr.ifthen(riverburn > 0, pcr.scalar(0))) ldddem = ldddem - outletsburn - connectionsburn - riverburn ldddem = pcr.cover(ldddem, pcr.scalar(0)) pcr.report(ldddem, workdir + "dem_burn.map") ''' create ldd for multi-catchments ''' ldd = pcr.ldd(empty) # reproject catchment shape-file ds = ogr.Open(catchshp) file_att = os.path.splitext(os.path.basename(catchshp))[0] lyr = ds.GetLayerByName(file_att) spatialref = lyr.GetSpatialRef() # if not spatialref == None: # srs = osr.SpatialReference() # srs.ImportFromWkt(spatialref.ExportToWkt()) # srs.AutoIdentifyEPSG() # catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) # spatialref = None # else: catchshp_EPSG = clone_EPSG print('No projection defined for ' + file_att + '.shp') print('Assumed to be the same as model projection (' + clone_EPSG + ')') if not catchshp_EPSG == clone_EPSG: catchprojshp = workdir + 'catchshape_proj.shp' call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG, catchprojshp, catchshp)) catchshp = catchprojshp ds.Destroy() ds = ogr.Open(catchshp) file_att = os.path.splitext(os.path.basename(catchshp))[0] lyr = ds.GetLayerByName(file_att) fieldDef = ogr.FieldDefn("ID", ogr.OFTString) fieldDef.SetWidth(12) TEMP_out = Driver.CreateDataSource(workdir + "temp.shp") if srs is not None: TEMP_LYR = TEMP_out.CreateLayer("temp", srs, geom_type=ogr.wkbMultiPolygon) else: TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon) TEMP_LYR.CreateField(fieldDef) for i in range(lyr.GetFeatureCount()): orgfeature = lyr.GetFeature(i) geometry = orgfeature.geometry() feature = ogr.Feature(TEMP_LYR.GetLayerDefn()) feature.SetGeometry(geometry) feature.SetField("ID", str(i + 1)) TEMP_LYR.CreateFeature(feature) TEMP_out.Destroy() ds.Destroy() # rasterize catchment map catchments_tif = workdir + "catchments.tif" catchments_map = workdir + "catchments.map" call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map, catchments_tif)) if alltouching: call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp", workdir + 
'temp.shp', catchments_tif)) else: call(('gdal_rasterize', '-a', 'ID', '-l', "temp", workdir + 'temp.shp', catchments_tif)) call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, catchments_tif, catchments_map)) catchments = pcr.readmap(catchments_map) riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn))) rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique) #catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0)) catchments = pcr.cover( pcr.ifthen(catchments > 0, pcr.ordinal(catchments)), pcr.ifthen( riverburn > 0, pcr.ordinal( pcr.spreadzone(pcr.nominal(catchments), pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1)))) rivercatch_map = workdir + "catchments_river.map" catchclip_map = workdir + "catchments_clip.map" pcr.report(rivercatch, rivercatch_map) pcr.report(catchments, catchclip_map) ds = ogr.Open(workdir + "temp.shp") lyr = ds.GetLayerByName("temp") print('calculating ldd') for i in range(lyr.GetFeatureCount()): feature = lyr.GetFeature(i) catch = int(feature.GetField("ID")) print("calculating ldd for catchment: " + str(i + 1) + "/" + str( lyr.GetFeatureCount()) + "....") ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch, catchments)) * 0 + 1 * ldddem ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"), float("1E35"), float("1E35")) ldd = pcr.cover(ldd, ldd_select) pcr.report(ldd, resultdir + ldd_map) ds.Destroy() ''' report stream order, river and dem ''' streamorder = pcr.ordinal(pcr.streamorder(ldd)) river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1)) mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999))) dem_resample_map = pcr.cover(dem_resample_map, pcr.scalar(river) * 0 + mindem) pcr.report(dem_resample_map, resultdir + dem_map) pcr.report(streamorder, resultdir + streamorder_map) pcr.report(river, resultdir + river_map) ''' deal with your catchments ''' if gaugeshp is None: print('No gauges defined, using outlets instead') gauges = pcr.ordinal( pcr.uniqueid( pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1))))) pcr.report(gauges, resultdir + gauges_map) # ds = ogr.Open(gaugeshp) # file_att = os.path.splitext(os.path.basename(gaugeshp))[0] # lyr = ds.GetLayerByName(file_att) # spatialref = lyr.GetSpatialRef() ## if not spatialref == None: ## srs = osr.SpatialReference() ## srs.ImportFromWkt(spatialref.ExportToWkt()) ## srs.AutoIdentifyEPSG() ## gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) ## spatialref = None # #else: # gaugeshp_EPSG = clone_EPSG # print 'No projection defined for ' + file_att + '.shp' # print 'Assumed to be the same as model projection (' + clone_EPSG + ')' # # # reproject gauge shape if necessary # if not gaugeshp_EPSG == clone_EPSG: # gaugeprojshp = workdir + 'gaugeshape_proj.shp' # call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_EPSG,gaugeprojshp,gaugeshp)) # gaugeshp = gaugeprojshp # # file_att = os.path.splitext(os.path.basename(gaugeshp))[0] # gaugestif = workdir + file_att + '.tif' # gaugesmap = workdir + file_att + '.map' # call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif)) # call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif)) # call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap)) # gaugelocs = pcr.readmap(gaugesmap) # snapgaugestoriver = True # # if snapgaugestoriver: # print "Snapping gauges to river" # gauges = pcr.uniqueid(pcr.boolean(gaugelocs)) # gauges = wt.snaptomap(pcr.ordinal(gauges),river) # # gaugesmap = pcr.ifthen(gauges > 0, gauges) ''' report riverlengthfrac ''' riv_hr = workdir + 'river_highres.tif' wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0) file_att = os.path.splitext(os.path.basename(rivshp))[0] call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr)) print('Computing river length...') #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform) riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns, clone_trans, srs_clone, resultdir, 'frac') ''' report outlet map ''' pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)), resultdir + outlet_map) ''' report map ''' catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1)) pcr.report(catchment, resultdir + catchment_map) ''' report subcatchment map ''' subcatchment = pcr.subcatchment(ldd, gauges) pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map) ''' report landuse map ''' if landuse is None: pcr.report(pcr.nominal(ones), resultdir + landuse_map) else: landuse_resample = workdir + 'landuse.tif' landuse_map = resultdir + landuse_map transform = wt.GetRasterTranform(landuse, srs_clone) if not transform[0]: call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-r', 'mode', landuse, landuse_resample)) else: call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs', clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-r', 'mode', landuse, landuse_resample)) call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32', landuse_resample, landuse_map)) landuse_work = pcr.readmap(landuse_map) pcr.report(pcr.nominal(landuse_work), landuse_map) ''' report soil map ''' if soiltype is None: pcr.report(pcr.nominal(ones), resultdir + soil_map) else: soiltype_resample = workdir + 'soiltype.tif' soil_map = resultdir + soil_map #transform = wt.GetRasterTranform(soiltype,srs_clone) # if not transform[0]: call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-r', 'mode', soiltype, soiltype_resample)) # else: # call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample)) call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32', soiltype_resample, soil_map)) soiltype_work = pcr.readmap(soil_map) pcr.report(pcr.nominal(soiltype_work), soil_map) if clean: wt.DeleteList(glob.glob(os.getcwd() + '\\' + resultdir + '/*.xml'))
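# Example (added): this legacy script variant is driven by the getopt flags
# parsed at the top of main(); a typical call (placeholder file names, and
# a hypothetical script name) would be:
#
#   python staticmaps.py -i wtools.ini -d dem.tif -r rivers.shp -c catchments.shp -l landuse.tif -s soils.tif -C -A
#
# where -C deletes the intermediate xml files afterwards and -A rasterizes
# the catchment polygons with all touched cells included.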
def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict): line = re.sub("\n", "", line) line = re.sub("\t", " ", line) result = None # read until first comment content = "" content, sep, comment = line.partition("#") if len(content) > 1: collectionVariableName, sep, tail = content.partition(" ") if collectionVariableName == self._varName: tail = tail.strip() key, sep, variableValue = tail.rpartition(" ") if len(key.split()) != nrColumns: tmp = re.sub("\(|\)|,", "", str(key)) msg = "Error reading %s line %d, order of columns given (%s columns) does not match expected order of %s columns" % ( self._fileName, lineNumber, len(key.split()) + 2, int(nrColumns) + 2) raise ValueError(msg) variableValue = re.sub('\"', "", variableValue) tmp = None try: tmp = int(variableValue) if self._dataType == pcraster.Boolean: tmp = pcraster.boolean(tmp) elif self._dataType == pcraster.Nominal: tmp = pcraster.nominal(tmp) elif self._dataType == pcraster.Ordinal: tmp = pcraster.ordinal(tmp) elif self._dataType == pcraster.Ldd: tmp = pcraster.ldd(tmp) else: msg = "Conversion to %s failed" % (self._dataType) raise Exception(msg) except ValueError as e: try: tmp = float(variableValue) if self._dataType == pcraster.Scalar: tmp = pcraster.scalar(tmp) elif self._dataType == pcraster.Directional: tmp = pcraster.directional(tmp) else: msg = "Conversion to %s failed" % (self._dataType) raise Exception(msg) except ValueError as e: variableValue = re.sub("\\\\", "/", variableValue) variableValue = variableValue.strip() path = os.path.normpath(variableValue) try: tmp = pcraster.readmap(path) except RuntimeError as e: msg = "Error reading %s line %d, %s" % ( self._fileName, lineNumber, e) raise ValueError(msg) # test if key is an external name transformedKeys = [] counter = 0 for k in key.split(): k = k.strip() if externalNames[counter].get(k): transformedKeys.append(externalNames[counter].get(k)) else: transformedKeys.append(k) counter += 1 key = tuple(transformedKeys) if not key in keyDict: tmp = re.sub("\(|\)|,", "", str(key)) msg = "Error reading %s line %d, %s unknown collection index" % ( self._fileName, lineNumber, tmp) raise ValueError(msg) if not keyDict[key] is None: tmp = re.sub("\(|\)|,", "", str(key)) msg = "Error reading %s line %d, %s %s already initialised" % ( self._fileName, lineNumber, self._varName, tmp) raise ValueError(msg) keyDict[key] = tmp
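# Example (added): a hypothetical collection file that _parseLine above would
# accept for a two-column variable named "Infiltration". Everything after '#'
# is discarded, the last whitespace-separated token is the value (an int, a
# float, or a path to a PCRaster map), and the tokens before it are the keys:
#
#   Infiltration 1 sandy 0.30        # scalar constant
#   Infiltration 2 clay  infil.map   # value read from disk instead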
def subcatch_stream( ldd, threshold, min_strahler=-999, max_strahler=999, assign_edge=False, assign_existing=False, up_area=None, ): """ (From Deltares Hydrotools) Derive catchments based upon strahler threshold Input: ldd -- pcraster object direction, local drain directions threshold -- integer, strahler threshold, subcatchments ge threshold are derived min_strahler -- integer, minimum strahler threshold of river catchments to return max_strahler -- integer, maximum strahler threshold of river catchments to return assign_edge=False -- if set to True, unassigned connected areas at the edges of the domain are assigned a unique id as well. If set to False, edges are not assigned assign_existing=False -- if set to True, unassigned edges are assigned to existing basins with an upstream weighting. If set to False, edges are assigned to unique IDs, or not assigned output: stream_ge -- pcraster object, streams of strahler order ge threshold subcatch -- pcraster object, subcatchments of strahler order ge threshold """ # derive stream order stream = pcr.streamorder(ldd) stream_ge = pcr.ifthen(stream >= threshold, stream) stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0))) # detect any transfer of strahler order, to a higher strahler order. transition_strahler = pcr.ifthenelse( pcr.downstream(ldd, stream_ge) != stream_ge, pcr.boolean(1), pcr.ifthenelse( pcr.nominal(ldd) == 5, pcr.boolean(1), pcr.ifthenelse( pcr.downstream(ldd, pcr.scalar(stream_up_sum)) > pcr.scalar(stream_ge), pcr.boolean(1), pcr.boolean(0), ), ), ) # make unique ids (write to file) transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler)) # derive upstream catchment areas (write to file) subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique)) if assign_edge: # fill unclassified areas (in pcraster equal to zero) with a unique id, above the maximum id assigned so far unique_edge = pcr.clump(pcr.ifthen(subcatch == 0, pcr.ordinal(0))) subcatch = pcr.ifthenelse( subcatch == 0, pcr.nominal(pcr.mapmaximum(pcr.scalar(subcatch)) + pcr.scalar(unique_edge)), pcr.nominal(subcatch), ) elif assign_existing: # unaccounted areas are added to largest nearest draining basin if up_area is None: up_area = pcr.ifthen( pcr.boolean(pcr.cover(stream_ge, 0)), pcr.accuflux(ldd, 1) ) riverid = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)), subcatch) friction = 1.0 / pcr.scalar( pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0) ) # *(pcr.scalar(ldd)*0+1) delta = pcr.ifthen( pcr.scalar(ldd) >= 0, pcr.ifthen( pcr.cover(subcatch, 0) == 0, pcr.spreadzone(pcr.cover(riverid, 0), 0, friction), ), ) subcatch = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)), subcatch, delta) # finally, only keep basins with minimum and maximum river order flowing through them strahler_subcatch = pcr.areamaximum(stream, subcatch) subcatch = pcr.ifthen( pcr.ordinal(strahler_subcatch) >= min_strahler, pcr.ifthen(pcr.ordinal(strahler_subcatch) <= max_strahler, subcatch), ) return stream_ge, pcr.ordinal(subcatch)
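# Usage sketch (added): derive subcatchments upstream of every transition to
# Strahler order >= 4, assigning unclassified edge areas to existing basins;
# "wflow_ldd.map" is a placeholder path.
ldd = pcr.readmap("wflow_ldd.map")
stream_ge, subcatch = subcatch_stream(ldd, 4, assign_existing=True)
pcr.report(stream_ge, "stream_ge4.map")
pcr.report(subcatch, "subcatch_ge4.map")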
call(('gdal_translate','-of','PCRaster','-a_srs',EPSG,'-ot','Float32',line_tif,line_map)) lines = pcr.scalar(pcr.readmap(line_map)) burn = burn - (pcr.scalar(lines) * pcr.scalar(burnvalue)) #pcr.report(burn,'burn2.map') #burn points if pointshp is not None: file_att = os.path.splitext(os.path.basename(pointshp))[0] point_tif = workdir + 'point.tif' point_map = workdir + 'point.map' call(('gdal_translate','-of','GTiff','-a_srs',EPSG,'-ot','Float32',clone_map,point_tif)) call(('gdal_rasterize','-burn','1','-l',file_att,pointshp,point_tif)) call(('gdal_translate','-of','PCRaster','-a_srs',EPSG,'-ot','Float32',point_tif,point_map)) points = pcr.scalar(pcr.readmap(point_map)) if snapgaugestoriver: print("Snapping points to line") points = wt.snaptomap(pcr.ordinal(points), pcr.boolean(lines)) points = pcr.cover(pcr.scalar(points), pcr.scalar(0)) points = pcr.cover(points, pcr.scalar(0)) #pcr.report(points,'points.map') burn = burn - (points * pcr.scalar(burnvalue)*2) #pcr.report(burn,'burn3.map') ''' create ldd ''' pcr.setglobaloption("lddout") if lddin: pcr.setglobaloption("lddin") ldd_map = workdir + 'ldd.map' streamorder_map = workdir + 'streamorder.map' river_map = workdir + 'river.map' catchments_map = workdir + 'catchments.map' catchments_tif = workdir + 'catchments.tif'
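# Sketch (added, commented out): assuming `burn` above already holds the DEM
# with line and point burn values subtracted, as the preceding statements
# suggest, the step that normally follows in the other variants in this file
# is to derive the ldd and stream order from the burned surface and report
# them to the *_map paths just defined:
#
#   ldd = pcr.lddcreate(burn, 1e35, 1e35, 1e35, 1e35)
#   pcr.report(ldd, ldd_map)
#   pcr.report(pcr.ordinal(pcr.streamorder(ldd)), streamorder_map)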