def execute_BankfullAtCS(r_dem, r_flowdir, str_frompoints, r_cs, p_detect, thresholdz, increment, str_bkf, messages, language = "FR"):
    """Compute the bankfull elevation at each cross-section along the flow paths.

    Starting from each source point in str_frompoints, the flow is followed
    cell by cell using the D8 flow-direction raster.  At every cell carrying a
    cross-section angle (raster r_cs), candidate bankfull elevations are tested
    by steps of `increment`: for each candidate, the wetted cross-section area
    is accumulated on both sides of the channel and converted into a mean
    hydraulic depth.  The bankfull level is detected when that mean depth drops
    below (1 - p_detect/100) of the maximum depth observed so far, and the
    previous candidate elevation is written to the output raster.

    :param r_dem: DEM raster
    :param r_flowdir: D8 flow-direction raster (values 1,2,4,8,16,32,64,128)
    :param str_frompoints: feature class of flow-path source points
    :param r_cs: raster of cross-section angles (radians), NoData elsewhere
    :param p_detect: detection threshold, in percent
    :param thresholdz: maximum height above the thalweg to test
    :param increment: vertical step between two tested elevations
    :param str_bkf: path of the output bankfull-elevation raster
    :param messages: ArcGIS messages object (errors/progress)
    :param language: "FR" or "EN" — selects the progress-bar text
    """
    # Load the input rasters
    dem = RasterIO(r_dem)
    flowdir = RasterIO(r_flowdir)
    cs = RasterIO(r_cs)
    try:
        dem.checkMatch(flowdir)
        dem.checkMatch(cs)
    except Exception as e:
        messages.addErrorMessage(str(e))
    # Output raster; -255 is the NoData marker (also used to detect confluences)
    Result = RasterIO(r_dem, str_bkf, float, -255)
    # Count the source points to configure the progress bar
    frompointcursor = arcpy.da.SearchCursor(str_frompoints, "OID@")
    count = 0
    for frompoint in frompointcursor:
        count += 1
    progtext = "Calcul des élévations plein-bord aux sections transversales"
    if language == "EN":
        progtext = "Computing elevation at cross-sections"
    arcpy.SetProgressor("step", progtext, 0, count, 1)
    progres = 0
    # Processing done for each source point
    frompointcursor = arcpy.da.SearchCursor(str_frompoints, "SHAPE@")
    for frompoint in frompointcursor:
        # Update the progress bar
        arcpy.SetProgressorPosition(progres)
        progres += 1
        # Take the geometry object (the point) associated with the table row
        frompointshape = frompoint[0].firstPoint
        # Coordinate conversion (map coordinates -> raster row/col)
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)
        intheraster = True
        # Safety checks to make sure the source point lies inside the rasters
        if currentcol<0 or currentcol>=flowdir.raster.width or currentrow<0 or currentrow>= flowdir.raster.height:
            intheraster = False
        elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
              flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
              flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
              flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
            intheraster = False
        # Processing done for each cell along the flow path
        while (intheraster):
            # Processing done at each cross-section
            if cs.getValue(currentrow,currentcol) != cs.nodata:
                zmin = dem.getValue(currentrow,currentcol)
                angle = cs.getValue(currentrow,currentcol)
                MaxHydroH = 0
                HydroH = 0
                stophydroh = False
                # State carried between two candidate elevations, so each side
                # of the cross-section is only re-scanned from where the
                # previous (lower) candidate stopped.
                previouslisth = []
                previouslisth.append(zmin)
                previousstep = 0
                prevz = zmin
                previouslisth2 = []
                previouslisth2.append(zmin)
                previousstep2 = 0
                prevz2 = zmin
                n = 4
                # iterations, by increments of 10 cm
                while n < (thresholdz/increment) and not stophydroh :
                    n += 1
                    # currentz: candidate bankfull elevation being tested
                    currentz = zmin + increment*n
                    listh = list(previouslisth)
                    step = previousstep
                    stop = False
                    # Walk along the cross-section (first side)
                    while not stop:
                        step += 1
                        # row and column of the next point along the cross-section axis
                        if (angle > math.pi / 4 and angle < 3 * math.pi / 4):
                            rowinc = -step
                            colinc = int(math.cos(angle)*step)
                        else:
                            colinc = step
                            rowinc = -int(math.sin(angle)*step)
                            if angle > 3 * math.pi / 4:
                                rowinc = -rowinc
                        # localz: ground elevation of the tested point along the cross-section
                        localz = dem.getValue(currentrow + rowinc, currentcol + colinc)
                        # stop walking when the ground gets higher than the candidate
                        # bankfull elevation (or when we leave the raster)
                        if localz != dem.nodata:
                            if localz < prevz:
                                localz = prevz
                            if localz >= currentz:
                                stop = True
                            prevz = localz
                        else:
                            stop = True
                        listh.append(localz)
                    if(len(listh)>2):
                        previousstep = step -1
                        previouslisth = listh[:len(listh)-1]
                    sumh = 0
                    # For every elevation except the first one
                    for i in range(1,len(listh)):
                        if i<len(listh)-1:
                            # Every elevation except the last one.
                            # Area = trapezoidal panel
                            sumh+= (currentz-listh[i-1]+currentz-listh[i])/2
                        elif listh[i]!=dem.nodata:
                            # Last elevation: higher than currentz.
                            # Area = formula for the small triangle
                            sumh += (currentz-listh[i-1])*(currentz-listh[i-1])/(listh[i]-listh[i-1])/2
                            step -= 1
                            step += (currentz - listh[i - 1]) / (listh[i] - listh[i - 1])
                        else:
                            # Last elevation: NoData
                            # No area computed for this panel
                            step-=1
                    # repeat the previous operations, walking the other way
                    # along the cross-section (second side)
                    listh = list(previouslisth2)
                    step2 = previousstep2
                    stop = False
                    while not stop:
                        step2 += 1
                        if (angle > math.pi / 4 and angle < 3 * math.pi / 4):
                            rowinc = -step2
                            colinc = int(math.cos(angle) * step2)
                        else:
                            colinc = step2
                            rowinc = -int(math.sin(angle) * step2)
                            if angle > 3 * math.pi / 4:
                                rowinc = -rowinc
                        localz = dem.getValue(currentrow - rowinc, currentcol - colinc)
                        if localz != dem.nodata:
                            if localz < prevz2:
                                localz = prevz2
                            if localz >= currentz:
                                stop = True
                            prevz2 = localz
                        else:
                            stop = True
                        listh.append(localz)
                    if(len(listh)>2):
                        previousstep2 = step2 -1
                        previouslisth2 = listh[:len(listh)-1]
                    sumh2 = 0
                    for i in range(1, len(listh)):
                        if i < len(listh) - 1:
                            sumh2 += (currentz - listh[i - 1] + currentz - listh[i]) / 2
                        elif listh[i] != dem.nodata:
                            sumh2 += (currentz - listh[i - 1]) * (currentz - listh[i - 1]) / (
                                listh[i] - listh[i - 1]) / 2
                            step2 -= 1
                            step2 += (currentz - listh[i - 1]) / (listh[i] - listh[i - 1])
                        else:
                            step2-=1
                    # Mean hydraulic depth for this candidate elevation
                    # Note: +1 instead of -1 error in the EPRI1 version
                    HydroH = (sumh + sumh2) / (step + step2)
                    # Is the bankfull detection criterion met?
                    if (HydroH < (1-p_detect/100)*MaxHydroH):
                        # if so, take the last tested bankfull elevation, minus 10 cm
                        bkfh = currentz - increment
                        stophydroh = True
                    MaxHydroH = max(HydroH, MaxHydroH)
                # stophydroh is True when a bankfull level was detected
                if stophydroh:
                    Result.setValue(currentrow,currentcol,bkfh)
            # Look for the next cell using the flow direction
            direction = flowdir.getValue(currentrow, currentcol)
            if (direction == 1):
                currentcol = currentcol + 1
            if (direction == 2):
                currentcol = currentcol + 1
                currentrow = currentrow + 1
            if (direction == 4):
                currentrow = currentrow + 1
            if (direction == 8):
                currentcol = currentcol - 1
                currentrow = currentrow + 1
            if (direction == 16):
                currentcol = currentcol - 1
            if (direction == 32):
                currentcol = currentcol - 1
                currentrow = currentrow - 1
            if (direction == 64):
                currentrow = currentrow - 1
            if (direction == 128):
                currentcol = currentcol + 1
                currentrow = currentrow - 1
            # Safety checks to make sure we do not leave the rasters
            if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
                intheraster = False
            elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
                  flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
                  flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
                  flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
                intheraster = False
            if intheraster:
                if (Result.getValue(currentrow, currentcol) != -255):
                    # Reached a confluence already processed
                    intheraster = False
    Result.save()
    return
def execute_GaussianSmooth(r_flowdir, str_frompoint, r_values, gaussiansigma, nbgaussianpoints, SaveResult, messages, language="FR"):
    """Smooth known values along each flow path with a Gaussian moving average.

    The flow is followed from each source point using the D8 flow-direction
    raster.  Cells of r_values that are not NoData are collected together with
    their along-stream distance; every traversed cell then receives a weighted
    average of the known values inside a window of nbgaussianpoints (distance
    units), weighted by a Gaussian of standard deviation gaussiansigma.
    When a confluence with an already-smoothed path is reached, the walk
    continues for half a window past the confluence before stopping.

    :param r_flowdir: D8 flow-direction raster (values 1,2,4,8,16,32,64,128)
    :param str_frompoint: feature class of flow-path source points
    :param r_values: raster of values to smooth (NoData where unknown)
    :param gaussiansigma: standard deviation of the Gaussian kernel
    :param nbgaussianpoints: window size, in the same distance units as cells
    :param SaveResult: path of the output smoothed raster
    :param messages: ArcGIS messages object (warnings/errors)
    :param language: "FR" or "EN" — selects progress/warning texts
    """
    # Load the input rasters
    flowdir = RasterIO(r_flowdir)
    valuesraster = RasterIO(r_values)
    try:
        flowdir.checkMatch(valuesraster)
    except Exception as e:
        # bug fix: e.message does not exist in Python 3 (PEP 352); use str(e),
        # consistent with execute_BankfullAtCS
        messages.addErrorMessage(str(e))
    # Output raster; -255 is the NoData marker (also used to detect confluences)
    Result = RasterIO(r_flowdir, SaveResult, float, -255)
    # Count the source points to configure the progress bar
    count = 0
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, "OID@")
    for frompoint in frompointcursor:
        count += 1
    progtext = "Lissage par moyenne mobile"
    if language == "EN":
        progtext = "Processing"
    arcpy.SetProgressor("step", progtext, 0, count, 1)
    progres = 0
    # Processing done for each source point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, ["OID@", "SHAPE@"])
    for frompoint in frompointcursor:
        # Update the progress bar
        arcpy.SetProgressorPosition(progres)
        progres += 1
        # Take the geometry object (the point) associated with the table row
        frompointshape = frompoint[1].firstPoint
        # Coordinate conversion (map coordinates -> raster row/col)
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)
        intheraster = True
        # Safety checks to make sure the source point lies inside the rasters
        if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
            intheraster = False
        elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
              flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
              flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
              flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
            intheraster = False
        listflowpath = []        # cumulative distance of every traversed cell
        listpointsflowpath = []  # the traversed cells themselves
        listdistance = []        # distances of the cells with a known value
        listelevation = []       # the known values at those distances
        totaldistance = 0
        currentdistance = 0
        confluencedist = 0
        # Processing done for each cell along the flow path
        while (intheraster):
            currentpoint = pointflowpath()
            currentpoint.row = currentrow
            currentpoint.col = currentcol
            listpointsflowpath.append(currentpoint)
            totaldistance = totaldistance + currentdistance
            listflowpath.append(totaldistance)
            # Build the list of cells with a known value along the flow path,
            # together with their distance from the source point
            if valuesraster.getValue(currentrow, currentcol) != valuesraster.nodata:
                listdistance.append(totaldistance)
                listelevation.append(valuesraster.getValue(currentrow, currentcol))
            # Look for the next cell using the flow direction
            direction = flowdir.getValue(currentrow, currentcol)
            if (direction == 1):
                currentcol = currentcol + 1
                currentdistance = flowdir.raster.meanCellWidth
            if (direction == 2):
                currentcol = currentcol + 1
                currentrow = currentrow + 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            if (direction == 4):
                currentrow = currentrow + 1
                currentdistance = flowdir.raster.meanCellHeight
            if (direction == 8):
                currentcol = currentcol - 1
                currentrow = currentrow + 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            if (direction == 16):
                currentcol = currentcol - 1
                currentdistance = flowdir.raster.meanCellWidth
            if (direction == 32):
                currentcol = currentcol - 1
                currentrow = currentrow - 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            if (direction == 64):
                currentrow = currentrow - 1
                currentdistance = flowdir.raster.meanCellHeight
            if (direction == 128):
                currentcol = currentcol + 1
                currentrow = currentrow - 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            # Safety checks to make sure we do not leave the rasters
            if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
                intheraster = False
            elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
                  flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
                  flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
                  flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
                intheraster = False
            if intheraster:
                if (Result.getValue(currentrow, currentcol) != -255):
                    # Reached a confluence with an already-processed path
                    if confluencedist == 0:
                        confluencedist = totaldistance + currentdistance
                    # Keep going for half a window past the confluence
                    if (totaldistance + currentdistance - confluencedist) > nbgaussianpoints / 2:
                        intheraster = False
        if len(listdistance) <= 1:
            # Warn when there is only one (or no) data point on this path
            if language == "FR":
                messages.addWarningMessage(
                    "Point source {0}: pas assez de sections transversales".format(frompoint[0]))
            else:
                messages.addWarningMessage(
                    "From point {0}: not enough cross-sections".format(frompoint[0]))
        else:
            currentpointnumber = 0
            # Processing done for each cell along the flow path
            while (currentpointnumber < len(listflowpath)):
                currentpoint = listpointsflowpath[currentpointnumber]
                weights = []
                values = []
                sumwgt = 0
                # Scan every cell with a known value...
                for i in range(len(listdistance)):
                    distlocale = abs(listdistance[i] - listflowpath[currentpointnumber])
                    # ... to find those inside the window
                    if distlocale < (nbgaussianpoints - 1) / 2:
                        # Gaussian curve gives the weight of each point
                        # (and the running sum of weights)
                        pointwgt = 1 / (gaussiansigma * math.sqrt(2 * math.pi)) * math.exp(
                            -0.5 * (distlocale / gaussiansigma)**2)
                        sumwgt += pointwgt
                        # Record both value and weight
                        weights.append(pointwgt)
                        values.append(listelevation[i])
                # Warn when the window is too small to hold any data point
                if len(weights) == 0:
                    if language == "FR":
                        messages.addWarningMessage(
                            "Manque trop important de données à interpoler : " +
                            str(frompoint[0]) + " - " + str(listflowpath[currentpointnumber]))
                    else:
                        messages.addWarningMessage(
                            "Not enough data to interpolate : " +
                            str(frompoint[0]) + " - " + str(listflowpath[currentpointnumber]))
                # Final value = weighted average of the collected values
                # (0 is written when the window was empty — preserved behavior)
                finalvalue = 0
                for i in range(len(weights)):
                    finalvalue += values[i] * weights[i] / sumwgt
                Result.setValue(currentpoint.row, currentpoint.col, finalvalue)
                currentpointnumber = currentpointnumber + 1
    Result.save()
    return
def execute_RiverWith(r_binary, r_flowdir, str_frompoints, r_cs, oriented, str_width, messages, language="FR"):
    """Compute the river width at each cross-section along the flow paths.

    The flow is followed from each source point using the D8 flow-direction
    raster.  At every cell carrying a cross-section angle (raster r_cs), the
    width of the river is measured on the binary channel raster r_binary by
    walking outward on both sides until NoData is reached.  When `oriented` is
    true the walk follows the cross-section axis; otherwise widths are measured
    on a 16-branch star and the minimum is kept.

    :param r_binary: binary raster of the channel (NoData outside the channel)
    :param r_flowdir: D8 flow-direction raster (values 1,2,4,8,16,32,64,128)
    :param str_frompoints: feature class of flow-path source points
    :param r_cs: raster of cross-section angles (radians), NoData elsewhere
    :param oriented: if true, measure along the cross-section axis
    :param str_width: path of the output width raster
    :param messages: ArcGIS messages object (errors/progress)
    :param language: "FR" or "EN" — selects the progress-bar text
    """
    # Load the input rasters
    binary = RasterIO(r_binary)
    flowdir = RasterIO(r_flowdir)
    cs = RasterIO(r_cs)
    try:
        flowdir.checkMatch(cs)
    except Exception as e:
        # bug fix: e.message does not exist in Python 3 (PEP 352); use str(e)
        messages.addErrorMessage(str(e))
    # Output raster; -255 is the NoData marker (also used to detect confluences)
    Result = RasterIO(r_flowdir, str_width, float, -255)
    # Count the source points to configure the progress bar
    frompointcursor = arcpy.da.SearchCursor(str_frompoints, "OID@")
    count = 0
    for frompoint in frompointcursor:
        count += 1
    progtext = "Calcul de la largeur aux sections transversales"
    if language == "EN":
        progtext = "Processing"
    arcpy.SetProgressor("step", progtext, 0, count, 1)
    progres = 0
    # Processing done for each source point
    frompointcursor = arcpy.da.SearchCursor(str_frompoints, "SHAPE@")
    for frompoint in frompointcursor:
        # Update the progress bar
        arcpy.SetProgressorPosition(progres)
        progres += 1
        # Take the geometry object (the point) associated with the table row
        frompointshape = frompoint[0].firstPoint
        # Coordinate conversion (map coordinates -> raster row/col)
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)
        intheraster = True
        # Safety checks to make sure the source point lies inside the rasters
        if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
            intheraster = False
        elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
              flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
              flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
              flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
            intheraster = False
        # Processing done for each cell along the flow path
        while (intheraster):
            # Reproject into the coordinate system of the binary raster
            colbinary = binary.XtoCol(flowdir.ColtoX(currentcol))
            rowbinary = binary.YtoRow(flowdir.RowtoY(currentrow))
            angle = cs.getValue(currentrow, currentcol)
            if angle != cs.nodata:
                if oriented:
                    # row and column increments along the cross-section axis
                    if (angle > math.pi / 4 and angle < 3 * math.pi / 4):
                        rowinc = -1
                        colinc = math.cos(angle)
                    else:
                        colinc = 1
                        rowinc = -math.sin(angle)
                        if angle > 3 * math.pi / 4:
                            rowinc = -rowinc
                    # Walk outward on one side until leaving the channel
                    step = 0
                    while binary.getValue(rowbinary + int(rowinc * step),
                                          colbinary + int(colinc * step)) != binary.nodata:
                        step += 1
                    # Repeat, walking the other way along the cross-section
                    step2 = 0
                    while binary.getValue(rowbinary - int(rowinc * step2),
                                          colbinary - int(colinc * step2)) != binary.nodata:
                        step2 += 1
                    # if the process was not interrupted because an edge was reached
                    # bug fix: the fourth term was missing its ">= 0" comparison and
                    # was evaluated for truthiness (column 0 wrongly rejected,
                    # negative columns wrongly accepted)
                    if ((rowbinary + int(rowinc * step)) >= 0 and
                            (colbinary + int(colinc * step)) >= 0 and
                            (rowbinary - int(rowinc * step2)) >= 0 and
                            (colbinary - int(colinc * step2)) >= 0 and
                            (rowbinary + int(rowinc * step)) < binary.raster.height and
                            (colbinary + int(colinc * step)) < binary.raster.width and
                            (rowbinary - int(rowinc * step2)) < binary.raster.height and
                            (colbinary - int(colinc * step2)) < binary.raster.width):
                        basedist = math.sqrt((rowinc * binary.raster.meanCellHeight)**2 +
                                             (colinc * binary.raster.meanCellWidth)**2)
                        width = basedist * (step + step2)
                        Result.setValue(currentrow, currentcol, width)
                else:
                    # Measure the width on a 16-branch star and keep the minimum
                    minwidth = None
                    for star_i in range(1, 9):
                        step = 0
                        step2 = 0
                        # NOTE(review): "step % 2" is evaluated here with step == 0,
                        # so these increments are constant 0 — looks like the
                        # half-angle branches were meant to alternate per step;
                        # preserved as-is, to be confirmed with the author
                        if star_i == 1:
                            rowinc = 0
                            colinc = 1
                        if star_i == 2:
                            rowinc = step % 2
                            colinc = 1
                        if star_i == 3:
                            rowinc = 1
                            colinc = 1
                        if star_i == 4:
                            rowinc = 1
                            colinc = step % 2
                        if star_i == 5:
                            rowinc = 1
                            colinc = 0
                        if star_i == 6:
                            rowinc = 1
                            colinc = -step % 2
                        if star_i == 7:
                            rowinc = 1
                            colinc = -1
                        if star_i == 8:
                            rowinc = step % 2
                            colinc = -1
                        while binary.getValue(rowbinary + int(rowinc * step),
                                              colbinary + int(colinc * step)) != binary.nodata:
                            step += 1
                        while binary.getValue(rowbinary - int(rowinc * step2),
                                              colbinary - int(colinc * step2)) != binary.nodata:
                            step2 += 1
                        width = None
                        # if the process was not interrupted because an edge was reached
                        # bug fix: same missing ">= 0" comparison as in the oriented branch
                        if ((rowbinary + int(rowinc * step)) >= 0 and
                                (colbinary + int(colinc * step)) >= 0 and
                                (rowbinary - int(rowinc * step2)) >= 0 and
                                (colbinary - int(colinc * step2)) >= 0 and
                                (rowbinary + int(rowinc * step)) < binary.raster.height and
                                (colbinary + int(colinc * step)) < binary.raster.width and
                                (rowbinary - int(rowinc * step2)) < binary.raster.height and
                                (colbinary - int(colinc * step2)) < binary.raster.width):
                            basedist = math.sqrt((rowinc * binary.raster.meanCellHeight)**2 +
                                                 (colinc * binary.raster.meanCellWidth)**2)
                            width = basedist * (step + step2)
                        if minwidth is not None:
                            if width is not None:
                                minwidth = min(minwidth, width)
                        else:
                            minwidth = width
                    if minwidth is not None:
                        Result.setValue(currentrow, currentcol, minwidth)
            # Look for the next cell using the flow direction
            direction = flowdir.getValue(currentrow, currentcol)
            if (direction == 1):
                currentcol = currentcol + 1
            if (direction == 2):
                currentcol = currentcol + 1
                currentrow = currentrow + 1
            if (direction == 4):
                currentrow = currentrow + 1
            if (direction == 8):
                currentcol = currentcol - 1
                currentrow = currentrow + 1
            if (direction == 16):
                currentcol = currentcol - 1
            if (direction == 32):
                currentcol = currentcol - 1
                currentrow = currentrow - 1
            if (direction == 64):
                currentrow = currentrow - 1
            if (direction == 128):
                currentcol = currentcol + 1
                currentrow = currentrow - 1
            # Safety checks to make sure we do not leave the rasters
            if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
                intheraster = False
            elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
                  flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
                  flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
                  flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
                intheraster = False
            if intheraster:
                if (Result.getValue(currentrow, currentcol) != -255):
                    # Reached a confluence already processed
                    intheraster = False
    Result.save()
    return
def execute_CreateZone(r_flowdir, str_lakes, r_slope, minslope, str_frompoint, distance, bufferw, str_zonesfolder, messages):
    """Split the river network into segments and build a zone around each one.

    The flow is followed from each source point using the D8 flow-direction
    raster.  A new segment is started every `distance` units (subject to the
    optional slope criterion) and whenever a lake is entered or left; segments
    shorter than 30% of `distance` are merged with the previous one.  The
    resulting segment raster is converted into buffered polygons, then into one
    rectangular zone per segment (clipped against the nearest lake edge where
    relevant), plus a point feature class of zone source points.

    :param r_flowdir: D8 flow-direction raster (values 1,2,4,8,16,32,64,128)
    :param str_lakes: polygon feature class of the lakes
    :param r_slope: optional slope raster (None to skip the slope criterion)
    :param minslope: minimum slope for a segment break
    :param str_frompoint: feature class of flow-path source points
    :param distance: target segment length
    :param bufferw: lateral extension of the zones
    :param str_zonesfolder: output folder for all created datasets
    :param messages: ArcGIS messages object (messages/errors)
    """
    # Output dataset paths
    str_segments = str_zonesfolder + "\\segments"
    str_linesegments = str_zonesfolder + "\\line_segments.shp"
    str_bufferedsegments = str_zonesfolder + "\\buff_segments.shp"
    save_sourcepoints = str_zonesfolder + "\\sourcepoints.shp"
    str_r_lakes = str_zonesfolder + "\\r_lakes"
    flowdir = RasterIO(r_flowdir)
    if r_slope is not None:
        slope = RasterIO(r_slope)
        try:
            flowdir.checkMatch(slope)
        except Exception as e:
            # bug fix: e.message does not exist in Python 3 (PEP 352); use str(e)
            messages.addErrorMessage(str(e))
    else:
        slope = None
    # Convert the lakes to raster and make a working copy
    arcpy.env.snapRaster = flowdir.raster
    arcpy.env.outputCoordinateSystem = flowdir.raster.spatialReference
    arcpy.env.extent = flowdir.raster
    arcpy.PolygonToRaster_conversion(str_lakes,
                                     arcpy.Describe(str_lakes).OIDFieldName,
                                     str_r_lakes,
                                     cellsize=flowdir.raster)
    lakes = RasterIO(arcpy.Raster(str_r_lakes))
    arcpy.CopyFeatures_management(str_lakes, str_zonesfolder + "\\lakes.shp")
    str_lakes = str_zonesfolder + "\\lakes.shp"
    arcpy.AddGeometryAttributes_management(str_lakes, "EXTENT")
    ### Start of the processing identifying the segments (as a raster) ###
    raster_segments = RasterIO(r_flowdir, str_segments, int, -255)
    # segment numbering
    segnumber = 0
    lakes_bci = {}     # segment number -> id of the lake limiting it
    toclip = {}        # segment number -> [edge name, coordinate] clip info
    inputpoints = {}   # segment number -> source point of the zone
    # For each source point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, ["SHAPE@", "OID@"])
    for frompoint in frompointcursor:
        # Take the geometry object (the point) associated with the table row
        frompointshape = frompoint[0].firstPoint
        # New river: switch to a new segment
        segnumber += 1
        # Coordinate conversion (map coordinates -> raster row/col)
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)
        # Safety checks to make sure the source point lies inside the rasters
        intheraster = True
        if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
            intheraster = False
        elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
              flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
              flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
              flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
            intheraster = False
        listpointsflowpath = []
        totaldistance = 0
        currentdistance = 0
        inlake = True
        # True when the river (from the source point down to the confluence)
        # is made of at least two segments
        dividedriver = False
        # listtomerged holds the segments that are too short and must be
        # merged with the previous segment
        listtomerged = []
        # For each cell, following the flow
        while (intheraster):
            waslake = inlake
            inlake = False
            lakevalue = lakes.getValue(currentrow, currentcol)
            inlake = (lakevalue != lakes.nodata)
            if not (inlake and waslake):
                # Distance travelled since the start of the segment
                totaldistance = totaldistance + currentdistance
                slope_criteria = True
                if slope is not None:
                    slope_criteria = slope.getValue(currentrow, currentcol) > minslope
                # Test whether we just reached a lake
                if inlake and not waslake:
                    # Record which lake edge should clip the zone afterwards
                    coordX = flowdir.ColtoX(currentcol)
                    coordY = flowdir.RowtoY(currentrow)
                    fieldidlakes = arcpy.Describe(str_lakes).OIDFieldName
                    shplakes = arcpy.da.SearchCursor(str_lakes, [
                        "SHAPE@", "EXT_MIN_X", "EXT_MAX_X", "EXT_MIN_Y",
                        "EXT_MAX_Y", fieldidlakes
                    ])
                    for shplake in shplakes:
                        if shplake[0].contains(arcpy.Point(coordX, coordY)):
                            # bug fix: was shplakes[5] (indexing the cursor object,
                            # which is not subscriptable) instead of the row shplake[5]
                            lakes_bci[segnumber] = shplake[5]
                            # Clip against the nearest edge of the lake extent
                            distXmin = abs(coordX - shplake[1])
                            distXmax = abs(coordX - shplake[2])
                            distYmin = abs(coordY - shplake[3])
                            distYmax = abs(coordY - shplake[4])
                            mini = min(distXmin, distXmax, distYmin, distYmax)
                            if mini == distXmin:
                                toclip[segnumber] = ["Xmax", shplake[1]]
                                messages.addMessage(str(segnumber) + " Xmax " + str(shplake[1]))
                            if mini == distXmax:
                                toclip[segnumber] = ["Xmin", shplake[2]]
                                messages.addMessage(str(segnumber) + " Xmin " + str(shplake[2]))
                            if mini == distYmin:
                                toclip[segnumber] = ["Ymax", shplake[3]]
                                messages.addMessage(str(segnumber) + " Ymax " + str(shplake[3]))
                            if mini == distYmax:
                                toclip[segnumber] = ["Ymin", shplake[4]]
                                messages.addMessage(str(segnumber) + " Ymin " + str(shplake[4]))
                    # A segment is too short when shorter than 30% of the target
                    # distance; it must then be merged with the previous segment
                    # (which only exists when dividedriver is True)
                    if totaldistance < 0.3 * distance and dividedriver:
                        if segnumber in toclip:
                            toclip[segnumber - 1] = toclip.pop(segnumber)
                        listtomerged.append(segnumber)
                    totaldistance = 0
                    segnumber += 1
                    dividedriver = False
                elif totaldistance > distance and slope_criteria:
                    # Test whether the target distance has been travelled
                    totaldistance = 0
                    segnumber += 1
                    dividedriver = True
                if not inlake:
                    # Keep a list of the processed cells with their segment number
                    currentpoint = pointflowpath()
                    currentpoint.row = currentrow
                    currentpoint.col = currentcol
                    currentpoint.X = flowdir.ColtoX(currentcol)
                    currentpoint.Y = flowdir.RowtoY(currentrow)
                    currentpoint.distance = totaldistance
                    currentpoint.segnumber = segnumber
                    currentpoint.frompointid = frompoint[1]
                    listpointsflowpath.append(currentpoint)
            # Look for the next cell using the flow direction
            direction = flowdir.getValue(currentrow, currentcol)
            if (direction == 1):
                currentcol = currentcol + 1
                currentdistance = flowdir.raster.meanCellWidth
            if (direction == 2):
                currentcol = currentcol + 1
                currentrow = currentrow + 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            if (direction == 4):
                currentrow = currentrow + 1
                currentdistance = flowdir.raster.meanCellHeight
            if (direction == 8):
                currentcol = currentcol - 1
                currentrow = currentrow + 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            if (direction == 16):
                currentcol = currentcol - 1
                currentdistance = flowdir.raster.meanCellWidth
            if (direction == 32):
                currentcol = currentcol - 1
                currentrow = currentrow - 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            if (direction == 64):
                currentrow = currentrow - 1
                currentdistance = flowdir.raster.meanCellHeight
            if (direction == 128):
                currentcol = currentcol + 1
                currentrow = currentrow - 1
                currentdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                                            flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)
            # Safety checks to make sure we do not leave the rasters
            if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
                intheraster = False
            elif (flowdir.getValue(currentrow, currentcol) != 1 and flowdir.getValue(currentrow, currentcol) != 2 and
                  flowdir.getValue(currentrow, currentcol) != 4 and flowdir.getValue(currentrow, currentcol) != 8 and
                  flowdir.getValue(currentrow, currentcol) != 16 and flowdir.getValue(currentrow, currentcol) != 32 and
                  flowdir.getValue(currentrow, currentcol) != 64 and flowdir.getValue(currentrow, currentcol) != 128):
                intheraster = False
            if intheraster:
                confluence_seg = raster_segments.getValue(currentrow, currentcol)
                if (confluence_seg != -255):
                    # Reached a confluence already processed.
                    # If the segment we arrive in is limited by a lake, the confluence
                    # may be too: copy the toclip item to limit the zone extent, but
                    # keep the elevation at -999 (hfix will come from the simulation
                    # of the confluent stream)
                    if confluence_seg in listtomerged:
                        confluence_seg -= 1
                    if confluence_seg in toclip:
                        clipitem = list(toclip[confluence_seg])
                        toclip[segnumber] = clipitem
                    if totaldistance < 0.3 * distance and dividedriver:
                        # Segment too short (shorter than 30% of the target distance):
                        # merge it with the previous segment (which only exists when
                        # dividedriver is True)
                        listtomerged.append(segnumber)
                        if segnumber in toclip:
                            toclip[segnumber - 1] = toclip.pop(segnumber)
                    intheraster = False
        # For every processed cell along the flow path
        for currentpoint in listpointsflowpath:
            # Cells of a too-short segment get the previous segment number
            if currentpoint.segnumber in listtomerged:
                currentpoint.segnumber -= 1
            # Record the cell in the segment raster
            raster_segments.setValue(currentpoint.row, currentpoint.col, currentpoint.segnumber)
            if currentpoint.segnumber not in inputpoints:
                newpoint = pointflowpath()
                newpoint.type = "main"
                newpoint.frompointid = currentpoint.frompointid
                # Coordinates of the source point of the segment
                newpoint.X = currentpoint.X
                newpoint.Y = currentpoint.Y
                inputpoints[currentpoint.segnumber] = newpoint
    raster_segments.save()
    ### End of the processing identifying the segments ###
    ### Turn the segment raster into zones ###
    # Conversion to polylines
    tmp_segments = arcpy.env.scratchWorkspace + "\\tmpsegments.shp"
    arcpy.RasterToPolyline_conversion(str_segments, tmp_segments)
    arcpy.Dissolve_management(tmp_segments, str_linesegments, "GRID_CODE")
    arcpy.Delete_management(tmp_segments)
    # Buffer creation
    # Hardcoded: longitudinal buffer of 1/10 of the lateral extension
    # (obtained by Euclidean Allocation)
    tmp_segmentsbuf = arcpy.env.scratchWorkspace + "\\tmpsegments_buf.shp"
    arcpy.Buffer_analysis(str_linesegments, tmp_segmentsbuf, bufferw / 10.)
    segalloc = arcpy.sa.EucAllocation(raster_segments.raster, bufferw)
    arcpy.RasterToPolygon_conversion(segalloc, tmp_segments)
    arcpy.AddField_management(tmp_segments, "GRID_CODE", "LONG")
    arcpy.CalculateField_management(tmp_segments, "GRID_CODE", "!GRIDCODE!", "PYTHON_9.3")
    tmp_segments2 = arcpy.env.scratchWorkspace + "\\tmpsegments2.shp"
    arcpy.Merge_management([tmp_segmentsbuf, tmp_segments], tmp_segments2)
    arcpy.Dissolve_management(tmp_segments2, str_bufferedsegments, ["GRID_CODE"], multi_part="SINGLE_PART")
    arcpy.Delete_management(tmp_segments)
    arcpy.Delete_management(tmp_segments2)
    arcpy.Delete_management(str_lakes)
    # Zone creation for each segment
    arcpy.CreateFeatureclass_management(
        str_zonesfolder, "polyzones.shp", "POLYGON", str_bufferedsegments,
        spatial_reference=flowdir.raster.spatialReference)
    polyzones = str_zonesfolder + "\\polyzones.shp"
    arcpy.AddField_management(polyzones, "Lake_ID", "LONG")
    cursor = arcpy.da.InsertCursor(polyzones, ["GRID_CODE", "SHAPE@", "Lake_ID"])
    segmentscursor = arcpy.da.UpdateCursor(str_bufferedsegments, ["GRID_CODE", "SHAPE@"])
    for segment in segmentscursor:
        # Rectangular zone = extent of the buffered segment, possibly clipped
        # against the nearest lake edge
        Xmin = segment[1].extent.XMin
        Ymin = segment[1].extent.YMin
        Xmax = segment[1].extent.XMax
        Ymax = segment[1].extent.YMax
        if segment[0] in toclip:
            if toclip[segment[0]][0] == "Xmin":
                Xmin = max(toclip[segment[0]][1], Xmin)
            if toclip[segment[0]][0] == "Xmax":
                Xmax = min(toclip[segment[0]][1], Xmax)
            if toclip[segment[0]][0] == "Ymin":
                Ymin = max(toclip[segment[0]][1], Ymin)
            if toclip[segment[0]][0] == "Ymax":
                Ymax = min(toclip[segment[0]][1], Ymax)
        segmentscursor.updateRow(segment)
        array = arcpy.Array([
            arcpy.Point(Xmin, Ymin),
            arcpy.Point(Xmin, Ymax),
            arcpy.Point(Xmax, Ymax),
            arcpy.Point(Xmax, Ymin)
        ])
        polygon = arcpy.Polygon(array)
        if segment[0] in lakes_bci:
            lakeid = lakes_bci[segment[0]]
        else:
            lakeid = -999
        cursor.insertRow([segment[0], polygon, lakeid])
    del cursor
    del segmentscursor
    # Source-point feature class, one point per zone
    arcpy.CreateFeatureclass_management(
        os.path.dirname(save_sourcepoints),
        os.path.basename(save_sourcepoints), "POINT",
        spatial_reference=flowdir.raster.spatialReference)
    arcpy.AddField_management(save_sourcepoints, "ZoneID", "LONG")
    arcpy.AddField_management(save_sourcepoints, "fpid", "LONG")
    pointcursor = arcpy.da.InsertCursor(save_sourcepoints, ["ZoneID", "fpid", "SHAPE@XY"])
    for pointkey in inputpoints:
        pointcursor.insertRow([
            pointkey, inputpoints[pointkey].frompointid,
            (inputpoints[pointkey].X, inputpoints[pointkey].Y)
        ])
    del pointcursor
    return
def execute_FloodAndChannel(r_dem, r_elevation, r_flowdir, r_slope, str_frompoints, threshold_slope, threshold_growth, maxiter, str_output, messages, language="FR"):
    """Map the channel and flooded area by growing outward from each flow path.

    From each start point in str_frompoints, the flow path is followed on the
    D8 flow-direction raster and every flow-path cell receives the water
    elevation read from r_elevation.  The wetted area is then grown one cell
    per iteration: a neighbour is added when its DEM elevation is below the
    water elevation of the cell it grows from (and, when r_slope is provided,
    when its slope is below threshold_slope).  Growth stops when the number of
    cells added in an iteration falls below threshold_growth times the
    flow-path length, or after maxiter iterations.  The result raster (water
    elevation on flooded cells) is written to str_output.

    r_slope may be None.  messages is the ArcGIS messages object.
    """
    # Load the input rasters and make sure they share the same grid
    dem = RasterIO(r_dem)
    elevation = RasterIO(r_elevation)
    slope = None
    if r_slope is not None:
        slope = RasterIO(r_slope)
    flowdir = RasterIO(r_flowdir)
    try:
        dem.checkMatch(flowdir)
        dem.checkMatch(elevation)
        if slope is not None:
            dem.checkMatch(slope)
    except Exception as e:
        # str(e): Python 3 exceptions have no .message attribute
        messages.addErrorMessage(str(e))

    # Temporary raster created in the scratch workspace
    randomname = binascii.hexlify(os.urandom(6)).decode()
    temp_flood = arcpy.env.scratchWorkspace + "\\" + str(randomname)
    Result = RasterIO(r_dem, temp_flood, float, -255)

    # Count the start points to configure the progress bar
    frompointcursor = arcpy.da.SearchCursor(str_frompoints, "SHAPE@")
    count = 0
    for frompoint in frompointcursor:
        count += 1
    progtext = "Traitement"
    if language == "EN":
        progtext = "Processing"
    arcpy.SetProgressor("step", progtext, 0, count, 1)
    progres = 0

    # Process each start point
    frompointcursor = arcpy.da.SearchCursor(str_frompoints, ["SHAPE@", "OID@"])
    for frompoint in frompointcursor:
        # Update the progress bar
        arcpy.SetProgressorPosition(progres)
        progres += 1

        # Geometry of the start point, converted to raster coordinates
        frompointshape = frompoint[0].firstPoint
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)

        # Safety checks: the start point must be inside the rasters, on a valid D8 cell
        intheraster = True
        if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
            intheraster = False
        elif flowdir.getValue(currentrow, currentcol) not in (1, 2, 4, 8, 16, 32, 64, 128):
            intheraster = False

        listnewcells = []
        # First pass: add the river cells along the flow path
        while intheraster:
            point = pointflowpath()
            point.row = currentrow
            point.col = currentcol
            point.elev = elevation.getValue(currentrow, currentcol)
            listnewcells.append(point)
            Result.setValue(currentrow, currentcol, point.elev)

            # Move to the next cell following the D8 flow direction
            direction = flowdir.getValue(currentrow, currentcol)
            if direction == 1:
                currentcol = currentcol + 1
            if direction == 2:
                currentcol = currentcol + 1
                currentrow = currentrow + 1
            if direction == 4:
                currentrow = currentrow + 1
            if direction == 8:
                currentcol = currentcol - 1
                currentrow = currentrow + 1
            if direction == 16:
                currentcol = currentcol - 1
            if direction == 32:
                currentcol = currentcol - 1
                currentrow = currentrow - 1
            if direction == 64:
                currentrow = currentrow - 1
            if direction == 128:
                currentcol = currentcol + 1
                currentrow = currentrow - 1

            # Safety checks: stay inside the rasters, on valid D8 cells
            if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
                intheraster = False
            elif flowdir.getValue(currentrow, currentcol) not in (1, 2, 4, 8, 16, 32, 64, 128):
                intheraster = False
            if intheraster:
                if Result.getValue(currentrow, currentcol) != Result.nodata:
                    # Confluence reached: this path was already processed
                    intheraster = False

        lengthflowpath = len(listnewcells)
        if lengthflowpath == 0:
            # Start point was invalid: nothing to grow (avoids a division by zero below)
            continue

        # Growth iterations: widen the wetted area one cell per iteration until
        # relative growth becomes negligible or maxiter is reached
        iteration = 0
        while ((float(len(listnewcells)) / float(lengthflowpath)) >= threshold_growth) and iteration < maxiter:
            currentlistnewcells = []
            iteration += 1
            # For each cell added by the previous iteration...
            while len(listnewcells) > 0:
                currentpoint = listnewcells.pop()
                # ... test its four direct neighbours
                for i in range(1, 5):
                    neighbourcol = currentpoint.col
                    neighbourrow = currentpoint.row
                    if i == 1:
                        neighbourcol += 1
                    elif i == 2:
                        neighbourcol -= 1
                    elif i == 3:
                        neighbourrow += 1
                    elif i == 4:
                        neighbourrow -= 1
                    try:
                        # Neighbour exists and has not been tested yet...
                        if (Result.getValue(neighbourrow, neighbourcol) == Result.nodata) and dem.getValue(
                                neighbourrow, neighbourcol) != dem.nodata:
                            testslope = True
                            if slope is not None:
                                testslope = slope.getValue(neighbourrow, neighbourcol) < threshold_slope
                            # ... is it below the local water elevation (in channel / flooded)?
                            if (dem.getValue(neighbourrow, neighbourcol) < currentpoint.elev) and testslope:
                                # Flooded: store the water elevation and keep growing from this cell
                                Result.setValue(neighbourrow, neighbourcol, currentpoint.elev)
                                point = pointflowpath()
                                point.row = neighbourrow
                                point.col = neighbourcol
                                point.elev = currentpoint.elev
                                currentlistnewcells.append(point)
                            else:
                                # Rejected cells are set to -999 so they are not tested again
                                Result.setValue(neighbourrow, neighbourcol, -999)
                    except IndexError:
                        # Raised (and ignored) at the border of the raster
                        pass
            listnewcells.extend(currentlistnewcells)

    # Remove the -999 markers from the final result
    Result.save()
    raster_res = arcpy.sa.SetNull(temp_flood, temp_flood, "VALUE = -999")
    raster_res.save(str_output)
    arcpy.Delete_management(temp_flood)
    return
def execute_LinearInterpolation(r_flowdir, str_frompoint, r_values, str_results, messages, language="FR"):
    """Interpolate values linearly along flow paths between cross-sections.

    Each flow path is followed from its start point in str_frompoint on the D8
    flow-direction raster r_flowdir.  Cells where r_values holds data (the
    cross-sections) keep their value; every other flow-path cell receives a
    value linearly interpolated, by flow distance, between the nearest
    upstream and downstream cross-sections.  Cells upstream of the first
    cross-section take its value; cells downstream of the last take the last
    value.  The result is written to the raster str_results.  A warning is
    issued for flow paths with fewer than two cross-sections.
    """
    # Load the input rasters and make sure they share the same grid
    flowdir = RasterIO(r_flowdir)
    bkfatcs = RasterIO(r_values)
    try:
        flowdir.checkMatch(bkfatcs)
    except Exception as e:
        # str(e): Python 3 exceptions have no .message attribute
        messages.addErrorMessage(str(e))
    Result = RasterIO(r_flowdir, str_results, float, -255)

    # Count the start points to configure the progress bar
    count = 0
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, "OID@")
    for frompoint in frompointcursor:
        count += 1
    progtext = "Traitement"
    if language == "EN":
        progtext = "Processing"
    arcpy.SetProgressor("step", progtext, 0, count, 1)
    progres = 0

    # Length of a diagonal move (loop invariant)
    diagdistance = math.sqrt(flowdir.raster.meanCellWidth * flowdir.raster.meanCellWidth +
                             flowdir.raster.meanCellHeight * flowdir.raster.meanCellHeight)

    # Process each start point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, ["OID@", "SHAPE@"])
    for frompoint in frompointcursor:
        # Update the progress bar
        arcpy.SetProgressorPosition(progres)
        progres += 1

        # Geometry of the start point, converted to raster coordinates
        frompointshape = frompoint[1].firstPoint
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)

        # Safety checks: the start point must be inside the rasters, on a valid D8 cell
        intheraster = True
        if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
            intheraster = False
        elif flowdir.getValue(currentrow, currentcol) not in (1, 2, 4, 8, 16, 32, 64, 128):
            intheraster = False

        listpointsflowpath = []
        listdistance = []   # flow distance of each cross-section from the start point
        listelevation = []  # known value at each cross-section
        totaldistance = 0
        currentdistance = 0
        confluence = False

        # Walk each cell along the flow path
        while intheraster:
            currentpoint = pointflowpath()
            currentpoint.row = currentrow
            currentpoint.col = currentcol
            totaldistance = totaldistance + currentdistance
            currentpoint.flowlength = totaldistance
            currentpoint.oncs = False
            # Record the cells of known value (cross-sections) with their flow distance
            if bkfatcs.getValue(currentrow, currentcol) != bkfatcs.nodata:
                if confluence:
                    # First data point after a confluence: stop the walk here
                    intheraster = False
                listdistance.append(totaldistance)
                listelevation.append(bkfatcs.getValue(currentrow, currentcol))
                currentpoint.oncs = True
            # Index of the last cross-section met so far (-1 if none yet)
            currentpoint.previouscsid = len(listdistance) - 1
            if not confluence:
                listpointsflowpath.append(currentpoint)

            # Move to the next cell following the D8 flow direction,
            # accumulating the travelled distance
            direction = flowdir.getValue(currentrow, currentcol)
            if direction == 1:
                currentcol = currentcol + 1
                currentdistance = flowdir.raster.meanCellWidth
            if direction == 2:
                currentcol = currentcol + 1
                currentrow = currentrow + 1
                currentdistance = diagdistance
            if direction == 4:
                currentrow = currentrow + 1
                currentdistance = flowdir.raster.meanCellHeight
            if direction == 8:
                currentcol = currentcol - 1
                currentrow = currentrow + 1
                currentdistance = diagdistance
            if direction == 16:
                currentcol = currentcol - 1
                currentdistance = flowdir.raster.meanCellWidth
            if direction == 32:
                currentcol = currentcol - 1
                currentrow = currentrow - 1
                currentdistance = diagdistance
            if direction == 64:
                currentrow = currentrow - 1
                currentdistance = flowdir.raster.meanCellHeight
            if direction == 128:
                currentcol = currentcol + 1
                currentrow = currentrow - 1
                currentdistance = diagdistance

            # Safety checks: stay inside the rasters, on valid D8 cells
            if currentcol < 0 or currentcol >= flowdir.raster.width or currentrow < 0 or currentrow >= flowdir.raster.height:
                intheraster = False
            elif flowdir.getValue(currentrow, currentcol) not in (1, 2, 4, 8, 16, 32, 64, 128):
                intheraster = False
            if intheraster:
                if Result.getValue(currentrow, currentcol) != -255:
                    # Confluence reached: keep going until the next data point
                    confluence = True

        if len(listdistance) <= 1:
            # Warning when there is at most one data point: no interpolation possible
            if language == "FR":
                messages.addWarningMessage(
                    "Point source {0}: pas assez de sections transversales".format(frompoint[0]))
            else:
                messages.addWarningMessage(
                    "From point {0}: not enough cross-sections".format(frompoint[0]))
        else:
            currentpointnumber = 0
            # Interpolate every point recorded along the flow path
            for currentpoint in listpointsflowpath:
                try:
                    if currentpoint.previouscsid == -1:
                        # Special case: points upstream of the first cross-section
                        # take the first cross-section's value
                        finalvalue = listelevation[0]
                    else:
                        if currentpoint.oncs:
                            # Special case: points located on a cross-section keep its value
                            finalvalue = listelevation[currentpoint.previouscsid]
                        else:
                            # Linear interpolation (by flow distance) between the
                            # surrounding cross-sections
                            finalvalue = listelevation[currentpoint.previouscsid] * \
                                (listdistance[currentpoint.previouscsid + 1] - currentpoint.flowlength) / \
                                (listdistance[currentpoint.previouscsid + 1] - listdistance[currentpoint.previouscsid]) \
                                + listelevation[currentpoint.previouscsid + 1] * \
                                (currentpoint.flowlength - listdistance[currentpoint.previouscsid]) / \
                                (listdistance[currentpoint.previouscsid + 1] - listdistance[currentpoint.previouscsid])
                except IndexError:
                    # IndexError when downstream of the last cross-section: take the last value
                    finalvalue = listelevation[-1]
                Result.setValue(currentpoint.row, currentpoint.col, finalvalue)
                currentpointnumber = currentpointnumber + 1

    Result.save()
    return
def execute_LinearInterpolationWithPriority(r_flowdir, str_frompoint, r_values, str_results, messages, language="FR"):
    """Interpolate values along the flow network, choosing upstream data points.

    Builds the flow trees from r_flowdir and str_frompoint, then for each
    cross-section picks the best upstream data point (closest value to the
    downstream data point when one exists, highest value otherwise) and
    interpolates linearly by flow distance between the downstream and upstream
    data points.  Cells with no downstream (resp. upstream) data point are
    extrapolated from the upstream (resp. downstream) value.  Results are
    written to the raster str_results.
    """
    # Load the input rasters and make sure they share the same grid
    flowdir = RasterIO(r_flowdir)
    valuesatcs = RasterIO(r_values)
    try:
        flowdir.checkMatch(valuesatcs)
    except Exception as e:
        # str(e): Python 3 exceptions have no .message attribute
        messages.addErrorMessage(str(e))

    trees = build_trees(flowdir, str_frompoint, tointerpolate=valuesatcs)

    # Find how to interpolate
    # Interpolation can not be done in this first run through the trees because
    # the totaldistance must be calculated first
    for tree in trees:
        totaldistance = 0
        datacs_down = None
        for segment in tree.treesegments():
            for cs in segment.get_profile():
                if cs.tointerpolate != valuesatcs.nodata:
                    datacs_down = cs
                bestcsup = None
                for datacs_up in tree.points_up_with_data(segment, cs, "tointerpolate", valuesatcs.nodata):
                    if datacs_down is not None:
                        # A downstream point exists: look for the upstream point
                        # with the closest value
                        if bestcsup is None or (abs(datacs_down.tointerpolate - datacs_up.tointerpolate) <
                                                abs(datacs_down.tointerpolate - bestcsup.tointerpolate)):
                            bestcsup = datacs_up
                    else:
                        # No downstream point: look for the upstream point with
                        # the highest value
                        if bestcsup is None or (datacs_up.tointerpolate > bestcsup.tointerpolate):
                            bestcsup = datacs_up
                cs.datacs_up = bestcsup
                if (datacs_down is not None) and (datacs_down.datacs_up == cs.datacs_up):
                    # General case
                    cs.datacs_down = datacs_down
                else:
                    # Special case at a confluence: the local cs, in a narrow stream,
                    # must be extrapolated from the upstream data.
                    # Or the cs is downstream of the most downstream data point.
                    cs.datacs_down = None
                totaldistance += cs.dist
                cs.totaldist = totaldistance

    # Calculating the interpolations and saving the results
    Result = RasterIO(r_values, str_results, float, -255)
    for tree in trees:
        for segment in tree.treesegments():
            for cs in segment.get_profile():
                if cs.datacs_down is None:
                    # Extrapolation from upstream
                    cs.interpolated = cs.datacs_up.tointerpolate
                else:
                    if cs.datacs_up is None:
                        # Extrapolation from downstream
                        cs.interpolated = cs.datacs_down.tointerpolate
                    else:
                        # Linear interpolation by flow distance
                        cs.interpolated = (cs.totaldist - cs.datacs_down.totaldist) \
                            * (cs.datacs_up.tointerpolate - cs.datacs_down.tointerpolate) \
                            / (cs.datacs_up.totaldist - cs.datacs_down.totaldist) \
                            + cs.datacs_down.tointerpolate
                Result.setValue(cs.row, cs.col, cs.interpolated)
    Result.save()
    return
def execute_BedAssessmentMultiDEM(r_flowdir, str_frompoint, r_width, zwater_dir, manning, result_dir, Q_dir, downstream_s, r_lakes, messages):
    """Invert 1D hydraulics to estimate bed elevation, for several LiDAR DEMs.

    One bed-elevation raster is produced per LiDAR acquisition day, written
    into result_dir.  See the comment block below for differences with the
    single-DEM BedAssessment tool.
    """
    # Work as BedAssessment with the following modifications:
    # - width, zwater and Q are folders, with rasters within for each day of LiDAR acquisition (same name for the
    #   rasters of the same day in the different folders)
    # - width and Q must have values at the watershed scale
    # - zwater has value only where the DEM is available for a given day
    # - When the zwater value is found and there no zwater downstream, the normal depth is calculated at the downstream
    #   point, using the (average) slope and the (average) bed elevation of the valid downstream point result(s)
    # - The results (bed elevation) are also for each day of LiDAR acquisition, so it's a folder to.
    flowdir = RasterIO(r_flowdir)
    zwater_dict = {}
    width = RasterIO(r_width)
    width_dict = {}
    lakes = RasterIO(r_lakes)
    lakes_dict = {}
    try:
        lakes.checkMatch(width)
    except Exception as e:
        messages.addErrorMessage("Lakes and width files resolution or extent do not match")
        raise RuntimeError
    # Load one water-surface raster per LiDAR day, keyed by raster name
    arcpy.env.workspace = zwater_dir
    rasterlist = arcpy.ListRasters()
    try:
        for raster in rasterlist:
            zwater_raster = RasterIO(arcpy.Raster(raster))
            raster_name = zwater_raster.raster.name
            zwater_dict[raster_name] = zwater_raster
            width.checkMatch(zwater_raster)
            # creating the dictionnary for the width and the lakes.
            # Not a good solution to copy all these data
            width_dict[raster_name] = width
            lakes_dict[raster_name] = lakes
    except Exception as e:
        messages.addErrorMessage("Water surface file "+raster_name+" and width resolution or extent do not match")
        raise RuntimeError
    # Load one discharge raster per LiDAR day, keyed by raster name
    Q_dict = {}
    arcpy.env.workspace = Q_dir
    rasterlist = arcpy.ListRasters()
    try:
        for raster in rasterlist:
            q_raster = RasterIO(arcpy.Raster(raster))
            raster_name = q_raster.raster.name
            Q_dict[raster_name] = q_raster
            width.checkMatch(q_raster)
    except Exception as e:
        messages.addErrorMessage("Discharge file " + raster_name + " and width resolution or extent do not match")
        raise RuntimeError
    # Discharge and water-surface folders must contain the same raster names
    if set(Q_dict.keys()) != set(zwater_dict.keys()):
        messages.addErrorMessage("List of discharge rasters and list of water surface rasters do not match")
        raise RuntimeError

    trees = build_trees(flowdir, str_frompoint, dtype="MULTI", width=width_dict, wslidar=zwater_dict, Q=Q_dict,
                        inlake=lakes_dict)
    #pickle.dump(trees, open(r"D:\InfoCrue\tmp\savetreebed_bec.pkl", "wb"), protocol=2)
    #trees = pickle.load(open(r"D:\InfoCrue\tmp\savetreebed_v6.pkl", "rb"))

    # Ordering the DEMs to be processed
    # rasters can be added several times, if there are several reaches with gaps between
    # each raster (or part of a raster) need to be fully processed before passing to the next one.
    # Let's call each of the data processing a "run". We create a list of runs, defined by a raster and a number
    # ordering the runs. The run number is given to the csdata.
    # Meanwhile, let's check if the cs has valid data (i.e. data on at least one water surface elevation raster)
    runlist = []
    current_run_num = 0
    for tree in trees:
        print (tree)
        for segment, prev_cs, cs in tree.browsepts():
            cs.valid_data = False
            for raster_name, csdata in cs.data_dict.items():
                # A missing discharge anywhere is fatal
                if csdata.Q == Q_dict[raster_name].nodata:
                    messages.addErrorMessage("Error: missing discharges for raster "+raster_name)
                    raise RuntimeError()
                if csdata.wslidar != zwater_dict[raster_name].nodata and csdata.inlake != 1:
                    cs.valid_data = True
                    if prev_cs is None:
                        # Most downstream cell with data: a new run starts here
                        current_run_num += 1
                        runlist.append((raster_name, current_run_num))
                        csdata.run_num = current_run_num
                    else:
                        if not (prev_cs.data_dict[raster_name].wslidar != zwater_dict[raster_name].nodata
                                and prev_cs.data_dict[raster_name].inlake != 1):
                            # Downstream cell has no data for this raster: new run
                            current_run_num += 1
                            runlist.append((raster_name, current_run_num))
                            csdata.run_num = current_run_num
                        else:
                            # Continue the downstream cell's run
                            csdata.run_num = prev_cs.data_dict[raster_name].run_num
                else:
                    # run_num == 0 marks "no valid data for this raster at this cell"
                    csdata.run_num = 0
    print (runlist)

    # 1D hydraulic calculations
    # .wslidar: water surface measured on the LiDAR (do not change)
    # .z: bed elevation (calculated after each iteration)
    # .ws: water surface calculated at each iteration
    for raster_name, run_num in runlist:
        # iterate through each "run"
        print (raster_name)
        print (run_num)
        dem_reached = False
        results_dict = {}
        # first run: calculate downstream border condition
        for tree in trees:
            #print tree
            for segment, prev_cs, cs in tree.browsepts():
                csdata = cs.data_dict[raster_name]
                csdata.n = manning
                if csdata.run_num == run_num:
                    dem_reached = True
                    # - Calculate bed elevation from other DEMs
                    # - compute hydraulic with the discharge for the current DEM
                    list_avg_z = []
                    for otherraster_name, othercsdata in cs.data_dict.items():
                        if othercsdata.run_num != 0 and othercsdata.run_num < run_num:
                            # already treated csdata
                            list_avg_z.append(othercsdata.z)
                    if len(list_avg_z) > 0:
                        # A bed estimate exists from previously processed DEMs:
                        # average it and run the hydraulics with the current discharge
                        prevres_csdata = copy.deepcopy(csdata)
                        cs.prevres_csdata = prevres_csdata
                        prevres_csdata.z = sum(list_avg_z) / len(list_avg_z)
                        # Apply 1D hydraulic
                        if prev_cs == None or not prev_cs.valid_data:
                            # downstream cs calculation: normal depth from the slope proxy
                            prevres_csdata.s = cs.proxy_s
                            manning_solver(prevres_csdata)
                            prevres_csdata.v = prevres_csdata.Q / (prevres_csdata.width * prevres_csdata.y)
                            prevres_csdata.h = prevres_csdata.z + prevres_csdata.y + prevres_csdata.v ** 2 / (2 * g)
                            prevres_csdata.Fr = prevres_csdata.v / (g * prevres_csdata.y) ** 0.5
                            prevres_csdata.solver = "manning"
                            prevres_csdata.type = 0
                        else:
                            cs_solver(prevres_csdata, prev_cs.prevres_csdata)
                            prevres_csdata.solver = "regular"
                        cs.proxy_s = prevres_csdata.s
                    else:
                        # slope is passed through the cells, assuming a uniform flow
                        if prev_cs != None:
                            if csdata.inlake == 1:
                                # if we pass through a lake than we use the very gentle downstream slope
                                cs.proxy_s = downstream_s
                            else:
                                # else we just copy the slope from the downstream cell
                                cs.proxy_s = prev_cs.proxy_s
                            if not prev_cs.valid_data and csdata.run_num == run_num and cs.proxy_s != downstream_s:
                                # Gap: no valid data in any DEM
                                messages.addWarningMessage("Gap at " + str(cs.X) + ", " + str(cs.Y) +
                                                           ". Normal depth applied based on downstream slope")
                # Initialise water surface and bed at the measured LiDAR surface
                if csdata.run_num == run_num:
                    csdata.ws = csdata.wslidar
                    csdata.z = csdata.wslidar
                if prev_cs == None:
                    cs.proxy_s = downstream_s

        # Iterate the hydraulic solve + backwater correction until convergence
        for tree in trees:
            enditeration = False
            iteration = 0
            while not enditeration:
                iteration += 1
                for segment, prev_cs, cs in tree.browsepts():
                    csdata = cs.data_dict[raster_name]
                    csdata.n = manning
                    if csdata.run_num == run_num:
                        # Apply 1D hydraulic
                        if prev_cs == None or not prev_cs.valid_data or prev_cs.data_dict[raster_name].run_num == 0:
                            # No usable downstream result: normal depth (Manning)
                            csdata.s = cs.proxy_s
                            manning_solver(csdata)
                            csdata.v = csdata.Q / (csdata.width * csdata.y)
                            csdata.h = csdata.z + csdata.y + csdata.v ** 2 / (2 * g)
                            csdata.Fr = csdata.v / (g * csdata.y) ** 0.5
                            csdata.solver = "manning"
                            csdata.type = 0
                        else:
                            cs_solver(csdata, prev_cs.data_dict[raster_name])
                            csdata.solver = "regular"
                        csdata.ws_before_correction = csdata.ws
                        csdata.ws = csdata.z + csdata.y
                        # dif: misfit between computed and measured water surface
                        csdata.dif = csdata.ws - csdata.wslidar
                corrections = []
                # down to up: group cells into backwater "correction" reaches
                for segment, prev_cs, cs in tree.browsepts():
                    csdata = cs.data_dict[raster_name]
                    if csdata.run_num == run_num:
                        if prev_cs != None and prev_cs.data_dict[raster_name].run_num == run_num:
                            prev_csdata = prev_cs.data_dict[raster_name]
                            if prev_csdata.z_fill > csdata.z:
                                if prev_csdata.z == prev_csdata.z_fill:
                                    # first backwater cell
                                    correction = []
                                    corrections.append(correction)
                                    csdata.idcorrection = len(corrections) - 1
                                else:
                                    correction = corrections[prev_csdata.idcorrection]
                                    csdata.idcorrection = prev_csdata.idcorrection
                                csdata.z_fill = prev_csdata.z_fill
                                correction.append(csdata)
                            else:
                                csdata.z_fill = csdata.z
                                correction = []
                                correction.append(csdata)
                                corrections.append(correction)
                                csdata.idcorrection = len(corrections) - 1
                        else:
                            csdata.z_fill = csdata.z
                            correction = []
                            correction.append(csdata)
                            corrections.append(correction)
                            csdata.idcorrection = len(corrections) - 1
                enditeration = True
                # Shift the bed of each reach by its mean misfit; keep iterating
                # while the water surface still moves by more than 1 cm on average
                for correction in corrections:
                    sumdif = 0
                    sumdifws = 0
                    for cscorrect in correction:
                        sumdif += cscorrect.dif
                        sumdifws += cscorrect.ws - cscorrect.ws_before_correction
                    correcteddif = sumdif / len(correction)
                    difws = abs(sumdifws) / len(correction)
                    if difws > 0.01:
                        for cscorrect in correction:
                            cscorrect.z = cscorrect.z - correcteddif
                        enditeration = False
                print (iteration)

    # Save one bed-elevation raster per LiDAR day
    results_dict = {}
    arcpy.env.workspace = zwater_dir
    rasterlist = arcpy.ListRasters()
    for str_raster in rasterlist:
        raster = arcpy.Raster(str_raster)
        results_dict[raster.name] = RasterIO(r_flowdir, os.path.join(result_dir, raster.name), float, -255)
        for tree in trees:
            for segment in tree.treesegments():
                for pt in segment.get_profile():
                    csdata = pt.data_dict[raster.name]
                    # NOTE(review): 'raster_name' below still holds the last key from the
                    # run loop above; 'raster.name' looks intended — confirm (latent only
                    # if all water-surface rasters share the same nodata value)
                    if csdata.wslidar != zwater_dict[raster_name].nodata and csdata.inlake != 1:
                        # saving results only where the DEMs are
                        results_dict[raster.name].setValue(pt.row, pt.col, csdata.z)
        results_dict[raster.name].save()
    return
def execute_Breach(r_dem, r_flowdir, str_frompoint, SaveResult, messages, language="FR"):
    """Breach the DEM along each flow path so elevation never rises downstream.

    Each flow path from str_frompoint is walked on the D8 flow-direction
    raster; every cell receives the running minimum of the DEM elevations seen
    so far.  When the path reaches a confluence (a cell already carrying a
    result), it keeps following the previously written values until one at or
    below the running minimum is met.  The breached raster is saved to
    SaveResult.
    """
    # Load the input rasters and make sure they share the same grid
    dem = RasterIO(r_dem)
    flowdir = RasterIO(r_flowdir)
    try:
        dem.checkMatch(flowdir)
    except Exception as e:
        messages.addErrorMessage(str(e))
    Result = RasterIO(r_flowdir, SaveResult, float, -255)

    # D8 flow-direction code -> (row offset, col offset)
    moves = {1: (0, 1), 2: (1, 1), 4: (1, 0), 8: (1, -1),
             16: (0, -1), 32: (-1, -1), 64: (-1, 0), 128: (-1, 1)}

    def can_continue(row, col):
        # Mirrors the original safety checks: inside the rasters, valid D8 code,
        # and (tested unconditionally) DEM cell holds data
        ok = not (col < 0 or col >= flowdir.raster.width or row < 0 or row >= flowdir.raster.height)
        if ok and flowdir.getValue(row, col) not in moves:
            ok = False
        if dem.getValue(row, col) == dem.nodata:
            ok = False
        return ok

    # Process each start point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, "SHAPE@")
    for frompoint in frompointcursor:
        # Start-point geometry converted to raster coordinates
        start = frompoint[0].firstPoint
        currentcol = flowdir.XtoCol(start.X)
        currentrow = flowdir.YtoRow(start.Y)
        # Elevation at the previous cell (initialised to the start cell's own elevation)
        prev_z = dem.getValue(currentrow, currentcol)
        intheraster = can_continue(currentrow, currentcol)
        confluence = False

        # Walk each cell along the flow path
        while intheraster:
            if confluence:
                # Past a confluence: follow already-breached values until a
                # value at or below the running minimum is met, then stop
                newz = Result.getValue(currentrow, currentcol)
                if newz <= prev_z:
                    intheraster = False
            else:
                # The final elevation is the smaller of the cell's own elevation
                # and the previous cell's elevation
                newz = dem.getValue(currentrow, currentcol)
            z = min(prev_z, newz)
            Result.setValue(currentrow, currentcol, z)
            prev_z = z

            # Step to the next cell following the D8 flow direction
            drow, dcol = moves[flowdir.getValue(currentrow, currentcol)]
            currentrow += drow
            currentcol += dcol

            # Safety checks: stop at raster borders, invalid codes or nodata
            if not can_continue(currentrow, currentcol):
                intheraster = False
            elif Result.getValue(currentrow, currentcol) != -255:
                # Confluence reached
                confluence = True
    Result.save()
    return
def execute_Slope(r_dem, r_flowdir, str_frompoint, distancesmoothingpath, save_slope, save_newfp, messages, language="FR"):
    """Compute a smoothed slope along each flow path.

    Each flow path is followed from its start point in str_frompoint on the D8
    flow-direction raster.  The slope at each cell is the negated gradient of
    a linear regression of DEM elevation against flow distance, over a window
    of distancesmoothingpath centred on the cell.  Cells too close to an
    upstream or downstream end to fill the window get no value.  The slope
    raster is written to save_slope and the first cell of each slope profile
    is saved as a new start point into save_newfp.
    """
    # Load the input rasters and make sure they share the same grid
    DEM = RasterIO(r_dem)
    FlowDir = RasterIO(r_flowdir)
    try:
        DEM.checkMatch(FlowDir)
    except Exception as e:
        # str(e): Python 3 exceptions have no .message attribute
        messages.addErrorMessage(str(e))
    Result = RasterIO(r_flowdir, save_slope, float, -255)

    # New start points (first cell of each slope profile)
    listfirstpoints = []

    # Count the start points to configure the progress bar
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, "OID@")
    count = 0
    for frompoint in frompointcursor:
        count += 1
    progtext = "Calcul des pentes"
    if language == "EN":
        progtext = "Processing"
    arcpy.SetProgressor("step", progtext, 0, count, 1)
    progres = 0

    # Length of a diagonal move (loop invariant)
    diagdistance = math.sqrt(FlowDir.raster.meanCellWidth * FlowDir.raster.meanCellWidth +
                             FlowDir.raster.meanCellHeight * FlowDir.raster.meanCellHeight)

    # Process each start point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, "SHAPE@")
    for frompoint in frompointcursor:
        # Update the progress bar
        arcpy.SetProgressorPosition(progres)
        progres += 1

        # Geometry of the start point, converted to raster coordinates
        frompointshape = frompoint[0].firstPoint
        currentcol = FlowDir.XtoCol(frompointshape.X)
        currentrow = FlowDir.YtoRow(frompointshape.Y)

        # Safety checks: the start point must be inside the rasters, on a valid D8 cell
        intheraster = True
        if currentcol < 0 or currentcol >= FlowDir.raster.width or currentrow < 0 or currentrow >= FlowDir.raster.height:
            intheraster = False
        elif FlowDir.getValue(currentrow, currentcol) not in (1, 2, 4, 8, 16, 32, 64, 128):
            intheraster = False

        listpointsflowpath = []
        totaldistance = 0
        currentdistance = 0
        confluencedist = 0  # flow distance at which a confluence was first met (0 = none yet)
        firstpoint = True

        # Walk each cell along the flow path
        while intheraster:
            currentpoint = pointflowpath()
            currentpoint.row = currentrow
            currentpoint.col = currentcol
            currentpoint.addeddistance = currentdistance
            totaldistance = totaldistance + currentdistance
            currentpoint.distance = totaldistance
            listpointsflowpath.append(currentpoint)
            # Cells are marked with -999 (this allows confluence detection)
            if confluencedist == 0:
                Result.setValue(currentrow, currentcol, -999)

            # Move to the next cell following the D8 flow direction,
            # accumulating the travelled distance
            direction = FlowDir.getValue(currentrow, currentcol)
            if direction == 1:
                currentcol = currentcol + 1
                currentdistance = FlowDir.raster.meanCellWidth
            if direction == 2:
                currentcol = currentcol + 1
                currentrow = currentrow + 1
                currentdistance = diagdistance
            if direction == 4:
                currentrow = currentrow + 1
                currentdistance = FlowDir.raster.meanCellHeight
            if direction == 8:
                currentcol = currentcol - 1
                currentrow = currentrow + 1
                currentdistance = diagdistance
            if direction == 16:
                currentcol = currentcol - 1
                currentdistance = FlowDir.raster.meanCellWidth
            if direction == 32:
                currentcol = currentcol - 1
                currentrow = currentrow - 1
                currentdistance = diagdistance
            if direction == 64:
                currentrow = currentrow - 1
                currentdistance = FlowDir.raster.meanCellHeight
            if direction == 128:
                currentcol = currentcol + 1
                currentrow = currentrow - 1
                currentdistance = diagdistance

            # Safety checks: stay inside the rasters, on valid D8 cells
            if currentcol < 0 or currentcol >= FlowDir.raster.width or currentrow < 0 or currentrow >= FlowDir.raster.height:
                intheraster = False
            elif FlowDir.getValue(currentrow, currentcol) not in (1, 2, 4, 8, 16, 32, 64, 128):
                intheraster = False
            if intheraster:
                if Result.getValue(currentrow, currentcol) != -255:
                    # Confluence reached
                    if confluencedist == 0:
                        confluencedist = totaldistance + currentdistance
                    # Keep going past the confluence over the slope-smoothing distance
                    if (totaldistance + currentdistance - confluencedist) > distancesmoothingpath:
                        intheraster = False

        currentpointnumber = 0
        # For each point along the flow path
        while currentpointnumber < len(listpointsflowpath):
            currentpoint = listpointsflowpath[currentpointnumber]
            listpointforregression = [currentpoint]
            distancefromcurrentpoint = 0
            nbcellsfromcurrentpoint = 0
            try:
                # Move upstream from the current point until half the smoothing
                # distance is covered
                while distancefromcurrentpoint <= distancesmoothingpath / 2:
                    nbcellsfromcurrentpoint = nbcellsfromcurrentpoint - 1
                    if currentpointnumber + nbcellsfromcurrentpoint >= 0:
                        # Update the distance to the current point
                        distancefromcurrentpoint = distancefromcurrentpoint + listpointsflowpath[
                            currentpointnumber + nbcellsfromcurrentpoint].addeddistance
                        # Add the point to the regression window (slope at the current point)
                        listpointforregression.append(
                            listpointsflowpath[currentpointnumber + nbcellsfromcurrentpoint])
                    else:
                        # The upstream end is closer than the slope-smoothing half-distance
                        raise IndexError
                distancefromcurrentpoint = 0
                nbcellsfromcurrentpoint = 0
                # Same thing moving downstream from the current point
                while distancefromcurrentpoint < distancesmoothingpath / 2:
                    nbcellsfromcurrentpoint = nbcellsfromcurrentpoint + 1
                    if currentpointnumber + nbcellsfromcurrentpoint < len(listpointsflowpath):
                        distancefromcurrentpoint = distancefromcurrentpoint + listpointsflowpath[
                            currentpointnumber + nbcellsfromcurrentpoint].addeddistance
                        listpointforregression.append(
                            listpointsflowpath[currentpointnumber + nbcellsfromcurrentpoint])
                    else:
                        # The downstream end is closer than the slope-smoothing half-distance
                        raise IndexError

                # Linear regression of elevation against flow distance
                sumdistance = 0
                sumelevation = 0
                sumdistanceelevation = 0
                sumsquaredistance = 0
                for pointforregression in listpointforregression:
                    sumdistance = sumdistance + pointforregression.distance
                    sumelevation = sumelevation + DEM.getValue(
                        pointforregression.row, pointforregression.col)
                    sumdistanceelevation = sumdistanceelevation + pointforregression.distance * \
                        DEM.getValue(pointforregression.row, pointforregression.col)
                    sumsquaredistance = sumsquaredistance + pointforregression.distance * pointforregression.distance
                # Slope is the negated regression gradient (elevation drops downstream)
                slope = -(len(listpointforregression) * sumdistanceelevation - sumdistance * sumelevation) / (
                    len(listpointforregression) * sumsquaredistance - sumdistance * sumdistance)
                Result.setValue(currentpoint.row, currentpoint.col, slope)

                if firstpoint:
                    # First point (for this start point) with a computed slope:
                    # record it as a start point of the slope profile
                    newpoint = arcpy.Point(
                        FlowDir.raster.extent.XMin + (currentpoint.col + 0.5) * FlowDir.raster.meanCellWidth,
                        FlowDir.raster.extent.YMax - (currentpoint.row + 0.5) * FlowDir.raster.meanCellHeight)
                    listfirstpoints.append(newpoint)
                    firstpoint = False
            except IndexError:
                # No slope at the upstream and downstream ends (not enough
                # distance to fill the regression window)
                pass
            currentpointnumber = currentpointnumber + 1

    Result.save()
    # Remove the -999 markers from the final result
    raster_res = arcpy.sa.SetNull(save_slope, save_slope, "VALUE = -999")
    raster_res.save(save_slope)

    # Save the list of slope start points
    arcpy.CreateFeatureclass_management(os.path.dirname(save_newfp), os.path.basename(save_newfp), "POINT",
                                        spatial_reference=r_flowdir)
    pointcursor = arcpy.da.InsertCursor(save_newfp, "SHAPE@XY")
    for point in listfirstpoints:
        pointcursor.insertRow([(point.X, point.Y)])
    return
def execute_D8toD4(r_flowdir, r_dem, str_frompoint, str_result, messages, language="FR"):
    """Convert a D8 flow direction raster into a D4 one along the flow paths.

    Starting from each point of str_frompoint, the flow path is walked
    downstream.  Straight moves (codes 1, 4, 16, 64) are written unchanged;
    each diagonal move (2, 8, 32, 128) is replaced by the pair of orthogonal
    moves that passes through the lower (per the DEM) of the two candidate
    intermediate cells.  The walk stops at the raster edge, on an invalid
    direction code, or when it reaches a cell already processed for this
    starting point (confluence).

    :param r_flowdir: D8 flow direction raster
    :param r_dem: DEM raster used to choose intermediate cells (must match r_flowdir)
    :param str_frompoint: feature class holding the upstream starting points
    :param str_result: path where the D4 result raster is saved
    :param messages: ArcGIS messages object (for errors)
    :param language: message language code; not used by this tool
    """
    # Load the input rasters and make sure they share the same grid
    flowdir = RasterIO(r_flowdir)
    dem = RasterIO(r_dem)
    try:
        dem.checkMatch(flowdir)
    except Exception as e:
        # Python 3 fix: exception objects no longer carry a .message attribute
        messages.addErrorMessage(str(e))

    Result = RasterIO(r_flowdir, str_result, float, -255)
    donepoints = __Frompoint_paths()

    # (row, col) offset of each valid D8 direction code
    d8offsets = {1: (0, 1), 2: (1, 1), 4: (1, 0), 8: (1, -1),
                 16: (0, -1), 32: (-1, -1), 64: (-1, 0), 128: (-1, 1)}

    def _split_diagonal(row, col, fpid, dira, cella, dirb, cellb):
        """Replace one diagonal move by two orthogonal moves.

        dira/cella and dirb/cellb describe the two candidate first moves and
        their intermediate cells.  The route through the lower intermediate
        cell (per the DEM) is chosen: the current cell gets the first move's
        direction and the intermediate cell gets the second move's direction.
        Returns False when the walk must stop (nodata neighbour, or the
        intermediate cell was already visited: confluence), True otherwise.
        """
        if dem.getValue(*cella) is None:
            # Neighbour outside the DEM: point toward it and stop
            Result.setValue(row, col, dira)
            return False
        if dem.getValue(*cellb) is None:
            Result.setValue(row, col, dirb)
            return False
        if dem.getValue(*cella) < dem.getValue(*cellb):
            first, cell, second = dira, cella, dirb
        else:
            first, cell, second = dirb, cellb, dira
        Result.setValue(row, col, first)
        if donepoints.done_previously(cell[0], cell[1], fpid):
            # Reached a confluence with an already-processed path
            return False
        Result.setValue(cell[0], cell[1], second)
        donepoints.add_point(cell[0], cell[1], fpid)
        return True

    # Process every starting point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, ["SHAPE@", "OID@"])
    for frompoint in frompointcursor:
        # Geometry of the starting point, converted to raster coordinates
        frompointshape = frompoint[0].firstPoint
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)

        # Safety: the starting point must lie inside the raster,
        # on a cell holding a valid D8 code
        intheraster = True
        if (currentcol < 0 or currentcol >= flowdir.raster.width
                or currentrow < 0 or currentrow >= flowdir.raster.height):
            intheraster = False
        elif flowdir.getValue(currentrow, currentcol) not in d8offsets:
            intheraster = False

        # Walk downstream cell by cell
        while intheraster:
            direction = flowdir.getValue(currentrow, currentcol)
            donepoints.add_point(currentrow, currentcol, frompoint[1])

            if direction in (1, 4, 16, 64):
                # Straight move: keep the direction as is
                Result.setValue(currentrow, currentcol, direction)
            elif direction == 2:    # down-right = right (1) + down (4)
                intheraster = _split_diagonal(currentrow, currentcol, frompoint[1],
                                              1, (currentrow, currentcol + 1),
                                              4, (currentrow + 1, currentcol))
            elif direction == 8:    # down-left = down (4) + left (16)
                intheraster = _split_diagonal(currentrow, currentcol, frompoint[1],
                                              4, (currentrow + 1, currentcol),
                                              16, (currentrow, currentcol - 1))
            elif direction == 32:   # up-left = up (64) + left (16)
                intheraster = _split_diagonal(currentrow, currentcol, frompoint[1],
                                              64, (currentrow - 1, currentcol),
                                              16, (currentrow, currentcol - 1))
            elif direction == 128:  # up-right = up (64) + right (1)
                intheraster = _split_diagonal(currentrow, currentcol, frompoint[1],
                                              64, (currentrow - 1, currentcol),
                                              1, (currentrow, currentcol + 1))

            # Step to the next cell following the original D8 direction
            drow, dcol = d8offsets[direction]
            currentrow += drow
            currentcol += dcol

            # Stop at the raster edge or on an invalid direction code
            if (currentcol < 0 or currentcol >= flowdir.raster.width
                    or currentrow < 0 or currentrow >= flowdir.raster.height):
                intheraster = False
            elif flowdir.getValue(currentrow, currentcol) not in d8offsets:
                intheraster = False

            if intheraster and donepoints.done_previously(currentrow, currentcol, frompoint[1]):
                # Confluence with a path already processed: stop here
                intheraster = False

    Result.save()
    return
def execute_DefBCI(r_flowdir, r_flowacc, distoutput, percent, str_zonesfolder, r_dem, r_width, r_zbed, r_manning, r_mask, str_outputfolder, messages):
    """Build the boundary-condition (.bci) files and ASCII rasters per zone.

    For every zone: clips the DEM on the zone envelope, walks the flow path
    from the zone's main source point to find lateral inflow points (flow
    accumulation jumps of more than `percent` %) and the outlet point, sizes
    an output window of length `distoutput` centred on the outlet (turning 90
    degrees around a corner when needed), writes inbci.shp / outbci.shp, then
    writes one .bci file per zone (QVAR source lines and HVAR window lines —
    presumably the LISFLOOD-FP format, TODO confirm) and exports the zone,
    width, bed, Manning and mask rasters to ASCII.

    :param r_flowdir: D8 flow direction raster
    :param r_flowacc: flow accumulation raster (must match r_flowdir)
    :param distoutput: total length of the output window, in map units
    :param percent: flow accumulation increase (%) flagging a lateral inflow
    :param str_zonesfolder: folder holding polyzones.shp, sourcepoints.shp and
        the per-zone rasters; intermediate files are written here
    :param r_dem: DEM raster (must match r_flowdir)
    :param r_width: channel width raster, clipped and exported per zone
    :param r_zbed: bed elevation raster, clipped and exported per zone
    :param r_manning: Manning coefficient raster, clipped and exported per zone
    :param r_mask: mask raster, clipped and exported per zone
    :param str_outputfolder: folder receiving the .bci and .txt outputs
    :param messages: ArcGIS messages object (for errors)
    """
    save_inbci = str_zonesfolder + "\\inbci.shp"
    save_outbci = str_zonesfolder + "\\outbci.shp"

    # Load the rasters and make sure they share the same grid
    flowdir = RasterIO(r_flowdir)
    flowacc = RasterIO(r_flowacc)
    dem = RasterIO(r_dem)
    try:
        flowdir.checkMatch(flowacc)
        flowdir.checkMatch(dem)
    except Exception as e:
        # Python 3 fix: exception objects no longer carry a .message attribute
        messages.addErrorMessage(str(e))

    # New zones file made of the envelope of each polygon zone
    zones = str_zonesfolder + "\\envelopezones.shp"
    arcpy.FeatureEnvelopeToPolygon_management(str_zonesfolder + "\\polyzones.shp", zones)

    # Clip the DEM on each zone envelope
    zonesscursor = arcpy.da.SearchCursor(zones, ["GRID_CODE", "SHAPE@"])
    for zoneshp in zonesscursor:
        extent = zoneshp[1].extent
        envelope = str(extent.XMin) + " " + str(extent.YMin) + " " + str(extent.XMax) + " " + str(extent.YMax)
        arcpy.Clip_management(dem.raster, envelope, str_zonesfolder + "\\zone" + str(zoneshp[0]))

    # Lists of the source points and of the outlet points
    listinputpoints = []
    listoutputpoints = []

    # Read the main source point of each zone
    sourcepointcursor = arcpy.da.SearchCursor(str_zonesfolder + "\\sourcepoints.shp", ["SHAPE@", "ZoneID", "fpid"])
    for sourcepoint in sourcepointcursor:
        newpoint = pointflowpath()
        newpoint.type = "main"
        newpoint.frompointid = sourcepoint[2]
        newpoint.X = sourcepoint[0].firstPoint.X
        newpoint.Y = sourcepoint[0].firstPoint.Y
        newpoint.numzone = sourcepoint[1]
        # Flow accumulation at the source point
        col = flowacc.XtoCol(newpoint.X)
        row = flowacc.YtoRow(newpoint.Y)
        newpoint.flowacc = flowacc.getValue(row, col)
        listinputpoints.append(newpoint)

    # (row, col) offset of each valid D8 direction code
    d8offsets = {1: (0, 1), 2: (1, 1), 4: (1, 0), 8: (1, -1),
                 16: (0, -1), 32: (-1, -1), 64: (-1, 0), 128: (-1, 1)}

    ### Detection of the outlet points and of the lateral source points ###
    listlateralinputpoints = []
    for mainpoint in listinputpoints:
        # Raster of the zone of this source point
        localraster = RasterIO(arcpy.Raster(str_zonesfolder + "\\zone" + str(mainpoint.numzone)))
        # Coordinates in the global grid and in the zone's local grid
        currentcol = flowdir.XtoCol(mainpoint.X)
        currentrow = flowdir.YtoRow(mainpoint.Y)
        localcol = localraster.XtoCol(mainpoint.X)
        localrow = localraster.YtoRow(mainpoint.Y)
        currentflowacc = flowacc.getValue(currentrow, currentcol)
        mainpoint.flowacc = currentflowacc
        lastflowacc = currentflowacc

        # Walk the flow path downstream
        intheraster = True
        while intheraster:
            prevcol = currentcol
            prevrow = currentrow

            currentflowacc = flowacc.getValue(currentrow, currentcol)
            # A jump of at least `percent` % in flow accumulation marks a lateral inflow
            if 100. * float(currentflowacc - lastflowacc) / float(lastflowacc) >= percent:
                newpoint = pointflowpath()
                newpoint.type = "lateral"
                newpoint.frompointid = mainpoint.frompointid
                newpoint.X = flowdir.ColtoX(currentcol)
                newpoint.Y = flowdir.RowtoY(currentrow)
                newpoint.numzone = mainpoint.numzone
                newpoint.flowacc = currentflowacc
                lastflowacc = currentflowacc
                listlateralinputpoints.append(newpoint)

            # Next cell along the flow direction (no move on an invalid code:
            # the validity test below will then end the walk)
            offset = d8offsets.get(flowdir.getValue(currentrow, currentcol))
            if offset is not None:
                currentrow += offset[0]
                currentcol += offset[1]
                localrow += offset[0]
                localcol += offset[1]

            # Stop at the edge of the global raster or on an invalid direction
            if (currentcol < 0 or currentcol >= flowdir.raster.width
                    or currentrow < 0 or currentrow >= flowdir.raster.height):
                intheraster = False
            elif flowdir.getValue(currentrow, currentcol) not in d8offsets:
                intheraster = False

            # Stop when leaving the zone raster or reaching its nodata
            if (localcol < 0 or localcol >= localraster.raster.width
                    or localrow < 0 or localrow >= localraster.raster.height):
                intheraster = False
            elif localraster.getValue(localrow, localcol) == localraster.nodata:
                intheraster = False

        # Record an outlet point at the last cell processed before leaving the zone
        newpoint = pointflowpath()
        newpoint.numzone = mainpoint.numzone
        newpoint.X = flowdir.ColtoX(prevcol)
        newpoint.Y = flowdir.RowtoY(prevrow)
        # The exit side is the zone edge closest to that last cell
        # (sequential ifs keep the original tie-breaking order: N > S > E > W)
        distwest = newpoint.X - localraster.raster.extent.XMin
        disteast = localraster.raster.extent.XMax - newpoint.X
        distsouth = newpoint.Y - localraster.raster.extent.YMin
        distnorth = localraster.raster.extent.YMax - newpoint.Y
        distside = min(distwest, disteast, distsouth, distnorth)
        if distside == distwest:
            newpoint.side = "W"
        if distside == disteast:
            newpoint.side = "E"
        if distside == distsouth:
            newpoint.side = "S"
        if distside == distnorth:
            newpoint.side = "N"
        listoutputpoints.append(newpoint)

    listinputpoints.extend(listlateralinputpoints)
    ### End of outlet / lateral source detection ###

    ### Configuration of the output windows ###
    def _walklimit(raster, row, col, rowinc, colinc, distinc, dist):
        """Move (row, col) by (rowinc, colinc) until leaving the raster,
        hitting nodata, or covering half the window length.  Returns the
        last position inside the raster and the distance walked."""
        while (not (col < 0 or col >= raster.raster.width
                    or row < 0 or row >= raster.raster.height)) \
                and raster.getValue(row, col) != raster.nodata \
                and dist < distoutput / 2:
            dist += distinc
            row += rowinc
            col += colinc
        return row - rowinc, col - colinc, dist

    def _turn90(point, raster, row, col):
        """The walk left the raster before covering half the window: turn 90
        degrees to continue along the adjacent side.  Sets point.lim3 and
        returns the new (colinc, rowinc, distinc)."""
        if point.side == "W":
            colinc, rowinc = 1, 0
            distinc = raster.raster.meanCellWidth
            point.lim3 = raster.raster.extent.XMin + (col + 0.5) * raster.raster.meanCellWidth
        elif point.side == "E":
            colinc, rowinc = -1, 0
            distinc = raster.raster.meanCellWidth
            point.lim3 = raster.raster.extent.XMin + (col + 0.5) * raster.raster.meanCellWidth
        elif point.side == "N":
            colinc, rowinc = 0, 1
            distinc = raster.raster.meanCellHeight
            point.lim3 = max(raster.raster.extent.YMin,
                             raster.raster.extent.YMax - (row + 1) * raster.raster.meanCellHeight) \
                + 0.5 * raster.raster.meanCellHeight
        else:  # side == "S"
            colinc, rowinc = 0, -1
            distinc = raster.raster.meanCellHeight
            point.lim3 = max(raster.raster.extent.YMin,
                             raster.raster.extent.YMax - (row + 1) * raster.raster.meanCellHeight) \
                + 0.5 * raster.raster.meanCellHeight
        return colinc, rowinc, distinc

    for point in listoutputpoints:
        raster = RasterIO(arcpy.Raster(str_zonesfolder + "\\zone" + str(point.numzone)))
        point.side2 = "0"
        point.lim3 = 0
        point.lim4 = 0

        # First half of the window: walk along the exit side (positive direction)
        if point.side == "W" or point.side == "E":
            colinc, rowinc = 0, 1
            distinc = raster.raster.meanCellHeight
        else:
            colinc, rowinc = 1, 0
            distinc = raster.raster.meanCellWidth
        currentrow, currentcol, distance = _walklimit(
            raster, raster.YtoRow(point.Y), raster.XtoCol(point.X),
            rowinc, colinc, distinc, 0)
        if point.side == "W" or point.side == "E":
            point.lim1 = raster.RowtoY(currentrow)
        else:
            point.lim1 = raster.ColtoX(currentcol)

        # If the walk stopped before covering half the window, turn 90 degrees and continue
        if distance < distoutput / 2:
            distance -= distinc
            colinc, rowinc, distinc = _turn90(point, raster, currentrow, currentcol)
            currentrow, currentcol, distance = _walklimit(
                raster, currentrow, currentcol, rowinc, colinc, distinc, distance)
            # Side reached after the turn
            if point.side == "W" or point.side == "E":
                point.side2 = "S"
                point.lim4 = raster.raster.extent.XMin + (currentcol + 0.5) * raster.raster.meanCellWidth
            else:
                point.side2 = "E"
                point.lim4 = max(raster.raster.extent.YMin,
                                 raster.raster.extent.YMax - (currentrow + 1) * raster.raster.meanCellHeight) \
                    + 0.5 * raster.raster.meanCellHeight

        # Second half of the window: same procedure, other direction
        if point.side == "W" or point.side == "E":
            colinc, rowinc = 0, -1
            distinc = raster.raster.meanCellHeight
        else:
            colinc, rowinc = -1, 0
            distinc = raster.raster.meanCellWidth
        currentrow, currentcol, distance = _walklimit(
            raster, raster.YtoRow(point.Y), raster.XtoCol(point.X),
            rowinc, colinc, distinc, 0)
        if point.side == "W" or point.side == "E":
            point.lim2 = max(raster.raster.extent.YMin,
                             raster.raster.extent.YMax - (currentrow + 1) * raster.raster.meanCellHeight) \
                + 0.5 * raster.raster.meanCellHeight
        else:
            point.lim2 = raster.raster.extent.XMin + (currentcol + 0.5) * raster.raster.meanCellWidth

        if distance < distoutput / 2:
            distance -= distinc
            # NOTE(review): when both half-walks leave the raster this overwrites
            # the lim3 computed on the first side — kept as in the original code.
            colinc, rowinc, distinc = _turn90(point, raster, currentrow, currentcol)
            currentrow, currentcol, distance = _walklimit(
                raster, currentrow, currentcol, rowinc, colinc, distinc, distance)
            if point.side == "W" or point.side == "E":
                point.side2 = "N"
                point.lim4 = raster.raster.extent.XMin + (currentcol + 0.5) * raster.raster.meanCellWidth
            else:
                point.side2 = "W"
                point.lim4 = max(raster.raster.extent.YMin,
                                 raster.raster.extent.YMax - (currentrow + 1) * raster.raster.meanCellHeight) \
                    + 0.5 * raster.raster.meanCellHeight
    ### End of output window configuration ###

    # Create the inbci and outbci shapefiles with their fields
    arcpy.CreateFeatureclass_management(
        os.path.dirname(save_inbci), os.path.basename(save_inbci), "POINT",
        spatial_reference=flowdir.raster.spatialReference)
    arcpy.AddField_management(save_inbci, "zoneid", "LONG")
    arcpy.AddField_management(save_inbci, "flowacc", "LONG")
    arcpy.AddField_management(save_inbci, "type", "TEXT")
    arcpy.AddField_management(save_inbci, "fpid", "LONG")
    arcpy.CreateFeatureclass_management(
        os.path.dirname(save_outbci), os.path.basename(save_outbci), "POINT",
        spatial_reference=flowdir.raster.spatialReference)
    arcpy.AddField_management(save_outbci, "zoneid", "LONG")
    arcpy.AddField_management(save_outbci, "side", "TEXT", field_length=1)
    arcpy.AddField_management(save_outbci, "lim1", "LONG")
    arcpy.AddField_management(save_outbci, "lim2", "LONG")
    arcpy.AddField_management(save_outbci, "side2", "TEXT", field_length=1)
    arcpy.AddField_management(save_outbci, "lim3", "LONG")
    arcpy.AddField_management(save_outbci, "lim4", "LONG")

    # Dump the in-memory lists into the shapefiles
    pointcursor = arcpy.da.InsertCursor(
        save_inbci, ["zoneid", "flowacc", "type", "fpid", "SHAPE@XY"])
    for point in listinputpoints:
        pointcursor.insertRow([point.numzone, point.flowacc, point.type,
                               point.frompointid, (point.X, point.Y)])
    pointcursor = arcpy.da.InsertCursor(
        save_outbci,
        ["zoneid", "side", "lim1", "lim2", "side2", "lim3", "lim4", "SHAPE@XY"])
    for point in listoutputpoints:
        pointcursor.insertRow([point.numzone, point.side, point.lim1, point.lim2,
                               point.side2, point.lim3, point.lim4,
                               (point.X, point.Y)])
    del pointcursor

    # Write the .bci files from the contents of inbci.shp, grouped by zone
    bcipointcursor = arcpy.da.SearchCursor(
        save_inbci, ["SHAPE@", "zoneid", "flowacc", "type"])
    dictsegmentsin = {}
    for point in bcipointcursor:
        dictsegmentsin.setdefault(point[1], []).append(point)
    for segment in dictsegmentsin.values():
        # Defensive init: a "lateral" record should always follow a "main"
        # (sorted by flow accumulation), but avoid a NameError if not
        latnum = 0
        for point in sorted(segment, key=lambda q: q[2]):
            pointshape = point[0].firstPoint
            newfile = str_outputfolder + "\\zone" + str(point[1]) + ".bci"
            if point[3] == "main":
                latnum = 0
                # "w" mode starts a fresh .bci for the zone with its source point
                with open(newfile, 'w') as filebci:
                    filebci.write("P\t" + str(int(pointshape.X)) + "\t" +
                                  str(int(pointshape.Y)) + "\tQVAR\tzone" +
                                  str(point[1]) + "\n")
            if point[3] == "lateral":
                latnum += 1
                with open(newfile, 'a') as filebci:
                    filebci.write("P\t" + str(int(pointshape.X)) + "\t" +
                                  str(int(pointshape.Y)) + "\tQVAR\tzone" +
                                  str(point[1]) + "_" + str(latnum) + "\n")

    # Append the output window to each zone's .bci
    bcipointcursor = arcpy.da.SearchCursor(
        save_outbci,
        ["zoneid", "side", "lim1", "lim2", "side2", "lim3", "lim4", "SHAPE@"])
    for point in bcipointcursor:
        newfile = str_outputfolder + "\\zone" + str(point[0]) + ".bci"
        with open(newfile, 'a') as filebci:
            filebci.write(point[1] + "\t" + str(point[2]) + "\t" + str(point[3]) +
                          "\tHVAR\thvar")
            if str(point[4]) != "0":
                # A second window segment exists after the 90 degrees turn
                filebci.write("\n" + str(point[4]) + "\t" + str(point[5]) + "\t" +
                              str(point[6]) + "\tHVAR\thvar")

    # Clip the width ("w"), bed ("d"), Manning ("n") and mask ("m") rasters on
    # each zone and export everything to ASCII (one zone per main source point)
    for segment in dictsegmentsin.values():
        for point in sorted(segment, key=lambda q: q[2]):
            if point[3] != "main":
                continue
            zonename = "zone" + str(point[1])
            zoneraster = str_zonesfolder + "\\" + zonename
            for prefix, source in (("w", r_width), ("d", r_zbed), ("n", r_manning)):
                arcpy.Clip_management(source, "#",
                                      str_zonesfolder + "\\" + prefix + zonename,
                                      zoneraster, "#", "NONE", "MAINTAIN_EXTENT")
            arcpy.RasterToASCII_conversion(
                zoneraster, str_outputfolder + "\\" + zonename + ".txt")
            for prefix in ("d", "w", "n"):
                arcpy.RasterToASCII_conversion(
                    str_zonesfolder + "\\" + prefix + zonename,
                    str_outputfolder + "\\" + prefix + zonename + ".txt")
            for prefix in ("w", "d", "n"):
                arcpy.Delete_management(str_zonesfolder + "\\" + prefix + zonename)
            arcpy.Clip_management(r_mask, "#", str_zonesfolder + "\\m" + zonename,
                                  zoneraster, "#", "NONE", "MAINTAIN_EXTENT")
            arcpy.RasterToASCII_conversion(
                str_zonesfolder + "\\m" + zonename,
                str_outputfolder + "\\m" + zonename + ".txt")
            arcpy.Delete_management(str_zonesfolder + "\\m" + zonename)
    return
def execute_MovingWindowStats(r_flowdir, str_frompoint, r_values, distance, function, SaveResult, messages):
    """Compute a moving-window statistic along the flow paths.

    For every starting point of str_frompoint, the flow path is walked
    downstream on the D8 flow direction raster.  At each cell of the path,
    `function` is applied to the values of r_values found within a window of
    length `distance` centred on the cell (measured along the path), and the
    result is written into the output raster.  A path stops at the raster
    edge, on an invalid direction code, or when it reaches a cell already
    written by a previous path.

    :param r_flowdir: D8 flow direction raster
    :param str_frompoint: feature class holding the upstream starting points
    :param r_values: raster of the values to aggregate (nodata cells skipped)
    :param distance: window length, in map units, along the flow path
    :param function: callable applied to the list of in-window values; note it
        receives an empty list when no known value falls inside the window
    :param SaveResult: path of the output raster
    :param messages: ArcGIS messages object (for errors and warnings)
    """
    # Load the rasters and make sure they share the same grid
    flowdir = RasterIO(r_flowdir)
    valuesraster = RasterIO(r_values)
    try:
        flowdir.checkMatch(valuesraster)
    except Exception as e:
        # Python 3 fix: exception objects no longer carry a .message attribute
        messages.addErrorMessage(str(e))

    Result = RasterIO(r_flowdir, SaveResult, float, -255)

    # Count the starting points to size the progress bar
    count = 0
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, "OID@")
    for frompoint in frompointcursor:
        count += 1
    arcpy.SetProgressor("step", "Lissage par moyenne mobile", 0, count, 1)
    progres = 0

    # Per-direction (row, col) offset and step length, hoisted out of the walk
    cellw = flowdir.raster.meanCellWidth
    cellh = flowdir.raster.meanCellHeight
    diagonal = math.sqrt(cellw * cellw + cellh * cellh)
    stepinfo = {1: (0, 1, cellw), 2: (1, 1, diagonal), 4: (1, 0, cellh),
                8: (1, -1, diagonal), 16: (0, -1, cellw), 32: (-1, -1, diagonal),
                64: (-1, 0, cellh), 128: (-1, 1, cellw)}

    # Process every starting point
    frompointcursor = arcpy.da.SearchCursor(str_frompoint, ["OID@", "SHAPE@"])
    for frompoint in frompointcursor:
        arcpy.SetProgressorPosition(progres)
        progres += 1

        # Geometry of the starting point, converted to raster coordinates
        frompointshape = frompoint[1].firstPoint
        currentcol = flowdir.XtoCol(frompointshape.X)
        currentrow = flowdir.YtoRow(frompointshape.Y)

        # Safety: the starting point must lie inside the raster,
        # on a cell holding a valid D8 code
        intheraster = True
        if (currentcol < 0 or currentcol >= flowdir.raster.width
                or currentrow < 0 or currentrow >= flowdir.raster.height):
            intheraster = False
        elif flowdir.getValue(currentrow, currentcol) not in stepinfo:
            intheraster = False

        pathpoints = []      # cells along the path
        pathdistances = []   # cumulative distance of each path cell
        knowndistances = []  # cumulative distance of the cells with a known value
        knownvalues = []     # corresponding values read from r_values
        totaldistance = 0
        currentdistance = 0

        # Walk downstream, recording positions, distances and known values
        while intheraster:
            currentpoint = pointflowpath()
            currentpoint.row = currentrow
            currentpoint.col = currentcol
            pathpoints.append(currentpoint)

            totaldistance += currentdistance
            pathdistances.append(totaldistance)

            value = valuesraster.getValue(currentrow, currentcol)
            if value != valuesraster.nodata:
                knowndistances.append(totaldistance)
                knownvalues.append(value)

            # Step to the next cell along the flow direction
            direction = flowdir.getValue(currentrow, currentcol)
            drow, dcol, currentdistance = stepinfo[direction]
            currentrow += drow
            currentcol += dcol

            # Stop at the raster edge or on an invalid direction code
            if (currentcol < 0 or currentcol >= flowdir.raster.width
                    or currentrow < 0 or currentrow >= flowdir.raster.height):
                intheraster = False
            elif flowdir.getValue(currentrow, currentcol) not in stepinfo:
                intheraster = False
            elif Result.getValue(currentrow, currentcol) != -255:
                # A previous path already covers everything downstream
                intheraster = False

        if len(knowndistances) <= 1:
            # Warn when there is at most one data point along this path
            messages.addWarningMessage("Point source {0}: pas assez de sections transversales".format(frompoint[0]))
        else:
            # Apply the statistic at every path cell over its centred window
            for currentpoint, pointdist in zip(pathpoints, pathdistances):
                values = [v for d, v in zip(knowndistances, knownvalues)
                          if abs(d - pointdist) <= distance / 2]
                Result.setValue(currentpoint.row, currentpoint.col, function(values))

    Result.save()
    return