def derive_from_dem(dem):
    """Derive slope and flow direction from a DEM.

    Results are returned in a dictionary that contains references to
    ArcPy Raster objects stored in the "in_memory" (temporary)
    workspace.
    """
    # Snap all subsequent raster operations to the input DEM grid.
    env.snapRaster = dem

    # Flow direction for the whole DEM, persisted to a scratch raster.
    fdr_path = so("flowdir", "random", "in_memory")
    FlowDirection(in_surface_raster=dem, force_flow="NORMAL").save(fdr_path)

    # Slope (percent rise) for the whole DEM, persisted to a scratch raster.
    slp_path = so("slope", "random", "in_memory")
    Slope(in_raster=dem, output_measurement="PERCENT_RISE", method="PLANAR").save(slp_path)

    return {
        "flow_direction_raster": Raster(fdr_path),
        "slope_raster": Raster(slp_path),
    }
def create_slope_raster(workspace_gdb, input_elevation, output_measurement, z_factor):
    """Run the Slope tool on *input_elevation* and save the result.

    The output is written to <workspace_gdb>/slope_raster and the full
    path of the saved raster is returned.
    """
    # Slope requires the Spatial Analyst extension to be checked out.
    arcpy.CheckOutExtension("Spatial")
    out_path = os.path.join(workspace_gdb, "slope_raster")
    Slope(input_elevation, output_measurement, z_factor).save(out_path)
    print("end of slope script")
    return out_path
def main(bathy=None, out_raster=None):
    """Compute the slope (in degrees) of *bathy* and save it to *out_raster*."""
    try:
        # Ensure statistics are generated for the output raster.
        arcpy.env.rasterStatistics = "STATISTICS"

        # Calculate the slope of the bathymetric raster.
        utils.msg("Calculating the slope...")
        slope_result = Slope(bathy, "DEGREE", 1)

        # Normalise the destination path, then persist the result.
        out_raster = utils.validate_path(out_raster)
        slope_result.save(out_raster)
    except Exception as e:
        utils.msg(e, mtype='error')
def main(bathy=None, out_raster=None):
    """Compute raster slope in degrees."""
    try:
        arcpy.env.rasterStatistics = "STATISTICS"
        # Run the Spatial Analyst Slope tool on the bathymetry raster.
        utils.msg("Calculating the slope...")
        result = Slope(bathy, "DEGREE", 1)
        # Validate the output location before writing.
        validated = utils.validate_path(out_raster)
        result.save(validated)
    except Exception as e:
        utils.msg(e, mtype='error')
def slopeaspect(path):
    """Create slope and aspect raster maps from ``altitude.tif``.

    Outputs (written into *path*, the arcpy workspace):
      - ``slope_in_deg.tif``: slope in degrees
      - ``times.tif``: integer slope scaled by 100
      - ``aspect``: aspect raster

    Parameters
    ----------
    path : str
        Workspace folder containing ``altitude.tif``.
    """
    import arcpy
    from arcpy.sa import Slope, Times, Aspect, Int

    arcpy.env.workspace = path
    print('creating slope and aspect raster map')
    arcpy.CheckOutExtension("Spatial")  # activating spatial analyst module

    # Slope in degrees.
    slope1 = Slope("altitude.tif", 'DEGREE')
    slope1.save("slope_in_deg.tif")

    # Scale the slope by 100 and truncate to integer (for downstream tools
    # that require integer rasters).
    const = 100.0
    intslope = Int(Times("slope_in_deg.tif", const))
    intslope.save("times.tif")

    # BUG FIX: the Aspect() call was commented out in the original, so
    # `aspect1` was undefined and aspect1.save() raised NameError.
    # ATTENTION: Aspect in ArcGIS is calculated clockwise so East is 90;
    # in Raven's manual Aspect is assumed to be counterclockwise, i.e.
    # West is 90.
    aspect1 = Aspect("altitude.tif")
    aspect1.save("aspect")
    print('done!')
# Define CosineFitting function def CosineFitting(x, a, b, c): return a * np.cos(b - np.pi / 180 * x) + c while 1: iteration = iteration + 1 arcpy.AddMessage( "--------------------------------------------------------------") arcpy.AddMessage("Iteration {0} is running!".format(iteration)) # DEM difference [m] dh = Raster(DEM_master) - Raster(DEM_slave_after) # slope of the slave DEM [degree] slp = Slope(DEM_slave_after, "DEGREE", "1") # aspect of the slave DEM [degree] asp = Aspect(DEM_slave_after) # Mask 'dh' using statale terrain polygon dh_mask = ExtractByMask(dh, OffGlacier) # Mask 'slp' and 'asp' using 'dh_mask' in order to keep same georeference as 'dh_mask' slp_mask = ExtractByMask(slp, dh_mask) asp_mask = ExtractByMask(asp, dh_mask) del dh, slp, asp # Raster to Array dh_mask_arr = arcpy.RasterToNumPyArray(dh_mask, nodata_to_value=-32768)
def function(outputFolder, DEM, studyAreaMask, streamInput, minAccThresh, majAccThresh, smoothDropBuffer, smoothDrop, streamDrop, reconDEM, rerun=False):
    """Preprocess a DEM: raw copy, slope, and (optionally) a hydrologically
    reconditioned DEM with flow direction/accumulation and stream networks.

    Each stage is checkpointed through ``progress`` so a rerun can skip
    stages that already completed successfully.

    NOTE(review): if ``reconDEM`` is True and the 'Flow accumulation' stage
    is skipped on a rerun, ``hydFACTemp`` is still the scratch *path string*
    when 'Create stream file' multiplies it — presumably this relies on
    both stages always (re)running together; verify against ``progress``.
    NOTE(review): ``studyAreaMask``, ``rawFDR``, ``allPolygonSinks`` and
    ``slopeRawDeg`` are unused in this visible body.
    """
    try:
        # Set environment variables
        arcpy.env.compression = "None"
        arcpy.env.snapRaster = DEM
        arcpy.env.extent = DEM
        arcpy.env.cellSize = arcpy.Describe(DEM).meanCellWidth

        ########################
        ### Define filenames ###
        ########################

        files = common.getFilenames('preprocess', outputFolder)

        rawDEM = files.rawDEM
        hydDEM = files.hydDEM
        hydFDR = files.hydFDR
        hydFDRDegrees = files.hydFDRDegrees
        hydFAC = files.hydFAC
        streamInvRas = files.streamInvRas  # Inverse stream raster - 0 for stream, 1 for no stream
        streams = files.streams
        streamDisplay = files.streamDisplay
        multRaster = files.multRaster
        hydFACInt = files.hydFACInt
        slopeRawDeg = files.slopeRawDeg
        slopeRawPer = files.slopeRawPer
        slopeHydDeg = files.slopeHydDeg
        slopeHydPer = files.slopeHydPer

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, "base_")
        cellSizeDEM = float(arcpy.env.cellSize)
        burnedDEM = prefix + "burnedDEM"
        streamAccHaFile = prefix + "streamAccHa"
        rawFDR = prefix + "rawFDR"
        allPolygonSinks = prefix + "allPolygonSinks"
        DEMTemp = prefix + "DEMTemp"
        hydFACTemp = prefix + "hydFACTemp"

        # Saved as .tif as did not save as ESRI grid on server
        streamsRasterFile = os.path.join(arcpy.env.scratchFolder, "base_") + "StreamsRaster.tif"

        ###############################
        ### Save DEM to base folder ###
        ###############################

        codeBlock = 'Save DEM'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            # Save DEM to base folder as raw DEM with no compression
            pixelType = int(
                arcpy.GetRasterProperties_management(DEM, "VALUETYPE").getOutput(0))
            if pixelType == 9:  # 32 bit float
                arcpy.CopyRaster_management(DEM, rawDEM, pixel_type="32_BIT_FLOAT")
            else:
                log.info("Converting DEM to 32 bit floating type")
                arcpy.CopyRaster_management(DEM, DEMTemp)
                arcpy.CopyRaster_management(Float(DEMTemp), rawDEM, pixel_type="32_BIT_FLOAT")
                # Delete temporary DEM
                arcpy.Delete_management(DEMTemp)

            # Calculate statistics for raw DEM
            arcpy.CalculateStatistics_management(rawDEM)
            progress.logProgress(codeBlock, outputFolder)

        ################################
        ### Create multiplier raster ###
        ################################

        codeBlock = 'Create multiplier raster'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            # All valid DEM cells map to 1 (used later as the "no stream" raster).
            Reclassify(rawDEM, "Value", RemapRange([[-999999.9, 999999.9, 1]]), "NODATA").save(multRaster)
            progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Calculate slope in percent'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            intSlopeRawPer = Slope(rawDEM, "PERCENT_RISE")
            intSlopeRawPer.save(slopeRawPer)
            del intSlopeRawPer
            log.info('Slope calculated in percent')
            progress.logProgress(codeBlock, outputFolder)

        if reconDEM is True:
            #######################
            ### Burn in streams ###
            #######################
            codeBlock = 'Burn in streams'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                # Recondition DEM (burning stream network in using AGREE method)
                log.info("Burning streams into DEM.")
                reconditionDEM.function(rawDEM, streamInput, smoothDropBuffer, smoothDrop, streamDrop, burnedDEM)
                log.info("Completed stream network burn in to DEM")
                progress.logProgress(codeBlock, outputFolder)

            ##################
            ### Fill sinks ###
            ##################
            codeBlock = 'Fill sinks'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                Fill(burnedDEM).save(hydDEM)
                log.info("Sinks in DEM filled")
                progress.logProgress(codeBlock, outputFolder)

            ######################
            ### Flow direction ###
            ######################
            codeBlock = 'Flow direction'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                FlowDirection(hydDEM, "NORMAL").save(hydFDR)
                log.info("Flow Direction calculated")
                progress.logProgress(codeBlock, outputFolder)

            #################################
            ### Flow direction in degrees ###
            #################################
            codeBlock = 'Flow direction in degrees'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                # Save flow direction raster in degrees (for display purposes)
                # D8 power-of-two codes -> compass bearings.
                degreeValues = RemapValue([[1, 90], [2, 135], [4, 180], [8, 225],
                                           [16, 270], [32, 315], [64, 0], [128, 45]])
                Reclassify(hydFDR, "Value", degreeValues, "NODATA").save(hydFDRDegrees)
                progress.logProgress(codeBlock, outputFolder)

            #########################
            ### Flow accumulation ###
            #########################
            codeBlock = 'Flow accumulation'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                hydFACTemp = FlowAccumulation(hydFDR, "", "FLOAT")
                hydFACTemp.save(hydFAC)
                arcpy.sa.Int(Raster(hydFAC)).save(hydFACInt)  # integer version
                log.info("Flow Accumulation calculated")
                progress.logProgress(codeBlock, outputFolder)

            #######################
            ### Calculate slope ###
            #######################
            codeBlock = 'Calculate slope on burned DEM'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                intSlopeHydDeg = Slope(hydDEM, "DEGREE")
                intSlopeHydDeg.save(slopeHydDeg)
                del intSlopeHydDeg

                intSlopeHydPer = Slope(hydDEM, "PERCENT_RISE")
                intSlopeHydPer.save(slopeHydPer)
                del intSlopeHydPer

                log.info('Slope calculated')
                progress.logProgress(codeBlock, outputFolder)

            ##########################
            ### Create stream file ###
            ##########################
            codeBlock = 'Create stream file'
            if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
                # Create accumulation in metres
                # (cell count -> hectares: count * cellArea / 10000)
                streamAccHaFileInt = hydFACTemp * cellSizeDEM * cellSizeDEM / 10000.0
                streamAccHaFileInt.save(streamAccHaFile)
                del streamAccHaFileInt

                # Check stream initiation threshold reached
                streamYes = float(
                    arcpy.GetRasterProperties_management(
                        streamAccHaFile, "MAXIMUM").getOutput(0))

                if streamYes > float(minAccThresh):
                    # Below threshold -> 1 (no stream), above -> 0 (stream).
                    reclassifyRanges = RemapRange(
                        [[-1000000, float(minAccThresh), 1],
                         [float(minAccThresh), 9999999999, 0]])

                    outLUCIstream = Reclassify(streamAccHaFile, "VALUE", reclassifyRanges)
                    outLUCIstream.save(streamInvRas)
                    del outLUCIstream
                    log.info("Stream raster for input to LUCI created")

                    # Create stream file for display
                    reclassifyRanges = RemapRange(
                        [[0, float(minAccThresh), "NODATA"],
                         [float(minAccThresh), float(majAccThresh), 1],
                         [float(majAccThresh), 99999999999999, 2]])

                    streamsRaster = Reclassify(streamAccHaFile, "Value", reclassifyRanges, "NODATA")
                    streamOrderRaster = StreamOrder(streamsRaster, hydFDR, "STRAHLER")
                    streamsRaster.save(streamsRasterFile)

                    # Create two streams feature classes - one for analysis and one for display
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR, streams, 'NO_SIMPLIFY')
                    arcpy.sa.StreamToFeature(streamOrderRaster, hydFDR, streamDisplay, 'SIMPLIFY')

                    # Rename grid_code column to 'Strahler'
                    for streamFC in [streams, streamDisplay]:
                        arcpy.AddField_management(streamFC, "Strahler", "LONG")
                        arcpy.CalculateField_management(
                            streamFC, "Strahler", "!GRID_CODE!", "PYTHON_9.3")
                        arcpy.DeleteField_management(streamFC, "GRID_CODE")

                    del streamsRaster
                    del streamOrderRaster

                    log.info("Stream files created")
                else:
                    warning = 'No streams initiated'
                    log.warning(warning)
                    common.logWarnings(outputFolder, warning)

                    # Create LUCIStream file from multiplier raster (i.e. all cells have value of 1 = no stream)
                    arcpy.CopyRaster_management(multRaster, streamInvRas)

                progress.logProgress(codeBlock, outputFolder)

        codeBlock = 'Clip data, build pyramids and generate statistics'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            try:
                # Generate pyramids and stats
                arcpy.BuildPyramidsandStatistics_management(
                    outputFolder, "", "", "", "")
                log.info(
                    "Pyramids and Statistics calculated for all LUCI topographical information rasters"
                )
            except Exception:
                # Best effort only: missing stats are not fatal.
                log.info("Warning - could not generate all raster statistics")

            progress.logProgress(codeBlock, outputFolder)

        # Reset snap raster
        arcpy.env.snapRaster = None

    except Exception:
        log.error("Error in preprocessing operations")
        raise
def main(in_raster=None, neighborhood_size=None, out_raster=None):
    """
    Compute terrain ruggedness, using the vector ruggedness measure (VRM),
    as described in:

        Sappington et al., 2007. Quantifying Landscape Ruggedness for
        Animal Habitat Analysis: A Case Study Using Bighorn Sheep in the
        Mojave Desert. Journal of Wildlife Management. 71(5): 1419-1426.

    Parameters:
        in_raster: input elevation raster.
        neighborhood_size: focal window size in cells (int-able).
        out_raster: path for the output ruggedness raster.
    """
    hood_size = int(neighborhood_size)

    # FIXME: expose this as an option per #18
    w = utils.Workspace()
    if w.exists:
        out_workspace = w.path
    else:
        out_workspace = os.path.dirname(out_raster)
    utils.workspace_exists(out_workspace)

    # force temporary stats to be computed in our output workspace
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace
    # TODO expose as config
    pyramid_orig = arcpy.env.pyramid
    arcpy.env.pyramid = "NONE"
    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True
    arcpy.env.compression = 'LZW'

    try:
        # Create Slope and Aspect rasters
        utils.msg("Calculating aspect...")
        out_aspect = Aspect(in_raster)
        utils.msg("Calculating slope...")
        out_slope = Slope(in_raster, "DEGREE")

        # Convert Slope and Aspect rasters to radians
        utils.msg("Converting slope and aspect to radians...")
        slope_rad = out_slope * (math.pi / 180)
        aspect_rad = out_aspect * (math.pi / 180)

        # Calculate x, y, and z rasters (unit surface-normal components).
        utils.msg("Calculating x, y, and z rasters...")
        xy_raster_calc = Sin(slope_rad)
        z_raster_calc = Cos(slope_rad)
        # Aspect of -1 marks flat cells; their x/y components are zero.
        x_raster_calc = Con(out_aspect == -1, 0, Sin(aspect_rad)) * xy_raster_calc
        y_raster_calc = Con(out_aspect == -1, 0, Cos(aspect_rad)) * xy_raster_calc

        # Calculate sums of x, y, and z rasters for selected neighborhood size
        utils.msg("Calculating sums of x, y, and z rasters in neighborhood...")
        hood = NbrRectangle(hood_size, hood_size, "CELL")
        x_sum_calc = FocalStatistics(x_raster_calc, hood, "SUM", "NODATA")
        y_sum_calc = FocalStatistics(y_raster_calc, hood, "SUM", "NODATA")
        z_sum_calc = FocalStatistics(z_raster_calc, hood, "SUM", "NODATA")

        # Calculate the resultant vector
        utils.msg("Calculating the resultant vector...")
        result_vect = (x_sum_calc**2 + y_sum_calc**2 + z_sum_calc**2)**0.5

        arcpy.env.rasterStatistics = "STATISTICS"
        arcpy.env.pyramid = pyramid_orig

        # Calculate the Ruggedness raster: 1 - |R|/n, where n is the
        # number of cells in the neighborhood.
        utils.msg("Calculating the final ruggedness raster...")
        ruggedness = 1 - (result_vect / hood_size**2)

        out_raster = utils.validate_path(out_raster)
        # BUG FIX: message previously read "to to".
        utils.msg("Saving ruggedness raster to {}.".format(out_raster))
        arcpy.CopyRaster_management(ruggedness, out_raster)
    except Exception as e:
        utils.msg(e, mtype='error')
# Step 7 # Set references to the slope and reclassify tool operation configuration parameters print( "setting references to the slope tool operation configuration parameters..." ) outMeasurement = "PERCENT_RISE" zFactor = 1 # Step 8 print("executing slope...") # Check out the ArcGIS Spatial Analyst extension arcpy.CheckOutExtension("Spatial") # Execute slope tool outSlope = Slope(input_elev_raster, outMeasurement, zFactor) # Specify a temporary path to the slope output work_slope_out_path = os.path.join(workspace_gdb, "slope_raster") # Save the output slope raster outSlope.save(work_slope_out_path) print(work_slope_out_path) # Step 9 # Set local variables input_slope_raster = work_slope_out_path print("calculating statistics...") arcpy.CalculateStatistics_management(work_slope_out_path)
def VegetationHeightProfil(emprise, mnh, bornes, OutputFc, idfield, geodata):
    """Classify vegetation-height profiles from a canopy height model (MNH).

    Extracts the MNH over *emprise*, computes its slope, converts it to
    points, then spatially joins the mean and standard deviation of the
    slope onto *emprise* and classifies each feature into a profile type
    ("Asc/Desc_Continue", "Plat" or "Heterogene") using the two bounds in
    *bornes* (b1 = std-dev bound, b2 = mean bound).

    Returns the path of the output feature class *OutputFc*.
    """
    from arcpy import env
    from arcpy.sa import ExtractByMask, Slope

    arcpy.CheckOutExtension("spatial")
    env.workspace = geodata
    env.overwriteOutput = True

    # Extract the MNH over the study extent.
    pathExtract = os.path.join(geodata, "ExtractMNH")
    Extract_MNH = ExtractByMask(mnh, emprise)
    Extract_MNH.save(pathExtract)

    # Compute the slope of the extracted MNH.
    pathSlope = os.path.join(geodata, "SlopeMNH")
    slope_mnh = Slope(pathExtract, "DEGREE")
    slope_mnh.save(pathSlope)

    # Convert the slope raster to points.
    arcpy.RasterToPoint_conversion(slope_mnh, "Slope", "Value")

    # Spatial join: compute mean and standard deviation of slope per feature.
    fmap = arcpy.FieldMappings()
    fmap.addTable(emprise)
    fmap.addTable("Slope")

    # Create fieldmap for Mean
    fldMean = arcpy.FieldMap()
    fldMean.addInputField("Slope", "grid_code")
    fMean = fldMean.outputField
    fMean.name = "Mean"
    fMean.aliasName = "Mean"
    fldMean.outputField = fMean
    fldMean.mergeRule = "Mean"
    fmap.addFieldMap(fldMean)

    # Create fieldmap for StdDev
    fldEcartype = arcpy.FieldMap()
    fldEcartype.addInputField("Slope", "grid_code")
    fEcartype = fldEcartype.outputField
    fEcartype.name = "Stdv"
    fEcartype.aliasName = "Stdv"
    fldEcartype.outputField = fEcartype
    fldEcartype.mergeRule = "StdDev"
    fmap.addFieldMap(fldEcartype)

    # Perform the spatial join
    arcpy.SpatialJoin_analysis(emprise, "Slope", OutputFc, "", "", fmap)

    # Create the classification field
    arcpy.AddField_management(OutputFc, "Prof_Typ", "TEXT")

    # Drop every field except the id and the three computed ones.
    for fld in arcpy.ListFields(OutputFc):
        if fld.name not in [idfield, "Stdv", "Mean", "Prof_Typ"]:
            try:
                arcpy.DeleteField_management(OutputFc, fld.name)
            except Exception:
                # Required/system fields cannot be deleted; ignore.
                pass

    # Classify the slope statistics against the bounds.
    b1 = bornes[0]
    b2 = bornes[1]
    # BUG FIX: the original used "if / if / else", so the else of the
    # second test overwrote "Asc/Desc_Continue" with "Heterogene"
    # whenever the first branch matched. Use if / elif / else.
    Code_bloc = """def Eval(Moyenne, EcarType):
    if Moyenne > """ + str(b2) + """ and EcarType < """ + str(b1) + """ :
        ProfilType = "Asc/Desc_Continue"
    elif Moyenne < """ + str(b2) + """ and EcarType < """ + str(b1) + """ :
        ProfilType = "Plat"
    else :
        ProfilType = "Hétérogène"
    return ProfilType
"""
    expression = "Eval(!Mean!,!Stdv!)"

    # Populate the Prof_Typ field.
    arcpy.CalculateField_management(OutputFc, "Prof_Typ", expression, "PYTHON_9.3", Code_bloc)

    # Return the result
    return OutputFc
def IceCliffLocation(workspace,dem,tileDebarea,pixel,skinny,minSlope,n_iterations,L_e,alpha,beta_e,A_min,phi,gamma): import sys import os import arcpy from arcpy import env from arcpy.sa import Slope, ExtractByMask, Raster, SetNull, Int import matplotlib.pyplot as plt import numpy as np from numpy import array from scipy.optimize import curve_fit env.overwriteOutput = True try: import arcinfo except: sys.exit("ArcInfo license not available") arcpy.AddMessage("ArcInfo license not available") if arcpy.CheckExtension("spatial") == "Available": arcpy.CheckOutExtension("spatial") else: sys.exit("Spatial Analyst license not available") arcpy.AddMessage("Spatial Analyst license not available") #Parameters that should be stable: slopeLimit = 90 # slope detection capped at this value ## Loop for optimizing slope if str(workspace.split("\\")[-1]) == 'Final': n = [] n.append(minSlope) else: minSlope = 0 n = np.arange(minSlope,slopeLimit,(slopeLimit-minSlope)/n_iterations) skipIteration = [] for minSlope in n: # check for existing iterations if code has previously run but crashed. if arcpy.ListFeatureClasses("*cliffMap*"): fcListPrior = arcpy.ListFeatureClasses("*cliffMap*") skipIteration = [] for prior_i in fcListPrior: if int(prior_i[14:16]) == int("%02d" % (int(minSlope),)): skipIteration = 1 if skipIteration == 1: continue ## Ice Cliff code if skinny == 'false': print 'IceCliffLocation script started...' if skinny == 'true': print 'skinny IceCliffLocation script started...' # Parameter that probably should be 0 minProb = 0 # probability associated with minSlope. 
arcpy.CopyFeatures_management(tileDebarea, workspace+"\\del_debarea.shp") debarea_iteration = workspace+"\\del_debarea.shp" arcpy.env.snapRaster = dem outExtractSlope = ExtractByMask(dem, debarea_iteration) outExtractSlope.save("dem_extract.TIF") if int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))) == pixel: dem = "dem_extract.TIF" else: arcpy.Resample_management("dem_extract.TIF", "dem_extractResample.TIF", pixel, "NEAREST") arcpy.env.snapRaster = dem print "DEM resampeld from "+str(int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))))+' to '+str(pixel) dem = "dem_extractResample.TIF" # Create slope raster outSlope = Slope(dem, "DEGREE", 1) outSlope.save("del_slope.TIF") # Isolate slope values above minSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(minSlope)) outSetNull.save("del_minSlope.TIF") # Exit process if no cliffs exist nocliff = arcpy.GetRasterProperties_management(Int("del_minSlope.TIF"), "ALLNODATA") if int(str(nocliff)) == 1: print "No area with a slope above "+str(minSlope)+"." elif float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MINIMUM"))) == 0: print "Only one pixel with a slope above "+str(minSlope)+", iteration skipped." else: minMean = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "MEAN"))) minSD = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "STD"))) areaSlope = minMean print 'areaSlope = ' + str(areaSlope) # Isolate slope values above areaSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(areaSlope)) outSetNull.save("del_areaSlope.TIF") arcpy.env.snapRaster = dem # Exit process if no cliffs exist nocliff = arcpy.GetRasterProperties_management(Int("del_areaSlope.TIF"), "ALLNODATA") if int(str(nocliff)) == 1: print "No area with a slope above "+str(areaSlope)+"." 
elif float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MINIMUM"))) == 0: print "Only one pixel with a slope above "+str(areaSlope)+", iteration skipped." else: seedSlope = minMean+minSD print 'seedSlope = ' + str(seedSlope) # Isolate slope values above areaSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope)) outSetNull.save("del_seedSlope.TIF") # Exit process if no cliffs exist nocliff = arcpy.GetRasterProperties_management(Int("del_seedSlope.TIF"), "ALLNODATA") if int(str(nocliff)) == 1: print "No seed area with a slope above "+str(seedSlope)+"." else: # to int speeds up computation time outInt = Int("del_areaSlope.TIF") outInt.save("del_minSlopeInt.TIF") outInt = Int("del_seedSlope.TIF") outInt.save("del_seedSlopeInt.TIF") arcpy.RasterToPolygon_conversion("del_minSlopeInt.TIF", "del_minCliffSlope.shp", "NO_SIMPLIFY", "VALUE") arcpy.AddField_management("del_minCliffSlope.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_minCliffSlope.shp", "del_minCliff_dissolve.shp", "value") arcpy.MultipartToSinglepart_management("del_minCliff_dissolve.shp", "del_minCliff_explode.shp") arcpy.AddField_management("del_minCliff_explode.shp",'Area','FLOAT') rows = arcpy.UpdateCursor("del_minCliff_explode.shp") for row in rows: areacliff = row.shape.area row.Area = areacliff rows.updateRow(row) del row, rows arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"_CliffArea.shp") # skinny/non-skinny fix for ending iteration. 0 = no skip, 1 = skip skip_iter = 0 # skinny ice cliffs, does not include ice cliff end extension to speed up computations if skinny == 'true': if arcpy.management.GetCount("del_minCliff_explode.shp")[0] == "0": skip_iter = 1 print "No area within del_minCliff_explode.shp, skinny iteration skipped." 
else: # "_FinalCliffShape.shp" and "_cliffArea.shp" are the same if skinny == true arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp") # copy working .shp, used below arcpy.CopyFeatures_management('del_minCliff_explode.shp', 'del_lineAndArea_area.shp') arcpy.CalculateAreas_stats('del_minCliff_explode.shp', 'del_lineAndArea_area.shp') arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer') expression = 'F_AREA <=' + str((pixel**2)*A_min) arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression) arcpy.DeleteFeatures_management('tempLayer') arcpy.Delete_management('tempLayer') if skinny == 'false': # buffer in/out area to break up attached features arcpy.Buffer_analysis("del_minCliff_explode.shp", "del_extendLineBuffer.shp", (pixel/2)-0.1, "FULL", "ROUND", "NONE") # Generate ice cliff centerlines from Voronoi cells if arcpy.management.GetCount("del_extendLineBuffer.shp")[0] == "0": arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp") skip_iter = 1 print "No area within the criteria defined by seed area value "+str(seedSlope)+", iteration stopped before centerlines." 
else: arcpy.FeatureToLine_management("del_extendLineBuffer.shp","del_line.shp","","ATTRIBUTES") arcpy.Densify_edit("del_line.shp", "","5", "", "") arcpy.FeatureVerticesToPoints_management ("del_line.shp", "del_verti.shp", "ALL") arcpy.CreateThiessenPolygons_analysis("del_verti.shp","del_voronoiCells.shp" ,"ONLY_FID") arcpy.RepairGeometry_management("del_voronoiCells.shp") #use geodatabase here due to unexpected error: "Invalid Topology [Duplicate segment.]" arcpy.CreateFileGDB_management(workspace, "fGDB.gdb") fgdb = workspace+"\\fGDB.gdb" #arcpy.env.workspace = fgdb arcpy.Clip_analysis(workspace+"\\del_voronoiCells.shp", workspace+"\\del_extendLineBuffer.shp", fgdb+"\\shp","") arcpy.FeatureToLine_management(fgdb+"\\shp", workspace+"\\del_toLine.shp", "", attributes="ATTRIBUTES") arcpy.Delete_management(fgdb) #arcpy.env.workspace = workspace #arcpy.FeatureToLine_management("del_voronoiCellsClip.shp","del_toLine.shp", "", attributes="ATTRIBUTES") arcpy.MakeFeatureLayer_management("del_toLine.shp", "tempLayer", "", "", "") arcpy.SelectLayerByLocation_management("tempLayer", "CROSSED_BY_THE_OUTLINE_OF","del_minCliff_explode.shp","","NEW_SELECTION") arcpy.DeleteFeatures_management("tempLayer") arcpy.Delete_management("tempLayer") arcpy.Intersect_analysis(["del_toLine.shp",'del_minCliff_explode.shp'],"del_lineIntersect.shp") arcpy.Dissolve_management("del_lineIntersect.shp", "del_toLineDis.shp", "", "", "SINGLE_PART", "DISSOLVE_LINES") arcpy.UnsplitLine_management("del_toLineDis.shp","del_unsplit.shp","Id") arcpy.MakeFeatureLayer_management("del_unsplit.shp", "tempLayer2", "", "", "") arcpy.SelectLayerByLocation_management("tempLayer2", "BOUNDARY_TOUCHES","del_minCliff_explode.shp","","NEW_SELECTION") arcpy.DeleteFeatures_management("tempLayer2") arcpy.Delete_management("tempLayer2") arcpy.cartography.SimplifyLine("del_unsplit.shp","del_clineSimpExp.shp","POINT_REMOVE",10) arcpy.AddField_management("del_clineSimpExp.shp", "value", "SHORT", 1, "", "", "", "", "") 
arcpy.Dissolve_management("del_clineSimpExp.shp", "del_clineSimp.shp", "value") arcpy.TrimLine_edit("del_clineSimp.shp", "8 meters", "KEEP_SHORT") arcpy.CopyFeatures_management("del_unsplit.shp", "min"+str("%02d" % (minSlope,))+"_Centerlines.shp") #refine centerline for final map if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0": arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp") skip_iter = 1 print "No area big enough to generate a centerline, iteration skipped." else: # extend lines to capture cliff ends count = 0 print "Extend line started..." jlist = [(pixel/2)-0.1] * int(round(L_e/(pixel/2))) for j in jlist: #create buffer out to set the limit a line will be extended to arcpy.Buffer_analysis("del_clineSimp.shp", "del_clineSimpBuff1.shp", j, "FULL", "ROUND", "ALL") arcpy.PolygonToLine_management("del_clineSimpBuff1.shp","del_clineSimpBuff1line.shp") #merge centerline and bufferline arcpy.Merge_management(["del_clineSimp.shp","del_clineSimpBuff1line.shp"], "del_clineSimpBuff1merge_dis.shp") arcpy.Delete_management("del_clineSimp.shp") print "Extend line "+str(count)+" started..." 
arcpy.MultipartToSinglepart_management("del_clineSimpBuff1merge_dis.shp", "del_clineSimpBuff1merge.shp") arcpy.MakeFeatureLayer_management("del_clineSimpBuff1merge.shp", "lineLayer", "", "", "") arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION", "INVERT") arcpy.ExtendLine_edit("del_clineSimpBuff1merge.shp", str(j+1)+" meters", "EXTENSION") #select share a line segment with buffer to remove buffer arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION") arcpy.DeleteFeatures_management("lineLayer") arcpy.Delete_management("lineLayer") arcpy.CopyFeatures_management("del_clineSimpBuff1merge.shp", "del_clineSimp.shp") arcpy.Delete_management("del_clineSimpBuff1.shp") arcpy.Delete_management("del_clineSimpBuff1line.shp") arcpy.Delete_management("del_clineSimpBuff1merge.shp") count = count + j del j, jlist #remove last short ribs with a lenght threhold then reattach centerlines that may have been split # calculate lenght of each centerline if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0": arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_explode.shp") skip_iter = 1 print "Centerline shape empty, iteration skipped." 
else: arcpy.AddField_management("del_clineSimp.shp",'L','FLOAT') rows = arcpy.UpdateCursor("del_clineSimp.shp") for row in rows: areacliff = row.shape.length row.L = areacliff rows.updateRow(row) del row, rows arcpy.CopyFeatures_management("del_clineSimp.shp", "min"+str("%02d" % (minSlope,))+"_extendedCenterlines.shp") # buffer out centerlines to capture end area removed in earlier buffer arcpy.Buffer_analysis("del_clineSimp.shp", "del_CliffCenterlineOut.shp", ((alpha*pixel*(2**(1/2)))/2), "FULL", "ROUND", "NONE") # define area with a slope less than that which defined "del_minCliff_dissolve.shp" edgeAreaSlope = areaSlope-beta_e print "Edge area defined by slope "+str(edgeAreaSlope) outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(edgeAreaSlope)) outSetNull.save("del_edgeSlope.TIF") outInt = Int("del_edgeSlope.TIF") outInt.save("del_edgeSlopeInt.TIF") arcpy.RasterToPolygon_conversion("del_edgeSlopeInt.TIF", "del_edgeAreaSlope.shp", "NO_SIMPLIFY", "VALUE") arcpy.AddField_management("del_edgeAreaSlope.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_edgeAreaSlope.shp", "del_edgeAreaSlope_dissolve.shp", "value") arcpy.CopyFeatures_management("del_edgeAreaSlope_dissolve.shp", "min"+str("%02d" % (minSlope,))+"_edgeArea.shp") arcpy.Intersect_analysis (["del_edgeAreaSlope_dissolve.shp", "del_CliffCenterlineOut.shp"], "del_betaF_edgeArea.shp") # merge buffered lines with buffered area arcpy.Merge_management(["del_betaF_edgeArea.shp", "del_minCliff_explode.shp"], "del_lineAndArea.shp") arcpy.AddField_management("del_lineAndArea.shp", "valueDis", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_lineAndArea.shp", "del_lineAndArea_dissolve1.shp", "valueDis") arcpy.RepairGeometry_management("del_lineAndArea_dissolve1.shp") # fill holes and remove shapes less than one pixel to avoid error from buffer tool arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolve1.shp", "del_lineAndArea_explode1.shp") 
arcpy.CalculateAreas_stats("del_lineAndArea_explode1.shp", 'del_lineAndArea_area1.shp') arcpy.MakeFeatureLayer_management('del_lineAndArea_area1.shp', 'tempLayer') expression = 'F_AREA <' + str(pixel**2) # m2 arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression) arcpy.DeleteFeatures_management('tempLayer') arcpy.Delete_management('tempLayer') arcpy.cartography.AggregatePolygons('del_lineAndArea_area1.shp', "del_lineAndArea_dissolve.shp", 1, 0, pixel**2, 'NON_ORTHOGONAL') arcpy.RepairGeometry_management("del_lineAndArea_dissolve.shp") # buffer in to reomve sliver geometries and out to make a diagonal set of single pixel shapes one feature arcpy.Buffer_analysis("del_lineAndArea_dissolve.shp", "del_lineAndArea_dissolveSmallBufferIn.shp", -0.5, "FULL", "ROUND", "ALL") arcpy.Buffer_analysis("del_lineAndArea_dissolveSmallBufferIn.shp", "del_lineAndArea_dissolveSmallBuffer.shp", 1, "FULL", "ROUND", "ALL") arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolveSmallBuffer.shp", "del_lineAndArea_explode.shp") arcpy.CalculateAreas_stats('del_lineAndArea_explode.shp', 'del_lineAndArea_area.shp') arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer') expression = 'F_AREA <=' + str((pixel**2)*A_min) arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression) arcpy.DeleteFeatures_management('tempLayer') arcpy.Delete_management('tempLayer') if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0": print "del_lineAndArea_area.shp empty, iteration stopped." 
skip_iter = 1 else: arcpy.AddField_management("del_lineAndArea_area.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.CopyFeatures_management('del_lineAndArea_area.shp', "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp") if skip_iter == 0: # CDF for values between minSlope and maxSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE >= "+ str(minSlope)) outSetNull.save("del_min.TIF") arcpy.RasterToFloat_conversion("del_min.TIF", "del_min.flt") minsl = Raster('del_min.flt') slopemin = minsl*0.0 slopemin.save('del_minSl.TIF') outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE > "+ str(seedSlope)) outSetNull = SetNull(outSetNull, outSetNull, "VALUE < "+ str(minSlope)) outSetNull.save("del_mid.TIF") arcpy.RasterToFloat_conversion("del_mid.TIF", "del_mid.flt") midsl = Raster('del_mid.flt') b = (1-(((1-minProb)/(seedSlope-minSlope))*seedSlope)) slopemid = (((1-minProb)/(seedSlope-minSlope))*midsl)+b arcpy.env.snapRaster = dem slopemid.save('del_midSl.TIF') arcpy.env.snapRaster = dem outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope)) outSetNull.save("del_max.TIF") arcpy.RasterToFloat_conversion("del_max.TIF", "del_max.flt") maxsl = Raster('del_max.flt') slopemax = maxsl*0.0+1.0 arcpy.env.snapRaster = dem slopemax.save('del_maxSl.TIF') arcpy.env.snapRaster = dem arcpy.MosaicToNewRaster_management("del_minSl.TIF;del_midSl.TIF;del_maxSl.TIF", workspace, "del_cliffProbabilitySlope.TIF", "", "32_BIT_FLOAT", "", "1", "LAST","FIRST") arcpy.env.snapRaster = dem # extract cliff probability and apply reduction factor to area outside of buffer.shp if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0": print "del_lineAndArea_area.shp is empty, did not create: CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA" + str(int(areaSlope))+".TIF" else: outExtractSlope = ExtractByMask("del_cliffProbabilitySlope.TIF", "del_lineAndArea_area.shp") 
outExtractSlope.save("del_final_cliffs_found.TIF") arcpy.RasterToFloat_conversion("del_cliffProbabilitySlope.TIF", "del_CliffProbabilitySlope.flt") CliffProbabilitySlope = Raster('del_CliffProbabilitySlope.flt') CliffProbabilitySlopeREDUCED = CliffProbabilitySlope*phi arcpy.env.snapRaster = dem CliffProbabilitySlopeREDUCED.save('del_CliffProbabilitySlopeREDUCED.TIF') arcpy.MosaicToNewRaster_management("del_final_cliffs_found.TIF;del_CliffProbabilitySlopeREDUCED.TIF", workspace, "CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA" + str(int(areaSlope))+".TIF", "", "32_BIT_FLOAT", "", "1", "FIRST","FIRST") arcpy.env.snapRaster = dem del CliffProbabilitySlope del CliffProbabilitySlopeREDUCED del minsl del midsl del maxsl ## ---------------------------------- ## Compute percent cliff in total spatial domain cliff_area_sum = 0 debris_area_sum = 0 Perc_Cliff = 0 arcpy.CalculateAreas_stats(debarea_iteration, 'del_debris_area.shp') with arcpy.da.SearchCursor('del_debris_area.shp', ['F_AREA']) as cursor: for row in cursor: debris_area_sum += row[0] if os.path.isfile(workspace+'\\del_lineAndArea_area.shp') == False: print "'del_lineAndArea_area.shp'does not exist." elif arcpy.management.GetCount('del_lineAndArea_area.shp')[0] == "0": print "No area within 'del_lineAndArea_area.shp'." 
else: with arcpy.da.SearchCursor('del_lineAndArea_area.shp', ['F_AREA']) as cursor: for row in cursor: cliff_area_sum += row[0] Perc_Cliff = (cliff_area_sum/debris_area_sum)*100 arcpy.Dissolve_management("del_lineAndArea_area.shp", 'cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp', "value") arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','minSlope','FLOAT') arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Cliff','FLOAT') arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Deb','FLOAT') arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Perc_Cliff','FLOAT') rows = arcpy.UpdateCursor('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp') for row in rows: row.setValue('Area_Cliff', cliff_area_sum) row.setValue('Area_Deb', debris_area_sum) row.setValue('minSlope', minSlope) row.setValue('Perc_Cliff', Perc_Cliff) rows.updateRow(row) del row, rows print 'IceCliffLocation script [minSlope: ' + str("%02d" % (int(minSlope),)) + ' areaSlope: ' + str(int(areaSlope))+ '] done...' 
rasterList = arcpy.ListRasters("*del*") for raster in rasterList: arcpy.Delete_management(raster) del raster del rasterList fcList = arcpy.ListFeatureClasses("*del*") for fc in fcList: arcpy.Delete_management(fc) del fc del fcList print "intermediate files deleted" del minSlope del n if str(workspace.split("\\")[-1]) == 'Final': print "Script complete" else: initialSlope_doubles = [] percentCliffs_doubles = [] initialSlope = [] percentCliffs = [] xfit = [] yfit = [] fcList = [] arr = [] fcList = arcpy.ListFeatureClasses("*cliffMap*") arcpy.Merge_management(fcList, "mergedSolutions.shp") arr = arcpy.da.TableToNumPyArray("mergedSolutions.shp", ('Perc_Cliff','minSlope')) arcpy.Delete_management("del_mergedSolutions.shp") initialSlope_doubles = [row[1] for row in arr] percentCliffs_doubles = [row[0] for row in arr] #remove rows that are repeated due to (possible) earlier tiled dissolve from insufficient memory for i,j in enumerate(initialSlope_doubles): if j != initialSlope_doubles[(i-1) % len(initialSlope_doubles)]: initialSlope.append(j) del i,j for i,j in enumerate(percentCliffs_doubles): if j != percentCliffs_doubles[(i-1) % len(percentCliffs_doubles)]: percentCliffs.append(j) del i,j def func(x,a,b,c): return a*np.exp(-((x-b)/c)**2) try: popt, pcov = curve_fit(func,initialSlope,percentCliffs, maxfev=1000) except RuntimeError: fig = plt.figure() ax1 = fig.add_subplot(111) ax1.plot(initialSlope, percentCliffs, 'ko');plt.draw() fig.show() print("Error - curve_fit failed") xfit = np.linspace(min(initialSlope), max(initialSlope), 100) yfit = popt[0]*np.exp(-((xfit-popt[1])/popt[2])**2) def secondDer(x): return popt[0]*(((4*(x-popt[1])**2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**4)-((2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**2)) a1 = [] a1 = [i for i in xrange(91)] a2 = secondDer(a1) #the next 3 for loops and a[x] variables define 1 of the 2 points to derive the optimization line. 
a3 = [] a4 = [] # values of second derivative where slope is below 'gamma' for i, j in enumerate(a2): if j <= gamma: a3.append(i) == i # find the steepest point (in the middle of the side of the bell) for i, j in enumerate(a2): if j == max(a2): m=i # take only values to the right of 'm' in case the curve is flat at 0 slope for i in a3: if i > m: a4.append(i) == i del i,j ax = min(a4) ay = popt[0]*np.exp(-((ax-popt[1])/popt[2])**2) #find max of bell for first point in optmization line yfit_array = array(yfit) ftup = (np.where(yfit_array == max(yfit_array))) f = int(ftup[0]) # x,y index of max yfit # d = distance from fit Equation 2 (Herreid and Pellicciotti, 2018) to line definded by ((xfit[0],yfit[0]),(ax,yx)) d = abs((yfit[f]-ay)*xfit-(xfit[f]-ax)*yfit+xfit[f]*ay-yfit[f]*ax)/((yfit[f]-ay)**2+(xfit[f]-ax)**2)**(1/2) # crit is the index of the longest d crit = np.where(d == max(d)) m = (yfit[f]-ay)/(xfit[f]-ax) b = yfit[f]-m*xfit[f] x_crit = (xfit[crit]+m*yfit[crit]-m*b)/(m**2+1) y_crit = m*((xfit[crit]+m*yfit[crit]-m*b)/(m**2+1))+b fig = plt.figure() ax1 = fig.add_subplot(111) ax1.plot(initialSlope, percentCliffs, 'ko'); plt.plot([xfit[f],ax],[yfit[f],ay]); plt.plot([xfit[crit],x_crit],[yfit[crit],y_crit]); plt.plot(xfit,yfit);plt.xlim(0, 100);plt.ylim(0, 100);plt.gca().set_aspect('equal', adjustable='box');plt.draw() ax1.set_xlabel(r'$\mathrm{\beta_i (^\circ)}$') ax1.set_ylabel('Ice cliff fraction (%)') fig.show() #fig.canvas.flush_events() import time time.sleep(1) #plt.pause(0.01) #plt.waitforbuttonpress() #save data used to make figure np.save(workspace+'\\figureData', (initialSlope, percentCliffs,[xfit[f],ax],[yfit[f],ay],[xfit[crit],x_crit],[yfit[crit],y_crit],xfit,yfit)) IceCliffLocation.minSlope = float(xfit[crit])
def calculate_topographic_properties(**kwargs):
    """
    Description: calculates topographic properties from an elevation raster
    Inputs: 'z_unit' -- a string value of either 'Meter' or 'Foot' representing the
                vertical unit of the elevation raster
            'input_array' -- an array containing the grid raster (must be first) and
                the elevation raster
            'output_array' -- an array containing the output rasters for integer
                elevation, linear aspect, compound topographic index, roughness,
                site exposure, mean slope, surface area ratio, surface relief ratio,
                topographic position, and topographic radiation (in that order)
    Returned Value: Returns a raster dataset on disk for each topographic property
    Preconditions: requires an input DEM that can be created through other scripts
        in this repository
    """
    # Import packages
    import arcpy
    from arcpy.sa import Con
    from arcpy.sa import IsNull
    from arcpy.sa import ExtractByMask
    from arcpy.sa import Raster
    from arcpy.sa import Int
    from arcpy.sa import FlowDirection
    from arcpy.sa import FlowAccumulation
    from arcpy.sa import Slope
    from arcpy.sa import Aspect
    from package_Geomorphometry import compound_topographic
    from package_Geomorphometry import getZFactor
    from package_Geomorphometry import linear_aspect
    from package_Geomorphometry import mean_slope
    from package_Geomorphometry import roughness
    from package_Geomorphometry import site_exposure
    from package_Geomorphometry import surface_area
    from package_Geomorphometry import surface_relief
    from package_Geomorphometry import topographic_position
    from package_Geomorphometry import topographic_radiation
    import datetime
    import os
    import time

    # Parse key word argument inputs
    z_unit = kwargs['z_unit']
    grid_raster = kwargs['input_array'][0]
    elevation_input = kwargs['input_array'][1]
    elevation_output = kwargs['output_array'][0]
    aspect_output = kwargs['output_array'][1]
    cti_output = kwargs['output_array'][2]
    roughness_output = kwargs['output_array'][3]
    exposure_output = kwargs['output_array'][4]
    slope_output = kwargs['output_array'][5]
    area_output = kwargs['output_array'][6]
    relief_output = kwargs['output_array'][7]
    position_output = kwargs['output_array'][8]
    radiation_output = kwargs['output_array'][9]

    # Local helpers factoring out the timing/report/cleanup boilerplate that was
    # previously duplicated for every topographic property.

    def _report_success(iteration_start):
        # Print a timestamped completion message with the elapsed time.
        iteration_elapsed = int(time.time() - iteration_start)
        iteration_success_time = datetime.datetime.now()
        print(
            f'\tCompleted at {iteration_success_time.strftime("%Y-%m-%d %H:%M")} (Elapsed time: '
            f'{datetime.timedelta(seconds=iteration_elapsed)})'
        )
        print('\t----------')

    def _report_exists(label):
        # Print a skip message for an output that is already on disk.
        print(f'\t{label} already exists for {grid_title}.')
        print('\t----------')

    def _delete_intermediates(*datasets):
        # Best-effort removal of intermediate datasets; failure is non-fatal
        # because the final outputs have already been written.
        try:
            for dataset in datasets:
                arcpy.management.Delete(dataset)
        except Exception:
            print('\t\tCould not delete intermediate dataset...')

    # Set overwrite option
    arcpy.env.overwriteOutput = True
    # Use three quarters of cores on processes that can be split.
    arcpy.env.parallelProcessingFactor = "75%"
    # Set snap raster and extent
    arcpy.env.snapRaster = grid_raster
    arcpy.env.extent = Raster(grid_raster).extent

    # Define folder structure
    grid_title = os.path.splitext(os.path.split(grid_raster)[1])[0]
    raster_folder = os.path.split(elevation_output)[0]
    intermediate_folder = os.path.join(raster_folder, 'intermediate')

    # Create the raster and intermediate folders if they do not already exist
    os.makedirs(intermediate_folder, exist_ok=True)

    # Define intermediate datasets
    flow_direction_raster = os.path.join(intermediate_folder, 'flow_direction.tif')
    flow_accumulation_raster = os.path.join(intermediate_folder, 'flow_accumulation.tif')
    raw_slope_raster = os.path.join(intermediate_folder, 'raw_slope.tif')
    raw_aspect_raster = os.path.join(intermediate_folder, 'raw_aspect.tif')

    # Get the z factor appropriate to the xy and z units
    zFactor = getZFactor(elevation_input, z_unit)

    #### CALCULATE INTERMEDIATE DATASETS

    # Calculate flow direction if it does not already exist
    if not os.path.exists(flow_direction_raster):
        print(f'\tCalculating flow direction for {grid_title}...')
        iteration_start = time.time()
        # D8 flow direction feeds the flow accumulation calculation below
        flow_direction = FlowDirection(elevation_input, 'NORMAL', '', 'D8')
        flow_direction.save(flow_direction_raster)
        _report_success(iteration_start)
    else:
        _report_exists('Flow direction')

    # Calculate flow accumulation if it does not already exist
    if not os.path.exists(flow_accumulation_raster):
        print(f'\tCalculating flow accumulation for {grid_title}...')
        iteration_start = time.time()
        flow_accumulation = FlowAccumulation(flow_direction_raster, '', 'FLOAT', 'D8')
        flow_accumulation.save(flow_accumulation_raster)
        _report_success(iteration_start)
    else:
        _report_exists('Flow accumulation')

    # Calculate raw slope in degrees if it does not already exist
    if not os.path.exists(raw_slope_raster):
        print(f'\tCalculating raw slope for {grid_title}...')
        iteration_start = time.time()
        raw_slope = Slope(elevation_input, "DEGREE", zFactor)
        raw_slope.save(raw_slope_raster)
        _report_success(iteration_start)
    else:
        _report_exists('Raw slope')

    # Calculate raw aspect if it does not already exist
    if not os.path.exists(raw_aspect_raster):
        print(f'\tCalculating raw aspect for {grid_title}...')
        iteration_start = time.time()
        raw_aspect = Aspect(elevation_input, 'PLANAR', z_unit)
        raw_aspect.save(raw_aspect_raster)
        _report_success(iteration_start)
    else:
        _report_exists('Raw aspect')

    #### CALCULATE INTEGER ELEVATION

    # Calculate integer elevation if it does not already exist
    if not arcpy.Exists(elevation_output):
        print(f'\tCalculating integer elevation for {grid_title}...')
        iteration_start = time.time()
        # Round to integer (+0.5 then truncate rounds half up for positive values)
        print(f'\t\tConverting values to integers...')
        integer_elevation = Int(Raster(elevation_input) + 0.5)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_elevation, elevation_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _report_success(iteration_start)
    else:
        _report_exists('Integer elevation')

    #### CALCULATE LINEAR ASPECT

    # Calculate linear aspect if it does not already exist
    if not arcpy.Exists(aspect_output):
        print(f'\tCalculating linear aspect for {grid_title}...')
        iteration_start = time.time()
        # Create an initial linear aspect calculation using the linear aspect function
        aspect_intermediate = os.path.splitext(aspect_output)[0] + '_intermediate.tif'
        linear_aspect(raw_aspect_raster, aspect_intermediate)
        # Round to integer
        print(f'\t\tConverting values to integers...')
        integer_aspect = Int(Raster(aspect_intermediate) + 0.5)
        # Fill missing data (no aspect) with values of -1
        print(f'\t\tFilling values of no aspect...')
        conditional_aspect = Con(IsNull(integer_aspect), -1, integer_aspect)
        # Extract filled raster to grid mask
        print(f'\t\tExtracting filled raster to grid...')
        extract_aspect = ExtractByMask(conditional_aspect, grid_raster)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(extract_aspect, aspect_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(aspect_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Linear aspect')

    #### CALCULATE COMPOUND TOPOGRAPHIC INDEX

    # Calculate compound topographic index if it does not already exist
    if not arcpy.Exists(cti_output):
        print(f'\tCalculating compound topographic index for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate compound topographic index calculation
        cti_intermediate = os.path.splitext(cti_output)[0] + '_intermediate.tif'
        compound_topographic(elevation_input, flow_accumulation_raster, raw_slope_raster,
                             cti_intermediate)
        # Convert to integer values (scaled by 100 to preserve two decimal places)
        print(f'\t\tConverting values to integers...')
        integer_compound = Int((Raster(cti_intermediate) * 100) + 0.5)
        # Copy integer raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_compound, cti_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(cti_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Compound topographic index')

    #### CALCULATE ROUGHNESS

    # Calculate roughness if it does not already exist
    if not arcpy.Exists(roughness_output):
        print(f'\tCalculating roughness for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate roughness calculation
        roughness_intermediate = os.path.splitext(roughness_output)[0] + '_intermediate.tif'
        roughness(elevation_input, roughness_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_roughness = Int(Raster(roughness_intermediate) + 0.5)
        # Fill missing data with values of 0
        print(f'\t\tFilling values of roughness...')
        conditional_roughness = Con(IsNull(integer_roughness), 0, integer_roughness)
        # Extract filled raster to grid mask
        print(f'\t\tExtracting filled raster to grid...')
        extract_roughness = ExtractByMask(conditional_roughness, grid_raster)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(extract_roughness, roughness_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(roughness_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Roughness')

    #### CALCULATE SITE EXPOSURE

    # Calculate site exposure if it does not already exist
    if not arcpy.Exists(exposure_output):
        print(f'\tCalculating site exposure for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate site exposure calculation
        exposure_intermediate = os.path.splitext(exposure_output)[0] + '_intermediate.tif'
        site_exposure(raw_aspect_raster, raw_slope_raster, exposure_intermediate)
        # Convert to integer values (scaled by 100 to preserve two decimal places)
        print(f'\t\tConverting values to integers...')
        integer_exposure = Int((Raster(exposure_intermediate) * 100) + 0.5)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_exposure, exposure_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(exposure_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Site exposure')

    #### CALCULATE MEAN SLOPE

    # Calculate mean slope if it does not already exist
    if not arcpy.Exists(slope_output):
        print(f'\tCalculating mean slope for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate mean slope calculation
        slope_intermediate = os.path.splitext(slope_output)[0] + '_intermediate.tif'
        mean_slope(raw_slope_raster, slope_intermediate)
        # Convert to integer values
        print(f'\t\tConverting values to integers...')
        integer_slope = Int(Raster(slope_intermediate) + 0.5)
        # Copy extracted raster to output (slope in degrees fits 8-bit signed)
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_slope, slope_output, '', '', '-128',
                                    'NONE', 'NONE', '8_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(slope_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Mean slope')

    #### CALCULATE SURFACE AREA RATIO

    # Calculate surface area ratio if it does not already exist
    # (arcpy.Exists for consistency with the other final outputs; the original
    # used os.path.exists here)
    if not arcpy.Exists(area_output):
        print(f'\tCalculating surface area ratio for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate surface area ratio calculation
        area_intermediate = os.path.splitext(area_output)[0] + '_intermediate.tif'
        surface_area(raw_slope_raster, area_intermediate)
        # Convert to integer values (scaled by 10 to preserve one decimal place)
        print(f'\t\tConverting values to integers...')
        integer_area = Int((Raster(area_intermediate) * 10) + 0.5)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_area, area_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(area_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Surface area ratio')

    #### CALCULATE SURFACE RELIEF RATIO

    # Calculate surface relief ratio if it does not already exist
    if not arcpy.Exists(relief_output):
        print(f'\tCalculating surface relief ratio for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate surface relief ratio calculation
        relief_intermediate = os.path.splitext(relief_output)[0] + '_intermediate.tif'
        surface_relief(elevation_input, relief_intermediate)
        # Convert to integer values (scaled by 1000 to preserve three decimal places)
        print(f'\t\tConverting values to integers...')
        integer_relief = Int((Raster(relief_intermediate) * 1000) + 0.5)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_relief, relief_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(relief_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Surface relief ratio')

    #### CALCULATE TOPOGRAPHIC POSITION

    # Calculate topographic position if it does not already exist
    if not arcpy.Exists(position_output):
        print(f'\tCalculating topographic position for {grid_title}...')
        iteration_start = time.time()
        # Create an intermediate topographic position calculation
        position_intermediate = os.path.splitext(position_output)[0] + '_intermediate.tif'
        topographic_position(elevation_input, position_intermediate)
        # Convert to integer values (scaled by 100 to preserve two decimal places)
        print(f'\t\tConverting values to integers...')
        integer_position = Int((Raster(position_intermediate) * 100) + 0.5)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(integer_position, position_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(position_intermediate)
        _report_success(iteration_start)
    else:
        _report_exists('Topographic position')

    #### CALCULATE TOPOGRAPHIC RADIATION

    # Calculate topographic radiation if it does not already exist
    if not arcpy.Exists(radiation_output):
        print(f'\tCalculating topographic radiation for {grid_title}...')
        iteration_start = time.time()
        # Create intermediate topographic radiation calculations
        radiation_intermediate = os.path.splitext(radiation_output)[0] + '_intermediate.tif'
        radiation_integer = os.path.splitext(radiation_output)[0] + '_integer.tif'
        topographic_radiation(elevation_input, radiation_intermediate)
        # Convert to integer values (scaled by 1000 to preserve three decimal places)
        print(f'\t\tConverting values to integers...')
        integer_radiation = Int((Raster(radiation_intermediate) * 1000) + 0.5)
        arcpy.management.CopyRaster(integer_radiation, radiation_integer, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        # Extract integer raster to grid mask
        print(f'\t\tExtracting integer raster to grid...')
        extract_radiation = ExtractByMask(radiation_integer, grid_raster)
        # Copy extracted raster to output
        print(f'\t\tCreating output raster...')
        arcpy.management.CopyRaster(extract_radiation, radiation_output, '', '', '-32768',
                                    'NONE', 'NONE', '16_BIT_SIGNED', 'NONE', 'NONE', 'TIFF', 'NONE')
        _delete_intermediates(radiation_intermediate, radiation_integer)
        _report_success(iteration_start)
    else:
        _report_exists('Topographic radiation')

    outprocess = f'Finished topographic properties for {grid_title}.'
    return outprocess