def makeOutcropRaster(dem, controlPoints, inc, sgdb):
    """Predict where a plane through the control points crops out on the DEM.

    Fits a first-order (linear) TREND surface to the control points, subtracts
    that plane from the DEM, and classifies every cell whose difference lies
    within +/- dH of zero as predicted outcrop (1); all other cells get 0.
    dH is the elevation change of a plane dipping `inc` degrees over a
    two-cell horizontal distance.

    Returns the classified Con raster (in memory); intermediate rasters
    written to `sgdb` are deleted before returning.
    """
    addMsgAndPrint(' making predicted outcrop raster')

    # Raster environment: snap to the DEM grid and restrict processing to the
    # extent of the control points.
    arcpy.env.snapRaster = dem
    arcpy.env.cellSize = arcpy.Describe(dem).meanCellWidth
    arcpy.env.extent = controlPoints

    # Fit a linear plane through the control-point geometry with TREND.
    addMsgAndPrint(' calculating plane with TREND')
    planeRaster = os.path.join(sgdb, 'xxxPlane')
    testAndDelete(planeRaster)
    arcpy.Trend_3d(controlPoints, 'SHAPE', planeRaster, '', 1, 'LINEAR')

    # DEM minus plane: positive where the terrain sits above the plane,
    # negative where it sits below.
    addMsgAndPrint(' subtracting plane from DEM')
    diffRaster = planeRaster + '_i'
    testAndDelete(diffRaster)
    arcpy.Minus_3d(dem, planeRaster, diffRaster)
    arcpy.CalculateStatistics_management(diffRaster)

    # Classification tolerance derived from the dip angle and cell size.
    dH = math.tan(math.radians(inc)) * 2.0 * float(arcpy.env.cellSize)
    addMsgAndPrint(' classifying results, dH=' + str(dH))
    whereClause = 'VALUE > ' + str(dH) + ' OR VALUE < -' + str(dH)
    outCon = Con(diffRaster, 0, 1, whereClause)

    # Drop the on-disk intermediates; only the in-memory result survives.
    testAndDelete(planeRaster)
    testAndDelete(diffRaster)
    return outCon
def create_chm(rootdir):
    """
    Resamples the DTM to the resolution of the DSM and then subtracts the
    resampled DTM from the DSM to create a Canopy Height Model (CHM).

    Assumes directory structure of rootdir\\refID\\sensor\\output...
    """
    arcpy.CheckOutExtension("3d")  ## Needed for the Minus_3d tool

    # One absolute path per reference-ID folder directly under rootdir.
    refIDs = [os.path.join(rootdir, subdir) for subdir in os.listdir(rootdir)]

    for refID in refIDs:
        sensors = ["\\Mavic", "\\Sequoia"]
        for sensor in sensors:
            for root, dirs, files in os.walk(refID + sensor + "\\Output"):
                # Prune "tiles" in place so os.walk never descends into it.
                # (The original looped over `dirs` while removing from it,
                # which mutates a list during iteration.)
                if "tiles" in dirs:
                    dirs.remove("tiles")

                DSM = None
                DTM = None
                for fname in files:
                    if fname.endswith("dsm.tif"):
                        DSM = os.path.join(root, fname)
                    if fname.endswith("dtm.tif"):
                        DTM = os.path.join(root, fname)

                # Only process folders that actually contain a DSM/DTM pair;
                # the original would raise NameError (or silently reuse stale
                # paths from a previous folder) when one was missing.
                if DSM is None or DTM is None:
                    continue

                ## Defining cell size to resample the DTM to that of the DSM -
                ## known bug for doing so directly in the Resample_management
                ## tool. BUG!!! https://community.esri.com/thread/162982
                CellSize = arcpy.GetRasterProperties_management(
                    DSM, property_type="CELLSIZEX")
                DTMrsmpl = refID + sensor + "\\Output" + "\\DTMrsmpl.tif"
                arcpy.Resample_management(DTM, DTMrsmpl, CellSize)

                ## Subtract DTM (resampled) from DSM to create CHM
                arcpy.Minus_3d(DSM, DTMrsmpl,
                               refID + sensor + "\\Output" + sensor + "CHM.tif")
                print(refID + sensor + " CHM Created")
def create_chm(rootdir):
    """
    Resamples the DTM to the resolution of the DSM and then subtracts the
    resampled DTM from the DSM to create a Canopy Height Model (CHM).

    Assumes directory structure of rootdir\\refID\\sensor\\output...
    """
    arcpy.CheckOutExtension("3d")  ## Needed for the Minus_3d tool

    # Filter the non-flight folders up front. The original removed "Products"
    # and "ENV" from refIDs *while iterating over it*, which skips whichever
    # element follows each removal.
    refIDs = [d for d in os.listdir(rootdir) if d not in ("Products", "ENV")]

    for refID in refIDs:
        #sensors = ["Mavic", "Sequoia"]
        sensors = ["Sequoia"]
        for sensor in sensors:
            if sensor == "Mavic":
                bands = ["Output"]
            if sensor == "Sequoia":
                bands = ['gre', 'red', 'reg', 'nir']
            for band in bands:
                for root, dirs, files in os.walk(
                        os.path.join(rootdir, refID, sensor, band)):
                    # Prune "tiles" in place so os.walk never descends into
                    # it (the original mutated `dirs` while iterating it).
                    if "tiles" in dirs:
                        dirs.remove("tiles")

                    DSM = None
                    DTM = None
                    for f in files:
                        if f.endswith("dsm.tif"):
                            DSM = os.path.join(root, f)
                        if f.endswith("dtm.tif"):
                            DTM = os.path.join(root, f)

                    # Skip folders without a complete DSM/DTM pair; the
                    # original would raise NameError or reuse stale paths.
                    if DSM is None or DTM is None:
                        continue

                    ## Defining cell size to resample the DTM to that of the
                    ## DSM - known bug for doing so in Resample_management.
                    ## BUG!!! https://community.esri.com/thread/162982
                    CellSize = arcpy.GetRasterProperties_management(
                        DSM, property_type="CELLSIZEX")
                    DTMrsmpl = os.path.join(rootdir, refID, sensor, band,
                                            "DTMrsmpl.tif")
                    arcpy.Resample_management(DTM, DTMrsmpl, CellSize)

                    ## Subtract DTM (resampled) from DSM to create CHM
                    if sensor == "Mavic":
                        arcpy.Minus_3d(
                            DSM, DTMrsmpl,
                            os.path.join(rootdir, refID, sensor, band,
                                         "{}_{}_CHM.tif".format(refID, sensor)))
                        print(refID, sensor, " CHM Created")
                    if sensor == "Sequoia":
                        ## have to use the old formatting style (Python 2.7)
                        arcpy.Minus_3d(
                            DSM, DTMrsmpl,
                            os.path.join(rootdir, refID, sensor, band,
                                         "%s_%s_CHM.tif" % (refID, band)))
                        print(refID, band, " CHM Created")
def do_task(grid_code):
    """Process the NUACI tile identified by `grid_code` and log the mapping.

    Input and output paths are built from the module-level `config` dict
    (folder paths and filename prefixes keyed by grid code).
    """
    in_path = '{}/{}{}.tif'.format(
        config['in_folder_path'], config['input_prefix'], grid_code)
    out_path = '{}/{}{}.tif'.format(
        config['out_folder_path'], config['output_prefix'], grid_code)

    source = Raster(in_path)
    # NOTE(review): both Minus_3d operands are the same raster, so the result
    # is zero wherever the input has data -- confirm this is intentional.
    arcpy.Minus_3d(source, source, out_path)

    print('{} -> {}'.format(in_path, out_path))
def create_raster(input_source, depth_raster, depth_value, boundary_size, boundary_offset, output_raster, debug):
    """Build a flood-depth elevation raster from a flood-extent raster.

    Subtracts a water-depth raster (or a constant default depth when no
    depth raster is supplied/usable) from the input flood elevation raster,
    then mosaics the untouched boundary-edge cells back over the pushed-down
    interior so the result renders without z-fighting in 3D.

    Parameters
    ----------
    input_source : flood elevation/extent raster (must exist).
    depth_raster : optional water-depth raster; must overlap input_source and
        share its spatial reference and cell size.
    depth_value, boundary_size, boundary_offset : numeric strings; commas are
        accepted as decimal separators and normalized below.
    output_raster : path of the raster to create.
    debug : 0 = run inside an ArcGIS Pro session ("CURRENT" project),
        1 = standalone debug run with hard-coded paths.

    Returns the output raster path, or None on cell-size mismatch; errors are
    reported through the module's exception classes and msg() helpers.

    Requires 3D Analyst and Spatial Analyst licenses (checked below).
    """
    try:
        # Get Attributes from User
        if debug == 0:
            # script variables: derive all working folders from the open
            # ArcGIS Pro project.
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            home_directory = aprx.homeFolder
            tiff_directory = home_directory + "\\Tiffs"
            tin_directory = home_directory + "\\Tins"
            scripts_directory = aprx.homeFolder + "\\Scripts"
            rule_directory = aprx.homeFolder + "\\rule_packages"
            log_directory = aprx.homeFolder + "\\Logs"
            layer_directory = home_directory + "\\layer_files"
            project_ws = aprx.defaultGeodatabase

            enableLogging = True
            DeleteIntermediateData = True
            verbose = 0
            in_memory_switch = True
        else:
            # debug: fixed local test environment, no Pro session needed.
            home_directory = r'D:\Temporary\Flood\3DFloodImpact'
            tiff_directory = home_directory + "\\Tiffs"
            log_directory = home_directory + "\\Logs"
            layer_directory = home_directory + "\\LayerFiles"
            project_ws = home_directory + "\\Testing.gdb"

            enableLogging = False
            DeleteIntermediateData = True
            verbose = 1
            in_memory_switch = False

        # Scratch geodatabase for all on-disk intermediates.
        scratch_ws = common_lib.create_gdb(home_directory, "Intermediate.gdb")
        arcpy.env.workspace = scratch_ws
        arcpy.env.overwriteOutput = True

        # fail safe for Eurpose's comma's: accept "1,5" as well as "1.5".
        depth_value = float(re.sub("[,.]", ".", depth_value))
        boundary_size = float(re.sub("[,.]", ".", boundary_size))
        boundary_offset = float(re.sub("[,.]", ".", boundary_offset))

        bail = 0  # set to 1 when there is no usable depth input at all

        if debug == 1:
            use_in_memory = False
        else:
            use_in_memory = True

        common_lib.set_up_logging(log_directory, TOOLNAME)
        # NOTE(review): time.clock() was removed in Python 3.8 -- this code
        # targets the older Python shipped with ArcGIS.
        start_time = time.clock()

        if arcpy.CheckExtension("3D") == "Available":
            arcpy.CheckOutExtension("3D")

            if arcpy.CheckExtension("Spatial") == "Available":
                arcpy.CheckOutExtension("Spatial")

                # check if input exists
                if arcpy.Exists(input_source):
                    arcpy.AddMessage(
                        "Processing input source: " + common_lib.get_name_from_feature_class(input_source))

                    no_initial_depth_raster = False

                    # create isnull from input source: marks NoData cells (1)
                    # vs data cells (0); reused repeatedly below as a mask.
                    if use_in_memory:
                        is_null = "in_memory/isnull_copy"
                    else:
                        is_null = os.path.join(scratch_ws, "isnull_copy")
                        if arcpy.Exists(is_null):
                            arcpy.Delete_management(is_null)

                    # check where we have NULL values
                    is_Null_raster = arcpy.sa.IsNull(input_source)
                    is_Null_raster.save(is_null)

                    # if we have a depth raster as input: make sure it overlaps with input_source
                    if depth_raster:
                        if arcpy.Exists(depth_raster):
                            # Check if same spatial reference!!!
                            if common_lib.check_same_spatial_reference([input_source], [depth_raster]) == 1:
                                depth_raster = None
                                raise MixOfSR
                            else:
                                if use_in_memory:
                                    clip_raster = "in_memory/clip_copy"
                                else:
                                    clip_raster = os.path.join(scratch_ws, "clip_copy")
                                    if arcpy.Exists(clip_raster):
                                        arcpy.Delete_management(clip_raster)

                                # check extents
                                # clip terrain to extent
                                msg_body = create_msg_body(
                                    "Clipping depth raster to input flooding layer extent", 0, 0)
                                msg(msg_body)
                                arcpy.Clip_management(depth_raster, "#", clip_raster, input_source,
                                                      "#", "#", "MAINTAIN_EXTENT")

                                # TODO double check below
                                # create IsNull to be used to check for NoData.
                                if use_in_memory:
                                    is_null0 = "in_memory/is_null0"
                                else:
                                    is_null0 = os.path.join(scratch_ws, "is_null0")
                                    if arcpy.Exists(is_null0):
                                        arcpy.Delete_management(is_null0)

                                is_null_raster = arcpy.sa.IsNull(clip_raster)
                                is_null_raster.save(is_null0)

                                # MINIMUM == 1 means every cell of the clipped
                                # depth raster is NoData -> no overlap at all.
                                min_value = arcpy.GetRasterProperties_management(is_null0, "MINIMUM")[0]
                                # all_nodata = arcpy.GetRasterProperties_management(clip_raster, "ALLNODATA")[0]

                                if int(min_value) == 1:
                                    msg_body = create_msg_body("Input rasters do not overlap.", 0, 0)
                                    msg(msg_body, WARNING)
                                    depth_raster = None
                                else:
                                    org_depth_raster = depth_raster
                                    depth_raster = clip_raster
                                    no_initial_depth_raster = False

                                    # if depth_value > 0:
                                    #     # grab set all values > 2 to default depth value
                                    #     if use_in_memory:
                                    #         depth_push = "in_memory/depth_push"
                                    #     else:
                                    #         depth_push = os.path.join(scratch_ws, "depth_push")
                                    #
                                    #     if arcpy.Exists(depth_push):
                                    #         arcpy.Delete_management(depth_push)
                                    #
                                    #     msg_body = create_msg_body("Pushing depth > 2 to: " + str(depth_value), 0, 0)
                                    #     msg(msg_body)
                                    #
                                    #     depth_pushRaster = arcpy.sa.Con(clip_raster, depth_value, clip_raster, "VALUE > 2")
                                    #     depth_pushRaster.save(depth_push)
                                    #
                                    #     depth_raster = depth_push
                                    # else:
                                    #     depth_raster = clip_raster
                        else:
                            depth_raster = None
                            raise NoDepthRaster

                    # if we don't have a depth raster: crate one based on the depth value
                    if not depth_raster:
                        if depth_value != 0:
                            no_initial_depth_raster = True

                            arcpy.AddMessage("Using default depth value of: " + str(depth_value))

                            # create raster from default depth value
                            if use_in_memory:
                                depth_raster = "in_memory/depth_value_raster"
                            else:
                                depth_raster = os.path.join(scratch_ws, "depth_value_raster")
                                if arcpy.Exists(depth_raster):
                                    arcpy.Delete_management(depth_raster)

                            # create raster from default depth value
                            msg_body = create_msg_body("Create depth raster from default depth value.", 0, 0)
                            msg(msg_body)

                            # Con with the same value on both branches yields a
                            # constant raster over the input's extent/grid.
                            outConRaster = arcpy.sa.Con(is_null, depth_value, depth_value)
                            outConRaster.save(depth_raster)
                        else:
                            bail = 1
                            msg_body = create_msg_body(
                                "No depth raster and default depth value is 0. No point continuing.", 0, 0)
                            msg(msg_body, WARNING)

                    if bail == 0:
                        # subtract depth raster from flood elevation raster --
                        # but only if the two grids share a cell size.
                        cell_size_source = arcpy.GetRasterProperties_management(input_source, "CELLSIZEX")
                        cell_size_depth = arcpy.GetRasterProperties_management(depth_raster, "CELLSIZEX")

                        if cell_size_source.getOutput(0) == cell_size_depth.getOutput(0):
                            if arcpy.Exists(output_raster):
                                arcpy.Delete_management(output_raster)

                            # create raster from depth values
                            # adjust values that are less than 0.2
                            if use_in_memory:
                                depth_push = "in_memory/depth_boundary_push"
                                depth_temp = "in_memory/depth_temp"
                            else:
                                depth_push = os.path.join(scratch_ws, "depth_boundary_push")
                                if arcpy.Exists(depth_push):
                                    arcpy.Delete_management(depth_push)
                                depth_temp = os.path.join(scratch_ws, "depth_temp")
                                if arcpy.Exists(depth_temp):
                                    arcpy.Delete_management(depth_temp)

                            msg_body = create_msg_body("Adjusting boundary values by: " + str(boundary_offset), 0, 0)
                            msg(msg_body)

                            # add boundary offset to depth raster
                            arcpy.Plus_3d(depth_raster, boundary_offset, depth_temp)

                            depth_raster_object = arcpy.sa.Raster(depth_raster)

                            # for values less than 0.2 -> grab adjusted depth raster.
                            depth_push_Boundary_Raster = arcpy.sa.Con(
                                depth_raster_object < 0.2, depth_temp, depth_raster)
                            depth_push_Boundary_Raster.save(depth_push)

                            depth_raster = depth_push

                            if use_in_memory:
                                clip_depth = "in_memory/clip_depth"
                            else:
                                clip_depth = os.path.join(scratch_ws, "clip_depth")
                                if arcpy.Exists(clip_depth):
                                    arcpy.Delete_management(clip_depth)

                            # create raster from default depth value
                            msg_body = create_msg_body("Create clip depth raster...", 0, 0)
                            msg(msg_body)

                            # grab depth elevation values where not null and null where is null (clip using flooding raster)
                            outConRaster = arcpy.sa.Con(is_null, input_source, depth_raster)
                            outConRaster.save(clip_depth)

                            msg_body = create_msg_body("Subtracting depth raster from input flooding raster.", 0, 0)
                            msg(msg_body)

                            if use_in_memory:
                                minus_raster = "in_memory/minus_3D"
                            else:
                                minus_raster = os.path.join(scratch_ws, "minus_3D")
                                if arcpy.Exists(minus_raster):
                                    arcpy.Delete_management(minus_raster)

                            # actual subtract
                            arcpy.Minus_3d(input_source, clip_depth, minus_raster)

                            # now we want just the outside cells (1x cellsize)
                            if use_in_memory:
                                raster_polygons = "in_memory/raster_polygons"
                            else:
                                raster_polygons = os.path.join(scratch_ws, "raster_polygons")
                                if arcpy.Exists(raster_polygons):
                                    arcpy.Delete_management(raster_polygons)

                            out_geom = "POLYGON"  # output geometry type
                            arcpy.RasterDomain_3d(minus_raster, raster_polygons, out_geom)

                            # buffer it outwards first
                            if use_in_memory:
                                polygons_outward = "in_memory/outward_buffer"
                            else:
                                polygons_outward = os.path.join(scratch_ws, "outward_buffer")
                                if arcpy.Exists(polygons_outward):
                                    arcpy.Delete_management(polygons_outward)

                            # x = cell_size_source.getOutput(0)
                            # normalize locale decimal comma in the cell size
                            x = float(re.sub("[,.]", ".", str(cell_size_source.getOutput(0))))
                            # x = float(str(cell_size_source.getOutput(0)))
                            buffer_out = int(x)

                            xy_unit = common_lib.get_xy_unit(minus_raster, 0)

                            if xy_unit == "Feet":
                                buffer_text = str(buffer_out) + " Feet"
                            else:
                                buffer_text = str(buffer_out) + " Meters"

                            sideType = "FULL"
                            arcpy.Buffer_analysis(raster_polygons, polygons_outward, buffer_text, sideType)

                            # buffer it inwards so that we have a polygon only of the perimeter plus a 2 cells inward.
                            if use_in_memory:
                                polygons_inward = "in_memory/inward_buffer"
                            else:
                                polygons_inward = os.path.join(scratch_ws, "inward_buffer")
                                if arcpy.Exists(polygons_inward):
                                    arcpy.Delete_management(polygons_inward)

                            # x = cell_size_source.getOutput(0)
                            x = float(re.sub("[,.]", ".", str(cell_size_source.getOutput(0))))
                            # x = float(str(cell_size_source.getOutput(0)))
                            buffer_in = (boundary_size - 1) + int(
                                2 * x)  # boundary is always 2 cellsizes / user can't go lower than 2.

                            xy_unit = common_lib.get_xy_unit(minus_raster, 0)

                            if xy_unit == "Feet":
                                buffer_text = "-" + str(buffer_in) + " Feet"
                            else:
                                buffer_text = "-" + str(buffer_in) + " Meters"

                            sideType = "FULL"
                            arcpy.Buffer_analysis(polygons_outward, polygons_inward, buffer_text, sideType)

                            if use_in_memory:
                                erase_polygons = "in_memory/erase"
                            else:
                                erase_polygons = os.path.join(scratch_ws, "erase")
                                if arcpy.Exists(erase_polygons):
                                    arcpy.Delete_management(erase_polygons)

                            # NOTE(review): xyTol is never used below.
                            xyTol = "1 Meters"
                            # outward minus inward leaves a ring polygon
                            # covering just the raster's edge zone.
                            arcpy.Erase_analysis(polygons_outward, polygons_inward, erase_polygons)

                            msg_body = create_msg_body("Buffering depth edges...", 0, 0)
                            msg(msg_body)

                            if use_in_memory:
                                extract_mask_raster = "in_memory/extract_mask"
                            else:
                                extract_mask_raster = os.path.join(scratch_ws, "extract_mask")
                                if arcpy.Exists(extract_mask_raster):
                                    arcpy.Delete_management(extract_mask_raster)

                            extract_temp_raster = arcpy.sa.ExtractByMask(minus_raster, erase_polygons)
                            extract_temp_raster.save(extract_mask_raster)

                            if no_initial_depth_raster == True:
                                if use_in_memory:
                                    plus_mask = "in_memory/plus_mask"
                                else:
                                    plus_mask = os.path.join(scratch_ws, "plus_mask")
                                    if arcpy.Exists(plus_mask):
                                        arcpy.Delete_management(plus_mask)

                                # lift the edge cells back up when the depth
                                # came from the constant default value
                                arcpy.Plus_3d(extract_mask_raster, (depth_value - 1), plus_mask)
                                extract_mask_raster = plus_mask

                            if use_in_memory:
                                minus_raster2 = "in_memory/minus_3D2"
                            else:
                                minus_raster2 = os.path.join(scratch_ws, "minus_3D2")
                                if arcpy.Exists(minus_raster2):
                                    arcpy.Delete_management(minus_raster2)

                            # push depth elevation raster down by default depth value
                            if depth_value > 0 and no_initial_depth_raster == False:
                                msg_body = create_msg_body(
                                    "Pushing inner depth down by: " + str(depth_value) + " to prevent z-fighting.",
                                    0, 0)
                                msg(msg_body)
                                arcpy.Minus_3d(minus_raster, depth_value, minus_raster2)
                            else:
                                minus_raster2 = minus_raster

                            # deliberately disabled branch: mosaic always goes
                            # to disk (MosaicToNewRaster needs a real folder).
                            if 0:  # use_in_memory:
                                mosaic_raster = "in_memory/mosaic"
                            else:
                                mosaic_raster = os.path.join(scratch_ws, "mosaic")
                                if arcpy.Exists(mosaic_raster):
                                    arcpy.Delete_management(mosaic_raster)

                            listRasters = []
                            listRasters.append(extract_mask_raster)
                            listRasters.append(minus_raster2)

                            desc = arcpy.Describe(listRasters[0])

                            # grab the original outside cells and the pushed down depth elevation raster
                            # ("FIRST" keeps the edge cells where the rasters overlap)
                            arcpy.MosaicToNewRaster_management(
                                listRasters, os.path.dirname(mosaic_raster),
                                os.path.basename(mosaic_raster), desc.spatialReference,
                                "32_BIT_FLOAT", x, 1, "FIRST", "")

                            # now we do an isnull on raster domain poly
                            assignmentType = "CELL_CENTER"
                            priorityField = "#"

                            # Execute PolygonToRaster
                            calc_field = "value_field"
                            common_lib.delete_add_field(raster_polygons, calc_field, "DOUBLE")
                            arcpy.CalculateField_management(raster_polygons, calc_field, 1, "PYTHON_9.3")

                            if use_in_memory:
                                poly_raster = "in_memory/poly_raster"
                            else:
                                poly_raster = os.path.join(scratch_ws, "poly_raster")
                                if arcpy.Exists(poly_raster):
                                    arcpy.Delete_management(poly_raster)

                            arcpy.PolygonToRaster_conversion(raster_polygons, calc_field, poly_raster,
                                                             assignmentType, priorityField, x)

                            # create isnull
                            if use_in_memory:
                                is_null2 = "in_memory/isnull_copy2"
                            else:
                                is_null2 = os.path.join(scratch_ws, "isnull_copy2")
                                if arcpy.Exists(is_null2):
                                    arcpy.Delete_management(is_null2)

                            is_Null_raster2 = arcpy.sa.IsNull(poly_raster)
                            is_Null_raster2.save(is_null2)

                            # con on mosaic: fill holes in the mosaic with the
                            # rasterized domain polygon value.
                            finalRaster = arcpy.sa.Con(is_null2, poly_raster, mosaic_raster)
                            finalRaster.save(output_raster)
                        else:
                            arcpy.AddWarning(
                                "Cell size of " + common_lib.get_name_from_feature_class(
                                    input_source) + " is different than " + org_depth_raster + ". Exiting...")
                            output_raster = None

                        if use_in_memory:
                            arcpy.Delete_management("in_memory")
                else:
                    # use default depth value
                    raise NoInputLayer

                end_time = time.clock()
                msg_body = create_msg_body(
                    "Set Flood Elevation Value for Raster completed successfully.", start_time, end_time)
                msg(msg_body)

                arcpy.ClearWorkspaceCache_management()

                return output_raster
            else:
                raise LicenseErrorSpatial
        else:
            raise LicenseError3D

        arcpy.ClearWorkspaceCache_management()

    except MixOfSR:
        # The input has mixed SR
        # print(('Input data has mixed spatial references. Ensure all input is in the same spatial reference, including the same vertical units.'))
        arcpy.AddError(
            'Input data has mixed spatial references. Ensure all input is in the same spatial reference, including the same vertical units.')

    except NoInputLayer:
        print("Can't find Input layer. Exiting...")
        arcpy.AddError("Can't find Input layer. Exiting...")

    except NoDepthRaster:
        print("Can't find Depth raster. Exiting...")
        arcpy.AddError("Can't find depth raster. Exiting...")

    except NotProjected:
        print("Input data needs to be in a projected coordinate system. Exiting...")
        arcpy.AddError("Input data needs to be in a projected coordinate system. Exiting...")

    except NoLayerFile:
        print("Can't find Layer file. Exiting...")
        arcpy.AddError("Can't find Layer file. Exiting...")

    except LicenseError3D:
        print("3D Analyst license is unavailable")
        arcpy.AddError("3D Analyst license is unavailable")

    except LicenseErrorSpatial:
        print("Spatial Analyst license is unavailable")
        arcpy.AddError("Spatial Analyst license is unavailable")

    except NoNoDataError:
        print("Input raster does not have NODATA values")
        arcpy.AddError("Input raster does not have NODATA values")

    except NoUnits:
        print("No units detected on input data")
        arcpy.AddError("No units detected on input data")

    except NoPolygons:
        print("Input data can only be polygon features or raster datasets.")
        arcpy.AddError("Input data can only be polygon features or raster datasets.")

    except ValueError:
        print("Input no flood value is not a number.")
        arcpy.AddError("Input no flood value is not a number.")

    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("With error message: %s" % synerror, ERROR)
        msg("ArcPy Error Message: %s" % arcpy.GetMessages(2), ERROR)

    except FunctionError as f_e:
        messages = f_e.args[0]
        msg("Error in function: %s" % messages["function"], ERROR)
        msg("Error on %s" % messages["line"], ERROR)
        msg("Error in file name: %s" % messages["filename"], ERROR)
        msg("With error message: %s" % messages["synerror"], ERROR)
        msg("ArcPy Error Message: %s" % messages["arc"], ERROR)

    except:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("with error message: %s" % synerror, ERROR)

    finally:
        # Always release both licenses, even on error paths.
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
temp_name2 = split1[1] # identify raster type event1 = temp_name1[1:] # scrub "e" or "p" from scenario outname = "c_" + event1 + "_" + temp_name2 if design == "p": # only proceed for 1/2 of data, proposed # Iterate through raster list again, comparing to current raster in outer loop... if event analysis type match, but are different design scenarios, minus. for raster2 in rasters: split2 = raster2.split( "_") # split input feature path at underscores temp_name3 = split2[0] # identify scenario temp_name4 = split2[1] # identify raster type event2 = temp_name3[1:] # scrub "e" or "p" from scenario if (event1 == event2) and (temp_name2 == temp_name4): if outname not in created: created.append(outname) outpath = os.path.join(outworkspace, outname) if temp_name1 == "wsel": con_raster = arcpy.sa.Con( raster1 == 0, topo - raster2, Con(raster == 0, raster1 - topo, arcpy.Minus_3d(raster1, raster2))) con_raster.save(outpath) else: arcpy.Minus_3d(raster1, raster2, outpath) print len(created), " rasters created: ", created # Check in spatial analyst arcpy.CheckInExtension("Spatial")
expression = "{0} <None> Soft_Clip <None>;{1} {2} Mass_Points <None>;{3} {4} Hard_Line <None>".format( Reaches, New_CrossSections, stage_name_for_table, New_CrossSections, stage_name_for_table) arcpy.CreateTin_3d(TIN, spatial_ref, expression, "CONSTRAINED_DELAUNAY") # TIN to Raster arcpy.AddMessage("Converting TIN to raster for {0}".format(stage)) input_cell_size = "CELLSIZE {0}".format(Cell_Size) arcpy.TinRaster_3d(TIN, RasterFromTIN, "FLOAT", "LINEAR", input_cell_size, "1") # Raster subtraction arcpy.AddMessage( "Subtracting the raster from TIN with the input DEM for {0}".format( stage)) arcpy.Minus_3d(RasterFromTIN, DEM, Subtracted) # Reclassify raster arcpy.AddMessage( "Reclassifing the subtracted raster for {0}".format(stage)) arcpy.Reclassify_3d(Subtracted, "Value", "-999 0.01 1;0.01 999 0", Reclassified, "DATA") # Extract by Attributes arcpy.AddMessage("Extracting by attributes for {0}".format(stage)) arcpy.gp.ExtractByAttributes_sa(Reclassified, "\"Value\" = 0", ExtractedRaster) # Create depth grid arcpy.AddMessage("Creating depthgrid for {0}".format(stage)) depth_grid_name = "{0}_DepthGrid".format(stage)
Transect_1_sum = Transect1["SHAPE_Length"].sum() Transect2 = arcpy.da.TableToNumPyArray(arcpy.Erase_analysis(beachtrans2,slr_bruun, beachwid2, ""),'SHAPE_Length') Transect_2_sum = Transect2["SHAPE_Length"].sum() Transect3 = arcpy.da.TableToNumPyArray(arcpy.Erase_analysis(beachtrans3,slr_bruun, beachwid3, ""),'SHAPE_Length') Transect_3_sum = Transect3["SHAPE_Length"].sum() Transect4 = arcpy.da.TableToNumPyArray(arcpy.Erase_analysis(beachtrans4,slr_bruun, beachwid4, ""),'SHAPE_Length') Transect_4_sum = Transect4["SHAPE_Length"].sum() Transect5 = arcpy.da.TableToNumPyArray(arcpy.Erase_analysis(beachtrans5,slr_bruun, beachwid5, ""),'SHAPE_Length') Transect_5_sum = Transect5["SHAPE_Length"].sum() Beach_Width = np.average([Transect_1_sum, Transect_2_sum, Transect_3_sum, Transect_4_sum, Transect_5_sum]) print "The average beach width is " + str(round(Beach_Width,1)) + "m" Av_Beach_Width.append(Beach_Width) # VALUE OF HOUSES LOST AT MWHS ############################################ # Process: Minus (2) arcpy.Minus_3d(slr_surface, MWHS_ldrclip2m, diffslr) # Process: Extract by Mask arcpy.gp.ExtractByMask_sa(diffslr, max_slr_poly, slrdepth) # Process: Select Layer By Location. Select land within inundation extent # Land use type is houses # Process: Make Feature Layer. Needed for extract by mask step arcpy.MakeFeatureLayer_management(arcpy.SelectLayerByLocation_management("houses", "INTERSECT", max_slr_poly, "", "NEW_SELECTION", "NOT_INVERT"), houserisk, "", "", "OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;Entity Entity VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;Shape_Area Shape_Area VISIBLE NONE;Av_ppl_per_dwelling Av_ppl_per_dwelling VISIBLE NONE") # Process: Extract by Mask. 
Extract raster depth cells in land use area # arcpy.gp.ExtractByMask_sa(slrdepth, houserisk, house_loss) # Apply pre-defined function that applies (1) damage index based on depth; # (2) multiplies damage index by MaxDam # tempdamarray = arcpy.RasterToNumPyArray(house_loss,nodata_to_value=0) # RasterLoss(tempdamarray, MAX_DAMAGE) # slrlossimp = rastvul.sum() house_area = "house_area"
def Calculate_depth(surge, terrain):
    """Write a flood-depth raster (surge minus terrain).

    The output path is <wrkdir>\\<name>_depth.tif, built from the
    module-level globals ``wrkdir`` and ``name``.
    """
    out_raster = wrkdir + "\\" + name + "_depth.tif"
    arcpy.Minus_3d(in_raster_or_constant1=surge,
                   in_raster_or_constant2=terrain,
                   out_raster=out_raster)
arcpy.MakeLasDatasetLayer_management(lasD, lasDEM, class_code) print(" Creating surface...") arcpy.conversion.LasDatasetToRaster(lasDEM, outDEMras, 'ELEVATION', 'BINNING MINIMUM NATURAL_NEIGHBOR', 'FLOAT', 'CELLSIZE', cellSize, zFactor) print("%s processing completed!" % nm + '_DEM.tif') #Create DSM outDSMras = os.path.join(outDSM, nm + '_DSM_full.tif') print("Begin %s processing..." % (nm + '_DSM_full.tif')) lasDSM = arcpy.CreateUniqueName(nm + '_DSM_full') #returnValue = [3,4,5]#['FIRST', 'SINGLE']#[3,4,5] class_code = [0, 1, 2, 3, 4, 5, 9] print(" Creating LAS layer...") arcpy.MakeLasDatasetLayer_management(lasD, lasDSM, class_code) print(" Creating surface...") arcpy.conversion.LasDatasetToRaster(lasDSM, outDSMras, 'ELEVATION', 'BINNING MAXIMUM NATURAL_NEIGHBOR', 'FLOAT', 'CELLSIZE', cellSize, zFactor) print("%s processing completed!" % nm + '_DSM_full.tif') ## Get tree height raster canopyHgt = os.path.join(outCnpy, nm + '_CHM_full.tif') print("Begin processing %s..." % (nm + '_CHM_full.tif')) tmpHgt = "in_memory/hgt" arcpy.Minus_3d(outDSMras, outDEMras, tmpHgt) outFocalStatistics = Con(tmpHgt, tmpHgt, 0, "VALUE > 0") outFocalStatistics.save(canopyHgt) print("%s processing completed!" % nm + '_CHM_full.tif') print("LiDaR proceesing complete for %s" % nm)
def execute(self, params, messages): deleteInMemory() rawPath = os.path.dirname( params[1].valueAsText) + "\\" + os.path.basename( params[1].valueAsText) + "_Raw_Data" finalPath = os.path.dirname( params[1].valueAsText) + "\\" + os.path.basename( params[1].valueAsText) + "_Final_Data" testPath = os.path.dirname( params[1].valueAsText) + "\\" + os.path.basename( params[1].valueAsText) + "_Test_Data" if not os.path.exists(rawPath): os.mkdir(rawPath) if not os.path.exists(finalPath): os.mkdir(finalPath) if not os.path.exists(testPath): os.mkdir(testPath) poly = arcpy.MakeFeatureLayer_management(params[0].valueAsText) outRaw = rawPath + "\\" + os.path.basename(params[1].valueAsText) outFinal = finalPath + "\\" + os.path.basename(params[1].valueAsText) outTest = testPath + "\\" + os.path.basename(params[1].valueAsText) arcpy.env.workspace = os.path.dirname(params[1].valueAsText) arcpy.env.scratchWorkspace = os.path.dirname(params[1].valueAsText) Sites = arcpy.MakeFeatureLayer_management(params[2].valueAsText) DEM = params[4].valueAsText zFactor = params[5].value Streams = arcpy.MakeFeatureLayer_management(params[6].valueAsText) #Process Input Polygon lyr = finalPath + "\\" + os.path.basename( params[1].valueAsText) + "_Poly.shp" polyParts = int(arcpy.GetCount_management(poly).getOutput(0)) if polyParts > 1: arcpy.Dissolve_management(poly, lyr) else: arcpy.CopyFeatures_management(poly, lyr) lyrDesc = arcpy.Describe(lyr) lyrFields = lyrDesc.fields lyrExtent = lyrDesc.extent arcpy.env.extent = lyrExtent fieldx = 0 for field in lyrFields: if field.name == "POLY_ACRES": fieldx = 1 if fieldx == 0: arcpy.AddField_management(lyr, "POLY_ACRES", 'DOUBLE', 12, 8) arcpy.CalculateField_management(lyr, "POLY_ACRES", "!shape.area@ACRES!", "PYTHON_9.3", "") Desc = arcpy.Describe(lyr) polyAcres = ([ row[0] for row in arcpy.da.SearchCursor(lyr, ["POLY_ACRES"]) ][0]) arcpy.AddMessage("Polygon acreage = %d" % polyAcres) #Clip Sites siteQuery = params[3].ValueAsText outPoints = outFinal + 
"_Data_Points.shp" outSites = outRaw + "_Sites" if siteQuery == "Use All Sites": arcpy.MakeFeatureLayer_management(Sites, outSites) else: arcpy.MakeFeatureLayer_management(Sites, outSites, siteQuery) arcpy.SelectLayerByLocation_management(outSites, "INTERSECT", lyr) siteResult = int(arcpy.GetCount_management(outSites).getOutput(0)) arcpy.AddMessage(siteQuery) arcpy.AddMessage("Site Count = " + str(siteResult)) if siteResult < 10: arcpy.AddMessage("There are insufficient site data for analysis") systemExit(0) arcpy.FeatureToPoint_management(outSites, outPoints, "CENTROID") #Add Random field to extract build and test points arcpy.AddField_management(outPoints, "Test_Hold", "Double") with arcpy.da.UpdateCursor(outPoints, "Test_Hold") as cursor: for row in cursor: row[0] = random.random() cursor.updateRow(row) buildPoints = outTest + "_Build_Sites.shp" testPoints = outTest + "_Test_Sites.shp" arcpy.MakeFeatureLayer_management(outPoints, "in_memory\\test", """ "Test_Hold" <= 0.2 """) arcpy.CopyFeatures_management("in_memory\\test", testPoints) arcpy.MakeFeatureLayer_management(outPoints, "in_memory\\build", """ "Test_Hold" > 0.2 """) arcpy.CopyFeatures_management("in_memory\\build", buildPoints) #These are the raw layers of interest outSlope = outRaw + "_slp" outTopoProm = outRaw + "_pro" outHHODist = outRaw + "_dtw" outEleHHO = outRaw + "_eaw" outConfDist = outRaw + "_dtc" outEaConf = outRaw + "_eac" #DEM-based analysis outDEM = outRaw + "_dem" arcpy.Clip_management(DEM, "#", outDEM, lyr, "#", "ClippingGeometry") arcpy.Slope_3d(outDEM, outSlope, "DEGREE", zFactor) outBlk = BlockStatistics(outDEM, NbrCircle(3, "CELL"), "RANGE", "DATA") outBlk.save(outTopoProm) #Stream-based analysis - rubs only if streams are within input polygon outStreams = outFinal + "_str.shp" outVPts = outRaw + "_vpt.shp" vPtsEle = outRaw + "_vpe.shp" vPtsCor = outRaw + "_vpc.shp" outCPts = outRaw + "_cpt.shp" outCPsC = outRaw + "_cpc.shp" outBuff = outRaw + "_buff.shp" outDiss = outRaw + 
"_diss.shp" outConPts = outRaw + "_con.shp" cPtsEle = outRaw + "_cpe.shp" arcpy.Clip_analysis(Streams, lyr, outStreams) streamCount = arcpy.GetCount_management(outStreams) if not streamCount == 0: arcpy.FeatureVerticesToPoints_management(outStreams, outVPts, "ALL") arcpy.gp.ExtractValuesToPoints_sa(outVPts, outDEM, vPtsEle, "NONE", "VALUE_ONLY") arcpy.MakeFeatureLayer_management(vPtsEle, "in_memory\\vPtsCor", """"RASTERVALU" > 0""") arcpy.CopyFeatures_management("in_memory\\vPtsCor", vPtsCor) arcpy.AddField_management(vPtsCor, "WAT_ELEV", "SHORT") arcpy.CalculateField_management(vPtsCor, "WAT_ELEV", "[RASTERVALU]", "VB", "#") arcpy.gp.EucAllocation_sa(vPtsCor, "in_memory\\outAllo", "#", "#", "10", "WAT_ELEV", outHHODist, "#") arcpy.Minus_3d(outDEM, "in_memory\\outAllo", outEleHHO) deleteList = [outVPts, vPtsEle, vPtsCor] #Confluence-based analysis arcpy.FeatureVerticesToPoints_management(outStreams, "in_memory\\outCPts", "BOTH_ENDS") arcpy.MakeFeatureLayer_management("in_memory\\outCPts", outCPts) arcpy.FeatureToLine_management(lyr, "in_memory\\lyrLine", "#", "ATTRIBUTES") arcpy.SelectLayerByLocation_management(outCPts, "WITHIN_A_DISTANCE", "in_memory\\lyrLine", "100 Meters", "NEW_SELECTION") arcpy.SelectLayerByLocation_management(outCPts, "#", "#", "#", "SWITCH_SELECTION") arcpy.CopyFeatures_management(outCPts, outCPsC) arcpy.Buffer_analysis(outCPsC, outBuff, "10 METERS", "#", "#", "NONE", "#") arcpy.Dissolve_management(outBuff, outDiss, "#", "#", "SINGLE_PART", "#") arcpy.SpatialJoin_analysis(outDiss, outCPsC, "in_memory\\outJoin") arcpy.MakeFeatureLayer_management("in_memory\\outJoin", "in_memory\\joinLayer", """"Join_Count" >= 3""") arcpy.FeatureToPoint_management("in_memory\\joinLayer", outConPts, "CENTROID") arcpy.gp.ExtractValuesToPoints_sa(outConPts, outDEM, cPtsEle, "NONE", "VALUE_ONLY") arcpy.AddField_management(cPtsEle, "CONF_ELEV", "SHORT") arcpy.CalculateField_management(cPtsEle, "CONF_ELEV", "[RASTERVALU]", "VB", "#") 
arcpy.gp.EucAllocation_sa(cPtsEle, "in_memory\\outConfAllo", "#", "#", "10", "CONF_ELEV", outConfDist, "#") arcpy.Minus_3d(outDEM, "in_memory\\outConfAllo", outEaConf) deleteList = [ outCPts, outCPsC, outBuff, outDiss, outConPts, cPtsEle, outVPts, vPtsEle, vPtsCor ] for delete in deleteList: arcpy.Delete_management(delete) #Extract values to seperate tables and rename fields def extractValues(pointLayer, raster, outPoints, renameField): arcpy.gp.ExtractValuesToPoints_sa(pointLayer, raster, outPoints, "NONE", "ALL") arcpy.AddField_management(outPoints, renameField, "SHORT") arcpy.CalculateField_management(outPoints, renameField, "[RASTERVALU]", "VB", "#") return slopeTable = outRaw + "_slopePts.shp" promTable = outRaw + "_promPts.shp" distTHOtable = outRaw + "_distTHOPts.shp" distAHOtable = outRaw + "_distAHOPts.shp" distTCOtable = outRaw + "_distTCOPts.shp" distACOtable = outRaw + "_distACOPts.shp" extractValues(buildPoints, outSlope, slopeTable, "Slope") extractValues(buildPoints, outTopoProm, promTable, "Relief") if not streamCount == 0: extractValues(buildPoints, outHHODist, distTHOtable, "DTo_Water") extractValues(buildPoints, outEleHHO, distAHOtable, "DAbo_Water") extractValues(buildPoints, outConfDist, distTCOtable, "DTo_Conf") extractValues(buildPoints, outEaConf, distACOtable, "DAbo_Conf") #Get range of values for each layer and populate lists - reject null values def getValues(layer, fieldName): vList = [] with arcpy.da.SearchCursor(layer, [fieldName]) as cursor: for row in cursor: if row[0] != -999 and row[0] != -9999: vList.append(row[0]) return vList slopeList = getValues(slopeTable, "Slope") promList = getValues(promTable, "Relief") if not streamCount == 0: dtwList = getValues(distTHOtable, "DTo_Water") dawList = getValues(distAHOtable, "DAbo_Water") dtcList = getValues(distTCOtable, "DTo_Conf") dacList = getValues(distACOtable, "DAbo_Conf") deleteList = [ slopeTable, promTable, distTHOtable, distAHOtable, distTCOtable, distACOtable ] for item in 
deleteList: if arcpy.Exists(item): arcpy.Delete_management(item) #Get statistics for range of values def meanstdv(xlist): from math import sqrt n, total, std1 = len(xlist), 0, 0 for x in xlist: total = total + x mean = total / float(n) for x in xlist: std1 = std1 + (x - mean)**2 std = sqrt(std1 / float(n - 1)) return mean, std slopeStats = meanstdv(slopeList) promStats = meanstdv(promList) if not streamCount == 0: dtwStats = meanstdv(dtwList) dawStats = meanstdv(dawList) dtcStats = meanstdv(dtcList) dacStats = meanstdv(dacList) #Remap rasters according to 1-sigma range def remapRaster(inRaster, outRaster, recField, statList): R1 = statList[0] - statList[1] R2 = statList[0] + statList[1] rasterMin = arcpy.GetRasterProperties_management( inRaster, "MINIMUM") rasterMax = arcpy.GetRasterProperties_management( inRaster, "MAXIMUM") if R1 < rasterMin: R1 = rasterMin if R2 > rasterMax: R2 = rasterMax remap = str(rasterMin) + " " + str(R1) + " 0;" + str( R1) + " " + str(R2) + " 1;" + str(R2) + " " + str( rasterMax) + " 0" arcpy.Reclassify_3d(inRaster, recField, remap, outRaster, "NODATA") return outRaster targetSlope = outTest + "_slp" targetTopoProm = outTest + "_pro" targetHHODist = outTest + "_dtw" targetConfDist = outTest + "_dtc" targetEleHHO = outTest + "_eaw" targetEaConf = outTest + "_eac" remapRaster(outSlope, targetSlope, "Value", slopeStats) remapRaster(outTopoProm, targetTopoProm, "Value", promStats) if not streamCount == 0: remapRaster(outHHODist, targetHHODist, "Value", dtwStats) remapRaster(outEleHHO, targetEleHHO, "Value", dawStats) remapRaster(outConfDist, targetConfDist, "Value", dtcStats) remapRaster(outEaConf, targetEaConf, "Value", dacStats) #Test against test points def AreaAndAccuracy(inRaster, inPoly): rasterPoly = outRaw + "_poly.shp" rasterPolyarea = 0 lyrPolyarea = 0 testCount = int(arcpy.GetCount_management(testPoints).getOutput(0)) arcpy.RasterToPolygon_conversion(inRaster, rasterPoly, "SIMPLIFY", "Value") with arcpy.da.SearchCursor(rasterPoly, 
("GRIDCODE", "SHAPE@AREA")) as cursor: for row in cursor: if row[0] == 1: rasterPolyarea += row[1] with arcpy.da.SearchCursor(inPoly, "SHAPE@AREA") as cursor: for row in cursor: lyrPolyarea += row[0] targetAcres = rasterPolyarea / lyrPolyarea arcpy.MakeFeatureLayer_management(rasterPoly, "in_memory\\rasterPoly", """ "GRIDCODE" = 1 """) arcpy.MakeFeatureLayer_management(testPoints, "in_memory\\testPoints") arcpy.SelectLayerByLocation_management("in_memory\\testPoints", "WITHIN", "in_memory\\rasterPoly") selectCount = int( arcpy.GetCount_management("in_memory\\testPoints").getOutput( 0)) Accuracy = float(selectCount) / float(testCount) indexValue = float(Accuracy) / float(targetAcres) arcpy.AddMessage( os.path.basename(inRaster) + ": Accuracy = " + (str(Accuracy)[:5]) + ", Target Area Proportion = " + (str(targetAcres)[:5]) + ", Index = " + (str(indexValue)[:5])) arcpy.Delete_management(rasterPoly) return targetAcres, Accuracy, indexValue #Evaluate accuracy and target area proprtion - generate accuracy/area index - eliminate where index < 1 assessList = [ targetSlope, targetTopoProm, targetHHODist, targetEleHHO, targetConfDist, targetEaConf ] sumDict = {} for item in assessList: if arcpy.Exists(item): testX = AreaAndAccuracy(item, lyr) if testX[2] >= 1: sumDict[item] = testX nameList = sumDict.keys() #Weighted overlay outWeight = outFinal + "wgt" weightList = [] for item in sumDict: weightList.append(str(item) + " Value " + str(sumDict[item][2])) weightString = ";".join(weightList) arcpy.gp.WeightedSum_sa(weightString, outWeight) deleteInMemory() return
# ---------------------------------------------------------------------------
# Topographic-openness workflow (exported from ModelBuilder).
# Runs SAGA's openness tool on each DEM tile, then computes the normalized
# openness raster as (positive - negative) / 2.
# ---------------------------------------------------------------------------

# Local variables:
GeoTiffDEM_32 = "I:\\GeoTiffDEM_32"
Name = "MN_000_077_dem"
# BUG FIX: original read "I:\GeoTiffDEM_32" with a single backslash — "\G" is
# an invalid escape sequence (DeprecationWarning on Python 3, and inconsistent
# with every other path literal here). Escape it like the rest.
Input = "I:\\GeoTiffDEM_32"
MN_000_077_dem_tif = "I:\\GeoTiffDEM_32\\MN_000_077_dem.tif"
# %NAME% placeholders are substituted by the ModelBuilder raster iterator.
v_NAME__POS_tif = "I:\\Openness\\Temp\\%NAME%_POS.tif"
v_NAME__NEG_tif = "I:\\Openness\\Temp\\%NAME%_NEG.tif"
v_NAME__PminusN_tif = "I:\\Openness\\Temp\\%NAME%_PminusN.tif"
Input_raster_or_constant_value_2 = "2"
v_NAME__Open_tif = "I:\\Openness\\Final_output\\%NAME%_Open.tif"

# Process: Iterate Rasters (drives the %NAME% substitution above)
arcpy.IterateRasters_mb(GeoTiffDEM_32, "", "TIF", "NOT_RECURSIVE")

# Process: Topographic Openness (SAGA tool via the ArcSAGA toolbox)
arcpy.gp.toolbox = "C:/saga-6.4.0_x64/ArcSAGA Toolboxes/Terrain Analysis - Lighting, Visibility.pyt"
# Warning: the toolbox C:/saga-6.4.0_x64/ArcSAGA Toolboxes/Terrain Analysis - Lighting, Visibility.pyt DOES NOT have an alias.
# Please assign this toolbox an alias to avoid tool name collisions
# And replace arcpy.gp.tool_5(...) with arcpy.tool_5_ALIAS(...)
arcpy.gp.tool_5(MN_000_077_dem_tif, v_NAME__POS_tif, v_NAME__NEG_tif, "50",
                "sectors", "3", "8")

# Process: Minus — positive minus negative openness
arcpy.Minus_3d(v_NAME__POS_tif, v_NAME__NEG_tif, v_NAME__PminusN_tif)

# Process: Divide — normalize the difference by 2
arcpy.Divide_3d(v_NAME__PminusN_tif, Input_raster_or_constant_value_2,
                v_NAME__Open_tif)
#
def add_minimum_height_above_water_surface(lc_ws, lc_input_features, lc_bridge_raster, lc_input_surface, lc_memory_switch):
    """Attach the minimum height-above-surface value to each bridge feature.

    Subtracts *lc_input_surface* from *lc_bridge_raster*, takes the per-feature
    MINIMUM of the difference raster (zonal statistics), joins that minimum back
    onto *lc_input_features*, and stores it rounded to 2 decimals in the module
    field named by ``has_field``.

    Args:
        lc_ws: Workspace used for scratch datasets.
        lc_input_features: Bridge feature class to annotate (modified in place).
        lc_bridge_raster: Raster of bridge-deck elevations.
        lc_input_surface: Surface raster to measure heights against.
        lc_memory_switch: Truthy -> scratch raster is created in ``in_memory``.
    """
    try:
        if arcpy.Exists(lc_input_features):
            # Scratch raster for (bridge - surface): heights above the surface.
            if lc_memory_switch:
                minus_raster = "in_memory/minus_3D"
            else:
                minus_raster = os.path.join(lc_ws, "minus_3D")

            if arcpy.Exists(minus_raster):
                arcpy.Delete_management(minus_raster)

            msg_body = create_msg_body(
                "Finding minimum distance between surfaces for each input feature...",
                0, 0)
            msg(msg_body)

            # Actual subtract: bridge deck minus underlying surface.
            arcpy.Minus_3d(lc_bridge_raster, lc_input_surface, minus_raster)

            # Zonal stats to find the minimum height above the surface
            # for each input feature.
            heights_table = os.path.join(lc_ws, "heightsTable")
            if arcpy.Exists(heights_table):
                arcpy.Delete_management(heights_table)

            stat_type = "MINIMUM"
            arcpy.AddMessage(
                "Calculating Height Statistics Information for " +
                common_lib.get_name_from_feature_class(lc_input_features) + ".")
            arcpy.sa.ZonalStatisticsAsTable(lc_input_features, esri_featureID,
                                            minus_raster, heights_table,
                                            "DATA", stat_type)

            # Join the minimum back onto the bridge objects; drop any stale
            # copy of the field first so JoinField doesn't duplicate it.
            common_lib.delete_fields(lc_input_features, [min_field])
            arcpy.JoinField_management(lc_input_features, esri_featureID,
                                       heights_table, esri_featureID, min_field)

            # Add Z information (zmin) for each feature.
            arcpy.AddZInformation_3d(lc_input_features, zmin_field, None)

            # Calculate the rounded height-above-surface attribute.
            common_lib.add_field(lc_input_features, has_field, "DOUBLE", 5)
            expression = "round(float(!" + min_field + "!), 2)"
            arcpy.CalculateField_management(lc_input_features, has_field,
                                            expression, "PYTHON3", None)
        else:
            msg_body = create_msg_body(
                "Couldn't find input feature class: " + str(lc_input_features),
                0, 0)
            msg(msg_body, WARNING)
    except arcpy.ExecuteError:
        # Geoprocessing tool failure: surface the tool's own error messages.
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
    except Exception as e:
        # BUG FIX: the original re-fetched the exception via sys.exc_info()[1]
        # and indexed e.args[0], which itself raises IndexError for exceptions
        # constructed without arguments. str(e) is always safe.
        arcpy.AddMessage("Unhandled exception: " + str(e))
print "Importing LAS to " + ground_feature_dataset + " ..." out_fclass = output_gdb_location + "\\" + ground_feature_dataset + "\\g_" + shortname arcpy.LASToMultipoint_3d(lasfile, out_fclass, cell_size, "2", "ANY_RETURNS", "", "", "las") print "Interpolating " + out_fclass + " ..." outgrid = output_gdb_location + "\\DTM_" if arcpy.Exists(out_fclass): arcpy.gp.NaturalNeighbor_sa(out_fclass, "Shape.Z", outgrid, cell_size) print "Creating a nDSM ....." outgrid = output_gdb_location + "\\nDSM_" + shortname grid1 = output_gdb_location + "\\DSM_" grid2 = output_gdb_location + "\\DTM_" if arcpy.Exists(grid1): if arcpy.Exists(grid2): arcpy.Minus_3d(grid1, grid2, outgrid) print "Exporting TIFF ...." if arcpy.Exists(outgrid): arcpy.RasterToOtherFormat_conversion(outgrid, tiff_folder, "TIFF") except Exception as ex: print(ex.message) print 'END ' + time.ctime() seconds = time.time() - start_time m, s = divmod(seconds, 60) h, m = divmod(m, 60) print "Time elapsed: " + "%d:%02d:%02d" % (h, m, s)
if n == len(data) - 1: startRAS = startdate # Get YYYYMMDD startRASm = locras + rastype + startRAS # Set location of merged raster endRAS = enddate endRASm = locras + rastype + endRAS else: startRAS = data[n] startRASm = locras + rastype + startRAS endRAS = data[n + 1] endRASm = locras + rastype + endRAS RASdifname = endRAS[:6] + "_" + startRAS[: 6] # Set string for difference raster RASdif = locdif + RASdifname # Set location for difference raster difs.append(RASdifname) arcpy.Minus_3d(endRASm, startRASm, RASdif) # Output difference print RASdifname + ' difference raster created' #Create data table print difs voltab = loctab + voltabname arcpy.CreateTable_management(loctab, voltabname) arcpy.AddField_management(voltab, "COMP", "SHORT") arcpy.AddField_management(voltab, "DURATION", "TEXT") arcpy.AddField_management(voltab, "Deposition", "Double") arcpy.AddField_management(voltab, "Erosion", "Double") arcpy.AddField_management(voltab, "Volume", "Double") arcpy.DeleteField_management(voltab, "FIELD1") arcpy.DeleteField_management(voltab, "OBJECTID") #Calculate differences
def _points_to_filled_raster(points, out_name, assignment_type):
    """Rasterize *points* into the geodatabase, filling NoData gaps.

    Runs PointToRaster with the given cell-assignment rule, then replaces any
    NoData cells with the mean of their 3x3 neighbourhood (a simple gap fill),
    saves the result as *out_name* in ``gdb``, and returns its path.
    Relies on script-level ``gdb``, ``Val``, ``priorityField``, ``cellSize``.
    """
    pre_raster = os.path.join(gdb, 'pre' + out_name)
    print(pre_raster)
    arcpy.PointToRaster_conversion(points, Val, pre_raster, assignment_type,
                                   priorityField, cellSize)
    # Where rasterization left NoData, substitute the 3x3 focal mean;
    # elsewhere keep the original cell value.
    filled = Con(
        IsNull(pre_raster),
        FocalStatistics(pre_raster, NbrRectangle(3, 3, "CELL"), "MEAN", "DATA"),
        pre_raster)
    out_path = os.path.join(gdb, out_name)
    print(out_path)
    filled.save(out_path)
    return out_path

## Create DEM (bare earth) from ground points.
# NOTE: the ground/canopy branches were previously duplicated inline; they
# now share _points_to_filled_raster above.
DEM = _points_to_filled_raster(GrdPts, 'DEM', assignmentType)

## Create DSM-canopy: keep the highest vegetation return per cell.
assignmentType = "MAXIMUM"
DSM = _points_to_filled_raster(VegPts, 'DSM', assignmentType)

## Get tree height raster: canopy height model = DSM - DEM.
canopyHgt = os.path.join(gdb, 'CanopyHeight')
arcpy.Minus_3d(DSM, DEM, canopyHgt)
## END
rasterJsl = arcpy.RasterToFloat_conversion(jslFullName)#叶面积的tif文件转成float以便计算 rasterYmj = arcpy.RasterToFloat_conversion(ymjFullName) print "开始计算" arcpy.Times_3d(0.17,rasterJsl,"F:/temp/1.tif") raster1 = arcpy.RasterToFloat_conversion("F:/temp/1.tif") arcpy.Times_3d(0.35,rasterYmj,"F:/temp/2.tif") raster2 = arcpy.RasterToFloat_conversion("F:/temp/2.tif") arcpy.Times_3d(0.35,rasterYmj,"F:/temp/3.tif") raster3 = arcpy.RasterToFloat_conversion("F:/temp/3.tif") arcpy.Divide_3d(rasterJsl,raster3,"F:/temp/4.tif") raster4 = arcpy.RasterToFloat_conversion("F:/temp/4.tif") arcpy.Times_3d(-1,raster4,"F:/temp/5.tif") raster5 = arcpy.RasterToFloat_conversion("F:/temp/5.tif") expData = arcpy.sa.Exp(raster5) expData.save("F:/temp/6.tif") raster6 = arcpy.RasterToFloat_conversion("F:/temp/6.tif") arcpy.Minus_3d(1,raster6,"F:/temp/7.tif") raster7 = arcpy.RasterToFloat_conversion("F:/temp/7.tif") arcpy.Times_3d(raster2,raster7,"F:/temp/8.tif") raster8 = arcpy.RasterToFloat_conversion("F:/temp/8.tif") arcpy.Plus_3d(raster8,raster1,"F:/temp/9.tif") if (os.path.exists("F:/out/zbzl"+name[:6]+".tif")):#判断输出文件是否存在 os.remove("F:/out/zbzl"+name[:6]+".tif")#存在则删除 shutil.copyfile("F:/temp/9.tif", "F:/out/zbzl"+name[:6]+".tif")#吧最后的计算结果复制到输出文件夹 print "计算好了" del expData#删除变量引用 del raster7#删除变量引用 shutil.rmtree("F:/temp")#请控零时文件夹 if __name__ == '__main__': pass