def filter_evi(land_cover_ras):
    # Use a raw string so backslashes in the Windows path are not treated as escapes.
    evi_dir = r"G:\evi_extract"
    evi_files = glob.glob(os.path.join(evi_dir, "*.tif"))
    for evi_file in evi_files:
        print("Extracting {} by mask".format(evi_file))
        filtered_evi = ExtractByMask(evi_file, land_cover_ras)
        filtered_evi.save(os.path.join(env.workspace, "evis_for_41", os.path.basename(evi_file)))
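# filter_evi above assumes module-level imports, an active Spatial Analyst license, and a
# folder workspace that contains an "evis_for_41" subfolder. A minimal setup sketch, with a
# placeholder workspace path, might look like this:
import glob
import os

import arcpy
from arcpy import env
from arcpy.sa import ExtractByMask

arcpy.CheckOutExtension("Spatial")   # ExtractByMask requires Spatial Analyst
env.workspace = r"G:\evi_workspace"  # placeholder; must contain an "evis_for_41" folder
env.overwriteOutput = True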
def clip_to_shape(rasterlist, shapefile, outdir=False):
    """
    Simple batch clipping script to clip rasters to shapefiles.

    :param rasterlist: single file, list of files, or directory for which to clip rasters
    :param shapefile:  shapefile to which rasters will be clipped
    :param outdir:     desired output directory. If no output directory is specified,
                       the new files will simply have '_c' added as a suffix.
    :return output_filelist: list of files created by this function.
    """

    rasterlist = enf_rastlist(rasterlist)
    output_filelist = []

    # ensure the output directory exists
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    for raster in rasterlist:
        # create output filename with "c" suffix
        outname = core.create_outname(outdir, raster, 'c')

        # perform a double clip: first with Clip_management (preserves NoData values),
        # then with the arcpy.sa module, which honors the clipping geometry unlike the management tool.
        arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry")
        out = ExtractByMask(outname, shapefile)
        out.save(outname)

        output_filelist.append(outname)
        print("Clipped and saved: {0}".format(outname))

    return output_filelist
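# A hedged usage sketch for clip_to_shape above; the paths are placeholders, and the
# enf_rastlist / core.create_outname helpers come from the snippet's own (unshown) module.
rasters = r"C:\data\modis_tiles"        # a directory of rasters
boundary = r"C:\data\study_area.shp"    # clipping shapefile
clipped = clip_to_shape(rasters, boundary, outdir=r"C:\data\modis_clipped")
print("{0} rasters clipped".format(len(clipped)))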
def rasterPercentAreas(feature, featureID, inRaster, uniqueRasterIDfield,
                       rasterValueField, fieldPrefix):
    try:
        # create results obj
        results = {}

        # define land use key/value dictionary with all possible values
        for row in arcpy.da.SearchCursor(inRaster, uniqueRasterIDfield):
            results[fieldPrefix + str(row[0])] = 0

        # mask raster
        outExtractByMask = ExtractByMask(inRaster, feature)
        outExtractByMask.save('in_memory/mask.img')

        # get total cell count for percent-area computation
        field = arcpy.da.TableToNumPyArray('in_memory/mask.img', rasterValueField, skip_nulls=True)
        total_count = field[rasterValueField].sum()

        # loop over masked raster rows
        for row in arcpy.da.SearchCursor('in_memory/mask.img',
                                         [uniqueRasterIDfield, rasterValueField]):
            # get values
            value, count = row
            percentArea = round((float(count) / total_count) * 100, 5)
            results[fieldPrefix + str(value)] = percentArea

        data = ResultObj(featureID, results)
        return data

    except:
        # format_exc is expected from "from traceback import format_exc" at module level
        tb = format_exc().split('\n')
        raise Exception(tb)
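# ResultObj is not defined in the snippet above (it is also used by testStatistics further
# down); a minimal sketch of what such a container might look like, purely as an assumption,
# is:
class ResultObj(object):
    """Hypothetical result holder pairing a feature ID with its computed statistics."""
    def __init__(self, featureID, results):
        self.featureID = featureID
        self.results = results  # e.g. {"LU_11": 12.5, "LU_21": 87.5}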
def clip_to_shape(rasterlist, shapefile, outdir=False):
    """
    Simple batch clipping script to clip rasters to shapefiles.

    Inputs:
        rasterlist      single file, list of files, or directory for which to clip rasters
        shapefile       shapefile to which rasters will be clipped
        outdir          desired output directory. If no output directory is specified,
                        the new files will simply have '_c' added as a suffix.
    """

    rasterlist = enf_rastlist(rasterlist)

    # ensure the output directory exists
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    for raster in rasterlist:
        # create output filename with "c" suffix
        outname = core.create_outname(outdir, raster, 'c')

        # perform a double clip: first with Clip_management (preserves NoData values),
        # then with the arcpy.sa module, which honors the clipping geometry unlike the management tool.
        arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry")
        out = ExtractByMask(outname, shapefile)
        out.save(outname)

        print("Clipped and saved: {0}".format(outname))

    return
def extract_by_mask(in_raster, in_mask, output_path):
    path_check(output_path)
    if os.path.exists(output_path):
        print("{} has already been created before".format(output_path))
        return
    print("Extracting {} by mask {}".format(output_path, in_mask))
    temp_output = ExtractByMask(in_raster, in_mask)
    temp_output.save(output_path)
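# path_check is an unshown helper; a plausible sketch, assuming it only makes sure the
# parent directory of the output exists, would be:
def path_check(output_path):
    """Hypothetical helper: create the parent directory of output_path if it is missing."""
    parent = os.path.dirname(output_path)
    if parent and not os.path.exists(parent):
        os.makedirs(parent)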
def delresident(inpath, outpath):
    env.workspace = inpath
    f = os.listdir(inpath)
    for name in f:
        if name.endswith('.tif'):
            outfile = os.path.join(outpath, name)
            outExtractByMask = ExtractByMask(os.path.join(inpath, name),
                                             r"F:\EULUC\city_noresident.tif")
            outExtractByMask.save(outfile)
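# Example call, with hypothetical folder names: every GeoTIFF in the first folder is
# masked by the fixed F:\EULUC\city_noresident.tif raster and saved to the second folder.
delresident(r"F:\EULUC\luc_tiles", r"F:\EULUC\luc_tiles_masked")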
def evi_raster_shrink(evi_raster_path, feature_points_path, radius):
    if not os.path.exists(os.path.join(env.workspace, "feature_points_buffer.shp")):
        arcpy.Buffer_analysis(feature_points_path, "feature_points_buffer", radius,
                              dissolve_option="ALL")
    out_raster = ExtractByMask(evi_raster_path, "feature_points_buffer.shp")
    out_raster.save("evi_raster_shrink.tif")
    return out_raster
def recortar(entidadRecorte, lista_capas_gdb, save_path):
    """
    Clip layers using the clip feature (entidadRecorte).

    lista_capas_gdb: list of layers to clip
    save_path: where to save the output
    """
    for capa in lista_capas_gdb:
        inRaster = capa
        inMaskData = entidadRecorte
        outExtractByMask = ExtractByMask(inRaster, inMaskData)
        # ruta_geodb_trabajo is a module-level working geodatabase path;
        # with a single save_path every layer writes to the same output dataset.
        outExtractByMask.save(os.path.join(ruta_geodb_trabajo, save_path))
        if debug:
            print("clipping {0}".format(capa))
            print("saving to {0}".format(save_path))
def batch_clip_raster(inRaster, inMaskData, savepath):
    # Check out the ArcGIS Spatial Analyst extension license
    arcpy.CheckOutExtension("Spatial")
    cursor = arcpy.SearchCursor(inMaskData)
    for row in cursor:
        mask = row.getValue("Shape")
        name = row.getValue("name")  # name the output by the "name" field
        print(name.encode('gbk'))
        outExtractByMask = ExtractByMask(inRaster, mask)
        outExtractByMask.save(os.path.join(savepath, name.encode('gbk') + '.tif'))
        print(os.path.join(savepath, name.encode('gbk') + '.tif'))
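# The classic arcpy.SearchCursor above dates from the Python 2 API. The sketch below shows
# the same per-feature masking with the newer arcpy.da.SearchCursor, using the SHAPE@
# geometry token and writing the Unicode name directly rather than encoding to GBK; it is
# an illustrative alternative, not part of the original script.
def batch_clip_raster_da(inRaster, inMaskData, savepath):
    arcpy.CheckOutExtension("Spatial")
    with arcpy.da.SearchCursor(inMaskData, ["SHAPE@", "name"]) as cursor:
        for mask, name in cursor:
            out = ExtractByMask(inRaster, mask)
            out_tif = os.path.join(savepath, u"{0}.tif".format(name))
            out.save(out_tif)
            print(out_tif)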
def CreateMapStratum(mnh, emprise, TextRemap, idfield, outputFC, geodata):
    # Set the working environment
    arcpy.env.workspace = geodata
    arcpy.env.overwriteOutput = True
    disp = arcpy.AddMessage

    # Extract by mask
    disp("Extracting MNH ...")
    pathOutExtract = os.path.join(geodata, "OutExtract")
    OutExtract = ExtractByMask(mnh, emprise)
    OutExtract.save(pathOutExtract)

    # Reclassify
    disp("Reclassifying ...")
    remap = TextToRemap(TextRemap)
    pathOutReclass = os.path.join(geodata, "Reclass")
    OutReclass = Reclassify(pathOutExtract, "Value", remap)
    OutReclass.save(pathOutReclass)

    # Convert to polygons
    disp("Vectorizing ...")
    arcpy.RasterToPolygon_conversion(pathOutReclass, "PolyRaster")

    # Clip
    arcpy.Clip_analysis("PolyRaster", emprise, "PolyRaster_Clip")

    # Identity
    arcpy.Identity_analysis("PolyRaster_Clip", emprise, "PolyRaster_Ident")

    # Dissolve
    disp("Dissolving ...")
    dissolveFields = [idfield, "grid_code"]
    arcpy.Dissolve_management("PolyRaster_Ident", outputFC, dissolveFields)

    # Copy grid_code into a "Strate" field, then delete grid_code
    arcpy.AddField_management(outputFC, "Strate", "SHORT")
    arcpy.CalculateField_management(outputFC, "Strate", "!grid_code!", "PYTHON")
    lfields = arcpy.ListFields(outputFC)
    arcpy.DeleteField_management(outputFC, "grid_code")

    # Return the result
    return outputFC
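# TextToRemap is not shown in the snippet above (Reclassify is the standard arcpy.sa tool).
# The sketch below is only a guess at such a helper, parsing lines of "min max new_value"
# into an arcpy.sa.RemapRange.
from arcpy.sa import RemapRange

def TextToRemap(text_remap):
    """Hypothetical parser: each line of text_remap reads 'min max new_value'."""
    ranges = []
    for line in text_remap.splitlines():
        parts = line.split()
        if len(parts) == 3:
            ranges.append([float(parts[0]), float(parts[1]), int(parts[2])])
    return RemapRange(ranges)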
def testStatistics(feature, featureID, calculateStat):
    str900_all = "D:\\StreamStats\\ny_strgrid\\str900_all"
    ned10sl = "D:\\ned10\\output\\ned10sl_utm.img"

    # create results obj
    results = {}

    #arcpy.CopyFeatures_management(feature, "C:\\NYBackup\\NYFF2016\\LGSS_sites\\output\\sh_" + featureID + ".shp")

    # Process: Extract by Mask
    strExtractByMask = ExtractByMask(str900_all, feature)
    strExtractByMask.save("C:\\NYBackup\\NYFF2016\\LGSS_sites\\output\\str_" + featureID + ".img")

    slExtractByMask = ExtractByMask(ned10sl, strExtractByMask)
    slExtractByMask.save("C:\\NYBackup\\NYFF2016\\LGSS_sites\\output\\sl_" + featureID + ".img")

    value = arcpy.GetRasterProperties_management(slExtractByMask, calculateStat).getOutput(0)
    value = round(float(value), 5)

    results[calculateStat] = value
    data = ResultObj(featureID, results)
    return data
def glacier_debris(band_4, band_5, glacier_outline, out_dir):
    print('Running glacier_debris')
    if Want_CloudRemoval == 'True':
        mask_shp = (mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
                    band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
        outExtractByMask = ExtractByMask(band_4, mask_shp)
        outExtractByMask.save('del_nodatagone4.TIF')
        outExtractByMask = ExtractByMask(band_5, mask_shp)
        outExtractByMask.save('del_nodatagone5.TIF')
        outExtractByMask = ExtractByMask('del_nodatagone4.TIF', glacier_outline)
        outExtractByMask.save('del_mask4.TIF')
        outExtractByMask = ExtractByMask('del_nodatagone5.TIF', glacier_outline)
        outExtractByMask.save('del_mask5.TIF')
        print('extract')
    else:
        outExtractByMask = ExtractByMask(band_4, glacier_outline)
        outExtractByMask.save('del_mask4.TIF')
        outExtractByMask = ExtractByMask(band_5, glacier_outline)
        outExtractByMask.save('del_mask5.TIF')
        print('extract')

    # Convert rasters to float for decimal threshold values
    arcpy.RasterToFloat_conversion('del_mask4.TIF', 'del_band_4a.flt')
    arcpy.RasterToFloat_conversion('del_mask5.TIF', 'del_band_5a.flt')
    arcpy.Divide_3d('del_band_4a.flt', 'del_band_5a.flt', 'del_division.TIF')
    print('division')

    outSetNull = SetNull('del_division.TIF', 'del_division.TIF', 'VALUE > ' + str(threshold))

    # Path to the results folder; the checks below add a counter if images are from the same year and day
    result_name = (glacier_outline.split('.shp')[0].split('\\')[-1] + '_' +
                   band_4.split('\\')[-1][9:13] + 'y' + band_4.split('\\')[-1][13:16] + 'd' +
                   '_L' + band_4.split('\\')[-1][2:3] + '_' +
                   Lband.split('_')[-1][1:2] + Hband.split('_')[-1][1:2] + 'b' +
                   str(int(threshold * 100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f')
    result_path = out_dir + result_name

    if result_name + '1.shp' in os.listdir(out_dir):
        result_path = result_path + '2'
    elif result_name + '2.shp' in os.listdir(out_dir):
        result_path = result_path + '3'
    elif result_name + '3.shp' in os.listdir(out_dir):
        result_path = result_path + '4'
    elif result_name + '4.shp' in os.listdir(out_dir):
        result_path = result_path + '5'
    elif result_name + '5.shp' in os.listdir(out_dir):
        result_path = result_path + '6'
    else:
        result_path = result_path + '1'

    result_file = result_path + '.TIF'
    print('result file: ' + result_file)
    outSetNull.save(result_file)
    print('Level 1 product produced')

    # Float raster to integer
    outInt = Int(result_file)
    outInt.save('del_result_file_int.TIF')

    # Set local variables
    inRaster = 'del_result_file_int.TIF'
    outPolygons = 'del_debris.shp'
    field = 'VALUE'
    arcpy.RasterToPolygon_conversion(inRaster, outPolygons, 'NO_SIMPLIFY', field)
    print('to polygon')

    # Process: Dissolve. Need to create a "value" field where all elements = 0
    arcpy.AddField_management('del_debris.shp', 'value', 'SHORT', 1, '', '', '', '', '')
    arcpy.Dissolve_management('del_debris.shp', 'del_debris_dissolve.shp', 'value')
    print('dissolve')

    # Run the tool to create a new fc with only singlepart features
    arcpy.MultipartToSinglepart_management('del_debris_dissolve.shp', 'del_explode.shp')
    print('explode')

    # Process: Calculate polygon area (m2)
    arcpy.CalculateAreas_stats('del_explode.shp', 'del_area.shp')
    arcpy.MakeFeatureLayer_management('del_area.shp', 'tempLayer')

    # Execute SelectLayerByAttribute to determine which features to delete
    expression = 'F_AREA <=' + str(A_remove)  # m2
    arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
    arcpy.DeleteFeatures_management('tempLayer')
    print('Shapes with an area <= ' + str(A_remove) + ' m2 removed; ' +
          str(A_remove / 900) + ' pixels, if 30 m pixels')

    # Delete the temporary layer
    arcpy.Delete_management('tempLayer')
    print('tempLayer deleted')

    result_file2 = result_path + '.shp'
    print('Level 2 result file: ' + result_file2)

    # Process: aggregate (distance = 1 m, minimum area = 0, minimum hole size = A_fill m2)
    CA.AggregatePolygons('del_area.shp', result_file2, 1, 0, A_fill, 'NON_ORTHOGONAL')
    print('holes with an area <= ' + str(A_fill) + ' m2 filled/merged with debris polygon; ' +
          str(A_fill / 900) + ' pixels, if 30 m pixels')

    rasterList = arcpy.ListRasters('*del*')
    for raster in rasterList:
        arcpy.Delete_management(raster)
    fcList = arcpy.ListFeatureClasses('*del*')
    for fc in fcList:
        arcpy.Delete_management(fc)
    print('intermediate files deleted')
    print('level 2 product produced')
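# glacier_debris above relies on several module-level names that are not part of the
# function (Want_CloudRemoval, mask_dir, threshold, Lband, Hband, A_remove, A_fill, and
# CA for arcpy.cartography). The values below are placeholders sketching that assumed
# setup, not the original configuration.
import os

import arcpy
from arcpy import env
from arcpy.sa import ExtractByMask, SetNull, Int
import arcpy.cartography as CA

arcpy.CheckOutExtension("Spatial")
env.workspace = r"C:\glacier_work"            # placeholder scratch workspace
env.overwriteOutput = True

Want_CloudRemoval = 'True'                    # string flag, as in the original
mask_dir = r"C:\glacier_work\cloud_masks"     # per-scene cloud mask shapefiles
threshold = 2.0                               # band4/band5 ratio threshold (placeholder)
A_remove = 8100                               # m2 of small debris polygons to drop (placeholder)
A_fill = 8100                                 # m2 of holes to fill (placeholder)
Lband = 'LC08_b4'                             # only name fragments are used for output naming
Hband = 'LC08_b5'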
def VegetationHeightProfil(emprise, mnh, bornes, OutputFc, idfield, geodata):
    from arcpy import env
    from arcpy.sa import ExtractByMask, Slope
    arcpy.CheckOutExtension("spatial")
    env.workspace = geodata
    env.overwriteOutput = True

    # Extract the MNH (canopy height model)
    pathExtract = os.path.join(geodata, "ExtractMNH")
    Extract_MNH = ExtractByMask(mnh, emprise)
    Extract_MNH.save(pathExtract)

    # Compute the slope
    pathSlope = os.path.join(geodata, "SlopeMNH")
    slope_mnh = Slope(pathExtract, "DEGREE")
    slope_mnh.save(pathSlope)

    # Convert the raster to points
    arcpy.RasterToPoint_conversion(slope_mnh, "Slope", "Value")

    # Spatial join: compute mean and standard deviation
    fmap = arcpy.FieldMappings()
    fmap.addTable(emprise)
    fmap.addTable("Slope")

    # Create fieldmap for Mean
    fldMean = arcpy.FieldMap()
    fldMean.addInputField("Slope", "grid_code")
    fMean = fldMean.outputField
    fMean.name = "Mean"
    fMean.aliasName = "Mean"
    fldMean.outputField = fMean
    fldMean.mergeRule = "Mean"
    fmap.addFieldMap(fldMean)

    # Create fieldmap for StdDev
    fldEcartype = arcpy.FieldMap()
    fldEcartype.addInputField("Slope", "grid_code")
    fEcartype = fldEcartype.outputField
    fEcartype.name = "Stdv"
    fEcartype.aliasName = "Stdv"
    fldEcartype.outputField = fEcartype
    fldEcartype.mergeRule = "StdDev"
    fmap.addFieldMap(fldEcartype)

    # Perform the spatial join
    arcpy.SpatialJoin_analysis(emprise, "Slope", OutputFc, "", "", fmap)

    # Create a field
    arcpy.AddField_management(OutputFc, "Prof_Typ", "TEXT")

    # Delete unneeded fields
    for fld in arcpy.ListFields(OutputFc):
        if fld.name not in [idfield, "Stdv", "Mean", "Prof_Typ"]:
            try:
                arcpy.DeleteField_management(OutputFc, fld.name)
            except:
                pass

    # Evaluate the slope against the thresholds (elif keeps the three classes mutually exclusive)
    b1 = bornes[0]
    b2 = bornes[1]
    Code_bloc = """def Eval(Moyenne, EcarType):
    if Moyenne > """ + str(b2) + """ and EcarType < """ + str(b1) + """:
        ProfilType = "Asc/Desc_Continue"
    elif Moyenne < """ + str(b2) + """ and EcarType < """ + str(b1) + """:
        ProfilType = "Plat"
    else:
        ProfilType = "Hétérogène"
    return ProfilType
"""
    expression = "Eval(!Mean!,!Stdv!)"

    # Calculate the Prof_Typ field
    arcpy.CalculateField_management(OutputFc, "Prof_Typ", expression, "PYTHON_9.3", Code_bloc)

    # Return the result
    return OutputFc
def IceCliffLocation(workspace,dem,tileDebarea,pixel,skinny,minSlope,n_iterations,L_e,alpha,beta_e,A_min,phi,gamma): import sys import os import arcpy from arcpy import env from arcpy.sa import Slope, ExtractByMask, Raster, SetNull, Int import matplotlib.pyplot as plt import numpy as np from numpy import array from scipy.optimize import curve_fit env.overwriteOutput = True try: import arcinfo except: sys.exit("ArcInfo license not available") arcpy.AddMessage("ArcInfo license not available") if arcpy.CheckExtension("spatial") == "Available": arcpy.CheckOutExtension("spatial") else: sys.exit("Spatial Analyst license not available") arcpy.AddMessage("Spatial Analyst license not available") #Parameters that should be stable: slopeLimit = 90 # slope detection capped at this value ## Loop for optimizing slope if str(workspace.split("\\")[-1]) == 'Final': n = [] n.append(minSlope) else: minSlope = 0 n = np.arange(minSlope,slopeLimit,(slopeLimit-minSlope)/n_iterations) skipIteration = [] for minSlope in n: # check for existing iterations if code has previously run but crashed. if arcpy.ListFeatureClasses("*cliffMap*"): fcListPrior = arcpy.ListFeatureClasses("*cliffMap*") skipIteration = [] for prior_i in fcListPrior: if int(prior_i[14:16]) == int("%02d" % (int(minSlope),)): skipIteration = 1 if skipIteration == 1: continue ## Ice Cliff code if skinny == 'false': print 'IceCliffLocation script started...' if skinny == 'true': print 'skinny IceCliffLocation script started...' # Parameter that probably should be 0 minProb = 0 # probability associated with minSlope. arcpy.CopyFeatures_management(tileDebarea, workspace+"\\del_debarea.shp") debarea_iteration = workspace+"\\del_debarea.shp" arcpy.env.snapRaster = dem outExtractSlope = ExtractByMask(dem, debarea_iteration) outExtractSlope.save("dem_extract.TIF") if int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))) == pixel: dem = "dem_extract.TIF" else: arcpy.Resample_management("dem_extract.TIF", "dem_extractResample.TIF", pixel, "NEAREST") arcpy.env.snapRaster = dem print "DEM resampeld from "+str(int(round(float(str(arcpy.GetRasterProperties_management(dem, "CELLSIZEX"))))))+' to '+str(pixel) dem = "dem_extractResample.TIF" # Create slope raster outSlope = Slope(dem, "DEGREE", 1) outSlope.save("del_slope.TIF") # Isolate slope values above minSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(minSlope)) outSetNull.save("del_minSlope.TIF") # Exit process if no cliffs exist nocliff = arcpy.GetRasterProperties_management(Int("del_minSlope.TIF"), "ALLNODATA") if int(str(nocliff)) == 1: print "No area with a slope above "+str(minSlope)+"." elif float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management('del_minSlope.TIF',"MINIMUM"))) == 0: print "Only one pixel with a slope above "+str(minSlope)+", iteration skipped." else: minMean = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "MEAN"))) minSD = float(str(arcpy.GetRasterProperties_management("del_minSlope.TIF", "STD"))) areaSlope = minMean print 'areaSlope = ' + str(areaSlope) # Isolate slope values above areaSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(areaSlope)) outSetNull.save("del_areaSlope.TIF") arcpy.env.snapRaster = dem # Exit process if no cliffs exist nocliff = arcpy.GetRasterProperties_management(Int("del_areaSlope.TIF"), "ALLNODATA") if int(str(nocliff)) == 1: print "No area with a slope above "+str(areaSlope)+"." 
elif float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MAXIMUM"))) - float(str(arcpy.GetRasterProperties_management("del_areaSlope.TIF","MINIMUM"))) == 0: print "Only one pixel with a slope above "+str(areaSlope)+", iteration skipped." else: seedSlope = minMean+minSD print 'seedSlope = ' + str(seedSlope) # Isolate slope values above areaSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope)) outSetNull.save("del_seedSlope.TIF") # Exit process if no cliffs exist nocliff = arcpy.GetRasterProperties_management(Int("del_seedSlope.TIF"), "ALLNODATA") if int(str(nocliff)) == 1: print "No seed area with a slope above "+str(seedSlope)+"." else: # to int speeds up computation time outInt = Int("del_areaSlope.TIF") outInt.save("del_minSlopeInt.TIF") outInt = Int("del_seedSlope.TIF") outInt.save("del_seedSlopeInt.TIF") arcpy.RasterToPolygon_conversion("del_minSlopeInt.TIF", "del_minCliffSlope.shp", "NO_SIMPLIFY", "VALUE") arcpy.AddField_management("del_minCliffSlope.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_minCliffSlope.shp", "del_minCliff_dissolve.shp", "value") arcpy.MultipartToSinglepart_management("del_minCliff_dissolve.shp", "del_minCliff_explode.shp") arcpy.AddField_management("del_minCliff_explode.shp",'Area','FLOAT') rows = arcpy.UpdateCursor("del_minCliff_explode.shp") for row in rows: areacliff = row.shape.area row.Area = areacliff rows.updateRow(row) del row, rows arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"_CliffArea.shp") # skinny/non-skinny fix for ending iteration. 0 = no skip, 1 = skip skip_iter = 0 # skinny ice cliffs, does not include ice cliff end extension to speed up computations if skinny == 'true': if arcpy.management.GetCount("del_minCliff_explode.shp")[0] == "0": skip_iter = 1 print "No area within del_minCliff_explode.shp, skinny iteration skipped." else: # "_FinalCliffShape.shp" and "_cliffArea.shp" are the same if skinny == true arcpy.CopyFeatures_management("del_minCliff_explode.shp", "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp") # copy working .shp, used below arcpy.CopyFeatures_management('del_minCliff_explode.shp', 'del_lineAndArea_area.shp') arcpy.CalculateAreas_stats('del_minCliff_explode.shp', 'del_lineAndArea_area.shp') arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer') expression = 'F_AREA <=' + str((pixel**2)*A_min) arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression) arcpy.DeleteFeatures_management('tempLayer') arcpy.Delete_management('tempLayer') if skinny == 'false': # buffer in/out area to break up attached features arcpy.Buffer_analysis("del_minCliff_explode.shp", "del_extendLineBuffer.shp", (pixel/2)-0.1, "FULL", "ROUND", "NONE") # Generate ice cliff centerlines from Voronoi cells if arcpy.management.GetCount("del_extendLineBuffer.shp")[0] == "0": arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp") skip_iter = 1 print "No area within the criteria defined by seed area value "+str(seedSlope)+", iteration stopped before centerlines." 
else: arcpy.FeatureToLine_management("del_extendLineBuffer.shp","del_line.shp","","ATTRIBUTES") arcpy.Densify_edit("del_line.shp", "","5", "", "") arcpy.FeatureVerticesToPoints_management ("del_line.shp", "del_verti.shp", "ALL") arcpy.CreateThiessenPolygons_analysis("del_verti.shp","del_voronoiCells.shp" ,"ONLY_FID") arcpy.RepairGeometry_management("del_voronoiCells.shp") #use geodatabase here due to unexpected error: "Invalid Topology [Duplicate segment.]" arcpy.CreateFileGDB_management(workspace, "fGDB.gdb") fgdb = workspace+"\\fGDB.gdb" #arcpy.env.workspace = fgdb arcpy.Clip_analysis(workspace+"\\del_voronoiCells.shp", workspace+"\\del_extendLineBuffer.shp", fgdb+"\\shp","") arcpy.FeatureToLine_management(fgdb+"\\shp", workspace+"\\del_toLine.shp", "", attributes="ATTRIBUTES") arcpy.Delete_management(fgdb) #arcpy.env.workspace = workspace #arcpy.FeatureToLine_management("del_voronoiCellsClip.shp","del_toLine.shp", "", attributes="ATTRIBUTES") arcpy.MakeFeatureLayer_management("del_toLine.shp", "tempLayer", "", "", "") arcpy.SelectLayerByLocation_management("tempLayer", "CROSSED_BY_THE_OUTLINE_OF","del_minCliff_explode.shp","","NEW_SELECTION") arcpy.DeleteFeatures_management("tempLayer") arcpy.Delete_management("tempLayer") arcpy.Intersect_analysis(["del_toLine.shp",'del_minCliff_explode.shp'],"del_lineIntersect.shp") arcpy.Dissolve_management("del_lineIntersect.shp", "del_toLineDis.shp", "", "", "SINGLE_PART", "DISSOLVE_LINES") arcpy.UnsplitLine_management("del_toLineDis.shp","del_unsplit.shp","Id") arcpy.MakeFeatureLayer_management("del_unsplit.shp", "tempLayer2", "", "", "") arcpy.SelectLayerByLocation_management("tempLayer2", "BOUNDARY_TOUCHES","del_minCliff_explode.shp","","NEW_SELECTION") arcpy.DeleteFeatures_management("tempLayer2") arcpy.Delete_management("tempLayer2") arcpy.cartography.SimplifyLine("del_unsplit.shp","del_clineSimpExp.shp","POINT_REMOVE",10) arcpy.AddField_management("del_clineSimpExp.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_clineSimpExp.shp", "del_clineSimp.shp", "value") arcpy.TrimLine_edit("del_clineSimp.shp", "8 meters", "KEEP_SHORT") arcpy.CopyFeatures_management("del_unsplit.shp", "min"+str("%02d" % (minSlope,))+"_Centerlines.shp") #refine centerline for final map if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0": arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_dissolve.shp") skip_iter = 1 print "No area big enough to generate a centerline, iteration skipped." else: # extend lines to capture cliff ends count = 0 print "Extend line started..." jlist = [(pixel/2)-0.1] * int(round(L_e/(pixel/2))) for j in jlist: #create buffer out to set the limit a line will be extended to arcpy.Buffer_analysis("del_clineSimp.shp", "del_clineSimpBuff1.shp", j, "FULL", "ROUND", "ALL") arcpy.PolygonToLine_management("del_clineSimpBuff1.shp","del_clineSimpBuff1line.shp") #merge centerline and bufferline arcpy.Merge_management(["del_clineSimp.shp","del_clineSimpBuff1line.shp"], "del_clineSimpBuff1merge_dis.shp") arcpy.Delete_management("del_clineSimp.shp") print "Extend line "+str(count)+" started..." 
arcpy.MultipartToSinglepart_management("del_clineSimpBuff1merge_dis.shp", "del_clineSimpBuff1merge.shp") arcpy.MakeFeatureLayer_management("del_clineSimpBuff1merge.shp", "lineLayer", "", "", "") arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION", "INVERT") arcpy.ExtendLine_edit("del_clineSimpBuff1merge.shp", str(j+1)+" meters", "EXTENSION") #select share a line segment with buffer to remove buffer arcpy.SelectLayerByLocation_management("lineLayer", "SHARE_A_LINE_SEGMENT_WITH", "del_clineSimpBuff1.shp", "", "NEW_SELECTION") arcpy.DeleteFeatures_management("lineLayer") arcpy.Delete_management("lineLayer") arcpy.CopyFeatures_management("del_clineSimpBuff1merge.shp", "del_clineSimp.shp") arcpy.Delete_management("del_clineSimpBuff1.shp") arcpy.Delete_management("del_clineSimpBuff1line.shp") arcpy.Delete_management("del_clineSimpBuff1merge.shp") count = count + j del j, jlist #remove last short ribs with a lenght threhold then reattach centerlines that may have been split # calculate lenght of each centerline if arcpy.management.GetCount("del_clineSimp.shp")[0] == "0": arcpy.CreateFeatureclass_management(workspace, 'del_lineAndArea_area.shp', "POLYGON","del_minCliff_explode.shp") skip_iter = 1 print "Centerline shape empty, iteration skipped." else: arcpy.AddField_management("del_clineSimp.shp",'L','FLOAT') rows = arcpy.UpdateCursor("del_clineSimp.shp") for row in rows: areacliff = row.shape.length row.L = areacliff rows.updateRow(row) del row, rows arcpy.CopyFeatures_management("del_clineSimp.shp", "min"+str("%02d" % (minSlope,))+"_extendedCenterlines.shp") # buffer out centerlines to capture end area removed in earlier buffer arcpy.Buffer_analysis("del_clineSimp.shp", "del_CliffCenterlineOut.shp", ((alpha*pixel*(2**(1/2)))/2), "FULL", "ROUND", "NONE") # define area with a slope less than that which defined "del_minCliff_dissolve.shp" edgeAreaSlope = areaSlope-beta_e print "Edge area defined by slope "+str(edgeAreaSlope) outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(edgeAreaSlope)) outSetNull.save("del_edgeSlope.TIF") outInt = Int("del_edgeSlope.TIF") outInt.save("del_edgeSlopeInt.TIF") arcpy.RasterToPolygon_conversion("del_edgeSlopeInt.TIF", "del_edgeAreaSlope.shp", "NO_SIMPLIFY", "VALUE") arcpy.AddField_management("del_edgeAreaSlope.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_edgeAreaSlope.shp", "del_edgeAreaSlope_dissolve.shp", "value") arcpy.CopyFeatures_management("del_edgeAreaSlope_dissolve.shp", "min"+str("%02d" % (minSlope,))+"_edgeArea.shp") arcpy.Intersect_analysis (["del_edgeAreaSlope_dissolve.shp", "del_CliffCenterlineOut.shp"], "del_betaF_edgeArea.shp") # merge buffered lines with buffered area arcpy.Merge_management(["del_betaF_edgeArea.shp", "del_minCliff_explode.shp"], "del_lineAndArea.shp") arcpy.AddField_management("del_lineAndArea.shp", "valueDis", "SHORT", 1, "", "", "", "", "") arcpy.Dissolve_management("del_lineAndArea.shp", "del_lineAndArea_dissolve1.shp", "valueDis") arcpy.RepairGeometry_management("del_lineAndArea_dissolve1.shp") # fill holes and remove shapes less than one pixel to avoid error from buffer tool arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolve1.shp", "del_lineAndArea_explode1.shp") arcpy.CalculateAreas_stats("del_lineAndArea_explode1.shp", 'del_lineAndArea_area1.shp') arcpy.MakeFeatureLayer_management('del_lineAndArea_area1.shp', 'tempLayer') expression = 'F_AREA <' + str(pixel**2) # m2 
arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression) arcpy.DeleteFeatures_management('tempLayer') arcpy.Delete_management('tempLayer') arcpy.cartography.AggregatePolygons('del_lineAndArea_area1.shp', "del_lineAndArea_dissolve.shp", 1, 0, pixel**2, 'NON_ORTHOGONAL') arcpy.RepairGeometry_management("del_lineAndArea_dissolve.shp") # buffer in to reomve sliver geometries and out to make a diagonal set of single pixel shapes one feature arcpy.Buffer_analysis("del_lineAndArea_dissolve.shp", "del_lineAndArea_dissolveSmallBufferIn.shp", -0.5, "FULL", "ROUND", "ALL") arcpy.Buffer_analysis("del_lineAndArea_dissolveSmallBufferIn.shp", "del_lineAndArea_dissolveSmallBuffer.shp", 1, "FULL", "ROUND", "ALL") arcpy.MultipartToSinglepart_management("del_lineAndArea_dissolveSmallBuffer.shp", "del_lineAndArea_explode.shp") arcpy.CalculateAreas_stats('del_lineAndArea_explode.shp', 'del_lineAndArea_area.shp') arcpy.MakeFeatureLayer_management('del_lineAndArea_area.shp', 'tempLayer') expression = 'F_AREA <=' + str((pixel**2)*A_min) arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression) arcpy.DeleteFeatures_management('tempLayer') arcpy.Delete_management('tempLayer') if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0": print "del_lineAndArea_area.shp empty, iteration stopped." skip_iter = 1 else: arcpy.AddField_management("del_lineAndArea_area.shp", "value", "SHORT", 1, "", "", "", "", "") arcpy.CopyFeatures_management('del_lineAndArea_area.shp', "min"+str("%02d" % (minSlope,))+"area"+str(int(areaSlope))+"_FinalCliffShape.shp") if skip_iter == 0: # CDF for values between minSlope and maxSlope outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE >= "+ str(minSlope)) outSetNull.save("del_min.TIF") arcpy.RasterToFloat_conversion("del_min.TIF", "del_min.flt") minsl = Raster('del_min.flt') slopemin = minsl*0.0 slopemin.save('del_minSl.TIF') outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE > "+ str(seedSlope)) outSetNull = SetNull(outSetNull, outSetNull, "VALUE < "+ str(minSlope)) outSetNull.save("del_mid.TIF") arcpy.RasterToFloat_conversion("del_mid.TIF", "del_mid.flt") midsl = Raster('del_mid.flt') b = (1-(((1-minProb)/(seedSlope-minSlope))*seedSlope)) slopemid = (((1-minProb)/(seedSlope-minSlope))*midsl)+b arcpy.env.snapRaster = dem slopemid.save('del_midSl.TIF') arcpy.env.snapRaster = dem outSetNull = SetNull("del_slope.TIF", "del_slope.TIF", "VALUE <= "+ str(seedSlope)) outSetNull.save("del_max.TIF") arcpy.RasterToFloat_conversion("del_max.TIF", "del_max.flt") maxsl = Raster('del_max.flt') slopemax = maxsl*0.0+1.0 arcpy.env.snapRaster = dem slopemax.save('del_maxSl.TIF') arcpy.env.snapRaster = dem arcpy.MosaicToNewRaster_management("del_minSl.TIF;del_midSl.TIF;del_maxSl.TIF", workspace, "del_cliffProbabilitySlope.TIF", "", "32_BIT_FLOAT", "", "1", "LAST","FIRST") arcpy.env.snapRaster = dem # extract cliff probability and apply reduction factor to area outside of buffer.shp if arcpy.management.GetCount("del_lineAndArea_area.shp")[0] == "0": print "del_lineAndArea_area.shp is empty, did not create: CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA" + str(int(areaSlope))+".TIF" else: outExtractSlope = ExtractByMask("del_cliffProbabilitySlope.TIF", "del_lineAndArea_area.shp") outExtractSlope.save("del_final_cliffs_found.TIF") arcpy.RasterToFloat_conversion("del_cliffProbabilitySlope.TIF", "del_CliffProbabilitySlope.flt") CliffProbabilitySlope = Raster('del_CliffProbabilitySlope.flt') 
CliffProbabilitySlopeREDUCED = CliffProbabilitySlope*phi arcpy.env.snapRaster = dem CliffProbabilitySlopeREDUCED.save('del_CliffProbabilitySlopeREDUCED.TIF') arcpy.MosaicToNewRaster_management("del_final_cliffs_found.TIF;del_CliffProbabilitySlopeREDUCED.TIF", workspace, "CliffProbability_betai" + str("%02d" % (int(minSlope),)) + "betaA" + str(int(areaSlope))+".TIF", "", "32_BIT_FLOAT", "", "1", "FIRST","FIRST") arcpy.env.snapRaster = dem del CliffProbabilitySlope del CliffProbabilitySlopeREDUCED del minsl del midsl del maxsl ## ---------------------------------- ## Compute percent cliff in total spatial domain cliff_area_sum = 0 debris_area_sum = 0 Perc_Cliff = 0 arcpy.CalculateAreas_stats(debarea_iteration, 'del_debris_area.shp') with arcpy.da.SearchCursor('del_debris_area.shp', ['F_AREA']) as cursor: for row in cursor: debris_area_sum += row[0] if os.path.isfile(workspace+'\\del_lineAndArea_area.shp') == False: print "'del_lineAndArea_area.shp'does not exist." elif arcpy.management.GetCount('del_lineAndArea_area.shp')[0] == "0": print "No area within 'del_lineAndArea_area.shp'." else: with arcpy.da.SearchCursor('del_lineAndArea_area.shp', ['F_AREA']) as cursor: for row in cursor: cliff_area_sum += row[0] Perc_Cliff = (cliff_area_sum/debris_area_sum)*100 arcpy.Dissolve_management("del_lineAndArea_area.shp", 'cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp', "value") arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','minSlope','FLOAT') arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Cliff','FLOAT') arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Area_Deb','FLOAT') arcpy.AddField_management('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp','Perc_Cliff','FLOAT') rows = arcpy.UpdateCursor('cliffMap_betai' + str("%02d" % (int(minSlope),)) + 'betaA' + str(int(areaSlope)) + '.shp') for row in rows: row.setValue('Area_Cliff', cliff_area_sum) row.setValue('Area_Deb', debris_area_sum) row.setValue('minSlope', minSlope) row.setValue('Perc_Cliff', Perc_Cliff) rows.updateRow(row) del row, rows print 'IceCliffLocation script [minSlope: ' + str("%02d" % (int(minSlope),)) + ' areaSlope: ' + str(int(areaSlope))+ '] done...' 
rasterList = arcpy.ListRasters("*del*") for raster in rasterList: arcpy.Delete_management(raster) del raster del rasterList fcList = arcpy.ListFeatureClasses("*del*") for fc in fcList: arcpy.Delete_management(fc) del fc del fcList print "intermediate files deleted" del minSlope del n if str(workspace.split("\\")[-1]) == 'Final': print "Script complete" else: initialSlope_doubles = [] percentCliffs_doubles = [] initialSlope = [] percentCliffs = [] xfit = [] yfit = [] fcList = [] arr = [] fcList = arcpy.ListFeatureClasses("*cliffMap*") arcpy.Merge_management(fcList, "mergedSolutions.shp") arr = arcpy.da.TableToNumPyArray("mergedSolutions.shp", ('Perc_Cliff','minSlope')) arcpy.Delete_management("del_mergedSolutions.shp") initialSlope_doubles = [row[1] for row in arr] percentCliffs_doubles = [row[0] for row in arr] #remove rows that are repeated due to (possible) earlier tiled dissolve from insufficient memory for i,j in enumerate(initialSlope_doubles): if j != initialSlope_doubles[(i-1) % len(initialSlope_doubles)]: initialSlope.append(j) del i,j for i,j in enumerate(percentCliffs_doubles): if j != percentCliffs_doubles[(i-1) % len(percentCliffs_doubles)]: percentCliffs.append(j) del i,j def func(x,a,b,c): return a*np.exp(-((x-b)/c)**2) try: popt, pcov = curve_fit(func,initialSlope,percentCliffs, maxfev=1000) except RuntimeError: fig = plt.figure() ax1 = fig.add_subplot(111) ax1.plot(initialSlope, percentCliffs, 'ko');plt.draw() fig.show() print("Error - curve_fit failed") xfit = np.linspace(min(initialSlope), max(initialSlope), 100) yfit = popt[0]*np.exp(-((xfit-popt[1])/popt[2])**2) def secondDer(x): return popt[0]*(((4*(x-popt[1])**2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**4)-((2*np.exp(-(x-popt[1])**2/popt[2]**2))/popt[2]**2)) a1 = [] a1 = [i for i in xrange(91)] a2 = secondDer(a1) #the next 3 for loops and a[x] variables define 1 of the 2 points to derive the optimization line. 
a3 = [] a4 = [] # values of second derivative where slope is below 'gamma' for i, j in enumerate(a2): if j <= gamma: a3.append(i) == i # find the steepest point (in the middle of the side of the bell) for i, j in enumerate(a2): if j == max(a2): m=i # take only values to the right of 'm' in case the curve is flat at 0 slope for i in a3: if i > m: a4.append(i) == i del i,j ax = min(a4) ay = popt[0]*np.exp(-((ax-popt[1])/popt[2])**2) #find max of bell for first point in optmization line yfit_array = array(yfit) ftup = (np.where(yfit_array == max(yfit_array))) f = int(ftup[0]) # x,y index of max yfit # d = distance from fit Equation 2 (Herreid and Pellicciotti, 2018) to line definded by ((xfit[0],yfit[0]),(ax,yx)) d = abs((yfit[f]-ay)*xfit-(xfit[f]-ax)*yfit+xfit[f]*ay-yfit[f]*ax)/((yfit[f]-ay)**2+(xfit[f]-ax)**2)**(1/2) # crit is the index of the longest d crit = np.where(d == max(d)) m = (yfit[f]-ay)/(xfit[f]-ax) b = yfit[f]-m*xfit[f] x_crit = (xfit[crit]+m*yfit[crit]-m*b)/(m**2+1) y_crit = m*((xfit[crit]+m*yfit[crit]-m*b)/(m**2+1))+b fig = plt.figure() ax1 = fig.add_subplot(111) ax1.plot(initialSlope, percentCliffs, 'ko'); plt.plot([xfit[f],ax],[yfit[f],ay]); plt.plot([xfit[crit],x_crit],[yfit[crit],y_crit]); plt.plot(xfit,yfit);plt.xlim(0, 100);plt.ylim(0, 100);plt.gca().set_aspect('equal', adjustable='box');plt.draw() ax1.set_xlabel(r'$\mathrm{\beta_i (^\circ)}$') ax1.set_ylabel('Ice cliff fraction (%)') fig.show() #fig.canvas.flush_events() import time time.sleep(1) #plt.pause(0.01) #plt.waitforbuttonpress() #save data used to make figure np.save(workspace+'\\figureData', (initialSlope, percentCliffs,[xfit[f],ax],[yfit[f],ay],[xfit[crit],x_crit],[yfit[crit],y_crit],xfit,yfit)) IceCliffLocation.minSlope = float(xfit[crit])
entidadRecorte = arcpy.Select_analysis(catastro_capa, r"C:\script\workspace\datosInput.gdb",
                                       campo + "=" + str(input_catastro))
#entidadRecorte = arcpy.Select_analysis(catastro_capa, r"C:\script\workspace\datosInput.gdb", campo + "= '" + str(numero) + "'")

### CLIP THE CADASTRE SELECTION AGAINST THE CLIMATE DATA:
arcpy.env.workspace = r"C:\script\workspace\datosInput.gdb"
lista_capas_gdb = arcpy.ListDatasets("*", "Raster")
#print(lista_capas_gdb)

for capa in lista_capas_gdb:
    inRaster = capa
    inMaskData = entidadRecorte
    outExtractByMask = ExtractByMask(inRaster, inMaskData)
    outExtractByMask.save(os.path.join(r"C:\script\workspace\datosInput.gdb", capa + "_recorte"))
    # print(f"clipping {capa}")

# CLIP THE CONSTANT DATASETS:
# Clip the geology layer
enRaster = geologia_capa
enMaskData = entidadRecorte
outExtractByMask = ExtractByMask(enRaster, enMaskData)
outExtractByMask.save(r"C:\script\workspace\datosInput.gdb\geologia_recorte")
#print("geology clipped")

# Clip the slope layer:
raster = pendiente_capa
mask = entidadRecorte
outExtractByMask = ExtractByMask(raster, mask)
def evi_raster_shrink(evi_raster_path, feature_points_path, radius):
    arcpy.Buffer_analysis(feature_points_path, "feature_points_buffers", radius,
                          dissolve_option="ALL")
    out_raster = ExtractByMask(evi_raster_path, "feature_points_buffers")
    out_raster.save("evi_raster_shrink")
    return out_raster
def CalculateVegetationHeight(MNH, emprise, idField, geodata, OutputFC):
    # Local variables:
    env.workspace = geodata

    # Process: Extract by mask
    pathExtract = os.path.join(geodata, "ExtractMNH")
    Extract_MNH = ExtractByMask(MNH, emprise)
    Extract_MNH.save(pathExtract)

    # Process: Raster to points
    arcpy.RasterToPoint_conversion(Extract_MNH, "MNHPoint", "Value")

    # Process: Spatial join
    # Create the FieldMappings object
    fmap = arcpy.FieldMappings()
    # add the tables
    fmap.addTable("MNHPoint")
    fmap.addTable(emprise)
    # find the index of the grid_code field and create a fieldmap for each statistic
    # (mean, min, max, median, std dev)
    idexGridCode = fmap.findFieldMapIndex("grid_code")
    # create the fieldmaps for each column
    fieldMapGCmean = fmap.getFieldMap(idexGridCode)
    fieldMapGCmin = arcpy.FieldMap()
    fieldMapGCmin.addInputField("MNHPoint", "grid_code")
    fieldMapGCmax = arcpy.FieldMap()
    fieldMapGCmax.addInputField("MNHPoint", "grid_code")
    fieldMapGCmedian = arcpy.FieldMap()
    fieldMapGCmedian.addInputField("MNHPoint", "grid_code")
    fieldMapGCstdv = arcpy.FieldMap()
    fieldMapGCstdv.addInputField("MNHPoint", "grid_code")

    # Compute the Min_height field
    Minfld = fieldMapGCmin.outputField
    Minfld.name = "Min_height"
    Minfld.aliasName = "Min_height"
    fieldMapGCmin.outputField = Minfld
    fieldMapGCmin.mergeRule = "Min"
    fmap.addFieldMap(fieldMapGCmin)

    # Compute the Max_height field
    Maxfld = fieldMapGCmax.outputField
    Maxfld.name = "Max_height"
    Maxfld.aliasName = "Max_height"
    fieldMapGCmax.outputField = Maxfld
    fieldMapGCmax.mergeRule = "Max"
    fmap.addFieldMap(fieldMapGCmax)

    # Compute the Med_height field
    MedianFld = fieldMapGCmedian.outputField
    MedianFld.name = "Med_height"
    MedianFld.aliasName = "Med_height"
    fieldMapGCmedian.outputField = MedianFld
    fieldMapGCmedian.mergeRule = "Median"
    fmap.addFieldMap(fieldMapGCmedian)

    # Compute the Stdv_height field
    StdvFld = fieldMapGCstdv.outputField
    StdvFld.name = "Stdv_height"
    StdvFld.aliasName = "Stdv_height"
    fieldMapGCstdv.outputField = StdvFld
    fieldMapGCstdv.mergeRule = "StdDev"
    fmap.addFieldMap(fieldMapGCstdv)

    # Compute the Mean_height field
    Meanfld = fieldMapGCmean.outputField
    Meanfld.name = "Mean_height"
    Meanfld.aliasName = "Mean_height"
    fieldMapGCmean.outputField = Meanfld
    fieldMapGCmean.mergeRule = "Mean"
    fmap.replaceFieldMap(idexGridCode, fieldMapGCmean)

    arcpy.SpatialJoin_analysis(emprise, "MNHPoint", OutputFC, "", "", fmap)

    # Process: Calculate field
    arcpy.AddField_management(OutputFC, "Variance_height", "DOUBLE")
    arcpy.CalculateField_management(OutputFC, "Variance_height", "[Stdv_height]*[Stdv_height]")

    # Delete unneeded fields
    for fld in arcpy.ListFields(OutputFC):
        if fld.name not in [idField, "Min_height", "Max_height", "Med_height",
                            "Stdv_height", "Mean_height", "Join_Count", "Variance_height"]:
            try:
                arcpy.DeleteField_management(OutputFC, fld.name)
            except:
                pass

    # Return OutputFC
    return OutputFC
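# A hedged call sketch for CalculateVegetationHeight above; the geodatabase, canopy height
# model (MNH), and footprint feature class names are placeholders, not the original data.
gdb = r"C:\data\vegetation.gdb"
stats_fc = CalculateVegetationHeight(MNH=os.path.join(gdb, "mnh"),
                                     emprise=os.path.join(gdb, "parcelles"),
                                     idField="ID_PARC",
                                     geodata=gdb,
                                     OutputFC="parcelles_height_stats")
print("Height statistics written to {0}".format(stats_fc))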