def split_roads_by_boundaries(self):
    """Split the road features at county and city boundaries via two identity overlays."""
    arcpy.AddMessage("splitting the roads by county and city")

    county_split = self._output._intermediate_identity_county
    # First pass: attach county attributes and break roads on county polygons.
    arcpy.Identity_analysis(self._input.roads_feature,
                            self._input.county_features,
                            county_split)
    # Second pass: attach city attributes and break the county-split roads on city polygons.
    arcpy.Identity_analysis(county_split,
                            self._input.city_features,
                            self._output._intermediate_identity_city_and_county)
def start(targetpath, cskpath, tempname):
    """Run the review workflow: ensure audit fields, identity-intersect the
    check layer with the target, collect the matches, then write them back."""
    # Make sure the audit fields exist on the target before anything else.
    arcpy.AddMessage("2_确认审核字段")
    ensureSHFields(targetpath)
    # Fields produced by the rules could be dropped here (intentionally disabled).
    # arcpy.AddMessage("2_删除字段")
    # deleteFields(targetpath)
    # Identity analysis between the check layer and the target.
    arcpy.AddMessage("2_相交分析")
    arcpy.Identity_analysis(cskpath, targetpath, tempname, "ALL", "", "")
    arcpy.AddMessage("2_创建平均宽度")
    createWidth(tempname)
    # Collect the intersecting features, with a progressor sized to the output.
    arcpy.AddMessage("2_收集")
    temp_total = int(arcpy.GetCount_management(tempname).getOutput(0))
    arcpy.SetProgressor('step', '2_收集', 0, temp_total, 1)
    matched = collectIntersectFeauture(tempname)
    # Write the collected results back onto the target.
    arcpy.AddMessage("2_还原数据")
    target_total = int(arcpy.GetCount_management(targetpath).getOutput(0))
    arcpy.SetProgressor('step', '2_还原数据', 0, target_total, 1)
    UpdateTarget(targetpath, matched)
def downsample(city_id):
    """Aggregate sample points for one city onto a fishnet grid.

    Builds (or reuses) a fishnet over the city's sample shapefile, overlays
    samples onto cells with Identity, computes the mean price per cell, joins
    it onto the fishnet labels, and returns the label shapefile path.
    Every expensive step is skipped when its output already exists.
    """
    log('Downsampling points for %s', city_id)

    out_dir = join(DOWNSAMPLE_DIR, str(city_id))
    if exists(out_dir):
        log('%s already exists!', out_dir)
    else:
        os.makedirs(out_dir)
        log('Created %s', out_dir)

    points_shp = join(LATLNGS_SHP_DIR, '%s.shp' % city_id)
    fishnet_poly = join(out_dir, 'downsampling_fishnet.shp')
    fishnet_labels = join(out_dir, 'downsampling_fishnet_label.shp')

    # Fishnet cells are 0.0012 x 0.0012 degrees covering the sample extent.
    if not exists(fishnet_poly):
        log('Creating fishnet...')
        desc = arcpy.Describe(points_shp)
        origin = str(desc.extent.lowerLeft)
        y_axis = str(desc.extent.XMin) + ' ' + str(desc.extent.YMax + 10)
        corner = str(desc.extent.upperRight)
        arcpy.CreateFishnet_management(fishnet_poly, origin, y_axis,
                                       '0.0012', '0.0012', '0', '0',
                                       corner, 'LABELS', '#', 'POLYGON')
        log('Fishnet creation complete')

    identity_shp = join(out_dir, 'samples_identity.shp')
    if not exists(identity_shp):
        log('Computing identity...')
        arcpy.Identity_analysis(points_shp, fishnet_poly, identity_shp)
        log('Identity complete')

    # The 'info' directory is the marker that statistics were already computed.
    stats_table = join(out_dir, 'samples_stats')
    if not exists(join(out_dir, 'info')):
        log('Starting summary statistics...')
        arcpy.Statistics_analysis(identity_shp, stats_table,
                                  [['price', 'MEAN']], 'FID_downsa')
        log('Summary statistics complete')

    log('Detecting if join has already been done...')
    already_joined = any(f.name == 'MEAN_PRICE'
                         for f in arcpy.ListFields(fishnet_labels))
    if not already_joined:
        log('Performing table join on FID:FID_DOWNSA...')
        arcpy.JoinField_management(fishnet_labels, 'FID', stats_table,
                                   'FID_DOWNSA', ['MEAN_PRICE'])
        log('Table join on FID:FID_DOWNSA done.')

    log('Defining projection...')
    arcpy.DefineProjection_management(fishnet_labels, PROJECTION_FILE)
    log('FINISHED downsampling %s', city_id)
    return fishnet_labels
# Marks the end of the whole run in the log output.
log('======================END==========================')
def split_strahler(stream_area_fc, streams, out_area_fc):
    """This function splits up the NHDArea feature class, which does not
    start and stop polygons at confluences, by creating break points near
    the confluences to split up the polygons. Then, it adds the Strahler
    value from the stream centerline.

    stream_area_fc -- NHDArea (StreamRiver) polygon feature class
    streams        -- stream centerline feature class carrying Strahler values
    out_area_fc    -- output feature class of split polygons with Strahler
    """
    # 1) Generate euclidean allocation raster from streams (use OBJECTID)
    # 2) Convert euclidean allocation raster to polygons
    # 3) Join allocation polygons "gridcode" to streams "OBJECTID" so that
    #    Strahler value is attached to allocation polygon
    # 4) Use identity function to split up the StreamRiver polygons at the
    #    allocation polygon boundaries, and add the Strahler values
    old_workspace = env.workspace
    env.workspace = 'in_memory'
    # Fixed message: removed stray "1) " fragment from the progress text.
    cu.multi_msg(
        "Splitting stream area polygons between confluences and joining Strahler order to them..."
    )
    cu.multi_msg('next messages for testing')
    arcpy.CheckOutExtension('Spatial')
    cu.multi_msg('euc')
    euc = EucAllocation(streams, cell_size='50', source_field='OBJECTID')
    arcpy.CheckInExtension('Spatial')
    cu.multi_msg('conversion')
    arcpy.RasterToPolygon_conversion(euc, 'allocation_polys')
    # The permanent-ID field name can be truncated, so look it up by prefix.
    stream_id_field = arcpy.ListFields(streams, 'Permanent_')[0].name
    cu.multi_msg('join')
    arcpy.JoinField_management('allocation_polys', 'grid_code', streams, 'OBJECTID',
                               ['Strahler', 'LengthKm', stream_id_field])
    cu.multi_msg('identity')
    arcpy.Identity_analysis(stream_area_fc, 'allocation_polys', out_area_fc)
    # Restore the caller's workspace; all scratch output went to in_memory.
    env.workspace = old_workspace
    # Fixed typo in the completion message ("strema" -> "stream").
    cu.multi_msg("Splitting stream area polygons finished.")
def planarizeAndGetArcEndPoints(fds,caf,mup,fdsToken):
    """Planarize the CAF (contacts-and-faults) line feature class and build a
    point feature class holding both endpoints of every planarized segment.

    fds      -- feature dataset that will receive the endpoint feature class
    caf      -- contacts-and-faults line feature class to planarize
    mup      -- map-unit polygon feature class used in the IDENTITY overlay
    fdsToken -- name prefix for the endpoint feature class inside fds
    Returns (cafp, arcEndPoints): the planarized CAF and the endpoint points.
    """
    # returns a feature class of endpoints of all caf lines, two per planarized line segment
    addMsgAndPrint('Planarizing '+os.path.basename(caf)+' and getting segment endpoints')
    # add LineID (so we can recover lines after planarization)
    arcpy.AddField_management(caf,'LineID','LONG')
    arcpy.CalculateField_management(caf,'LineID','!OBJECTID!','PYTHON_9.3')
    # planarize CAF by FeatureToLine
    addMsgAndPrint(' planarizing caf')
    planCaf = caf+'_xxx_plan'
    testAndDelete(planCaf)
    arcpy.FeatureToLine_management(caf,planCaf)
    # planarize CAF (by IDENTITY with MUP)
    addMsgAndPrint(' IDENTITYing caf with mup')
    cafp = caf+'_planarized'
    testAndDelete(cafp)
    arcpy.Identity_analysis(planCaf,mup,cafp,'ALL','','KEEP_RELATIONSHIPS')
    # delete extra fields
    addMsgAndPrint(' deleting extra fields')
    fns = fieldNameList(cafp)
    deleteFields = []
    # KEEP_RELATIONSHIPS adds LEFT_/RIGHT_ copies of every mup field;
    # keep only the MapUnit pair and queue the rest for deletion.
    for f in fieldNameList(mup):
        if f != 'MapUnit':
            for hf in ('RIGHT_'+f,'LEFT_'+f):
                if hf in fns:
                    deleteFields.append(hf)
    arcpy.DeleteField_management(cafp,deleteFields)
    # calculate azimuths startDir and endDir
    addMsgAndPrint(' adding StartAzimuth and EndAzimuth')
    for f in ('LineDir','StartAzimuth','EndAzimuth'):
        arcpy.AddField_management(cafp,f,'FLOAT')
    arcpy.AddField_management(cafp,'ToFrom','TEXT','','',4)
    fields = ['SHAPE@','StartAzimuth','EndAzimuth']
    with arcpy.da.UpdateCursor(cafp,fields) as cursor:
        for row in cursor:
            # only the first geometry part is used for the azimuth calculation
            lineSeg = row[0].getPart(0)
            row[1],row[2] = startEndGeogDirections(lineSeg)
            cursor.updateRow(row)
    # make endpoint feature class
    addMsgAndPrint(' converting line ends to points')
    arcEndPoints = fds+'/'+fdsToken+'xxx_EndPoints'  # will be a feature class in fds
    arcEndPoints2 = arcEndPoints+'_end'
    testAndDelete(arcEndPoints)
    # start points first, tagged "From" and carrying the start azimuth
    arcpy.FeatureVerticesToPoints_management(cafp,arcEndPoints, 'START')
    arcpy.CalculateField_management(arcEndPoints,'LineDir','!StartAzimuth!','PYTHON')
    arcpy.CalculateField_management(arcEndPoints,'ToFrom','"From"','PYTHON')
    testAndDelete(arcEndPoints2)
    # end points next, tagged "To", then appended onto the start points
    arcpy.FeatureVerticesToPoints_management(cafp,arcEndPoints2,'END')
    arcpy.CalculateField_management(arcEndPoints2,'LineDir','!EndAzimuth!','PYTHON')
    arcpy.CalculateField_management(arcEndPoints2,'ToFrom','"To"','PYTHON')
    arcpy.Append_management(arcEndPoints2,arcEndPoints)
    testAndDelete(arcEndPoints2)
    # delete some more fields
    deleteFields = ['EndAzimuth','StartAzimuth','LEFT_MapUnitPolys','RIGHT_MapUnitPolys']
    arcpy.DeleteField_management(arcEndPoints,deleteFields)
    addMsgAndPrint(' adding POINT_X and POINT_Y')
    arcpy.AddXY_management(arcEndPoints)
    # clean up the intermediate FeatureToLine output
    testAndDelete(planCaf)
    return cafp, arcEndPoints
def identity(parcelPts, landOwn, owner, zipCodes, zips, munis, parcelPtsFinal, newParcelsFinal):
    """Chain identity overlays: parcel points -> land ownership -> zip codes -> municipalities."""
    print(
        'Point Identity Analysis on Land Ownership, Zip Codes, and Municipalities'
    )
    # Clear any leftovers from a previous run before writing new outputs.
    for existing in (owner, zips, parcelPtsFinal, newParcelsFinal):
        if arcpy.Exists(existing):
            arcpy.Delete_management(existing)
    # Each overlay feeds the next, accumulating attributes onto the points.
    arcpy.Identity_analysis(parcelPts, landOwn, owner)
    arcpy.Identity_analysis(owner, zipCodes, zips)
    arcpy.Identity_analysis(zips, munis, parcelPtsFinal)
def processbuf(studyArea, roadBuffer):
    """Clip the road buffer to the study area, then tag it with district names.

    Returns the path of the identity output carrying the district attributes.
    """
    # Restrict the buffer to the study area first.
    clipped = roadBuffer + "_Clip"
    arcpy.Clip_analysis(roadBuffer, studyArea, clipped)
    # Identity against the study area attaches the district attributes.
    labelled = roadBuffer + "_dist"
    arcpy.Identity_analysis(studyArea, clipped, labelled)
    return labelled
def further_process_blended():
    """Post-process blended feature classes in three passes:
    1) clip each blended fc to its matching project-area tile,
    2) identity the clips with project parcels and run Near to streets,
    3) compute frequency and area statistics per gridcode/APN.

    Relies on module-level outBlendedWS / outClippedBlendedWS /
    outClippedBlendedIDWS / projectAreaTiles / projectAreaParcels globals.
    """
    env.workspace = outBlendedWS
    env.overwriteOutput = True
    GISDBASCL = r'S:\LV_Valley_Imagery\2017\SwimmingPool2017\gdb\general_data.gdb\GISDBA_SCL_STREETS'
    fcs = arcpy.ListFeatureClasses()
    arcpy.MakeFeatureLayer_management(projectAreaTiles, 'TileClipLayer')
    for fc in fcs:
        print 'clipping ' + fc
        arcpy.MakeFeatureLayer_management(fc, 'lyr')
        arcpy.AddField_management('lyr', 'YARD', 'TEXT', '', '', '5')
        arcpy.AddField_management('lyr', 'TILENAME', 'Text', '', '', '8')
        arcpy.AddField_management('lyr', 'ERROR_TYPE', 'SHORT')
        # assumes fc names embed the tile id after a 4-char prefix, matched
        # against BOOKSEC_PT values prefixed with 'o' -- TODO confirm
        arcpy.SelectLayerByAttribute_management(
            'TileClipLayer', 'NEW_SELECTION', "BOOKSEC_PT = 'o" + fc[4:] + "'")
        arcpy.Clip_analysis(fc, 'TileClipLayer', outClippedBlendedWS + '\\' + fc + '_Clip')
        arcpy.SelectLayerByAttribute_management('TileClipLayer', 'CLEAR_SELECTION')
    env.workspace = outClippedBlendedWS
    env.overwriteOutput = True
    fcs = arcpy.ListFeatureClasses()
    arcpy.MakeFeatureLayer_management(projectAreaParcels, 'ProjAreaAOXLyr')
    arcpy.MakeFeatureLayer_management(GISDBASCL, 'GISDBA_SCL_STREETS')
    for fc in fcs:
        print "Performing Identity and Near Analysis on " + fc + "_Id"
        arcpy.Identity_analysis(fc, 'ProjAreaAOXLyr', outClippedBlendedIDWS + '\\' + fc + '_Id', 'ALL', '', 'NO_RELATIONSHIPS')
        # Near distance to the street centerlines, capped at 300 feet.
        arcpy.Near_analysis(outClippedBlendedIDWS + '\\' + fc + '_Id',
                            'GISDBA_SCL_STREETS', "300 Feet", "LOCATION", "NO_ANGLE", "PLANAR")
    env.workspace = outClippedBlendedIDWS
    env.overwriteOutput = True
    arcpy.MakeFeatureLayer_management(GISDBASCL, 'GISDBA_SCL_STREETS')
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        print "calculating frequency and stats on " + fc
        arcpy.MakeFeatureLayer_management(fc, 'lyr')
        arcpy.AddJoin_management('lyr', "NEAR_FID", 'GISDBA_SCL_STREETS', 'OBJECTID', 'KEEP_ALL')
        # fc[:-8] strips the '_Id' suffix chain to rebuild the base name
        # for the frequency/statistics outputs -- presumably; verify lengths.
        arcpy.Frequency_analysis(
            'lyr', outClippedBlendedIDWS + '\\' + fc[:-8] + '_Frequen',
            '"{}.gridcode;{}.APN"'.format(fc, fc), '"{}.Shape_Area"'.format(fc))
        arcpy.Statistics_analysis(
            outClippedBlendedIDWS + '\\' + fc[:-8] + '_Frequen',
            outClippedBlendedIDWS + '\\' + fc[:-8] + '_TOTAREA',
            "FREQUENCY COUNT;" + "{i}_Shape_Area SUM".format(i=fc),
            "{x}_APN".format(x=fc))
def validateIdentity(result, idtest, idtrue, idfalse):
    """Overlay *result* with *idtest* and split it into match/mismatch sets.

    result  -- feature class to validate
    idtest  -- feature class overlaid onto result via Identity
    idtrue  -- where clause selecting correctly-identified features
    idfalse -- where clause selecting mis-identified features
    Returns (idmatch, mismatch): in_memory scratch feature classes.
    """
    # Fixed message typo: the tool being run is Identity, not "Identify".
    print("Running Identity")
    # Describe raises on an invalid input, so this is an early sanity check.
    arcpy.Describe(result)
    iresult = arcpy.CreateScratchName("ir", "", "FeatureClass", "in_memory")
    mismatch = arcpy.CreateScratchName("mm", "", "FeatureClass", "in_memory")
    idmatch = arcpy.CreateScratchName("id", "", "FeatureClass", "in_memory")
    arcpy.Identity_analysis(result, idtest, iresult, "NO_FID")
    # Sanity-check the identity output before selecting from it.
    arcpy.Describe(iresult)
    arcpy.Select_analysis(iresult, mismatch, idfalse)
    arcpy.Select_analysis(iresult, idmatch, idtrue)
    return (idmatch, mismatch)
def main():
    """Entry point: identity-overlay the input features with the clip
    features, then explode multipart results into singlepart output."""
    # Workspace setup.
    out_ws = flmc.SetupWorkspace("FLM_SBP_output")
    arcpy.env.workspace = out_ws
    arcpy.env.overwriteOutput = True

    # Read the three tool arguments from the parameter file.
    args = flmc.GetArgs("FLM_SBP_params.txt")
    in_features = args[0].rstrip()
    clip_features = args[1].rstrip()
    out_features = args[2].rstrip()

    # Intermediate identity result lives in the scratch workspace.
    identity_fc = out_ws + "\\FLM_SBP_OutIdentity.shp"

    # Process: Identity
    arcpy.Identity_analysis(in_features, clip_features, identity_fc,
                            join_attributes="ALL", cluster_tolerance="",
                            relationship="NO_RELATIONSHIPS")

    # Process: Multipart To Singlepart
    arcpy.MultipartToSinglepart_management(identity_fc, out_features)
def geo_process(self, gbz_merge_lyr, dltb_query_lyr):
    """
    Geoprocessing workflow: isolate the built (high-standard farmland) area,
    tag it with administrative-division attributes, dissolve per division,
    and compute the area in mu.
    :param dltb_query_lyr: land-use (DLTB) polygon layer after the definition query
    :param gbz_merge_lyr: merged layer of all high-standard farmland features
    :return: name of the dissolved output feature class
    """
    # Erase farmland from the land-use polygons, leaving the non-built part.
    out_feature_class = "erase_left"
    arcpy.Erase_analysis(dltb_query_lyr, gbz_merge_lyr, out_feature_class)
    print "Erase success"
    # Erase the remainder from the original to recover the built area.
    builded_area = "builded_area"
    arcpy.Erase_analysis(dltb_query_lyr, out_feature_class, builded_area)
    print "Erase success"
    # Identity with the administrative divisions to attach XZQ attributes.
    builded_area_id = "builded_area_id"
    arcpy.Identity_analysis(builded_area, self.input_xzq, builded_area_id)
    print "Identity success"
    fields_o = arcpy.ListFields(builded_area_id)
    # All three fields must be present before the VB expressions below run.
    f_exist = all([ezarcpy.check_field_exit(fields_o, i)
                   for i in ["XZQDM", "XZQMC", "Shape_Area"]])
    if not f_exist:
        raise RuntimeError("field does not exist")
    # Add field XZQ to hold the concatenation of XZQDM & XZQMC.
    ezarcpy.add_field(builded_area_id, [XZQ], "TEXT", 60)
    expression = "[XZQDM] & [XZQMC]"
    arcpy.CalculateField_management(builded_area_id, XZQ, expression, "VB")
    cal_rel_area = "cal_rel_area"
    if self.name:
        cal_rel_area = self.name
    arcpy.Dissolve_management(builded_area_id, cal_rel_area, "XZQ")
    print "Dissolve suceess"
    # Add field MJM to hold the area in mu (1 sq m = 0.0015 mu).
    ezarcpy.add_field(cal_rel_area, [MJM], "DOUBLE")
    expression2 = "[Shape_Area] * 0.0015"
    arcpy.CalculateField_management(cal_rel_area, MJM, expression2, "VB")
    print u"output {}.shp".format(self.name)
    return cal_rel_area
def splitLines(in_workspace, job_aoi, names=None):
    """Split selected line feature classes at the job AOI boundary, in place.

    Walks every feature class in *in_workspace* (inside and outside feature
    datasets); each one whose name appears in *names* is split by identity
    with *job_aoi*, exploded to singlepart, and written back over itself.

    in_workspace -- workspace (geodatabase/folder) to walk
    job_aoi      -- polygon feature class used as the splitting boundary
    names        -- iterable of feature class names to process (default none)
    Returns the list of full paths of the feature classes that were processed.
    """
    # Fixed: default was a shared mutable list (names=[]).
    if names is None:
        names = []
    # Fixed: this list was previously never populated, so the function
    # always returned [] despite its documented contract.
    processed = []
    walk = arcpy.da.Walk(in_workspace, datatype="FeatureClass")
    for dirpath, dirnames, filenames in walk:
        for filename in filenames:
            if filename not in names:
                continue
            fc = os.path.join(dirpath, filename)
            # Split at the AOI boundary, then explode multipart results.
            split = arcpy.Identity_analysis(fc, job_aoi,
                                            "in_memory\\split_" + filename)
            single = arcpy.MultipartToSinglepart_management(
                split, "in_memory\\split" + filename)
            # Replace the original features with the split singlepart ones.
            arcpy.DeleteFeatures_management(fc)
            arcpy.Append_management(single, fc, "NO_TEST")
            arcpy.Delete_management(split)
            arcpy.Delete_management(single)
            processed.append(fc)
    return processed
def _findWithinFeatures(self, baseLayerSet):
    """Find features fully enclosed by the union of *baseLayerSet* features.

    baseLayerSet -- set of zero-based feature indices forming the base layer
    Returns a set of zero-based indices of enclosed (gap-filling) features.
    """
    enclosedFeatures = set()
    tempLayer = os.path.join(self._scratchFolder, 'tempLayer' + str(self.currentLayer))
    tempUnionFeature = os.path.join(
        self._scratchFolder, 'tempUnion' + str(self.currentLayer) + ".shp")
    tempIdentityFeature = os.path.join(
        self._scratchFolder, 'tempIdentity' + str(self.currentLayer) + ".shp")
    # Uncheck Gap-allowed option in union tool to fill gaps (enclosed areas) in newly found first-order neighbor layer
    # Build an OR chain of ObjectID equalities for the base-layer features.
    whereClause = '\"' + self._ObjIDField + '\" = ' + (
        ' OR \"' + self._ObjIDField + '\" = ').join(
        str(i + self._ObjIDInitialValue) for i in baseLayerSet)
    arcpy.MakeFeatureLayer_management(self._featurePath, tempLayer, whereClause)
    arcpy.Union_analysis([tempLayer], tempUnionFeature, "ONLY_FID", None, "NO_GAPS")
    # Use identity tool to find enclosed features in newly found first-order neighbor layer
    arcpy.Identity_analysis(tempUnionFeature, self._featurePath,
                            tempIdentityFeature, "ONLY_FID")
    # Field name of temp selected layer and orignal feature data in current generated identity feature
    # NOTE(review): these slices presumably reproduce the 10-character
    # shapefile field-name truncation of "FID_<name>" -- confirm for all
    # feature name lengths used by callers.
    ObjIDName_tempLayer = "FID_" + self._featureName[:6]
    if (self._featureName[5] == "_"):
        ObjIDName_originalFeature = "FID_" + self._featureName[:5] + "1"
    else:
        ObjIDName_originalFeature = "FID_" + self._featureName[:4] + "_1"
    with arcpy.da.SearchCursor(
            tempIdentityFeature, [ObjIDName_tempLayer, ObjIDName_originalFeature
                                  ]) as cursor:  #@UndefinedVariable
        for row in cursor:
            # FID -1 in the union side but a real FID in the original side
            # marks a feature sitting inside a filled gap.
            if (row[0] == -1 and row[1] != -1):
                enclosedFeatures.add(row[1] - self._ObjIDInitialValue)
    #Remove scratched data
    arcpy.Delete_management(tempUnionFeature)
    arcpy.Delete_management(tempIdentityFeature)
    return enclosedFeatures
def calculateCountToIdentity(inputAOI, outputTheissen, endIdentPloy):
    """Count identity features per hexagon and keep hexagons with count > 0.

    inputAOI       -- feature class intersected with the hexagon layer
    outputTheissen -- hexagon feature class carrying a 'hexagonID' field
    endIdentPloy   -- output feature class of hexagons with a positive count
    """
    # Local import: the file's import header is managed elsewhere.
    from collections import Counter
    arcpy.AddMessage("Calculate density start ...")
    ws = os.path.dirname(outputTheissen)
    arcpy.env.scratchWorkspace = ws
    arcpy.env.workspace = ws
    IdentLayer = "IdentLayer"
    arcpy.Identity_analysis(inputAOI, outputTheissen, IdentLayer)
    arcpy.Select_analysis(outputTheissen, "tempCopy")
    arcpy.AddField_management("tempCopy", "count_", "LONG")
    fields = ('hexagonID', 'count_')
    arcpy.CalculateField_management("tempCopy", "count_", "0")
    values = [s[0] for s in arcpy.da.SearchCursor(IdentLayer, ("hexagonID"))]
    # Fixed: previously rescanned the whole values list once per unique id
    # (O(n*m)); Counter tallies every id in a single O(n) pass.
    counts = Counter(values)
    arcpy.SetProgressor("step", "Calculate...", 0, len(counts), 1)
    for uv, occurrences in counts.items():
        expression = arcpy.AddFieldDelimiters("tempCopy", "hexagonID") + " = %d" % int(uv)
        with arcpy.da.UpdateCursor("tempCopy", fields, expression) as cursor:
            for row in cursor:
                if int(row[0]) == int(uv):
                    row[1] = long(occurrences)
                    cursor.updateRow(row)
        arcpy.SetProgressorPosition()
    # Keep only hexagons that actually received features.
    expression2 = arcpy.AddFieldDelimiters("tempCopy", "count_") + " > 0"
    arcpy.Select_analysis("tempCopy", endIdentPloy, expression2)
    arcpy.Delete_management("tempCopy")
    arcpy.Delete_management("IdentLayer")
    arcpy.AddMessage("Calculate density success ...")
# Finish the map-document export for the previous scenario.
arcpy.RefreshActiveView()
arcpy.RefreshTOC()
# NOTE(review): block structure reconstructed from a collapsed source line --
# adjustColumnCount/save are presumed to apply only when a saved map document
# (not the "CURRENT" one) is open; confirm against the original script.
if xmxd != "CURRENT":
    legend.adjustColumnCount(1)
    mxd.save()
arcpy.mapping.ExportToPDF(mxd, newpath+scen+"_"+inputYear+".pdf")
del mxd
"""************************************************************************************************************************************************"""
#Solar Energy Locations
#The annual average solar radiation map is given and the annual average precipitation maps are used for this analysis
arcpy.AddMessage("Starting solar power plant analysis....")
#As the first step, the two shapefiles are merged, using identity analysis
arcpy.Identity_analysis(datapath+"CASolar.shp",datapath+"Rainfall.shp", temp+"CASolRain.shp")
#Make a temporary layer for selection analysis
arcpy.MakeFeatureLayer_management(temp+"CASolRain.shp", temp+"CASol_lyr")
#Filter the combined shapefile to remove the locations of existing solar projects, culturally sensitive lands, military lands, and potential wilderness
filter_areas(temp+"CASol_lyr", "solar")
#The solar file is sorted on high annual average solar radiation, followed by low annual average rainfall
arcpy.Sort_management(temp+"CASol_lyr", temp+"CASolarSorted.shp", [[ "GHIANN", "DESCENDING"], ["RANGE", "ASCENDING"]])
# Remove the intermediate selection layer and merged shapefile.
arcpy.Delete_management(temp+"CASol_lyr")
arcpy.Delete_management(temp+"CASolRain.shp")
#Solar power plant energy generation calculation:
#Area: Each power plant is assumed to be of 2000 acre size = 8.09X10^6 sq. m
def create_wui(forest, urban, year, save_path, forestThresh=0, urbanThresh=0):
    """Build a wildland-urban-interface (WUI) raster for one year.

    Thresholds the forest and urban rasters, polygonizes them, overlays them
    with Identity, extracts a +/-50 ft band around the overlay boundaries,
    dissolves/intersects/projects the result, and rasterizes it.

    forest, urban -- input raster paths for *year*
    year          -- four-digit year used in output names
    save_path     -- workspace directory; created if missing
    forestThresh, urbanThresh -- cutoffs applied to the input rasters
    """
    arcpy.CheckOutExtension("Spatial")
    arcpy.env.overwriteOutput = True
    if not os.path.exists(save_path):
        # Fixed: was os.mkdir(savepath) -- NameError on first run with a
        # missing directory (the parameter is save_path).
        os.mkdir(save_path)
    arcpy.env.workspace = save_path

    # Threshold the rasters into 0/1 masks.
    outRas = Raster(forest) > forestThresh
    forestName = "forest%04dInt" % year
    outRas.save(forestName + "_%02d.tif" % int(urbanThresh * 100))
    outRas = Raster(urban) > urbanThresh
    urbanName = "urban%04dInt" % year
    outRas.save(urbanName + "_%02d.tif" % int(urbanThresh * 100))

    # Raster to polygon.
    arcpy.RasterToPolygon_conversion(forestName + "_%02d.tif" % int(urbanThresh * 100),
                                     "%s.shp" % forestName)
    arcpy.RasterToPolygon_conversion(urbanName + "_%02d.tif" % int(urbanThresh * 100),
                                     "%s.shp" % urbanName)

    # Identity overlay of forest and urban polygons.
    outName = "identity%04d.shp" % year
    arcpy.Identity_analysis("%s.shp" % forestName, "%s.shp" % urbanName, outName)

    # +50 ft and -50 ft buffers; erasing the inner from the outer leaves a
    # ~100 ft band straddling the forest/urban interface boundaries.
    arcpy.Buffer_analysis(outName, outName[:-4] + "Buffer50.shp", "50 Feet")
    arcpy.Buffer_analysis(outName, outName[:-4] + "BufferMinus50.shp", "-50 Feet")
    arcpy.Erase_analysis(outName[:-4] + "Buffer50.shp",
                         outName[:-4] + "BufferMinus50.shp",
                         outName[:-4] + "Erase.shp")

    inFeature = outName[:-4] + "Erase.shp"
    outFeature = outName[:-4] + "EraseDissolve.shp"
    arcpy.Dissolve_management(inFeature, outFeature)

    # Restrict to the western-US 4 km grid.
    inFeature = [outFeature,
                 os.path.join(dir_root, "data", "WUI", "arc_export", "westUsa4km.shp")]
    outFeature = outName[:-4] + "EraseDissolveIntersect.shp"
    arcpy.Intersect_analysis(inFeature, outFeature)

    # Project to WGS 1984 World Mercator (EPSG:3395).
    inFeature = outFeature
    outFeature = inFeature[:-4] + "Project.shp"
    arcpy.management.Project(inFeature, outFeature, arcpy.SpatialReference(3395))

    # Add an integer area field used as the rasterization value.
    inFeature = outFeature
    fieldName = "area"
    arcpy.AddField_management(inFeature, fieldName, "LONG")
    arcpy.CalculateField_management(inFeature, fieldName,
                                    "math.ceil(!SHAPE.AREA!)", "PYTHON_9.3")

    # Polygon back to raster in geographic coordinates.
    outRaster = "wui%04d" % year
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference("WGS 1984")
    arcpy.PolygonToRaster_conversion(inFeature, fieldName, outRaster,
                                     "MAXIMUM_AREA", fieldName, 0.035932611)
    toSave = Raster(outRaster)
    toSave.save(outRaster + "_%02d_%02d.tif" % (forestThresh * 100, urbanThresh * 100))
def generate_route_border_rule_table(workspace,route,route_id_field,boundary,boundary_id_field,buffer_size,route_border_rule_table,high_angle_threshold,offset): arcpy.AddMessage("Generating route border rule source table for {1}...".format(boundary)) try: date = datetime.now() date_string = date.strftime("%m/%d/%Y") spatial_reference = arcpy.Describe(route).spatialReference xy_resolution = "{0} {1}".format(spatial_reference.XYResolution,spatial_reference.linearUnitName) ############################################################################################################### # get all candidate border routes arcpy.AddMessage("Identifying candidate border routes...") # generate boundary border boundary_border = os.path.join(workspace,"{0}_{1}_border".format(boundary,"boundary")) arcpy.FeatureToLine_management(boundary, boundary_border) # dissolve polygon boundary based on boundary id boundary_border_dissolved = os.path.join(workspace,"{0}_boundary_border_dissolved".format(boundary)) arcpy.Dissolve_management(boundary_border,boundary_border_dissolved,[boundary_id_field]) # generate buffer around boundary # arcpy.AddMessage("generate buffer around boundary") boundary_border_buffer = os.path.join(workspace,"{0}_{1}".format(boundary,"boundary_buffer")) arcpy.Buffer_analysis(boundary_border_dissolved, boundary_border_buffer, buffer_size, "FULL", "ROUND") # get candidate border route # arcpy.AddMessage("get candidate border route") candidate_border_route_multipart = "in_memory\\candidate_{0}_border_route_multipart".format(boundary) candidate_border_route = os.path.join(workspace,"candidate_{0}_border_route".format(boundary)) arcpy.Clip_analysis(route, boundary_border_buffer, candidate_border_route_multipart) arcpy.MultipartToSinglepart_management(candidate_border_route_multipart, candidate_border_route) ################################################################################################################ 
################################################################################################################ # filter out candidate border routes that 'intersects' boundary at high angles arcpy.AddMessage("Filtering out candidate border routes that 'intersects' boundary at high angles...") route_buffer = os.path.join(workspace,"{0}_{1}".format(route,"buffer_flat")) if not arcpy.Exists(route_buffer): arcpy.Buffer_analysis(route, route_buffer, buffer_size, "FULL", "FLAT") # clip boundary segments within route buffer boundary_border_within_buffer_multipart = "in_memory\\{0}_boundary_within_{1}_buffer_multipart".format(boundary,route) boundary_border_within_buffer = os.path.join(workspace,"{0}_boundary_within_{1}_buffer".format(boundary,route)) arcpy.Clip_analysis(boundary_border_dissolved, route_buffer, boundary_border_within_buffer_multipart) arcpy.MultipartToSinglepart_management(boundary_border_within_buffer_multipart, boundary_border_within_buffer) # Add 'SEGMENT_ID_ALL_CANDIDATES' field to candidate route and populate it with 'OBJECTID' arcpy.AddField_management(candidate_border_route,"SEGMENT_ID_ALL_CANDIDATES","LONG") arcpy.CalculateField_management(candidate_border_route, "SEGMENT_ID_ALL_CANDIDATES", "!OBJECTID!", "PYTHON") # Add 'ANGLE_ROUTE' field to candidate route and populate it with the angle to the true north(= 0 degree) arcpy.AddField_management(candidate_border_route,"ANGLE_ROUTE","DOUBLE") with arcpy.da.UpdateCursor(candidate_border_route,("SHAPE@","ANGLE_ROUTE")) as uCur: for row in uCur: shape = row[0] x_first = shape.firstPoint.X y_first = shape.firstPoint.Y x_last = shape.lastPoint.X y_last = shape.lastPoint.Y angle = calculate_angle(x_first,y_first,x_last,y_last) if angle >=0: row[1]=angle uCur.updateRow(row) # Add 'ANGLE_BOUNDARY' field to boundary segment within route buffer and populate it with the angle to the true north(= 0 degree) arcpy.AddField_management(boundary_border_within_buffer,"ANGLE_BOUNDARY","DOUBLE") with 
arcpy.da.UpdateCursor(boundary_border_within_buffer,("SHAPE@","ANGLE_BOUNDARY")) as uCur: for row in uCur: shape = row[0] x_first = shape.firstPoint.X y_first = shape.firstPoint.Y x_last = shape.lastPoint.X y_last = shape.lastPoint.Y angle = calculate_angle(x_first,y_first,x_last,y_last) if angle: row[1]=angle uCur.updateRow(row) del uCur # locate boundary segment within buffer along candidate border route. # assuming that if the boundary segment can't be located along its corresponding route, these two might have high angles. boundary_along_candidate_border_route = os.path.join(workspace,"{0}_boundary_along_candidate_{1}_border_route".format(boundary,boundary)) arcpy.LocateFeaturesAlongRoutes_lr(boundary_border_within_buffer,candidate_border_route,"SEGMENT_ID_ALL_CANDIDATES",buffer_size,\ boundary_along_candidate_border_route,"{0} {1} {2} {3}".format("RID","LINE","FMEAS","TMEAS")) arcpy.JoinField_management(boundary_along_candidate_border_route, "RID", candidate_border_route, "SEGMENT_ID_ALL_CANDIDATES", ["ANGLE_ROUTE"]) positive_candidate_border_route = [] with arcpy.da.SearchCursor(boundary_along_candidate_border_route,("RID","ANGLE_ROUTE","ANGLE_BOUNDARY")) as sCur: for row in sCur: sid = str(row[0]) angle_route = row[1] angle_boundary = row[2] if angle_route and angle_boundary: delta_angle = abs(angle_route-angle_boundary) # get real intersecting angle if delta_angle > 90 and delta_angle <= 270: delta_angle = abs(180 - delta_angle) elif delta_angle > 270: delta_angle = 360 - delta_angle else: pass # filter out negative candidate border route if delta_angle <= high_angle_threshold: if sid not in positive_candidate_border_route: positive_candidate_border_route.append(sid) del sCur candidate_border_route_lyr = "in_memory\\candidate_border_route_lyr" arcpy.MakeFeatureLayer_management(candidate_border_route, candidate_border_route_lyr) candidate_border_route_positive = os.path.join(workspace,"candidate_{0}_border_route_positive".format(boundary)) where_clause = 
"\"{0}\" IN ({1})".format("OBJECTID",",".join(positive_candidate_border_route)) arcpy.SelectLayerByAttribute_management(candidate_border_route_lyr, "NEW_SELECTION", where_clause) arcpy.CopyFeatures_management(candidate_border_route_lyr,candidate_border_route_positive) candidate_border_route_negative = os.path.join(workspace,"candidate_{0}_border_route_negative".format(boundary)) where_clause = "\"{0}\" NOT IN ({1})".format("OBJECTID",",".join(positive_candidate_border_route)) arcpy.SelectLayerByAttribute_management(candidate_border_route_lyr, "NEW_SELECTION", where_clause) arcpy.CopyFeatures_management(candidate_border_route_lyr,candidate_border_route_negative) ################################################################################################################ ################################################################################################################ # get left, right boundary topology of positive candidate border route # handle candidate border route segment with different L/R boundary id by offset arcpy.AddMessage("Calculating L/R boundary topology of positive candidate border route...") # generate offset around boundary boundary_border_offset= os.path.join(workspace,"{0}_{1}".format(boundary,"boundary_offset")) arcpy.Buffer_analysis(boundary_border_dissolved, boundary_border_offset, offset, "FULL", "ROUND") # get intersections between positive candidate border route and boundary offset candidate_border_route_positive_boundary_offset_intersections = os.path.join(workspace,"candidate_{0}_border_route_positive_{1}_offset_intersections".format(boundary,boundary)) arcpy.Intersect_analysis([candidate_border_route_positive,boundary_border_offset], candidate_border_route_positive_boundary_offset_intersections, "ALL", "", "point") # split positive candidate border route by intersections generated above candidate_border_route_positive_splitted_by_offset = 
os.path.join(workspace,"candidate_{0}_border_route_positive_splitted_by_offset".format(boundary)) arcpy.SplitLineAtPoint_management(candidate_border_route_positive,candidate_border_route_positive_boundary_offset_intersections,\ candidate_border_route_positive_splitted_by_offset,xy_resolution) # Add 'SEGMENT_ID_POSITIVE_CANDIDATES' field to splitted positive candidate route and populate it with 'OBJECTID' arcpy.AddField_management(candidate_border_route_positive_splitted_by_offset,"SEGMENT_ID_POSITIVE_CANDIDATES","LONG") arcpy.CalculateField_management(candidate_border_route_positive_splitted_by_offset, "SEGMENT_ID_POSITIVE_CANDIDATES", "!OBJECTID!", "PYTHON") # get positive candidate border route segments that within boundary offset candidate_border_route_positive_within_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset".format(boundary)) candidate_border_route_positive_splitted_by_offset_lyr = "in_memory\\candidate_{0}_border_route_positive_splitted_by_offset_lyr".format(boundary) arcpy.MakeFeatureLayer_management(candidate_border_route_positive_splitted_by_offset, candidate_border_route_positive_splitted_by_offset_lyr) arcpy.SelectLayerByLocation_management (candidate_border_route_positive_splitted_by_offset_lyr, "WITHIN", boundary_border_offset) arcpy.CopyFeatures_management(candidate_border_route_positive_splitted_by_offset_lyr,candidate_border_route_positive_within_offset) # get positive candidate border route segments that out of boundary offset candidate_border_route_positive_outof_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset".format(boundary)) arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_splitted_by_offset_lyr, "SWITCH_SELECTION") arcpy.CopyFeatures_management(candidate_border_route_positive_splitted_by_offset_lyr,candidate_border_route_positive_outof_offset) # generate offset around positive candidate border route within boundary offset # 
arcpy.AddMessage("generate offset around boundary") candidate_border_route_positive_within_offset_buffer= os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset_buffer".format(boundary)) arcpy.Buffer_analysis(candidate_border_route_positive_within_offset, candidate_border_route_positive_within_offset_buffer, offset, "FULL", "FLAT") # clip boundary segments within offset distance from positive candidate route that within boundary offset boundary_border_within_positive_candidate_border_route_buffer_multipart = "in_memory\\{0}_boundary_within_positive_candidate_border_route_buffer_multipart".format(boundary) boundary_border_within_positive_candidate_border_route_buffer = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer".format(boundary)) arcpy.Clip_analysis(boundary_border_dissolved, candidate_border_route_positive_within_offset_buffer, boundary_border_within_positive_candidate_border_route_buffer_multipart) arcpy.MultipartToSinglepart_management(boundary_border_within_positive_candidate_border_route_buffer_multipart, boundary_border_within_positive_candidate_border_route_buffer) # get endpoints of boundary border within offset buffer of splitted positive candidate border routes boundary_border_within_positive_candidate_border_route_buffer_endpoints = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer_endpoints".format(boundary)) arcpy.FeatureVerticesToPoints_management(boundary_border_within_positive_candidate_border_route_buffer,\ boundary_border_within_positive_candidate_border_route_buffer_endpoints,"BOTH_ENDS") arcpy.DeleteIdentical_management(boundary_border_within_positive_candidate_border_route_buffer_endpoints, ["Shape"]) # split boundary border within offset buffer of splitted positive candidate border routes and endpoints location # then delete identical shape boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints = 
os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints".format(boundary)) arcpy.SplitLineAtPoint_management(boundary_border_within_positive_candidate_border_route_buffer,boundary_border_within_positive_candidate_border_route_buffer_endpoints,\ boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,xy_resolution) arcpy.DeleteIdentical_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, ["Shape"]) # Add 'SEGMENT_ID_BOUNDARY' field to boundary segments within offset distance from positive candidate route that within boundary offset and populate it with 'OBJECTID' arcpy.AddField_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,"SEGMENT_ID_BOUNDARY","LONG") arcpy.CalculateField_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, "SEGMENT_ID_BOUNDARY", "!OBJECTID!", "PYTHON") # locate boundary segments within offset distance of positive candidate route that within boundary offset along positive candidate route that within boundary offset boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route = os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route".format(boundary)) arcpy.LocateFeaturesAlongRoutes_lr(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,candidate_border_route_positive_within_offset,"SEGMENT_ID_POSITIVE_CANDIDATES",offset,\ boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"{0} {1} {2} {3}".format("RID","LINE","FMEAS","TMEAS")) # get left, right boundary topology of boundary within offset distance of positive candidate route that within boundary offset along positive candidate route that within boundary offset 
boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases= os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology_allcases".format(boundary,boundary)) arcpy.Identity_analysis(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, boundary, boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases,"ALL","","KEEP_RELATIONSHIPS") boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr = "in_memory\\{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology_allcases_lyr".format(boundary,boundary) arcpy.MakeFeatureLayer_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases, boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr) where_clause = "\"{0}\"<>0 AND \"{1}\"<>0".format("LEFT_{0}".format(boundary),"RIGHT_{0}".format(boundary)) arcpy.SelectLayerByAttribute_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr, "NEW_SELECTION", where_clause) boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology = os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology".format(boundary,boundary)) arcpy.CopyFeatures_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr,boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology) arcpy.JoinField_management(boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"SEGMENT_ID_BOUNDARY",\ boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology,"SEGMENT_ID_BOUNDARY",["LEFT_{0}".format(boundary_id_field),"RIGHT_{0}".format(boundary_id_field)]) 
arcpy.JoinField_management(candidate_border_route_positive_within_offset,"SEGMENT_ID_POSITIVE_CANDIDATES",\ boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"RID",["SEGMENT_ID_BOUNDARY","LEFT_{0}".format(boundary_id_field),"RIGHT_{0}".format(boundary_id_field)]) candidate_border_route_positive_within_offset_lyr = "in_memory\\candidate_{0}_border_route_positive_within_offset_lyr".format(boundary) arcpy.MakeFeatureLayer_management(candidate_border_route_positive_within_offset, candidate_border_route_positive_within_offset_lyr) where_clause = "\"{0}\"IS NOT NULL AND \"{1}\"IS NOT NULL".format("LEFT_{0}".format(boundary_id_field),"RIGHT_{0}".format(boundary_id_field)) arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_within_offset_lyr, "NEW_SELECTION", where_clause) candidate_border_route_positive_within_offset_with_polygon_topology = os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset_with_{1}_topology".format(boundary,boundary)) arcpy.CopyFeatures_management(candidate_border_route_positive_within_offset_lyr,candidate_border_route_positive_within_offset_with_polygon_topology) # get left, right boundary topology of candidate border route out of boundary offset candidate_border_route_positive_outof_offset_with_polygon_topology_allcases= os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset_with_{1}_topology_allcases".format(boundary,boundary)) arcpy.Identity_analysis(candidate_border_route_positive_outof_offset, boundary, candidate_border_route_positive_outof_offset_with_polygon_topology_allcases,"ALL","","KEEP_RELATIONSHIPS") candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr = "in_memory\\candidate_{0}_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr".format(boundary) arcpy.MakeFeatureLayer_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases, 
candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr) where_clause = "\"{0}\"<>0 AND \"{1}\"<>0".format("LEFT_{0}".format(boundary),"RIGHT_{0}".format(boundary)) arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr, "NEW_SELECTION", where_clause) candidate_border_route_positive_outof_offset_with_polygon_topology = os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset_with_{1}_topology".format(boundary,boundary)) arcpy.CopyFeatures_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr,candidate_border_route_positive_outof_offset_with_polygon_topology) # merge candidate_border_route_positive_with_polygon_topology = "candidate_{0}_border_route_positive_with_{1}_topology".format(boundary,boundary) arcpy.FeatureClassToFeatureClass_conversion(candidate_border_route_positive_outof_offset_with_polygon_topology,workspace,candidate_border_route_positive_with_polygon_topology) arcpy.Append_management([candidate_border_route_positive_within_offset_with_polygon_topology],candidate_border_route_positive_with_polygon_topology,"NO_TEST") ################################################################################################################ ################################################################################################################ arcpy.AddMessage("Populate route_border_rule_table...") # calculate from measure and to measure of candidate border route # arcpy.AddMessage("Calculating from measure and to measure of candidate border routes...") arcpy.AddGeometryAttributes_management(candidate_border_route_positive_with_polygon_topology, "LINE_START_MID_END") # get candidte border route segment geometry arcpy.AddField_management(candidate_border_route_positive_with_polygon_topology,"SEGMENT_GEOMETRY","TEXT","","",100) 
arcpy.CalculateField_management(candidate_border_route_positive_with_polygon_topology,"SEGMENT_GEOMETRY","!shape.type!","PYTHON") # sort candidate border route segments based on route id and from measure, orderly # arcpy.AddMessage("sort validated output got above based on route id and from measure, orderly") candidate_border_route_positive_with_polygon_topology_sorted = os.path.join(workspace,"candidate_{0}_border_route_positive_with_polygon_topology_sorted".format(boundary)) arcpy.Sort_management(candidate_border_route_positive_with_polygon_topology,candidate_border_route_positive_with_polygon_topology_sorted,[[route_id_field,"ASCENDING"],["START_M","ASCENDING"]]) # create route_border_rule_table if arcpy.Exists(route_border_rule_table): arcpy.Delete_management(route_border_rule_table) create_route_border_rule_table_schema(workspace,route_border_rule_table) else: create_route_border_rule_table_schema(workspace,route_border_rule_table) # populate route_border_rule_table iCur = arcpy.da.InsertCursor(route_border_rule_table,["ROUTE_ID","ROUTE_START_MEASURE","ROUTE_END_MEASURE","BOUNDARY_LEFT_ID",\ "BOUNDARY_RIGHT_ID","SEGMENT_GEOMETRY","EFFECTIVE_FROM_DT","EFFECTIVE_TO_DT"]) with arcpy.da.SearchCursor(candidate_border_route_positive_with_polygon_topology_sorted,[route_id_field,"START_M","END_M","LEFT_{0}".format(boundary_id_field),\ "RIGHT_{0}".format(boundary_id_field),"SEGMENT_GEOMETRY","START_DATE","END_DATE"]) as sCur: for row in sCur: iCur.insertRow(row) del sCur del iCur arcpy.CalculateField_management(route_border_rule_table, "BRP_PROCESS_DT", "'{0}'".format(date_string), "PYTHON") ################################################################################################################ arcpy.AddMessage("done!") return route_border_rule_table except Exception: # arcpy.AddMessage(traceback.format_exc()) sys.exit(traceback.format_exc()) return False
# Fragment of a geologic-map (GeMS/NCGMP09-style) polygon-building routine:
# rebuild map-unit polygons from the contacts-and-faults layer, then label
# them via inside points identity-tagged with the existing polygons.
# cafLayer, temporaryPolys, centerPoints, centerPoints2, inPolys, fds, debug,
# and the helpers (addMsgAndPrint, testAndDelete, fieldNameList, numberOfRows,
# tableDict) are defined earlier in the enclosing script.
arcpy.FeatureToPolygon_management(cafLayer,temporaryPolys)
if debug:
    addMsgAndPrint('temporaryPolys fields are '+str(fieldNameList(temporaryPolys)))
#make center points (within) from temporarypolys; INSIDE guarantees the label
#point falls within its source polygon
addMsgAndPrint(' Making '+centerPoints)
testAndDelete(centerPoints)
arcpy.FeatureToPoint_management(temporaryPolys, centerPoints, "INSIDE")
if debug:
    addMsgAndPrint('centerPoints fields are '+str(fieldNameList(centerPoints)))
# get rid of ORIG_FID field added by FeatureToPoint
arcpy.DeleteField_management(centerPoints,'ORIG_FID')
#identity center points with inpolys so each point inherits the attributes
#(MapUnit etc.) of the polygon it falls in; NO_FID drops the join FID fields
testAndDelete(centerPoints2)
arcpy.Identity_analysis(centerPoints, inPolys, centerPoints2, 'NO_FID')
# delete points with MapUnit = '' (points that fell in unlabeled areas)
## first, make layer view
addMsgAndPrint(" Deleting centerPoints2 MapUnit = '' ")
sqlQuery = arcpy.AddFieldDelimiters(fds,'MapUnit') + "= '' "
testAndDelete('cP2Layer')
arcpy.MakeFeatureLayer_management(centerPoints2,'cP2Layer',sqlQuery)
## then delete features
if numberOfRows('cP2Layer') > 0:
    arcpy.DeleteFeatures_management('cP2Layer')
#adjust center point fields (delete extra, add any missing. Use NCGMP09_Definition as guide)
## get list of fields in centerPoints2
cp2Fields = fieldNameList(centerPoints2)
## add fields not in MUP as defined in Definitions
fieldDefs = tableDict['MapUnitPolys']
# Fragment of a per-tile workflow: intersect survey diagonals with contour
# lines, explode the multipoint result to single points, tag each point with
# its slope category via Identity, then summarize intersection counts.
# tileNum, diagonals, inputContours, intersects, slopeClip, scratch, and the
# deleteme cleanup list come from the enclosing script.
arcpy.AddMessage(str(tileNum) + " Intersecting diagonals with contours...")
arcpy.Intersect_analysis([diagonals, inputContours], intersects, "ONLY_FID", "#", "point")
deleteme.append(intersects)
# for some reason Intersect makes multipoints
intersectSingle = os.path.join(scratch, "intersectSingle")
arcpy.MultipartToSinglepart_management(intersects, intersectSingle)
deleteme.append(intersectSingle)
#Identity of points with slope categories
pointIdentity = os.path.join(scratch, "pointIdentity")
arcpy.AddMessage(str(tileNum) + " Identity slope categories and intersections...")
arcpy.Identity_analysis(intersectSingle, slopeClip, pointIdentity, "ALL")
deleteme.append(pointIdentity)
# Get intercept statistics from intersect points: count intersections per
# diagonal (ID) and join the count back onto the points
arcpy.AddMessage(str(tileNum) + " Getting intersection statistics...")
stats1 = os.path.join(scratch, "stats1")
arcpy.Statistics_analysis(pointIdentity, stats1, [["ID", "COUNT"]], "ID")
deleteme.append(stats1)
arcpy.JoinField_management(pointIdentity, "ID", stats1, "ID", "FREQUENCY; COUNT_ID")
# mean intersection count per slope category
stats2 = os.path.join(scratch, "stats2")
arcpy.Statistics_analysis(pointIdentity, stats2, [["COUNT_ID", "MEAN"]], "SlopeCat")
deleteme.append(stats2)
def site_trees(community_spec):
    """Site candidate tree-planting points for one community.

    ``community_spec`` is a ``(community_name, acres, community_id)`` tuple.
    For each plantable-space polygon the routine lays a square mesh
    (MIN_DIAMETER spacing), places trees by size category from the polygon
    center outward, identity-tags each point with land use and public/private
    ownership, repairs unrecognized land uses, downsizes overlapping trees,
    appends the points to the community trees feature class, and records
    per-size / per-land-use / public-private statistics.

    Returns None; all results are written to feature classes and the
    community stats table.  Relies on module-level constants and helpers
    (pp_c, pp.stats, MIN_DIAMETER, TREE_CATEGORIES, the __-prefixed mesh
    helpers, etc.) defined elsewhere in this file.
    """
    community_name, acres, community_id = community_spec
    pp_c.log_info('Siting trees.', community_name)
    # Accumulators indexed by tree-size code, land-use code, and is_public flag.
    size_stats = [0, 0, 0]
    landuse_stats = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    public_private_stats = [0, 0]
    input_fc = pp_c.get_community_fc_name(community_spec[0], pp_c.COMMUNITY_SPACES_FC)
    output_fc = pp_c.get_community_fc_name(community_spec[0], pp_c.COMMUNITY_TREES_FC)
    # Start from a fresh output feature class each run.
    if arcpy.Exists(output_fc):
        arcpy.Delete_management(output_fc)
    arcpy.CreateFeatureclass_management(os.path.dirname(output_fc), os.path.basename(output_fc), "POINT", pp_c.TREES_TEMPLATE_FC, "DISABLED", "DISABLED", pp_c.TREES_TEMPLATE_FC)
    intermediate_output_gdb = pp_c.prepare_intermediate_output_gdb(pp_c.USE_IN_MEM)
    intermediate_trees = pp_c.get_intermediate_name(intermediate_output_gdb, 'trees_int', community_id, pp_c.USE_IN_MEM)
    intermediate_trees_lu = pp_c.get_intermediate_name(intermediate_output_gdb, 'tlu_int', community_id, pp_c.USE_IN_MEM)
    intermediate_trees_lu_public = pp_c.get_intermediate_name(intermediate_output_gdb, 'tlpub_int', community_id, pp_c.USE_IN_MEM)
    arcpy.CreateFeatureclass_management(os.path.dirname(intermediate_trees), os.path.basename(intermediate_trees), "POINT", pp_c.TREES_TEMPLATE_FC, "DISABLED", "DISABLED", pp_c.TREES_TEMPLATE_FC)
    # land_use is re-attached below by the Identity against the land-use layer.
    arcpy.DeleteField_management(intermediate_trees, ['land_use'])
    community_stats_tbl = pp.stats.prepare_community_stats_tbl(community_name, community_id, pp_c.COMMUNITY_TREE_STATS_TBL, pp_c.TREE_STATS_SPEC)
    if WRITE_TO_DEBUG_MESH_FC:
        arcpy.management.DeleteFeatures(MESH_FC)
    pp_c.log_info("Calculating points", community_name)
    # Skip slivers too small to hold a tree (square meters, presumably — TODO confirm units).
    query = "Shape_Area > 2.5"
    with arcpy.da.SearchCursor(input_fc, ['OBJECTID', 'SHAPE@', 'community_id'], query) as cursor:
        for oid, polygon, community in cursor:
            x_min, y_min, x_max, y_max = polygon.extent.XMin, polygon.extent.YMin, polygon.extent.XMax, polygon.extent.YMax
            center = arcpy.Point((x_min + x_max) / 2, (y_min + y_max) / 2)
            # Number of concentric mesh "tiers" needed to cover the extent.
            tiers = math.ceil(max((x_max - x_min) / 2, (y_max - y_min) / 2) / MIN_DIAMETER)
            # The mesh origin is the NW corner and indexed row major as [row][col]
            mesh_row_dim, mesh_col_dim = __get_mesh_dim(polygon, center, tiers)
            nw_corner = arcpy.Point(center.X - (mesh_col_dim * MIN_DIAMETER) / 2, center.Y + (mesh_row_dim * MIN_DIAMETER) / 2)
            center_row, center_col = __point_to_mesh(center, nw_corner)
            mesh_type = __get_mesh_algorithm(mesh_row_dim, mesh_col_dim, polygon)
            if mesh_type == MESH_ALGORITHM_SMALL:
                # Fresh all-VACANT grid; [m[:] ...] deep-copies each row.
                mesh = [m[:] for m in [[VACANT] * mesh_col_dim] * mesh_row_dim]
            elif mesh_type == MESH_ALGORITHM_BIG:
                mesh = __get_mesh(mesh_row_dim, mesh_col_dim, polygon, nw_corner, input_fc)
            plant_points = dict()
            # Place each tree category in turn, spiraling outward tier by tier
            # from the polygon center.
            for tree_category in TREE_CATEGORIES:
                for tier_idx in range(0, tiers + 1):
                    for row, col in __get_tier_vacancies(center_row, center_col, tier_idx, mesh, mesh_row_dim, mesh_col_dim):
                        fp = __get_footprint(row, col, TREE_FOOTPRINT_DIM[tree_category], mesh_row_dim, mesh_col_dim)
                        if __is_footprint_clean(mesh, *fp):
                            if is_point_in_polygon(row, col, polygon, nw_corner, mesh, mesh_type, plant_points):
                                __occupy_footprint(mesh, *fp, row, col, tree_category)
            with arcpy.da.InsertCursor(intermediate_trees, ['SHAPE@', 'code', 'p_oid', 'community_id']) as cursor:
                for row, col in plant_points.keys():
                    cursor.insertRow([plant_points[(row, col)], mesh[row][col], oid, community])
            if WRITE_TO_DEBUG_MESH_FC:
                # Dump the full occupancy grid for visual debugging.
                with arcpy.da.InsertCursor(MESH_FC, ['SHAPE@', 'code', 'row', 'col', 'x', 'y', 'dim']) as cursor:
                    for r in range(0, mesh_row_dim):
                        for c in range(0, mesh_col_dim):
                            p = __mesh_to_point(r, c, nw_corner)
                            cursor.insertRow([p, mesh[r][c], r, c, p.X, p.Y, mesh_row_dim])
    pp_c.log_debug('Identify land use', community_name)
    arcpy.Identity_analysis(intermediate_trees, pp_c.LAND_USE_2015, intermediate_trees_lu, "ALL", "", "NO_RELATIONSHIPS")
    pp_c.delete([intermediate_trees])
    arcpy.management.AlterField(intermediate_trees_lu, 'LandUse', 'land_use')
    pp_c.log_debug('Identify public land', community_name)
    arcpy.Identity_analysis(intermediate_trees_lu, pp_c.PUBLIC_LAND, intermediate_trees_lu_public, "ONLY_FID", "", "NO_RELATIONSHIPS")
    pp_c.delete([intermediate_trees_lu])
    pp_c.log_debug('Populate the "is_public" field', community_name)
    # Identity writes FID_<public-land fc> = -1 for points outside public land.
    arcpy.management.CalculateField(intermediate_trees_lu_public, pp_c.SPACES_PUBLIC_PRIVATE_COL, "is_public(!FID_%s!)" % (os.path.basename(pp_c.PUBLIC_LAND)), "PYTHON3", r"""def is_public (fid):
    if fid == -1:
        return 0
    else:
        return 1""", "SHORT")
    # __downsize (intermediate_output_gdb, intermediate_trees_lu_public, community_name, community_id)
    pp_c.log_debug('Find overlaps', community_name)
    overlap_oids = __find_overlaps(intermediate_output_gdb, intermediate_trees_lu_public, community_name, community_id)
    pp_c.log_debug('Collecting tree statistics, fixing bad land uses, and downsizing overlaps', community_name)
    big_to_medium, medium_to_small, small = 0, 0, 0
    with arcpy.da.UpdateCursor(intermediate_trees_lu_public, ['objectid', 'code', 'land_use', 'is_public', ]) as cursor:
        for oid, tree_size, land_use, is_public in cursor:
            if land_use not in pp_c.LANDUSE_DOMAIN.values():
                # Fix up unrecognized land use
                land_use = pp_c.LANDUSE_DOMAIN['Other']
                cursor.updateRow([oid, tree_size, land_use, is_public])
            if oid in overlap_oids:
                # Overlapping trees are stepped down one size class.
                if tree_size == BIG:
                    tree_size = MEDIUM
                    big_to_medium = big_to_medium + 1
                    cursor.updateRow([oid, tree_size, land_use, is_public])
                elif tree_size == MEDIUM:
                    tree_size = SMALL
                    medium_to_small = medium_to_small + 1
                    cursor.updateRow([oid, tree_size, land_use, is_public])
                else:
                    tree_size = SMALL
                    small = small + 1
            size_stats[tree_size] = size_stats[tree_size] + 1
            landuse_stats[land_use] = landuse_stats[land_use] + 1
            public_private_stats[is_public] = public_private_stats[is_public] + 1
    pp_c.log_debug("Updated feature class with new sizes. L->M=%i, M->S=%i, S=%i" % (big_to_medium, medium_to_small, small), community_name)
    pp_c.log_debug("Writing points to '%s'" % output_fc, community_name)
    arcpy.management.Append(intermediate_trees_lu_public, output_fc, "NO_TEST")
    pp_c.delete([intermediate_trees_lu_public])
    # landuse_stats[1:]: land-use code 0 is unused, so it is not reported.
    pp.stats.update_stats(community_stats_tbl, community_id, size_stats + landuse_stats[1:] + public_private_stats, pp_c.TREE_STATS_SPEC)
    return
def EliminatebyGrid(tsa, vri, outFC, fraction=2):
    """Eliminate sliver polygons from the VRI coverage one grid tile at a time.

    Splits the TSA extent into a grid of tiles (each roughly 1/``fraction`` of
    the TSA width/height), identity-tags the VRI polygons with the grid pages,
    then runs Eliminate per page so very large inputs can be processed
    piecewise.  Intermediate data lives in a scratch file GDB under %TEMP%;
    the per-page Eliminate outputs are accumulated in ``outElims``.

    Python 2 / old-geoprocessor code: note the ``gp`` alias alongside
    ``arcpy`` and the ``except Exception, e`` syntax.  ``outFC`` is not used
    in this visible body — presumably consumed after the loop; TODO confirm.
    Relies on module-level helpers: DeleteExists, CreateTempDB, WriteLog,
    GetAreaField, processGroup, kennyloggins (the log file handle).
    """
    #--Need a temporary DB to assemble this stuff
    DeleteExists(os.path.join(os.environ['TEMP'], 'ElimTemp{0}.gdb'.format(processGroup)))
    elimDB = CreateTempDB(os.environ['TEMP'], name='ElimTemp{0}'.format(processGroup))
    #--Get Extents of Grids to create as fraction of TSA width, height
    lyrTSA = 'lyrTSA'
    gp.MakeFeatureLayer_management(tsa, lyrTSA)
    desc = gp.Describe(lyrTSA)
    ext = gp.Describe(lyrTSA).extent
    extW = ((ext.XMax - ext.XMin) / fraction) + 1  #--Add 1m to ensure we are not touching edge of grids
    extH = ((ext.YMax - ext.YMin) / fraction) + 1
    gridTemp = os.path.join(elimDB, 'Grid')
    idTemp = os.path.join(elimDB, 'VRI_ID')
    #WriteLog(kennyloggins, 'extW - {0}\n'.format(str(extW)), True)
    #WriteLog(kennyloggins, 'extH - {0}\n'.format(str(extH)), True)
    gp.GridIndexFeatures_cartography(gridTemp, tsa, "INTERSECTFEATURE", "NO_USEPAGEUNIT", polygon_width=extW, polygon_height=extH)
    # Tag each VRI polygon with the grid page it falls in (1 m cluster tolerance).
    gp.Identity_analysis(vri, gridTemp, idTemp, "ALL", 1)
    outElims = []
    with arcpy.da.SearchCursor(gridTemp, ['SHAPE@', 'PageName']) as cursor:
        for row in cursor:
            try:
                pg = row[1]
                WriteLog(kennyloggins, '----Doing Sub-Eliminate on - {0}\n'.format(str(pg)), True)
                lyrIDTemp = 'lyrIDTemp'
                lyrGridTemp = 'lyrGridTemp'
                outGrid = os.path.join(elimDB, 'Temp_{0}_1Grid'.format(pg))
                outElim = os.path.join(elimDB, 'Temp_{0}_2Elim'.format(pg))
                # Work on just this page's polygons, constrained to its extent.
                arcpy.MakeFeatureLayer_management(idTemp, lyrIDTemp, "PageName = '{0}'".format(pg))
                arcpy.env.extent = arcpy.Describe(lyrIDTemp).extent
                arcpy.CopyFeatures_management(lyrIDTemp, outGrid)
                arcpy.Delete_management(lyrIDTemp)
                arcpy.MakeFeatureLayer_management(outGrid, lyrGridTemp)
                # Polygons <= 0.5 ha (area field is in m^2) are the slivers to remove.
                arcpy.SelectLayerByAttribute_management(lyrGridTemp, "NEW_SELECTION", "({0}/10000) <= 0.5".format(GetAreaField(outGrid)))
                # ex_features=gridTemp keeps slivers from being merged across page borders.
                arcpy.Eliminate_management(lyrGridTemp, outElim, "LENGTH", ex_features=gridTemp)
                outElims.append(outElim)
                arcpy.Delete_management(lyrGridTemp)
                arcpy.Delete_management(outGrid)
                WriteLog(kennyloggins, '----Done Sub-Eliminate - {0}\n'.format(str(outElims)), True)
            except Exception, e:
                # Best-effort per page: log the failure and continue with the next tile.
                WriteLog(kennyloggins, '***Error in Grid by Fraction - {0}\n'.format(str(e)), True)
# Fragment of a lakes/roads metrics script: project the line features to
# Albers, add a per-zone hectares field, crack the lines at zone boundaries
# with Identity, and recompute segment lengths.  lines, zones, scratch,
# albers, and topoutfolder come from the enclosing script.
lines_sr = arcpy.Describe(lines)
spatialRefLines = lines_sr.SpatialReference
arcpy.Project_management(lines, os.path.join(scratch, "lines"), albers, '', spatialRefLines)
arcpy.env.workspace = scratch
arcpy.RefreshCatalog(topoutfolder)
# Add hectares field to zones
arcpy.AddField_management("zones", "ZoneAreaHa", "DOUBLE")
arcpy.CalculateField_management("zones", "ZoneAreaHa", "!shape.area@hectares!", "PYTHON")
arcpy.RefreshCatalog(topoutfolder)
# Perform identity analysis to join fields and crack roads at polygon
# boundaries.  The operation is deliberately attempted twice: the first
# failure is swallowed (the output folder sometimes locks, e.g. antivirus
# scans), and only a second failure is reported to the user.
try:
    arcpy.Identity_analysis("lines", "zones", "lines_identity")
except:
    pass
arcpy.RefreshCatalog(topoutfolder)
try:
    arcpy.Identity_analysis("lines", "zones", "lines_identity")
except:
    arcpy.AddMessage(
        "The output location is locking up and not allowing output to be written to it. Try it again with antivirus off and/or in a different location."
    )
    pass
# Recalculate lengths
arcpy.AddField_management("lines_identity", "LengthM", "DOUBLE")
arcpy.CalculateField_management("lines_identity", "LengthM", '!shape.length@meters!', "PYTHON")
# Fragment of step 3 of a parcel-reconciliation toolbox script: Identity the
# initial database (csk) against the current database (xzk), flag parcels
# that fall outside the initial extent, collect matched parcels, and restore
# the data.  `enviroment` and the helpers (getCskArea, judgeError,
# collectCskData) are defined upstream.  The Chinese AddMessage strings are
# user-facing progress messages and must not be altered.
xzkpath = arcpy.GetParameterAsText(1)
cskpath = arcpy.GetParameterAsText(2)
arcpy.env.workspace = enviroment
arcpy.env.overwriteOutput = True
indentitypath = "indentitypath_3"
errorpath = "error_3"
# compute initial-database ("3_计算初始库面积": calculate csk area)
arcpy.AddMessage("3_计算初始库面积")
getCskArea(cskpath)
# Identity analysis (0.01 cluster tolerance, workspace linear units)
arcpy.AddMessage("3_标识分析")
arcpy.Identity_analysis(cskpath, xzkpath, indentitypath, cluster_tolerance="0.01")
# Check whether any error parcels fall outside the initial-database extent
arcpy.AddMessage("3_判断初始库和现状库是否套合")
judgeError(xzkpath, indentitypath, errorpath)
# Collect current-database parcel data, with a step progressor sized to the
# identity output row count
arcpy.AddMessage("3_收集现状库图斑数据")
arcpy.SetProgressor('step', '3_收集现状库图斑数据', 0, int(arcpy.GetCount_management(indentitypath).getOutput(0)), 1)
matchedDataDict = collectCskData(indentitypath)
# Restore the data
arcpy.AddMessage("3_还原数据")
"", "", "", "NULLABLE", "NON_REQUIRED", "") print(arcpy.GetMessages()) arcpy.AddField_management("streetSelectLayer", "SIGNAL_ENG_AREA", "TEXT", "", "", 20, "", "NULLABLE", "NON_REQUIRED", "") print(arcpy.GetMessages()) arcpy.AddField_management("streetSelectLayer", "TRANS_ENG_AREA", "TEXT", "", "", 20, "", "NULLABLE", "NON_REQUIRED", "") print(arcpy.GetMessages()) # STREETS LAYER TO ASMP POLYGONS print("\n" + "Make Feature Layer: ASMP Polygons Layer") arcpy.MakeFeatureLayer_management(asmpPolys, "asmpPolysLayer", "", "", "") print(arcpy.GetMessages()) print("\n" + "Identity: Between Street Select and ASMP Polygons") arcpy.Identity_analysis("streetSelectLayer", "asmpPolysLayer", streetSelectPhb, "ONLY_FID", "", "NO_RELATIONSHIPS") print(arcpy.GetMessages()) print("\n" + "Make Feature Layer: Street Select PHB") arcpy.MakeFeatureLayer_management(streetSelectPhb, "streetSelectPhbLayer", "", "", "") print(arcpy.GetMessages()) print( "\n" + "Delete Fields: Extra fields that were created from Identity with ASMP Polygons" ) arcpy.DeleteField_management( "streetSelectPhbLayer", ["FID_Street_Select_ASMP", "PHB_STREET", "FID_asmp_polygons"]) print(arcpy.GetMessages())
def cambia_caracteres(infea):
    """Replace accented/special characters in *infea* using dic_acentos.

    ``dic_acentos`` (defined elsewhere in this script) maps each special
    character to its replacement; every mapping is applied in turn.
    """
    for xx in dic_acentos:
        # loop that replaces the letters with their special-character substitutes
        infea = infea.replace(xx, dic_acentos[xx])
    return infea


# Script body: read the tool parameters, sanitize every user-supplied
# path/name, and run Identity as a 64-bit background-geoprocessing helper.
# (Python 2: `print` statements and `e.message` below.)
infea = arcpy.GetParameterAsText(0)
segunda_capa = arcpy.GetParameterAsText(1)
join_attributes = arcpy.GetParameterAsText(2)
gdb_salida = arcpy.GetParameterAsText(3)
capa_salida = arcpy.GetParameterAsText(4)
infea = cambia_caracteres(infea)
segunda_capa = cambia_caracteres(segunda_capa)
gdb_salida = cambia_caracteres(gdb_salida)
capa_salida = cambia_caracteres(capa_salida)
arcpy.env.workspace = gdb_salida
if __name__ == '__main__':
    print "Ejecutando identity a 64bits ...."
    print infea, segunda_capa, capa_salida, gdb_salida
    arcpy.Identity_analysis(in_features=infea, identity_features=segunda_capa, out_feature_class=gdb_salida + "\\" + capa_salida, join_attributes=join_attributes)
# NOTE(review): this handler belongs to a try block that lies outside this
# chunk (truncated in the source); it pauses the console so the error stays
# visible when run in a terminal window.
except exceptions.Exception as e:
    print e.__class__, e.__doc__, e.message
    os.system("pause")
def rfpRoute(inputRFPSites):
    """Route RFP sites to hub/user facilities over the fiber network.

    Locates assets against the network, runs the Backhaul toolbox
    optimization, cleans and de-duplicates the resulting routes, classifies
    each segment as Existing ('E') or New ('N') fiber via Identity against
    the 'fiber' layer, attributes site name / lateral type / length / KML
    folder path, and writes per-site summary totals via summaryCSV.

    Returns the name of the final routes feature class, or the sentinel
    string "noHub" when no hub site is flagged.  Relies on script-level
    globals: userFacilities, inputNetwork, outputLocation, scriptLocation,
    backRoutes, locRemoteAssets, locFixedAssets, locNearTable, rfpGroup,
    siteName, siteNameField, summaryCSV.  Python 2 code (uses basestring).
    NOTE(review): indentation reconstructed from a collapsed source — the
    statement order is preserved verbatim; confirm loop/branch nesting
    against the original file.
    """
    if userFacilities:
        ##-- User has provided a facilities layer to route to rather than a hub --##
        arcpy.AddMessage('User Facility layer has been provided...')
        arcpy.MakeFeatureLayer_management(userFacilities, 'hubSites')
        hubNum = int(arcpy.GetCount_management('hubSites').getOutput(0))
        arcpy.AddMessage('Found ' + str(hubNum) + ' user facilities.')
    else:
        ##--- Select hub site ---##
        arcpy.MakeFeatureLayer_management(inputRFPSites, 'hubSites')
        arcpy.SelectLayerByAttribute_management('hubSites', 'NEW_SELECTION', '"Hub" = 1')
        hubNum = int(arcpy.GetCount_management('hubSites').getOutput(0))
        arcpy.AddMessage('Found ' + str(hubNum) + ' Hub Site')
        if hubNum < 1:
            # No hub flagged in the input — caller handles this sentinel.
            return "noHub"
    ##--- Locate Assets to hub site against the network ---##
    arcpy.ImportToolbox(scriptLocation + os.sep + 'Backhaul' + os.sep + 'Backhaul.pyt')
    arcpy.AddMessage('Beginning Locate Assets...')
    arcpy.LocateAssets_backhaul(inputRFPSites, 'hubSites', inputNetwork, outputLocation)
    arcpy.AddMessage('- Locate Assets completed successfully')
    ##--- Backhaul Optimization ---##
    arcpy.AddMessage('Creating Closest Facility layer...')
    backClosestFacility = arcpy.na.MakeClosestFacilityLayer(inputNetwork, "ClosestFacility", "LENGTH", "TRAVEL_TO", "", "", "", "ALLOW_UTURNS", "", "NO_HIERARCHY", "", "TRUE_LINES_WITH_MEASURES", "", "")
    arcpy.AddMessage('- Closest Facility layer created successfully')
    arcpy.ImportToolbox(scriptLocation + os.sep + 'Backhaul' + os.sep + 'Backhaul.pyt')
    arcpy.AddMessage('Beginning Backhaul Optimization...')
    arcpy.BackhaulAssets_backhaul(locRemoteAssets, locFixedAssets, locNearTable, backClosestFacility, outputLocation, "50", "10", "TRUE")
    arcpy.AddMessage('- Backhaul Optimization completed successfully')
    ##--- Cleanup the routes ---##
    # Intersect + Erase + Merge breaks overlapping segments apart so that
    # DeleteIdentical can drop the exact duplicates.
    arcpy.AddMessage('Cleaning up the routes...')
    arcpy.Intersect_analysis(backRoutes, 'routes_intersected', 'ALL', '', 'INPUT')
    arcpy.AddMessage('- Routes intersected.')
    arcpy.Erase_analysis(backRoutes, 'routes_intersected', 'routes_erased', '')
    arcpy.AddMessage('- Overlapping routes erased.')
    arcpy.Merge_management(['routes_intersected', 'routes_erased'], 'routes_cleaned')
    arcpy.AddMessage('- Completed cleaning routes.')
    arcpy.DeleteIdentical_management('routes_cleaned', 'Shape', '', '')
    arcpy.AddMessage('- Duplicate features removed.')
    ##--- Determine which routes are new versus existing ---##
    arcpy.AddMessage('Determining New versus Existing routes...')
    arcpy.Identity_analysis('routes_cleaned', 'fiber', 'routes_identity', 'ONLY_FID')
    arcpy.MakeFeatureLayer_management('routes_identity', 'ident')
    # Identity FID <> -1 means the segment coincides with existing fiber.
    selection = "\"FID_FIBERCABLE_forMultimodal\" <> -1"
    arcpy.SelectLayerByAttribute_management('ident', "NEW_SELECTION", selection)
    arcpy.AddField_management('ident', 'Status', 'Text', field_length=5)
    arcpy.CalculateField_management('ident', 'Status', "'E'", 'PYTHON')
    arcpy.SelectLayerByAttribute_management('ident', 'CLEAR_SELECTION')
    selection = "\"FID_FIBERCABLE_forMultimodal\" = -1"
    arcpy.SelectLayerByAttribute_management('ident', "NEW_SELECTION", selection)
    arcpy.CalculateField_management('ident', 'Status', "'N'", 'PYTHON')
    arcpy.SelectLayerByAttribute_management('ident', 'CLEAR_SELECTION')
    arcpy.Dissolve_management('ident', 'routes_dissolve', ['FID_routes_cleaned', 'Status'])
    arcpy.MakeFeatureLayer_management('routes_dissolve', 'routes')
    arcpy.AddMessage('- Delineated existing routes from new routes.')
    #arcpy.DeleteField_management('routes',['FacilityID','FacilityRank','Name','IncidentCurbApproach','FacilityCurbApproach','IncidentID','Total_Length','startID','endID','startAsset','endAsset','startName','endName','FID_routes_cleaned','FID_FIBERCABLE_forMultimodal'])
    arcpy.DeleteField_management('routes', ['FacilityID', 'FacilityRank', 'Name', 'IncidentCurbApproach', 'FacilityCurbApproach', 'IncidentID', 'Total_Length', 'startID', 'endID', 'startAsset', 'endAsset', 'startName', 'endName'])
    arcpy.AddMessage('- Removed unnecessary fields.')
    ##--- Copy the sites and facilities to the output gdb ---##
    arcpy.AddMessage('Finalizing output data...')
    arcpy.CopyFeatures_management(rfpGroup, 'parent_sites_' + siteName)
    arcpy.AddMessage('- Copied RFP Sites.')
    arcpy.SelectLayerByLocation_management('hubSites', 'INTERSECT', 'routes')
    arcpy.CopyFeatures_management('hubSites', 'hubs_' + siteName)
    arcpy.AddMessage('- Copied CO Facilities.')
    ##--- Populate SiteName attribute for the Lateral Segments ---##
    arcpy.AddMessage('Calculating route attributes...')
    arcpy.AddField_management('routes', 'Site_Name', 'Text', field_length=255)
    arcpy.AddField_management('routes', 'Type', 'Text', field_length=5)
    arcpy.AddField_management('routes', 'Length_mi', 'FLOAT', field_scale=4)
    arcpy.AddField_management('routes', 'Route_Name', 'Text', field_length=255)
    arcpy.AddField_management('routes', 'FolderPath', 'Text', field_length=255)
    arcpy.AlterField_management('routes', 'FID_routes_cleaned', 'FID_Routes', 'FID_Routes')
    arcpy.MakeFeatureLayer_management('parent_sites_' + siteName, 'sites')
    siteNum = int(arcpy.GetCount_management('sites').getOutput(0))
    cursor = arcpy.SearchCursor('sites')
    for row in cursor:
        # Site names may be text or numeric; quote only the text ones for SQL.
        if isinstance(row.getValue(siteNameField), basestring):
            singleSiteName = "'" + row.getValue(siteNameField) + "'"
        else:
            singleSiteName = str(int(row.getValue(siteNameField)))
        selection = "\"" + siteNameField + "\" = " + singleSiteName
        arcpy.SelectLayerByAttribute_management('sites', "NEW_SELECTION", selection)
        # Routes touching this site are its laterals ('L').
        arcpy.SelectLayerByLocation_management('routes', 'INTERSECT', 'sites')
        arcpy.CalculateField_management('routes', 'Site_Name', singleSiteName, 'PYTHON')
        arcpy.CalculateField_management('routes', 'Type', "'L'", 'PYTHON')
        arcpy.SelectLayerByAttribute_management('sites', 'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management('routes', 'CLEAR_SELECTION')
    # Anything still untyped did not intersect any site: shared lateral ('SL').
    selection = "\"Type\" Is NULL"
    arcpy.SelectLayerByAttribute_management('routes', "NEW_SELECTION", selection)
    arcpy.CalculateField_management('routes', 'Type', "'SL'", 'PYTHON')
    arcpy.AddMessage('- Calculated Site Information and Lateral Type.')
    arcpy.SelectLayerByAttribute_management('routes', 'CLEAR_SELECTION')
    arcpy.CalculateField_management('routes', 'Length_mi', "round(!shape.length@miles!,4)", "PYTHON_9.3")
    arcpy.AddMessage('- Calculated Route Segment Length.')
    # Route_Name / FolderPath use VB-syntax expressions ([field] & "text").
    arcpy.CalculateField_management("routes_dissolve", "Route_Name", "[FID_Routes]&\"^\" & [Type]&\"^\"& [Status]&\"^\"&[Length_mi]&\"^\"& [Site_Name]", "VB")
    arcpy.AddMessage('- Calculated Route Name.')
    calcVal = "\"Fiber/" + siteName + "/\"& [Route_Name]"
    arcpy.CalculateField_management("routes_dissolve", "FolderPath", calcVal, "VB")
    arcpy.AddMessage('- Calculated Folder Path.')
    ##--- Select by route type and total the length by type ---##
    summaryCSV(siteName, 'routes', siteNum)
    outputRoute = 'routes_' + siteName
    arcpy.Rename_management('routes_dissolve', outputRoute)
    return outputRoute
#Reference MXD and layers mxd = arcpy.mapping.MapDocument(relpath + r'\AgUse.mxd') df = arcpy.mapping.ListDataFrames(mxd, "agmap")[0] ParcelLayer = arcpy.mapping.ListLayers(mxd, "Parcel")[0] AgUseLayer = arcpy.mapping.ListLayers(mxd, "AgUse")[0] SoilLayer = arcpy.mapping.ListLayers(mxd, "Soil")[0] #Set definition query on Parcels using PIN for lyr in arcpy.mapping.ListLayers(mxd): if lyr.name == "Parcel": lyr.definitionQuery = "JOINPIN = " + PIN #Identity analysis creates the aguse from (Parcels, AgUse and Soil). #Add ACRES field and Calc based on shape.area) arcpy.Identity_analysis(ParcelLayer, AgUseLayer, r'in_memory\aguse', "ALL", "1 Feet", "NO_RELATIONSHIPS") arcpy.Identity_analysis(r'in_memory\aguse', SoilLayer, r'in_memory\aguse_soil', "ALL", "1 Feet", "NO_RELATIONSHIPS") arcpy.AddField_management(r'in_memory\aguse_soil', "ACRES", "DOUBLE", "#", "1", "#", "#", "NULLABLE", "NON_REQUIRED", "#") arcpy.CalculateField_management(r'in_memory\aguse_soil', "ACRES", "!shape.area@acres!", "PYTHON_9.3", "#") #Create Frequency table using (TYPE, MUSYM and Sum byACRES). Then Add Frequency table to mxd. arcpy.Frequency_analysis(r'in_memory\aguse_soil', r'in_memory\Freq',
# --- Fragment: address points -> congressional-district export (arcpy.da). ---
# Starts mid-structure: the opening of this per-county dict is above the
# visible chunk.  Relies on outer names: county_ids, uniqueRunNum,
# addressPoints, congressionalDistricts, addrPointsWithDistrict, addrFields,
# countyProject, countyProjectFields, countyFipsDomain, getFieldI.
    'MORGAN': [],
    'SALT LAKE': [],
    'DUCHESNE': [],
    'JUAB': [],
    'SAN JUAN': []
}

# Set with county_ids list
county_selection_where = None
if len(county_ids) > 0:
    # NOTE(review): the join separator "'','" produces e.g. ('1'','2') rather
    # than ('1','2') for multiple ids -- looks like a bug, though it is
    # harmless when county_ids has a single element.  Confirm the intended
    # SQL before changing.
    county_selection_where = "CountyID IN ('{}')".format("'','".join([str(c) for c in county_ids]))
address_layer = 'addrpoints_' + uniqueRunNum
arcpy.MakeFeatureLayer_management(addressPoints, address_layer, county_selection_where)
# Output in WGS84 lat/long via the NAD83->WGS84 (5) transformation.
arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)
arcpy.env.geographicTransformations = 'NAD_1983_To_WGS_1984_5'
# Identity attaches the congressional-district attributes to each address point.
arcpy.Identity_analysis(address_layer, congressionalDistricts, addrPointsWithDistrict)

# # Set with county_ids list
# county_selection_where = None
# if len(county_ids) > 0:
#     county_selection_where = "CountyID IN ('{}')".format("'','".join([str(c) for c in county_ids]))

with arcpy.da.SearchCursor(addrPointsWithDistrict, addrFields) as addrPointCursor, \
        arcpy.da.InsertCursor(countyProject, countyProjectFields) as countyProjectCursor:
    # outputCsv = csv.writer(outputFile)
    # outputCsv.writerow(countyProjectFields)
    # Build one output row per address point.  The fragment ends mid-loop, so
    # the remaining countyRow.append(...) calls are below the visible chunk.
    for addrRow in addrPointCursor:
        countyRow = []
        # County Project NAME
        countyRow.append(countyFipsDomain[int(addrRow[getFieldI('CountyID')])])
        # County Project COMPANYNAME
        countyRow.append('AGRC')
# --- Fragment: merge Priority Habitat Inventory (PHI) into OSMM land cover,
# one geodatabase per Local Authority District (LAD). ---
# Relies on outer names: process_PHI, LAD_names, folder, delete_landform,
# intersect_PHI, interpret_PHI, out_fc, MyFunctions.
# NOTE(review): original indentation was lost; the three `if` stages are
# reconstructed as siblings inside the per-LAD loop -- confirm against VCS.
if process_PHI:
    for LAD in LAD_names:
        arcpy.env.workspace = os.path.join(folder, LAD + ".gdb")
        if delete_landform:
            # Landform/cliff/slope/pylon features overlap the real land cover
            # polygons, so they are removed from OSMM before intersecting.
            print(" Deleting overlapping 'Landform' and 'Pylon' from OSMM for " + LAD)
            arcpy.MakeFeatureLayer_management("OSMM_LCM", "OSMM_layer")
            expression = "DescriptiveGroup LIKE '%Landform%' OR DescriptiveTerm IN ('Cliff','Slope','Pylon')"
            arcpy.SelectLayerByAttribute_management("OSMM_layer", where_clause=expression)
            arcpy.DeleteFeatures_management("OSMM_layer")
            arcpy.Delete_management("OSMM_layer")
        if intersect_PHI:
            # Identity keeps every OSMM feature and attaches PHI attributes
            # where they overlap ("NO_FID" = omit the PHI FID field).
            print ("Intersecting " + LAD)
            arcpy.Identity_analysis("OSMM_LCM", "PHI", out_fc, "NO_FID")
        if interpret_PHI:
            print ("Interpreting " + LAD)
            # Copy PHI habitat across, but not for manmade, gardens, water, unidentified PHI, wood pasture or OMHD (dealt with later)
            expression = "Make = 'Natural' AND DescriptiveGroup NOT LIKE '%water%' AND DescriptiveGroup NOT LIKE '%Water%' AND " \
                         "OSMM_hab <> 'Roadside - unknown surface' AND OSMM_hab <> 'Track' AND OSMM_hab <> 'Standing water' "
            expression2 = expression + " AND PHI IS NOT NULL AND PHI <> '' AND PHI NOT LIKE 'No main%' AND " \
                          "PHI NOT LIKE 'Wood-pasture%' AND PHI NOT LIKE 'Open Mosaic%'"
            MyFunctions.select_and_copy(out_fc, "Interpreted_habitat", expression2, "!PHI!")
            # Correction for traditional orchards in large gardens
            MyFunctions.select_and_copy(out_fc, "Interpreted_habitat", "PHI = 'Traditional orchard' AND OSMM_hab = 'Garden'",
                                        "'Traditional orchards'")
            # Other corrections / consolidations
            # (fragment ends here; the corrections themselves are below the
            # visible chunk)
# --- Fragment: tree-canopy clustering (Local Moran's I) and per-segmented-unit
# canopy coverage. ---
# Python 2 script (print statements).  Relies on outer names: Tree_Crowns,
# TreeDistance, Segmented_Units, log (an open log file).
# Cluster/Outlier analysis on nearest-tree distance.
arcpy.ClustersOutliers_stats(Tree_Crowns, "NEAR_DIST", TreeDistance, "INVERSE_DISTANCE", "EUCLIDEAN_DISTANCE", "NONE", "", "", "NO_FDR")
print "\nLocal Moran's I and Clustering has been identified using distance to nearest tree.\n"
log.write("\nLocal Moran's I and Clustering has been identified using distance to nearest tree:\n"+str(arcpy.GetMessages()))

##Use Local Moran's I to explore Gap-Crown ratio clustering
GCRatio = "Output\\COA\\GCRatio_LISA.shp"
arcpy.ClustersOutliers_stats(Tree_Crowns, "GapCrRatio", GCRatio, "INVERSE_DISTANCE", "EUCLIDEAN_DISTANCE", "NONE", "", "", "NO_FDR")
print "\nLocal Moran's I and Clustering has been identified using each tree's gap to crown distance ratio.\n"
log.write("\nLocal Moran's I and Clustering has been identified using gap-to-crown ratio:\n"+str(arcpy.GetMessages()))

#############################################################################
#############################################################################
####  CLASSIFY SEGMENTED UNITS BASED ON PERCENT OF UNITS THAT ARE TREED  ####
#############################################################################

##IDENTIFY TREE CROWNS BY SEGMENTED UNIT AND SU AREA
TreeSUIdentity = "Processing\\TreeSUIdentity.shp"
# Identity splits crowns along segmented-unit (SU) boundaries and attaches
# the SU attributes to each crown piece.
arcpy.Identity_analysis(Tree_Crowns, Segmented_Units, TreeSUIdentity, "ALL", "", "NO_RELATIONSHIPS")
print "\nTree crowns have been identified by segmented unit.\n"
log.write("\nTree crowns have been identified by segmented unit.\n"+str(arcpy.GetMessages()))

##SUMMARIZE CROWN AREA BY SEGMENTED UNIT
AreaTreeToSU = "Processing\\AreaTreeToSU.dbf"
# Per SU (FID_SU): SUM of crown area (POLY_AREA) and MEAN of the SU's own
# area field (POLY_ARE_1).
arcpy.Statistics_analysis(TreeSUIdentity, AreaTreeToSU, "POLY_AREA SUM;POLY_ARE_1 MEAN", "FID_SU")
print "\nTree canopy area has been summed by segmented unit.\n"
log.write("\nTree canopy area has been summed by segmented unit.\n"+str(arcpy.GetMessages()))

##CALCULATE PERCENT OF SU COVERED BY TREE CROWN (PERCENT TREED)
fc2=AreaTreeToSU
newfield2="PCT_TREE"
fieldtype2="DOUBLE"
fieldname2=arcpy.ValidateFieldName(newfield2)
arcpy.AddField_management(fc2,fieldname2,fieldtype2)
# Percent treed = summed crown area / SU area.  SUM_POLY_A / MEAN_POLY_ are
# the dBASE-truncated names of the summary fields created above.
arcpy.CalculateField_management(fc2,fieldname2, "!SUM_POLY_A! / !MEAN_POLY_!", "PYTHON")
print "\nPercent of segmented units covered by tree crown calculated.\n"