# get administrative land arcpy.Clip_analysis('c_gadm', 'c_land', 'c_states') # create theissen polygons used to split slivers arcpy.Densify_edit('c_states', 'DISTANCE', '1 Kilometers') arcpy.FeatureVerticesToPoints_management( 'c_states', 'c_states_pts', 'ALL') # delete interior points for faster thiessen rendering arcpy.Dissolve_management('c_states', 'c_states_d') arcpy.MakeFeatureLayer_management('c_states_pts', 'lyr_c_states_pts') arcpy.SelectLayerByLocation_management( 'lyr_c_states_pts', 'WITHIN_CLEMENTINI', 'c_states_d') arcpy.DeleteFeatures_management('lyr_c_states_pts') # generate thiessen polygons of gadm for intersecting with land slivers arcpy.env.extent = 'c_eezland' arcpy.CreateThiessenPolygons_analysis( 'c_states_pts', 'c_states_t', 'ALL') arcpy.Dissolve_management('c_states_t', 'c_states_t_d', 'NAME_1') arcpy.RepairGeometry_management('c_states_t_d') # add detailed interior back arcpy.Erase_analysis('c_states_t_d', 'c_states', 'c_states_t_d_e') arcpy.Merge_management(['c_states', 'c_states_t_d_e'], 'c_states_t_d_e_m') arcpy.Dissolve_management('c_states_t_d_e_m', 'c_thiessen',
def Class_IndCom(SymDiff, Nutzungen, GOT, INNEN):
    '''
    Classify industrial/commercial polygons from a symmetric-difference layer.

    Selects large (> 1 ha) symmetric-difference polygons whose INNEN value and
    OVERLAP exceed the given thresholds and that intersect industrial/commercial
    land use (ATKIS object type 'AX_IndustrieUndGewerbeflaeche' — presumably
    ATKIS/ALKIS data, TODO confirm). Polygons whose industrial share is below
    50 % are removed again; the accepted polygons are also deleted from the
    input symmetric-difference layer.

    :param SymDiff: symmetric-difference feature class (must carry INNEN,
        OVERLAP attributes; Shape_Area is (re)computed via Shp_Area)
    :param Nutzungen: land-use feature class with OBJART_TXT attribute
    :param GOT: OVERLAP threshold used in the attribute selection
    :param INNEN: INNEN attribute value to select (also used in temp names)
    :return: tuple (SymDiff_FL, SymDiff_P1) — the pruned symmetric-difference
        feature layer and the shapefile path of accepted industrial polygons
    '''
    # scratch workspaces: mem() -> in-memory names, tmp() -> temp shapefiles
    IndGew = mem('IndGew')
    SymDiffIndGew = mem('SymDiffIndGew')
    SymDiff_P1 = tmp("A_{}_IndCom.shp".format(INNEN))
    SymDiff_P1_IS = mem('SymDiff_{}_IS'.format(INNEN))
    SymDiff_P1_IS_DISS = mem('SymDiff_{}_IS_DISS'.format(INNEN))
    SymDiff_IS_Sel = mem('SymDiff_IS_Sel_{}'.format(INNEN))
    SymDiff_join = mem('SymDiff_join_{}'.format(INNEN))
    SymDiff_P1_join = mem('SymDiff_P1_join_{}'.format(INNEN))
    # extract industrial/commercial land-use polygons
    Nutzungen_FL = arcpy.MakeFeatureLayer_management(Nutzungen)
    Sel = arcpy.management.SelectLayerByAttribute(
        Nutzungen_FL, "NEW_SELECTION",
        "OBJART_TXT = 'AX_IndustrieUndGewerbeflaeche'")
    arcpy.CopyFeatures_management(Sel, IndGew)
    IndGew_FL = arcpy.MakeFeatureLayer_management(IndGew)
    # keep only symmetric-difference polygons touching industrial land use
    SymDiff_FL = arcpy.MakeFeatureLayer_management(SymDiff)
    Sel = arcpy.management.SelectLayerByLocation(SymDiff_FL, "INTERSECT",
                                                IndGew_FL, None,
                                                "NEW_SELECTION", "NOT_INVERT")
    arcpy.CopyFeatures_management(Sel, SymDiffIndGew)
    Shp_Area(SymDiffIndGew)
    # candidate polygons: matching INNEN value, > 1 ha, OVERLAP above threshold
    SymDiffIndGew_FL = arcpy.MakeFeatureLayer_management(SymDiffIndGew)
    Sel = arcpy.management.SelectLayerByAttribute(
        SymDiffIndGew_FL, "NEW_SELECTION",
        "INNEN = '{}' And Shape_Area > 10000 And OVERLAP > {}".format(
            INNEN, GOT))
    arcpy.CopyFeatures_management(Sel, SymDiff_P1)
    Shp_Area(SymDiff_P1)
    # intersect candidates with industrial land use to measure the
    # industrial share of every candidate polygon
    arcpy.analysis.Intersect([SymDiff_P1, IndGew], SymDiff_P1_IS, "ALL", None,
                             "INPUT")
    # drop stale ORIG_FID fields so the SpatialJoin/Dissolve below produces a
    # clean FID_A_<INNEN>_IndCom key
    if len(arcpy.ListFields(SymDiff_P1, "ORIG_FID")) > 0:
        arcpy.DeleteField_management(SymDiff_P1, "ORIG_FID")
    if len(arcpy.ListFields(SymDiff_P1_IS, "ORIG_FID")) > 0:
        arcpy.DeleteField_management(SymDiff_P1_IS, "ORIG_FID")
    arcpy.analysis.SpatialJoin(SymDiff_P1_IS, SymDiff_P1, SymDiff_P1_join)
    # dissolve intersection pieces back to one record per candidate polygon
    arcpy.management.Dissolve(SymDiff_P1_join, SymDiff_P1_IS_DISS,
                              "FID_A_{}_IndCom".format(INNEN), None,
                              "MULTI_PART", "DISSOLVE_LINES")
    Shp_Area(SymDiff_P1_IS_DISS)
    # rename so both areas can coexist after the join below
    arcpy.AlterField_management(SymDiff_P1_IS_DISS, "Shape_Area", "Ind_Area")
    arcpy.analysis.SpatialJoin(SymDiff_P1, SymDiff_P1_IS_DISS, SymDiff_join)
    # RATIO = industrial share (%) of each candidate polygon
    if len(arcpy.ListFields(SymDiff_join, "RATIO")) == 0:
        arcpy.management.AddField(SymDiff_join, "RATIO", "DOUBLE")
    arcpy.management.CalculateField(SymDiff_join, "RATIO",
                                    "!Ind_Area! / !Shape_Area! * 100",
                                    "PYTHON_9.3", None)
    # polygons that are less than half industrial get rejected again
    SymDiff_join_FL = arcpy.MakeFeatureLayer_management(SymDiff_join)
    Sel = arcpy.management.SelectLayerByAttribute(SymDiff_join_FL,
                                                  "NEW_SELECTION",
                                                  "RATIO < 50")
    arcpy.CopyFeatures_management(Sel, SymDiff_IS_Sel)
    SymDiff_IS_Sel_FL = arcpy.MakeFeatureLayer_management(SymDiff_IS_Sel)
    SymDiff_P1_FL = arcpy.MakeFeatureLayer_management(SymDiff_P1)
    arcpy.management.SelectLayerByLocation(SymDiff_P1_FL, "ARE_IDENTICAL_TO",
                                           SymDiff_IS_Sel_FL, None,
                                           "NEW_SELECTION", "NOT_INVERT")
    # debug snapshots (############# marks debug output)
    arcpy.CopyFeatures_management(
        SymDiff_P1, "SymDiff_P1_{}_0.shp".format(INNEN))  #############
    arcpy.CopyFeatures_management(
        SymDiff_IS_Sel_FL,
        "SymDiff_IS_Sel_FL_{}_0.shp".format(INNEN))  #############
    # remove the rejected polygons from the accepted set
    arcpy.DeleteFeatures_management(SymDiff_P1_FL)
    arcpy.CopyFeatures_management(
        SymDiff_FL, "SymDiff_FL_{}_0.shp".format(INNEN))  #############
    # and remove the accepted polygons from the input symmetric difference
    arcpy.management.SelectLayerByLocation(SymDiff_FL, "ARE_IDENTICAL_TO",
                                           SymDiff_P1_FL, None,
                                           "NEW_SELECTION", "NOT_INVERT")
    arcpy.DeleteFeatures_management(SymDiff_FL)
    arcpy.CopyFeatures_management(
        SymDiff_FL, "SymDiff_FL_{}_1.shp".format(INNEN))  #############
    return SymDiff_FL, SymDiff_P1
def Class_Resid(SymDiff, Nutzungen, GOT, INNEN):
    '''
    Classify residential polygons from a symmetric-difference layer.

    Mirrors Class_IndCom but for residential/mixed-use land: selects large
    (> 1 ha) symmetric-difference polygons with the given INNEN value whose
    OVERLAP exceeds GOT and that intersect residential land-use classes
    (ATKIS object types AX_Wohnbauflaeche, AX_FlaecheGemischterNutzung,
    AX_FlaecheBesondererFunktionalerPraegung — presumably ATKIS/ALKIS data,
    TODO confirm). Polygons whose residential share is below 50 % are
    rejected; accepted polygons are removed from the input layer.

    Fix vs. previous revision: the duplicated
    ``arcpy.CopyFeatures_management(Sel, SymDiff_IS_Sel)`` call (the second
    copy was redundant) and the doubled parentheses in ``tmp(...)`` were
    removed; behavior is otherwise unchanged.

    :param SymDiff: symmetric-difference feature class (INNEN, OVERLAP attrs)
    :param Nutzungen: land-use feature class with OBJART_TXT attribute
    :param GOT: OVERLAP threshold used in the attribute selection
    :param INNEN: INNEN attribute value to select (also used in temp names)
    :return: tuple (SymDiff_FL, SymDiff_P2) — the pruned symmetric-difference
        feature layer and the shapefile path of accepted residential polygons
    '''
    # scratch workspaces: mem() -> in-memory names, tmp() -> temp shapefiles
    SymDiffPos = mem('SymDiffPos')
    WohnGemFunk = tmp("WohnGemFunk.shp")
    SymDiffPos_Wohn = tmp('SymDiffPos_Wohn.shp')
    SymDiff_P2 = tmp("A_{}_Resid.shp".format(INNEN))
    SymDiff_P2_IS = mem('SymDiff_{}1_IS'.format(INNEN))
    SymDiff_P2_IS_DISS = mem('SymDiff_{}1_IS_DISS'.format(INNEN))
    SymDiff_IS_Sel = mem('SymDiff_IS_Sel_{}'.format(INNEN))
    SymDiff_join = mem('SymDiff_join_{}'.format(INNEN))
    SymDiff_P2_join = mem('SymDiff_P2_join_{}'.format(INNEN))
    SymDiff_FL = arcpy.MakeFeatureLayer_management(SymDiff)
    Nutzungen_FL = arcpy.MakeFeatureLayer_management(Nutzungen)
    # restrict to symmetric-difference polygons with the requested INNEN value
    Sel = arcpy.management.SelectLayerByAttribute(
        SymDiff_FL, "NEW_SELECTION", "INNEN = '{}'".format(INNEN))
    arcpy.CopyFeatures_management(Sel, SymDiffPos)
    Shp_Area(SymDiffPos)
    SymDiffPos_FL = arcpy.MakeFeatureLayer_management(SymDiffPos)
    # extract residential / mixed-use / special-function land-use polygons
    Sel = arcpy.management.SelectLayerByAttribute(
        Nutzungen_FL, "NEW_SELECTION",
        "OBJART_TXT = 'AX_Wohnbauflaeche' Or OBJART_TXT = 'AX_FlaecheGemischterNutzung' Or OBJART_TXT = 'AX_FlaecheBesondererFunktionalerPraegung'"
    )
    arcpy.CopyFeatures_management(Sel, WohnGemFunk)
    WohnGemFunk_FL = arcpy.MakeFeatureLayer_management(WohnGemFunk)
    # keep only candidate polygons touching residential land use
    Sel = arcpy.management.SelectLayerByLocation(SymDiffPos_FL, "INTERSECT",
                                                 WohnGemFunk_FL, None,
                                                 "NEW_SELECTION",
                                                 "NOT_INVERT")
    arcpy.CopyFeatures_management(Sel, SymDiffPos_Wohn)
    Shp_Area(SymDiffPos_Wohn)
    # candidates: OVERLAP above threshold and larger than 1 ha
    SymDiffPos_Wohn_FL = arcpy.MakeFeatureLayer_management(SymDiffPos_Wohn)
    Sel = arcpy.management.SelectLayerByAttribute(
        SymDiffPos_Wohn_FL, "NEW_SELECTION",
        "OVERLAP > {} And Shape_Area > 10000".format(GOT))
    arcpy.CopyFeatures_management(Sel, SymDiff_P2)
    # intersect candidates with residential land use to measure the
    # residential share of every candidate polygon
    arcpy.analysis.Intersect([SymDiff_P2, WohnGemFunk], SymDiff_P2_IS, "ALL",
                             None, "INPUT")
    arcpy.analysis.SpatialJoin(SymDiff_P2_IS, SymDiff_P2, SymDiff_P2_join)
    arcpy.management.Dissolve(SymDiff_P2_join, SymDiff_P2_IS_DISS, "ORIG_FID",
                              None, "MULTI_PART", "DISSOLVE_LINES")
    Shp_Area(SymDiff_P2_IS_DISS)
    # rename so both areas can coexist after the join below
    arcpy.AlterField_management(SymDiff_P2_IS_DISS, "Shape_Area", "Res_Area")
    arcpy.analysis.SpatialJoin(SymDiff_P2, SymDiff_P2_IS_DISS, SymDiff_join,
                               "JOIN_ONE_TO_ONE", "KEEP_ALL", None,
                               "INTERSECT", None, '')
    # RATIO = residential share (%) of each candidate polygon
    if len(arcpy.ListFields(SymDiff_join, "RATIO")) == 0:
        arcpy.management.AddField(SymDiff_join, "RATIO", "DOUBLE")
    arcpy.management.CalculateField(SymDiff_join, "RATIO",
                                    "!Res_Area! / !Shape_Area! * 100",
                                    "PYTHON_9.3", None)
    # polygons that are less than half residential get rejected again
    SymDiff_join_FL = arcpy.MakeFeatureLayer_management(SymDiff_join)
    Sel = arcpy.management.SelectLayerByAttribute(SymDiff_join_FL,
                                                  "NEW_SELECTION",
                                                  "RATIO < 50")
    arcpy.CopyFeatures_management(Sel, SymDiff_IS_Sel)
    SymDiff_IS_Sel_FL = arcpy.MakeFeatureLayer_management(SymDiff_IS_Sel)
    SymDiff_P2_FL = arcpy.MakeFeatureLayer_management(SymDiff_P2)
    # remove the rejected polygons from the accepted set ...
    arcpy.management.SelectLayerByLocation(SymDiff_P2_FL, "ARE_IDENTICAL_TO",
                                           SymDiff_IS_Sel_FL, None,
                                           "NEW_SELECTION", "NOT_INVERT")
    arcpy.DeleteFeatures_management(SymDiff_P2_FL)
    # ... and remove the accepted polygons from the input symmetric difference
    arcpy.management.SelectLayerByLocation(SymDiff_FL, "ARE_IDENTICAL_TO",
                                           SymDiff_P2_FL, None,
                                           "NEW_SELECTION", "NOT_INVERT")
    arcpy.DeleteFeatures_management(SymDiff_FL)
    # clean up temp shapefiles no longer needed
    DelName([WohnGemFunk, SymDiffPos_Wohn])
    return SymDiff_FL, SymDiff_P2
def cluster(relevantMerged, countThreshold, stopInterval, relevantMergedB,
            bundleMerged, DEM):
    """
    Group line features into directional "bundles" (clusters).

    Iterates the input lines from longest to shortest; for each line, buffers
    it (buffer size ~ length/10, clamped to config limits), selects lines that
    fall completely within the buffer and share a similar azimuth
    (+/- azimuthTreshold), and — if at least ``countThreshold`` lines match —
    dissolves them into one buffered bundle polygon annotated with count,
    mean azimuth, and mean+std length. Processed lines are deleted from the
    working layer; the remainder is written to ``relevantMergedB`` and all
    bundles are merged into ``bundleMerged``.

    Params: relevantMerged — input line feature class (needs FID, azimuth,
    length fields); countThreshold — minimum lines per bundle; stopInterval —
    process at most this many seed lines; relevantMergedB — output path for
    the un-bundled remainder; bundleMerged — output path for merged bundles;
    DEM — passed to config.getBufferSizeCluster to pick buffer size limits.
    Relies on module-level ``config``, ``line`` and ``getProperties``.
    """
    # config:
    [bufferSizeMin, bufferSizeMax] = config.getBufferSizeCluster(DEM)
    azimuthTreshold = config.azimuthTreshold
    # load relevantMerged to in_memory
    relevantMergeLayer = "relevantMerged"
    myMemoryFeature = "in_memory" + "\\" + relevantMergeLayer
    arcpy.CopyFeatures_management(relevantMerged, myMemoryFeature)
    arcpy.MakeFeatureLayer_management(myMemoryFeature, relevantMergeLayer)
    # calculates ID for deleting (FID is recalculated each time after deleting)
    #print("calculates ID for deleting")
    if (arcpy.ListFields(relevantMergeLayer, "ShapeId_1") == []):
        arcpy.AddField_management(relevantMergeLayer, "ShapeId_1", "LONG", 8,
                                  2, "", "", "NULLABLE", "NON_REQUIRED")
    # freeze the current FID into ShapeId_1 so rows stay addressable
    arcpy.CalculateField_management(relevantMergeLayer, "ShapeId_1", "!FID!",
                                    "PYTHON_9.3", "#")
    ### for each row in relevantMerged ###
    # cursor for iterate rows in length descend order!
    blueSet = []
    bundleSet = []
    # order cursor from the longest to the shortest line
    rows = arcpy.SearchCursor(relevantMergeLayer, "", "", "", "length D")
    # where clause for testing '"ShapeId_1" = 1151'
    # fill the blueset with the rows
    for row in rows:
        blueLine = line(row.ShapeId_1, row.azimuth, row.length)
        blueSet.append(blueLine)
    del rows  # cleaning
    relevantBackups = []
    ######################################
    #     for each line in blueset       #
    ######################################
    for blueLine in blueSet[:stopInterval]:
        myExpression = '"ShapeId_1" =%i' % (blueLine.ID)
        arcpy.SelectLayerByAttribute_management(relevantMergeLayer,
                                                "NEW_SELECTION", myExpression)
        noSelected = int(
            arcpy.GetCount_management(relevantMergeLayer).getOutput(0))
        # if line with ID exists (it may have been deleted as part of an
        # earlier bundle)
        if (noSelected == 1):
            # make buffer around blueSHP (bigBuffer for completely within)
            tempBuffer = "in_memory\\tempBuffer"
            # ? Question of buffer size!
            # dynamic buffer size according to blueLength - 1/10
            blueLength = blueLine.length
            bufferSize = int(blueLength / 10)
            # supremum of buffersize (for extra long paralel lines near together)
            if (bufferSize > bufferSizeMax):
                bufferSize = bufferSizeMax
            # infimum of buffersize (for short lines - small buffer not sufficient)
            if (bufferSize < bufferSizeMin):
                bufferSize = bufferSizeMin
            arcpy.Buffer_analysis(relevantMergeLayer, tempBuffer,
                                  "%d Meters" % bufferSize, "FULL", "ROUND",
                                  "ALL", "#")
            arcpy.MakeFeatureLayer_management(tempBuffer, "tempBuffer")
            # select all orange in buffer of blue
            # intersect is better but slower - we will see
            arcpy.SelectLayerByLocation_management(relevantMergeLayer,
                                                   "COMPLETELY_WITHIN",
                                                   "tempBuffer", "",
                                                   "NEW_SELECTION")
            noSelected = int(
                arcpy.GetCount_management(relevantMergeLayer).getOutput(0))
            isBundle = False
            if (noSelected >= countThreshold):
                # create expression +/- azimuthTreshold from blueLine
                blueMin = blueLine.azimuth - azimuthTreshold
                if blueMin < 0:
                    blueMin += 180
                blueMax = blueLine.azimuth + azimuthTreshold
                if blueMax > 180:
                    blueMax -= 180
                # this condition is useless. Azimuth is always >=0 and <180,
                # after this simplification the myExpression is the same for
                # both cases. The only important thing is to convert extremes
                # to interval <0,180)
                if (blueLine.azimuth < azimuthTreshold) or (
                        blueLine.azimuth > 180 - azimuthTreshold):
                    # azimuth interval wraps around 180/0
                    myExpression = '("azimuth" >= %i and "azimuth" < %i) or ("azimuth" > %i and "azimuth" < %i)' % (
                        0, blueMax, blueMin, 180)
                else:
                    myExpression = '"azimuth" > %i and "azimuth" < %i ' % (
                        blueMin, blueMax)
                ### SELECT THE CLUSTER LINES ###
                arcpy.SelectLayerByAttribute_management(
                    relevantMergeLayer, "SUBSET_SELECTION", myExpression)
                # get count - if < countThreshold do not save, only delete!
                noSelected = int(
                    arcpy.GetCount_management(relevantMergeLayer).getOutput(0))
                if (noSelected >= countThreshold):
                    isBundle = True
            if (isBundle):
                # im_memory bundle
                bundle = "in_memory\\line%i" % blueLine.ID
                try:
                    arcpy.Buffer_analysis(relevantMergeLayer, bundle,
                                          "%d Meters" % 10, "FULL", "ROUND",
                                          "ALL", "#")
                    # make layer from in_memory bundle
                    arcpy.MakeFeatureLayer_management(bundle, "bundle")
                    bundleSet.append(bundle)
                except:
                    # 10 m buffer can fail on degenerate geometry; retry once
                    # with a slightly larger buffer, then give up on this seed
                    try:
                        arcpy.Buffer_analysis(relevantMergeLayer, bundle,
                                              "%d Meters" % 12, "FULL",
                                              "ROUND", "ALL", "#")
                        # make layer from in_memory bundle
                        arcpy.MakeFeatureLayer_management(bundle, "bundle")
                        bundleSet.append(bundle)
                    except:
                        continue
                # annotate the bundle polygon with cluster statistics
                arcpy.AddField_management(bundle, "count", "LONG", 9, "", "",
                                          "", "NULLABLE", "NON_REQUIRED")
                arcpy.AddField_management(bundle, "azimuth", "LONG", 9, "",
                                          "", "", "NULLABLE", "NON_REQUIRED")
                arcpy.AddField_management(bundle, "length", "LONG", 9, "", "",
                                          "", "NULLABLE", "NON_REQUIRED")
                arcpy.CalculateField_management(bundle, "count", noSelected,
                                                "PYTHON_9.3", "#")
                lengthList = []
                azimuthList = []
                # compute stats on selection (cluster lines)
                clusterRows = arcpy.SearchCursor(relevantMergeLayer)
                for clusterRow in clusterRows:
                    lengthList.append(clusterRow.getValue("length"))
                    azimuthList.append(clusterRow.getValue("azimuth"))
                del clusterRows
                # length stats
                [n, mean, std, median, myMin, myMax] = getProperties(
                    lengthList)
                arcpy.CalculateField_management(bundle, "length",
                                                "%i" % int(mean + std),
                                                "PYTHON_9.3", "#")
                azimuthList.sort()
                azimuthMin = azimuthList[0]
                azimuthMax = azimuthList[n - 1]
                # solve problem with angle numbers!
                # set is on border of azimuths (180-0)
                if ((azimuthMax - azimuthMin) > (2 * azimuthTreshold)):
                    # new set - recclassify
                    azimuthListPlus = []
                    for azimuth in azimuthList:
                        if azimuth > (2 * azimuthTreshold):
                            azimuthListPlus.append(azimuth - 180)
                        else:
                            azimuthListPlus.append(azimuth)
                    # replace azimuthList
                    azimuthList = azimuthListPlus
                # compute azimuth statistics
                [n, mean, std, median, myMin, myMax] = getProperties(
                    azimuthList)
                if mean < 0:
                    mean += 180
                arcpy.CalculateField_management(bundle, "azimuth",
                                                "%i" % int(mean),
                                                "PYTHON_9.3", "#")
            # delete from merged (selected lines are consumed whether or not
            # they formed a bundle — see "only delete!" above)
            arcpy.DeleteFeatures_management(relevantMergeLayer)
    #################################### E N D   F O R ###########################################
    #print "backup"
    # a) backup relevantMerged to disk
    arcpy.SelectLayerByAttribute_management(relevantMergeLayer,
                                            "CLEAR_SELECTION")
    arcpy.CopyFeatures_management(relevantMergeLayer, relevantMergedB)
    relevantBackups.append(relevantMergedB)
    # write memory bundles to disk
    toMerge = ""
    for bundle in bundleSet:
        # NOTE(review): entries in bundleSet already start with "in_memory\\",
        # so this prepends the prefix twice ("in_memory\\in_memory\\lineN;").
        # Verify whether Merge tolerates this or it is masked by the except
        # below — TODO confirm.
        toMerge += "in_memory\\%s;" % bundle
    # TODO: don't merge if toMerge is empty !
    try:
        arcpy.Merge_management(toMerge, bundleMerged)
        relevantBackups.append(bundleMerged)
    except Exception, e:
        print toMerge
        print e
def assign_pickup_day(subdivs, coll_grid):
    """
    Assign each subdivision the majority trash/recycling pickup day.

    Removes subdivisions that do not intersect the collection grid, writes a
    Trash_and_Recycling_Day value onto the grid (via module-level
    ``days_dict``), then for every subdivision counts the day values of the
    intersecting grid cells and records the most frequent one in the
    module-level ``assign_dict``.

    Params: subdivs — subdivision feature class with a ``Class`` name field;
    coll_grid — collection-grid feature class with a ``Recycling`` field.
    Returns: assign_dict mapping subdivision name -> majority day (str).
    Relies on module-level ``correct_subnames``, ``days_dict``,
    ``assign_dict``, ``email``, ``log``.
    """
    correct_subnames(subdivs)
    arcpy.MakeFeatureLayer_management(subdivs, "sub_lyr")
    # delete subdivisions that do NOT intersect the grid
    # (select intersecting, then switch the selection)
    arcpy.SelectLayerByLocation_management("sub_lyr", "INTERSECT", coll_grid,
                                           selection_type='NEW_SELECTION')
    arcpy.SelectLayerByLocation_management("sub_lyr", "INTERSECT", coll_grid,
                                           selection_type='SWITCH_SELECTION')
    arcpy.DeleteFeatures_management("sub_lyr")
    arcpy.SelectLayerByAttribute_management("sub_lyr", "CLEAR_SELECTION")
    arcpy.SelectLayerByLocation_management("sub_lyr", "INTERSECT", coll_grid,
                                           selection_type='NEW_SELECTION')
    # names of the remaining (grid-covered) subdivisions
    sub_names_list = [
        row[0] for row in arcpy.da.SearchCursor("sub_lyr", ['Class'])
    ]
    try:
        arcpy.AddField_management(subdivs, "Trash_and_Recycling_Day", "TEXT")
        arcpy.AddField_management(subdivs, "YardWasteDay", "TEXT")
        arcpy.AddField_management(coll_grid, "Trash_and_Recycling_Day",
                                  "TEXT")
        arcpy.AddField_management(subdivs, "Current", "TEXT")
        arcpy.MakeFeatureLayer_management(subdivs, "SubDivs")
        arcpy.MakeFeatureLayer_management(coll_grid, "RecGrid")
    except Exception as e:
        # mail the traceback to the maintainer, then abort
        tb = traceback.format_exc()
        email(tb)
        # NOTE(review): sys.exit() raises SystemExit itself, so the `raise`
        # never sees a return value — the process exits either way, but this
        # reads oddly; probably meant plain `sys.exit()`.
        raise sys.exit()
    # translate the grid's Recycling code into a weekday name
    try:
        with arcpy.da.UpdateCursor(
                coll_grid, ['Recycling', "Trash_and_Recycling_Day"]) as ucur:
            for row in ucur:
                if row[0] in days_dict.keys():
                    row[1] = days_dict[row[0]]
                    ucur.updateRow(row)
    except Exception as e:
        tb = traceback.format_exc()
        email(tb)
        raise sys.exit()
    #print sub_names_list
    for sub_name in sorted(sub_names_list):
        #print str(sub_name), type(sub_name)
        try:
            # tally pickup days of the grid cells touching this subdivision
            day_dict = defaultdict(int)
            arcpy.SelectLayerByAttribute_management(
                "SubDivs", "NEW_SELECTION",
                "\"Class\" = '{}'".format(sub_name))
            #print [row[0] for row in arcpy.da.SearchCursor("SubDivs", ['Class'])]
            arcpy.SelectLayerByLocation_management(
                "RecGrid", "INTERSECT", "SubDivs",
                selection_type='NEW_SELECTION')
            count = int(arcpy.GetCount_management("RecGrid").getOutput(0))
        except Exception as e:
            tb = traceback.format_exc()
            email(tb)
            raise sys.exit()
        print sub_name, "-----", [
            row[0] for row in arcpy.da.SearchCursor("RecGrid", ['Street'])
        ], count
        print [f.name for f in arcpy.ListFields("RecGrid")]
        try:
            with arcpy.da.SearchCursor(
                    "RecGrid", ["Trash_and_Recycling_Day"]) as scur:
                for row in scur:
                    day_dict[row[0]] += 1
            for k, v in day_dict.items():
                print k, v
        except Exception as e:
            tb = traceback.format_exc()
            email(tb)
            raise sys.exit()
        v = list(day_dict.values())
        k = list(day_dict.keys())
        print k
        print v
        try:
            # majority vote: day with the highest cell count wins
            major_day = k[v.index(max(v))]
            #print "MAJOR DAYYYYYYYYYY", major_day, type(major_day)
            assign_dict[str(sub_name)] = str(major_day)
        except Exception as e:
            tb = traceback.format_exc()
            email(tb)
            raise sys.exit()
    log.info("ASSIGNING COLLECTION DAYS")
    for k, v in assign_dict.items():
        #print type(k), k, type(v), v
        log.info("\t Subdivison Collection Day Assigned values {}--{}--{}--{}".
                 format(k, type(k), v, type(v)))
    return assign_dict
def perennialNetwork(nhd_orig_flowline_path, nhd_area_path,
                     nhd_waterbody_path, outpath):
    """
    Derive a perennial stream network from NHD flowlines.

    Starts from flowlines coded perennial (FCode 46006) and incrementally
    appends flowlines that close gaps in the network: artificial paths inside
    perennial area polygons, gaps through marsh and small lake/pond
    waterbodies, and named/unnamed intermittent or connector lines downstream
    of or bridging the network. Each added feature's ``Source`` field records
    the step that added it (values "1." through "8."). Finally removes short
    isolated segments and writes the de-duplicated result to ``outpath``.

    Params: nhd_orig_flowline_path / nhd_area_path / nhd_waterbody_path —
    NHD flowline, area, and waterbody feature classes (need FCODE, FTYPE,
    GNIS_NAME, AREASQKM fields); outpath — output feature class path.
    Relies on module-level ``findGaps``.
    """
    # environment settings
    arcpy.env.overwriteOutput = 'TRUE'
    # -- make copy of original nhd flowlines
    nhd_flowlines = arcpy.CopyFeatures_management(nhd_orig_flowline_path,
                                                  'in_memory/nhd_flowlines')
    # add source field to track which part of workflow perennial network flowline was added
    arcpy.AddField_management(nhd_flowlines, 'Source', 'TEXT', '', '', 75)

    # --perennial coded lines--
    # select lines from original nhd that are coded as perennial
    arcpy.MakeFeatureLayer_management(nhd_flowlines, 'nhd_flowlines_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'NEW_SELECTION',
                                            """ "FCODE" = 46006 """)
    flowline_per = arcpy.CopyFeatures_management('nhd_flowlines_lyr',
                                                 'in_memory/flowline_per')
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            row[0] = "1. Perennial Code"
            cursor.updateRow(row)

    # --add missing major rivers--
    # --subsetted artificial coded lines that are in perennial nhd area polygons--
    # select perennial coded nhd area polygons
    arcpy.MakeFeatureLayer_management(nhd_area_path, 'nhd_area_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_area_lyr', 'NEW_SELECTION',
                                            """ "FCODE" = 46006 """)
    # select and dissolve artificial coded nhd lines that are within perennial nhd area polygons
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'NEW_SELECTION',
                                            """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN',
                                           'nhd_area_lyr', '',
                                           'SUBSET_SELECTION')
    flowline_art_code = arcpy.Dissolve_management(
        'nhd_flowlines_lyr', 'in_memory/flowline_art_code', 'GNIS_NAME', '',
        'SINGLE_PART', 'UNSPLIT_LINES')
    # remove short lines (< 50 m) that act as artificial connectors to flowlines outside perennial nhd area polygons
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'INTERSECT',
                                           'nhd_area_lyr', '1 Meters',
                                           'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'SUBSET_SELECTION',
                                            """ "FCODE" <> 55800 """)
    arcpy.MakeFeatureLayer_management(flowline_art_code,
                                      'flowline_art_code_lyr')
    arcpy.SelectLayerByLocation_management('flowline_art_code_lyr',
                                           'INTERSECT', 'nhd_flowlines_lyr',
                                           '', 'NEW_SELECTION')
    with arcpy.da.UpdateCursor('flowline_art_code_lyr',
                               ['SHAPE@Length']) as cursor:
        for row in cursor:
            if row[0] < 50:
                cursor.deleteRow()
    # remove lines that end where canal starts
    mr_end_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_art_code, 'in_memory/mr_end_pt', "END")
    arcpy.MakeFeatureLayer_management(mr_end_pt, 'mr_end_pt_lyr')
    # NOTE(review): 3601/3603 look like typos — NHD canal/ditch FCodes are
    # 33600/33601/33603. As written, 3601/3603 likely match nothing.
    # TODO confirm against the NHD FCode list before changing.
    arcpy.SelectLayerByAttribute_management(
        'nhd_flowlines_lyr', 'NEW_SELECTION',
        """ "FCODE" = 33600 OR "FCODE" = 3601 OR "FCODE" = 3603""")
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'INTERSECT',
                                           flowline_art_code, '1 Meters',
                                           'SUBSET_SELECTION')
    canal_start_pt = arcpy.FeatureVerticesToPoints_management(
        'nhd_flowlines_lyr', 'in_memory/canal_start_pt', "START")
    arcpy.SelectLayerByLocation_management('mr_end_pt_lyr', 'INTERSECT',
                                           canal_start_pt, '',
                                           'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('flowline_art_code_lyr',
                                           'INTERSECT', 'mr_end_pt_lyr', '',
                                           'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_art_code_lyr')
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           flowline_art_code, '',
                                           'NEW_SELECTION')
    # add selected flowlines to the perennial stream shp
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field (only rows not tagged by an earlier step)
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "2. Major Artifical in Perennial Area Polygon"
                cursor.updateRow(row)

    # --add missing flowlines in marshes--
    # --artificial coded lines that are perennial gaps in marsh waterbody polygons--
    # select nhd waterbodys that:
    #   - are coded as marshes (ftype 466)
    #   - intersect perennial stream start and end (i.e., are perennial stream inlet AND outlet)
    arcpy.MakeFeatureLayer_management(nhd_waterbody_path,
                                      'nhd_waterbody_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr',
                                            'NEW_SELECTION',
                                            """ "FTYPE" = 466 """)
    marshes = arcpy.CopyFeatures_management('nhd_waterbody_lyr',
                                            'in_memory/marshes')
    arcpy.MakeFeatureLayer_management(marshes, 'marshes_lyr')
    per_start_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per, 'in_memory/per_end_pt', "END")
    arcpy.SelectLayerByLocation_management('marshes_lyr', 'INTERSECT',
                                           per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('marshes_lyr', 'INTERSECT',
                                           per_end_pt, '',
                                           'SUBSET_SELECTION')
    # select and dissolve nhd flowlines that:
    #   - are coded as artificial
    #   - fall within selected marsh waterbodies
    #   - are not already part of perennial stream network
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'NEW_SELECTION',
                                            """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN',
                                           'marshes_lyr', '',
                                           'SUBSET_SELECTION')
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           flowline_per, '',
                                           'REMOVE_FROM_SELECTION')
    marsh_lines = arcpy.Dissolve_management('nhd_flowlines_lyr',
                                            'in_memory/marsh_lines',
                                            'GNIS_NAME', '', 'SINGLE_PART',
                                            'UNSPLIT_LINES')
    marsh_gap_lines = findGaps(marsh_lines, flowline_per)
    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           marsh_gap_lines, '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "3. Artificial Network Gap in Marsh Waterbody"
                cursor.updateRow(row)

    # --add missing flowlines in smaller lakes and ponds--
    # select nhd waterbodys that:
    #   - are coded as lakes/ponds (ftype 390)
    #   - area <= .03 sq km
    #   - are not named
    #   - intersect perennial stream start and end (i.e., are perennial stream inlet AND outlet)
    arcpy.SelectLayerByLocation_management('nhd_waterbody_lyr', 'INTERSECT',
                                           flowline_per, '', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management(
        'nhd_waterbody_lyr', 'SUBSET_SELECTION',
        """ "FTYPE" = 390 AND "AREASQKM" <= 0.03 AND "GNIS_NAME" = '' """)
    sm_lakes_ponds = arcpy.CopyFeatures_management(
        'nhd_waterbody_lyr', 'in_memory/sm_lakes_ponds')
    arcpy.MakeFeatureLayer_management(sm_lakes_ponds, 'sm_lakes_ponds_lyr')
    per_start_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per, 'in_memory/per_end_pt', "END")
    arcpy.SelectLayerByLocation_management('sm_lakes_ponds_lyr', 'INTERSECT',
                                           per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('sm_lakes_ponds_lyr', 'INTERSECT',
                                           per_end_pt, '',
                                           'SUBSET_SELECTION')
    # select nhd flowlines that:
    #   - fall within selected waterbodies
    #   - intersect a perennial streams (i.e., are gaps on perennial network)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN',
                                           'sm_lakes_ponds_lyr', '',
                                           'NEW_SELECTION')
    flowline_wbody_dissolve = arcpy.Dissolve_management(
        'nhd_flowlines_lyr', 'in_memory/flowline_wbody_dissolve', 'GNIS_NAME',
        '', 'SINGLE_PART', 'UNSPLIT_LINES')
    arcpy.MakeFeatureLayer_management(flowline_wbody_dissolve,
                                      'flowline_wbody_dissolve_lyr')
    arcpy.SelectLayerByLocation_management('flowline_wbody_dissolve_lyr',
                                           'INTERSECT', flowline_per, '',
                                           'NEW_SELECTION')
    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           'flowline_wbody_dissolve_lyr', '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "4. Network Gap in Small Lake/Pond Waterbody"
                cursor.updateRow(row)

    # --remove flowlines where 2 lines end but none start (indicate 'false perennial tribs')--
    per_start_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per, 'in_memory/per_end_pt', "END")
    # self spatial join: Join_Count counts coincident end points
    per_end_pt_join = arcpy.SpatialJoin_analysis(
        per_end_pt, per_end_pt, 'in_memory/per_end_pt_join',
        'JOIN_ONE_TO_ONE', 'KEEP_ALL', '', 'INTERSECT')
    arcpy.MakeFeatureLayer_management(per_end_pt_join, 'per_end_pt_join_lyr')
    arcpy.SelectLayerByLocation_management('per_end_pt_join_lyr', 'INTERSECT',
                                           per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('per_end_pt_join_lyr',
                                            'SWITCH_SELECTION')
    arcpy.SelectLayerByAttribute_management('per_end_pt_join_lyr',
                                            'SUBSET_SELECTION',
                                            """ "Join_Count" >= 2 """)
    arcpy.MakeFeatureLayer_management(flowline_per, 'flowline_per_lyr')
    arcpy.SelectLayerByLocation_management('flowline_per_lyr', 'INTERSECT',
                                           'per_end_pt_join_lyr', '',
                                           'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_per_lyr')

    # --add named intermittent and connector flowlines that are directly downstream of perennial stream--
    # create perennial end pts shp (use to find intermittent that starts where perennial ends)
    flowline_per_dissolve = arcpy.Dissolve_management(
        flowline_per, 'in_memory/flowline_per_dissolve', '', '',
        'SINGLE_PART', 'UNSPLIT_LINES')
    per_end_pt = arcpy.FeatureVerticesToPoints_management(
        flowline_per_dissolve, 'in_memory/per_end_pt', "END")
    # select named intermitent and connector flowlines
    arcpy.SelectLayerByAttribute_management(
        'nhd_flowlines_lyr', 'NEW_SELECTION',
        """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'SUBSET_SELECTION',
                                            """ "GNIS_NAME" <> '' """)
    # dissolve selected flowlines by name
    flowline_int_dissolve = arcpy.Dissolve_management(
        'nhd_flowlines_lyr', 'in_memory/flowline_int_dissolve', 'GNIS_NAME',
        '', 'SINGLE_PART', 'UNSPLIT_LINES')
    # create points at start of dissolved intermittent and connector flowlines
    int_start_pts = arcpy.FeatureVerticesToPoints_management(
        flowline_int_dissolve, 'in_memory/int_start_pts', "START")
    # select perennial end points that overlap intermittent/connector start points
    arcpy.MakeFeatureLayer_management(per_end_pt, 'per_end_pt_lyr')
    arcpy.SelectLayerByLocation_management('per_end_pt_lyr', 'INTERSECT',
                                           int_start_pts, '',
                                           'NEW_SELECTION')
    # select dissolved intermitent and connector flowlines that intersect selected perennial end points
    # (these lines are directly downstream of perennial stream)
    arcpy.MakeFeatureLayer_management(flowline_int_dissolve,
                                      'flowline_int_dissolve_lyr')
    arcpy.SelectLayerByLocation_management('flowline_int_dissolve_lyr',
                                           'INTERSECT', 'per_end_pt_lyr', '',
                                           'NEW_SELECTION')
    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           'flowline_int_dissolve_lyr', '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "5. Named Intermittent/Connector Directly Downstream of Network Line"
                cursor.updateRow(row)

    # --add named intermittent flowlines that fall on gaps in the perennial network--
    # select intermittent flowlines that aren't part of perennial network up to this point
    # these are potential network gap lines
    arcpy.SelectLayerByAttribute_management(
        'nhd_flowlines_lyr', 'NEW_SELECTION',
        """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'SUBSET_SELECTION',
                                            """ "GNIS_NAME" <> '' """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           flowline_per, '',
                                           'REMOVE_FROM_SELECTION')
    int_lines = arcpy.CopyFeatures_management('nhd_flowlines_lyr',
                                              'in_memory/int_lines')
    # find gaps on all selected lines
    int_gap_lines = findGaps(int_lines, flowline_per)
    # add itermittent gap to the perennial stream shp
    with arcpy.da.InsertCursor(flowline_per, ["SHAPE@"]) as iCursor:
        with arcpy.da.SearchCursor(int_gap_lines, ["SHAPE@"]) as sCursor:
            for row in sCursor:
                iCursor.insertRow([row[0]])
    # find gaps on dissolved lines (grabs lines that may be split by trib and otherwise wouldn't be selected)
    int_lines_dissolve = arcpy.Dissolve_management(
        'nhd_flowlines_lyr', 'in_memory/int_lines_dissolve', 'GNIS_NAME', '',
        'SINGLE_PART', 'UNSPLIT_LINES')
    int_gap_lines_dissolve = findGaps(int_lines_dissolve, flowline_per)
    # add itermittent gap to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           int_gap_lines_dissolve, '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "6. Named Intermittent/Connector Network Gap"
                cursor.updateRow(row)

    # --add intermittent flowlines that fall on gaps in the perennial network--
    # select intermittent flowlines that aren't part of perennial network up to this point
    # these are potential network gap lines
    arcpy.SelectLayerByAttribute_management(
        'nhd_flowlines_lyr', 'NEW_SELECTION',
        """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'ARE_IDENTICAL_TO', flowline_per,
                                           '', 'REMOVE_FROM_SELECTION')
    int_lines_all = arcpy.CopyFeatures_management('nhd_flowlines_lyr',
                                                  'in_memory/int_lines_all')
    int_gap_lines_all = findGaps(int_lines_all, flowline_per)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           int_gap_lines_all, '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # repeat on name-dissolved lines to also catch trib-split segments
    arcpy.SelectLayerByAttribute_management(
        'nhd_flowlines_lyr', 'NEW_SELECTION',
        """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'ARE_IDENTICAL_TO', flowline_per,
                                           '', 'REMOVE_FROM_SELECTION')
    int_lines_all_dissolve = arcpy.Dissolve_management(
        'nhd_flowlines_lyr', 'in_memory/int_lines_all_dissolve', 'GNIS_NAME',
        '', 'SINGLE_PART', 'UNSPLIT_LINES')
    int_gap_lines_all_dissolve = findGaps(int_lines_all_dissolve,
                                          flowline_per)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           int_gap_lines_all_dissolve, '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "7. Unnamed Intermittent/Connector Network Gap"
                cursor.updateRow(row)

    # --add artifical flowlines that fall on gaps in the perennial network--
    # --these are potential network gap lines--
    # select artificial coded flowlines that aren't part of perennial network up to this point
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr',
                                            'NEW_SELECTION',
                                            """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           flowline_per, '',
                                           'REMOVE_FROM_SELECTION')
    # create search aoi from perennial area polygons and marsh waterbody polygons
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr',
                                            'NEW_SELECTION',
                                            """ "FTYPE" = 466 """)
    marshes = arcpy.CopyFeatures_management('nhd_waterbody_lyr',
                                            'in_memory/marshes')
    arcpy.SelectLayerByAttribute_management('nhd_area_lyr', 'NEW_SELECTION',
                                            """ "FCODE" = 46006 """)
    per_area = arcpy.CopyFeatures_management('nhd_area_lyr',
                                             'in_memory/per_area')
    art_gap_aoi = arcpy.Merge_management([marshes, per_area],
                                         'in_memory/art_gap_aoi')
    # subset selection to flowlines that flow throw search aoi
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN',
                                           art_gap_aoi, '',
                                           'SUBSET_SELECTION')
    art_lines = arcpy.CopyFeatures_management('nhd_flowlines_lyr',
                                              'in_memory/art_lines')
    art_gap_lines = findGaps(art_lines, flowline_per, 'True')
    # add artificial gap to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           art_gap_lines, '',
                                           'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)
    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "8. Artificial Network Gap"
                cursor.updateRow(row)

    # --remove isolated (i.e., only intersect themselves), short (< 300 m) line segments--
    flowline_per_dissolve2 = arcpy.Dissolve_management(
        flowline_per, 'in_memory/flowline_per_dissolve2', '', '',
        'SINGLE_PART', 'UNSPLIT_LINES')
    # self spatial join: Join_Count == 1 means the segment touches nothing else
    flowline_per_join = arcpy.SpatialJoin_analysis(
        flowline_per_dissolve2, flowline_per_dissolve2,
        'in_memory/flowline_per_join', 'JOIN_ONE_TO_ONE', 'KEEP_ALL', '',
        'INTERSECT')
    arcpy.AddField_management(flowline_per_join, 'Length', 'DOUBLE')
    arcpy.CalculateField_management(flowline_per_join, 'Length',
                                    "!SHAPE.LENGTH@Meters!", 'PYTHON_9.3')
    arcpy.MakeFeatureLayer_management(flowline_per_join,
                                      'flowline_per_join_lyr')
    arcpy.SelectLayerByAttribute_management(
        'flowline_per_join_lyr', 'NEW_SELECTION',
        """ "Length" < 300 AND "Join_Count" <= 1 """)
    arcpy.SelectLayerByLocation_management('flowline_per_lyr',
                                           'SHARE_A_LINE_SEGMENT_WITH',
                                           'flowline_per_join_lyr', '',
                                           'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_per_lyr')

    # --select and save final perennial shp--
    arcpy.SelectLayerByAttribute_management('flowline_per_lyr',
                                            'CLEAR_SELECTION')
    arcpy.CopyFeatures_management(flowline_per, outpath)
    arcpy.DeleteIdentical_management(outpath, ['Shape'])
addMsgAndPrint('angleBetween = ' + str(angleBetween)) addMsgAndPrint(' ') cursor.updateRow([ row[0], plotAz, row[2], dfs, angleBetween, row[5], appDip, rake, row[7] ]) del cursor # build framing lines addMsgAndPrint('Adding framing lines') ## make sure fc is there and empty cl = xsFDS + '/CS' + token + 'CartographicLines' if arcpy.Exists(cl): # delete all features arcpy.DeleteFeatures_management(cl) cursor = arcpy.da.InsertCursor(cl, ['Type', 'Label', 'SHAPE@']) ## z = 0 addMsgAndPrint(' adding horizon line') lin = arcpy.Polyline(arcpy.Array([toPoint1, toPoint2])) cursor.insertRow(['horizon', '', lin]) leftX = toPoint1.X rightX = toPoint2.X ## left end and right end addMsgAndPrint(' adding end caps') minYindex = -10 maxYindex = 6 minY = minYindex * 1000 maxY = maxYindex * 1000 ticLength = 200
def process_raw_grid(city_info, geo_path, hn_ranges=['MIN_LFROMA','MIN_RFROMA','MAX_LTOADD','MAX_RTOADD']):
    """
    Fix up street-grid geometry and duplicate house-number ranges.

    Steps involved:
        a) dissolve multi-part street segments,
        b) split segments at intersections,
        c) call fix_dup_address_ranges on the result.

    Parameters
    ----------
    city_info : list
        [city name (e.g. "Hartford"), state abbreviation (e.g. "CT"),
        decade (e.g. 1930)]
    geo_path : str
        Folder containing the input grid; also receives all intermediate
        and output shapefiles.
    hn_ranges : list, optional
        Field names of the min/max from/to house-number range columns
        in the street grid.

    Returns
    -------
    problem_segments : dict
        grid_id -> list of distinct street names tied for the longest
        length (arises from dissolved multi-part segments; needs review).
        Side effect: writes the fixed grid [CITY][ST]_[DECADE]_stgrid_edit_Uns.shp
        (and, per the original notes, fix_dup_address_ranges presumably
        produces the _Uns2.shp variant — confirm in that helper).
    """
    # NOTE(review): hn_ranges is a mutable default list; it is only read here,
    # never mutated, so the shared default is currently harmless.
    # Create filenames to be used throughout the process.
    city_name, state_abbr, decade = city_info
    city_name = city_name.replace(' ','')
    state_abbr = state_abbr.upper()
    # NOTE: by default we start from 1940 cleaned grids then save them as 19X0 grids!
    grid = geo_path + city_name + state_abbr + "_" + str(decade) + "_stgrid_edit.shp"
    grid_orig = "S:/Projects/1940Census/DirAdd/" + city_name + state_abbr + "_1940_stgrid_diradd.shp"
    dissolve_grid = geo_path + city_name + "_" + str(decade) + "_stgrid_Dissolve.shp"  # NOTE(review): never used below
    split_grid = geo_path + city_name + "_" + str(decade) + "_stgrid_Split.shp"
    grid_uns = geo_path + city_name + state_abbr + "_" + str(decade) + "_stgrid_edit_Uns.shp"
    grid_uns2 = geo_path + city_name + state_abbr + "_" + str(decade) + "_stgrid_edit_Uns2.shp"  # NOTE(review): never used below
    # Create a working copy of the "diradd" file to use as the grid.
    if not os.path.isfile(grid):
        if not os.path.isfile(grid_orig):
            print("%s%s_1940_stgrid_diradd.shp not found" % (city_name, state_abbr))
        else:
            arcpy.CopyFeatures_management(grid_orig, grid)
    # Can't <null> blank values, so before Dissolve aggregates with MIN,
    # replace blanks with a big sentinel (blanked out again after aggregation).
    codeblock_min = """def replace(x):
    if x == ' ':
        return 999999
    else:
        return x"""
    fieldName = "LFROMADD"
    expression = "replace(!LFROMADD!)"
    arcpy.CalculateField_management(grid, fieldName, expression, "PYTHON", codeblock_min)
    fieldName = "RFROMADD"
    expression = "replace(!RFROMADD!)"
    arcpy.CalculateField_management(grid, fieldName, expression, "PYTHON", codeblock_min)
    # Likewise, replace blanks with a small sentinel so the MAX aggregation works.
    codeblock_max = """def replace(x):
    if x == ' ':
        return -1
    else:
        return x"""
    fieldName = "LTOADD"
    expression = "replace(!LTOADD!)"
    arcpy.CalculateField_management(grid, fieldName, expression, "PYTHON", codeblock_max)
    fieldName = "RTOADD"
    expression = "replace(!RTOADD!)"
    arcpy.CalculateField_management(grid, fieldName, expression, "PYTHON", codeblock_max)
    # First Dissolve creates split_grid (no multi-part segments, split at intersections).
    arcpy.Dissolve_management(grid, split_grid, multi_part="SINGLE_PART", unsplit_lines="DISSOLVE_LINES")
    # Add a unique, static identifier (so ranges can be changed later).
    expression="!FID! + 1"
    arcpy.AddField_management(split_grid, "grid_id", "LONG", 4, "", "","", "", "")
    arcpy.CalculateField_management(split_grid, "grid_id", expression, "PYTHON_9.3")
    # Intersect the original grid with split_grid so every segment picks up a grid_id.
    temp = geo_path + "temp_step.shp"
    arcpy.CopyFeatures_management(grid, temp)
    arcpy.Intersect_analysis([temp, split_grid], grid)
    arcpy.DeleteFeatures_management(temp)  # NOTE(review): empties temp_step.shp but the file itself stays on disk
    # Second Dissolve merges lines per grid_id, aggregating the range fields.
    arcpy.Dissolve_management(in_features=grid,
                              out_feature_class=grid_uns,
                              dissolve_field="grid_id",
                              statistics_fields="LFROMADD MIN;LTOADD MAX;RFROMADD MIN;RTOADD MAX",
                              unsplit_lines="UNSPLIT_LINES")
    # Get the longest street name from each (formerly multi-part) segment.
    df_grid = load_shp(grid, hn_ranges)
    longest_name_dict = {}
    problem_segments = {}
    for grid_id, group in df_grid.groupby(['grid_id']):
        max_chars = group['FULLNAME'].str.len().max()
        longest_name = group.loc[group['FULLNAME'].str.len()==max_chars,'FULLNAME'].drop_duplicates().tolist()
        if len(longest_name) > 1:
            problem_segments[grid_id] = longest_name
        # Always returns first entry in longest_name (a list of names equal in length to max_chars).
        longest_name_dict[grid_id] = longest_name[0]
    # Assign the longest street name by grid_id (also add city/state for the geolocator).
    df_grid_uns = load_shp(grid_uns, hn_ranges)
    df_grid_uns.loc[:,'CITY'] = city_name
    df_grid_uns.loc[:,'STATE'] = state_abbr
    df_grid_uns.loc[:,'FULLNAME'] = df_grid_uns.apply(lambda x: longest_name_dict[x['grid_id']], axis=1)
    # Blank out the big/small sentinel numbers now that aggregation is done.
    def replace_nums(x):
        # Undo the MIN/MAX sentinels inserted above (values compared as strings).
        if x == "999999" or x == "-1":
            return ' '
        else:
            return x
    for field in hn_ranges:
        df_grid_uns[field] = df_grid_uns[field].astype(str)
        df_grid_uns[field] = df_grid_uns.apply(lambda x: replace_nums(x[field]), axis=1)
    save_shp(df_grid_uns, grid_uns)
    # Re-add a unique, static identifier (FIDs may have changed after saving).
    arcpy.DeleteField_management(grid_uns, "grid_id")
    expression="!FID! + 1"
    arcpy.AddField_management(grid_uns, "grid_id", "LONG", 10, "", "","", "", "")
    arcpy.CalculateField_management(grid_uns, "grid_id", expression, "PYTHON_9.3")
    # Fix duplicate address ranges.
    t = fix_dup_address_ranges(grid_uns,hn_ranges)
    print(t)
    return problem_segments
def glacier_debris(band_4, band_5, glacier_outline, out_dir):
    """Map debris cover on a glacier from a band4/band5 ratio image.

    band_4, band_5  : paths to the two input band rasters
    glacier_outline : path to the glacier outline shapefile
    out_dir         : folder receiving the level-1 (.TIF) and level-2 (.shp) products

    Reads module-level globals: Want_CloudRemoval, mask_dir, threshold,
    Lband, Hband, A_remove, A_fill.  All intermediates are prefixed 'del_'
    and removed at the end.  (Python 2 source.)
    """
    print 'Running glacier_debris'
    if Want_CloudRemoval == 'True':
        # Clip both bands by the scene's cloud mask first (mask file name is
        # derived from the band-4 file name), then by the glacier outline.
        outExtractByMask = ExtractByMask(
            band_4, mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
            band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
        outExtractByMask.save('del_nodatagone4.TIF')
        outExtractByMask = ExtractByMask(
            band_5, mask_dir + '\\' + band_4.split('\\')[-1].split('_b')[0][0:16] +
            band_4.split('\\')[-1].split('_b')[0][17:21] + 'mask.shp')
        outExtractByMask.save('del_nodatagone5.TIF')
        outExtractByMask = ExtractByMask('del_nodatagone4.TIF', glacier_outline)
        outExtractByMask.save('del_mask4.TIF')
        outExtractByMask = ExtractByMask('del_nodatagone5.TIF', glacier_outline)
        outExtractByMask.save('del_mask5.TIF')
        print 'extract'
    else:
        # No cloud removal: clip directly by the glacier outline.
        outExtractByMask = ExtractByMask(band_4, glacier_outline)
        outExtractByMask.save('del_mask4.TIF')
        outExtractByMask = ExtractByMask(band_5, glacier_outline)
        outExtractByMask.save('del_mask5.TIF')
        print 'extract'
    # Convert rasters to float so decimal threshold values work in the ratio.
    arcpy.RasterToFloat_conversion('del_mask4.TIF', 'del_band_4a.flt')
    arcpy.RasterToFloat_conversion('del_mask5.TIF', 'del_band_5a.flt')
    arcpy.Divide_3d('del_band_4a.flt', 'del_band_5a.flt', 'del_division.TIF')
    print 'division'
    # Cells with ratio above the threshold are nulled out; what remains is
    # treated as debris cover.
    outSetNull = SetNull('del_division.TIF', 'del_division.TIF', 'VALUE > ' + str(threshold))
    # Build the results name encoding scene date/sensor, bands, threshold and
    # the remove/fill areas; the loop below appends a counter when images from
    # the same year and day already exist in out_dir.
    result_name = glacier_outline.split('.shp')[0].split('\\')[-1] + '_' + band_4.split('\\')[-1][9:13] + 'y' + band_4.split('\\')[-1][13:16] + 'd' + '_L' + band_4.split('\\')[-1][2:3] + '_' + Lband.split('_')[-1][1:2] + Hband.split('_')[-1][1:2] + 'b' + str(int(threshold * 100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f'
    result_path = out_dir + glacier_outline.split('.shp')[0].split('\\')[-1] + '_' + band_4.split('\\')[-1][9:13] + 'y' + band_4.split('\\')[-1][13:16] + 'd' + '_L' + band_4.split('\\')[-1][2:3] + '_' + Lband.split('_')[-1][1:2] + Hband.split('_')[-1][1:2] + 'b' + str(int(threshold * 100)) + 't' + str(A_remove) + 'r' + str(A_fill) + 'f'
    # NOTE(review): str(...) == 'True' is a roundabout way of testing the
    # boolean 'in' expression; it does work as written.
    if str(result_name + '1.shp' in os.listdir(out_dir)) == 'True':
        result_path = result_path + '2'
    elif str(result_name + '2.shp' in os.listdir(out_dir)) == 'True':
        result_path = result_path + '3'
    elif str(result_name + '3.shp' in os.listdir(out_dir)) == 'True':
        result_path = result_path + '4'
    elif str(result_name + '4.shp' in os.listdir(out_dir)) == 'True':
        result_path = result_path + '5'
    elif str(result_name + '5.shp' in os.listdir(out_dir)) == 'True':
        result_path = result_path + '6'
    else:
        result_path = result_path + '1'
    result_file = result_path + '.TIF'
    print 'result file: ' + result_file
    outSetNull.save(result_file)
    print 'Level 1 product produced'
    # Float raster to integer (required before polygon conversion).
    outInt = Int(result_file)
    outInt.save('del_result_file_int.TIF')
    # Set local variables
    inRaster = 'del_result_file_int.TIF'
    outPolygons = 'del_debris.shp'
    field = 'VALUE'
    arcpy.RasterToPolygon_conversion(inRaster, outPolygons, 'NO_SIMPLIFY', field)
    print 'to polygon'
    # Process: Dissolve.  Need to create a "value" field where all elements = 0
    # so every polygon dissolves into one feature.
    arcpy.AddField_management('del_debris.shp', 'value', 'SHORT', 1, '', '', '', '', '')
    arcpy.Dissolve_management('del_debris.shp', 'del_debris_dissolve.shp', 'value')
    print 'dissolve'
    # Explode into singlepart features so small patches can be filtered by area.
    arcpy.MultipartToSinglepart_management('del_debris_dissolve.shp', 'del_explode.shp')
    print 'explode'
    # Process: calculate polygon areas (adds F_AREA, in map units — m2 per the
    # messages below).
    arcpy.CalculateAreas_stats('del_explode.shp', 'del_area.shp')
    arcpy.MakeFeatureLayer_management('del_area.shp', 'tempLayer')
    # Select and delete debris patches at or below the minimum area.
    expression = 'F_AREA <=' + str(A_remove)  # m2
    arcpy.SelectLayerByAttribute_management('tempLayer', 'NEW_SELECTION', expression)
    arcpy.DeleteFeatures_management('tempLayer')
    print 'Shapes with an area <= ' + str(
        A_remove) + ' m2 removed; ' + str(
            A_remove / 900) + ' pixles, if 30m pixels'
    arcpy.Delete_management('tempLayer')
    print 'tempLayer deleted'
    result_file2 = result_path + '.shp'
    print 'Level 2 result file: ' + result_file2
    # Process: aggregate (distance = 1 m, minimum area = 0, minimum hole size
    # = A_fill m2) — fills small holes inside the debris polygons.
    CA.AggregatePolygons('del_area.shp', result_file2, 1, 0, A_fill, 'NON_ORTHOGONAL')
    print 'holes with an area <= ' + str(
        A_fill) + ' m2 filled/merged with debris polygon; ' + str(
            A_fill / 900) + ' pixles, if 30m pixels'
    # Clean up every 'del_'-prefixed intermediate raster and feature class.
    rasterList = arcpy.ListRasters('*del*')
    for raster in rasterList:
        arcpy.Delete_management(raster)
    fcList = arcpy.ListFeatureClasses('*del*')
    for fc in fcList:
        arcpy.Delete_management(fc)
    print 'intermediate files deleted'
    print 'level 2 product produced'
while growthcount > 0: starttime = time.clock() count_var += 1 # Create dictionary that has object ids and the labels associated # This will be a reference for all the points with their associated labels TreeDictionary = {} MyCursor = arcpy.SearchCursor(growth) for Feature in MyCursor: TreeDictionary[Feature.getValue("OBJECTID")] = Feature.getValue( "TREE_ID") del Feature del MyCursor #Delete previous growth from the forest arcpy.DeleteFeatures_management(forest) # Run Near from the currently labeled points to the overall data arcpy.Near3D_3d(forest, growth, dist) # Label the new growth using the dictionary arcpy.SelectLayerByAttribute_management(forest, "NEW_SELECTION", ' "NEAR_FID"<>-1') with arcpy.da.UpdateCursor(forest, ['TREE_ID', 'NEAR_FID', 'Growth']) as cursor: for row in cursor: row[0] = TreeDictionary[row[1]] row[2] = count_var cursor.updateRow(row) del TreeDictionary
arcpy.DeleteField_management(centerPoints, 'ORIG_FID') #identity center points with inpolys testAndDelete(centerPoints2) arcpy.Identity_analysis(centerPoints, inPolys, centerPoints2, 'NO_FID') # delete points with MapUnit = '' ## first, make layer view addMsgAndPrint(" Deleting centerPoints2 MapUnit = '' ") sqlQuery = "{} = ''".format(arcpy.AddFieldDelimiters(centerPoints2, 'MapUnit')) testAndDelete('cP2Layer') arcpy.MakeFeatureLayer_management(centerPoints2, 'cP2Layer', sqlQuery) ## then delete features if numberOfRows('cP2Layer') > 0: arcpy.DeleteFeatures_management('cP2Layer') #adjust center point fields (delete extra, add any missing. Use NCGMP09_Definition as guide) ## get list of fields in centerPoints2 cp2Fields = fieldNameList(centerPoints2) ## add fields not in MUP as defined in Definitions fieldDefs = tableDict['MapUnitPolys'] for fDef in fieldDefs: if fDef[0] not in cp2Fields: addMsgAndPrint('field {} is missing'.format(fd)) try: if fDef[1] == 'String': arcpy.AddField_management(thisFC, fDef[0], transDict[fDef[1]], '#', '#', fDef[3], '#', transDict[fDef[2]]) else:
def delete_all(fc):
    """Delete every feature in *fc*, leaving the (now empty) dataset in place."""
    arcpy.management.DeleteFeatures(fc)
# Label the leftover basin polygons as 'OT' (other) and merge with the named
# basins into a single layer.
arcpy.CalculateField_management('basin_other', 'basin_name', "'OT'", 'PYTHON_9.3')
arcpy.Merge_management(['basins','basin_other'], 'basins_m')
# setup for theissen polygons: buffer the eez_basins by 200 km and use that as
# the processing extent, in the Mollweide projection (sr_mol, module-level).
arcpy.Buffer_analysis('eez_basins', 'eez_basins_buf200km', '200 kilometers', dissolve_option='ALL')
arcpy.env.extent = 'eez_basins_buf200km'
arcpy.env.outputCoordinateSystem = sr_mol
# Densify the basin edges and turn every vertex into a point; these points
# seed the Thiessen polygons.
arcpy.CopyFeatures_management('basins_m', 'thie')
arcpy.Densify_edit('thie', 'DISTANCE', '1 Kilometers')
arcpy.FeatureVerticesToPoints_management('thie', 'thie_pts', 'ALL')
# delete interior points (strictly inside the dissolved basins) — only
# boundary points matter for extending basins outward, and fewer points is faster.
arcpy.Dissolve_management('thie', 'thie_d')
arcpy.MakeFeatureLayer_management('thie_pts', 'lyr_pts')
arcpy.SelectLayerByLocation_management('lyr_pts', 'WITHIN_CLEMENTINI', 'thie_d')
arcpy.DeleteFeatures_management('lyr_pts')
# generate thiessen polygons and dissolve them per basin_name (back in the
# geographic coordinate system sr_gcs).
arcpy.CreateThiessenPolygons_analysis('thie_pts', 'thie_polys', 'ALL')
arcpy.env.outputCoordinateSystem = sr_gcs
arcpy.Dissolve_management('thie_polys', 'thie_polys_d', ['basin_name'])
# Put the detailed original basins back: erase them from the Thiessen fill,
# merge, and dissolve on region id/name.
arcpy.Erase_analysis('thie_polys_d', 'basins_m', 'thie_polys_d_e')
arcpy.Merge_management(['thie_polys_d_e','basins_m'], 'thie_polys_d_e_m')
arcpy.Dissolve_management('thie_polys_d_e_m', 'thie_polys_d_e_m_d', ['rgn_id','rgn_name'])
# intersect expanded basins with eez's
arcpy.Intersect_analysis(['sp_gcs','thie_polys_d_e_m_d'], 'sp_thie_m')
# Compose the combined region name "<eez>_<basin>" and dissolve to the final
# eez_basins layer.
arcpy.AddField_management('eez_basins_m', 'rgn_name', 'TEXT')
arcpy.CalculateField_management('eez_basins_m', 'rgn_name', "'%s_%s' % (!eez_name!, !basin_name!)", 'PYTHON_9.3')
arcpy.Dissolve_management('eez_basins_m', 'eez_basins', ['eez_name','basin_name','rgn_name'])
"FCode" = 46006 OR "FCode" = 33600 OR "FCode" = 33400 OR "FCode" = 33601""") arcpy.MakeFeatureLayer_management("preout", "poslakeorder") arcpy.SelectLayerByLocation_management("poslakeorder", "INTERSECT", "perennial", '', "NEW_SELECTION") arcpy.SelectLayerByLocation_management("poslakeorder", "INTERSECT", "perennial", '', "SWITCH_SELECTION") arcpy.SelectLayerByAttribute_management("poslakeorder", "SUBSET_SELECTION", """"Strahler" >= 0""") arcpy.CalculateField_management("poslakeorder", "Strahler", "-1", "PYTHON") arcpy.SelectLayerByAttribute_management("poslakeorder", "CLEAR_SELECTION") arcpy.CopyFeatures_management( "poslakeorder", os.path.join(outfolder, "LakeOrder_" + basename + ".shp")) lakeorder = os.path.join(outfolder, "LakeOrder_" + basename + ".shp") # Clear in memory workspace for root, dirs, files in arcpy.da.Walk(mem): for file in files: arcpy.DeleteFeatures_management(file) # Change field name from Strahler to LkOrder arcpy.AddField_management(lakeorder, "LkOrder", "SHORT") exp = "!Strahler!" arcpy.CalculateField_management(lakeorder, "LkOrder", exp, "PYTHON") deletefields = ['Strahler', 'Connection', 'Join_Count', 'TARGET_FID'] try: arcpy.DeleteField_management(lakeorder, deletefields) except: pass
# Add the dip feature class to the map's data frame (df, sr, strDataFC,
# outDipFC, outAuxFC come from the surrounding script).
addLayer = arcpy.mapping.Layer(outDipFC)
arcpy.mapping.AddLayer(df, addLayer, "TOP")  # Add layer to data frame
# Make the dip lines to continue with the profile elaboration: for each
# station, draw a 500 m line in the apparent-dip direction.
outlines2 = []
cursor2 = arcpy.SearchCursor(strDataFC)
for row2 in cursor2:
    Bzm = (row2.getValue("AparentDip"))
    Bz1 = 360 - Bzm  # bearing used when the point dips left ("Izq")
    Bz2 = Bzm        # bearing used when the point dips right ("Der")
    I_D = (row2.getValue("I_D"))  # side flag: "Izq" (left) / "Der" (right)
    xi = (row2.getValue("Distance"))  # x = distance along the profile
    yi = (row2.getValue("POINT_Y"))
    start = arcpy.PointGeometry(arcpy.Point(xi, yi), sr)
    if I_D == "Izq":
        end = start.pointFromAngleAndDistance(Bz1, 500, "PLANAR")
    elif I_D == "Der":
        end = start.pointFromAngleAndDistance(Bz2, 500, "PLANAR")
    else:
        # Unknown side: degenerate zero-length line at the station.
        end = start
    outlines2.append(
        arcpy.Polyline(arcpy.Array([start.centroid, end.centroid]), sr))
auxLines = arcpy.CopyFeatures_management(outlines2, outAuxFC)
addLayer = arcpy.mapping.Layer(outAuxFC)
arcpy.mapping.AddLayer(df, addLayer, "TOP")  # Add layer to data frame
# Delete temporal data
arcpy.DeleteFeatures_management('in_memory\Aux1')
import arcpy, os

# Demo of arcpy's two exception classes.  Severity level 1 makes tool
# warnings raise arcpy.ExecuteWarning (errors always raise ExecuteError).
arcpy.SetSeverityLevel(1)

# Test ExecuteError: GetCount on an empty path fails as a tool error.
try:
    arcpy.GetCount_management("")
except arcpy.ExecuteError:
    print(arcpy.GetMessages(2))  # severity 2 = error messages only

# Test ExecuteWarning: deleting features from the freshly created, empty
# shapefile triggers a tool warning, promoted to an exception above.
try:
    arcpy.CreateFeatureclass_management(os.getcwd(), "TestExecuteShp.shp")
    arcpy.DeleteFeatures_management(os.getcwd() + os.sep + "TestExecuteShp.shp")
except arcpy.ExecuteWarning:
    print(arcpy.GetMessages(1))  # severity 1 = warning messages only

# Remove the scratch shapefile.
arcpy.Delete_management(os.getcwd() + os.sep + "TestExecuteShp.shp")
arcpy.Delete_management(AnnoScale) # Process: Import Coverage Annotation arcpy.ImportCoverageAnnotation_conversion( AnnoScaleCovLyr, AnnoScale, "1200", "CLASSES_FROM_LEVELS", "NO_MATCH", "NO_SYMBOL_REQUIRED", "STANDARD", "", "AUTO_CREATE", "AUTO_UPDATE") # Calc fields (function) CalcFields(AnnoScale) # Clean up template features AnnoScaleLyr = AnnoScale + "lyr" arcpy.MakeFeatureLayer_management(AnnoScale, AnnoScaleLyr, "TextString LIKE 'LEVEL%'", "", "") arcpy.DeleteFeatures_management(AnnoScaleLyr) print("Done with import- " + scale) logfile.write("Done with import- " + scale + '\n') # Do Taxlot Anno ------------------------------------------------------------------------------------------------- TaxlotAnnoCov = arcpy.mapping.ListLayers(mxd, "TaxLotAn.igds")[0] TaxlotAnnoFeature = "townedgeo.gdb\\TaxlotsFD\\TaxlotNumberAnno" # Delete arcpy.Delete_management(TaxlotAnnoFeature) # Process: Import Coverage Annotation arcpy.ImportCoverageAnnotation_conversion(TaxlotAnnoCov, TaxlotAnnoFeature, "1200", "CLASSES_FROM_LEVELS", "NO_MATCH", "NO_SYMBOL_REQUIRED",
def RunTest():
    """Smoke-test the 'Load Geonames File' tool from the DefenseGeonames toolbox.

    Empties the GeonamesTestPy feature class, loads fr.txt into it using the
    three code lookup tables, then reports the resulting feature count.
    Exits the process with status -1 on a tool error or any Python error.
    """
    try:
        arcpy.AddMessage("Starting Test: Load Geonames File")
        toolbox = TestUtilities.toolbox
        arcpy.ImportToolbox(toolbox, "DefenseGeonames")
        arcpy.env.overwriteOutput = True
        # Inputs: target feature class, raw geonames text file, and the three
        # lookup tables (country / admin / feature codes).
        featureClass = os.path.join(TestUtilities.inputGDB, "GeonamesTestPy")
        textfile = os.path.join(TestUtilities.geodatabasePath, "fr.txt")
        countryCodes = os.path.join(TestUtilities.inputGDB, "CountryCodes")
        adminCodes = os.path.join(TestUtilities.inputGDB, "AdminCodes")
        featureCodes = os.path.join(TestUtilities.inputGDB, "FeatureCodes")
        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))
        arcpy.env.overwriteOutput = True
        # Start from an empty target so the count below reflects this run only.
        arcpy.AddMessage("Deleting features from GeonamesTestPy")
        arcpy.DeleteFeatures_management(featureClass)
        arcpy.AddMessage("Starting Load Geonames File tool...")
        ########################################################
        # Execute the Model under test:
        arcpy.LoadGeonamesFile_DefenseGeonames(featureClass, textfile, countryCodes, adminCodes, featureCodes)
        ########################################################
        # Check For Valid Input
        inputFeatureCount = int(
            arcpy.GetCount_management(featureClass).getOutput(0))
        print("Input FeatureClass: " + str(featureClass))
        print("Input Feature Count: " + str(inputFeatureCount))
        # NOTE(review): no assertion on the count — "success" only means the
        # tool ran without raising.
        print("Test Successful")
    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        # return a system error code
        sys.exit(-1)
    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
        # return a system error code
        sys.exit(-1)
'4209', '4211', '4212', '4213', '4214', '4221', '4222', '4313', '4316', '4319', '4613', '4670')""") ap.CopyFeatures_management("lu_lyr", "lu_res") ap.Delete_management("lu_lyr") ap.MakeFeatureLayer_management("lu_res", "res_lyr") ap.MakeFeatureLayer_management( parcels, "parcel_lyr", """"StClsCode" IN ('A1', 'A2', 'B2', 'B3', 'B4')""") ap.SelectLayerByLocation_management( "res_lyr", "ARE_IDENTICAL_TO", "parcel_lyr") ap.DeleteFeatures_management("res_lyr") ap.Delete_management("res_lyr") ap.Delete_management("parcel_lyr") # The GetCodeCounts function uses summary statistics to calculate counts for # each different parcel code in each tract. Tracts are spatially joined to # parcels in order to assign the tract ID field to each parcel Weighted counts # are then calculated by multiplying each count by its respective parcel code weight. # All weighted counts for each tract are then summed for the total weighted count. def GetCodeCounts( parcel_fc, tract_fc, joined_parcels, code_field, tract_id, sum_field, code_list): """This function generates tract-level weighted code counts from Houston parcel data. Weights are determined by estimated housing size. The function takes 7 arguments:
"") arcpy.SelectLayerByAttribute_management( "HazMit_Parcels_Layer", "NEW_SELECTION", "\"HazMit_Parcels.URBANFIRE\" IS NULL") arcpy.CalculateField_management( "HazMit_Parcels_Layer", "URBANFIRE", "\"Not in Urban Fire Zone\"", "PYTHON", "") ###### + !"+Sort_Dissolve+".FID_Urban_Type! arcpy.RemoveJoin_management("HazMit_Parcels_Layer") del i # Process: Delete Features message(report, "Deleting Old Hazmit Information") arcpy.DeleteFeatures_management(York_Projects_HazMit_Parcel) # Process: Append message(report, "Appending new hazmit information into York Projects") arcpy.Append_management( Hazmit_Data, York_Projects_HazMit_Parcel, "NO_TEST", "\ PIDN \"PIDN\" true true false 13 Text 0 0 ,First,#," + Hazmit_Data + ",PIDN,-1,-1;\ PROPADR \"PROPADR\" true true false 8 Text 0 0 ,First,#," + Hazmit_Data + ",PROPADR,-1,-1;\ OWNER_FULL \"OWNER_FULL\" true true false 8 Text 0 0 ,First,#," + Hazmit_Data + ",OWNER_FULL,-1,-1;\ LUC \"LUC\" true true false 54 Text 0 0 ,First,#," + Hazmit_Data + ",LUC,-1,-1;\ FLOOD \"FLOOD\" true true false 81 Text 0 0 ,First,#," + Hazmit_Data + ",FLOOD,-1,-1;\
except Exception, e: arcpy.AddMessage('%s' % (e)) continue else: row[-3] = 0 row[-2] = 0 row[-1] = 0 cursor.updateRow(row) except Exception, e: arcpy.AddError('%s' % (e)) finally: del_files = ["in_memory\\clusters"] for fname in del_files: try: arcpy.DeleteFeatures_management(fname) except Exception: continue if __name__ == "__main__": ###Inputs### infc = arcpy.GetParameterAsText(0) infc2 = arcpy.GetParameterAsText(1) infc3 = arcpy.GetParameterAsText(2) main(infc, infc2, infc3)
#same different ranges with range 0, same range dir arcpy.SelectLayerByAttribute_management( BndUD, "NEW_SELECTION", "[MIN_RNG_NO]=0 AND [MAX_RNG_NO]>0 AND [MIN_RNG_Dir] = 0 AND [MAX_RNG_Dir]=1" ) arcpy.CalculateField_management(BndUD, "RANGE", '"R."&[MAX_RNG_NO]&" E"', "VB", "#") arcpy.SelectLayerByAttribute_management( BndUD, "NEW_SELECTION", "[MIN_RNG_NO]=0 AND [MAX_RNG_NO]>0 AND [MIN_RNG_Dir] = 0 AND [MAX_RNG_Dir]=2" ) arcpy.CalculateField_management(BndUD, "RANGE", '"R."&[MAX_RNG_NO]&" W"', "VB", "#") arcpy.SelectLayerByAttribute_management(BndUD, "NEW_SELECTION", BndIndx + "=0") arcpy.DeleteFeatures_management(BndUD) #if BndUD = "City": # arcpy.AddField_management(BndUD,"City","TEXT","120","#","#","#","NULLABLE","NON_REQUIRED","#") #else: # print ("going ahead") arcpy.AddField_management(BndUD, "CountyNum", "LONG", "#", "#", "#", "#", "NULLABLE", "NON_REQUIRED", "#") arcpy.AddField_management(BndUD, "City", "TEXT", "120", "#", "#", "#", "NULLABLE", "NON_REQUIRED", "#") arcpy.AddField_management(BndUD, "County", "TEXT", "120", "#", "#", "#", "NULLABLE", "NON_REQUIRED", "#") arcpy.AddField_management(BndUD, "District", "TEXT", "120", "#", "#", "#", "NULLABLE", "NON_REQUIRED", "#") arcpy.AddField_management(BndUD, "County_Abbr", "TEXT", "4", "#", "#", "#", "NULLABLE", "NON_REQUIRED", "#")
def Create_feature(list = []): arcpy.AddMessage('creating feature class for BBox: ') spatial_ref = arcpy.SpatialReference(6318) arcpy.CreateFeatureclass_management(workspace, 'BBox_temp', "POINT", infc, "DISABLED", "DISABLED", spatial_ref) # Add data to the new feature class FCFile = workspace + '\\BBox_temp' FCFile_prj = FCFile + "_Project" if not FCFile.endswith('.shp'): FCFile = FCFile + '.shp' if not FCFile_prj.endswith('.shp'): FCFile_prj = FCFile_prj + '.shp' deleFields_BBox = ['FEATURE_ID', 'STATE_ALPH', 'COUNTY_NAM', 'FEATURE_CL'] arcpy.DeleteField_management(FCFile, deleFields_BBox) # cursor = arcpy.da.InsertCursor(FCFile, newFields) for entry in list: with arcpy.da.InsertCursor(FCFile, ['FEATURE_NA', 'SHAPE@']) as cursor: # 'NAME': pointName,'X-MIN': xmin_bbox, 'Y-MIN': ymin_bbox, 'X-MAX': xmax_bbox, 'Y-MAX': ymax_bbox features_name = entry['NAME'] row = [entry['NAME'], entry['POINT']] print (row) cursor.insertRow(row) #arcpy.AddMessage(row) out_corsys = arcpy.SpatialReference(6318) #arcpy.AddMessage('AddXY....' #arcpy.AddXY_management(FCFile) arcpy.AddGeometryAttributes_management(FCFile,'POINT_X_Y_Z_M','KILOMETERS','#',out_corsys) arcpy.AddMessage('Converting to different spatial Reference....') arcpy.Project_management(FCFile, FCFile_prj, out_corsys) # take the xmin, ymax, xmax, ymin from the feature class bbox_create = [] with arcpy.da.SearchCursor(FCFile_prj, ['POINT_X', 'POINT_Y']) as cursor: for row in cursor: bbox_create.append(row[0]) bbox_create.append(row[1]) # swap the position of ymax and ymin to fit the API temp_swap = bbox_create[1] bbox_create[1] = bbox_create[3] bbox_create[3] = temp_swap prodExtents = "1 x 1 degree" datasets = "National Elevation Dataset (NED) 1/3 arc-second" arcpy.AddMessage('Requesting to get files from TNM....') TNM_API_Download(features_name,datasets, bbox_create, prodExtents) # we dont need the BBox.shp file for the next point. 
Delete it to save space arcpy.DeleteFeatures_management(FCFile) spat_ref_NAEC = arcpy.SpatialReference('North America Equidistant Conic') if len(merge_file) > 1: arcpy.AddMessage("Merge Raster...") arcpy.MosaicToNewRaster_management(merge_file, workspace, '\\MergedIMG_NAEC.tif', coordinate_system_for_the_raster=spat_ref_NAEC, pixel_type='32_BIT_FLOAT', number_of_bands=1, mosaic_method='LAST', mosaic_colormap_mode='FIRST') return (workspace + '\\MergedIMG_NAEC.tif') elif len(merge_file) == 0: print ('No Raster IMG found...') else: arcpy.MosaicToNewRaster_management(merge_file[0], workspace, '\\MergedIMG_NAEC.tif', coordinate_system_for_the_raster=spat_ref_NAEC, pixel_type='32_BIT_FLOAT', number_of_bands=1, mosaic_method='LAST', mosaic_colormap_mode='FIRST') return (workspace + '\\MergedIMG_NAEC.tif')
def ExtractRange(outRaster, outFilePath, file):
    """Vectorise the positive-valued cells of *outRaster* into smoothed polygons.

    outRaster   : input raster; only cells with VALUE > 0 are kept
    outFilePath : unused in this function — NOTE(review): confirm callers
                  before removing it from the signature
    file        : output folder for intermediate and final shapefiles
                  (shadows the Py2 builtin 'file'; renaming would change the
                  call signature, so left as-is)

    Pipeline: ExtractByAttributes -> RasterToPoint -> AggregatePoints ->
    SmoothPolygon -> CalculateAreas.  Each step and each cleanup is
    deliberately best-effort (errors swallowed or just printed) so one bad
    input cannot abort a batch; calculate_output.shp is the surviving result.
    """
    inSQLClause = "VALUE > 0"
    try:
        # Execute ExtractByAttributes
        attExtract = arcpy.sa.ExtractByAttributes(outRaster, inSQLClause)
        print('87')  # progress marker, flushed for live log readers
        sys.stdout.flush()
        # Save the output
        #attExtract.save("F:\\ree\\PM25T08.tif")
        rasfile = os.path.split(outRaster)[1]  # NOTE(review): unused below
        in_point_features = os.path.join(file, u"RasterToPoint_conversion.shp")
        out_feature_class = os.path.join(file, u"AggregatePoints_cartography.shp")
        out_SmoothPolygon_class = os.path.join(file, u"out_SmoothPolygon_class.shp")
        calculate_output = os.path.join(file, u"calculate_output.shp")
        # Raster cells -> points (best-effort).
        try:
            arcpy.RasterToPoint_conversion(attExtract, in_point_features, "VALUE")
        except:
            pass
        # Points -> aggregated polygons within 30 units (best-effort).
        try:
            arcpy.AggregatePoints_cartography(in_point_features, out_feature_class, 30)
        except:
            pass
        # Smooth the polygon outlines (PAEK, 30-unit tolerance; best-effort).
        try:
            arcpy.SmoothPolygon_cartography(out_feature_class, out_SmoothPolygon_class, 'PAEK', 30)
        except:
            pass
        try:
            # Process: Calculate Areas...
            arcpy.CalculateAreas_stats(out_SmoothPolygon_class, calculate_output)
        except:
            # If an error occurred when running the tool, print out the error message.
            traceback.print_exc()
        # Best-effort cleanup of every intermediate (empty then delete the two
        # feature classes; calculate_output.shp is kept).
        try:
            arcpy.Delete_management(in_point_features)
        except:
            traceback.print_exc()
        try:
            arcpy.DeleteFeatures_management(out_SmoothPolygon_class)
        except:
            traceback.print_exc()
        try:
            arcpy.DeleteFeatures_management(out_feature_class)
        except:
            traceback.print_exc()
        try:
            arcpy.Delete_management(out_feature_class)
        except:
            traceback.print_exc()
        try:
            arcpy.Delete_management(out_SmoothPolygon_class)
        except:
            traceback.print_exc()
    except Exception as err:
        arcpy.AddMessage("ExtractByAttributes Failed")
        arcpy.AddMessage(err)
        traceback.print_exc()
    return
# Select the "natural" features: not gardens/amenity grassland, not man-made,
# not already tagged as greenspace, and matching the designation list
# (hab_field, MakeField, des_list_expression come from the surrounding script).
expression = hab_field + " NOT IN ('Garden', 'Amenity grassland') AND " + MakeField + " <> 'Manmade' AND " \
             "(GreenSpace IS NULL OR GreenSpace = '') AND " + des_list_expression
arcpy.SelectLayerByAttribute_management("sel_lyr", where_clause=expression)
arcpy.CopyFeatures_management("sel_lyr", "Natural_features")
arcpy.Delete_management("sel_lyr")
if intersect_access:
    print(
        " Erasing and deleting existing greenspace from access layer, to reduce slivers"
    )
    # Drop access types that duplicate greenspace designations.
    arcpy.MakeFeatureLayer_management("Public_access", "del_lyr")
    expression = "PADescription = 'country_park' OR PADescription = 'millennium_green' OR PADescription = 'doorstep_green'"
    arcpy.SelectLayerByAttribute_management("del_lyr", where_clause=expression)
    arcpy.DeleteFeatures_management("del_lyr")
    arcpy.Delete_management("del_lyr")
    # Erase base-map polygons already tagged as greenspace from the access
    # layer (tight 0.001 m tolerance).
    arcpy.MakeFeatureLayer_management(base_map, "sel_lyr2")
    expression = "GreenSpace IS NOT NULL AND GreenSpace <> ''"
    arcpy.SelectLayerByAttribute_management("sel_lyr2", where_clause=expression)
    arcpy.Erase_analysis("Public_access", "sel_lyr2", "Public_access_erase",
                         cluster_tolerance="0.001 Meters")
    print(" Deleting slivers")
    # Explode to singlepart and remove pieces under 20 (map units squared,
    # per MyFunctions.delete_by_size — confirm units in that helper).
    arcpy.MultipartToSinglepart_management("Public_access_erase",
                                           "Public_access_erase_sp")
    MyFunctions.delete_by_size("Public_access_erase_sp", 20)
arcpy.MakeFeatureLayer_management("New_snap_union_sp_delid", "Elim_layer") arcpy.SelectLayerByAttribute_management( "Elim_layer", where_clause="Shape_Area < " + str(sliver_size)) arcpy.Eliminate_management("Elim_layer", "New_snap_union_sp_delid_elim") arcpy.Delete_management("Elim_layer") print(" Deleting remaining standalone slivers") arcpy.CopyFeatures_management("New_snap_union_sp_delid_elim", "New_snap_union_sp_delid_elim_del") arcpy.MakeFeatureLayer_management( "New_snap_union_sp_delid_elim_del", "Del_layer") arcpy.SelectLayerByAttribute_management( "Del_layer", where_clause="Shape_Area < " + str(sliver_size)) arcpy.DeleteFeatures_management("Del_layer") arcpy.Delete_management("Del_layer") arcpy.CopyFeatures_management("New_snap_union_sp_delid_elim_del", "New_snap_clean") MyFunctions.check_and_repair("New_snap_clean") # Deciding which polygons to split, to incorporate new feature boundaries # ----------------------------------------------------------------------- if tabulate_intersections == True: # Save ObjectID to separate field as this will be used later (also area, just for info). Check first to see if new fields already added. print " ## Tabulating intersections" print(" Saving new feature Index IDs and areas") MyFunctions.check_and_add_field("New_snap_clean", new_ID, "LONG", 0) arcpy.CalculateField_management("New_snap_clean", new_ID,
def Class_Holes(SymDiff, IBS, INNEN):
    """Classify SymDiff patches that fill holes (interior rings) of UGB/IBS.

    Holes are detected by converting a dissolved boundary to lines, turning
    those lines back into polygons (which fills the rings), and keeping the
    polygons NOT identical to the dissolved input (the inverted selection).
    SymDiff patches identical to a hole, with OVERLAP below the module-level
    threshold LBC, are copied to the class output and removed from SymDiff.

    Relies on module-level UGB, LBC and helpers mem/tmp/Shp_Area.

    :param SymDiff: remaining symmetrical-difference features to classify
    :param IBS: settlement-body feature class (hole source for INNEN='POS')
    :param INNEN: 'POS' or 'NEG' - which side of the difference to handle.
        NOTE: for any other value, Sel is never assigned and the final
        CopyFeatures raises NameError.
    :return: (layer of still-unclassified SymDiff features, hole-class shapefile)
    """
    SymDiff_N3 = tmp("A_{}_Holes.shp".format(INNEN))
    UGBDiss = mem('UGBDiss')
    UGB_holes = mem('UGBHoles')
    IBS_Diss = mem('IBS_Diss')
    IBS_holes = mem('IBSHoles')

    # identify all holes in UGB: line->polygon fills interior rings, then the
    # INVERT selection keeps only the fill polygons (the holes themselves).
    arcpy.management.Dissolve(UGB, UGBDiss, None, None, "SINGLE_PART")
    UGB_FL = arcpy.MakeFeatureLayer_management(UGBDiss)
    arcpy.management.FeatureToLine(UGBDiss, mem("UGBLine"), None, "ATTRIBUTES")
    arcpy.management.FeatureToPolygon(mem("UGBLine"), mem("UGBLinePoly"), None,
                                      "ATTRIBUTES", None)
    UGBLinePoly_FL = arcpy.MakeFeatureLayer_management(mem("UGBLinePoly"))
    Sel = arcpy.management.SelectLayerByLocation(UGBLinePoly_FL,
                                                 "ARE_IDENTICAL_TO", UGB_FL,
                                                 None, "NEW_SELECTION",
                                                 "INVERT")
    arcpy.CopyFeatures_management(Sel, UGB_holes)

    # identify all holes in IBS (same line->polygon->invert technique)
    arcpy.management.Dissolve(IBS, IBS_Diss, None, None, "SINGLE_PART")
    IBS_FL = arcpy.MakeFeatureLayer_management(IBS_Diss)
    arcpy.management.FeatureToLine(IBS_Diss, mem("IBSLine"), None, "ATTRIBUTES")
    arcpy.management.FeatureToPolygon(mem("IBSLine"), mem("IBSLinePoly"), None,
                                      "ATTRIBUTES", None)
    IBSLinePoly_FL = arcpy.MakeFeatureLayer_management(mem("IBSLinePoly"))
    Sel = arcpy.management.SelectLayerByLocation(IBSLinePoly_FL,
                                                 "ARE_IDENTICAL_TO", IBS_FL,
                                                 None, "NEW_SELECTION",
                                                 "INVERT")
    arcpy.CopyFeatures_management(Sel, IBS_holes)

    SymDiff_FL = arcpy.MakeFeatureLayer_management(SymDiff)

    # case select: POS patches are matched against IBS holes, NEG patches
    # against UGB holes; both then filter on OVERLAP < LBC and INNEN.
    if INNEN == 'POS':
        IBS_holes_FL = arcpy.MakeFeatureLayer_management(IBS_holes)
        Sel = arcpy.management.SelectLayerByLocation(SymDiff_FL,
                                                     "ARE_IDENTICAL_TO",
                                                     IBS_holes_FL, None,
                                                     "NEW_SELECTION",
                                                     "NOT_INVERT")
        arcpy.CopyFeatures_management(Sel, mem("SymDiffHoles"))
        SymDiffHoles_FL = arcpy.MakeFeatureLayer_management(
            mem("SymDiffHoles"))
        Sel = arcpy.management.SelectLayerByAttribute(
            SymDiffHoles_FL, "NEW_SELECTION",
            "OVERLAP < {} AND INNEN = '{}'".format(LBC, INNEN))
    if INNEN == 'NEG':
        UGBHoles_FL = arcpy.MakeFeatureLayer_management(UGB_holes)
        Sel = arcpy.management.SelectLayerByLocation(SymDiff_FL,
                                                     "ARE_IDENTICAL_TO",
                                                     UGBHoles_FL, None,
                                                     "NEW_SELECTION",
                                                     "NOT_INVERT")
        arcpy.CopyFeatures_management(Sel, mem("SymDiffHoles"))
        SymDiffHoles_FL = arcpy.MakeFeatureLayer_management(
            mem("SymDiffHoles"))
        Sel = arcpy.management.SelectLayerByAttribute(
            SymDiffHoles_FL, "NEW_SELECTION",
            "OVERLAP < {} AND INNEN = '{}'".format(LBC, INNEN))

    # create output files, then remove the classified holes from SymDiff so
    # downstream classifiers only see what is still unclassified.
    arcpy.CopyFeatures_management(Sel, SymDiff_N3)
    Shp_Area(SymDiff_N3)
    SymDiff_N3_FL = arcpy.MakeFeatureLayer_management(SymDiff_N3)
    arcpy.management.SelectLayerByLocation(SymDiff_FL, "ARE_IDENTICAL_TO",
                                           SymDiff_N3_FL, None,
                                           "NEW_SELECTION", "NOT_INVERT")
    arcpy.DeleteFeatures_management(SymDiff_FL)
    return SymDiff_FL, SymDiff_N3
def getSinuosity(shape):
    """Calculate the sinuosity index of a river polyline shapefile.

    Validates the input (non-empty, known spatial reference, polyline
    geometry), dissolves multi-feature inputs to one line, and writes a
    'sinuosity' field: straight-line distance / length when the module-level
    normalize_sin_bool == 'true' (max value 1), otherwise length / distance
    (classic ESRI definition).  When river_section_bool == 'true' the line
    is additionally split into `sections` equal-percentage parts and a
    per-section 'sec_sin' field is computed the same way.

    Relies on module-level script parameters: year_list, normalize_sin_bool,
    river_section_bool, sections.  Intermediate shapefiles and DBF tables
    are written to the current workspace.

    :param shape: path to a polyline shapefile whose name contains one of
        the years in year_list (otherwise nothing is calculated).
    """
    #############################################
    # Catching possible Errors - Error handling.
    #############################################
    # Empty input (no features) aborts the script.
    # NOTE(review): sys.exit(0) signals success to the caller even on these
    # error paths - consider a non-zero exit code.
    f_count = arcpy.GetCount_management(shape)
    if int(f_count[0]) > 0:
        arcpy.AddMessage("The input {0} has {1} features".format(
            shape.split("\\")[-1], f_count))
    else:
        arcpy.AddError(
            'The input {} has no features the execution of the script will fail ... Please check the input shapefiles ...'
            .format(shape.split("\\")[-1]))
        sys.exit(0)

    # Unknown spatial reference also aborts (distances would be meaningless).
    spatial_ref = arcpy.Describe(shape).spatialReference
    if spatial_ref.name != "Unknown":
        arcpy.AddMessage("The spatial reference of {0} is {1}".format(
            shape.split("\\")[-1], spatial_ref.name))
    else:
        arcpy.AddError(
            "Beware ... the used input {0} has Unknown spatial reference ... Please check the Spatial Reference of the input shapefiles ... The execution of the script will be terminated soon ..."
            .format(shape))
        sys.exit(0)

    # Only polyline geometry is accepted.
    desc = arcpy.Describe(shape)
    geometryType = desc.shapeType
    if str(geometryType) == 'Polyline':
        pass
    else:
        arcpy.AddError(
            '{} is not a line/polyline ... Please check the input shapefiles ...'
            .format(shape.split("\\")[-1]))
        sys.exit(0)

    #####################
    # Calculate Sinuosity
    #####################
    arcpy.AddMessage(
        "### Calculating sinuosity index for the whole river ###")
    # Only the year found in the shapefile name is processed.
    for year in year_list:
        if year in shape:
            # NOTE(review): the bare except below swallows ALL errors
            # (only echoing arcpy messages); `sinuosity` may then be
            # undefined at the CalculateField call after the try block.
            try:
                if int(
                        f_count[0]
                ) > 1:  # Multiple features: dissolve to a single line first.
                    arcpy.AddMessage(
                        "{0} has {1} features and it will be dissolved into 1 feature ..."
                        .format(shape.split("\\")[-1], f_count))
                    shape_dissolve = r'river_dissolved.shp'  # dissolved output
                    arcpy.Dissolve_management(
                        shape, shape_dissolve)
                    shape = shape_dissolve  # use the dissolved shape from here on

                # Compute total river length into TOT_LENGTH (LENGTH is a
                # temporary geometry field, copied then deleted to keep
                # field names unambiguous).
                arcpy.AddMessage("Adding Geometry field ...")
                arcpy.AddGeometryAttributes_management(
                    shape, "LENGTH", "METERS"
                )
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    shape, 'TOT_LENGTH', 'DOUBLE'
                )
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    shape, "TOT_LENGTH", "!LENGTH!", "PYTHON"
                )
                arcpy.AddMessage("Deleting field ...")
                arcpy.DeleteField_management(
                    shape, "LENGTH"
                )
                arcpy.AddMessage(
                    "Calculating total length of the river ...")
                cursor = arcpy.da.SearchCursor(
                    shape, ["TOT_LENGTH"]
                )
                length = 0  # sum of all line features' lengths
                for row in cursor:
                    length += row[0]

                # Extract start and end vertices as point shapefiles.
                arcpy.AddMessage(
                    "Extracting the ending point of the river ...")
                river_end_shp = r'end_' + str(
                    year
                ) + '.shp'
                arcpy.AddMessage(
                    "Extracting the starting point of the river ...")
                river_start_shp = r'start_' + str(
                    year
                ) + '.shp'
                arcpy.AddMessage(
                    "Feature Vertices to Points for the 'start' and 'end' vertices of the river ..."
                )
                arcpy.FeatureVerticesToPoints_management(
                    shape, river_end_shp, "end"
                )
                arcpy.FeatureVerticesToPoints_management(
                    shape, river_start_shp, "start"
                )

                # Straight-line distance between the two endpoints, via a
                # PointDistance table.
                arcpy.AddMessage(
                    "Calculating straight distance between start and end vertices of the river ..."
                )
                distance_table = r'distance' + str(year) + '.dbf'
                arcpy.PointDistance_analysis(
                    river_end_shp, river_start_shp, distance_table, ""
                )
                cursor = arcpy.da.SearchCursor(
                    distance_table, "DISTANCE"
                )
                d = 0  # straight (direct) distance; keeps the last row's value
                for rows in cursor:
                    d = rows[
                        0]
                arcpy.AddMessage(
                    "The straight distance between the starting and ending point is now computed and stored in the {}"
                    .format(distance_table))

                if normalize_sin_bool == 'true':  # script-tool booleans arrive as text
                    sinuosity = d / length  # normalized: max possible sinuosity = 1
                else:
                    sinuosity = length / d  # classic index as used by ESRI toolbox
            except:
                # NOTE(review): bare except - see note above the try.
                arcpy.AddMessage(arcpy.GetMessages())

            # Store the whole-river sinuosity on the shapefile.
            arcpy.AddMessage("Adding field ...")
            arcpy.AddField_management(
                shape, 'sinuosity', 'DOUBLE'
            )
            arcpy.AddMessage("Calculating field ...")
            arcpy.CalculateField_management(
                shape, 'sinuosity', sinuosity, 'VB'
            )

            ###############################
            ## Sinuosity per Section Part.
            ###############################
            if river_section_bool == 'true':  # user asked for per-section index
                arcpy.AddMessage(
                    "### Calculating sinuosity index for different parts of the river ####"
                )
                arcpy.AddMessage(
                    "You have selected {0} sections ".format(sections)
                )
                arcpy.AddMessage("Creating new shapefiles ...")
                points_along_shape_shp = r'points_along_shape_' + str(
                    year
                ) + '.shp'  # points used to cut the river into sections
                river_section_shp = 'river_sections_year_' + str(
                    year
                ) + '.shp'  # river split into sections
                arcpy.AddMessage(
                    "Calculating the length of sections in % of total length ..."
                )
                # NOTE(review): under Python 2 this is integer division, so
                # e.g. sections=3 gives per=33 and a short last section.
                per = 100 / int(
                    sections
                )
                arcpy.AddMessage(
                    "The percentage of the total length for each section is :{}"
                    .format(per))
                arcpy.AddMessage(
                    "Generating points along the river line ...")
                arcpy.GeneratePointsAlongLines_management(
                    shape,
                    points_along_shape_shp,
                    "PERCENTAGE",
                    Percentage=per,
                    Include_End_Points='NO_END_POINTS'
                )

                ##Added to delete the last point of the points along lines.
                points_temp = 'points_along_shape' + str(
                    year
                ) + 'filtered.shp'  # layer name used to drop the edge point
                arcpy.MakeFeatureLayer_management(points_along_shape_shp,
                                                  points_temp)
                sel_exp = "\"FID\"=" + str(
                    int(sections) - 1
                )  # the last generated point has FID = sections - 1
                arcpy.SelectLayerByAttribute_management(
                    points_temp, "NEW_SELECTION", sel_exp)
                if int(
                        arcpy.GetCount_management(points_temp)[0]
                ) > 0:  # if anything matched (it normally will)
                    arcpy.DeleteFeatures_management(
                        points_temp)

                ##
                # Split the river at the generated points; "2000 Meters" is
                # the search radius for snapping points onto the line.
                arcpy.AddMessage("Spliting line on points ...")
                arcpy.SplitLineAtPoint_management(
                    shape, points_along_shape_shp, river_section_shp,
                    "2000 Meters"
                )

                # Per-section length into SEC_LENGTH (same copy-then-delete
                # dance as TOT_LENGTH above).
                arcpy.AddMessage("Adding Geometry field ...")
                arcpy.AddGeometryAttributes_management(
                    river_section_shp, "LENGTH",
                    "METERS")
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    river_section_shp, 'SEC_LENGTH', 'DOUBLE'
                )
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(river_section_shp,
                                                "SEC_LENGTH", "!LENGTH!",
                                                "PYTHON")
                arcpy.AddMessage(
                    "Deleting field ..."
                )
                arcpy.DeleteField_management(river_section_shp, "LENGTH")
                arcpy.AddMessage(
                    "The calculation of the length of each section was successful, the values are stored in the field "
                    "\"SEC_LENGTH\""
                    " ")

                # Copy of the sections used to drop 'small' sections whose
                # sinuosity values would be misleading.
                river_section_shp_lvl2 = 'river_sections_year_' + str(
                    year
                ) + 'lvl2' + '.shp'
                arcpy.CopyFeatures_management(river_section_shp,
                                              river_section_shp_lvl2)
                temp_sec_len_l = [
                ]  # all section lengths
                cursor = arcpy.da.SearchCursor(
                    river_section_shp_lvl2, "SEC_LENGTH"
                )
                for row in cursor:
                    temp_sec_len_l.append(
                        int(row[0])
                    )
                minimum_section_length = min(
                    temp_sec_len_l)
                mean_section_length = sum(temp_sec_len_l) / len(
                    temp_sec_len_l
                )
                arcpy.AddMessage("Minimum section length :{}".format(
                    minimum_section_length))
                arcpy.AddMessage("Average section length :{}".format(
                    mean_section_length))

                # Delete sections shorter than 35% of the mean length.
                arcpy.AddMessage(
                    "Deleting the substantially small sections ...")
                temp = 'river_sections_year_' + str(
                    year
                ) + 'lvl3' + '.shp'  # layer name for the deletion selection
                arcpy.MakeFeatureLayer_management(river_section_shp_lvl2,
                                                  temp)
                delete_thres = 0.35  # 'small' = below 0.35 * mean section length
                exp_sec_len = "\"SEC_LENGTH\" <" + str(
                    delete_thres * mean_section_length)
                arcpy.SelectLayerByAttribute_management(
                    temp, "NEW_SELECTION", exp_sec_len
                )
                if int(
                        arcpy.GetCount_management(temp)[0]
                ) > 0:
                    arcpy.AddWarning(
                        "{} of the generated sections were substantially smaller than the average section length, and they are being deleted ..."
                        .format(int(arcpy.GetCount_management(temp)[0])))
                    arcpy.DeleteFeatures_management(temp)

                ######
                # Fields for the start/end coordinates, direct distance and
                # per-section sinuosity of each section.
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    river_section_shp_lvl2, "startx", "DOUBLE"
                )  # X of each section's start vertex
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    river_section_shp_lvl2, "starty", "DOUBLE"
                )  # Y of each section's start vertex
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    river_section_shp_lvl2, "endx", "DOUBLE"
                )  # X of each section's end vertex
                arcpy.AddField_management(
                    river_section_shp_lvl2, "endy", "DOUBLE"
                )  # Y of each section's end vertex
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    river_section_shp_lvl2, 'dirdis', 'DOUBLE'
                )  # straight start-to-end distance per section
                arcpy.AddMessage("Adding field ...")
                arcpy.AddField_management(
                    river_section_shp_lvl2, "sec_sin", "DOUBLE"
                )  # sinuosity per section

                #Expressions for the calculations of the new fields.
                # positionAlongLine(0.0/1.0, True) -> first/last point of each section.
                exp_start_X = "!Shape!.positionAlongLine(0.0,True).firstPoint.X"  # starting X
                exp_start_Y = "!Shape!.positionAlongLine(0.0,True).firstPoint.Y"  # starting Y
                exp_end_X = "!Shape!.positionAlongLine(1.0,True).firstPoint.X"  # ending X
                exp_end_Y = "!Shape!.positionAlongLine(1.0,True).firstPoint.Y"  # ending Y
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    river_section_shp_lvl2, "startx", exp_start_X, "PYTHON"
                )
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    river_section_shp_lvl2, "starty", exp_start_Y, "PYTHON"
                )
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    river_section_shp_lvl2, "endx", exp_end_X, "PYTHON"
                )
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    river_section_shp_lvl2, "endy", exp_end_Y, "PYTHON"
                )

                # Pythagorean theorem on the stored coordinates gives the
                # straight distance per section.
                dd_exp = "math.sqrt((!startx!-!endx!)**2+(!starty!-!endy!)**2)"
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    river_section_shp_lvl2, "dirdis", dd_exp, "PYTHON"
                )

                if normalize_sin_bool == 'true':
                    sin_exp = "!dirdis!/!SEC_LENGTH!"  # normalized: max = 1
                else:  # classic formula (length / direct distance)
                    sin_exp = "!SEC_LENGTH!/!dirdis!"
                arcpy.AddMessage("Calculating field ...")
                arcpy.CalculateField_management(
                    river_section_shp_lvl2, "sec_sin", sin_exp, "PYTHON"
                )
                arcpy.AddMessage(
                    "The calculation of the sinuosity per section was successful, the values are stored in a field named "
                    "\"sec_sin\""
                    " ")
def main():
    """Drive the UGB/IBS symmetrical-difference classification workflow.

    Builds the symmetrical difference between the urban growth boundary
    (UGB) and the settlement body (IBS), classifies its patches with the
    Class_* helper functions, and prints two summary tables
    (frequency / area / share per class).

    Relies on module-level inputs UGB, IBS, InputHU, Nutzungen, GOT,
    Workspace and helpers mem/tmp/DelName/Shp_Area/preparation/stat/
    Divide_POS_NEG.  Fixes applied: output-string typos ("Toatl" ->
    "Total", "Unclassifid" -> "Unclassified", "SettBoddy" -> "SettBody")
    and a missing space in the "features left" message.
    """
    IntersFreature = mem('IntersFreature')
    IBSSP = mem('IBSSP')
    SymDiff_Rest = tmp('SymDiff_Rest.shp')
    SymDiff_Copy = tmp('SymDiff_Copy.shp')

    # Fresh scratch folder for the tmp() outputs.
    DelName(["Tmp"])
    arcpy.CreateFolder_management(Workspace, "Tmp")

    Shp_Area(UGB)
    Shp_Area(IBS)

    # Symmetrical difference of UGB and IBS; keep an untouched copy for the
    # final statistics because SymDiff itself is consumed by the classifiers.
    SymDiff = preparation(UGB, IBS, InputHU)
    arcpy.CopyFeatures_management(SymDiff, SymDiff_Copy)
    arcpy.Intersect_analysis([UGB, IBS], IntersFreature)
    arcpy.management.MultipartToSinglepart(IBS, IBSSP)
    Shp_Area(IntersFreature)

    anz_IntersF, area_IntersF = stat(IntersFreature)
    anz_UGB, area_UGB = stat(UGB)
    anz_IBS, area_IBS = stat(IBSSP)
    SymDiff_POS, SymDiff_NEG = Divide_POS_NEG(SymDiff_Copy)
    anz_POS, area_POS = stat(SymDiff_POS)
    anz_NEG, area_NEG = stat(SymDiff_NEG)
    sum_area = area_POS + area_NEG + area_IntersF
    share_IntersF = round(area_IntersF / sum_area * 100, 1)

    # First overview table (totals before classification).
    row_format = "{:<20} {:>6} {:>6} {:>6}"
    data = [["Total area:", "", sum_area, ""],
            ["IBS Freq. Area:", anz_IBS, area_IBS, ""],
            ["UGB Freq. Area:", anz_UGB, area_UGB, ""],
            ["Slice Freq. Area:", anz_IntersF, area_IntersF, share_IntersF],
            ["", "", "", ""]]
    print(row_format.format("", "Frequ.", "Area", "Share"))
    for row in data:
        print(row_format.format(*row))

    # delete patches < 250 m2 before classification
    SymDiff_FL = arcpy.MakeFeatureLayer_management(SymDiff)
    arcpy.management.SelectLayerByAttribute(SymDiff_FL, "NEW_SELECTION",
                                            "Shape_Area < 250")
    arcpy.DeleteFeatures_management(SymDiff_FL)

    # Each classifier removes the patches it claims from SymDiff and returns
    # the remainder plus its class output, so the call order matters.
    SymDiff, SymDiff_P1 = Class_IndCom(SymDiff, Nutzungen, GOT, 'POS')
    SymDiff, SymDiff_N_IndCom = Class_IndCom(SymDiff, Nutzungen, GOT, 'NEG')
    SymDiff, SymDiff_N1 = Class_Resid(SymDiff, Nutzungen, GOT, 'NEG')
    SymDiff, SymDiff_N3 = Class_Holes(SymDiff, IBS, 'NEG')
    SymDiff, SymDiff_N7 = Class_SettBody(SymDiff, IBS, UGB, 'NEG')
    SymDiff, SymDiff_P2 = Class_Resid(SymDiff, Nutzungen, GOT, 'POS')
    SymDiff, SymDiff_P_holes = Class_Holes(SymDiff, IBS, 'POS')
    SymDiff, SymDiff_P_SettBody = Class_SettBody(SymDiff, IBS, UGB, 'POS')
    SymDiff, SymDiff_P_LargeEmpty = Class_LargeEmptyAreas(SymDiff, 'POS')
    SymDiff, SymDiff_P4 = Class_BdgEdge(SymDiff, 'POS')
    SymDiff, SymDiff_P5 = Class_EmptyAreas(SymDiff, 'POS')
    SymDiff, SymDiff_P6 = Class_LowDensBdgGrp(SymDiff, GOT, 'POS')
    SymDiff, SymDiff_N5 = Class_LargeEmptyAreas(SymDiff, 'NEG')
    SymDiff, SymDiff_N8 = Class_BdgEdge(SymDiff, 'NEG')
    SymDiff, SymDiff_N4 = Class_EmptyAreas(SymDiff, 'NEG')
    SymDiff, SymDiff_N2 = Class_LowDensBdgGrp(SymDiff, GOT, 'NEG')

    # Whatever survived every classifier is unclassified.
    arcpy.CopyFeatures_management(SymDiff, SymDiff_Rest)
    result = arcpy.GetCount_management(SymDiff_Rest)
    Anz = int(result.getOutput(0))

    # Recompute POS/NEG from the untouched copy for the final statistics.
    SymDiff_POS, SymDiff_NEG = Divide_POS_NEG(SymDiff_Copy)
    anz_POS, area_POS = stat(SymDiff_POS)
    anz_P1, area_P1 = stat(SymDiff_P1)
    anz_P2, area_P2 = stat(SymDiff_P2)
    anz_P4, area_P4 = stat(SymDiff_P4)
    anz_P5, area_P5 = stat(SymDiff_P5)
    anz_P6, area_P6 = stat(SymDiff_P6)
    anz_P_holes, area_P_holes = stat(SymDiff_P_holes)
    anz_P_LargeEmpty, area_P_LargeEmpty = stat(SymDiff_P_LargeEmpty)
    anz_P_SettBody, area_P_SettBody = stat(SymDiff_P_SettBody)
    sum_anz_P = anz_P1 + anz_P2 + anz_P4 + anz_P5 + anz_P6 + anz_P_LargeEmpty + anz_P_SettBody + anz_P_holes
    sum_area_P = area_P1 + area_P2 + area_P4 + area_P5 + area_P6 + area_P_LargeEmpty + area_P_SettBody + area_P_holes

    anz_NEG, area_NEG = stat(SymDiff_NEG)
    anz_N_IndCom, area_N_IndCom = stat(SymDiff_N_IndCom)
    anz_N1, area_N1 = stat(SymDiff_N1)
    anz_N2, area_N2 = stat(SymDiff_N2)
    anz_N3, area_N3 = stat(SymDiff_N3)
    anz_N4, area_N4 = stat(SymDiff_N4)
    anz_N5, area_N5 = stat(SymDiff_N5)
    anz_N7, area_N7 = stat(SymDiff_N7)
    anz_N8, area_N8 = stat(SymDiff_N8)
    sum_anz_N = anz_N1 + anz_N2 + anz_N3 + anz_N4 + anz_N5 + anz_N7 + anz_N8 + anz_N_IndCom
    sum_area_N = area_N1 + area_N2 + area_N3 + area_N4 + area_N5 + area_N7 + area_N8 + area_N_IndCom

    # Percentage shares of the overall area per class.
    share_P1 = round(area_P1 / sum_area * 100, 1)
    share_P2 = round(area_P2 / sum_area * 100, 1)
    share_P5 = round(area_P5 / sum_area * 100, 1)
    share_P4 = round(area_P4 / sum_area * 100, 1)
    share_P6 = round(area_P6 / sum_area * 100, 1)
    share_P_LargeEmpty = round(area_P_LargeEmpty / sum_area * 100, 1)
    share_P_SettBody = round(area_P_SettBody / sum_area * 100, 1)
    share_P_holes = round(area_P_holes / sum_area * 100, 1)
    share_N_IndCom = round(area_N_IndCom / sum_area * 100, 1)
    share_N1 = round(area_N1 / sum_area * 100, 1)
    share_N2 = round(area_N2 / sum_area * 100, 1)
    share_N3 = round(area_N3 / sum_area * 100, 1)
    share_N4 = round(area_N4 / sum_area * 100, 1)
    share_N5 = round(area_N5 / sum_area * 100, 1)
    share_N7 = round(area_N7 / sum_area * 100, 1)
    share_N8 = round(area_N8 / sum_area * 100, 1)
    sum_share_P = share_P1 + share_P2 + share_P5 + share_P4 + share_P6 + share_P_LargeEmpty + share_P_SettBody + share_P_holes
    sum_share_N = share_N1 + share_N2 + share_N3 + share_N4 + share_N5 + share_N7 + share_N8 + share_N_IndCom

    # Final per-class summary table.
    data = [["Class P_IndCom:", anz_P1, area_P1, share_P1],
            ["Class P_Resid:", anz_P2, area_P2, share_P2],
            ["Class P_BdgEdg:", anz_P4, area_P4, share_P4],
            ["Class P_LowDens:", anz_P6, area_P6, share_P6],
            [
                "Class P_LargeEmpty:", anz_P_LargeEmpty, area_P_LargeEmpty,
                share_P_LargeEmpty
            ],
            ["Class P_EmptyArea:", anz_P5, area_P5, share_P5],
            [
                "Class P_SettBody:", anz_P_SettBody, area_P_SettBody,
                share_P_SettBody
            ],
            ["Class P_Holes:", anz_P_holes, area_P_holes, share_P_holes],
            ["Sum:", sum_anz_P, sum_area_P, sum_share_P],
            ["", "", "", ""],
            ["Class N_IndCom:", anz_N_IndCom, area_N_IndCom, share_N_IndCom],
            ["Class N_Resid:", anz_N1, area_N1, share_N1],
            ["Class N_BdgGrp:", anz_N8, area_N8, share_N8],
            ["Class N_LowDens:", anz_N2, area_N2, share_N2],
            ["Class N_LargeEmpty:", anz_N5, area_N5, share_N5],
            ["Class N_EmptyAreas:", anz_N4, area_N4, share_N4],
            ["Class N_SettBody:", anz_N7, area_N7, share_N7],
            ["Class N_Holes:", anz_N3, area_N3, share_N3],
            ["Sum:", sum_anz_N, sum_area_N, sum_share_N],
            ["", "", "", ""],
            [
                "Unclassified:", anz_POS + anz_NEG - sum_anz_P - sum_anz_N,
                sum_area - sum_area_P - sum_area_N - area_IntersF,
                100 - sum_share_N - sum_share_P - share_IntersF
            ]]
    print(row_format.format("Class", "Frequ.", "Area", "Share"))
    for row in data:
        print(row_format.format(*row))

    if Anz > 0:
        # Fixed missing space before "features".
        print("There are " + str(Anz) + " features left!")
    DelName([SymDiff])
arcpy.AddField_management("SANDYBOTTOM_" + raster, "TYPE", "TEXT", 25, "", "", "", "NULLABLE", "REQUIRED") # Execute MakeFeatureLayer arcpy.MakeFeatureLayer_management("SANDYBOTTOM_" + raster, "SANDYLAYER_" + raster) expression = 'Shape_Area < 10000' # Execute SelectLayerByAttribute to determine which features to delete arcpy.SelectLayerByAttribute_management("SANDYLAYER_" + raster, "NEW_SELECTION", expression) # Execute GetCount and if some features have been selected, then # Execute DeleteFeatures to remove the selected features. if int(arcpy.GetCount_management("SANDYLAYER_" + raster).getOutput(0)) > 0: arcpy.DeleteFeatures_management("SANDYLAYER_" + raster) # Type = BOTTOM arcpy.SelectLayerByAttribute_management("SANDYLAYER_" + raster, "NEW_SELECTION", """ gridcode = 1 """) arcpy.CalculateField_management("SANDYLAYER_" + raster, "TYPE", '"' + "BOTTOM" + '"', "PYTHON") # Type = SAND arcpy.SelectLayerByAttribute_management("SANDYLAYER_" + raster, "NEW_SELECTION", """ gridcode = 2 """) arcpy.CalculateField_management("SANDYLAYER_" + raster, "TYPE", '"' + "SAND" + '"', "PYTHON") # Type = TURBIDITY arcpy.SelectLayerByAttribute_management("SANDYLAYER_" + raster,