def main():
    desc = arcpy.Describe(f)
    noZvalue()
    # add joined fields
    try:
        arcpy.AddField_management(lines_f, "from_id", "LONG")
        arcpy.AddField_management(lines_f, "from_xy", "TEXT", field_length=250)
        arcpy.AddField_management(lines_f, "to_id", "LONG")
        arcpy.AddField_management(lines_f, "to_xy", "TEXT", field_length=250)
        arcpy.AddField_management(lines_f, "length", "DOUBLE")
        arcpy.AddField_management(lines_f, "dub", "TEXT", field_length=50)
    except arcpy.ExecuteError:
        print("fields already exist")
    with arcpy.da.InsertCursor(lines_f, ["SHAPE@", "from_id", "from_xy", "to_id", "to_xy"]) as cursor:
        for i in feature_id:
            if lines[i]:
                for j in lines[i]:
                    array = arcpy.Array([arcpy.Point(total_x[i], total_y[i]),
                                         arcpy.Point(total_x[j], total_y[j])])
                    cursor.insertRow([arcpy.Polyline(array),
                                      i + 1, str(total_x[i]) + ", " + str(total_y[i]),
                                      j + 1, str(total_x[j]) + ", " + str(total_y[j])])
    # remove duplicated lines
    arcpy.CalculateField_management(lines_f, "length", "!SHAPE_Length!", "PYTHON_9.3")
    arcpy.CalculateField_management(lines_f, "dub",
                                    'str(!length!) + ", " + str(abs(!from_id! - !to_id!))',
                                    "PYTHON_9.3")
    arcpy.DeleteIdentical_management(lines_f, "dub")
def extract_hydro_points(drain, show, folder, gdb):
    gp = arcgisscripting.create()
    gp.CheckOutExtension("Spatial")
    gp.SetProgressor('default', 'starting vertex extraction...')
    arcpy.env.overwriteOutput = True
    arcpy.env.addOutputsToMap = show
    if not os.path.exists(os.path.join(folder, '{}.gdb'.format(gdb))):
        arcpy.CreateFileGDB_management(out_folder_path=folder,
                                       out_name='{}.gdb'.format(gdb))
    gp.AddMessage('Processing Extract Vertex ...')
    arcpy.Intersect_analysis(in_features='{} #'.format(drain),
                             out_feature_class=os.path.join(folder, 'temp', 'hydro_multi_points.shp'),
                             join_attributes='ALL',
                             cluster_tolerance='-1 Unknown',
                             output_type='POINT')
    arcpy.AddXY_management(in_features=os.path.join(folder, 'temp', 'hydro_multi_points.shp'))
    arcpy.DeleteIdentical_management(in_dataset=os.path.join(folder, 'temp', 'hydro_multi_points.shp'),
                                     fields="POINT_X;POINT_Y",
                                     xy_tolerance="",
                                     z_tolerance="0")
    arcpy.MultipartToSinglepart_management(
        in_features=os.path.join(folder, 'temp', 'hydro_multi_points.shp'),
        out_feature_class=os.path.join(folder, '{}.gdb'.format(gdb), 'hydro_points'))
    gp.AddMessage('Finish')
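# Hedged usage sketch for extract_hydro_points: the layer name and folder are
# hypothetical, and a 'temp' subfolder is assumed to already exist under the
# output folder (the function writes intermediate shapefiles there):
extract_hydro_points('drain_lines', show=False, folder=r'C:\gis\hydro', gdb='hydro_results')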
def integrating_network(lines, tolerance="0 Meters"):
    # 'ВЛ' = overhead line, 'КЛ' = cable line (Russian Line_Type codes)
    overhead_lines = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_over_p", where_clause="Line_Type = 'ВЛ'")
    cable_lines = arcpy.FeatureClassToFeatureClass_conversion(
        lines, "Network", "Lines_cable_p", where_clause="Line_Type = 'КЛ'")
    arcpy.Integrate_management(overhead_lines, tolerance)
    arcpy.Integrate_management(cable_lines, "0.1 Meters")
    lines = arcpy.Merge_management([overhead_lines, cable_lines], "Lines_merge")
    split = arcpy.SplitLine_management(lines, "SplitLine")
    find = arcpy.FindIdentical_management(
        split, "in_memory/Find_Ident", ["Shape", "Name", "Voltage"],
        xy_tolerance=tolerance, output_record_option="ONLY_DUPLICATES")
    joined_split = arcpy.JoinField_management(split, "OBJECTID", find, "IN_FID")
    arcpy.DeleteIdentical_management(joined_split, ["Shape", "Name", "Voltage"],
                                     "0.1 Meters")
    unsplit = arcpy.Dissolve_management(
        joined_split, "Unsplited_Lines",
        ["Name", "Voltage", "Line_Type", "Start", "End", "Circuit",
         "Operate_Name", "Trace_Version", "Status"],
        multi_part="MULTI_PART")
    return unsplit
def PrepStrNet(seg_net_pathA, home, h_name):
    process_namep = "process_{0}_".format(h_name)
    outNamep = "process_fold_{0}".format(h_name)
    # create working scratch folder
    fgb_name = "process_fold_{0}_scratch".format(h_name)
    scratch = os.path.join(home, fgb_name)
    if os.path.isdir(scratch):
        shutil.rmtree(scratch)
    os.mkdir(scratch)
    print('making copy of input network')
    seg_net_path = os.path.join(scratch, "workingNet.shp")
    arcpy.CopyFeatures_management(seg_net_pathA, seg_net_path)
    print('deleting features with identical geometries')
    arcpy.DeleteIdentical_management(seg_net_path, ['Shape'])
    print('setting up output network path')
    out_networkp = os.path.join(home, "Output_{0}.shp".format(h_name))
    print("let's get started")
    seg_network = gpd.read_file(seg_net_path, driver="ESRI Shapefile")
    project_crs = seg_network.crs
    # create sequential numbers for reaches
    seg_network['reach_no'] = np.arange(len(seg_network))
    return seg_network, project_crs, process_namep, outNamep, out_networkp, scratch
def locateEventTable(gdb, inFC, pts, dem, sDistance, eventProperties, zType, isLines=False):
    desc = arcpy.Describe(pts)
    if not desc.hasZ:
        addMsgAndPrint('  adding Z values')
        arcpy.AddSurfaceInformation_3d(pts, dem, zType, 'LINEAR')
    ## working around bug in LocateFeaturesAlongRoutes
    # add special field for duplicate detection
    dupDetectField = 'xDupDetect'
    arcpy.AddField_management(pts, dupDetectField, 'LONG')
    # and calc this field = OBJECTID
    OID = arcpy.Describe(pts).OIDFieldName
    expr = '!' + OID + '!'
    arcpy.CalculateField_management(pts, dupDetectField, expr, "PYTHON")
    # locate linePts along route
    addMsgAndPrint('  making event table')
    eventTable = gdb + '/evTb_' + inFC
    testAndDelete(eventTable)
    arcpy.LocateFeaturesAlongRoutes_lr(pts, ZMline, idField, sDistance,
                                       eventTable, eventProperties)
    nRows = numberOfRows(eventTable)
    nPts = numberOfRows(pts)
    if nRows > nPts and not isLines:
        # if LocateFeaturesAlongRoutes has made duplicates (A BUG!)
        addMsgAndPrint('  correcting for bug in LocateFeaturesAlongRoutes')
        addMsgAndPrint('    ' + str(nRows) + ' rows in event table')
        addMsgAndPrint('    removing duplicate entries in event table')
        arcpy.DeleteIdentical_management(eventTable, dupDetectField)
        addMsgAndPrint('    ' + str(numberOfRows(eventTable)) + ' rows in event table')
    arcpy.DeleteField_management(eventTable, dupDetectField)
    return eventTable
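# The snippet above relies on helpers defined elsewhere in its module
# (addMsgAndPrint, testAndDelete, numberOfRows). A minimal sketch of
# numberOfRows, assuming it simply wraps GetCount (a hypothetical
# reconstruction, not the original implementation):
def numberOfRows(table):
    # GetCount returns a Result object; getOutput(0) is the count as a string
    return int(arcpy.GetCount_management(table).getOutput(0))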
def deleteIdentical(feature):
    """Delete duplicate features from a feature class.

    Basic idea: use arcpy's DeleteIdentical_management to remove features
    with identical geometry, mainly aimed at features such as elbows and tees.
    """
    arcpy.DeleteIdentical_management(feature, "Shape")
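# Hedged usage sketch for deleteIdentical; the feature class path is
# hypothetical and arcpy is assumed to be imported:
deleteIdentical(r"C:\gis\pipes.gdb\fittings")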
def get_footprints(centroids, shapefile):
    os.makedirs(resources.temp_footprints, exist_ok=True)
    output_feature_class = os.path.join(resources.temp_footprints, "unsorted.shp")
    join_operation = "JOIN_ONE_TO_MANY"
    join_type = "KEEP_COMMON"
    match_option = "CONTAINS"
    arcpy.SpatialJoin_analysis(shapefile, centroids, output_feature_class,
                               join_operation=join_operation,
                               join_type=join_type,
                               match_option=match_option)
    fields = [field.name for field in arcpy.ListFields(output_feature_class)]
    valids = ["FID", "Shape", "Roof", "Score", "Perc_000", "Perc_010",
              "Perc_050", "Perc_090", "Perc_100"]
    to_drop = [item for item in fields if item not in valids]
    arcpy.DeleteField_management(output_feature_class, to_drop)
    output = os.path.join(resources.temp_footprints, "footprints.shp")
    arcpy.Sort_management(output_feature_class, output, "Shape ASCENDING", "UL")
    arcpy.Delete_management(output_feature_class)
    arcpy.DeleteIdentical_management(output, "Shape", None, 0)
    return output
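# Hedged usage sketch for get_footprints; both input shapefile paths are
# hypothetical, and resources.temp_footprints is assumed to be a module-level
# path defined elsewhere:
footprints_shp = get_footprints(r"C:\gis\roof_centroids.shp", r"C:\gis\building_polygons.shp")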
def main():
    import arcpy
    import urllib2
    import os
    import zipfile
    import glob

    # create output roads folder
    roads_folder = os.path.join(pf_path, 'Roads')
    if not os.path.exists(roads_folder):
        os.makedirs(roads_folder)

    # get list of all counties
    geoidList = [row[0] for row in arcpy.da.SearchCursor(counties, "geoid")]

    # download roads file for each county
    for geoid in geoidList:
        url_filename = "tl_2018_" + str(geoid) + "_roads.zip"
        download = "https://www2.census.gov/geo/tiger/TIGER2018/ROADS/" + url_filename
        request = urllib2.urlopen(download)
        output = open(roads_folder + '/' + url_filename, "wb")
        output.write(request.read())
        output.close()

    # create zipped subfolder to temporarily hold zipped folders
    zip_folder = os.path.join(pf_path, 'Roads', 'zipped')
    if not os.path.exists(zip_folder):
        os.makedirs(zip_folder)

    # unzip all downloaded folders
    os.chdir(roads_folder)  # change directory from working dir to dir with files
    extension = ".zip"
    for item in os.listdir(roads_folder):  # loop through items in dir
        if item.endswith(extension):  # check for ".zip" extension
            file_name = os.path.abspath(item)  # get full path of file
            zip_ref = zipfile.ZipFile(file_name)  # create zipfile object
            zip_ref.extractall(os.path.join(roads_folder, 'zipped'))  # extract file to dir
            zip_ref.close()  # close file
            os.remove(file_name)  # delete zipped file

    # get list of all '*.shp' shapefiles in folder
    os.chdir(os.path.join(roads_folder, 'zipped'))
    shps = glob.glob('*.shp')
    shps_list = ";".join(shps)

    # merge into single shp file
    roads_merged = arcpy.Merge_management(shps_list, 'in_memory/roads_merged')

    # project to coordinate system and save output
    outCS = arcpy.SpatialReference(coord_sys)
    arcpy.Project_management(roads_merged, os.path.join(roads_folder, out_name), outCS)

    # delete any identical roads
    arcpy.DeleteIdentical_management(os.path.join(roads_folder, out_name), ["Shape"])
def get_nodes_from_links(links_shp):
    im_t = "in_memory/pp"
    arcpy.FeatureVerticesToPoints_management(links_shp, im_t, "BOTH_ENDS")
    arcpy.DeleteIdentical_management(im_t, ['Shape'])
    arcpy.AddField_management(im_t, "_ID_", "LONG")
    arcpy.CalculateField_management(im_t, "_ID_", '!FID!', "PYTHON")
    fieldnames = [x for x in [f.name for f in arcpy.ListFields(im_t)]
                  if x not in ['FID', 'Shape', 'OID', "_ID_"]]
    arcpy.DeleteField_management(im_t, fieldnames)
    return im_t
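# Hedged usage sketch for get_nodes_from_links; the input shapefile path is
# hypothetical, and the in_memory result is persisted with CopyFeatures:
nodes = get_nodes_from_links(r"C:\gis\network\links.shp")
arcpy.CopyFeatures_management(nodes, r"C:\gis\network\nodes.shp")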
def extract_multiple(network, data_network, temp, fields_to_keep, data_network_folder):
    # Clean up the dataset so that we only keep fields we want
    field_names = [f.name for f in arcpy.ListFields(network)]
    fields_to_delete = ["Join_Count", "TARGET_FID", "Join_Cou_1"]
    for field in fields_to_delete:
        if field in field_names:
            arcpy.DeleteField_management(network, field)
    fields_to_keep += fields_to_delete
    keep_fields(network, fields_to_keep)

    # Creates a shapefile with an entry for every data_network segment that overlaps a PIBO reach
    arcpy.SpatialJoin_analysis(network, data_network, temp, "JOIN_ONE_TO_MANY")
    joined = temp
    field_names = [f.name for f in arcpy.ListFields(joined)]
    data_list = []
    # Turn the shapefile from before into a Python list. This is to minimize interaction with cursors.
    with arcpy.da.SearchCursor(joined, '*') as cursor:
        for row in cursor:
            to_add = []
            for count, field in enumerate(field_names):
                to_add.append(row[count])
            data_list.append(to_add)

    # Split the data list by PIBO reach, so that per-reach calculations can be done.
    split_list = split_list_by_reach(data_list, field_names)
    # This is where all of the math happens. Each PIBO reach now has the necessary data_network data.
    input_list = combine_segments(split_list, field_names)

    # Add relevant extraction type fields
    arcpy.AddField_management(joined, "EXT_TYPE", "TEXT")
    arcpy.AddField_management(joined, "EXT_MATH", "TEXT")

    # Remove reaches that are repeats from the same join
    field_names = [f.name for f in arcpy.ListFields(joined)]
    if "TARGET_FID" in field_names:
        arcpy.DeleteIdentical_management(joined, "TARGET_FID")

    # Create a new points shapefile to save all of this data
    data_points = arcpy.CreateFeatureclass_management(
        data_network_folder, "Extracted_Points_Multiple.shp", "POINT",
        joined, spatial_reference=joined)

    # Put extracted data on these points
    with arcpy.da.InsertCursor(data_points, '*') as cursor:
        for input_reach in input_list:
            cursor.insertRow(input_reach)
    return data_points
def nhd_merge(gdb_list, example_feature_class_name, out_fc, selection=''):
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(102039)  # USA_Contiguous_Albers_Equal_Area_Conic_USGS_version
    arcpy.env.workspace = 'in_memory'
    gdb_list = [os.path.join(gdb, os.path.basename(example_feature_class_name)) for gdb in gdb_list]
    gdb0 = gdb_list.pop(0)
    desc = arcpy.Describe(gdb0)
    cu.multi_msg('Merging all features together...')
    arcpy.CopyFeatures_management(gdb0, 'temp_merged')
    cu.lengthen_field('temp_merged', 'Permanent_Identifier', 255)
    cu.merge_many(gdb_list, 'in_memory/the_rest_merged')
    arcpy.Append_management('in_memory/the_rest_merged', 'temp_merged', 'NO_TEST')
    # use in_memory explicitly here because i haven't figured out how to
    # pass arcgis environments to my functions :(
    fc_temp = 'in_memory/temp_merged'
    fcount1 = int(arcpy.GetCount_management(fc_temp).getOutput(0))
    cu.multi_msg('Before selection and cleaning, feature count is {0}'.format(fcount1))
    if selection:
        cu.multi_msg('Selecting features...')
        arcpy.Select_analysis('temp_merged', 'in_memory/merged_select', selection)
        fc_temp = 'in_memory/merged_select'
        fcount2 = int(arcpy.GetCount_management(fc_temp).getOutput(0))
        cu.multi_msg('After selection and before cleaning, feature count is {0}'.format(fcount2))
    cu.multi_msg('Removing complete duplicates...')
    fc_temp_fields = [f.name for f in arcpy.ListFields(fc_temp) if f.type != 'OID']
    arcpy.DeleteIdentical_management(fc_temp, fields=fc_temp_fields)
    fcount3 = int(arcpy.GetCount_management(fc_temp).getOutput(0))
    cu.multi_msg('After removing complete duplicates only, feature count is {0}'.format(fcount3))
    cu.multi_msg('Removing remaining ID duplicates...')
    assumptions.remove_nhd_duplicates(fc_temp, 'Permanent_Identifier', 'in_memory/no_id_dupes')
    fcount4 = int(arcpy.GetCount_management(fc_temp).getOutput(0))
    cu.multi_msg('After removing all ID duplicates, feature count is {0}'.format(fcount4))
    if desc.shapeType == 'Polygon':
        cu.multi_msg('Removing geographic duplicates and substantially overlapping features...')
        assumptions.remove_geographic_doubles('in_memory/no_id_dupes', out_fc,
                                              'Permanent_Identifier',
                                              percent_overlap_allowed=10)
    cu.multi_msg('nhd_merge complete.')
    fcount5 = int(arcpy.GetCount_management(fc_temp).getOutput(0))
    cu.multi_msg('Final feature count is {0}'.format(fcount5))
def cleanPts():
    print "Running cleanPts"
    arcpy.env.workspace = pathGDB
    countyBound = r"path\to\CntyBndry"
    muniBound = r"path\to\Muni"
    # remove any points that aren't georeferenced
    arcpy.MakeFeatureLayer_management('thepointsTEMP', 'thepointsTEMP_lyr')
    arcpy.SelectLayerByLocation_management('thepointsTEMP_lyr', 'within', countyBound)
    arcpy.CopyFeatures_management('thepointsTEMP_lyr', 'thepointsTEMP2')
    arcpy.Delete_management('thepointsTEMP')
    # remove duplicate points
    arcpy.DeleteIdentical_management('thepointsTEMP2', ["CENTER_X", "CENTER_Y"])
    # recalc muni field
    arcpy.SpatialJoin_analysis('thepointsTEMP2', muniBound, 'thepointsFINAL')
    arcpy.Delete_management('thepointsTEMP2')
    arcpy.DeleteField_management('thepointsFINAL',
                                 ["Join_Count", "TARGET_FID", "MUNI", "DIST", "ABBREVIATION"])
def delete_dangles(KVL_dissolve, input_points_p):
    # 'ПС' = substation, 'ЭС' = power station, 'РУ' = switchgear (Russian point-type codes)
    points_subset = arcpy.FeatureClassToFeatureClass_conversion(
        input_points_p, "in_memory", "Points_Subset",
        "Point_Type IN ('ПС', 'ЭС', 'РУ')")
    points_layer = arcpy.MakeFeatureLayer_management(points_subset, "Points_Layer")
    arcpy.Integrate_management(KVL_dissolve)
    split2 = arcpy.SplitLine_management(KVL_dissolve, "SplitLine2")
    arcpy.DeleteIdentical_management(split2, ["SHAPE", "Name"])
    unsplit2 = arcpy.Dissolve_management(
        split2, "Unsplited_Lines2",
        ["Name", "Voltage", "Start", "End", "Circuit", "Operate_Name",
         "Trace_Version", "Status"],
        multi_part="MULTI_PART")
    KVL_splitted = arcpy.SplitLineAtPoint_management(unsplit2, points_subset,
                                                     "SplitAtPoint",
                                                     search_radius="1 Meters")
    dangles_new = arcpy.FeatureVerticesToPoints_management(KVL_splitted, 'Dangles_KVL', 'DANGLE')
    dangles_layer = arcpy.MakeFeatureLayer_management(dangles_new, "Dangles_Layer")
    lines_layer = arcpy.MakeFeatureLayer_management(KVL_splitted, "Lines_Layer")
    arcpy.SelectLayerByLocation_management(dangles_layer, "INTERSECT", points_layer)
    arcpy.SelectLayerByAttribute_management(dangles_layer, "SWITCH_SELECTION")
    arcpy.SelectLayerByLocation_management(lines_layer, "INTERSECT", dangles_layer)
    arcpy.DeleteFeatures_management(lines_layer)
    KVL_dissolve_final = arcpy.Dissolve_management(
        lines_layer, "KVL_Dissolve",
        ["Name", "Voltage", "Start", "End", "Circuit", "Operate_Name", "Status"],
        multi_part="MULTI_PART")
    return KVL_dissolve_final
def create_lfp(self):
    # distance from boundaries
    arcpy.gp.FocalStatistics_sa(self.i.cat, self.i.l_rng, "Rectangle 1 1 CELL", "RANGE", "DATA")
    arcpy.gp.Con_sa(self.i.l_rng, self.i.cat, self.i.l_ild, "#", "Value = 0")
    arcpy.gp.IsNull_sa(self.i.l_ild, self.i.l_ild_n)
    arcpy.gp.ExtractByMask_sa(self.i.l_ild_n, self.i.cat, self.i.l_ild_n_c)
    arcpy.gp.Con_sa(self.i.l_ild_n_c, self.e.d, self.i.l_fnd, "#", "Value = 0")
    arcpy.gp.Fill_sa(self.i.l_fnd, self.i.l_fill, "")
    arcpy.gp.FlowDirection_sa(self.i.l_fill, self.i.l_fdr, "NORMAL", "")
    arcpy.gp.FlowLength_sa(self.i.l_fdr, self.i.l_fln, "DOWNSTREAM", "")
    self.process_l_lmx()
    arcpy.gp.EqualTo_sa(self.i.l_lmx, self.i.l_fln, self.i.l_lmx_fln)
    arcpy.gp.Con_sa(self.i.l_lmx_fln, self.i.l_ild, self.i.l_mxp_r, "#", "Value = 1")
    arcpy.RasterToPoint_conversion(in_raster=self.i.l_mxp_r,
                                   out_point_features=self.i.l_mxp_v,
                                   raster_field="Value")
    arcpy.CopyFeatures_management(in_features=self.i.l_mxp_v,
                                  out_feature_class=self.i.generate_temps("v", "mxp"))
    arcpy.DeleteIdentical_management(in_dataset=self.i.l_mxp_v, fields="GRID_CODE",
                                     xy_tolerance="", z_tolerance="0")
    arcpy.gp.CostPath_sa(self.i.l_mxp_v, self.i.fdr, self.i.fdr, self.i.l_pth,
                         "EACH_CELL", "GRID_CODE")
    # main function
    print "Please wait...",
    # crude wait for the geoprocessor to finish; time.sleep() always returns None
    if sleep(90) is None:
        arcpy.gp.StreamToFeature_sa(self.i.l_pth, self.i.fdr, self.i.l_lfp, "NO_SIMPLIFY")
        arcpy.SpatialJoin_analysis(target_features=self.i.l_lfp,
                                   join_features=self.i.catchment,
                                   out_feature_class=self.i.l_spa,
                                   join_operation="JOIN_ONE_TO_ONE",
                                   join_type="KEEP_ALL",
                                   match_option="HAVE_THEIR_CENTER_IN")
def execute(self, params, messages):
    MarxanDB = params[0].valueAsText
    species_lyr = params[1].valueAsText
    elsubid1 = params[2].valueAsText
    species_csv = params[3].valueAsText
    elsubid2 = params[4].valueAsText
    sname = params[5].valueAsText

    arcpy.env.workspace = "in_memory"
    arcpy.AddMessage("copying spec_table")
    spec_table = arcpy.TableToTable_conversion(species_lyr, env.workspace, "spec_table")
    arcpy.AddMessage("delete identical")
    arcpy.DeleteIdentical_management(spec_table, elsubid1)
    arcpy.AddMessage("copying lu_spec")
    lu_spec = arcpy.TableToTable_conversion(species_csv, env.workspace, "lu_spec")
    arcpy.AddMessage("joining fields")
    arcpy.JoinField_management(spec_table, elsubid1, lu_spec, elsubid2, [sname, "prop", "spf"])
    arcpy.AddMessage("altering elsubid1")
    arcpy.AlterField_management(spec_table, elsubid1, "id")
    arcpy.AddMessage("altering sname")
    arcpy.AlterField_management(spec_table, sname, "name")

    spec_dat = os.path.join(MarxanDB, "input", "spec.dat")
    fields = ["id", "name", "prop", "spf"]
    with open(spec_dat, "a+") as f:
        f.write('\t'.join(fields) + '\n')
        with arcpy.da.SearchCursor(spec_table, fields) as cursor:
            for row in cursor:
                f.write('\t'.join([str(r) for r in row]) + '\n')
    return
def onClick(self):
    """Build the network (lines and stations) and add it to the document."""
    rep_data = "D:\\ProgSIG\\data"
    gdb = os.path.join(rep_data, "TD_itinearaire.gdb")
    arcpy.env.workspace = gdb
    layer_stations = "Layer_stations_ligne_"
    fc_stations = "Stations_ligne_"
    fc_line = "Ligne_"
    # Build the stations and lines for each text file in the directory
    txt_files = [f for f in os.listdir(rep_data) if f.endswith(".txt")]
    for txt_file in txt_files:
        num_line = txt_file[1:-4]  # drop the first letter and the last 4 characters
        arcpy.MakeXYEventLayer_management(
            txt_file, "x", "y", layer_stations + num_line,
            "PROJCS['RGF_1993_Lambert_93',GEOGCS['GCS_RGF_1993',DATUM['D_RGF_1993',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',700000.0],PARAMETER['False_Northing',6600000.0],PARAMETER['Central_Meridian',3.0],PARAMETER['Standard_Parallel_1',44.0],PARAMETER['Standard_Parallel_2',49.0],PARAMETER['Latitude_Of_Origin',46.5],UNIT['Meter',1.0]];-35597500 -23641900 10000;-100000 10000;-100000 10000;0,001;0,001;0,001;IsHighPrecision",
            "")
        arcpy.FeatureClassToFeatureClass_conversion(
            layer_stations + num_line, gdb, fc_stations + num_line, "",
            "x \"x\" true true false 8 Double 0 0 ,First,#,stations_layer,x,-1,-1;y \"y\" true true false 8 Double 0 0 ,First,#,stations_layer,y,-1,-1;nom \"nom\" true true false 8000 Text 0 0 ,First,#,stations_layer,nom,-1,-1",
            "")
        arcpy.PointsToLine_management(fc_stations + num_line, fc_line + num_line,
                                      "", "", "NO_CLOSE")
    # Merge all the stations into a single feature class
    fcs_stations = []
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        if fc_stations in fc:
            fcs_stations.append(fc)
    arcpy.Merge_management(fcs_stations, FC_ALL_STATIONS)
    arcpy.DeleteIdentical_management(FC_ALL_STATIONS, "Shape")
    # Add it to the document
    add_layer(mxd, FL_ALL_STATIONS)
def get_old_node_connections_dict(links):
    arcpy.FeatureVerticesToPoints_management(links, temp1, "BOTH_ENDS")
    arcpy.CopyFeatures_management(temp1, temp2)
    arcpy.DeleteIdentical_management(temp2, ['Shape'])
    arcpy.SpatialJoin_analysis(temp1, temp2, temp3, "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "CLOSEST")
    linkfid_to_node_fid_list = [[row.getValue("ORIG_FID"), row.getValue("ORIG_FID_1")]
                                for row in arcpy.SearchCursor(temp3)]
    # creating ID->ID dicts
    a = {}  # nodefid -> (linkfid1, linkfid2, linkfid3, ...)
    for linkfid, nodefid in linkfid_to_node_fid_list:
        a.setdefault(nodefid, []).append(linkfid)
    b = {}  # linkid -> (nodeid, nodeid)
    for linkfid, nodefid in linkfid_to_node_fid_list:
        b.setdefault(linkfid, []).append(nodefid)
    link_to_link_dict = {}
    # if this line throws an error, it's because the links include at least one multipart feature
    for linkid, [nodeid1, nodeid2] in b.iteritems():
        link_to_link_dict[linkid] = list(set(a[nodeid1] + a[nodeid2]) - set([linkid]))
    return link_to_link_dict
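# Hedged usage sketch for get_old_node_connections_dict; temp1/temp2/temp3 are
# module-level scratch paths the function assumes, and the input path is
# hypothetical. The result maps each link FID to the FIDs of links sharing an
# endpoint with it:
adjacency = get_old_node_connections_dict(r"C:\gis\network\links.shp")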
arcpy.SelectLayerByAttribute_management(bhLayer, "CLEAR_SELECTION")
# locate borehole points along the cross-section
eventTable = outName + '_bhEvents'
rProps = 'rkey POINT RouteM'
arcpy.AddMessage('Locating ' + zBoreholes + ' on ' + zmLine)
arcpy.LocateFeaturesAlongRoutes_lr(zBoreholes, zmLine, 'ORIG_FID', buff,
                                   eventTable, rProps, '#', 'DISTANCE')
arcpy.AddMessage('   ' + eventTable + ' written to ' + arcpy.env.scratchWorkspace)
# remove duplicate records that result from what appears to be
# an unresolved bug in the Locate Features Along Routes tool:
# some points will get more than one record in the event table,
# with slightly different, sub-mapunit, mValues
arcpy.DeleteIdentical_management(eventTable, bhIdField)
# make the borehole lines to be used as routes
bhLines = outName + '_bhLines'
arcpy.AddMessage('Building lines in cross-section view from ' + eventTable)
boreholeLines()
arcpy.AddMessage('   ' + bhLines + ' written to ' + arcpy.env.scratchWorkspace)
# if no intervals table was provided, stop here and deliver the zBoreholes as
# the final feature class
if intervalsTable == '':
    if append == 'true':
        arcpy.AddMessage('Appending features to ' + appendFC)
        # schemas do not have to match, but no attributes will be copied over
        # unless the fields are in both layers
def main(argv=None):
    """Iterate over LM, BM, and restoration tasks."""
    if argv is None:
        argv = sys.argv  # Get parameters from ArcGIS tool dialog

    start_time = time.clock()

    # USER SETTINGS ######################################################
    # Restoration Settings
    # ALL input data must be in the same projection

    # Set to True to restore highest ROI. Set to False to restore strongest barrier
    restore_max_roi = argv[1]

    # Resistance value of restored habitat. Must be 1 or greater.
    restored_resistance_val = argv[2]

    # No spaces or special chars in paths or gdb names
    restoration_data_gdb = argv[3]

    # No spaces in path, avoid using dropbox or network drive
    # Project directories will be created in this (iter1, iter2...) as will an
    # output geodatabase
    output_dir = argv[4]

    # Resistance raster. Should be in input GDB
    resistance_ras = argv[5]
    # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    core_fc = argv[6]

    core_fn = argv[7]  # Core area field name

    radius = argv[8]  # Restoration radius in meters
    iterations = argv[9]  # Number of restorations to perform

    # If less than this proportion of ag in circle, don't consider restoring circle
    min_ag_threshold = argv[10]

    # Don't consider barriers below this improvement score (average improvement
    # per meter diameter restored)
    min_improvement_val = argv[11]

    # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    parcel_cost_ras = argv[12]

    # Right now this is just a raster with all pixels set to 0.113174
    restoration_cost_ras = argv[13]

    ag_ras = argv[14]  # 1=Ag, 0=Not Ag

    # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores across all pairs.
    barrier_combine_method = argv[15]

    # Use cwd_thresh = None for no threshold. Use cwd_thresh = X to not
    # consider restorations more than X map units away from each core area.
    cwd_thresh = argv[16]

    # END USER SETTINGS ##################################################

    try:
        # Setup path and create directories
        gprint('Hey! Make sure everything is in the same projection!\n')
        gprint('Setting up paths and creating directories')
        sys.path.append('..\\toolbox\\scripts')
        res_ras = os.path.join(restoration_data_gdb, resistance_ras)
        core_fc_path = os.path.join(restoration_data_gdb, core_fc)

        # Set up a NEW output gdb (leave previous ones on drive)
        i = None
        for i in range(1, 200):
            output_gdb = 'restorationOutput' + str(i) + '.gdb'
            if not arcpy.Exists(os.path.join(output_dir, output_gdb)):
                break
            gprint('Previous output GDB ' + output_gdb + ' exists. '
                   'Delete to save disk space.')
        arcpy.CreateFileGDB_management(output_dir, output_gdb)
        output_gdb = os.path.join(output_dir, output_gdb)
        log_file = os.path.join(output_gdb, 'Iterate Barriers' + str(i) + '.py')
        # Write a copy of this file to output dir as a record of settings
        shutil.copyfile(__file__, log_file)

        arcpy.env.cellSize = res_ras
        arcpy.env.extent = res_ras
        arcpy.env.snapRaster = res_ras
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = output_gdb
        arcpy.env.workspace = output_gdb

        spatialref = arcpy.Describe(res_ras).spatialReference
        mapunits = spatialref.linearUnitName
        gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits + 's')

        # Calculate fraction of ag within radius of each pixel
        gprint('Calculating purchase cost, fraction of ag, etc within radius '
               'of each pixel.')
        ag_ras = os.path.join(restoration_data_gdb, ag_ras)
        in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
        arcpy.env.extent = ag_ras
        out_focal_stats = arcpy.sa.FocalStatistics(ag_ras, in_neighborhood,
                                                   "MEAN", "NODATA")
        proportion_ag_ras = os.path.join(output_gdb, 'proportionAgRas')
        out_focal_stats.save(proportion_ag_ras)
        arcpy.env.extent = res_ras

        # Calculate purchase cost of circles
        parcel_cost_ras = os.path.join(restoration_data_gdb, parcel_cost_ras)
        arcpy.env.extent = parcel_cost_ras
        out_focal_stats = arcpy.sa.FocalStatistics(parcel_cost_ras,
                                                   in_neighborhood, "MEAN", "DATA")
        cost_focal_stats_ras = os.path.join(output_gdb, 'cost_focal_stats_ras')
        out_focal_stats.save(cost_focal_stats_ras)
        arcpy.env.extent = res_ras

        circle_area = float(npy.pi * radius * radius)
        outras = arcpy.sa.Raster(cost_focal_stats_ras) * circle_area
        purch_cost_ras = os.path.join(output_gdb, 'purchaseCostRaster')
        outras.save(purch_cost_ras)
        lu.delete_data(cost_focal_stats_ras)

        restoration_cost_ras = os.path.join(restoration_data_gdb, restoration_cost_ras)
        outras = (arcpy.sa.Raster(purch_cost_ras)
                  + (arcpy.sa.Raster(restoration_cost_ras) * radius * radius * npy.pi))
        total_cost_ras = os.path.join(output_gdb, 'totalCostRaster')
        outras.save(total_cost_ras)

        # Create mask to remove areas without cost data
        arcpy.env.extent = total_cost_ras
        cost_mask_ras = os.path.join(output_gdb, 'costMaskRaster')
        cost_thresh = 0
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(total_cost_ras) > float(cost_thresh)), 1)
        out_con.save(cost_mask_ras)
        arcpy.env.extent = res_ras

        # Create mask to remove areas below ag threshold
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(proportion_ag_ras) > float(min_ag_threshold)), 1)
        ag_mask_ras = os.path.join(output_gdb, 'agMaskRaster')
        out_con.save(ag_mask_ras)

        do_step_1 = 'true'
        do_step_2 = 'true'
        do_step_5 = 'false'
        all_restored_areas_ras = ''

        for cur_iter in range(1, iterations + 1):
            start_time1 = time.clock()

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            lu.dashline(1)
            gprint('Running iteration number ' + str(cur_iter))
            proj_dir = os.path.join(output_dir, 'iter' + str(cur_iter) + 'Proj')
            lu.create_dir(output_dir)
            lu.delete_dir(proj_dir)
            lu.create_dir(proj_dir)
            if cur_iter > 1:  # Copy previous s2 linktable to new project dir
                datapass_dir = os.path.join(proj_dir, 'datapass')
                lu.create_dir(datapass_dir)
                proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                datapass_dir_iter1 = os.path.join(proj_dir1, 'datapass')
                s2_link_tbl_iter1 = os.path.join(datapass_dir_iter1,
                                                 'linkTable_s2.csv')
                s2_link_tbl = os.path.join(datapass_dir, 'linkTable_s2.csv')
                shutil.copyfile(s2_link_tbl_iter1, s2_link_tbl)

            # Run Linkage Mapper
            # Copy distances text file from earlier LM run to the output
            # directory- speeds things up!
            dist_file = os.path.join(output_dir, core_fc + '_dists.txt')
            if not os.path.exists(dist_file):
                if cur_iter == 1:
                    gprint('Will calculate distance file.')
                    dist_file = '#'
                else:
                    proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                    dist_file1 = os.path.join(proj_dir1, core_fc + '_dists.txt')
                    # Put a copy here for future runs
                    shutil.copyfile(dist_file1, dist_file)

            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            argv = ('lm_master.py', proj_dir, core_fc_path, core_fn, res_ras,
                    do_step_1, do_step_2, 'Cost-Weighted & Euclidean',
                    dist_file, 'true', 'true', 'false', '4', 'Cost-Weighted',
                    'true', do_step_5, 'true', '200000', '10000', '#', '#',
                    '#', '#')
            gprint('Running ' + str(argv))
            lm_master.lm_master(argv)
            do_step_1 = 'false'  # Can skip for future iterations
            do_step_2 = 'false'  # Can skip for future iterations
            do_step_5 = 'false'  # Skipping for future iterations

            start_radius = str(radius)
            end_radius = str(radius)
            radius_step = '0'
            save_radius_ras = 'false'
            write_pct_ras = 'false'
            argv = ('barrier_master.py', proj_dir, res_ras, start_radius,
                    end_radius, radius_step, barrier_combine_method,
                    save_radius_ras, write_pct_ras, cwd_thresh)
            gprint('Running ' + str(argv))
            barrier_master.bar_master(argv)

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            gprint('Finding restoration circles with max barrier score / ROI')
            # Find points with max ROI
            prefix = os.path.basename(proj_dir)
            if barrier_combine_method == 'Sum':
                sum_suffix = 'Sum'
            else:
                sum_suffix = ''
            barrier_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad"
                          + str(radius))
            barrier_ras = os.path.join(proj_dir, 'output', 'barriers.gdb',
                                       barrier_fn)
            if not arcpy.Exists(barrier_ras):
                msg = ('Error: cannot find barrier output: ' + barrier_ras)
                lu.raise_error(msg)

            if cur_iter > 1:
                gprint('Creating mask for previously restored areas')
                in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
                arcpy.env.extent = all_restored_areas_ras
                out_focal_stats = arcpy.sa.FocalStatistics(
                    all_restored_areas_ras, in_neighborhood, "MEAN", "DATA")
                all_restored_focal_ras = os.path.join(
                    output_gdb, 'allRestFocRas_iter' + str(cur_iter))
                # Anything > 0 would include a restored area
                out_focal_stats.save(all_restored_focal_ras)
                arcpy.env.extent = res_ras
                rest_mask_ras = os.path.join(
                    output_gdb, 'restMaskRaster_iter' + str(cur_iter))
                minval = 0
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(all_restored_focal_ras) == float(minval)), 1)
                out_con.save(rest_mask_ras)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidate_barrier_ras = os.path.join(
                output_gdb, 'candidateBarrierRaster' + '_iter' + str(cur_iter))
            if cur_iter > 1:
                gprint('Creating candidate restoration raster using barrier '
                       'results, previous restorations, and selection '
                       'criteria')
                # ROI scores will be in terms of total improvement
                # (= score * diameter)
                out_calc = (arcpy.sa.Raster(cost_mask_ras)
                            * arcpy.sa.Raster(ag_mask_ras)
                            * arcpy.sa.Raster(barrier_ras)
                            * arcpy.sa.Raster(rest_mask_ras) * (radius * 2))
            else:
                out_calc = (arcpy.sa.Raster(cost_mask_ras)
                            * arcpy.sa.Raster(ag_mask_ras)
                            * arcpy.sa.Raster(barrier_ras) * radius * 2)

            min_barrier_score = min_improvement_val * radius * 2
            if restored_resistance_val != 1:
                out_calc_2 = (out_calc
                              - (2 * radius * (restored_resistance_val - 1)))
                out_con = arcpy.sa.Con(
                    (out_calc_2 >= float(min_barrier_score)), out_calc_2)
            else:
                out_con = arcpy.sa.Con((out_calc >= float(min_barrier_score)),
                                       out_calc)
            out_con.save(candidate_barrier_ras)
            lu.build_stats(candidate_barrier_ras)

            purchase_roi_ras = os.path.join(
                output_gdb, 'purchaseRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras)
                        / arcpy.sa.Raster(purch_cost_ras))
            out_calc.save(purchase_roi_ras)
            lu.build_stats(purchase_roi_ras)

            total_roi_ras = os.path.join(
                output_gdb, 'purchaseRestRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras)
                        / arcpy.sa.Raster(total_cost_ras))
            out_calc.save(total_roi_ras)
            lu.build_stats(total_roi_ras)

            max_barrier = float(
                arcpy.GetRasterProperties_management(candidate_barrier_ras,
                                                     "MAXIMUM").getOutput(0))
            gprint('Maximum barrier improvement score: ' + str(max_barrier))
            if max_barrier < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag "
                                 "threshold criteria.")

            max_purch_roi = arcpy.GetRasterProperties_management(
                purchase_roi_ras, "MAXIMUM")
            gprint('Maximum purchase ROI score: ' + str(max_purch_roi.getOutput(0)))
            max_roi = arcpy.GetRasterProperties_management(
                total_roi_ras, "MAXIMUM")
            gprint('Maximum total ROI score: ' + str(max_roi.getOutput(0)))

            if restore_max_roi:
                out_point = os.path.join(
                    output_gdb, 'maxRoiPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum ROI to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(total_roi_ras) >= float(max_roi.getOutput(0))),
                    total_roi_ras)
                max_roi_ras = os.path.join(output_gdb, 'max_roi_ras')
                out_con.save(max_roi_ras)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(max_roi_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)
            else:  # Restoring strongest barrier instead
                out_point = os.path.join(
                    output_gdb, 'maxBarrierPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum BARRIER IMPROVEMENT SCORE'
                       ' to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(candidate_barrier_ras) >= max_barrier),
                    candidate_barrier_ras)
                max_barrier_ras = os.path.join(output_gdb, 'maxBarrierRaster')
                out_con.save(max_barrier_ras)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(max_barrier_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            gprint('Done evaluating candidate restorations')
            result = int(arcpy.GetCount_management(out_point).getOutput(0))
            if result > 1:
                # Would be better to retain point with max barrier score when
                # we have multiple points with same ROI
                arcpy.AddWarning('Deleting points with identical '
                                 'ROI/improvement score values')
                arcpy.DeleteIdentical_management(out_point, "grid_code",
                                                 0.1, 0.1)

            arcpy.sa.ExtractMultiValuesToPoints(
                out_point, [[candidate_barrier_ras, "barrierScore"],
                            [purch_cost_ras, "purchCost"],
                            [total_cost_ras, "totalCost"],
                            [purchase_roi_ras, "purchaseROI"],
                            [total_roi_ras, "totalROI"]], "NONE")
            arcpy.AddField_management(out_point, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(out_point, "restorationNumber",
                                            cur_iter, "PYTHON_9.3")
            arcpy.AddField_management(out_point, "radius", "DOUBLE")
            arcpy.CalculateField_management(out_point, "radius", radius,
                                            "PYTHON_9.3")
            arcpy.AddField_management(out_point, "barrierScore_per_m", "DOUBLE")
            arcpy.CalculateField_management(
                out_point, "barrierScore_per_m",
                "(float(!barrierScore!) / (!radius! * 2))", "PYTHON_9.3")

            gprint('\nCreating restoration circles')
            if restore_max_roi:
                circle_fc = os.path.join(
                    output_gdb, 'maxRoiCircle' + '_iter' + str(cur_iter))
            else:
                circle_fc = os.path.join(
                    output_gdb, 'maxBarrierCircle' + '_iter' + str(cur_iter))
            arcpy.Buffer_analysis(out_point, circle_fc, radius)

            gprint('Rasterizing restoration circles')
            if restore_max_roi:
                circle_ras = os.path.join(
                    output_gdb, 'maxRoicircle_ras' + '_iter' + str(cur_iter))
            else:
                circle_ras = os.path.join(
                    output_gdb, 'maxBarrierCircleRas' + '_iter' + str(cur_iter))
            arcpy.FeatureToRaster_conversion(circle_fc, 'totalROI', circle_ras,
                                             arcpy.env.cellSize)

            # restore raster
            gprint('Digitally restoring resistance raster')
            res_ras_restored = os.path.join(
                output_gdb, 'resRastRestored' + '_iter' + str(cur_iter))
            out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), res_ras,
                                   restored_resistance_val)
            out_con.save(res_ras_restored)

            all_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter))
            prev_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter - 1))
            if cur_iter == 1:
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), 0, 1)
            else:
                # Add this restoration to areas restored
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras),
                                       prev_restored_areas_ras, 1)
            out_con.save(all_restored_areas_ras)

            lu.delete_data(circle_ras)

            # Use for next iteration resistance raster
            res_ras = res_ras_restored

            # Add circle into feature class with all circles
            if restore_max_roi:
                all_circles_fc = os.path.join(output_gdb, "allCirclesMaxROI")
            else:
                all_circles_fc = os.path.join(output_gdb, "allCirclesMaxBarriers")
            if cur_iter == 1:
                arcpy.CopyFeatures_management(circle_fc, all_circles_fc)
            else:
                arcpy.Append_management(circle_fc, all_circles_fc, "TEST")
            gprint('Finished iteration #' + str(cur_iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint('\nDone with iterations.')
        start_time = lu.elapsed_time(start_time)
        gprint('Outputs saved in: ' + output_gdb)
        gprint('Back up your project directories if you want to save '
               'corridor/barrier results.')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)
    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
def generate_route_border_rule_table(workspace,route,route_id_field,boundary,boundary_id_field,buffer_size,route_border_rule_table,high_angle_threshold,offset): arcpy.AddMessage("Generating route border rule source table for {1}...".format(boundary)) try: date = datetime.now() date_string = date.strftime("%m/%d/%Y") spatial_reference = arcpy.Describe(route).spatialReference xy_resolution = "{0} {1}".format(spatial_reference.XYResolution,spatial_reference.linearUnitName) ############################################################################################################### # get all candidate border routes arcpy.AddMessage("Identifying candidate border routes...") # generate boundary border boundary_border = os.path.join(workspace,"{0}_{1}_border".format(boundary,"boundary")) arcpy.FeatureToLine_management(boundary, boundary_border) # dissolve polygon boundary based on boundary id boundary_border_dissolved = os.path.join(workspace,"{0}_boundary_border_dissolved".format(boundary)) arcpy.Dissolve_management(boundary_border,boundary_border_dissolved,[boundary_id_field]) # generate buffer around boundary # arcpy.AddMessage("generate buffer around boundary") boundary_border_buffer = os.path.join(workspace,"{0}_{1}".format(boundary,"boundary_buffer")) arcpy.Buffer_analysis(boundary_border_dissolved, boundary_border_buffer, buffer_size, "FULL", "ROUND") # get candidate border route # arcpy.AddMessage("get candidate border route") candidate_border_route_multipart = "in_memory\\candidate_{0}_border_route_multipart".format(boundary) candidate_border_route = os.path.join(workspace,"candidate_{0}_border_route".format(boundary)) arcpy.Clip_analysis(route, boundary_border_buffer, candidate_border_route_multipart) arcpy.MultipartToSinglepart_management(candidate_border_route_multipart, candidate_border_route) ################################################################################################################ ################################################################################################################ # filter out candidate border routes that 'intersects' boundary at high angles arcpy.AddMessage("Filtering out candidate border routes that 'intersects' boundary at high angles...") route_buffer = os.path.join(workspace,"{0}_{1}".format(route,"buffer_flat")) if not arcpy.Exists(route_buffer): arcpy.Buffer_analysis(route, route_buffer, buffer_size, "FULL", "FLAT") # clip boundary segments within route buffer boundary_border_within_buffer_multipart = "in_memory\\{0}_boundary_within_{1}_buffer_multipart".format(boundary,route) boundary_border_within_buffer = os.path.join(workspace,"{0}_boundary_within_{1}_buffer".format(boundary,route)) arcpy.Clip_analysis(boundary_border_dissolved, route_buffer, boundary_border_within_buffer_multipart) arcpy.MultipartToSinglepart_management(boundary_border_within_buffer_multipart, boundary_border_within_buffer) # Add 'SEGMENT_ID_ALL_CANDIDATES' field to candidate route and populate it with 'OBJECTID' arcpy.AddField_management(candidate_border_route,"SEGMENT_ID_ALL_CANDIDATES","LONG") arcpy.CalculateField_management(candidate_border_route, "SEGMENT_ID_ALL_CANDIDATES", "!OBJECTID!", "PYTHON") # Add 'ANGLE_ROUTE' field to candidate route and populate it with the angle to the true north(= 0 degree) arcpy.AddField_management(candidate_border_route,"ANGLE_ROUTE","DOUBLE") with arcpy.da.UpdateCursor(candidate_border_route,("SHAPE@","ANGLE_ROUTE")) as uCur: for row in uCur: shape = row[0] x_first = shape.firstPoint.X y_first = shape.firstPoint.Y 
x_last = shape.lastPoint.X y_last = shape.lastPoint.Y angle = calculate_angle(x_first,y_first,x_last,y_last) if angle >=0: row[1]=angle uCur.updateRow(row) # Add 'ANGLE_BOUNDARY' field to boundary segment within route buffer and populate it with the angle to the true north(= 0 degree) arcpy.AddField_management(boundary_border_within_buffer,"ANGLE_BOUNDARY","DOUBLE") with arcpy.da.UpdateCursor(boundary_border_within_buffer,("SHAPE@","ANGLE_BOUNDARY")) as uCur: for row in uCur: shape = row[0] x_first = shape.firstPoint.X y_first = shape.firstPoint.Y x_last = shape.lastPoint.X y_last = shape.lastPoint.Y angle = calculate_angle(x_first,y_first,x_last,y_last) if angle: row[1]=angle uCur.updateRow(row) del uCur # locate boundary segment within buffer along candidate border route. # assuming that if the boundary segment can't be located along its corresponding route, these two might have high angles. boundary_along_candidate_border_route = os.path.join(workspace,"{0}_boundary_along_candidate_{1}_border_route".format(boundary,boundary)) arcpy.LocateFeaturesAlongRoutes_lr(boundary_border_within_buffer,candidate_border_route,"SEGMENT_ID_ALL_CANDIDATES",buffer_size,\ boundary_along_candidate_border_route,"{0} {1} {2} {3}".format("RID","LINE","FMEAS","TMEAS")) arcpy.JoinField_management(boundary_along_candidate_border_route, "RID", candidate_border_route, "SEGMENT_ID_ALL_CANDIDATES", ["ANGLE_ROUTE"]) positive_candidate_border_route = [] with arcpy.da.SearchCursor(boundary_along_candidate_border_route,("RID","ANGLE_ROUTE","ANGLE_BOUNDARY")) as sCur: for row in sCur: sid = str(row[0]) angle_route = row[1] angle_boundary = row[2] if angle_route and angle_boundary: delta_angle = abs(angle_route-angle_boundary) # get real intersecting angle if delta_angle > 90 and delta_angle <= 270: delta_angle = abs(180 - delta_angle) elif delta_angle > 270: delta_angle = 360 - delta_angle else: pass # filter out negative candidate border route if delta_angle <= high_angle_threshold: if sid not in positive_candidate_border_route: positive_candidate_border_route.append(sid) del sCur candidate_border_route_lyr = "in_memory\\candidate_border_route_lyr" arcpy.MakeFeatureLayer_management(candidate_border_route, candidate_border_route_lyr) candidate_border_route_positive = os.path.join(workspace,"candidate_{0}_border_route_positive".format(boundary)) where_clause = "\"{0}\" IN ({1})".format("OBJECTID",",".join(positive_candidate_border_route)) arcpy.SelectLayerByAttribute_management(candidate_border_route_lyr, "NEW_SELECTION", where_clause) arcpy.CopyFeatures_management(candidate_border_route_lyr,candidate_border_route_positive) candidate_border_route_negative = os.path.join(workspace,"candidate_{0}_border_route_negative".format(boundary)) where_clause = "\"{0}\" NOT IN ({1})".format("OBJECTID",",".join(positive_candidate_border_route)) arcpy.SelectLayerByAttribute_management(candidate_border_route_lyr, "NEW_SELECTION", where_clause) arcpy.CopyFeatures_management(candidate_border_route_lyr,candidate_border_route_negative) ################################################################################################################ ################################################################################################################ # get left, right boundary topology of positive candidate border route # handle candidate border route segment with different L/R boundary id by offset arcpy.AddMessage("Calculating L/R boundary topology of positive candidate border route...") # generate offset around boundary 
boundary_border_offset= os.path.join(workspace,"{0}_{1}".format(boundary,"boundary_offset")) arcpy.Buffer_analysis(boundary_border_dissolved, boundary_border_offset, offset, "FULL", "ROUND") # get intersections between positive candidate border route and boundary offset candidate_border_route_positive_boundary_offset_intersections = os.path.join(workspace,"candidate_{0}_border_route_positive_{1}_offset_intersections".format(boundary,boundary)) arcpy.Intersect_analysis([candidate_border_route_positive,boundary_border_offset], candidate_border_route_positive_boundary_offset_intersections, "ALL", "", "point") # split positive candidate border route by intersections generated above candidate_border_route_positive_splitted_by_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_splitted_by_offset".format(boundary)) arcpy.SplitLineAtPoint_management(candidate_border_route_positive,candidate_border_route_positive_boundary_offset_intersections,\ candidate_border_route_positive_splitted_by_offset,xy_resolution) # Add 'SEGMENT_ID_POSITIVE_CANDIDATES' field to splitted positive candidate route and populate it with 'OBJECTID' arcpy.AddField_management(candidate_border_route_positive_splitted_by_offset,"SEGMENT_ID_POSITIVE_CANDIDATES","LONG") arcpy.CalculateField_management(candidate_border_route_positive_splitted_by_offset, "SEGMENT_ID_POSITIVE_CANDIDATES", "!OBJECTID!", "PYTHON") # get positive candidate border route segments that within boundary offset candidate_border_route_positive_within_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset".format(boundary)) candidate_border_route_positive_splitted_by_offset_lyr = "in_memory\\candidate_{0}_border_route_positive_splitted_by_offset_lyr".format(boundary) arcpy.MakeFeatureLayer_management(candidate_border_route_positive_splitted_by_offset, candidate_border_route_positive_splitted_by_offset_lyr) arcpy.SelectLayerByLocation_management (candidate_border_route_positive_splitted_by_offset_lyr, "WITHIN", boundary_border_offset) arcpy.CopyFeatures_management(candidate_border_route_positive_splitted_by_offset_lyr,candidate_border_route_positive_within_offset) # get positive candidate border route segments that out of boundary offset candidate_border_route_positive_outof_offset = os.path.join(workspace,"candidate_{0}_border_route_positive_outof_offset".format(boundary)) arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_splitted_by_offset_lyr, "SWITCH_SELECTION") arcpy.CopyFeatures_management(candidate_border_route_positive_splitted_by_offset_lyr,candidate_border_route_positive_outof_offset) # generate offset around positive candidate border route within boundary offset # arcpy.AddMessage("generate offset around boundary") candidate_border_route_positive_within_offset_buffer= os.path.join(workspace,"candidate_{0}_border_route_positive_within_offset_buffer".format(boundary)) arcpy.Buffer_analysis(candidate_border_route_positive_within_offset, candidate_border_route_positive_within_offset_buffer, offset, "FULL", "FLAT") # clip boundary segments within offset distance from positive candidate route that within boundary offset boundary_border_within_positive_candidate_border_route_buffer_multipart = "in_memory\\{0}_boundary_within_positive_candidate_border_route_buffer_multipart".format(boundary) boundary_border_within_positive_candidate_border_route_buffer = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer".format(boundary)) 
arcpy.Clip_analysis(boundary_border_dissolved, candidate_border_route_positive_within_offset_buffer, boundary_border_within_positive_candidate_border_route_buffer_multipart) arcpy.MultipartToSinglepart_management(boundary_border_within_positive_candidate_border_route_buffer_multipart, boundary_border_within_positive_candidate_border_route_buffer) # get endpoints of boundary border within offset buffer of splitted positive candidate border routes boundary_border_within_positive_candidate_border_route_buffer_endpoints = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer_endpoints".format(boundary)) arcpy.FeatureVerticesToPoints_management(boundary_border_within_positive_candidate_border_route_buffer,\ boundary_border_within_positive_candidate_border_route_buffer_endpoints,"BOTH_ENDS") arcpy.DeleteIdentical_management(boundary_border_within_positive_candidate_border_route_buffer_endpoints, ["Shape"]) # split boundary border within offset buffer of splitted positive candidate border routes and endpoints location # then delete identical shape boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints = os.path.join(workspace,"{0}_boundary_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints".format(boundary)) arcpy.SplitLineAtPoint_management(boundary_border_within_positive_candidate_border_route_buffer,boundary_border_within_positive_candidate_border_route_buffer_endpoints,\ boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,xy_resolution) arcpy.DeleteIdentical_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, ["Shape"]) # Add 'SEGMENT_ID_BOUNDARY' field to boundary segments within offset distance from positive candidate route that within boundary offset and populate it with 'OBJECTID' arcpy.AddField_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,"SEGMENT_ID_BOUNDARY","LONG") arcpy.CalculateField_management(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, "SEGMENT_ID_BOUNDARY", "!OBJECTID!", "PYTHON") # locate boundary segments within offset distance of positive candidate route that within boundary offset along positive candidate route that within boundary offset boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route = os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route".format(boundary)) arcpy.LocateFeaturesAlongRoutes_lr(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints,candidate_border_route_positive_within_offset,"SEGMENT_ID_POSITIVE_CANDIDATES",offset,\ boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route,"{0} {1} {2} {3}".format("RID","LINE","FMEAS","TMEAS")) # get left, right boundary topology of boundary within offset distance of positive candidate route that within boundary offset along positive candidate route that within boundary offset boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases= os.path.join(workspace,"{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology_allcases".format(boundary,boundary)) arcpy.Identity_analysis(boundary_border_within_positive_candidate_border_route_buffer_splitted_by_own_endpoints, boundary, 
        # ...continuation of an arcpy.Identity_analysis(...) call whose opening is truncated above:
        #     boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases,
        #     "ALL", "", "KEEP_RELATIONSHIPS")
        boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr = \
            "in_memory\\{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology_allcases_lyr".format(boundary, boundary)
        arcpy.MakeFeatureLayer_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases,
                                          boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr)
        where_clause = "\"{0}\" <> 0 AND \"{1}\" <> 0".format("LEFT_{0}".format(boundary), "RIGHT_{0}".format(boundary))
        arcpy.SelectLayerByAttribute_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr,
                                                "NEW_SELECTION", where_clause)
        boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology = os.path.join(
            workspace, "{0}_boundary_border_within_positive_candidate_border_route_buffer_with_{1}_topology".format(boundary, boundary))
        arcpy.CopyFeatures_management(boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology_allcases_lyr,
                                      boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology)
        arcpy.JoinField_management(boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route, "SEGMENT_ID_BOUNDARY",
                                   boundary_border_within_positive_candidate_border_route_buffer_with_polygon_topology, "SEGMENT_ID_BOUNDARY",
                                   ["LEFT_{0}".format(boundary_id_field), "RIGHT_{0}".format(boundary_id_field)])
        arcpy.JoinField_management(candidate_border_route_positive_within_offset, "SEGMENT_ID_POSITIVE_CANDIDATES",
                                   boundary_border_within_positive_candidate_border_route_buffer_along_candidate_border_route, "RID",
                                   ["SEGMENT_ID_BOUNDARY", "LEFT_{0}".format(boundary_id_field), "RIGHT_{0}".format(boundary_id_field)])
        candidate_border_route_positive_within_offset_lyr = "in_memory\\candidate_{0}_border_route_positive_within_offset_lyr".format(boundary)
        arcpy.MakeFeatureLayer_management(candidate_border_route_positive_within_offset, candidate_border_route_positive_within_offset_lyr)
        where_clause = "\"{0}\" IS NOT NULL AND \"{1}\" IS NOT NULL".format("LEFT_{0}".format(boundary_id_field), "RIGHT_{0}".format(boundary_id_field))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_within_offset_lyr, "NEW_SELECTION", where_clause)
        candidate_border_route_positive_within_offset_with_polygon_topology = os.path.join(
            workspace, "candidate_{0}_border_route_positive_within_offset_with_{1}_topology".format(boundary, boundary))
        arcpy.CopyFeatures_management(candidate_border_route_positive_within_offset_lyr,
                                      candidate_border_route_positive_within_offset_with_polygon_topology)

        # get left, right boundary topology of candidate border route out of boundary offset
        candidate_border_route_positive_outof_offset_with_polygon_topology_allcases = os.path.join(
            workspace, "candidate_{0}_border_route_positive_outof_offset_with_{1}_topology_allcases".format(boundary, boundary))
        arcpy.Identity_analysis(candidate_border_route_positive_outof_offset, boundary,
                                candidate_border_route_positive_outof_offset_with_polygon_topology_allcases,
                                "ALL", "", "KEEP_RELATIONSHIPS")
        candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr = \
            "in_memory\\candidate_{0}_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr".format(boundary)
        arcpy.MakeFeatureLayer_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases,
                                          candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr)
        where_clause = "\"{0}\" <> 0 AND \"{1}\" <> 0".format("LEFT_{0}".format(boundary), "RIGHT_{0}".format(boundary))
        arcpy.SelectLayerByAttribute_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr,
                                                "NEW_SELECTION", where_clause)
        candidate_border_route_positive_outof_offset_with_polygon_topology = os.path.join(
            workspace, "candidate_{0}_border_route_positive_outof_offset_with_{1}_topology".format(boundary, boundary))
        arcpy.CopyFeatures_management(candidate_border_route_positive_outof_offset_with_polygon_topology_allcases_lyr,
                                      candidate_border_route_positive_outof_offset_with_polygon_topology)

        # merge
        candidate_border_route_positive_with_polygon_topology = "candidate_{0}_border_route_positive_with_{1}_topology".format(boundary, boundary)
        arcpy.FeatureClassToFeatureClass_conversion(candidate_border_route_positive_outof_offset_with_polygon_topology,
                                                    workspace, candidate_border_route_positive_with_polygon_topology)
        arcpy.Append_management([candidate_border_route_positive_within_offset_with_polygon_topology],
                                candidate_border_route_positive_with_polygon_topology, "NO_TEST")

        ################################################################################################################
        arcpy.AddMessage("Populate route_border_rule_table...")

        # calculate from measure and to measure of candidate border routes
        arcpy.AddGeometryAttributes_management(candidate_border_route_positive_with_polygon_topology, "LINE_START_MID_END")

        # get candidate border route segment geometry
        arcpy.AddField_management(candidate_border_route_positive_with_polygon_topology, "SEGMENT_GEOMETRY", "TEXT", "", "", 100)
        arcpy.CalculateField_management(candidate_border_route_positive_with_polygon_topology, "SEGMENT_GEOMETRY", "!shape.type!", "PYTHON")

        # sort candidate border route segments by route id and from measure
        candidate_border_route_positive_with_polygon_topology_sorted = os.path.join(
            workspace, "candidate_{0}_border_route_positive_with_polygon_topology_sorted".format(boundary))
        arcpy.Sort_management(candidate_border_route_positive_with_polygon_topology,
                              candidate_border_route_positive_with_polygon_topology_sorted,
                              [[route_id_field, "ASCENDING"], ["START_M", "ASCENDING"]])

        # create route_border_rule_table
        if arcpy.Exists(route_border_rule_table):
            arcpy.Delete_management(route_border_rule_table)
        create_route_border_rule_table_schema(workspace, route_border_rule_table)

        # populate route_border_rule_table
        iCur = arcpy.da.InsertCursor(route_border_rule_table,
                                     ["ROUTE_ID", "ROUTE_START_MEASURE", "ROUTE_END_MEASURE", "BOUNDARY_LEFT_ID",
                                      "BOUNDARY_RIGHT_ID", "SEGMENT_GEOMETRY", "EFFECTIVE_FROM_DT", "EFFECTIVE_TO_DT"])
        with arcpy.da.SearchCursor(candidate_border_route_positive_with_polygon_topology_sorted,
                                   [route_id_field, "START_M", "END_M", "LEFT_{0}".format(boundary_id_field),
                                    "RIGHT_{0}".format(boundary_id_field), "SEGMENT_GEOMETRY", "START_DATE", "END_DATE"]) as sCur:
            for row in sCur:
                iCur.insertRow(row)
        del sCur
        del iCur
        arcpy.CalculateField_management(route_border_rule_table, "BRP_PROCESS_DT", "'{0}'".format(date_string), "PYTHON")
        ################################################################################################################
        arcpy.AddMessage("done!")
        return route_border_rule_table
    except Exception:
        # arcpy.AddMessage(traceback.format_exc())
        sys.exit(traceback.format_exc())
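
# The snippet above calls create_route_border_rule_table_schema() but never defines it.
# A minimal sketch of what such a helper could look like, assuming a plain file-gdb
# table carrying the fields the cursors above read and write; the field types and
# lengths are assumptions, not taken from the original source.
def create_route_border_rule_table_schema(workspace, route_border_rule_table):
    table_name = os.path.basename(route_border_rule_table)
    arcpy.CreateTable_management(workspace, table_name)
    arcpy.AddField_management(route_border_rule_table, "ROUTE_ID", "TEXT", "", "", 100)
    arcpy.AddField_management(route_border_rule_table, "ROUTE_START_MEASURE", "DOUBLE")
    arcpy.AddField_management(route_border_rule_table, "ROUTE_END_MEASURE", "DOUBLE")
    arcpy.AddField_management(route_border_rule_table, "BOUNDARY_LEFT_ID", "TEXT", "", "", 100)
    arcpy.AddField_management(route_border_rule_table, "BOUNDARY_RIGHT_ID", "TEXT", "", "", 100)
    arcpy.AddField_management(route_border_rule_table, "SEGMENT_GEOMETRY", "TEXT", "", "", 100)
    arcpy.AddField_management(route_border_rule_table, "EFFECTIVE_FROM_DT", "DATE")
    arcpy.AddField_management(route_border_rule_table, "EFFECTIVE_TO_DT", "DATE")
    arcpy.AddField_management(route_border_rule_table, "BRP_PROCESS_DT", "DATE")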
# The opening of this fragment is truncated; only the tail of the expression that
# built "outcon" survives:
#     ... raster_folder + raster_fnme + "_corr" > 0)
arcpy.RasterToPolygon_conversion(outcon, shp_fnme + "_rextent")
# Find intersections between stream_order_n rivers and the boundary of the working area = outlets
arcpy.Intersect_analysis([shp_fnme + "_rextent.shp", shp_fnme + "_strorder.shp"],
                         shp_fnme + "_intersect.shp", "ALL", 40, "POINT")
# Merge the two intersect shapefiles
arcpy.Merge_management([shp_fnme + "_intersect.shp", shp_fnme + "_intersect2.shp"],
                       shp_fnme + "_outlets.shp")
# Remove the intersect files
arcpy.Delete_management(shp_fnme + "_intersect.shp")
arcpy.Delete_management(shp_fnme + "_intersect2.shp")
# Clean the shapefile by removing duplicate points
arcpy.AddXY_management(shp_fnme + "_outlets.shp")
arcpy.DeleteIdentical_management(shp_fnme + "_outlets.shp", ["POINT_X", "POINT_Y"])

river = open(env.workspace + "/River-caract+txt", "w")
river.write("river_nb \t L_watershed \n")

# 3) split Stream_X into shapefiles of one point each, and store them in a folder Ri
nbriver = int(arcpy.GetCount_management(shp_fnme + "_outlets.shp").getOutput(0))
print("Working on " + str(nbriver) + " basins")
for i in range(0, nbriver):
    if path.exists(tmpf):
        os.rmdir(tmpf)
    os.mkdir(tmpf)
    print("  Outlet of basin " + str(i + 1))
    path_river = env.workspace + "/RProfils/R" + str(i + 1) + "/shp_files/"
    path_river_r = env.workspace + "/RProfils/R" + str(i + 1) + "/rasters/"
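
# The truncated statement above presumably masked the corrected raster before
# polygonizing it. Assuming it used the Spatial Analyst Con() tool, the missing
# line may have looked roughly like this (a sketch, not the original code):
from arcpy.sa import Con, Raster

outcon = Con(Raster(raster_folder + raster_fnme + "_corr") > 0, 1)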
def main():
    # environment settings
    arcpy.env.overwriteOutput = True

    # -- make copy of original nhd flowlines
    nhd_flowlines = arcpy.CopyFeatures_management(nhd_orig_flowline_path, 'in_memory/nhd_flowlines')

    # add source field to track which part of the workflow added each perennial network flowline
    arcpy.AddField_management(nhd_flowlines, 'Source', 'TEXT', '', '', 75)

    # --perennial coded lines--
    # select lines from original nhd that are coded as perennial
    arcpy.MakeFeatureLayer_management(nhd_flowlines, 'nhd_flowlines_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46006 """)
    flowline_per = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/flowline_per')

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            row[0] = "1. Perennial Code"
            cursor.updateRow(row)

    # --add missing major rivers--
    # --subsetted artificial coded lines that are in perennial nhd area polygons--

    # select perennial coded nhd area polygons
    arcpy.MakeFeatureLayer_management(nhd_area_path, 'nhd_area_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_area_lyr', 'NEW_SELECTION', """ "FCODE" = 46006 """)

    # select and dissolve artificial coded nhd lines that are within perennial nhd area polygons
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', 'nhd_area_lyr', '', 'SUBSET_SELECTION')
    flowline_art_code = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/flowline_art_code',
                                                  'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')

    # remove short lines (< 50 m) that act as artificial connectors to flowlines outside perennial nhd area polygons
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'INTERSECT', 'nhd_area_lyr', '1 Meters', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'SUBSET_SELECTION', """ "FCODE" <> 55800 """)
    arcpy.MakeFeatureLayer_management(flowline_art_code, 'flowline_art_code_lyr')
    arcpy.SelectLayerByLocation_management('flowline_art_code_lyr', 'INTERSECT', 'nhd_flowlines_lyr', '', 'NEW_SELECTION')
    with arcpy.da.UpdateCursor('flowline_art_code_lyr', ['SHAPE@Length']) as cursor:
        for row in cursor:
            if row[0] < 50:
                cursor.deleteRow()

    # remove lines that end where a canal starts
    mr_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_art_code, 'in_memory/mr_end_pt', "END")
    arcpy.MakeFeatureLayer_management(mr_end_pt, 'mr_end_pt_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 33600 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'INTERSECT', flowline_art_code, '1 Meters', 'SUBSET_SELECTION')
    canal_start_pt = arcpy.FeatureVerticesToPoints_management('nhd_flowlines_lyr', 'in_memory/canal_start_pt', "START")
    arcpy.SelectLayerByLocation_management('mr_end_pt_lyr', 'INTERSECT', canal_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('flowline_art_code_lyr', 'INTERSECT', 'mr_end_pt_lyr', '', 'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_art_code_lyr')

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_art_code, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "2. Major Artificial in Perennial Area Polygon"
                cursor.updateRow(row)

    # --add missing flowlines in marshes--
    # --artificial coded lines that are perennial gaps in marsh waterbody polygons--

    # select nhd waterbodies that:
    #   - are coded as marshes (ftype 466)
    #   - intersect perennial stream start and end (i.e., are perennial stream inlet AND outlet)
    arcpy.MakeFeatureLayer_management(nhd_waterbody_path, 'nhd_waterbody_lyr')
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr', 'NEW_SELECTION', """ "FTYPE" = 466 """)
    marshes = arcpy.CopyFeatures_management('nhd_waterbody_lyr', 'in_memory/marshes')
    arcpy.MakeFeatureLayer_management(marshes, 'marshes_lyr')
    per_start_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_end_pt', "END")
    arcpy.SelectLayerByLocation_management('marshes_lyr', 'INTERSECT', per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('marshes_lyr', 'INTERSECT', per_end_pt, '', 'SUBSET_SELECTION')

    # select and dissolve nhd flowlines that:
    #   - are coded as artificial
    #   - fall within selected marsh waterbodies
    #   - are not already part of perennial stream network
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', 'marshes_lyr', '', 'SUBSET_SELECTION')
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_per, '', 'REMOVE_FROM_SELECTION')
    marsh_lines = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/marsh_lines',
                                            'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    marsh_gap_lines = findGaps(marsh_lines, flowline_per)

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', marsh_gap_lines, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "3. Artificial Network Gap in Marsh Waterbody"
                cursor.updateRow(row)

    # --add missing flowlines in smaller lakes and ponds--

    # select nhd waterbodies that:
    #   - are coded as lakes/ponds (ftype 390)
    #   - have area <= 0.03 sq km
    #   - are not named
    #   - intersect perennial stream start and end (i.e., are perennial stream inlet AND outlet)
    arcpy.SelectLayerByLocation_management('nhd_waterbody_lyr', 'INTERSECT', flowline_per, '', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr', 'SUBSET_SELECTION',
                                            """ "FTYPE" = 390 AND "AREASQKM" <= 0.03 AND "GNIS_NAME" = '' """)
    sm_lakes_ponds = arcpy.CopyFeatures_management('nhd_waterbody_lyr', 'in_memory/sm_lakes_ponds')
    arcpy.MakeFeatureLayer_management(sm_lakes_ponds, 'sm_lakes_ponds_lyr')
    per_start_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_end_pt', "END")
    arcpy.SelectLayerByLocation_management('sm_lakes_ponds_lyr', 'INTERSECT', per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('sm_lakes_ponds_lyr', 'INTERSECT', per_end_pt, '', 'SUBSET_SELECTION')

    # select nhd flowlines that:
    #   - fall within selected waterbodies
    #   - intersect perennial streams (i.e., are gaps on the perennial network)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', 'sm_lakes_ponds_lyr', '', 'NEW_SELECTION')
    flowline_wbody_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/flowline_wbody_dissolve',
                                                        'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    arcpy.MakeFeatureLayer_management(flowline_wbody_dissolve, 'flowline_wbody_dissolve_lyr')
    arcpy.SelectLayerByLocation_management('flowline_wbody_dissolve_lyr', 'INTERSECT', flowline_per, '', 'NEW_SELECTION')

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', 'flowline_wbody_dissolve_lyr', '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "4. Network Gap in Small Lake/Pond Waterbody"
                cursor.updateRow(row)

    # --remove flowlines where 2 lines end but none start (indicates 'false perennial tribs')--
    per_start_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_start_pt', "START")
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per, 'in_memory/per_end_pt', "END")
    per_end_pt_join = arcpy.SpatialJoin_analysis(per_end_pt, per_end_pt, 'in_memory/per_end_pt_join',
                                                 'JOIN_ONE_TO_ONE', 'KEEP_ALL', '', 'INTERSECT')
    arcpy.MakeFeatureLayer_management(per_end_pt_join, 'per_end_pt_join_lyr')
    arcpy.SelectLayerByLocation_management('per_end_pt_join_lyr', 'INTERSECT', per_start_pt, '', 'NEW_SELECTION')
    arcpy.SelectLayerByAttribute_management('per_end_pt_join_lyr', 'SWITCH_SELECTION')
    arcpy.SelectLayerByAttribute_management('per_end_pt_join_lyr', 'SUBSET_SELECTION', """ "Join_Count" >= 2 """)
    arcpy.MakeFeatureLayer_management(flowline_per, 'flowline_per_lyr')
    arcpy.SelectLayerByLocation_management('flowline_per_lyr', 'INTERSECT', 'per_end_pt_join_lyr', '', 'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_per_lyr')

    # --add named intermittent and connector flowlines that are directly downstream of perennial stream--

    # create perennial end pts shp (used to find intermittent lines that start where perennial ends)
    flowline_per_dissolve = arcpy.Dissolve_management(flowline_per, 'in_memory/flowline_per_dissolve',
                                                      '', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    per_end_pt = arcpy.FeatureVerticesToPoints_management(flowline_per_dissolve, 'in_memory/per_end_pt', "END")

    # select named intermittent and connector flowlines
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'SUBSET_SELECTION', """ "GNIS_NAME" <> '' """)

    # dissolve selected flowlines by name
    flowline_int_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/flowline_int_dissolve',
                                                      'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')

    # create points at start of dissolved intermittent and connector flowlines
    int_start_pts = arcpy.FeatureVerticesToPoints_management(flowline_int_dissolve, 'in_memory/int_start_pts', "START")

    # select perennial end points that overlap intermittent/connector start points
    arcpy.MakeFeatureLayer_management(per_end_pt, 'per_end_pt_lyr')
    arcpy.SelectLayerByLocation_management('per_end_pt_lyr', 'INTERSECT', int_start_pts, '', 'NEW_SELECTION')

    # select dissolved intermittent and connector flowlines that intersect selected perennial end points
    # (these lines are directly downstream of perennial stream)
    arcpy.MakeFeatureLayer_management(flowline_int_dissolve, 'flowline_int_dissolve_lyr')
    arcpy.SelectLayerByLocation_management('flowline_int_dissolve_lyr', 'INTERSECT', 'per_end_pt_lyr', '', 'NEW_SELECTION')

    # add selected flowlines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', 'flowline_int_dissolve_lyr', '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "5. Named Intermittent/Connector Directly Downstream of Network Line"
                cursor.updateRow(row)

    # --add named intermittent flowlines that fall on gaps in the perennial network--

    # select intermittent flowlines that aren't part of perennial network up to this point
    # these are potential network gap lines
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'SUBSET_SELECTION', """ "GNIS_NAME" <> '' """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_per, '', 'REMOVE_FROM_SELECTION')
    int_lines = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/int_lines')

    # find gaps on all selected lines
    int_gap_lines = findGaps(int_lines, flowline_per)

    # add intermittent gap lines to the perennial stream shp
    with arcpy.da.InsertCursor(flowline_per, ["SHAPE@"]) as iCursor:
        with arcpy.da.SearchCursor(int_gap_lines, ["SHAPE@"]) as sCursor:
            for row in sCursor:
                iCursor.insertRow([row[0]])

    # find gaps on dissolved lines (grabs lines that may be split by a trib and otherwise wouldn't be selected)
    int_lines_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/int_lines_dissolve',
                                                   'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    int_gap_lines_dissolve = findGaps(int_lines_dissolve, flowline_per)

    # add intermittent gap lines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', int_gap_lines_dissolve, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "6. Named Intermittent/Connector Network Gap"
                cursor.updateRow(row)

    # --add intermittent flowlines that fall on gaps in the perennial network--

    # select intermittent flowlines that aren't part of perennial network up to this point
    # these are potential network gap lines
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'ARE_IDENTICAL_TO', flowline_per, '', 'REMOVE_FROM_SELECTION')
    int_lines_all = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/int_lines_all')
    int_gap_lines_all = findGaps(int_lines_all, flowline_per)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', int_gap_lines_all, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 46003 OR "FCODE" = 33400 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'ARE_IDENTICAL_TO', flowline_per, '', 'REMOVE_FROM_SELECTION')
    int_lines_all_dissolve = arcpy.Dissolve_management('nhd_flowlines_lyr', 'in_memory/int_lines_all_dissolve',
                                                       'GNIS_NAME', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    int_gap_lines_all_dissolve = findGaps(int_lines_all_dissolve, flowline_per)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', int_gap_lines_all_dissolve, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "7. Unnamed Intermittent/Connector Network Gap"
                cursor.updateRow(row)

    # --add artificial flowlines that fall on gaps in the perennial network--
    # --these are potential network gap lines--

    # select artificial coded flowlines that aren't part of perennial network up to this point
    arcpy.SelectLayerByAttribute_management('nhd_flowlines_lyr', 'NEW_SELECTION', """ "FCODE" = 55800 """)
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', flowline_per, '', 'REMOVE_FROM_SELECTION')

    # create search aoi from perennial area polygons and marsh waterbody polygons
    arcpy.SelectLayerByAttribute_management('nhd_waterbody_lyr', 'NEW_SELECTION', """ "FTYPE" = 466 """)
    marshes = arcpy.CopyFeatures_management('nhd_waterbody_lyr', 'in_memory/marshes')
    arcpy.SelectLayerByAttribute_management('nhd_area_lyr', 'NEW_SELECTION', """ "FCODE" = 46006 """)
    per_area = arcpy.CopyFeatures_management('nhd_area_lyr', 'in_memory/per_area')
    art_gap_aoi = arcpy.Merge_management([marshes, per_area], 'in_memory/art_gap_aoi')

    # subset selection to flowlines that flow through the search aoi
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'WITHIN', art_gap_aoi, '', 'SUBSET_SELECTION')
    art_lines = arcpy.CopyFeatures_management('nhd_flowlines_lyr', 'in_memory/art_lines')
    art_gap_lines = findGaps(art_lines, flowline_per, 'True')

    # add artificial gap lines to the perennial stream shp
    arcpy.SelectLayerByLocation_management('nhd_flowlines_lyr', 'SHARE_A_LINE_SEGMENT_WITH', art_gap_lines, '', 'NEW_SELECTION')
    arcpy.Append_management('nhd_flowlines_lyr', flowline_per)

    # populate source field
    with arcpy.da.UpdateCursor(flowline_per, ['Source']) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = "8. Artificial Network Gap"
                cursor.updateRow(row)

    # --remove isolated (i.e., only intersect themselves), short (< 300 m) line segments--
    flowline_per_dissolve2 = arcpy.Dissolve_management(flowline_per, 'in_memory/flowline_per_dissolve2',
                                                       '', '', 'SINGLE_PART', 'UNSPLIT_LINES')
    flowline_per_join = arcpy.SpatialJoin_analysis(flowline_per_dissolve2, flowline_per_dissolve2,
                                                   'in_memory/flowline_per_join', 'JOIN_ONE_TO_ONE', 'KEEP_ALL', '', 'INTERSECT')
    arcpy.AddField_management(flowline_per_join, 'Length', 'DOUBLE')
    arcpy.CalculateField_management(flowline_per_join, 'Length', "!SHAPE.LENGTH@Meters!", 'PYTHON_9.3')
    arcpy.MakeFeatureLayer_management(flowline_per_join, 'flowline_per_join_lyr')
    arcpy.SelectLayerByAttribute_management('flowline_per_join_lyr', 'NEW_SELECTION', """ "Length" < 300 AND "Join_Count" <= 1 """)
    arcpy.SelectLayerByLocation_management('flowline_per_lyr', 'SHARE_A_LINE_SEGMENT_WITH', 'flowline_per_join_lyr', '', 'NEW_SELECTION')
    arcpy.DeleteFeatures_management('flowline_per_lyr')

    # --select and save final perennial shp--
    arcpy.SelectLayerByAttribute_management('flowline_per_lyr', 'CLEAR_SELECTION')
    arcpy.CopyFeatures_management(flowline_per, outpath)
    arcpy.DeleteIdentical_management(outpath, ['Shape'])
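
# findGaps() is used throughout main() above but is defined elsewhere in the project.
# A minimal sketch of the idea, assuming a "gap" is a candidate line whose start point
# and end point both touch the existing network; the third argument mirrors the call
# signature above but is ignored here. This is an assumption about the behavior, not
# the original implementation.
def findGaps(candidate_lines, network_lines, keep_all='False'):
    # endpoints of every candidate line
    start_pts = arcpy.FeatureVerticesToPoints_management(candidate_lines, 'in_memory/gap_start_pts', 'START')
    end_pts = arcpy.FeatureVerticesToPoints_management(candidate_lines, 'in_memory/gap_end_pts', 'END')
    # keep only the endpoints that touch the existing network
    arcpy.MakeFeatureLayer_management(start_pts, 'gap_start_lyr')
    arcpy.MakeFeatureLayer_management(end_pts, 'gap_end_lyr')
    arcpy.SelectLayerByLocation_management('gap_start_lyr', 'INTERSECT', network_lines, '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('gap_end_lyr', 'INTERSECT', network_lines, '', 'NEW_SELECTION')
    # a candidate bridges a gap if both of its endpoints touch the network
    arcpy.MakeFeatureLayer_management(candidate_lines, 'gap_candidate_lyr')
    arcpy.SelectLayerByLocation_management('gap_candidate_lyr', 'INTERSECT', 'gap_start_lyr', '', 'NEW_SELECTION')
    arcpy.SelectLayerByLocation_management('gap_candidate_lyr', 'INTERSECT', 'gap_end_lyr', '', 'SUBSET_SELECTION')
    return arcpy.CopyFeatures_management('gap_candidate_lyr', 'in_memory/gap_lines')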
def deduplicate_nhd(in_feature_class, out_feature_class='', unique_id='Permanent_Identifier'):
    """
    Returns a single feature class for all NHD features with no duplicated identifiers in it.
    :param in_feature_class: A feature class resulting from merging features from NHD datasets staged by subregion.
    :param out_feature_class: Optional. The feature class which will be created.
    :param unique_id: Optional. The identifier that needs to be unique in the output.
    :return:
    """
    # SETUP
    if out_feature_class:
        arcpy.AddMessage("Copying initial features to output...")
        arcpy.CopyFeatures_management(in_feature_class, out_feature_class)
    else:
        out_feature_class = in_feature_class

    # EXECUTE
    # Delete full identicals first--these come from overlaps in staged subregion data
    before_count = int(arcpy.GetCount_management(out_feature_class).getOutput(0))
    arcpy.AddMessage("Deleting full identicals...")
    # Check for full identicals on the original *attribute fields*, excluding the one
    # we specifically created to make them distinct.
    # Also exclude the object ID, since that is obviously distinct.
    excluded_fields = ['Shape', 'Shape_Length', 'Shape_Area', 'OBJECTID', 'nhd_merge_id']
    check_fields = [f.name for f in arcpy.ListFields(out_feature_class) if f.name not in excluded_fields]
    arcpy.DeleteIdentical_management(out_feature_class, check_fields)
    after_full_count = int(arcpy.GetCount_management(out_feature_class).getOutput(0))
    arcpy.AddMessage("{0} features were removed because they were full identicals to remaining features."
                     .format(before_count - after_full_count))

    # Delete duplicated IDs by keeping the most recent FDate--these come from the NHD editing process somehow
    arcpy.AddMessage("Deleting older features with duplicated identifiers...")
    # Get a list of distinct IDs that have duplicates
    arcpy.Frequency_analysis(out_feature_class, "in_memory/freqtable", unique_id)
    arcpy.TableSelect_analysis("in_memory/freqtable", "in_memory/dupeslist", '''"FREQUENCY" > 1''')
    count_dupes = int(arcpy.GetCount_management("in_memory/dupeslist").getOutput(0))

    # If there are any duplicates, remove them by keeping the one with the latest FDate
    if count_dupes > 0:
        dupe_ids = [row[0] for row in arcpy.da.SearchCursor("in_memory/dupeslist", (unique_id))]
        dupe_filter = ''' "{}" = '{{}}' '''.format(unique_id)
        for dupe_id in dupe_ids:
            filter_clause = dupe_filter.format(dupe_id)
            dates = [row[0] for row in arcpy.da.SearchCursor(out_feature_class, ["FDate"], filter_clause)]
            with arcpy.da.UpdateCursor(out_feature_class, [unique_id, "FDate"], filter_clause) as cursor:
                for row in cursor:
                    if row[1] != max(dates):
                        cursor.deleteRow()
        after_both_count = int(arcpy.GetCount_management(out_feature_class).getOutput(0))
        arcpy.AddMessage("{0} features were removed because they were less recently edited than another feature with the same identifier."
                         .format(after_full_count - after_both_count))
    arcpy.Delete_management("in_memory/freqtable")
    arcpy.Delete_management("in_memory/dupeslist")
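
# A usage sketch for deduplicate_nhd(); the paths are hypothetical. Writing to a new
# feature class keeps the merged input intact:
deduplicate_nhd(r"C:\Temp\NHD_merge.gdb\waterbody_merged",
                out_feature_class=r"C:\Temp\NHD_merge.gdb\waterbody_dedup")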
import arcpy, os

#: use arcgis pro python 3

#: delete identical roads
arcpy.DeleteIdentical_management(
    in_dataset="C:/Temp/RoadGrinder.gdb/GeocodeRoads",
    fields="Shape;ADDRSYS_L;ADDRSYS_R;FROMADDR_L;TOADDR_L;FROMADDR_R;TOADDR_R;PREDIR;NAME;POSTTYPE;POSTDIR;ZIPCODE_L;ZIPCODE_R;GLOBALID_SGID;Shape_Length",
    xy_tolerance="",
    z_tolerance="0")

#: delete identical alt names address points table
arcpy.DeleteIdentical_management(
    in_dataset="C:/Temp/RoadGrinder.gdb/AtlNamesAddrPnts",
    fields="AddSystem;AddNum;AddNumSuffix;PrefixDir;StreetName;StreetType;SuffixDir;ZipCode;UnitType;UnitID;City;CountyID;UTAddPtID",
    xy_tolerance="",
    z_tolerance="0")

#: delete identical alt names roads table
arcpy.DeleteIdentical_management(
    in_dataset="C:/Temp/RoadGrinder.gdb/AtlNamesRoads",
    fields="ADDRSYS_L;ADDRSYS_R;FROMADDR_L;TOADDR_L;FROMADDR_R;TOADDR_R;PREDIR;NAME;POSTTYPE;POSTDIR;ZIPCODE_L;ZIPCODE_R;GLOBALID_SGID",
    xy_tolerance="",
    z_tolerance="0")

print("done")
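
#: The three calls above repeat the same pattern; an equivalent loop over
#: (dataset, fields) pairs, with paths and field lists copied from the calls above:
dedup_jobs = [
    ("C:/Temp/RoadGrinder.gdb/GeocodeRoads",
     "Shape;ADDRSYS_L;ADDRSYS_R;FROMADDR_L;TOADDR_L;FROMADDR_R;TOADDR_R;PREDIR;NAME;POSTTYPE;POSTDIR;ZIPCODE_L;ZIPCODE_R;GLOBALID_SGID;Shape_Length"),
    ("C:/Temp/RoadGrinder.gdb/AtlNamesAddrPnts",
     "AddSystem;AddNum;AddNumSuffix;PrefixDir;StreetName;StreetType;SuffixDir;ZipCode;UnitType;UnitID;City;CountyID;UTAddPtID"),
    ("C:/Temp/RoadGrinder.gdb/AtlNamesRoads",
     "ADDRSYS_L;ADDRSYS_R;FROMADDR_L;TOADDR_L;FROMADDR_R;TOADDR_R;PREDIR;NAME;POSTTYPE;POSTDIR;ZIPCODE_L;ZIPCODE_R;GLOBALID_SGID"),
]
for dataset, fields in dedup_jobs:
    arcpy.DeleteIdentical_management(in_dataset=dataset, fields=fields, z_tolerance="0")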
# --adding VISUM link types--
if Types:
    arcpy.AddMessage("> adding PTV VISUM link types\n")
    arcpy.AddField_management(FC, 'STRECKENTYPEN', "LONG")
    arcpy.AddField_management(FC, 'STRECKENKLASSEN', "SHORT")
    Typ(FC)

# --nodes--
if Nodes:
    arcpy.AddMessage("> creating nodes")
    arcpy.FeatureVerticesToPoints_management(FC, Node_Name, "BOTH_ENDS")
    arcpy.AddMessage("> deleting stacked nodes\n")
    arcpy.AddXY_management(Node_Name)
    fields = ["POINT_X", "POINT_Y"]
    arcpy.DeleteIdentical_management(Node_Name, fields)
    arcpy.DeleteField_management(Node_Name, fields)

# --splitting links--
if Split:
    arcpy.AddMessage("> splitting links at nodes\n")
    arcpy.SplitLineAtPoint_management(FC, Node_Name, Split_Name, Radius)

# --create clear node and link numbers--
if Double_Node or Double_Link:
    VISUM = win32com.client.dynamic.Dispatch("Visum.Visum.22")
    VISUM.loadversion(Network)
    VISUM.Filters.InitAll()
    if Double_Node:
        arcpy.AddMessage("> creating clear node numbers")
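
# The node block above (AddXY -> DeleteIdentical on POINT_X/POINT_Y -> DeleteField)
# is a reusable pattern for collapsing stacked endpoints into single nodes; a small
# sketch of it as a standalone helper ("delete_stacked_points" is not part of the
# original script):
def delete_stacked_points(point_fc):
    xy_fields = ["POINT_X", "POINT_Y"]
    arcpy.AddXY_management(point_fc)                       # write coordinates to fields
    arcpy.DeleteIdentical_management(point_fc, xy_fields)  # drop points sharing identical XY
    arcpy.DeleteField_management(point_fc, xy_fields)      # remove the helper fields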
def getPerClassFunc(fileFolder, grid, codetoClass, code, rasterPoints, outputDatabase, verbose):
    perClass = fileFolder + "\\" + grid
    kmls = perClass + "\\KMLs"
    addPoly = perClass + "\\AddPolygon"
    removePoly = perClass + "\\RemovePolygon"
    tempKmls = perClass + "\\tempKmls"
    tempAdd = perClass + "\\tempAdd"
    tempRemove = perClass + "\\tempRemove"
    tempMerge = perClass + "\\templayermerge"
    os.makedirs(tempKmls)
    os.makedirs(tempAdd)
    os.makedirs(tempRemove)
    os.makedirs(tempMerge)
    tempDatabase = "temp.gdb"
    arcpy.CreateFileGDB_management(perClass, tempDatabase)
    listKmls = os.listdir(kmls)
    kmlNumber = len(listKmls)
    listAddPoly = os.listdir(addPoly)
    addNumber = len(listAddPoly)
    listRemovePoly = os.listdir(removePoly)
    removeNumber = len(listRemovePoly)
    kmlLayer = []

    if kmlNumber > 0:
        for file in listKmls:
            sampKML = kmls + "\\" + file
            # get short name
            splitext = os.path.splitext(file)
            outputName = splitext[0]
            # Process: KML To Layer
            arcpy.KMLToLayer_conversion(sampKML, tempKmls, outputName, "NO_GROUNDOVERLAY")
            kmlLayer.append(tempKmls + "\\" + outputName + ".gdb\\Placemarks\\Points")
            # print(kmlLayer)
        if verbose:
            print("KML to Layer is done")

    if addNumber > 0:
        for file in listAddPoly:
            sampAdd = addPoly + "\\" + file
            splitext = os.path.splitext(file)
            outputName = splitext[0]
            # Process: KML To Layer
            arcpy.KMLToLayer_conversion(sampAdd, tempAdd, outputName, "NO_GROUNDOVERLAY")
            addPolygon = tempAdd + "\\" + outputName + ".gdb\\Placemarks\\Polygons"

    if removeNumber > 0:
        for file in listRemovePoly:
            sampRemove = removePoly + "\\" + file
            splitext = os.path.splitext(file)
            outputName = splitext[0]
            # Process: KML To Layer
            arcpy.KMLToLayer_conversion(sampRemove, tempRemove, outputName, "NO_GROUNDOVERLAY")
            removePolygon = tempRemove + "\\" + outputName + ".gdb\\Placemarks\\Polygons"

    # create merged feature class from the different groups in perClass
    if kmlNumber >= 1:
        # Process: Merge
        outputmerge = perClass + "\\" + tempDatabase + "\\mergelayer"
        arcpy.Merge_management(kmlLayer, outputmerge)

    # create feature class for remove polygons: remove points that fall in removePolygons
    if removeNumber > 0 and kmlNumber > 0:
        outputmerge_rm = tempMerge + "\\mergelayer"
        # Process: Select Layer By Location
        arcpy.MakeFeatureLayer_management(outputmerge, outputmerge_rm)
        arcpy.SelectLayerByLocation_management(outputmerge_rm, "WITHIN", removePolygon, "", "NEW_SELECTION")
        # Process: Select Layer By Attribute
        arcpy.SelectLayerByAttribute_management(outputmerge_rm, "SWITCH_SELECTION", "")
        # Process: Copy Features
        outputRemove = perClass + "\\" + tempDatabase + "\\outputRemove"
        arcpy.CopyFeatures_management(outputmerge_rm, outputRemove, "", "0", "0", "0")
        if verbose:
            print("remove points is done")

    # create feature class for addPolygons
    if addNumber > 0:
        # Process: Select Layer By Location
        rasterPointsForAdd = tempMerge + "\\rasterpoints"
        arcpy.MakeFeatureLayer_management(rasterPoints, rasterPointsForAdd)
        arcpy.SelectLayerByLocation_management(rasterPointsForAdd, "WITHIN", addPolygon, "", "NEW_SELECTION")
        # Process: Copy Features
        outputAdd = perClass + "\\" + tempDatabase + "\\outputAdd"
        arcpy.CopyFeatures_management(rasterPointsForAdd, outputAdd, "", "0", "0", "0")
        if verbose:
            print("add points is done")

    isOutput = 0
    # merge if both add and remove
    if addNumber > 0 and removeNumber > 0:
        outputPerClass = perClass + "\\" + tempDatabase + "\\outputPerclass"
        arcpy.Merge_management([outputRemove, outputAdd], outputPerClass)
        arcpy.DeleteIdentical_management(outputPerClass, "Shape")
        isOutput = 1
        if verbose:
            print("merge add and remove is done")
    if addNumber > 0 and removeNumber == 0 and kmlNumber == 0:
        outputPerClass = outputAdd
        isOutput = 1
    if addNumber == 0 and removeNumber > 0:
        outputPerClass = outputRemove
        isOutput = 1
    if kmlNumber > 0 and addNumber == 0 and removeNumber == 0:
        outputPerClass = outputmerge
        isOutput = 1
    if kmlNumber > 0 and addNumber > 0 and removeNumber == 0:
        outputPerClass = perClass + "\\" + tempDatabase + "\\outputPerclass"
        arcpy.Merge_management([outputmerge, outputAdd], outputPerClass)
        arcpy.DeleteIdentical_management(outputPerClass, "Shape")
        isOutput = 1

    if isOutput > 0:
        # add field and calculate field
        # Process: Add Field
        arcpy.AddField_management(outputPerClass, "cls_lbl", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # Process: Calculate Field
        arcpy.CalculateField_management(outputPerClass, "cls_lbl", codetoClass[code], "VB", "")
        # arcpy.AddGeometryAttributes_management(outputPerClass, "POINT_X_Y_Z_M")
        if verbose:
            print("Add and calculate field is done")
        # get output
        outputGdb = fileFolder + "\\" + outputDatabase + "\\" + grid
        arcpy.CopyFeatures_management(outputPerClass, outputGdb, "", "0", "0", "0")
        if verbose:
            print(grid + " is done")
def wczytaj_dane(infc, outfc):  # "wczytaj_dane" = "load data"
    desc = arcpy.Describe(infc)
    # add the fields before any cursor is opened on them
    arcpy.AddField_management(infc, "id_from", "TEXT")
    arcpy.AddField_management(infc, "id_to", "TEXT")
    arcpy.AddField_management(infc, "id_jezdni", "TEXT")
    if not arcpy.ListFields(infc, "max_V"):
        arcpy.AddField_management(infc, "max_V", "SHORT")
    rows = arcpy.da.UpdateCursor(infc, ["SHAPE@", "id_from", "id_to", "id_jezdni"])

    # Create a new point feature class for the nodes
    arcpy.CreateFeatureclass_management(outfc[:outfc.rfind("\\")],
                                        outfc[outfc.rfind("\\") + 1:],
                                        "POINT", "", "DISABLED", "DISABLED",
                                        desc.spatialReference)
    arcpy.AddField_management(outfc, "ident", "TEXT")
    arcpy.AddField_management(outfc, "X", "DOUBLE")
    arcpy.AddField_management(outfc, "Y", "DOUBLE")
    arcpy.AddField_management(outfc, "identJ", "TEXT")
    cursor = arcpy.da.InsertCursor(outfc, ("SHAPE@XY", "ident", "X", "Y", "identJ"))
    for row in rows:
        feat = row[0]
        # Get the first point of the feature
        startpt = feat.firstPoint
        startx = startpt.X
        starty = startpt.Y
        identStart = "".join([str(startx)[-5:], str(starty)[-5:]])
        # Get the last point of the feature
        endpt = feat.lastPoint
        endx = endpt.X
        endy = endpt.Y
        identEnd = "".join([str(endx)[-5:], str(endy)[-5:]])
        identJezdni = "".join([identStart, identEnd])
        cursor.insertRow(((startx, starty), identStart, startx, starty, identJezdni))
        cursor.insertRow(((endx, endy), identEnd, endx, endy, identJezdni))
        # Update the id_from and id_to fields with the generated point identifiers
        row[1] = identStart
        row[2] = identEnd
        row[3] = identJezdni
        rows.updateRow(row)
    arcpy.DeleteIdentical_management(outfc, "ident")

    # fill the column holding the maximum speed, which depends on the road class
    speed_by_class = {'Z': 50, 'A': 100, 'S': 100, 'GP': 80, 'G': 60, 'L': 40, 'D': 30, 'I': 50}
    with arcpy.da.UpdateCursor(infc, ["klasaDrogi", "max_V"]) as updateCursor:
        for uRow in updateCursor:
            if uRow[0] in speed_by_class:
                uRow[1] = speed_by_class[uRow[0]]
            updateCursor.updateRow(uRow)
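
# A usage sketch for wczytaj_dane(): builds a node point class from the endpoints of
# a road line class. Paths are hypothetical; the input must carry a "klasaDrogi"
# (road class) field.
wczytaj_dane(r"C:\Temp\dane.gdb\drogi", r"C:\Temp\dane.gdb\wezly")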
ncurrentstep = 1
#===============================================================================
# CODING
#===============================================================================
#/conversion of the active channel polygons to lines
arcpy.AddMessage("Converting active channel to lines - Step " + str(ncurrentstep) + "/" + str(nstep))
ACtoLine = arcpy.FeatureToLine_management(ActiveChannel, "%ScratchWorkspace%\\ACtoLine", "", "ATTRIBUTES")

ncurrentstep += 1
arcpy.AddMessage("Deleting identical - Step " + str(ncurrentstep) + "/" + str(nstep))
arcpy.DeleteIdentical_management(ACtoLine, ["Shape_Length"])

#/processing the contact length
ncurrentstep += 1
arcpy.AddMessage("Intersecting active channel lines with the disaggregated valley bottom - Step " + str(ncurrentstep) + "/" + str(nstep))
Intersect = arcpy.Intersect_analysis([DisaggregatedValleyBottom, ACtoLine],
                                     "%ScratchWorkspace%\\Intersect", "ALL", "", "LINE")

ncurrentstep += 1
arcpy.AddMessage("Updating the \"Shape_Length\" field - Step " + str(ncurrentstep) + "/" + str(nstep))
UPD_SL.UpToDateShapeLengthField(Intersect)
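
# UPD_SL.UpToDateShapeLengthField() is an external project helper that is not shown
# here. Assuming the intersect output is a shapefile, where "Shape_Length" is an
# ordinary attribute that is not auto-maintained, a minimal sketch of such a helper
# is just a geometry-based recalculation:
def UpToDateShapeLengthField(fc):
    arcpy.CalculateField_management(fc, "Shape_Length", "!shape.length!", "PYTHON")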
# locate points along the cross-section
eventTable = outName + '_ptEvents'
rProps = 'rkey POINT RouteM'
arcpy.AddMessage('Locating ' + zPts + ' on ' + zmLine)
arcpy.LocateFeaturesAlongRoutes_lr(zPts, zmLine, 'ORIG_FID', buff, eventTable, rProps, '#', 'DISTANCE')
arcpy.AddMessage(' ' + eventTable + ' written to ' + arcpy.env.scratchWorkspace)

# remove duplicate records that result from what appears to be
# an unresolved bug in the Locate Features Along Routes tool:
# some points get more than one record in the event table
# with slightly different, sub-mapunit, mValues
try:
    arcpy.DeleteIdentical_management(eventTable, 'ORIG_PTID')
except:
    pass

# place points as events on the cross-section line
eventLyr = '_lyr'
rProps = 'rkey POINT RouteM'
arcpy.MakeRouteEventLayer_lr(zmLine, 'ORIG_FID', eventTable, rProps, eventLyr, '#', 'ERROR_FIELD', 'ANGLE_FIELD', 'TANGENT')
eventPts = outName + '_events'
arcpy.CopyFeatures_management(eventLyr, eventPts)
arcpy.AddMessage(' ' + eventPts + ' feature layer written to ' + arcpy.env.scratchWorkspace)

# add DistanceFromSection and LocalXsAzimuth fields
def route_data_mile(route, park, block):
    new_tbl = str(block)[:-4] + "_" + str(route)[:-4]
    arcpy.CopyRows_management(route, new_tbl)
    route_tbl = str(new_tbl) + "_tvw"
    arcpy.MakeTableView_management(new_tbl, route_tbl)

    # Export table with name, then add additional fields per year or whatever
    arcpy.AddField_management(route_tbl, "GEOID10", "TEXT", "", "", 15, "GEOID10")
    arcpy.AddField_management(route_tbl, "SITE", "TEXT", "", "", 75, "SITE")
    arcpy.AddField_management(route_tbl, "ACRES", "DOUBLE", "", "", "", "ACRES")
    arcpy.AddField_management(route_tbl, "POP", "LONG", "", "", "", "POP")
    arcpy.AddField_management(route_tbl, "ACRE_PP", "DOUBLE", "", "", "", "ACRE_PP")
    arcpy.AddField_management(route_tbl, "PARK_PP", "DOUBLE", "", "", "", "PARK_PP")

    expression1 = "(!Name![0:15])"
    expression2 = "(!Name![18:])"
    expression3 = "(!SITE![:-6])"
    arcpy.CalculateField_management(route_tbl, "GEOID10", expression1, "PYTHON_9.3")
    arcpy.CalculateField_management(route_tbl, "SITE", expression2, "PYTHON_9.3")
    arcpy.CalculateField_management(route_tbl, "SITE", expression3, "PYTHON_9.3")

    arcpy.AddJoin_management(route_tbl, "SITE", park, "NAME")
    field_name_1 = str(park)[:-4]
    expression4 = "(" + "!" + field_name_1 + ".MAP_ACRES!" + ")"
    arcpy.CalculateField_management(route_tbl, "ACRES", expression4, "PYTHON_9.3")
    arcpy.RemoveJoin_management(route_tbl)

    arcpy.AddJoin_management(route_tbl, "GEOID10", block, "GEOID10")
    field_name_2 = str(block)[:-4]
    expression5 = "(" + "!" + field_name_2 + ".POP!" + ")"
    arcpy.CalculateField_management(route_tbl, "POP", expression5, "PYTHON_9.3")
    arcpy.RemoveJoin_management(route_tbl)

    # Deletes rows where GEOID10 AND SITE are duplicates
    arcpy.DeleteIdentical_management(route_tbl, ["GEOID10", "SITE"])

    # summarize SITE by ACRES & POP
    site_tbl = str(route_tbl) + "_stats"
    arcpy.Statistics_analysis(route_tbl, site_tbl, [["ACRES", "MEAN"], ["POP", "SUM"]], "SITE")

    # calculate acres/person & site/person for each park
    arcpy.AddField_management(site_tbl, "ACRE_PP", "DOUBLE", "", "", "", "ACRE_PP")
    arcpy.AddField_management(site_tbl, "PARK_PP", "DOUBLE", "", "", "", "PARK_PP")
    expression6 = "(!MEAN_ACRES!/!SUM_POP!)"
    expression7 = "(1)"
    arcpy.CalculateField_management(site_tbl, "ACRE_PP", expression6, "PYTHON_9.3")
    arcpy.CalculateField_management(site_tbl, "PARK_PP", expression7, "PYTHON_9.3")

    arcpy.AddJoin_management(route_tbl, "SITE", site_tbl, "SITE")
    expression8 = "(!" + site_tbl + ".ACRE_PP!)"
    expression9 = "(!" + site_tbl + ".PARK_PP!)"
    arcpy.CalculateField_management(route_tbl, "ACRE_PP", expression8, "PYTHON_9.3")
    arcpy.CalculateField_management(route_tbl, "PARK_PP", expression9, "PYTHON_9.3")
    arcpy.RemoveJoin_management(route_tbl)

    # Summarize route layer by GEOID
    geoid_tbl = str(route_tbl) + "_geoidStats"
    arcpy.Statistics_analysis(route_tbl, geoid_tbl, [["ACRE_PP", "SUM"], ["PARK_PP", "SUM"]], "GEOID10")

    # join back to block and calculate fields
    arcpy.AddJoin_management(block, "GEOID10", geoid_tbl, "GEOID10")
    expression10 = "(!" + geoid_tbl + ".SUM_ACRE_PP!)"
    expression11 = "(!" + geoid_tbl + ".SUM_PARK_PP!)"
    arcpy.CalculateField_management(block, "ACRE_PP", expression10, "PYTHON_9.3")
    arcpy.CalculateField_management(block, "PARK_PP", expression11, "PYTHON_9.3")
    arcpy.RemoveJoin_management(block)

    with arcpy.da.UpdateCursor(block, ["ACRE_PP", "PARK_PP"]) as cursor:
        for row in cursor:
            if row[0] is None:
                row[0] = 0
            if row[1] is None:
                row[1] = 0
            cursor.updateRow(row)
    del row
    del cursor
    return
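
# A usage sketch for route_data_mile(); inputs are hypothetical and follow the naming
# convention the function relies on (names sliced with [:-4], e.g. a ".shp" suffix):
route_data_mile("service_routes.shp", "parks.shp", "blocks.shp")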