# Task1b.py -- buffer the Streams feature class by 1000 m, allowing any
# existing output to be overwritten.

# Import system modules
import arcpy

# Point arcpy at the Data folder and permit overwriting prior outputs.
arcpy.env.workspace = "V:/ENV859_PS4/Data"
arcpy.env.overwriteOutput = True

# Input streams shapefile (resolved against the workspace).
in_features = "streams.shp"
# Full path for the buffered output feature class.
out_feature_class = "V:/ENV859_PS4/scratch/StrmBuff1km.shp"
# Buffer distance for the analysis.
buffDist = "1000 meters"

# Buffer both sides of the streams with round ends, dissolving the result
# into a single feature.
arcpy.Buffer_analysis(in_features, out_feature_class, buffDist, '', '', 'ALL')
def get_lutype_acreage(fc_project, fc_poly_parcels, lutype):
    """Estimate acreage of a given land-use type within a quarter-mile
    buffer of the project feature.

    Args:
        fc_project: project feature class (line/polygon).
        fc_poly_parcels: polygon parcel feature class carrying the base
            land-use field (p.col_lutype_base).
        lutype: land-use type value to tally.

    Returns:
        dict with total buffer acres, net on-parcel acres within the
        buffer, acres of the requested land-use type, and that type's
        share of net parcel acres within the buffer.
    """
    arcpy.AddMessage("Estimating {} acres near project...".format(lutype))

    fl_parcels = "fl_parcel"
    fl_project = "fl_project"
    for fc, fl in {fc_project: fl_project, fc_poly_parcels: fl_parcels}.items():
        make_fl_conditional(fc, fl)

    # create temporary buffer around the project
    buff_dist = 2640  # distance in feet (quarter mile)
    fc_buff = r"memory\temp_buff_qmi"
    arcpy.Buffer_analysis(fl_project, fc_buff, buff_dist)

    fl_buff = "fl_buff"
    arcpy.MakeFeatureLayer_management(fc_buff, fl_buff)

    # calculate buffer area, inclusive of water bodies and rights of way
    buff_area_ft2 = 0
    with arcpy.da.SearchCursor(fl_buff, ["SHAPE@AREA"]) as cur:
        for row in cur:
            buff_area_ft2 += row[0]
    # convert from ft2 to acres. may need to adjust for projection-related
    # issues. See PPA1 for more info
    buff_acre = buff_area_ft2 / p.ft2acre

    # create intersect layer of buffer with parcels, to "slice" parcels so you
    # only capture parcel portions that are within buffer. We want to do this
    # since we are calculating the percent of total land within buffer that is
    # a given land use.
    fc_intersect = r"memory\temp_intersect"
    arcpy.Intersect_analysis([fl_buff, fl_parcels], fc_intersect, "ALL", "", "INPUT")

    fl_intersect = "fl_intersect"
    make_fl_conditional(fc_intersect, fl_intersect)

    # get total acres within intersect polygons
    tot_net_pclarea_ft2 = 0
    lutype_intersect_ft2 = 0
    with arcpy.da.SearchCursor(fl_intersect, ["SHAPE@AREA", p.col_lutype_base]) as cur:
        for row in cur:
            area = row[0]
            lutype_val = row[1]
            if lutype_val == lutype:
                lutype_intersect_ft2 += area
            tot_net_pclarea_ft2 += area

    # convert to acres
    lutype_intersect_acres = lutype_intersect_ft2 / p.ft2acre
    tot_net_pcl_acres = tot_net_pclarea_ft2 / p.ft2acre

    # guard against divide-by-zero when no parcel area intersects the buffer
    net_pct_lutype = (lutype_intersect_acres / tot_net_pcl_acres
                      if tot_net_pcl_acres > 0 else 0)

    # clean up temporary layers and feature classes (plain loop -- a list
    # comprehension used only for side effects is unidiomatic)
    for item in [fl_parcels, fl_project, fc_buff, fl_buff, fc_intersect, fl_intersect]:
        arcpy.Delete_management(item)

    return {'total_buff_acres': buff_acre,
            'net_onpcl_buff_acres': tot_net_pcl_acres,
            '{}_acres'.format(lutype): lutype_intersect_acres,
            'pct_netPclAcs_{}_inBuff'.format(lutype): net_pct_lutype}
# Build a shapefile of bikeable businesses in San Francisco.
import os
import arcpy

# Inputs resolve against this workspace, so bare shapefile names work below.
arcpy.env.workspace = r"C:\Users\ian.conroy\Desktop\Bay Geo Classes\Python Class\GIS_Data\SF_SHPs"

# Geoprocessing outputs go to a separate folder, so those need full paths.
output_folder = r"C:\Users\ian.conroy\Desktop\Bay Geo Classes\Python Class\Output_Folder"

# The buffered network keeps the input's name but lives in the output folder.
buffered_network = os.path.join(output_folder, 'SF_Bike_Network.shp')

# Buffer the bike network by 25 feet on both sides with round ends.
print('Creating buffer')
arcpy.Buffer_analysis("SF_Bike_Network.shp", buffered_network,
                      "25 Feet", "FULL", "ROUND")

# Keep only the businesses that fall inside the buffered bike routes.
print('Clipping businesses by bike route buffer')
arcpy.Clip_analysis("SF_Businesses.shp", buffered_network,
                    os.path.join(output_folder, 'Bikeable_Businesses.shp'))
import sys  # for printing . :D
import arcpy  # BUG FIX: arcpy was used below but never imported in this chunk

arcpy.env.overwriteOutput = True

# For each buffer width, count how many base-network features fall completely
# within that buffer of the comparison network.
level_count_dict = {}
buffer_increment = [20, 40, 60, 80, 100, 120, 140, 160, 180, 200]

base_network = "input/railway_ln_connected.shp"
comparing_network = "input/NARN_LINE_03162018.shp"
base_network_f = "bnf"
comparing_network_f = "cnf"
memory_m1 = "C:/GIS/memorym1.shp"
memory_m2 = "C:/GIS/memorym2.shp"

arcpy.MakeFeatureLayer_management(base_network, base_network_f)
arcpy.MakeFeatureLayer_management(comparing_network, comparing_network_f)

for buffer in buffer_increment:
    # NEW_SELECTION with no where clause selects every feature.
    arcpy.SelectLayerByAttribute_management(comparing_network_f, "NEW_SELECTION")
    arcpy.Buffer_analysis(comparing_network_f, memory_m1, str(buffer) + " Feet")
    arcpy.Dissolve_management(memory_m1, memory_m2)
    # BUG FIX: the original selected against "in_memory/m2", which nothing
    # ever writes; the dissolved buffer actually lives at memory_m2.
    arcpy.SelectLayerByLocation_management(base_network_f, "COMPLETELY_WITHIN",
                                           memory_m2, "", "NEW_SELECTION")
    fids_list = [_row_[0] for _row_ in arcpy.da.SearchCursor(base_network_f, ['FID'])]
    level_count_dict[buffer] = len(fids_list)
print "Set up complete."
print

# Split Gas Lines by company
# NOTE(review): Gas_Lines, AP_Lines, HNG_Lines, the *_Buffer/_Intersect
# outputs, Counties, and the buffer widths are defined earlier in the file
# (not visible in this chunk).
print "Splitting Gas_Lines layer by company..."
arcpy.Select_analysis(Gas_Lines, AP_Lines, '"company" = \'AP\'')
print "-Created " + AP_Lines + ", number of features:"
print arcpy.GetCount_management(AP_Lines)
arcpy.Select_analysis(Gas_Lines, HNG_Lines, '"company" = \'HNG\'')
print "-Created " + HNG_Lines + ", number of features:"
print arcpy.GetCount_management(HNG_Lines)

# Apply appropriate buffers to each company layer (dissolved to one feature)
print "Buffering " + AP_Lines + " by " + AP_Buffer_Width + "..."
arcpy.Buffer_analysis(AP_Lines, AP_Lines_Buffer, AP_Buffer_Width, "FULL", "ROUND", "ALL")
print "Buffering " + HNG_Lines + " by " + HNG_Buffer_Width + "..."
arcpy.Buffer_analysis(HNG_Lines, HNG_Lines_Buffer, HNG_Buffer_Width, "FULL", "ROUND", "ALL")

# Intersect each buffered company layer with the counties layer
print "Intersecting " + AP_Lines_Buffer + " with " + Counties + "..."
arcpy.Intersect_analysis([AP_Lines_Buffer, Counties], AP_Lines_Intersect)
print "Intersecting " + HNG_Lines_Buffer + " with " + Counties + "..."
arcpy.Intersect_analysis([HNG_Lines_Buffer, Counties], HNG_Lines_Intersect)
print

# Select each feature out of the intersected layers to create the final batch of layers
working_folders = []
garage_layer_name, '', '') # specify input layer as new garages points, convert those points/feature class to the newly created geodatabase, specify path for new garages layer feature input_layer = garages arcpy.FeatureClassToGeodatabase_conversion(garages, gdb_path) garages_layer = gdb_path + '\\' + garage_layer_name # open campusgdb and copy building feature to my gdb campus = 'C:\\Users\\Ljhammond1996\\DevSource\\Hammond_GEOG392\\Lab\\Week04\\04\\Campus.gdb' buildings_campus = campus + '\Structures' buildings = gdb_path + '\\' + 'Buildings' arcpy.Copy_management(buildings_campus, buildings) #re-project the layers spatial_ref = arcpy.Describe(buildings).spatialReference arcpy.Project_management(garages_layer, gdb_path + '\Garage_Points_reprojected', spatial_ref) # create garage buffer layer garageBuffer = arcpy.Buffer_analysis(gdb_path + '\Garage_Points_reprojected', gdb_path + '\Garage_points_buffer', 150) # create garage buffer and buildings intersect arcpy.Intersect_analysis([garageBuffer, buildings], gdb_path + '\Garage_building_intersect', 'ALL') # convert to table arcpy.TableToTable_conversion( gdb_path + '\Garage_building_intersect', 'C:\\Users\\Ljhammond1996\\DevSource\\Hammond_GEOG392\\Lab\Week04', 'Garages_Near_Buildings_Intersect.csv')
# --- Source feature classes (Corvallis geodatabase) ---
Parcel = "E:\\GIS_450\\Database\\Corvallis.gdb\\Parcel"
Schools = "E:\\GIS_450\\Database\\Corvallis.gdb\\Schools"
Parks = "E:\\GIS_450\\Database\\Corvallis.gdb\\Parks"
Railroad = "E:\\GIS_450\\Database\\Corvallis.gdb\\Railroad"

# --- Output paths (user's default geodatabase) ---
Residential_Parcels = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Parcel_Select"
schoolBuffer = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Schools_Buffer"
parksBuffer = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Parks_Buffer"
railroadBuffer = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Railroad_Buffer"
schoolResClip = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Schools_Buffer_Clip1"
schoolParksRes = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Parks_Buffer_Clip"
residentialParcels = "C:\\Users\\Rob\\Documents\\ArcGIS\\Default.gdb\\Railroad_Buffer_Erase"

# Half-mile buffers around parks, then schools (no dissolve).
arcpy.Buffer_analysis(Parks, parksBuffer, "0.5 Miles", "FULL", "ROUND", "NONE", "")
arcpy.Buffer_analysis(Schools, schoolBuffer, "0.5 Miles", "FULL", "ROUND", "NONE", "")

# Residential parcels: Benton County zoning codes starting with 'R'.
arcpy.Select_analysis(Parcel, Residential_Parcels, "\"BENTON_ZON\" LIKE 'R%'")

# Clip school buffers to residential parcels, then park buffers to that result.
arcpy.Clip_analysis(schoolBuffer, Residential_Parcels, schoolResClip, "")
arcpy.Clip_analysis(parksBuffer, schoolResClip, schoolParksRes, "")

# Process: Buffer (3)
import arcpy as arc

# Geoprocessing environment: SanJuan geodatabase, allow output overwrites.
arc.env.workspace = "C:\\EsriTraining\\PythonGP10_0\\Data\\SanJuan.gdb"
# boolean data type can also be 1 for true and 0 for false
arc.env.overwriteOutput = True

# Buffer the Lakes and Streams feature classes by 1 km each, dissolved.
bufferList = []
for fc in arc.ListFeatureClasses():
    if fc in ("Lakes", "Streams"):
        out_name = fc + "Buffer"
        arc.Buffer_analysis(fc, out_name, "1000 meters", dissolve_option="ALL")
        bufferList.append(out_name)

# Union the two buffers, then buffer the union by a further 1 km.
arc.Union_analysis(bufferList, "WaterBuffers")
waterBuffers = "WaterBuffers"
arc.Buffer_analysis(waterBuffers, "WaterBuffer", "1000 meters", dissolve_option="ALL")
print("Union and dissolve Finished")
# Create a 200-mile buffer around California nuclear reactors.
import arcpy

# Personal geodatabase holding the reactor points.
arcpy.env.workspace = "C:/CapStnPrj/CapTest/CapTest.mdb"

# Dissolve all reactor buffers into a single feature ("all").
arcpy.Buffer_analysis("CAnr", "Npp200miBufPy", "200 miles", "", "", "all")
# 1. Calculate annual mean precipitation per State-HUC8 from 1970 - 2018 # Reproject State_HUC8 feature class to GCS NAD83 and PCS UTM12N output_CRS = arcpy.SpatialReference('NAD 1983 UTM Zone 12N') state_HUC8_NAD83_UTM12N = os.path.join(geodatabase, 'state_HUC8_NAD83_UTM12N') arcpy.Project_management(in_dataset=fc_huc8_original, out_dataset=state_HUC8_NAD83_UTM12N, out_coor_system=output_CRS) # Create a buffered version of the NAD83 UTM12N reprojected feature class state_HUC8_NAD83_UTM12N_buffered = os.path.join( geodatabase, 'state_HUC8_NAD83_UTM12N_buffered') arcpy.Buffer_analysis(in_features=state_HUC8_NAD83_UTM12N, out_feature_class=state_HUC8_NAD83_UTM12N_buffered, buffer_distance_or_field='20000 Meters', dissolve_option='ALL') # Create list of rasters by using a list comprehension to collect the yearly average raster while iterating through nested directories recursively list_prism_rasters = [ os.path.join(dirpath, f) for dirpath, dirnames, filenames in os.walk(path_prism_directory) for f in fnmatch.filter(filenames, 'PRISM_ppt_stable_4kmM?_????_bil.bil') ] list_prism_reprojected = [] for p in list_prism_rasters: year = re.split('[_.]', os.path.basename(p))[4] reprojected_raster = os.path.join(geodatabase,
def execute(self, parameters, messages): """The source code of the tool.""" # local variables and env arcpy.env.workspace = "E:/gina/poker/pip" adnr_lo_shp = "E:/gina/poker/shp/wip/land_ownership_data/adnr_gls_dls_merge_20170823_v1.shp" pfrr_popn_places = "E:/gina/poker/shp/wip/popn_places_data/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp" pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf" pip_point_shp = "E:/gina/poker/pip/pip_point.shp" pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp" pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp" pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp" pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf" pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv" pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp" x = parameters[0].valueAsText y = parameters[1].valueAsText r = parameters[2].valueAsText + " NauticalMiles" pipLayer = "pipLayer" srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic") intersect_fc1 = [adnr_lo_shp, pip_buffer_shp] intersect_fc2 = [pfrr_popn_places, pip_buffer_shp] mxd = arcpy.mapping.MapDocument("current") dataframe = arcpy.mapping.ListDataFrames(mxd)[0] sourceLoSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/lo2.lyr") sourcePipSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/pip.lyr") # Process: Calculate Lon Field arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "") # Process: Calculate Lat Field arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "") # Process: Make XY Event Layer arcpy.MakeXYEventLayer_management( pipTable, "Lon", "Lat", pipLayer, "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision", "") # Process: Copy Features arcpy.CopyFeatures_management(pipLayer, pip_point_shp, 
"", "0", "0", "0") # Process: Project pip point arcpy.Project_management(pip_point_shp, pip_point_3338, srs) # Process: Buffer pip point arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL", "ROUND", "NONE", "", "PLANAR") # Process: Intersect pip buffer with land ownership arcpy.Intersect_analysis(intersect_fc1, pip_lo_in_buffer_shp, "ALL", "", "INPUT") # Process: Intersect pip buffer with popn places arcpy.Intersect_analysis(intersect_fc2, pip_popn_places_in_buffer_shp, "ALL", "", "INPUT") # Process: Make feature layers and add to the map arcpy.MakeFeatureLayer_management(pip_point_3338, "Predicted Impact Point") arcpy.MakeFeatureLayer_management( pip_lo_in_buffer_shp, "Land Onwership within 3sigma of Predicted Impact Point") arcpy.MakeFeatureLayer_management( pip_popn_places_in_buffer_shp, "Populated Places within 3sigma of Predicted Impact Point") addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, addPipPointLayer) add3sigmaLoLayer = arcpy.mapping.Layer( "Land Onwership within 3sigma of Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer) addPipPopnPlacesLayer = arcpy.mapping.Layer( "Populated Places within 3sigma of Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer) # Add and calc Acres field for intersected Land Ownership arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE") arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres", "!shape.area@acres!", "PYTHON_9.3", "") # Summarize intersected Land Ownership by Owner and total Acres arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf, "Acres SUM", "OWNER") # arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf) add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf) arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl) # Symbolize and Refresh layers = arcpy.mapping.ListLayers(mxd) arcpy.mapping.UpdateLayer(dataframe, layers[2], 
sourceLoSymbologyLayer, True) layers[2].symbology.addAllValues() arcpy.mapping.UpdateLayer(dataframe, layers[1], sourcePipSymbologyLayer, True) arcpy.RefreshTOC() arcpy.RefreshActiveView() return
# Build the buffer output path: <outputGB>\<featureClass basename>_buff.
# os.path.join inserts the separator only when outputGB lacks a trailing
# backslash, which replaces the original manual length/last-character check.
bufferOutput = os.path.join(outputGB, os.path.basename(featureClass) + "_buff")
# print("The bufferOutput variable is set to " + bufferOutput)

# Buffer the feature class using the per-feature distance field.
arcpy.Buffer_analysis(featureClass, bufferOutput, integerField)

# Build the clip output path the same way: <outputGB>\<clip basename>_clip.
clipOutput = os.path.join(outputGB, os.path.basename(featureClassClip) + "_clip")
# print("Second try: Clip output" + clipOutput)
def get_npmrds_data():
    # NOTE(review): as received, the entire run -- including the __main__
    # guard -- sits inside this function, so calling it only does work when
    # the defining module is run as a script. Confirm this nesting against
    # the original file's indentation.
    #=====================RUN SCRIPT===========================
    if __name__ == '__main__':
        start_time = time.time()
        workspace = r'I:\Projects\Darren\PPA_V2_GIS\scratch.gdb'
        arcpy.env.workspace = workspace

        # Tool parameters: project line feature class, name, type, output dir.
        project_line = arcpy.GetParameterAsText(0)  #"NPMRDS_confl_testseg_seconn"
        proj_name = arcpy.GetParameterAsText(1)  #"TestProj"
        proj_type = arcpy.GetParameterAsText(2)  #"Freeway"
        output_dir = arcpy.GetParameterAsText(3)  #r'I:\Projects\Darren\PPA_V2_GIS\Temp\Script Test Outputs'

        #NPMRDS data parameters -- consider putting all of these into a separate "config" python script
        speed_data = r"I:\Projects\Darren\PPA_V2_GIS\scratch.gdb\npmrds_metrics_v6"
        ff_speed = "ff_speed"
        congest_speed = "havg_spd_worst4hrs"
        reliab_ampk = "lottr_ampk"
        fld_tmcdir = "direction_signd"
        # indicates if road is freeway or not, so that data from freeways
        # doesn't affect data on surface streets, and vice-versa
        fld_roadtype = "f_system"
        roadtypes_fwy = (1, 2)  #road type values corresponding to freeways
        #might want to make dict to enable working with multiple direction formats (e.g., {"N":"NORTHBOUND", "S":"SOUTHBOUND"...} etc.)
        directions_tmc = ["NORTHBOUND", "SOUTHBOUND", "EASTBOUND", "WESTBOUND"]  #can modify this depending on what directions you want to consider

        #------------------no more user input below here, at least normally---------------
        flds_speed_data = [ff_speed, congest_speed, reliab_ampk]  #'avspd_3p6p','congn_6a9a','congn_3p6p'

        #create temporar buffer layer, flat-tipped, around TMCs; will be used to split project lines
        temp_tmcbuff = "TEMP_tmcbuff_4projsplit"
        buff_dist_ft = 90  #buffer distance, in feet, around the TMCs

        #select TMCs that intersect project lines
        fl_project = "fl_project"
        #fld_proj_len = "proj_len"
        fld_proj_name = "proj_name"
        fl_speed_data = "fl_speed_data"
        fl_tmc_buff = "fl_tmc_buff"

        #add fields for project length and name
        arcpy.AddField_management(project_line, fld_proj_name, "TEXT")

        #make feature layers of NPMRDS and project line
        arcpy.MakeFeatureLayer_management(project_line, fl_project)
        arcpy.MakeFeatureLayer_management(speed_data, fl_speed_data)

        #populate the length and name fields
        # NOTE(review): calc_add_len is defined but not used in this chunk.
        calc_add_len = "!shape.length@feet!"
        calc_set_proj_name = "'{}'".format(proj_name)
        arcpy.CalculateField_management(project_line, fld_proj_name, calc_set_proj_name, "PYTHON")

        #make flat-ended buffers around TMCs that intersect project
        arcpy.SelectLayerByLocation_management(fl_speed_data, "WITHIN_A_DISTANCE", fl_project, 300, "NEW_SELECTION")
        if proj_type == 'Freeway':
            sql = "{} IN {}".format(fld_roadtype, roadtypes_fwy)
            arcpy.SelectLayerByAttribute_management(fl_speed_data, "SUBSET_SELECTION", sql)
        else:
            sql = "{} NOT IN {}".format(fld_roadtype, roadtypes_fwy)
            arcpy.SelectLayerByAttribute_management(fl_speed_data, "SUBSET_SELECTION", sql)
        arcpy.Buffer_analysis(fl_speed_data, temp_tmcbuff, buff_dist_ft, "FULL", "FLAT")
        arcpy.MakeFeatureLayer_management(temp_tmcbuff, fl_tmc_buff)

        out_rows = []
        #in theory this should only be one project line, but want to keep flexible in case
        #we want batch version in future.
        conflate_tmc2projline(fl_project, fld_proj_name, proj_name, directions_tmc,
                              fld_tmcdir, fl_tmc_buff, flds_speed_data, out_rows)

        df_projdata = pd.DataFrame(out_rows)
        out_df = simplify_outputs(df_projdata, 'proj_length_ft', 'ID')
        #out_df.iloc[0].to_dict() will write to dict in {field:value} format--might be good for ESRI CSV
        output_tstamp = str(dt.datetime.now().strftime('%m%d%Y_%H%M'))
        output_csv = os.path.join(output_dir, "projlin_conflation_{}.csv".format(output_tstamp))
        arcpy.AddMessage("writing to {}...".format(output_csv))
        out_df.to_csv(output_csv, index=False)
        #NEXT STEP = join dataframe to project feature class, then look at map and do reasonableness check
        #also, how to quickly mention and bypass projects that don't intersect TMCs?
        #also, define a logical column order for the DF before doing join

        elapsed_time = round((time.time() - start_time) / 60, 1)
        print("Success! Time elapsed: {} minutes".format(elapsed_time))
# Name: module1 # Purpose: # # Author: nkolarik # # Created: 07/02/2018 # Copyright: (c) nkolarik 2018 # Licence: <your licence> #------------------------------------------------------------------------------- import arcpy import os #rootdir = "enter directory here" cats = os.listdir(rootdir) for cat in cats: for file in os.listdir(rootdir + cat): if file.endswith("clip.shp"): arcpy.Buffer_analysis( rootdir + cat + "\\" + file, rootdir + cat + "\\" + cat + "_buff\\" + file.split(".")[0] + "_buff", "Radius") print(file + " Buffed out") def main(): pass if __name__ == '__main__': main()
# Local variables: Bus_Stops = r"C:\Projects\SanFrancisco.gdb\SanFrancisco\Bus_Stops" CensusBlocks2010 = r"C:\Projects\SanFrancisco.gdb\SanFrancisco\CensusBlocks2010" Inbound71 = r"C:\Projects\SanFrancisco.gdb\Chapter2Results\Inbound71" Inbound71_400ft_buffer = r"C:\Projects\SanFrancisco.gdb\Chapter2Results\Inbound71_400ft_buffer" Intersect71Census = r"C:\Projects\SanFrancisco.gdb\Chapter2Results\Intersect71Census" # Process: Select arcpy.Select_analysis(Bus_Stops, Inbound71, "NAME = '71 IB' AND BUS_SIGNAG = 'Ferry Plaza'") # Process: Buffer arcpy.Buffer_analysis(Inbound71, Inbound71_400ft_buffer, "400 Feet", "FULL", "ROUND", "NONE", "") # Process: Intersect arcpy.Intersect_analysis([Inbound71_400ft_buffer,CensusBlocks2010], Intersect71Census, "ALL", "", "INPUT") # Organize the census block population using a dictionary # The stop ID is the key and the census block populations are added to a list as the value dataDictionary = {} with arcpy.da.SearchCursor(Intersect71Census, ["STOPID","POP10"]) as cursor: for row in cursor: busStopID = row[0] pop10 = row[1] if busStopID not in dataDictionary.keys(): dataDictionary[busStopID] = [pop10]
# the whole string is spatialReference_new = str_bef + str_cent_med + str_parall + str_after # projection arcpy.Project_management(in_dataset="CENTER_TMP", out_dataset="CENTER_PROJ", out_coor_system=spatialReference_new, transform_method="", in_coor_system=spatialReference, preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL") # buffer creation arcpy.Buffer_analysis("CENTER_PROJ", out, bufferField, sideType, endType, dissolveType) # run feature to envelope tool arcpy.FeatureEnvelopeToPolygon_management("miniarea_TMP", "miniarea_square", "SINGLEPART") #reproject in normal (basic for SLDEM2013) #arcpy.Project_management(in_dataset="miniarea_square", out_dataset="miniarea_square2", out_coor_system=spatialReference_DTM_first, transform_method="", in_coor_system=spatialReference_new, preserve_shape="NO_PRESERVE_SHAPE", max_deviation="", vertical="NO_VERTICAL") # get the extent of miniarea_square (but this is in the new coordinates! So there is a problem in the next steps desc_extent = arcpy.Describe("miniarea_square") extent = desc_extent.extent top = extent.YMax bottom = extent.YMin left = extent.XMin
arcpy.AddXY_management(outPointFCPath) # Create point layer object outPointLayer = arcpy.mapping.Layer(outPointFCPath) # Add point layer object to top of mxd data frame TOC arcpy.mapping.AddLayer(df, outPointLayer, "TOP") # Assign layer symbology arcpy.ApplySymbologyFromLayer_management(outPointLayer, inPointLayer) # ---------------- # Set Buffer1 # ---------------- # Create buffer feature class arcpy.Buffer_analysis(outPointFCPath, outBuffer1FCPath, str(buffer1Distance) + " Meters") # Create buffer layer object outBuffer1Layer = arcpy.mapping.Layer(outBuffer1FCPath) # Insert buffer layer object into mxd data frame arcpy.mapping.InsertLayer(df, outPointLayer, outBuffer1Layer, "AFTER") # ---------------- # Set Buffer2 # ---------------- # Create buffer feature class arcpy.Buffer_analysis(outPointFCPath, outBuffer2FCPath, str(buffer2Distance) + " Meters") # Create buffer layer object
def execute(self, parameters, messages): """The source code of the tool.""" # local variables and env arcpy.CreateFileGDB_management("E:/gina/poker/gdb", parameters[0].valueAsText) arcpy.env.workspace = "E:/gina/poker/gdb/" + parameters[ 0].valueAsText + ".gdb" arcpy.env.overwriteOutput = True adnr_lo_shp = "E:/gina/poker/shp/wip/land_ownership_data/adnr_gls_dls_merge_20170823_v1.shp" pfrr_popn_places = "E:/gina/poker/shp/wip/popn_places_data/pokerflat_popn_places_gcs_wgs84_to_akalbers_2.shp" afs_known_sites = "E:/gina/poker/shp/asf_data/asf_known_sites_20180629_3338.shp" pipTable = "E:/gina/poker/dbf/predicted_impact_xy.dbf" pip_point_shp = "E:/gina/poker/pip/pip_point.shp" pip_point_3338 = "E:/gina/poker/pip/pip_point_3338.shp" pip_buffer_shp = "E:/gina/poker/pip/pip_buffer.shp" pip_range_rings_shp = "E:/gina/poker/pip/pip_range_rings.shp" pip_lo_in_buffer_shp = "E:/gina/poker/pip/pip_lo_in_buffer.shp" pip_lo_in_buf_sum_dbf = "E:/gina/poker/pip/pip_lo_in_buf_sum.dbf" pip_lo_in_buf_sum_csv = "E:/gina/poker/pip/pip_lo_in_buf_sum.csv" pip_popn_places_in_buffer_shp = "E:/gina/poker/pip/pip_popn_places_in_buffer.shp" pip_known_sites_in_buffer_shp = "E:/gina/poker/pip/pip_known_sites_in_buffer.shp" x = parameters[1].valueAsText y = parameters[2].valueAsText r = parameters[3].valueAsText + " NauticalMiles" rr1 = (float(parameters[3].valueAsText)) / 3 rr2 = (rr1 * 2) rrs = str(rr1) + ";" + str(rr2) + ";" + r.split(" ")[0] pipLayer = "pipLayer1" srs = arcpy.SpatialReference("Alaska Albers Equal Area Conic") intersect_fc1 = [adnr_lo_shp, pip_buffer_shp] intersect_fc2 = [pfrr_popn_places, pip_buffer_shp] intersect_fc3 = [afs_known_sites, pip_buffer_shp] mxd = arcpy.mapping.MapDocument("current") dataframe = arcpy.mapping.ListDataFrames(mxd)[0] sourceLoSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/lo2.lyr") sourcePipSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/pip2.lyr") sourceRrsSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/rrs.lyr") 
sourcePopSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/pop.lyr") sourceAfsSymbologyLayer = arcpy.mapping.Layer( "E:/gina/poker/lyr/afs2.lyr") # Process: Calculate Lon Field arcpy.CalculateField_management(pipTable, "Lon", x, "PYTHON", "") # Process: Calculate Lat Field arcpy.CalculateField_management(pipTable, "Lat", y, "PYTHON", "") # Process: Make XY Event Layer arcpy.MakeXYEventLayer_management( pipTable, "Lon", "Lat", pipLayer, "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision", "") # Process: Copy Features arcpy.CopyFeatures_management(pipLayer, pip_point_shp, "", "0", "0", "0") # Process: Project pip point arcpy.Project_management(pip_point_shp, pip_point_3338, srs) # Process: Buffer pip point arcpy.Buffer_analysis(pip_point_3338, pip_buffer_shp, r, "FULL", "ROUND", "NONE", "", "PLANAR") # Process: Multiple Ring Buffer arcpy.MultipleRingBuffer_analysis(pip_point_3338, pip_range_rings_shp, rrs, "NauticalMiles", "", "NONE", "FULL") # Process: Intersect pip buffer with land ownership arcpy.Intersect_analysis(intersect_fc1, pip_lo_in_buffer_shp, "ALL", "", "INPUT") # Process: Intersect pip buffer with popn places arcpy.Intersect_analysis(intersect_fc2, pip_popn_places_in_buffer_shp, "ALL", "", "INPUT") # Process: Intersect pip buffer with afs known sites arcpy.Intersect_analysis(intersect_fc3, pip_known_sites_in_buffer_shp, "ALL", "", "INPUT") # Process: Make feature layers and add to the map ## pip feature class list fclist = arcpy.ListFeatureClasses() ## pip layer arcpy.MakeFeatureLayer_management(pip_point_3338, "Predicted Impact Point") ## land ownership layer arcpy.MakeFeatureLayer_management( pip_lo_in_buffer_shp, "Land Ownership within 3sigma of Predicted Impact Point") ## Range Rings arcpy.MakeFeatureLayer_management(pip_range_rings_shp, "Range Rings") 
## populated places layer popn_places_records = int( arcpy.GetCount_management(pip_popn_places_in_buffer_shp).getOutput( 0)) if popn_places_records > 0: arcpy.MakeFeatureLayer_management( pip_popn_places_in_buffer_shp, "Populated Places within 3sigma of Predicted Impact Point") addPipPopnPlacesLayer = arcpy.mapping.Layer( "Populated Places within 3sigma of Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, addPipPopnPlacesLayer) ## known sites layer known_sites_records = int( arcpy.GetCount_management(pip_known_sites_in_buffer_shp).getOutput( 0)) if known_sites_records > 0: arcpy.MakeFeatureLayer_management( pip_known_sites_in_buffer_shp, "AFS Known Sites within 3sigma of Predicted Impact Point") addPipKnownSitesLayer = arcpy.mapping.Layer( "AFS Known Sites within 3sigma of Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, addPipKnownSitesLayer) addPipPointLayer = arcpy.mapping.Layer("Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, addPipPointLayer) add3sigmaLoLayer = arcpy.mapping.Layer( "Land Ownership within 3sigma of Predicted Impact Point") arcpy.mapping.AddLayer(dataframe, add3sigmaLoLayer) addRangeRings = arcpy.mapping.Layer("Range Rings") arcpy.mapping.AddLayer(dataframe, addRangeRings) # Add and calc Acres field for intersected Land Ownership arcpy.AddField_management(pip_lo_in_buffer_shp, "Acres", "DOUBLE") arcpy.CalculateField_management(pip_lo_in_buffer_shp, "Acres", "!shape.area@acres!", "PYTHON_9.3", "") # Summarize intersected Land Ownership by Owner and total Acres arcpy.Statistics_analysis(pip_lo_in_buffer_shp, pip_lo_in_buf_sum_dbf, "Acres SUM", "OWNER") arcpy.MakeTableView_management(pip_lo_in_buf_sum_dbf) add3sigmaLoSumTbl = arcpy.mapping.TableView(pip_lo_in_buf_sum_dbf) arcpy.mapping.AddTableView(dataframe, add3sigmaLoSumTbl) # Symbolize and Refresh lo_layer = arcpy.mapping.ListLayers( mxd, "*Land Ownership within 3sigma of Predicted Impact Point*", dataframe)[0] arcpy.mapping.UpdateLayer(dataframe, lo_layer, 
sourceLoSymbologyLayer, True) lo_layer.symbology.addAllValues() pip_layer = arcpy.mapping.ListLayers(mxd, "*Predicted Impact Point*", dataframe)[0] arcpy.mapping.UpdateLayer(dataframe, pip_layer, sourcePipSymbologyLayer, True) rr_layer = arcpy.mapping.ListLayers(mxd, "*Range Rings*", dataframe)[0] arcpy.mapping.UpdateLayer(dataframe, rr_layer, sourceRrsSymbologyLayer, True) pop_layer = arcpy.mapping.ListLayers(mxd, "*Populated Places*", dataframe)[0] arcpy.mapping.UpdateLayer(dataframe, pop_layer, sourcePopSymbologyLayer, True) afs_layer = arcpy.mapping.ListLayers(mxd, "*Known Sites*", dataframe)[0] arcpy.mapping.UpdateLayer(dataframe, afs_layer, sourceAfsSymbologyLayer, True) arcpy.RefreshTOC() arcpy.RefreshActiveView() # Populate Mission GDB mission_layers = [ pip_point_3338, pip_lo_in_buffer_shp, pip_popn_places_in_buffer_shp, pip_range_rings_shp, pip_known_sites_in_buffer_shp ] arcpy.FeatureClassToGeodatabase_conversion(mission_layers, arcpy.env.workspace) return
from arcpy.sa import * env.overwriteOutput = True arcpy.CheckOutExtension("Spatial") env.extent = "MAXOF" arcpy.env.workspace = "E:\gra_stu\paper2\data.gdb" # make sure it is not too long DEM = "DEM_modeled_drainage_network" #the coordinate must be the same RS_vector = "RS_mapped_river_network" # RS vector river RS_Raster = "RS_water_mask" # RS raster mask cellsize = 10 delete_length = 300 # area less than 300 m2 will be deleted ##delete error buffer_DEM_clip = "buffer50_" + DEM arcpy.Buffer_analysis(DEM, buffer_DEM_clip, "50 Meters", "FULL", "ROUND", "ALL") RS = "clip_" + RS_vector arcpy.Intersect_analysis([RS_vector, buffer_DEM_clip], RS, "ALL", "0", "LINE") #(RS_vector,buffer_DEM_clip,RS) print "finish delete error" ##snap DEM to RS arcpy.Snap_edit(DEM, [[RS, "EDGE", "50 Meters"]]) print "finish snap" ##Get RS and DEM raster buffer_DEM = "snaped_buffer50_" + DEM arcpy.Buffer_analysis(DEM, buffer_DEM, "50 Meters", "FULL", "ROUND", "ALL") arcpy.AddField_management(buffer_DEM, "value", "LONG", "", "") arcpy.CalculateField_management(buffer_DEM, "value", "2", "PYTHON_9.3")
#-------------------------------------------------------------------------------
# This recipe shows how to obtain the output of a tool and use it as input to
# another tool. The script runs inside ArcMap.
# import module
import arcpy

# save workspace
arcpy.env.workspace = "c:/ArcpyBook/data/TravisCounty"
# Buffer the streams, then select the schools that fall inside the buffer.
try:
    # buffer areas of impact around the streams (2640 Feet = half a mile)
    streams = "Streams.shp"
    streamsBuffer = "StreamsBuffer.shp"
    distance = "2640 Feet"
    schools2mile = "Schools.shp"
    schoolsLyrFile = 'Schools2Mile_lyr'

    # execute buffer tool (both sides, round ends, dissolved to one feature)
    arcpy.Buffer_analysis(streams, streamsBuffer, distance, 'FULL', 'ROUND', 'ALL')

    # create a temporary layer for the schools
    arcpy.MakeFeatureLayer_management(schools2mile, schoolsLyrFile)

    # select all schools within a half mile of a stream
    arcpy.SelectLayerByLocation_management(schoolsLyrFile, 'intersect', streamsBuffer)
# catch and report any geoprocessing error
except Exception as e:
    # Fix: print(e) instead of print(e.message) — BaseException.message was
    # deprecated in Python 2.6 and removed entirely in Python 3.
    print(e)
    # NOTE(review): fragment — the matching `if` branch (which sets bufferSize
    # from known linear units) and the enclosing function are outside this
    # view; indentation below is a best-effort reconstruction.
    AddMsgAndPrint("\nBuffer size applied on Culverts: " + bufferSize)
else:
    # Linear units of the DEM are unknown: fall back to one cell width.
    bufferSize = str(demCellSize) + " Unknown"
    AddMsgAndPrint(
        "\nBuffer size applied on Culverts: Equivalent of 1 pixel since linear units are unknown",
        0)

# Buffer the culverts to 1 pixel
culvertBuffered = arcpy.CreateScratchName(
    "culvertBuffered", data_type="FeatureClass", workspace="in_memory")
arcpy.Buffer_analysis(culverts, culvertBuffered, bufferSize, "FULL", "ROUND",
                      "NONE", "")

# Dummy field just to execute Zonal stats on each feature — copy the OID into
# a text ZONE field so every buffered culvert is its own zone.
expression = "!" + arcpy.da.Describe(
    culvertBuffered)['OIDFieldName'] + "!"
arcpy.AddField_management(culvertBuffered, "ZONE", "TEXT", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED")
arcpy.CalculateField_management(culvertBuffered, "ZONE", expression,
                                "PYTHON3")

# Get the minimum elevation value for each culvert
culvertRaster = watershedGDB_path + os.sep + "culvertRaster"
culvertMinValue = ZonalStatistics(culvertBuffered, "ZONE", DEM_aoi, "MINIMUM",
# Buffer the airports shapefile by 5 km. CreateUniqueName guarantees a fresh
# output name (it appends a number when Results/buffer.shp already exists),
# so repeated runs never collide with an earlier result.
import arcpy
from arcpy import env

env.workspace = "C:/GEO6533/Python/Data/Exercise07"

# input features and collision-free output path
fc = "airports.shp"
unique_name = arcpy.CreateUniqueName("Results/buffer.shp")

# run the buffer with all other Buffer_analysis options left at their defaults
arcpy.Buffer_analysis(fc, unique_name, "5000 METERS")
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~## #--- label clustering setup, used in sewermapbook mxd --- #set mxd mxd = arcpy.mapping.MapDocument( r'G:\Field_Assets\Wastewater\MapBookPrinting\MXD\Sewer_MapBooks.mxd') lyrs = [ 'CleanOuts', 'Fittings', 'GreaseTraps', 'LiftStation', 'Manholes', 'MeterStation', 'NetworkStructures', 'PumpStation' ] #create 25ft buffer fcs for lyr in arcpy.mapping.ListLayers(mxd): if lyr.name in lyrs: fc_name = lyr.name + '_buff' arcpy.Buffer_analysis(lyr, fc_name, "25 feet", "", "", "ALL", "", "PLANAR") #merge all buffer fcs if arcpy.Exists('AllFeatures_Merge'): arcpy.Delete_management('AllFeatures_Merge') if arcpy.Exists('AllFeatures_Dissolve'): arcpy.Delete_management('AllFeatures_Dissolve') if arcpy.Exists('AllFeatures_Dissolve_Multi'): arcpy.Delete_management('AllFeatures_Dissolve_Multi') arcpy.Merge_management( [fc for fc in arcpy.ListFeatureClasses(feature_type='Polygon')], 'AllFeatures_Merge') #dissolve merge fc from previous step arcpy.Dissolve_management('AllFeatures_Merge', 'AllFeatures_Dissolve', "", "",
def makeBuffer(inTable, pointType, distance, route):
    """Buffer *inTable* by *distance* and return the Buffer_analysis result.

    The output feature class name is built by concatenating pointType, the
    literal "buffer", and route (e.g. "Stops" + "buffer" + "12").
    """
    output_name = pointType + "buffer" + route
    return arcpy.Buffer_analysis(inTable, output_name, distance)
def main():
    """Create Selection.

    Drives per-category processing from the module-level display_List: each
    while-loop consumes one marker ('Curb', 'Sidewalk', 'Street'), copies and
    filters the matching SDE feature classes, and loads the results into the
    *_DisplayFinal targets via the identical() helper.

    Relies on module globals not visible in this block (display_List,
    SDE_Base, sdeName, sql, cceNo, scratch, target, targetName, env) and on
    the helpers identical() and GlobalID_Function() defined elsewhere in the
    file. Python 2 script (print statements, old-style UpdateCursor).
    """
    # --- Curb: single SDE feature class, loaded directly (no buffering) ---
    while 'Curb' in display_List:
        env.overwriteOutput = True
        arcpy.AddMessage("Processing Curb data")
        GID_bool = False
        display_List.remove("Curb")
        SDE_FC = SDE_Base + '\\' + sdeName + "CurbGutter"
        SDE_memory = r"\in_memory"
        SDE_FC_test = arcpy.MakeFeatureLayer_management(
            SDE_FC, SDE_memory, sql)
        print "Created Feature Layer..."
        print "Now loading identical function..."
        # Establish target_Display variable to hit the Final Displays for curbs (Polyline)
        target_Display = target + '\\' + targetName + 'CurbMeasure_DisplayFinal'
        identical(SDE_FC_test, target_Display, sql, GID_bool)
        print display_List
        print "Return..."

    # --- Sidewalk: copy each sidewalk-related polyline FC, tag some with a
    # MeasureType value, merge them, buffer by half of MeasureWidth, then
    # load the buffered polygons into the display target ---
    while 'Sidewalk' in display_List:
        arcpy.AddMessage("Processing Sidewalk data")
        display_List.remove("Sidewalk")
        # Create list for feature class buffer analysis and Create Merge Feature Class
        fc_BuffList = []
        """Update to allow Omit = Yes - 2/3/14"""
        sql_N = "\"strCCEProjectNumber\" = '" + cceNo + "'"
        # NOTE(review): sql_Y is built but never used in this view — confirm
        # whether the Omit = 'Yes' filter was meant to be applied somewhere.
        sql_Y = "\"strCCEProjectNumber\" = '" + cceNo + "' AND \"OmitYN\" = 'Yes'"
        # Establish a list of Polyline Feature Classes for Buffer
        env.workspace = SDE_Base
        fc_List = arcpy.ListFeatureClasses("*", "POLYLINE")
        print fc_List
        for fc in fc_List:
            print fc
            if fc == sdeName + 'Sidewalk':
                GID_bool = True
                fcTemp = scratch + '\\' + 'Sidewalk'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "sidewalkTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'SidewalkOmit':
                GID_bool = True
                fcTemp = scratch + '\\' + 'SidewalkOmit'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "sidewalkOmitTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                # Tag every copied row so the merged FC keeps its origin.
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "SidewalkOmit")
                    cursor.updateRow(row)
                    #print "SidewalkOmit row updated..."
                del cursor
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'SidewalkRamp':
                GID_bool = True
                fcTemp = scratch + '\\' + 'SidewalkRamp'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "sidewalkRampTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'SidewalkRampOmit':
                GID_bool = True
                fcTemp = scratch + '\\' + 'SidewalkRampOmit'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "sidewalkRampOmitTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "SidewalkRampOmit")
                    cursor.updateRow(row)
                    #print "SidewalkRampOmit row updated..."
                del cursor
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'SidewalkRampRemove':
                GID_bool = True
                fcTemp = scratch + '\\' + 'SidewalkRampRemove'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "sidewalkRampRemoveTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "SidewalkRampRemove")
                    cursor.updateRow(row)
                    #print "SidewalkRampRemove row updated..."
                del cursor
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'AlleyDriveway':
                GID_bool = True
                fcTemp = scratch + '\\' + 'AlleyDriveway'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "alleyDrivewayTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'AlleyPavement':
                GID_bool = True
                fcTemp = scratch + '\\' + 'AlleyPavement'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "alleyPavementTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'ConcreteBusPad':
                GID_bool = True
                fcTemp = scratch + '\\' + 'ConcreteBusPad'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "concreteBusPadTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "ConcreteBusPad")
                    cursor.updateRow(row)
                    #print "ConcreteBusPad row updated..."
                del cursor
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'ConcretePavement':
                GID_bool = True
                fcTemp = scratch + '\\' + 'ConcretePavement'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "concretePavementTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "ConcretePavement")
                    cursor.updateRow(row)
                    #print "ConcretePavement row updated..."
                del cursor
                fc_BuffList.append(fcMem)
            if fc == sdeName + 'DrivewayRemove':
                GID_bool = True
                fcTemp = scratch + '\\' + 'DrivewayRemove'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "drivewayRemoveTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "DrivewayRemove")
                    cursor.updateRow(row)
                    #print "DrivewayRemove row updated..."
                del cursor
                fc_BuffList.append(fcMem)
        print fc_BuffList
        # Merge tool is run on fc_BuffList to create one Feature Class to apply a buff distance field using an update cursor.
        # Buffer tool is then run to create polygon feature class
        FC_merge = arcpy.Merge_management(fc_BuffList,
                                          scratch + '\\' + "MERGE_TEST")
        print "Features have been merged..."
        arcpy.AddField_management(FC_merge, "buffdistance", "LONG", "", "", 9,
                                  "BuffDistance", "NULLABLE")
        print "Buffer Distance field has been added..."
        # Buffer distance = half the measured width, so the FULL buffer spans
        # the whole feature width.
        cursor = arcpy.UpdateCursor(FC_merge)
        for row in cursor:
            z = row.getValue("MeasureWidth")
            w = z / 2
            row.setValue("buffdistance", w)
            print "Buffer of '" + str(w) + "' has been updated to row..."
            cursor.updateRow(row)
        del cursor
        # Buffer Analysis
        SDE_FC_test = arcpy.Buffer_analysis(FC_merge,
                                            scratch + '\\' + "BUFF_TEST",
                                            "buffdistance", "FULL", "FLAT",
                                            "NONE")
        # Establish target_Display variable to hit the Final Displays for sidewalks (Polygon)
        target_Display = target + '\\' + targetName + 'SidewalkDriveway_DisplayFinal'
        # Run Functions
        identical(SDE_FC_test, target_Display, sql, GID_bool)
        del fc_List
        print display_List
        print "Return..."

    # --- Street: copy/tag the street pavement FCs, merge, and load ---
    while 'Street' in display_List:
        arcpy.AddMessage("Processing Street data")
        GID_bool = False
        display_List.remove("Street")
        streetList = []
        """Update to allow Omit = Yes - 2/3/14"""
        sql_N = "\"strCCEProjectNumber\" = '" + cceNo + "'"
        env.workspace = SDE_Base
        fc_List = arcpy.ListFeatureClasses("*", "POLYLINE")
        print fc_List
        for fc in fc_List:
            print fc
            if fc == sdeName + 'StreetPavement':
                GID_bool = True
                fcTemp = scratch + '\\' + 'StreetPavement'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "streetPavementTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "StreetPavement")
                    cursor.updateRow(row)
                del cursor
                streetList.append(fcMem)
            if fc == sdeName + 'AdditionalPavement':
                GID_bool = True
                fcTemp = scratch + '\\' + 'AdditionalPavement'
                fcMem = arcpy.CopyFeatures_management(
                    fcTemp, scratch + '\\' + "additionalPavementTEST")
                GlobalID_Function(fc, fcMem, GID_bool, sql_N)
                arcpy.AddField_management(fcMem, "MeasureType", "TEXT", "", "",
                                          50, "Measure Type", "NULLABLE")
                cursor = arcpy.UpdateCursor(fcMem, "", "", "MeasureType")
                for row in cursor:
                    row.setValue("MeasureType", "AdditionalPavement")
                    cursor.updateRow(row)
                    #print "AdditionalPavement row updated..."
                del cursor
                streetList.append(fcMem)
        print "Created Merged Feature Class..."
        SDE_FC_test = arcpy.Merge_management(streetList,
                                             scratch + '\\' + "MERGE_TEST")
        print "Features have been merged..."
        print "Now loading identical function..."
        # Establish target_Display variable to hit the Final Displays for Street
        target_Display = target + '\\' + targetName + 'StreetMeasurements_DisplayFinal'
        identical(SDE_FC_test, target_Display, sql, GID_bool)
        print display_List
        print "Return..."
    print "...Completed"
def idRoutes(year, root_dir, routes, final_gdb_loc):
    """Flag transit routes serving identified demographic block groups.

    For each demographic category (minority, poverty, median household
    income, LEP) the function buffers the identified block groups by 50 ft,
    clips the routes to that buffer, computes the in-buffer share of each
    route's length, and flags the route (value 1) when more than 33% of its
    length falls inside. Results accumulate on an IdentifiedRoutes{year}
    working feature class which is finally exported to final_gdb_loc.

    Assumes `ap` (arcpy), `os`, `shutil`, and deleteFeatureClass() are
    provided by the enclosing module. Destroys and recreates the working
    geodatabase IdentifiedRoutes{year}.gdb under root_dir.
    """
    gdb = f"IdentifiedRoutes{year}.gdb"
    ap.env.workspace = os.path.join(root_dir, gdb)  # -----> Change
    ap.ClearWorkspaceCache_management()
    working_gdb = ap.env.workspace
    working_file = "IdentifiedRoutes_working"

    # Get input demographic feature classes from previous function outputs
    # minority_gdb = os.path.join(root_dir, f"Minority{year}.gdb")  # -----> Change Year
    # poverty_gdb = os.path.join(root_dir, f"Poverty{year}.gdb")  # -----> Change Year
    # lep_gdb = os.path.join(root_dir, f"LEP{year}.gdb")
    minority_file = os.path.join(final_gdb_loc, f"Minority{year}_Final")
    # minority_file = os.path.join(minority_gdb, f"Minority{year}_Final")
    poverty_file = os.path.join(final_gdb_loc, f"Poverty{year}_Final")
    # poverty_file = os.path.join(poverty_gdb, f"Poverty{year}_Final")
    lep_file = os.path.join(final_gdb_loc, f"LEP{year}_Final")
    medhhinc_file = os.path.join(final_gdb_loc, f"MedHHInc{year}_Final")
    # lep_file = os.path.join(lep_gdb, f"LEP{year}_Final")

    # Working feature classes
    minority_working_file = f"Minority{year}_BG"
    poverty_working_file = f"Poverty{year}_BG"
    lep_working_file = f"LEP{year}_BG"
    medhhinc_working_file = f"MedHHInc{year}_BG"
    routes_file = f"IdentifiedRoutes{year}"
    routes_working = os.path.join(working_gdb, routes_file)

    # define inputs for the for loop - one set for each demographic category
    working_list = [
        {
            "org_file": minority_file,  # input feature class
            "working_file": minority_working_file,  # working feature class for calcs
            "identified_field": "RegMinBG",  # field containing the threshold value for the region
            "add_fields": [['MinorityLength', 'double'],
                           ['PMinority', 'double'],
                           ['MinorityRoute', 'SHORT']]
        },  # route fields to be added
        {
            "org_file": poverty_file,
            "working_file": poverty_working_file,
            "identified_field": "RegPovBG",
            "add_fields": [['PovertyLength', 'double'], ['PPoverty', 'double'],
                           ['PovertyRoute', 'SHORT']]
        },
        {
            "org_file": medhhinc_file,
            "working_file": medhhinc_working_file,
            "identified_field": "RegBelMedInc",
            "add_fields": [['MedHHIncLength', 'double'],
                           ['PMedHHInc', 'double'], ['MedHHIncRoute', 'SHORT']]
        },
        {
            "org_file": lep_file,
            "working_file": lep_working_file,
            "identified_field": "RegAbvLEP",
            "add_fields": [['LEPLength', 'double'], ['PLEP', 'double'],
                           ['LEPRoute', 'SHORT']]
        }
    ]

    # ! is this a helper function now
    # Start from a clean slate: remove any working GDB left by a prior run.
    if os.path.exists(working_gdb) and os.path.isdir(working_gdb):
        shutil.rmtree(working_gdb)
        print(f"{gdb} DELETED!!!")

    # CREATE WORKING GDB
    ap.CreateFileGDB_management(root_dir, gdb)
    print("GEODATABASE CREATED!!!")

    # CREATE WORKING MINORITY, POVERTY AND ROUTES FEATURE CLASSES
    ap.FeatureClassToFeatureClass_conversion(routes, working_gdb, routes_file)
    print("FEATURE CLASS CREATED!!!")
    ap.AddFields_management(routes_working, [['FullLength', 'double']])
    print('INTIIAL FIELDS ADDED TO ROUTES_WORKING FILE!!!')
    ap.CalculateFields_management(routes_working, 'PYTHON3',
                                  [['FullLength', '!shape.length@miles!']])
    print('CALCULATE FULL LENGTH OF ROUTES!!!')

    # loop through each demographic category, first collecting inputs from the working list,
    # then
    for item in working_list:
        # WORKING LIST ITEM DEFINITIONS
        org_file = item["org_file"]
        working_file = item["working_file"]
        identified_field = item["identified_field"]
        add_fields = item["add_fields"]
        routes_analysis = "routes_" + str(working_file)
        length_field = add_fields[0][0]
        percent_field = add_fields[1][0]
        id_field = add_fields[2][0]

        print("")
        print("--------------------------------")
        print("********************************")
        print("START OF " + working_file)
        print("********************************")
        print("--------------------------------")
        print("")

        # FOR LOOP FILE NAME DEFINITIONS
        dissolve_file = str(working_file) + "_dissolve"
        buffer_file = str(dissolve_file) + "_buffer"
        clip_routes = str(routes_analysis) + "_clip"
        dissolve_routes = str(clip_routes) + "_dissolve"

        # FOR LOOP POLYGON AND ROUTE GEOPROCESSING
        # "where" expression filtering for identified blockgroups
        selected_bg = str(identified_field) + " = 1"
        print(selected_bg)
        ap.FeatureClassToFeatureClass_conversion(org_file, working_gdb,
                                                 working_file, selected_bg)
        print(working_file + " CREATED!!!")
        ap.FeatureClassToFeatureClass_conversion(routes_working, working_gdb,
                                                 routes_analysis)
        print(routes_analysis + " FILE CREATED!!!")
        ap.Dissolve_management(working_file, dissolve_file,
                               '')  # dissolve all into one shape
        print(dissolve_file + " CREATED!!!")
        ap.Buffer_analysis(dissolve_file, buffer_file,
                           "50 feet")  # buffer by 50 feet
        print(buffer_file + " CREATED!!!")
        ap.Clip_analysis(routes_working, buffer_file,
                         clip_routes)  # clip routes using the dissolve shape
        print(clip_routes + " CREATED!!!")

        # calculate length of route inside identified blockgroups and compare to total length
        ap.AddField_management(clip_routes, "IdLength", "double")
        print("IdLength Field Added for " + working_file)
        ap.CalculateField_management(clip_routes, "IdLength",
                                     "!shape.geodesicLength@miles!")
        print("IdLength Field Calculated for " + working_file)
        ap.Dissolve_management(
            clip_routes, dissolve_routes, 'LineAbbr',
            [["IdLength", 'sum']])  # collect route pieces by route
        print(clip_routes + " DISSOLVED")
        ap.JoinField_management(routes_working, "LineAbbr", dissolve_routes,
                                "LineAbbr",
                                ["SUM_IdLength"])  # join and sum ID'ed length
        print(routes_working + " JOINED WITH " + dissolve_routes)
        ap.AddFields_management(routes_working, add_fields)
        print("FIELDS ADDED TO " + routes_working)

        # compute percentage of total that is ID'ed then flag if greater than 0.33
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[length_field, '!SUM_IdLength!'],
             [percent_field, f'percent(!{length_field}!, !FullLength!)']],
            '''def percent(calc, full):
    if calc is None:
        return 0
    else:
        return calc / full
''')
        ap.CalculateFields_management(
            routes_working, 'PYTHON3',
            [[id_field, f'ifBlock(!{percent_field}!)']],
            '''def ifBlock(percent):
    if percent > 0.33:
        return 1
    else:
        return 0
''')
        print(routes_working + " FIELDS CALCULATED")
        # drop the joined helper column so the next category's join is clean
        ap.DeleteField_management(routes_working, "SUM_IdLength")
        print("IdLength Field Deleted")
    ## loop end ##

    ap.ClearWorkspaceCache_management()
    deleteFeatureClass(routes_file, final_gdb_loc)

    # CREATE FINAL FEATURE CLASS
    ap.FeatureClassToFeatureClass_conversion(routes_file, final_gdb_loc,
                                             routes_file)
    print("---------------------------")
def CreateWatersheds(Huc12, FlowAcc, FlowDir, FlowLines, StrBuff, workspace,
                     OutFile, OutRaster, OutSheds):
    """Derive pour points and watersheds for each HUC12 polygon.

    For every HUC12: clip/buffer its flowlines (StrBuff meters), mask the
    flow-accumulation raster to that buffer, take the cell with the maximum
    accumulation as the pour point, then snap all pour points (SnapPourPoint),
    run Watershed, convert to polygons, and attribute/QA the result (HUC12
    code, area in km2, and area-mismatch flags).

    Python 2 / arcpy + Spatial Analyst. GPMsg and MsgError come from the
    surrounding module and are not visible in this block.
    """
    try:
        env.workspace = workspace
        env.overwriteOutput = True
        Huc12lyr = arcpy.MakeFeatureLayer_management(Huc12, 'huc12lyr')
        LyrLength = arcpy.GetCount_management(Huc12lyr)
        SaveNum = 1
        OutList = []
        for row in arcpy.da.SearchCursor(Huc12, ['TNMID']):
            GPMsg("Creating pourpoint " + str(SaveNum) + " of " +
                  str(LyrLength.getOutput(0)))
            # Creates ad hoc temp files for each HUC input.
            where = "TNMID" + "='" + row[0] + "'"
            arcpy.SelectLayerByAttribute_management(Huc12lyr, 'NEW_SELECTION',
                                                    where)
            TempHuc12 = workspace + os.sep + 'TempHuc12'
            arcpy.CopyFeatures_management(Huc12lyr, TempHuc12)
            # Clips and buffers flowlines to the HUC12
            arcpy.Clip_analysis(FlowLines, TempHuc12, 'TempClipFL')
            TempClipFL = 'TempClipFL'
            TempBuff = 'TempBufferFL'
            arcpy.Buffer_analysis(TempClipFL, TempBuff,
                                  str(StrBuff) + " Meters", "FULL", "FLAT",
                                  "ALL")
            arcpy.Clip_analysis(TempBuff, TempHuc12, 'TempBuff2')
            arcpy.Delete_management(TempBuff)
            TempBuff = 'TempBuff2'
            TempMask = 'TempMask'
            # Extracts the FlowAcc model relevant to the existing flowlines.
            Mask = ExtractByMask(FlowAcc, TempBuff)
            Mask.save(TempMask)
            #Find the maximum value in the flow accumulation raster, uses this as
            #the pourpoint
            Result = arcpy.GetRasterProperties_management(TempMask, "MAXIMUM")
            ResultOut = float(Result.getOutput(0))
            outRast = 'outRast' + str(SaveNum)
            # NOTE(review): str(Result) relies on the Result object's string
            # conversion; ResultOut is computed above but never used —
            # str(ResultOut) was presumably intended, but its float formatting
            # ("1234.0" vs "1234") could change the Con where-clause. Confirm
            # before changing.
            outRas = Con(Raster(TempMask), 1, "", "Value =" + str(Result))
            outRas.save(outRast)
            SaveNum = SaveNum + 1
            OutList.append(outRast)
            TempList = [TempBuff, TempClipFL, TempHuc12, TempMask]
            for Temp in TempList:
                arcpy.Delete_management(Temp)

        # Creates the actual point files (initially these were polygons, hence
        # the naming scheme, it didn't work).
        GPMsg("Saving output...")
        OutPolys = []
        SaveNum = 1
        for item in OutList:
            OutPoly = 'OutPoly' + str(SaveNum)
            arcpy.RasterToPoint_conversion(item, OutPoly)
            OutPolys.append(OutPoly)
            SaveNum = SaveNum + 1
            arcpy.Delete_management(item)
        PourPointPolygons = OutFile
        # First iteration creates the output; later ones append into it.
        for poly in OutPolys:
            if arcpy.Exists(PourPointPolygons):
                arcpy.Append_management(poly, PourPointPolygons, "NO_TEST", "",
                                        "")
                arcpy.Delete_management(poly)
            else:
                arcpy.CopyFeatures_management(poly, PourPointPolygons)
                arcpy.Delete_management(poly)

        # Adding an attribution step: copy the containing HUC12 code onto
        # each pour point.
        GPMsg("Adding Attribution...")
        PourPoints = OutFile
        arcpy.AddField_management(PourPoints, "Huc12", "TEXT", field_length=14)
        PourPointsLyr = arcpy.MakeFeatureLayer_management(
            PourPoints, 'PourPoints_lyr')
        for point in arcpy.da.SearchCursor(PourPoints, ['OBJECTID']):
            TempPoint = "TempPoint_" + str(point[0])
            where = "OBJECTID" + "=" + str(point[0])
            arcpy.SelectLayerByAttribute_management(PourPointsLyr,
                                                    'NEW_SELECTION', where)
            arcpy.CopyFeatures_management(PourPointsLyr, TempPoint)
            arcpy.SelectLayerByLocation_management(Huc12lyr, "contains",
                                                   TempPoint)
            cursor = arcpy.da.SearchCursor(Huc12lyr, ['HUC12'])
            # keeps the last selected HUC12 value (selection should be 1 row)
            for row in cursor:
                HucVal = str(row[0])
            with arcpy.da.UpdateCursor(PourPoints, ['OBJECTID', 'Huc12'],
                                       where) as update:
                for value in update:
                    value[1] = HucVal
                    update.updateRow(value)
            arcpy.Delete_management(TempPoint)

        # Runs SnapPoints to build a snap points raster for the Watershed step.
        GPMsg("Snapping pour points...")
        SnapPoints = SnapPourPoint(PourPointPolygons, FlowAcc, "10",
                                   'OBJECTID')
        SnapPoints.save(OutRaster)

        # Runs sa.Watershed and RasterToPolygon conversion.
        GPMsg("Building watersheds...")
        outWatershed = Watershed(FlowDir, OutRaster, "VALUE")
        outWatershed.save('ShedRaster')
        arcpy.RasterToPolygon_conversion('ShedRaster', OutSheds, "SIMPLIFY",
                                         "VALUE")

        # Adding and filling out HUC12, calculate area and QA/QC fields.
        watersheds = OutSheds
        pourPoints = PourPoints
        arcpy.AddField_management(watersheds, "Huc12", "TEXT", field_length=14)
        arcpy.AddField_management(watersheds, "SqKm", "DOUBLE")
        arcpy.AddField_management(watersheds, "QAComp", "DOUBLE")
        arcpy.AddField_management(watersheds, "QAPriority", "TEXT",
                                  field_length=10)
        arcpy.AddField_management(watersheds, "QAReason", "TEXT",
                                  field_length=50)
        arcpy.AddField_management(watersheds, "QAComment", "TEXT",
                                  field_length=200)

        # populate the Sqkm field
        sqKmExp = "!SHAPE.AREA@SQUAREKILOMETERS!"
        arcpy.CalculateField_management(watersheds, "SqKm", sqKmExp,
                                        "PYTHON_9.3")

        # populate the Huc12 field: tag each watershed with the HUC12 of the
        # pour point it contains.
        ShedsLyr = arcpy.MakeFeatureLayer_management(watersheds, 'Sheds_lyr')
        PointsLyr = arcpy.MakeFeatureLayer_management(pourPoints, 'Points_lyr')
        for shed in arcpy.da.SearchCursor(watersheds, ['OBJECTID']):
            tempPoly = "TempPoly" + str(shed[0])
            where = "OBJECTID" + "=" + str(shed[0])
            arcpy.SelectLayerByAttribute_management(ShedsLyr, 'NEW_SELECTION',
                                                    where)
            arcpy.CopyFeatures_management(ShedsLyr, tempPoly)
            arcpy.SelectLayerByLocation_management(PointsLyr, "WITHIN",
                                                   tempPoly)
            cursor = arcpy.da.SearchCursor(PointsLyr, ['Huc12'])
            for row in cursor:
                HucVal = str(row[0])
            with arcpy.da.UpdateCursor(watersheds, ['OBJECTID', 'Huc12'],
                                       where) as update:
                for value in update:
                    value[1] = HucVal
                    update.updateRow(value)
            arcpy.Delete_management(tempPoly)

        # Checks in the field for null values, usually caused by slivers
        # created in the rasterToPolygon process. Else statement fills in the
        # QAComp field (ratio of watershed area to the source HUC12 area).
        with arcpy.da.UpdateCursor(
                watersheds,
                ['Huc12', 'QAPriority', 'QAReason', 'SqKm', 'QAComp']) as update:
            for row in update:
                if row[0] is None:
                    row[1] = "HIGH"
                    row[2] = "HUC12 code is null"
                    update.updateRow(row)
                else:
                    where = "HUC12" + "='" + row[0] + "'"
                    arcpy.SelectLayerByAttribute_management(
                        Huc12lyr, "NEW_SELECTION", where)
                    cursor = arcpy.da.SearchCursor(Huc12lyr,
                                                   ['HUC12', 'AreaSqKm'])
                    for cur in cursor:
                        row[4] = row[3] / cur[1]
                        update.updateRow(row)
                    if row[4] is not None:
                        if row[4] >= 1.10:
                            row[1] = "HIGH"
                            row[2] = "Area mismatch > 10%"
                            update.updateRow(row)
                        elif row[4] < 1.10 and row[4] > 1.05:
                            row[1] = "check"
                            row[2] = "Area mismatch 5%-10%"
                            update.updateRow(row)
                        elif row[4] <= 0.90:
                            row[1] = "HIGH"
                            row[2] = "Area mismatch > 10%"
                            update.updateRow(row)
                        elif row[4] > 0.90 and row[4] < 0.95:
                            row[1] = "check"
                            row[2] = "Area mismatch 5%-10%"
                            update.updateRow(row)
    except MsgError, xmsg:
        GPMsg("e", str(xmsg))
arcpy.CalculateField_management("Polygons", "AreaMGCP", "abs([Shape_Area] - .2025)", "VB") # Select only polygons that are within a 'reasonable' range of the true size of the GCPs (Each GCP is .45cm^2 = .2025m^2). # 'Reasonable' range defined here as within .12m^2 # Create a new set of polygons out of those selected # This step is solely for efficiency... here we remove thousands of polygons that are irrelevant prior to a costly buffer step. Field = "AreaMGCP" selectionQuery = '"' + Field + '"' + ' <= .12' arcpy.AddMessage(selectionQuery) arcpy.MakeFeatureLayer_management("Polygons", "Polygons_Focused", selectionQuery) # Buffer the "polygons" feature class inward so that any protruding pixels that have been classified as part of the square GCP are removed. arcpy.AddMessage("Buffering Inward") arcpy.Buffer_analysis("Polygons_Focused", "Buffered_Inward", -0.1, "#", "FLAT") # Sort the buffered attribute table on the basis of the difference from the true size of the GCPs arcpy.Sort_management("Buffered_Inward", "Sorted", [["AreaMGCP", "Descending"]]) # Extract n largest polygons from sorted attribute table, where n is a user-defined number of GCPs to georeference. Field = "OBJECTID" UpperBounds = arcpy.GetParameterAsText(3) # selectionQuery = '"' + Field + " > 10" +'"' selectionQuery = '"' + Field + '"' + ' > 0 AND ' + '"' + Field + '"' + ' <= ' + UpperBounds arcpy.AddMessage(selectionQuery) arcpy.MakeFeatureLayer_management("Sorted", "GCPLayer", selectionQuery) arcpy.CopyFeatures_management("GCPLayer", "GCPs") # Determine the centroid for each GCP
def nombre_ejes_viales():
    """Validate dwelling block-front records against named road axes.

    Builds an ID_FRENTE key per dwelling (UBIGEO+ZONA+MANZANA+FRENTE_ORD),
    intersects dwellings on block edges with a buffer of the road axes, and
    flags dwellings whose declared road name does not match (ERROR_VIA = 1).
    Flagged records are exported to error_10 and a second, tighter buffer
    pass proposes alternative names (NOM_CAT_AL / NOM_VIA_AL).

    Python 2 / arcpy. Relies on module globals not visible here:
    tb_viviendas_ordenadas, tb_manzanas_ordenadas, tb_ejes_viales,
    error_10, path_calidad.
    """
    # Key and result fields on the ordered-dwellings table.
    arcpy.AddField_management(tb_viviendas_ordenadas, 'ID_FRENTE', 'TEXT')
    arcpy.AddField_management(tb_viviendas_ordenadas, 'NOM_CAT_AL', 'TEXT')
    arcpy.AddField_management(tb_viviendas_ordenadas, 'NOM_VIA_AL', 'TEXT')
    arcpy.AddField_management(tb_viviendas_ordenadas, 'ERROR_VIA', 'SHORT')
    # Build the block-front key for every dwelling.
    with arcpy.da.UpdateCursor(
            tb_viviendas_ordenadas,
        ["UBIGEO", "ZONA", "MANZANA", "FRENTE_ORD", "ID_FRENTE"]) as cursor:
        for x in cursor:
            x[4] = u'{}{}{}{}'.format(x[0], x[1], x[2], x[3])
            cursor.updateRow(x)
    #arcpy.CalculateField_management(tb_viviendas_ordenadas,'ID_FRENTE','!UBIGEO!+!ZONA!+!MANZANA!+str(!FRENTE_ORD!)','PYTHON_9.3')
    viviendas_mfl = arcpy.MakeFeatureLayer_management(tb_viviendas_ordenadas,
                                                      "viviendas_mfl")
    # Block polygons converted to edge lines; dwellings touching an edge are
    # the ones on a block front.
    mzs_line = arcpy.FeatureToLine_management(tb_manzanas_ordenadas,
                                              "in_memory/mzs_line")
    viviendas_selecc_frentes_mfl = arcpy.SelectLayerByLocation_management(
        viviendas_mfl, "INTERSECT", mzs_line)
    viviendas_selecc_frentes = arcpy.CopyFeatures_management(
        viviendas_selecc_frentes_mfl, "in_memory/viv_selecc_frentes")
    # 60 m buffer around road axes, dissolved per distinct name attributes.
    ejes_viales_buffer = arcpy.Buffer_analysis(
        tb_ejes_viales, "in_memory/ejes_viales_buffer", "60 meters", "", "",
        "LIST", [
            "CAT_VIA", "NOMBRE_CAT", "NOMBRE_VIA", "NOMBRE_ALT", "CAT_NOM",
            "UBIGEO"
        ])
    intersect_viv_vias = arcpy.Intersect_analysis(
        [viviendas_selecc_frentes, ejes_viales_buffer],
        "in_memory/intersect_viv_vias")
    # Fronts whose declared names (P20/P21) match the buffered road's names
    # are considered valid.
    # NOTE(review): the where clause compares fields to fields
    # ("P20=NOMBRE_CAT AND NOMBRE_VIA=P21") — confirm the provider supports
    # field-to-field comparisons here.
    list_id_frentes_validos = list(
        set([
            u"{}{}{}{}".format(x[0], x[1], x[2], x[3])
            for x in arcpy.da.SearchCursor(
                intersect_viv_vias,
                ["UBIGEO", "ZONA", "MANZANA", "FRENTE_ORD"],
                "P20=NOMBRE_CAT AND NOMBRE_VIA=P21")
        ]))
    print list_id_frentes_validos
    where_expression_list = ""
    # Flag every dwelling whose front key is not in the valid set.
    with arcpy.da.UpdateCursor(tb_viviendas_ordenadas,
                               ["ID_FRENTE", "ERROR_VIA"]) as cursor:
        for x in cursor:
            if x[0] not in list_id_frentes_validos:
                x[1] = 1
                cursor.updateRow(x)
    # Export the flagged (unlinked) dwellings and look for alternative names.
    viviendas_no_enlazadas = arcpy.Select_analysis(tb_viviendas_ordenadas,
                                                   error_10, "ERROR_VIA=1")
    viviendas_no_enlazadas_mfl = arcpy.MakeFeatureLayer_management(
        viviendas_no_enlazadas)
    viviendas_no_enlazadas_select = arcpy.SelectLayerByLocation_management(
        viviendas_no_enlazadas_mfl, "INTERSECT", mzs_line, '5 METERS',
        "NEW_SELECTION")
    # Connect unlinked dwellings into lines grouped by declared name (p21).
    lineas_viv_no_en = arcpy.PointsToLine_management(
        viviendas_no_enlazadas_select,
        path_calidad + '/lineas_viv_no_en.shp', 'p21', 'ID_FRENTE')
    # Tighter 40 m buffer for the second, corrective pass.
    ejes_viales_buffer = arcpy.Buffer_analysis(
        tb_ejes_viales, "in_memory/ejes_viales_buffer", "40 meters", "", "",
        "LIST", [
            "CAT_VIA", "NOMBRE_CAT", "NOMBRE_VIA", "NOMBRE_ALT", "CAT_NOM",
            "UBIGEO"
        ])
    # Skip unnamed roads ("SN" = sin nombre / no name).
    where_eje = "NOMBRE_VIA<>'{}'".format("SN")
    ejes_viales_buffer_select = arcpy.Select_analysis(
        ejes_viales_buffer, 'in_memory/ejes_viales_buffer_select', where_eje)
    ejes_viales_buffer_select_mfl = arcpy.MakeFeatureLayer_management(
        ejes_viales_buffer_select)
    # Road buffers that fully contain a line of unlinked dwellings are
    # candidate corrections.
    temp = arcpy.SpatialJoin_analysis(ejes_viales_buffer_select_mfl,
                                      lineas_viv_no_en,
                                      path_calidad + '/temp.shp',
                                      'JOIN_ONE_TO_MANY', '', '', 'CONTAINS')
    arcpy.AddField_management(temp, 'AREA', 'DOUBLE')
    exp = "!SHAPE.AREA@METERS!"
    arcpy.CalculateField_management(temp, 'AREA', exp, 'PYTHON_9.3')
    # Keep, per joined line, the smallest-area (tightest) candidate buffer.
    temp_sort = arcpy.Sort_management(
        temp, 'in_memory/temp_sort',
        [["JOIN_FID", "ASCENDING"], ["AREA", "ASCENDING"]])
    temp_sort_select = arcpy.Select_analysis(
        temp_sort, path_calidad + '/temp_sort_select.shp', "JOIN_FID<>-1")
    arcpy.DeleteIdentical_management(temp_sort_select, ["JOIN_FID"])
    # (declared name, suggested category name, suggested road name) triples
    list_correcion_vias = list(
        set([(x[0], x[1], x[2]) for x in arcpy.da.SearchCursor(
            temp_sort_select, ["p21", "NOMBRE_CAT", "NOMBRE_VIA"])]))
    #list_correcion_vias_p21=list(set([x[0] for x in arcpy.da.SearchCursor(temp_sort_select,["p21"])]))
    arcpy.AddField_management(error_10, 'NOM_CAT_AL', 'TEXT')
    arcpy.AddField_management(error_10, 'NOM_VIA_AL', 'TEXT')
    ##########################################################################################################
    # Write the suggested alternative names back onto the error records.
    with arcpy.da.UpdateCursor(error_10, [
            "UBIGEO", "ZONA", "MANZANA", "ID_REG_OR", "p21", "NOM_CAT_AL",
            "NOM_VIA_AL"
    ]) as cursor:
        for x in cursor:
            for y in list_correcion_vias:
                if (y[0] == x[4]):
                    x[5] = y[1]
                    x[6] = y[2]
                    break
            cursor.updateRow(x)
actualRd = "actual_rd.shp" #simulated road simRd = "ot1_line.shp" #create output table tab = "buffercells.dbf" #arcpy.Delete_management(tab) arcpy.CreateTable_management(path, tab) arcpy.AddField_management(tab, "Buffer", "LONG", 5) arcpy.AddField_management(tab, "Ncells", "LONG", 7) arcpy.AddField_management(tab, "PropIn", "FLOAT", 4, 3) #create buffer around actual road bufroad = "bufroad.shp" arcpy.Buffer_analysis(actualRd, bufroad, "100 Meters") #intersect buffer with sim road inters = "buf_intersect.shp" arcpy.Intersect_analysis([simRd, bufroad], inters, "ALL", "", "") #find total length of actual road realrd = [] with arcpy.da.SearchCursor(actualRd, ["Shape_Leng"]) as cursor: for row in cursor: realrd.append(row[0]) fullrd = sum(realrd) #find cells of sim road in intersect #insert values to table with insert cursor