def createCumulaitve(root_dir, years, dataset):
    """Build cumulative crop CLU feature classes, one per year.

    Each year's cumulative layer is the Union of the external `layer`
    with the previous year's cumulative output. 2004 seeds from the raw
    2003 CLU; 2011 restarts from the 2009 cumulative layer.

    :param root_dir: directory containing the per-year file geodatabases
    :param years: iterable of integer years to process
    :param dataset: unused here; retained for interface compatibility
    """
    # Set the workspace environment to local file geodatabase
    os.chdir(root_dir)
    for year in years:
        if year == 2004:
            fc_prev = "2003.gdb\\clu_2003"
            # BUG FIX: the original passed the literal 'clu_2004_{}' with an
            # unformatted placeholder. The year-2005 iteration below reads
            # "2004.gdb\clu_2004_crop_c", so that is the intended name.
            arcpy.Union_analysis(in_features=[fc_prev, layer],
                                 out_feature_class='clu_2004_crop_c')
        elif year == 2011:
            # BUG FIX: the original left the '{}' placeholder unformatted;
            # the year>2004 branch derives "2009.gdb\clu_2009_crop_c" for
            # 2010, so the same naming pattern is used here.
            fc_2009 = "2009.gdb\\clu_2009_crop_c"
            arcpy.Union_analysis(in_features=[fc_2009, layer],
                                 out_feature_class='clu_2011_crop_c')
        elif year > 2004 and year != 2011:
            # BUG FIX: the original used bitwise '&', which binds tighter
            # than comparisons, so 'year > 2004 & year != 2011' parsed as a
            # chained comparison against (2004 & year); logical 'and' is the
            # intended test.
            fc_prev = "{0}.gdb\\clu_{0}_crop_c".format(str(year - 1))
            arcpy.Union_analysis(
                in_features=[fc_prev, layer],
                out_feature_class='clu_{}_crop_c'.format(str(year)))
def flatten_poly_fc(in_layer_path, out_gdb_path, query=None):
    '''Check for overlaps and flatten, super region poly knockoff, POLYID
    joins back to original data.

    Self-unions the input so overlapping areas become discrete features,
    explodes to single part, assigns a shared POLYID to coincident pieces
    (matched on centroid X/Y and area rounded to 2 decimals), dissolves on
    POLYID, and builds a POLYID <-> original-FID lookup table.

    :param in_layer_path: path to the input polygon layer
    :param out_gdb_path: geodatabase receiving all outputs
    :param query: optional where-clause applied to the input first
    :return: [flattened_fc, polyid_lookup_fc], or None if an exception hit
    '''
    try:
        log("Flattening {} due to overlaps".format(in_layer_path))
        in_layer_nm = os.path.splitext(os.path.basename(in_layer_path))[0]
        shattered_fc = os.path.join(out_gdb_path, in_layer_nm + "_shattered")
        if query:
            log("We have a query: {}".format(query))
            f_lyr = "f_lyr"
            arcpy.MakeFeatureLayer_management(in_layer_path, f_lyr,
                                              where_clause=query)
            arcpy.Union_analysis(f_lyr, shattered_fc, "ALL", "", "GAPS")
            log(arcpy.GetMessages())
        else:
            arcpy.Union_analysis(in_layer_path, shattered_fc, "ALL", "",
                                 "GAPS")
            log(arcpy.GetMessages())
        shattered_singlepart_fc = os.path.join(
            out_gdb_path, in_layer_nm + "_shattered_singlepart")
        arcpy.MultipartToSinglepart_management(shattered_fc,
                                               shattered_singlepart_fc)
        log(arcpy.GetMessages())
        polyid_field_nm = "POLYID"
        arcpy.AddField_management(shattered_singlepart_fc, polyid_field_nm,
                                  "LONG")
        log(arcpy.GetMessages())
        # Coincident (stacked) single-part polygons get the same POLYID:
        # identity is approximated by centroid X/Y and area rounded to
        # 'decimal_tolerance' places.
        polyid_dict = {}
        polyid_value = 1
        decimal_tolerance = 2
        field_list = ["OID@", "SHAPE@XY", "SHAPE@AREA", polyid_field_nm]
        update_rows = arcpy.da.UpdateCursor(shattered_singlepart_fc,
                                            field_list)
        for row in update_rows:
            axyvalue = (round(row[1][0], decimal_tolerance),
                        round(row[1][1], decimal_tolerance),
                        round(row[2], decimal_tolerance))
            if axyvalue not in polyid_dict:
                polyid_dict[axyvalue] = polyid_value
                polyid_value = polyid_value + 1
            row[3] = polyid_dict[axyvalue]
            update_rows.updateRow(row)
        del row, update_rows
        del polyid_dict
        final_fc = os.path.join(out_gdb_path, in_layer_nm + "_flattened")
        try:
            arcpy.Dissolve_management(shattered_singlepart_fc, final_fc,
                                      polyid_field_nm, "", "SINGLE_PART")
            log(arcpy.GetMessages())
        # BUG FIX: was a bare 'except:', which would also swallow
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        except Exception:
            log("Failed initial Dissolve, repairing geometry and trying again")
            arcpy.RepairGeometry_management(shattered_singlepart_fc)
            log(arcpy.GetMessages())
            arcpy.Dissolve_management(shattered_singlepart_fc, final_fc,
                                      polyid_field_nm, "", "SINGLE_PART")
            log(arcpy.GetMessages())
        log("Creating POLYID lookup table")
        polyid_fc = os.path.join(out_gdb_path, in_layer_nm + "_polyid")
        # The self-union adds an FID_<name> field; locate it by substring.
        fid_field = next(i.name
                         for i in arcpy.ListFields(shattered_singlepart_fc)
                         if "FID" in i.name)
        arcpy.Frequency_analysis(shattered_singlepart_fc, polyid_fc,
                                 "POLYID;{}".format(fid_field), "")
        log(arcpy.GetMessages())
        arcpy.AddField_management(polyid_fc, "flattened_POLYID", "LONG")
        log(arcpy.GetMessages())
        arcpy.CalculateField_management(polyid_fc, "flattened_POLYID",
                                        "!POLYID!", "PYTHON")
        log(arcpy.GetMessages())
        arcpy.DeleteField_management(polyid_fc, "FREQUENCY;POLYID")
        log(arcpy.GetMessages())
        log("Successful finish to flattening routine")
        return [final_fc, polyid_fc]
    except Exception as e:
        log("EXCEPTION hit: {}".format(e))
def data_prep():
    """Prepare parcel impervious-area data.

    Merges the impervious layers, overlays them with parcels, dissolves by
    GPIN, and joins the per-parcel totals back onto parcel points, which are
    exported as imp_points. Operates on module-level layer/path variables.
    """
    # Remove any stale copy of the final table
    gdb_tools.check_del(arc_gdb, final_table)
    # Bring the SDE parcel points into the working gdb
    arcpy.CopyFeatures_management(sde_parcel_point, parcel_point)
    # Combine all impervious-surface sources into one layer
    arcpy.Merge_management(imp_list, imperv)
    # Self-union the merged impervious layer
    arcpy.Union_analysis(imperv, union_out)
    # Overlay parcels with the unioned impervious areas
    arcpy.Intersect_analysis([sde_parcel_area, union_out], intersect)
    # Collapse the overlay to one feature per parcel (GPIN)
    arcpy.Dissolve_management(intersect, dissolve, 'GPIN')
    # Attach the dissolved impervious attributes to the parcel points
    arcpy.JoinField_management(parcel_point, 'PARCELSPOL', dissolve, 'GPIN')
    # PIN: text copy of PROP_ID
    arcpy.AddField_management(parcel_point, 'PIN', 'TEXT', '', '', '15')
    arcpy.CalculateField_management(parcel_point, 'PIN', '[PROP_ID]', 'VB')
    # TOTAL_IMP_AREA: impervious shape area per parcel
    arcpy.AddField_management(parcel_point, 'TOTAL_IMP_AREA', 'DOUBLE')
    arcpy.CalculateField_management(parcel_point, 'TOTAL_IMP_AREA',
                                    '[Shape_Area]', 'VB')
    # Persist the joined points
    arcpy.Copy_management(parcel_point, imp_points)
def extractBySpatialUnion():
    """Union the fishnet index grid with the land-use layer into landuse.shp,
    dropping FID join attributes."""
    sources = [
        "fishnet_indexs.shp",
        os.path.join(vector_data_path, "landuse"),
    ]
    target = "landuse.shp"
    arcpy.Union_analysis(sources, target, join_attributes='NO_FID')
def unionAnalysis(workspace, in_features, out_feature_class,
                  join_attributes="ALL", cluster_tolerance="", gaps="GAPS"):
    """Run a Union of in_features into out_feature_class in the workspace.

    BUG FIX: join_attributes, cluster_tolerance and gaps were referenced
    but never defined (NameError at runtime); they are now keyword
    parameters whose defaults match the Union tool's own defaults, which
    keeps existing three-argument calls working.

    :param workspace: workspace set on arcpy env before running
    :param in_features: feature classes to union
    :param out_feature_class: output feature class
    :param join_attributes: Union join_attributes option (default "ALL")
    :param cluster_tolerance: Union cluster tolerance (default tool default)
    :param gaps: Union gaps option (default "GAPS")
    """
    env.workspace = workspace
    arcpy.Union_analysis(in_features, out_feature_class, join_attributes,
                         cluster_tolerance, gaps)
def estabFOVfootprint(DEM_raster_layer, Camera_point_layer, Smoothing_Tolerance):
    """Derive a camera field-of-view footprint polygon from a viewshed.

    Pipeline: Visibility raster -> Boundary Clean -> Raster To Polygon ->
    Smooth Polygon -> Union -> Dissolve. Each in_memory intermediate is
    deleted once the next step has consumed it.

    :return: path of the dissolved footprint ("in_memory\\whlVisPoly")
    """
    raw_rast = "in_memory\\rawVisRast"
    clean_rast = "in_memory\\clnVisRast"
    vis_poly = "in_memory\\visPoly"
    smooth_poly = "in_memory\\smthVisPoly"
    union_poly = "in_memory\\uniVisPoly"
    footprintFOVout = "in_memory\\whlVisPoly"

    # Spatial Analyst Tools > Surface > Visibility
    arcpy.gp.Visibility_sa(
        DEM_raster_layer, Camera_point_layer, raw_rast,
        "", "FREQUENCY", "NODATA", "0.00001201", "FLAT_EARTH", "0.13",
        "", "", "OFFSETA", "", "", "AZIMUTH1", "AZIMUTH2", "VERT1", "VERT2")
    # Spatial Analyst Tools > Generalization > Boundary Clean
    arcpy.gp.BoundaryClean_sa(raw_rast, clean_rast, "ASCEND", "TWO_WAY")
    arcpy.Delete_management(raw_rast)
    # Conversion Tools > From Raster > Raster to Polygon
    arcpy.RasterToPolygon_conversion(clean_rast, vis_poly, "NO_SIMPLIFY", "")
    arcpy.Delete_management(clean_rast)
    # Cartographic Tools > Generalization > Smooth Polygon
    arcpy.SmoothPolygon_cartography(vis_poly, smooth_poly, "PAEK",
                                    Smoothing_Tolerance, "NO_FIXED",
                                    "NO_CHECK")
    arcpy.Delete_management(vis_poly)
    # Analysis Tools > Overlay > Union
    arcpy.Union_analysis(smooth_poly, union_poly, "ALL", "", "NO_GAPS")
    arcpy.Delete_management(smooth_poly)
    # Data Management Tools > Generalization > Dissolve
    arcpy.Dissolve_management(union_poly, footprintFOVout, "", "",
                              "SINGLE_PART", "DISSOLVE_LINES")
    arcpy.Delete_management(union_poly)
    return footprintFOVout
def webProducts(rast, project=True, method="POINT_REMOVE", tolerance=15, minimumArea=3000): rastName = arcpy.Describe(rast).baseName if project: arcpy.ProjectRaster_management( rastName, "WEB" + rastName, r"Coordinate Systems/Projected Coordinate Systems/World/WGS 1984 Web Mercator (Auxiliary Sphere).prj", "BILINEAR", "", "NAD_1983_to_WGS_1984_5") raster = "WEB" + rastName q = arcpy.RasterDomain_3d(raster, raster + "q", "POLYGON") qq = arcpy.Union_analysis(q, raster + "qq", "ALL", 0.1, "NO_GAPS") qqq = arcpy.Dissolve_management(qq, raster + "qqq") qqqq = arcpy.cartography.SimplifyPolygon(qqq, raster + "qqqq", method, tolerance, minimumArea, "NO_CHECK", "NO_KEEP") arcpy.Buffer_analysis(qqqq, "out_" + raster, "30 Feet", "FULL", "", "NONE") print "Products created." arcpy.Delete_management(rast) arcpy.Delete_management(raster + "q") arcpy.Delete_management(raster + "qq") arcpy.Delete_management(raster + "qqq") arcpy.Delete_management(raster + "qqqq")
def areaatt(clippedparcels, outline, attlyr, muniname, newname, newalias):
    """Attach the overlap acreage between parcels and an attribute layer.

    Unions the two layers, keeps only the pieces present in BOTH inputs,
    computes each piece's area in acres, then joins that area back to the
    original parcels via addatt(). Intermediates are deleted.

    :return: the layer produced by addatt()
    """
    att_nm = os.path.basename(os.path.normpath(attlyr))
    parcels_nm = os.path.basename(os.path.normpath(clippedparcels))

    # Union retains info (source FIDs) of each unioned piece.
    union_fc = 'sites_union_' + att_nm
    arcpy.Union_analysis([clippedparcels, attlyr], union_fc)

    # Pieces with a real FID from BOTH inputs are the true overlaps.
    both_clause = ('FID_' + parcels_nm + ' <> -1 AND FID_' + att_nm +
                   ' <> -1')
    overlap_fc = 'sites_' + att_nm + '_filtered'
    arcpy.Select_analysis(union_fc, overlap_fc, both_clause)

    # Area of each overlap piece, in acres.
    arcpy.AddField_management(overlap_fc, 'areacalc', 'DOUBLE')
    arcpy.CalculateField_management(overlap_fc, 'areacalc',
                                    "float(!SHAPE.AREA@ACRES!)",
                                    "PYTHON_9.3")

    # Spatially join back to original parcels, maintaining "overlap_p".
    newlayer = addatt(clippedparcels, overlap_fc, muniname, 'areacalc',
                      newname, newalias, method='CONTAINS')

    # Garbage collection!
    arcpy.Delete_management(union_fc)
    arcpy.Delete_management(overlap_fc)
    return (newlayer)
def union_sp_files(in_ws, out_inter, subset_group_bool, ent_list): unionlist = [] if subset_group: out_inter = out_inter_location + os.sep + enlistfc_name + '_inter' print out_inter if not arcpy.Exists(out_inter): start_union_time = datetime.datetime.now() print "\nStarting {0} at {1}".format(out_inter, start_union_time) arcpy.env.workspace = in_ws fc_list = arcpy.ListFeatureClasses() if len(fc_list) != 0: if subset_group_bool: for fcs in fc_list: entid = fcs.split('_') entid = str(entid[1]) if entid in ent_list: unionlist.append(str(in_ws + os.sep + str(fcs))) else: unionlist = fc_list try: arcpy.Union_analysis(unionlist, out_inter, "ALL") except Exception as error: print(error.args[0]) arcpy.Delete_management(out_inter) print "\nCreated output {0} in {1}".format( out_inter, (datetime.datetime.now() - start_union_time)) else: pass else: print '\nAlready union {0}'.format(out_inter)
def overlay(path):
    """Overlay the HRU3 map with the lakes layer ("lacs.shp").

    Union polygons that coincide exactly with an intersect polygon are
    deleted; the remainder is saved as HRU4.shp. All intermediates plus
    the earlier HRU2/HRU3 shapefiles are removed from *path*.
    """
    import arcpy, os, re
    # from arcpy.sa import *
    from arcpy import env
    arcpy.env.workspace = path
    print('Overlaying the HRU map with lakes')

    HRU2 = os.path.join(path, "HRU2" + "." + "shp")
    HRU3 = os.path.join(path, "HRU3" + "." + "shp")
    HRU4 = os.path.join(path, "HRU4" + "." + "shp")

    arcpy.Intersect_analysis([HRU3, "lacs.shp"], "HRU_intersect", "ALL")
    arcpy.Union_analysis([HRU3, "lacs.shp"], "HRU_union", "ALL")
    arcpy.MakeFeatureLayer_management('HRU_intersect.shp',
                                      'HRU_intersect_lyr')
    arcpy.MakeFeatureLayer_management('HRU_union.shp', 'HRU_union_lyr')

    # Drop union polygons that are identical to an intersect polygon.
    arcpy.SelectLayerByLocation_management("HRU_union_lyr",
                                           "ARE_IDENTICAL_TO",
                                           "HRU_intersect_lyr")
    if arcpy.Describe("HRU_union_lyr").FIDSet:
        arcpy.DeleteFeatures_management("HRU_union_lyr")
    arcpy.CopyFeatures_management("HRU_union_lyr", HRU4)
    arcpy.DeleteField_management(HRU4, ["FID_lacs", "ident", "FID_HRU3"])

    # Remove intermediates and earlier HRU generations.
    for leftover in (os.path.join(path, "HRU_intersect" + "." + "shp"),
                     os.path.join(path, "HRU_union" + "." + "shp"),
                     HRU2,
                     HRU3):
        arcpy.Delete_management(leftover)
    print('done!')
def create_karabakh(in_polygon1, in_line, in_polygon0, out_shp):
    # Builds a merged Azerbaijan polygon tagged with a "territory" field of
    # "NKR" (Nagorno-Karabakh) or "AZE", written to out_shp.
    # Arcpy doesn't have a method for cutting polygons with polylines. We
    # use a solution suggested by https://gis.stackexchange.com/a/24757
    print "...Removing Nakhichevan"
    select_shp = "b_temp/select_shp.shp"
    arcpy.Select_analysis(in_polygon1, select_shp, '"ADM2" = \'az3100\'')
    print "...Creating the buffer on the north-east side of the Line of Contact"
    # This covers the whole of Azerbaijan proper. Thus, it can be used to
    # erase this part of Azerbaijan to extract Nagorno-Karabakh.
    buffer_shp = "b_temp/temp_buffer.shp"
    arcpy.Buffer_analysis(in_line, buffer_shp, "10 DecimalDegrees", "LEFT",
                          "ROUND", "NONE", "", "GEODESIC")
    # see http://desktop.arcgis.com/en/arcmap/10.3/tools/analysis-toolbox/buffer.htm
    print "...Removing Azerbaijan proper"
    erase_shp = "b_temp/temp_erase.shp"
    arcpy.Erase_analysis(select_shp, buffer_shp, erase_shp)
    print "...Merging Karabakh polygon with the rest of Azerbaijan"
    inFeatures = [in_polygon0, erase_shp]
    outFeatures = out_shp
    arcpy.Union_analysis(inFeatures, outFeatures, "ONLY_FID")
    print "...creating the territory indicator 1/2"
    arcpy.AddField_management(out_shp, "territory", "TEXT")
    print "...creating the territory indicator 2/2"
    # NOTE(review): the code block below embeds escaped '\n' sequences
    # (literal backslash-n in the string) rather than real newlines --
    # confirm arcpy accepts this form before changing it.
    arcpy.CalculateField_management(
        out_shp, "territory", "Reclass(!FID_temp_e!)", "PYTHON_9.3",
        "def Reclass(name):\\n if (name == 0):\\n return \"NKR\"\\n else:\\n return \"AZE\""
    )
    # FID_temp_e comes from Erase tool's output (erase_shp), taking the
    # value of 0 for Karabakh and -1 for the rest
    print "Deleting intermediate files"
    files_to_delete = [in_polygon1, in_polygon0, select_shp, buffer_shp,
                       erase_shp]
    for file in files_to_delete:
        delete_if_exists(file)
def __remove_exclusionary_areas(self, target_features, exclusion_list, workspace, timestamp):
    """Erase all exclusionary areas from target_features.

    When exclusion_list holds several layers they are first unioned into a
    temporary feature class in *workspace*; a single layer is used as-is.

    :return: name of a temporary feature class with exclusions removed, or
        target_features unchanged when exclusion_list is empty
    """
    exclusive_features = 'tmp_exc_sut_{}'.format(timestamp)
    if not exclusion_list:
        # Nothing to exclude: hand back the input untouched.
        return target_features
    arcpy.AddMessage('Removing exclusionary areas...')
    if len(exclusion_list) == 1:
        exclusion_features = exclusion_list[0]
    else:
        # Several exclusion layers: collapse them into one temp class.
        exclusion_features = 'tmp_exc_fet_{}'.format(timestamp)
        arcpy.Union_analysis(
            in_features=exclusion_list,
            out_feature_class=os.path.join(workspace, exclusion_features),
            join_attributes='ONLY_FID')
    arcpy.Erase_analysis(in_features=target_features,
                         erase_features=exclusion_features,
                         out_feature_class=exclusive_features)
    return exclusive_features
def handle_results(base_dir, task_dir, target_lasd):
    """Collect per-task e.shp / f.shp outputs into a RESULTS folder.

    e shapefiles -> merged, dissolved, area-filtered driver polygons
    (D01_DRIVERS); f shapefiles -> merged, unioned, dissolved data-domain
    polygon (D01_DATA_DOMAIN), which is then attached to target_lasd as a
    Soft_Clip constraint.
    """
    print('Handling Task Results')
    results = os.path.join(base_dir, 'RESULTS')
    os.mkdir(results)

    # Gather every existing e.shp / f.shp under the task folders.
    e_process = []
    f_process = []
    for folder in os.listdir(task_dir):
        e = os.path.join(task_dir, folder, 'e.shp')
        f = os.path.join(task_dir, folder, 'f.shp')
        if arcpy.Exists(e):
            e_process.append(e)
        if arcpy.Exists(f):
            f_process.append(f)

    # E - Merge
    e_merge = arcpy.Merge_management(e_process,
                                     os.path.join(results, 'e_merge.shp'))
    # E - Dissolve (Drivers)
    e_diss = arcpy.Dissolve_management(
        e_merge, os.path.join(results, 'e_dissolve.shp'),
        '#', '#', 'False', 'True')
    # Create Acres Field & Calculate on Dissolved E
    arcpy.AddField_management(e_diss, 'ACRES', 'DOUBLE')
    arcpy.CalculateField_management(e_diss, 'ACRES', '!shape.area@acres!')
    # Select & Copy Features (Clean Multipart Edge Slivers After Dissolve)
    e_diss_sel_layer = arcpy.MakeFeatureLayer_management(
        e_diss, 'e_diss_sel_lyr_', '"ACRES" > 1.5')
    arcpy.CopyFeatures_management(e_diss_sel_layer,
                                  os.path.join(results, D01_DRIVERS))
    arcpy.Delete_management(e_diss_sel_layer)

    # F - Merge
    f_merge = arcpy.Merge_management(f_process,
                                     os.path.join(results, 'f_merge.shp'))
    # F - Union
    f_union = arcpy.Union_analysis(f_merge,
                                   os.path.join(results, 'f_union.shp'),
                                   'ALL', '#', 'NO_GAPS')
    # F - Dissolve (Data Domain)
    data_domain = arcpy.Dissolve_management(
        f_union, os.path.join(results, D01_DATA_DOMAIN),
        '#', '#', 'False', 'True')

    # Apply Data Domain As Soft Clip to LASD
    constraint_param = [[data_domain, "<None>", "Soft_Clip"]]
    arcpy.AddFilesToLasDataset_management(target_lasd, "", "",
                                          constraint_param)
def CombineShorelinePolygons(bndMTL: str, bndMHW: str, inletLines: str,
                             ShorelinePts: str, bndpoly: str,
                             SA_bounds: str='', verbose: bool=True):
    """
    Use MTL and MHW contour polygons to create shoreline polygon.

    'Shoreline' = MHW on oceanside and MTL on bayside.

    :param bndMTL: MTL contour polygon feature class
    :param bndMHW: MHW contour polygon feature class
    :param inletLines: lines used to split the MTL-MHW band at inlets
    :param ShorelinePts: points identifying oceanside polygons
    :param bndpoly: output shoreline polygon feature class
    :param SA_bounds: optional study-area boundary lines, also used to split
    :param verbose: unused here; retained for interface compatibility
    :return: bndpoly
    """
    # BUG FIX: the original started with 'start = time.clock()'; time.clock()
    # was removed in Python 3.8 and the value was never used, so the dead
    # line (and the unused 'join' scratch path) are dropped.
    # Inlet lines must intersect the MHW polygon
    symdiff = os.path.join(arcpy.env.scratchGDB, 'shore_1symdiff')
    split = os.path.join(arcpy.env.scratchGDB, 'shore_2split')
    erase = os.path.join(arcpy.env.scratchGDB, 'shore_4_bayMTL')
    union_2 = os.path.join(arcpy.env.scratchGDB, 'shore_5union')
    # Create layer (symdiff) of land between MTL and MHW and split by inlets
    print("...delineating land between MTL and MHW elevations...")
    arcpy.Delete_management(symdiff)  # delete if already exists
    arcpy.SymDiff_analysis(bndMTL, bndMHW, symdiff)
    # Split symdiff at inlets (and SA_bounds)
    print("...removing the MHW-MTL areas on the oceanside...")
    if len(SA_bounds) > 0:
        # Split MTL features at inlets and study area bounds
        arcpy.FeatureToPolygon_management([symdiff, inletLines, SA_bounds],
                                          split)
    else:
        # Split MTL features at inlets
        arcpy.FeatureToPolygon_management([symdiff, inletLines], split)
    # Isolate polygons touching shoreline points and erase from symdiff
    arcpy.SpatialJoin_analysis(split, ShorelinePts, split+'_join', "#",
                               "KEEP_COMMON",
                               match_option="COMPLETELY_CONTAINS")
    arcpy.Erase_analysis(symdiff, split+'_join', erase)
    # Merge bayside MHW-MTL with above-MHW polygon
    arcpy.Union_analysis([erase, bndMHW], union_2)
    # Dissolve all features in union_2 to single part polygons
    arcpy.Dissolve_management(union_2, bndpoly, multi_part='SINGLE_PART')
    print('''User input required! Select extra features in {} for deletion.\nRecommended technique: select the polygon/s to keep and then Switch Selection.\n'''.format(os.path.basename(bndpoly)))
    return(bndpoly)
def growthRatesJoin(urbanGrowthFc, ruralGrowthFc, countryBoundaries,
                    urbanAreasShp, iso3, outGDB):
    """Build a growthRates<ISO3> feature class in outGDB.

    Country polygons are split into urban/rural parts via a Union with the
    urban-areas layer (ONES = 1 marks urban), each part joined to its
    2010-2015 growth rate, then merged back together. The in_memory
    workspace is always cleaned up.
    """
    try:
        # Extract this country's boundary and urban-area polygons.
        arcpy.FeatureClassToFeatureClass_conversion(
            countryBoundaries, "in_memory", "countryBoundary",
            """ iso_alpha3 = '%s' """ % (iso3,))
        arcpy.FeatureClassToFeatureClass_conversion(
            urbanAreasShp, "in_memory", "urban_extract",
            """ ISO3 = '%s' """ % (iso3,))
        # Union of urban and boundary polygons.
        arcpy.Union_analysis(
            ["in_memory/countryBoundary", "in_memory/urban_extract"],
            "in_memory/countryUrbanRural")
        # Separate urban (ONES = 1) and rural (ONES = 0) polygons.
        arcpy.FeatureClassToFeatureClass_conversion(
            "in_memory/countryUrbanRural", "in_memory", "countryUrban",
            """ ONES = 1 """)
        arcpy.FeatureClassToFeatureClass_conversion(
            "in_memory/countryUrbanRural", "in_memory", "countryRural",
            """ ONES = 0 """)
        # Attach the matching growth-rate figures.
        arcpy.JoinField_management("in_memory/countryUrban", "iso_alpha2",
                                   urbanGrowthFc, "ISO2",
                                   ["Growth20102015"])
        arcpy.JoinField_management("in_memory/countryRural", "iso_alpha2",
                                   ruralGrowthFc, "ISO2",
                                   ["Growth20102015"])
        # Recombine into the output geodatabase.
        arcpy.Merge_management(
            ["in_memory/countryUrban", "in_memory/countryRural"],
            outGDB + "/growthRates%s" % iso3)
    finally:
        # Tidy up
        arcpy.Delete_management("in_memory")
def union_inputs(input):  # (name, fc_list)
    """Union input[1] (a list of feature classes) and dissolve the result
    into <output_path>\\Inputs\\<input[0]>, via an in_memory scratch class
    that is deleted afterwards."""
    name, fc_list = input[0], input[1]
    scratch = "in_memory\\dissolve"
    target = output_path + "\\Inputs\\" + name
    arcpy.Union_analysis(fc_list, scratch)
    arcpy.Dissolve_management(scratch, target)
    arcpy.Delete_management(scratch)
    return
def intersectLayers(self):
    """Union the inventory and disturbance layers into self.temp_overlay,
    reporting progress via self.ProgressPrinter."""
    pp = self.ProgressPrinter.newProcess(inspect.stack()[0][3], 1, 1).start()
    # Intersecting disturbance and Inventory layers...
    overlay_inputs = [self.inventory_layer3, self.disturbances_layer2]
    arcpy.Union_analysis(overlay_inputs, self.temp_overlay, "ALL")
    pp.finish()
def union_inputs(name, dest, fc_list):
    """Union fc_list and dissolve the result (collapsing attributes) into
    <output_path>\\Input_<dest>\\<name>."""
    target = output_path + "\\Input_" + dest + "\\" + name
    scratch = "in_memory\\dissolve"
    arcpy.Union_analysis(fc_list, scratch)
    arcpy.Dissolve_management(scratch, target)
    # Uncomment below and comment the 2 lines above to choose to not
    # dissolve / delete attribute data.
    # Note: overlapping polygon errors are possible in acreage counts!
    #arcpy.Union_analysis(fc_list, union_output)
    return
def union(lyrA, lyrB, outShp, api_gis="arcpy"):
    """
    Calculates the geometric union of the overlayed polygon layers, i.e.
    the intersection plus the symmetrical difference of layers A and B.

    API's Available:
    * arcpy;
    * saga;
    * grass;
    * grass_cmd;
    """
    if api_gis == "arcpy":
        import arcpy
        # lyrB may be a single layer or a list of layers.
        if type(lyrB) == list:
            lst = [lyrA] + lyrB
        else:
            lst = [lyrA, lyrB]
        arcpy.Union_analysis(";".join(lst), outShp, "ALL", "", "GAPS")
    elif api_gis == "saga":
        # SAGA polygon union (module 17); -SPLIT 1 explodes multiparts.
        from gasp import exec_cmd
        rcmd = exec_cmd(
            ("saga_cmd shapes_polygons 17 -A {} -B {} -RESULT {} -SPLIT 1"
             ).format(lyrA, lyrB, outShp))
    elif api_gis == "grass":
        # GRASS v.overlay via pygrass, operator "or" = union.
        from grass.pygrass.modules import Module
        un = Module("v.overlay", ainput=lyrA, atype="area", binput=lyrB,
                    btype="area", operator="or", output=outShp,
                    overwrite=True, run_=False, quiet=True)
        un()
    elif api_gis == "grass_cmd":
        # GRASS v.overlay via the command line.
        from gasp import exec_cmd
        outcmd = exec_cmd(
            ("v.overlay ainput={} atype=area binput={} btype=area "
             "operator=or output={} --overwrite --quiet").format(
                 lyrA, lyrB, outShp))
    else:
        raise ValueError("{} is not available!".format(api_gis))
    return outShp
def stackMutipleFC(): arcpy.env.workspace = defineGDBpath(['refinement','refinement']) # # Set local variables in_features = arcpy.ListFeatureClasses("*_shp") print in_features out_feature_class = "stacked_features" join_attributes = "NO_FID" cluster_tolerance = 0.0003 arcpy.Union_analysis (in_features, out_feature_class, join_attributes, cluster_tolerance)
def union(fc_list, output_fc):
    """Union all feature classes in fc_list into output_fc.

    Join attributes are "ALL", no cluster tolerance, and gaps set to
    "GAPS". Tool failures are reported via AddError rather than raised.
    Reference:
    https://pro.arcgis.com/en/pro-app/latest/tool-reference/analysis/union.htm
    """
    arcpy.AddMessage('\nStarting union')
    try:
        check_status(arcpy.Union_analysis(fc_list, output_fc,
                                          "ALL", "", "GAPS"))
    except arcpy.ExecuteError:
        arcpy.AddError(arcpy.GetMessages(2))
    arcpy.AddMessage('union complete\n')
def trial():
    # Build the module-level House_TM dataframe: union the AOI layer with
    # the houses layer, drop non-overlapping pieces, dissolve by tenure
    # fields, and tabulate total vs house-overlap hectares.
    global House_TM
    Onion = 'AOI_TM_D_Housez'
    arcpy.env.workspace = str(TempGDB)
    # Total area (ha) of each AOI polygon, captured before the union so it
    # survives into every unioned piece.
    arcpy.AddField_management(AOI_H_D, "TM_TOT_HA", "FLOAT", "", "", "", "",
                              "NULLABLE")
    arcpy.CalculateField_management(AOI_H_D, "TM_TOT_HA",
                                    "!SHAPE.AREA@HECTARES!", "PYTHON_9.3")
    arcpy.Union_analysis([AOI_H_D, Housez], Onion)
    ################################################
    field_names = [f.name for f in arcpy.ListFields(Onion)]
    print(field_names)
    # For each FID_* field the union created, delete pieces not present in
    # that source (FID == -1), keeping only true overlaps.
    # NOTE(review): strip("u'") removes a character SET (u and '), not a
    # prefix -- it works for repr-style names like u'FID_x' but is fragile;
    # confirm before reuse.
    for fname in field_names:
        if 'FID' in fname:
            field_value = fname
            fieldname_val_temp = field_value.strip("u'")
            fieldname_value = fieldname_val_temp.strip("',")
            with arcpy.da.UpdateCursor(Onion, fieldname_value) as cursor:
                for row in cursor:
                    if row[0] == -1:
                        cursor.deleteRow()
    arcpy.Dissolve_management(Onion, 'Onion',
                              ['TM', 'WILPNAMES', "PDEEK", 'TM_TOT_HA'])
    # Area (ha) of each dissolved house-overlap piece.
    arcpy.AddField_management('Onion', "TM_Hz_TOT_HA", "FLOAT", "", "", "",
                              "", "NULLABLE")
    arcpy.CalculateField_management('Onion', "TM_Hz_TOT_HA",
                                    "!SHAPE.AREA@HECTARES!", "PYTHON_9.3")
    ############THIS Section will need to be edited for your report.
    col1_col = []
    col2_col = []
    col3_col = []
    col4_col = []
    col1 = "WILP_TM"
    col2 = "WILP_NAME"
    col3 = "TM_TOT_AREA"
    col4 = "WILP_TM_AREA"
    l = 0
    with arcpy.da.SearchCursor(
            'Onion',
            ["TM", "WILPNAMES", "TM_TOT_HA", "TM_Hz_TOT_HA"]) as cursor:
        for row in cursor:
            col1_col.append(cursor[0])
            col2_col.append(cursor[1])
            col3_col.append(cursor[2])
            col4_col.append(cursor[3])
    House_TM = pd.DataFrame(list(zip(col1_col, col2_col, col3_col,
                                     col4_col)),
                            columns=[col1, col2, col3, col4])
def createMaxSpeed(featureclass):
    """Derive a max-speed feature class from *featureclass*.

    Copies MAXADDOWN into a short 'Down' field, self-unions so stacked
    polygons become discrete features, sorts descending on Down, tags each
    feature with its centroid X/Y, then dissolves twice so each location
    keeps its highest Down value ("fc_mt_final_<name>_max_speed").
    """
    # BUG FIX: basename.rstrip(extension) strips a trailing CHARACTER SET,
    # not a suffix (e.g. 'roads.shp'.rstrip('.shp') -> 'road'); splitext on
    # the basename removes exactly the extension.
    fcName = os.path.splitext(
        os.path.basename(featureclass))[0].partition("_")[2]
    arcpy.AddField_management(featureclass, "Down", "SHORT")
    arcpy.CalculateField_management(featureclass, "Down", "!MAXADDOWN!",
                                    "PYTHON")
    arcpy.Union_analysis(featureclass, "temp_union_" + fcName, "ALL")
    arcpy.Sort_management("temp_union_" + fcName,
                          "temp_union_sort" + fcName,
                          [["Down", "DESCENDING"]])
    # Centroid coordinates let the first dissolve group stacked features.
    arcpy.AddField_management("temp_union_sort" + fcName, "XCOORD", "DOUBLE")
    arcpy.AddField_management("temp_union_sort" + fcName, "YCOORD", "DOUBLE")
    arcpy.CalculateField_management("temp_union_sort" + fcName, "XCOORD",
                                    "!SHAPE.CENTROID!.split()[0]", "PYTHON")
    arcpy.CalculateField_management("temp_union_sort" + fcName, "YCOORD",
                                    "!SHAPE.CENTROID!.split()[1]", "PYTHON")
    # First dissolve keeps the FIRST (highest, after the sort) Down value
    # per centroid/area group; second collapses to one feature per value.
    arcpy.Dissolve_management("temp_union_sort" + fcName,
                              "temp_union_sort_dz" + fcName,
                              ["XCOORD", "YCOORD", "SHAPE_Area"],
                              "Down FIRST", "MULTI_PART", "")
    arcpy.Dissolve_management("temp_union_sort_dz" + fcName,
                              "fc_mt_final_" + fcName + "_max_speed",
                              ["FIRST_Down"], "", "MULTI_PART", "")
def generate_scratch_data(directory): # create new fgdb print "Creating Scratch File Geodatabase..." network_fgdb = arcpy.CreateFileGDB_management(directory, 'NetworkBuild_scratchData.gdb') # union the census urban areas and the sgid muni print "Union the Census Urban Areas and SGID Munis" unioned_fc = arcpy.Union_analysis(in_features="'Database Connections/internal@[email protected]/SGID.DEMOGRAPHIC.UrbanAreasCensus2010' #;'Database Connections/internal@[email protected]/SGID.BOUNDARIES.Municipalities' #", out_feature_class= str(directory) + "/NetworkBuild_scratchData.gdb/UrbanAreasMuni_Union", join_attributes="ONLY_FID", cluster_tolerance="", gaps="GAPS") # dissolve this unioned data print "Dissolve the unioned layer" return arcpy.Dissolve_management(in_features=unioned_fc, out_feature_class= str(directory) + "/NetworkBuild_scratchData.gdb/UrbanAreasMuni_Union_Dissolved", dissolve_field="", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")
def RemoveFeatures(file_list, out_name):
    """Remove from the first feature class in file_list any features that
    overlap the other feature classes.

    Intersects file_list; when overlap exists, unions the first feature
    class with the overlap, selects features identical to the overlap,
    deletes them, and copies the remainder to out_name. With no overlap,
    the first feature class is copied unchanged.

    :param file_list: a list of feature classes where overlapping features
        will be removed from the first feature class
    :param out_name: a name to save the output, as a string
    :return: (remaining feature class name, overlap feature class name or
        None). The overlap feature class is used to remove from the
        proposed surface disturbance any previously-disturbed surface by
        Debit Tool 4 in ID HQT.
    """
    # Remove features that will be updated
    overlap = arcpy.Intersect_analysis(file_list, "overlap")
    count = int(arcpy.GetCount_management(overlap).getOutput(0))
    if count > 0:
        # Union the first provided feature class with the overlapping
        # features, then drop everything identical to the overlap.
        union = arcpy.Union_analysis([file_list[0], overlap], "union")
        selected = arcpy.MakeFeatureLayer_management(union, "union_lyr")
        arcpy.SelectLayerByLocation_management(selected, "ARE_IDENTICAL_TO",
                                               overlap)
        arcpy.DeleteFeatures_management(selected)
        # Save the output as the out_name
        remaining_features = arcpy.CopyFeatures_management(selected,
                                                           out_name)
        arcpy.Delete_management("union")
    else:
        # Update message
        arcpy.AddMessage("No overlapping features identified")
        # Return None for overlap
        overlap = None
        # Make a copy of the first provided feature class
        remaining_features = arcpy.CopyFeatures_management(file_list[0],
                                                           out_name)
    # arcpy.Delete_management("overlap")
    return remaining_features, overlap
def erase(input_features, erase_features, output_feature_class):
    """Erase erase_features from input_features without the Erase tool.

    Works by UNIONing the two inputs, SELECTing the unioned features that
    do not overlap the erase_features (via the "FID_<erase name>" field
    being -1), and then CLIPping the original input_features to include
    just those features. If either input is not a polygon it is BUFFERed
    (0.25 m) to a polygon prior to the union.
    """
    arcpy.AddMessage("BEGINNING ERASE PROCESS...")
    arcpy.Delete_management("in_memory")
    attr = erase_features.split('/')[-1]

    # BUG FIX: the original always unioned "in_memory/buffered", which only
    # exists when the input needed buffering, so polygon inputs failed;
    # track the actual polygon source in a variable instead.
    input_poly = input_features
    desc = arcpy.Describe(input_features)
    if desc.shapeType != 'Polygon':
        arcpy.AddMessage("BUFFERING INPUT FEATURES TO POLYGON...")
        arcpy.Buffer_analysis(in_features=input_features,
                              out_feature_class="in_memory/buffered",
                              buffer_distance_or_field="0.25 Meters")
        input_poly = "in_memory/buffered"
        arcpy.AddMessage("INPUT FEATURES BUFFERED")

    desc = arcpy.Describe(erase_features)
    if desc.shapeType != 'Polygon':
        arcpy.AddMessage("BUFFERING ERASE FEATURES TO POLYGON...")
        arcpy.Buffer_analysis(in_features=erase_features,
                              out_feature_class="in_memory/erase_buffered",
                              buffer_distance_or_field="0.25 Meters")
        erase_features = "in_memory/erase_buffered"
        attr = erase_features.split('/')[-1]
        arcpy.AddMessage("ERASE FEATURES BUFFERED")

    arcpy.AddMessage("UNIONING...")
    arcpy.Union_analysis(in_features=[input_poly, erase_features],
                         out_feature_class="in_memory/unioned")
    arcpy.AddMessage("UNIONED")
    # Unioned features whose erase-side FID is -1 came only from the input.
    arcpy.AddMessage("SELECTING...")
    arcpy.Select_analysis(in_features="in_memory/unioned",
                          out_feature_class="in_memory/selected",
                          where_clause="FID_" + attr + " = -1")
    arcpy.AddMessage("SELECTED")
    arcpy.AddMessage("CLIPPING...")
    arcpy.Clip_analysis(in_features=input_features,
                        clip_features="in_memory/selected",
                        out_feature_class=output_feature_class)
    arcpy.AddMessage("CLIPPED")
    arcpy.AddMessage("ERASED")
def CreatePreDefinedMapUnits(Map_Units, in_features, field_name=None):
    """
    Intersects the Map Units feature class with the in_features feature
    class. A field name may be provided from the in_features to include in
    the output feature class as a label for the map unit; the field will
    be updated with 'N/A' for any map units that don't intersect the
    in_features.
    :param Map_Units: the Map Units feature class
    :param in_features: a feature class to create pre-defined map units from
    :param field_name: the name of a field in the in_features attribute
    table to preserve in the output. Will be updated with 'N/A' if no
    overlap.
    :return: None
    """
    # Clip the provided features to the Map_Units footprint.
    clipped = "in_memory/clip"
    arcpy.Clip_analysis(in_features, Map_Units, clipped)

    # Union the clipped features with the Map Units layer, then overwrite
    # the existing Map_Units feature class with the result.
    unioned = arcpy.Union_analysis([Map_Units, clipped],
                                   "in_memory/Map_Units_Union")
    util.RenameFeatureClass(unioned, Map_Units)

    # Map units with no overlap get 'N/A' in the label field.
    if field_name:
        with arcpy.da.UpdateCursor(Map_Units, field_name) as cursor:
            for row in cursor:
                if row[0] is None or row[0] == "":
                    row[0] = "N/A"
                    cursor.updateRow(row)

    # Clean up
    arcpy.Delete_management("in_memory")
def dissolvePolygon(polygon):
    """Dissolve a possibly multipart polygon into single-part features.

    Self-unions first (ONLY_FID, no gaps) to reduce multiple features
    where possible, then dissolves.

    :return: scratch-GDB path of the dissolved feature class
    """
    scratch_prefix = os.path.join(arcpy.env.scratchGDB, "diss_")
    union_fc = scratch_prefix + "polyUnion"
    dissolved_fc = scratch_prefix + "polyDiss"
    arcpy.Union_analysis(polygon, union_fc, "ONLY_FID", "", "NO_GAPS")
    arcpy.Dissolve_management(union_fc, dissolved_fc, "", "",
                              "SINGLE_PART", "DISSOLVE_LINES")
    return dissolved_fc
def test_poi_corrcet(extracted_area, poi_region, out_path):
    # Correct the extracted POI area: POI regions crossed by the outline of
    # extracted_area are trimmed to their outside parts, merged back in,
    # dissolved, and written to out_path with water regions erased.
    #arcpy.Intersect_analysis([extracted_area,poi_region], 'temp.shp')
    arcpy.MakeFeatureLayer_management(poi_region, 'selected_area')
    # Regions that straddle the boundary of the extracted area.
    arcpy.SelectLayerByLocation_management('selected_area',
                                           'CROSSED_BY_THE_OUTLINE_OF',
                                           extracted_area)
    # Keep only the parts of those regions outside the extracted area.
    arcpy.Erase_analysis('selected_area', extracted_area, 'temp.shp')
    arcpy.Union_analysis([extracted_area, 'temp.shp'], 'temp_1.shp')
    # NOTE(review): the output name here has no .shp extension but is later
    # referenced as 'temp_2.shp' -- confirm arcpy appends the extension in
    # this workspace.
    arcpy.Dissolve_management('temp_1.shp', 'temp_2')
    if os.path.isfile(workspace_path + '\\' + out_path):
        print('File exists.Overwrite the original shp')
        arcpy.Delete_management(out_path)
    # Remove water regions from the dissolved result.
    # NOTE(review): '\\data\\waterregion.shp' is a root-relative path --
    # verify it resolves against the intended drive/workspace.
    arcpy.Erase_analysis('temp_2.shp', '\\data\\waterregion.shp', out_path)
    arcpy.Delete_management('temp.shp')
    arcpy.Delete_management('temp_1.shp')
    arcpy.Delete_management('temp_2.shp')
def procesar():
    # Overlay the cartography layer with the minutes (actas) layer, derive
    # the not-planned (NoPlanif) and remnant (Remanente) areas, optionally
    # remove slivers, explode remnants to single part, and summarize
    # progress. Operates on module-level names (cartografia, actas, cws,
    # scr, cfecha, eps, eco, eliminarSliver, ...).
    #---------------------------
    arcpy.env.overwriteOutput = True
    avance = etapa1(cartografia, cws + os.path.sep + actas, cfecha,
                    intersect)
    crearSalidas(avance, NoPlanif, Remanente)
    union = "union"
    funion = scr + os.path.sep + union
    # Both inputs must exist before the overlay.
    fcarto = cws + os.path.sep + cartografia
    if not arcpy.Exists(fcarto):
        imprimir("PROBLEMAS CARTO=" + cartografia + " no existe")
        return
    factas = cws + os.path.sep + actas
    if not arcpy.Exists(factas):
        imprimir("PROBLEMAS ACTAS=" + actas + " no existe")
        return
    arcpy.Union_analysis([[fcarto, 1], [factas, 2]], funion, "all", eps)
    agregarCampos(funion, cfecha)
    # FID == -1 marks union pieces present in only one input: missing from
    # actas -> not planned; missing from cartography -> remnant.
    noPlanificados = arcpy.AddFieldDelimiters(funion,
                                              "FID_" + actas) + " =-1"
    Remanentes = arcpy.AddFieldDelimiters(funion,
                                          "FID_" + cartografia) + "=-1"
    seleccion = "seleccion"
    arcpy.MakeFeatureLayer_management(funion, seleccion)
    arcpy.SelectLayerByAttribute_management(seleccion, "NEW_SELECTION",
                                            noPlanificados)
    arcpy.Append_management(seleccion, scr + os.path.sep + NoPlanif)
    arcpy.SelectLayerByAttribute_management(seleccion, "NEW_SELECTION",
                                            Remanentes)
    arcpy.Append_management(seleccion, scr + os.path.sep + Remanente,
                            "NO_TEST")
    # Optional sliver removal on both outputs.
    if eliminarSliver:
        if eco:
            imprimir("\nEliminando Slivers en " + NoPlanif + "," + Remanente)
        sliver(scr + os.path.sep + NoPlanif)
        sliver(scr + os.path.sep + Remanente)
    # Explode the remnants (multipart -> single part), replacing the
    # original feature class.
    if eco:
        imprimir("MULTIPART TO SINGLE PART...")
    arcpy.MultipartToSinglepart_management(Remanente, Remanente + "1")
    arcpy.Delete_management(Remanente)
    arcpy.Rename_management(Remanente + "1", Remanente)
    avanceDi = procesarAvance(actas)
    resumen(avanceDi)