def CleanFeatures(inFeats, outFeats):
    '''Repairs geometry, then explodes multipart polygons to prepare features for geoprocessing.

    Returns outFeats. Falls back to a plain copy if the explosion never succeeds.
    '''
    # Process: Repair Geometry
    arcpy.RepairGeometry_management(inFeats, "DELETE_NULL")
    # Polygon explosion sometimes fails inexplicably; give it 10 tries with a
    # geometry repair between attempts, then give up and just copy.
    # BUG FIX: the original set counter = 11 on success, which made the
    # post-loop "could not be resolved" branch fire on SUCCESS as well and
    # clobber outFeats with an un-exploded copy. Track success explicitly.
    exploded = False
    for attempt in range(1, 11):
        try:
            # Process: Multipart To Singlepart
            arcpy.MultipartToSinglepart_management(inFeats, outFeats)
            exploded = True
            break
        except Exception:
            arcpy.AddMessage("Polygon explosion failed.")
            # Process: Repair Geometry, then retry
            arcpy.AddMessage("Trying to repair geometry (try # %s)" % str(attempt))
            arcpy.RepairGeometry_management(inFeats, "DELETE_NULL")
    if not exploded:
        arcpy.AddMessage(
            "Polygon explosion problem could not be resolved. Copying features."
        )
        arcpy.CopyFeatures_management(inFeats, outFeats)
    return outFeats
def sbdd_checkGeometry(thePre, myFL):
    # Check whether CheckGeometry has been run for this layer; if errors are
    # found, repair them and log the outcome. Relies on module globals
    # theFD (feature dataset path) and myFile (open log file).
    arcpy.AddMessage("    Checking geometry: " + myFL)
    geoCnt = int(arcpy.GetCount_management(theFD + thePre + myFL).getOutput(0))
    # BUG FIX: rstrip("NATL_Broadband_Map") strips any trailing characters in
    # that character SET, not the literal suffix, and could eat extra
    # characters from the GDB path. Remove the exact suffix instead.
    suffix = "NATL_Broadband_Map"
    theFGDB = theFD[:-len(suffix)] if theFD.endswith(suffix) else theFD
    if arcpy.Exists(theFGDB + myFL):
        arcpy.Delete_management(theFGDB + myFL)
    if not arcpy.Exists(theFGDB + myFL):
        arcpy.CheckGeometry_management(theFD + thePre + myFL, theFGDB + myFL)
        myCnt = int(arcpy.GetCount_management(theFGDB + myFL).getOutput(0))
        if myCnt > 0:  # there is a geometry problem, we need to correct it
            arcpy.AddMessage("    FIXING geometry: " + myFL)
            arcpy.RepairGeometry_management(theFD + thePre + myFL)
            geoCnt = int(
                arcpy.GetCount_management(theFD + thePre + myFL).getOutput(0))
            myMsg = "     Geometry FAILED and fixed: Layer now has " + \
                    str(geoCnt) + " records. \n"
            myFile.write(myMsg)
        else:
            myMsg = "     Geometry PASSED: Layer has " + str(
                geoCnt) + " records. \n"
            myFile.write(myMsg)
        del myMsg, myCnt, geoCnt, theFGDB
    return ()
def load2SDEtol(newParcelsFinalSDE, path, county, schema, newParcelsFinal, startTime):
    """Recreate the SDE-tolerance parcel FC, load parcels into it, and repair geometry.

    Deletes any existing target, creates an empty FC from the schema template,
    appends newParcelsFinal with NO_TEST, repairs with DELETE_NULL, and prints
    elapsed minutes since startTime.
    """
    print('Creating FC with SDE tols and Load data')
    if arcpy.Exists(newParcelsFinalSDE):
        print('   Exists - Deleting', newParcelsFinalSDE)
        arcpy.Delete_management(newParcelsFinalSDE)
    print('Creating', newParcelsFinalSDE)
    target_gdb = path + '\\BasicParcels_2020.gdb'
    arcpy.CreateFeatureclass_management(target_gdb, county + '_Parcels_2020',
                                        "POLYGON", schema, "DISABLED",
                                        "DISABLED", schema)
    print('Appending to', newParcelsFinalSDE)
    arcpy.Append_management(newParcelsFinal, newParcelsFinalSDE, "NO_TEST")
    count = int(arcpy.GetCount_management(newParcelsFinalSDE).getOutput(0))
    print(str(count), 'features appended')
    # Repair Geometry - SDE Tol data
    print('Repairing Geometry -', newParcelsFinalSDE, 'with SDE Tolerance data')
    arcpy.RepairGeometry_management(newParcelsFinalSDE, "DELETE_NULL")
    # time.clock() kept for parity with the module's other timing code.
    elapsed_min = (time.clock() - startTime) / 60
    print('Time for operation:', str(round(elapsed_min, 1)), 'minutes')
def Insert_needed_arc(parcel_bankal, arc_bankal, Keshet, gdb):
    """Flag parcel boundary segments that have no matching arc as error code 7.

    Converts parcel polygons to lines, subtracts the dissolved arc geometry,
    drops leftovers that still coincide with parcel boundaries, and writes the
    survivors to the Errors_Line layer via Calc_field_value_error.
    """
    suffix = str(uuid.uuid4())[::5]  # short pseudo-random scratch-name suffix
    arc_diss = r'in_memory' + '\\' + 'arc__Diss' + suffix
    parce_to_line = r'in_memory' + '\\' + 'parcel_to_line' + suffix
    error_line = gdb + '\\' + 'Errors_Line'
    deleteErrorCode(error_line, ["7"])
    polygon_to_line(parcel_bankal.layer, parce_to_line)
    arcpy.Dissolve_management(arc_bankal.layer, arc_diss)
    # Single dissolved geometry covering every arc.
    arc_geom = [rec.shape for rec in arcpy.SearchCursor(arc_diss)][0]
    with arcpy.da.UpdateCursor(parce_to_line, ['SHAPE@']) as cursor:
        for row in cursor:
            row[0] = row[0].difference(arc_geom)
            cursor.updateRow(row)
    Del_Layer_on_ref(parce_to_line, Keshet)
    arcpy.RepairGeometry_management(parce_to_line)
    # delete lines that on polygone with holes
    feat_lyr = 'par_to_line_lyr' + str(uuid.uuid4())[::5]
    arcpy.MakeFeatureLayer_management(parce_to_line, feat_lyr)
    arcpy.SelectLayerByLocation_management(feat_lyr, "SHARE_A_LINE_SEGMENT_WITH",
                                           parcel_bankal.layer, "0.01 Meters",
                                           '', 'INVERT')
    arcpy.DeleteFeatures_management(feat_lyr)
    del_geom(parce_to_line)
    Calc_field_value_error(parce_to_line, error_line, "7", ErrorDictionary["7"])
def runDatasetChecks(dataset, table, qaRulesDataset):
    """Run the comma-separated QA rules against table; return overall success.

    "RepairGeometry" repairs up to 3 times until checkGeometry reports clean;
    "CheckGeometry" only counts errors. Failures are logged via gzSupport.
    """
    success = True
    for rule in qaRulesDataset.split(","):
        if rule == "RepairGeometry":
            gzSupport.addMessage("Running " + rule + " for " + table)
            attempts = 0
            remaining = 1
            while attempts < 3 and remaining > 0:
                arcpy.RepairGeometry_management(table)
                remaining = checkGeometry(table)
                attempts += 1
            if remaining > 0:
                err = str(remaining) + " Geometry Errors found after repairing " + str(attempts) + " times"
                gzSupport.addError(err)
                gzSupport.logProcessError(table, rule, rule, str(remaining), err)
                success = False
            else:
                gzSupport.addMessage("Geometry successfully repaired")
        elif rule == "CheckGeometry":
            gzSupport.addMessage("Running " + rule + " for " + table)
            remaining = checkGeometry(table)
            if remaining > 0:
                success = False
                gzSupport.logProcessError(table, rule, rule, str(remaining),
                                          "Geometry Errors Found")
    return success
def polygon_to_line(fc, layer_new):
    """Convert polygons in fc to polylines in layer_new (one line per feature).

    All vertices of all parts of a feature are chained into a single polyline;
    None separators (interior-ring markers) are skipped. Returns layer_new.
    """
    ws, fc_name = os.path.split(layer_new)
    s_r = arcpy.Describe(fc).spatialReference
    if arcpy.Exists(layer_new):
        arcpy.Delete_management(layer_new)
    line = arcpy.CreateFeatureclass_management(ws, fc_name, 'POLYLINE',
                                               spatial_reference=s_r)
    # FIX: close both da cursors deterministically — the original leaked them,
    # which can hold schema locks on the workspace after the call returns.
    with arcpy.da.SearchCursor(fc, "SHAPE@") as search_rows, \
            arcpy.da.InsertCursor(line, "SHAPE@") as insert_rows:
        for row in search_rows:
            points = [
                arcpy.Point(point.X, point.Y) for shape in row[0]
                for point in shape if point
            ]
            array = arcpy.Array(points)
            polyline = arcpy.Polyline(array)
            insert_rows.insertRow([polyline])
    arcpy.RepairGeometry_management(layer_new)
    return layer_new
def Feature_to_polygon(path, Out_put):
    """Copy path to Out_put and append its interior rings (holes) as polygons.

    Dissolves the input, walks the vertex stream (None marks a ring break),
    splits it on None via Split_List_by_value, and inserts every ring after
    the first as a new Israel-TM-Grid polygon. Returns Out_put.
    """
    path_diss = arcpy.Dissolve_management(path, r'in_memory\Dissolve_temp')
    vertices = []
    cursor = arcpy.SearchCursor(path_diss)
    for row in cursor:
        geom = row.shape
        for part in geom:
            for pt in part:
                # BUG FIX: the original tested str(type(pt)) against
                # "<type 'NoneType'>", which only matches on Python 2
                # (Python 3 prints "<class 'NoneType'>"), so ring breaks
                # were silently dropped there. Compare against None directly.
                if pt is not None:
                    vertices.append([pt.X, pt.Y])
                else:
                    vertices.append(None)  # ring separator
    del cursor
    poly = Split_List_by_value(vertices, None, True)
    feature = arcpy.CopyFeatures_management(path, Out_put)
    # FIX: open the InsertCursor once — the original re-created it for every
    # ring inside the loop.
    in_rows = arcpy.InsertCursor(feature)
    for ring in poly[1:]:
        array = arcpy.Array()
        for n in ring:
            array.add(arcpy.Point(n[0], n[1]))
        hole_poly = arcpy.Polygon(array, arcpy.SpatialReference("Israel TM Grid"))
        in_row = in_rows.newRow()
        in_row.Shape = hole_poly
        in_rows.insertRow(in_row)
    del in_rows
    arcpy.RepairGeometry_management(Out_put)
    return Out_put
def update_gfwid(self):
    """
    For each row, take the hash of the well known text representation of the geometry
    This will be used in the API to cache analysis results for geometries previously analyzed
    :return:
    """
    logging.debug('Starting vector_layer.update_gfwid for {0}'.format(
        self.name))
    if "gfwid" not in util.list_fields(self.source, self.gfw_env):
        arcpy.AddField_management(self.source, "gfwid", "TEXT",
                                  field_length=50, field_alias="GFW ID")
    # Null geometries would make the !Shape! hash below fail, so repair first.
    logging.debug('Starting repair geometry')
    arcpy.RepairGeometry_management(self.source, "DELETE_NULL")
    logging.debug('Starting to calculate gfwid')
    # Field-calculator helper: md5 of the shape's WKT.
    md5_code_block = ("import hashlib\n"
                      "def md5(shape):\n"
                      " hash = hashlib.md5()\n"
                      " hash.update(shape)\n"
                      " return hash.hexdigest()")
    arcpy.CalculateField_management(self.source, "gfwid", "md5(!Shape!.WKT)",
                                    "PYTHON_9.3", code_block=md5_code_block)
def CreateUnsuitableSlopes(in_dem, outUnsuitSlope):
    """Extract polygons of unsuitable (>10 % rise) slopes from a DEM.

    Projects in_dem (to the module-level spatial_ref), resamples, derives
    percent-rise slope, reclassifies 0-10 % -> 0 and steeper -> 1, keeps the
    1s, and converts them to the outUnsuitSlope polygon FC.
    """
    arcpy.ProjectRaster_management(in_dem, "project", spatial_ref)
    cell_w = arcpy.Describe("project").meanCellWidth
    cell_h = arcpy.Describe("project").meanCellHeight
    arcpy.Resample_management("project", "resample",
                              "{} {}".format(cell_w, cell_h), "CUBIC")
    arcpy.Delete_management("project")
    # Slope as integer percent rise; scratch rasters dropped as soon as possible.
    slope_pct = arcpy.sa.Slope("resample", "PERCENT_RISE")
    slope_int = arcpy.sa.Int(slope_pct)
    arcpy.Delete_management("resample")
    del slope_pct
    # Boolean reclass: 0 for 0-10 %, 1 for anything steeper.
    remap = arcpy.sa.RemapRange([[0, 10, 0],
                                 [10, int(slope_int.maximum), 1]])
    reclassified = arcpy.sa.Reclassify(slope_int, "Value", remap)
    unsuitable = arcpy.sa.ExtractByAttributes(reclassified, 'Value = 1')
    del reclassified
    arcpy.RasterToPolygon_conversion(unsuitable, outUnsuitSlope,
                                     raster_field="Value")
    del unsuitable
    # Clean up geometry
    arcpy.RepairGeometry_management(outUnsuitSlope)
def ShpFixGeometry(fileRoot, item, newTempDir, timeItem, type):
    """Check a shapefile's geometry and repair each feature class reported.

    Runs CheckGeometry into a scratch table, then for every distinct CLASS in
    that table joins the error rows to the layer and repairs it.
    (Uses the module-level `resolution`; `type` shadows the builtin but is
    part of the public signature and kept.)
    """
    arcpy.env.scratchWorkspace = newTempDir
    out_table = newTempDir + '\\' + 'outputTable'
    shp_path = (fileRoot + '\\' + 'shapefile' + '_' + str(resolution) + '\\' +
                type + '_' + timeItem + '_' + str(resolution) + '\\' + item)
    arcpy.CheckGeometry_management(shp_path, out_table)
    # Distinct feature classes named in the error table.
    fcs = []
    for row in arcpy.da.SearchCursor(out_table, ("CLASS")):
        if row[0] not in fcs:
            fcs.append(row[0])
    lyr = 'temporary_layer'
    tv = "cg_table_view"
    for fc in fcs:
        for scratch in (lyr, tv):
            if arcpy.Exists(scratch):
                arcpy.Delete_management(scratch)
        arcpy.MakeTableView_management(out_table, tv, ("\"CLASS\" = '%s'" % fc))
        arcpy.MakeFeatureLayer_management(fc, lyr)
        arcpy.AddJoin_management(lyr, arcpy.Describe(lyr).OIDFieldName, tv,
                                 "FEATURE_ID")
        arcpy.RemoveJoin_management(lyr, os.path.basename(out_table))
        arcpy.RepairGeometry_management(lyr)
def mosaic_files():
    """Walk theLocation for .shp files; dissolve each and append to mosaic_all.

    Uses module globals theLocation (search root) and thePGDB (workspace).
    Scratch FCs mydata / mydata_dis are removed before and after the run.
    """
    scratch_fcs = ["mydata", "mydata_dis"]
    for scratch in scratch_fcs:
        if arcpy.Exists(scratch):
            arcpy.Delete_management(scratch)
    # get the list of the shape files in that directory
    for dirname, dirnames, filenames in os.walk(theLocation):
        for filename in filenames:
            full_path = os.path.join(dirname, filename)
            if full_path[-4:] == ".shp":
                arcpy.AddMessage("thefile is: " + full_path)
                # copy in the source shapefile as the featureclass mydata
                arcpy.CopyFeatures_management(full_path, thePGDB + "/mydata")
                arcpy.RepairGeometry_management(thePGDB + "/mydata")
                # dissolve on the attribution fields, then append to mosaic_all
                arcpy.Dissolve_management("mydata", thePGDB + "/mydata_dis",
                                          ["mkg_name", "entity", "protocol"])
                arcpy.Append_management(["mydata_dis"], "mosaic_all")
    for scratch in scratch_fcs:
        if arcpy.Exists(scratch):
            arcpy.Delete_management(scratch)
    return ()
def Delete_polygons(fc, del_layer, Out_put):
    # Copy fc to Out_put, then remove from the copy everything covered by
    # del_layer: points touching the dissolved del_layer are deleted, polygon
    # geometries get the dissolved del_layer subtracted (difference), and
    # zero-area leftovers are purged. Returns Out_put.
    # NOTE: Python 2 code (print statement); uses the legacy (non-da) cursors.
    desc = arcpy.Describe(fc)
    fc = arcpy.CopyFeatures_management(fc, Out_put)
    if desc.ShapeType == u'Point':
        # Dissolve the delete layer into a single geometry for distance tests.
        del_layer_temp = 'in_memory' + '\\' + 'Temp'
        if arcpy.Exists(del_layer_temp):
            arcpy.Delete_management(del_layer_temp)
        arcpy.Dissolve_management(del_layer, del_layer_temp)
        if desc.ShapeType == u'Point':  # always true here (redundant re-check kept)
            # assumes the dissolve produced at least one row — raises IndexError
            # if del_layer is empty; TODO confirm callers guarantee that
            geom_del = [
                row.shape for row in arcpy.SearchCursor(del_layer_temp)
            ][0]
            Ucursor = arcpy.UpdateCursor(Out_put)
            for row in Ucursor:
                point_shape = row.shape.centroid
                # distanceTo == 0 means the point lies on/in the delete geometry.
                if geom_del.distanceTo(point_shape) == 0:
                    Ucursor.deleteRow(row)
            del Ucursor
        else:
            print "no points in the layer"
    else:
        # Polygon (or other non-point) input: erase by geometric difference.
        count_me = int(str(arcpy.GetCount_management(del_layer)))
        if count_me > 0:
            temp = 'in_memory' + '\\' + '_temp'
            if arcpy.Exists(temp):
                arcpy.Delete_management(temp)
            arcpy.Dissolve_management(del_layer, temp)
            geom_del = [row.shape for row in arcpy.SearchCursor(temp)][0]
            Ucursor = arcpy.UpdateCursor(Out_put)
            for row in Ucursor:
                geom_up = row.shape
                new_geom = geom_up.difference(geom_del)
                try:
                    row.shape = new_geom
                    Ucursor.updateRow(row)
                except:
                    # best-effort: rows whose new geometry can't be written
                    # are deliberately left unchanged
                    pass
            del Ucursor
            arcpy.Delete_management(temp)
        else:
            pass
    if desc.ShapeType == u'Point':
        pass
    else:
        # Remove polygons reduced to zero area by the difference above.
        up_cursor = arcpy.UpdateCursor(Out_put)
        for row in up_cursor:
            geom = row.shape
            if geom.area == 0:
                up_cursor.deleteRow(row)
        del up_cursor
    arcpy.RepairGeometry_management(Out_put)
    return Out_put
def Polygon_To_Line(fc, layer_new):
    # Convert polygons in fc to polylines in layer_new: vertices are collected
    # tagged with a running id (pid), then one polyline is inserted per id,
    # preserving input order. None vertices (interior-ring markers) are skipped,
    # so holes are not emitted as separate lines.
    # NOTE(review): pid appears to increment once per input feature, merging
    # all parts of a multipart polygon into one line — confirm against the
    # sibling polygon_to_line, which behaves that way.
    ws, fc_name = os.path.split(layer_new)
    s_r = arcpy.Describe(fc).spatialReference
    if arcpy.Exists(layer_new):
        arcpy.Delete_management(layer_new)
    line = arcpy.CreateFeatureclass_management(ws, fc_name, 'POLYLINE',
                                               spatial_reference=s_r)
    insert = arcpy.da.InsertCursor(line, "SHAPE@")
    Search = arcpy.da.SearchCursor(fc, "SHAPE@")
    Get_Line_list = []  # rows of [pid, X, Y]
    pid = 0
    for row in Search:
        for part in row[0]:
            for pt in part:
                if pt:
                    Get_Line_list.append([pid, pt.X, pt.Y])
                else:
                    pass  # ring separator — ignored
        pid += 1
    # Re-group the flat vertex list by pid and insert one polyline per group.
    for i in range(pid):
        points = [arcpy.Point(n[1], n[2]) for n in Get_Line_list if n[0] == i]
        array = arcpy.Array(points)
        polyline = arcpy.Polyline(array)
        insert.insertRow([polyline])
    arcpy.RepairGeometry_management(layer_new)
def repair(featureClass):
    """Repair the geometry of featureClass, logging failures instead of raising.

    Raises ExistsError if featureClass does not exist; repair failures are
    reported through config.run_error_message (best-effort behavior kept).
    """
    if not arcpy.Exists(featureClass):
        raise ExistsError
    try:
        arcpy.RepairGeometry_management(featureClass)
    # FIX: bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    # narrow to Exception while keeping the original best-effort logging.
    except Exception:
        config.run_error_message(featureClass, "Repair geometry failure")
    # Encourage release of arcpy COM/geodatabase handles.
    gc.collect()
def Reproj_Clip_Dissolve_Simplify_Polygon_arcgis(layer_path, Class_Col,
                                                 tempfolder, mask_layer):
    """Preprocess a user-provided polygon layer.

    Reprojects layer_path to mask_layer's spatial reference, clips it with
    mask_layer, dissolves on Class_Col, repairs geometry, and adds a spatial
    index.

    Parameters
    ----------
    layer_path : string
        Path to the input polygon layer, e.g. a landuse or soil layer.
    Class_Col : string
        Column in the input polygon holding the class ID (e.g. landuse ID or
        soil ID) used for the dissolve and for naming the temp files.
    tempfolder : string
        Folder where the intermediate and output shapefiles are written.
    mask_layer : string
        Polygon layer giving the watershed extent (clip boundary) and the
        target spatial reference.

    Notes
    -----
    # TODO: May be add some function to simplify the input polygons
    for example, remove the landuse type with small areas or
    merge small landuse polygon into the surrounding polygon

    Returns
    -------
    string
        Path to the dissolved output shapefile.
    """
    # FIX: the docstring previously described qgis parameters (processing,
    # context, Project_crs, trg_crs, Layer_clip) that this arcgis variant
    # does not take. Also hoist the repeatedly-rebuilt paths into locals.
    proj_shp = os.path.join(tempfolder, Class_Col + "_proj.shp")
    clip_shp = os.path.join(tempfolder, Class_Col + "_clip.shp")
    diss_shp = os.path.join(tempfolder, Class_Col + "_dislve.shp")
    arcpy.Project_management(
        layer_path,
        proj_shp,
        arcpy.Describe(mask_layer).spatialReference,
    )
    arcpy.Clip_analysis(proj_shp, mask_layer, clip_shp)
    arcpy.Dissolve_management(clip_shp, diss_shp, [Class_Col])
    arcpy.RepairGeometry_management(diss_shp)
    arcpy.AddSpatialIndex_management(diss_shp)
    return diss_shp
def Feature_to_polygon(path, Out_put):
    # Copy path to Out_put and append its interior rings (holes) as new
    # polygons. The dissolved input's vertex stream is flattened with None as
    # a ring separator, split on None, and every ring after the first is
    # inserted as an Israel-TM-Grid polygon. Returns Out_put.
    dif_name = str(uuid.uuid4())[::5]  # short pseudo-random scratch suffix
    path_diss = arcpy.Dissolve_management(
        path, r'in_memory\Dissolve_temp' + dif_name)

    def Split_List_by_value(list1, value, del_value=False):
        # Split list1 into sublists at each occurrence of `value`.
        # Segments after the first start WITH the separator element; when
        # del_value is set, that leading separator is removed below.
        list_index = []
        for n, val in enumerate(list1):
            if val == value:
                list_index.append(n)
        list_index.append(len(list1))
        list_val = []
        num = 0
        for i in list_index:
            list_val.append(list1[num:i])
            num = +i  # unary plus: equivalent to num = i
        if del_value:
            for i in list_val:
                # NOTE(review): removes `value` while iterating `i`; safe only
                # because each segment carries at most one leading None.
                for n in i:
                    if n is None:
                        i.remove(value)
        return list_val

    polygon = []
    cursor = arcpy.SearchCursor(path_diss)
    for row in cursor:
        geom = row.shape
        for part in geom:
            num = 0  # unused; kept as-is
            for pt in part:
                if pt:
                    polygon.append([pt.X, pt.Y])
                else:
                    polygon.append(None)  # ring separator
    poly = Split_List_by_value(polygon, None, True)
    feature = arcpy.CopyFeatures_management(path, Out_put)
    # poly[0] is the exterior outline; every later segment is a hole ring.
    for i in poly[1:]:
        array = arcpy.Array()
        for n in i:
            array.add(arcpy.Point(n[0], n[1]))
        polygon = arcpy.Polygon(array, arcpy.SpatialReference("Israel TM Grid"))
        in_rows = arcpy.InsertCursor(feature)
        in_row = in_rows.newRow()
        in_row.Shape = polygon
        in_rows.insertRow(in_row)
    arcpy.RepairGeometry_management(Out_put)
    return Out_put
def check_and_repair(in_file):
    """Run CheckGeometry on in_file and repair it if any problems are found."""
    # Check and repair geometry
    print("   Checking and repairing " + in_file)
    out_table = "CheckGeom"
    arcpy.CheckGeometry_management(in_file, out_table)
    # BUG FIX: GetCount's Result indexed with [0] yields a *string*; comparing
    # that string to 0 is always True on Py2 and a TypeError on Py3. Convert
    # to int (consistent with the other GetCount calls in this module).
    num_errors = int(arcpy.GetCount_management(out_table).getOutput(0))
    print("   {} geometry problems found, see {} for details.".format(
        num_errors, out_table))
    if num_errors > 0:
        arcpy.RepairGeometry_management(in_file)
        print("   Finished repairing geometries ")
    return
def flatten_poly_fc(in_layer_path, out_gdb_path, query=None):
    '''Check for overlaps and flatten, super region poly knockoff,
       POLYID joins back to original data'''
    # Pipeline: Union the layer with itself (shatters overlaps) -> explode to
    # singlepart -> assign a POLYID shared by geometrically identical pieces
    # (keyed on rounded centroid X/Y + area) -> dissolve on POLYID -> build a
    # POLYID<->FID frequency lookup table.
    # Returns [flattened_fc, polyid_lookup_fc]; on any error, logs and
    # implicitly returns None.
    try:
        log("Flattening {} due to overlaps".format(in_layer_path))
        in_layer_nm = os.path.splitext(os.path.basename(in_layer_path))[0]
        shattered_fc = os.path.join(out_gdb_path, in_layer_nm + "_shattered")
        if query:
            # Apply the caller's filter before the self-union.
            log("We have a query: {}".format(query))
            f_lyr = "f_lyr"
            arcpy.MakeFeatureLayer_management(in_layer_path, f_lyr,
                                              where_clause=query)
            arcpy.Union_analysis(f_lyr, shattered_fc, "ALL", "", "GAPS")
            log(arcpy.GetMessages())
        else:
            arcpy.Union_analysis(in_layer_path, shattered_fc, "ALL", "", "GAPS"); log(arcpy.GetMessages())
        shattered_singlepart_fc = os.path.join(
            out_gdb_path, in_layer_nm + "_shattered_singlepart")
        # this
        arcpy.MultipartToSinglepart_management(shattered_fc, shattered_singlepart_fc); log(arcpy.GetMessages())
        polyid_field_nm = "POLYID"
        arcpy.AddField_management(shattered_singlepart_fc, polyid_field_nm, "LONG"); log(arcpy.GetMessages())
        # Identical shards (same rounded centroid and area) share one POLYID.
        polyid_dict = {}
        polyid_value = 1
        decimal_tolerance = 2  # rounding precision for the identity key
        field_list = ["OID@", "SHAPE@XY", "SHAPE@AREA", polyid_field_nm]
        update_rows = arcpy.da.UpdateCursor(shattered_singlepart_fc, field_list)
        for row in update_rows:
            axyvalue = (round(row[1][0], decimal_tolerance),
                        round(row[1][1], decimal_tolerance),
                        round(row[2], decimal_tolerance))
            if axyvalue not in polyid_dict:
                polyid_dict[axyvalue] = polyid_value
                polyid_value = polyid_value + 1
            row[3] = polyid_dict[axyvalue]
            update_rows.updateRow(row)
        del row, update_rows
        del polyid_dict
        final_fc = os.path.join(out_gdb_path, in_layer_nm + "_flattened")
        try:
            arcpy.Dissolve_management(shattered_singlepart_fc, final_fc,
                                      polyid_field_nm, "", "SINGLE_PART"); log(arcpy.GetMessages())
        except:
            # Dissolve occasionally chokes on bad geometry; repair and retry once.
            log("Failed initial Dissolve, repairing geometry and trying again")
            arcpy.RepairGeometry_management(shattered_singlepart_fc); log(arcpy.GetMessages())
            arcpy.Dissolve_management(shattered_singlepart_fc, final_fc,
                                      polyid_field_nm, "",
                                      "SINGLE_PART"); log(arcpy.GetMessages())
        log("Creating POLYID lookup table")
        polyid_fc = os.path.join(out_gdb_path, in_layer_nm + "_polyid")
        # Union names its provenance field FID_<something>; find it by substring.
        fid_field = next(i.name
                         for i in arcpy.ListFields(shattered_singlepart_fc)
                         if "FID" in i.name)
        arcpy.Frequency_analysis(shattered_singlepart_fc, polyid_fc, "POLYID;{}".format(fid_field), ""); log(arcpy.GetMessages())
        arcpy.AddField_management(polyid_fc, "flattened_POLYID", "LONG"); log(arcpy.GetMessages())
        arcpy.CalculateField_management(polyid_fc, "flattened_POLYID", "!POLYID!", "PYTHON"); log(arcpy.GetMessages())
        arcpy.DeleteField_management(polyid_fc, "FREQUENCY;POLYID"); log(arcpy.GetMessages())
        log("Successful finish to flattening routine")
        return [final_fc, polyid_fc]
    except Exception as e:
        # Best-effort: errors are logged, not re-raised (caller gets None).
        log("EXCEPTION hit: {}".format(e))
def repair_geo(ft_list, ml):
    """Repair geometry for every feature class name in ft_list inside workspace ml.

    Exits the interpreter (quit(), original behavior kept) if the workspace
    cannot be set.
    """
    try:
        arcpy.env.workspace = ml
    # FIX: narrowed the bare `except:` so Ctrl-C / SystemExit still propagate.
    except Exception:
        print('Unable to connect to workspace \n')
        quit()
    for ft in ft_list:
        ft = ml + '\\' + ft
        print('Repairing feature: {}'.format(ft) + '\n')
        arcpy.RepairGeometry_management(ft)
    print('Done Repairing Parcel and Index' + '\n')
    # Pause before returning — presumably to let SDE locks settle; TODO confirm.
    time.sleep(30)
def removeNonConcurring(self):
    """Keep only disturbance polygons whose CELL_ID shows inventory concurrence.

    Selects rows from self.temp_overlay where CELL_ID is populated into
    self.output, then repairs the result with DELETE_NULL.
    """
    pp = self.ProgressPrinter.newProcess(inspect.stack()[0][3], 1, 1).start()
    # Removing disturbance polygons where inventory doesnt spatially concur
    # that a disturbance took place...
    cell_id_field = arcpy.AddFieldDelimiters(self.output, "CELL_ID")
    nonConcurrence_whereClause = "{} <> ' '".format(cell_id_field)
    arcpy.Select_analysis(self.temp_overlay, self.output,
                          nonConcurrence_whereClause)
    # Repairing Geometry...
    arcpy.RepairGeometry_management(self.output, "DELETE_NULL")
    pp.finish()
def sbdd_ProcessCAI(myFD, myFL):
    """Copy the CAI layer out of myFD, repair it, and export it to shapefile.

    Skips the copy/repair when the source layer is empty; the export helper
    is invoked either way.
    """
    arcpy.AddMessage("  Begining CAI Processing")
    if arcpy.Exists("CAI"):
        arcpy.Delete_management("CAI")
    source = myFD + "/" + myFL
    if int(arcpy.GetCount_management(source).getOutput(0)) > 0:
        arcpy.Copy_management(source, "CAI")
        arcpy.AddMessage("    Repairing geometry ...")
        arcpy.RepairGeometry_management("CAI")
    else:
        arcpy.AddMessage("    Nothing to do ...")
    sbdd_ExportToShape("CAI")
    del myFD, myFL
    return ()
def main(): logFileName = "T:/getIntRds.log" logFile = file(logFileName, "w") tsaNums = sys.argv[1] root = sys.argv[2] year = str(sys.argv[3]) inRds = sys.argv[4] tsas = [] fileListToArray(tsas, tsaNums) arcpy.env.overwriteOutput = True # get path of input FC list = inRds.split("\\")[0:-1] delim = "\\" rdsGDB = delim.join(list) tempGDB = "t:\\tempRDS12345.gdb" gdbName = "tempRDS12345.gdb" tempRDS = "t:\\tempRDS12345.gdb\\tempRDs" src = tempGDB + "\\" + inRds.split("\\")[-1] srcLyr = "srcLyr" if arcpy.Exists(tempGDB): arcpy.Delete_management(tempGDB) # Copy file geodatabase to T drive if not arcpy.Exists(tempGDB): arcpy.Copy_management(rdsGDB, tempGDB) arcpy.MakeFeatureLayer_management(src, srcLyr) for tsa in tsas: rootTSAgdb = root + "\\" + tsa + "\\" + tsaNum + "_" + year + ".gdb" rootTSAgdbRds = rootTSAgdb + "\\src\\IntegratedRoadsBuffers" bnd = rootTSAgdb + "\\wrk\\bnd" # Deleting existing FC delFC(rootTSAgdbRds) arcpy.SelectLayerByLocation_management(srcLyr, "INTERSECT", bnd) arcpy.CopyFeatures_management(srcLyr, tempRDS) arcpy.RepairGeometry_management(tempRDS) #Clip and copy fc to Units directory arcpy.AddMessage("Clipping...") print "output is " + wrk arcpy.Clip_analysis(tempRDS, bnd, rootTSAgdbRds) print "Elapsed time: %d seconds" % (time.clock()) logFile.close()
def repair_geometry(input_file):
    """Repair geometry errors and report the number of errors if any.

    Returns [errors_before, errors_after] as strings (str() of the GetCount
    Result object, matching the original contract).
    """
    def _error_count():
        # CheckGeometry writes one row per problem; count rows, drop the table.
        check_tbl = arcpy.CheckGeometry_management(input_file)
        count = arcpy.GetCount_management(check_tbl)
        arcpy.Delete_management(check_tbl)
        return str(count)

    errors_before = _error_count()
    arcpy.RepairGeometry_management(input_file)
    errors_after = _error_count()
    return [errors_before, errors_after]
def Polygon_To_Line_holes(Polygon, New_Line):
    # Convert polygons (including their interior rings / holes) to polylines.
    # Pass 1: the exterior ring of each feature is inserted as a polyline;
    # once a None separator is seen (conti flips False), remaining vertices
    # are accumulated in New_Lines for pass 2. Pass 2: the accumulated hole
    # vertices are split on None and each ring inserted as its own line.
    Multi_to_single(Polygon)  # explode multiparts in place first
    ws, fc_name = os.path.split(New_Line)
    s_r = arcpy.Describe(Polygon).spatialReference
    arcpy.CreateFeatureclass_management(ws, fc_name, 'POLYLINE',
                                        spatial_reference=s_r)
    ins_cursor = arcpy.da.InsertCursor(New_Line, ["SHAPE@"])
    New_Lines = []  # flattened hole vertices; None marks ring boundaries
    with arcpy.da.SearchCursor(Polygon, ['SHAPE@', 'OBJECTID']) as cursor:
        for row in cursor:
            geom = row[0]
            array_temp = arcpy.Array()
            conti = True  # still on the exterior ring
            for part in geom:
                for pt in part:
                    if pt:
                        if conti:
                            array_temp.append(pt)
                        else:
                            New_Lines.append(arcpy.Point(pt.X, pt.Y))
                    else:
                        # First None ends the exterior ring; holes follow.
                        New_Lines.append(None)
                        conti = False
            polyline = arcpy.Polyline(array_temp, s_r)
            ins_cursor.insertRow([polyline])
    del cursor
    # Insert rings in polygon, and make them lines
    # NOTE(review): a single row buffer from newRow() is reused for every
    # insert, and these Polylines get no spatial reference — presumably
    # inherited from the feature class; confirm.
    InsertCursor = arcpy.InsertCursor(New_Line)
    insert = InsertCursor.newRow()
    New_Lines = Split_List_by_value(New_Lines, None, True)
    for row in New_Lines:
        if row:
            row = arcpy.Array(row)
            line = arcpy.Polyline(row)
            insert.shape = line
            InsertCursor.insertRow(insert)
    arcpy.RepairGeometry_management(New_Line)
def repairGeom(featureClass): try: print ' Repairing Geometry...' in_features = featureClass delete_null = 'DELETE_NULL' arcpy.RepairGeometry_management (in_features, delete_null) print ' Repair complete!' except Exception as e: print 'There was an ERROR with repairGeom:' print str(e) success = False
def process(sources):
    """
    Download sources and process the data to the specified output layer.
    :param sources: Source JSON file or directory of files.
    :param output: Output .gdb layer
    """
    os.environ["DOWNLOAD_CACHE"] = "download_cache"
    # Fresh scratch workspace for this run.
    temp_gdb = os.path.join(tempfile.gettempdir(), "temp_national_parks.gdb")
    if arcpy.Exists(temp_gdb):
        arcpy.Delete_management(temp_gdb)
    arcplus.create_gdb(temp_gdb)
    # Download, extract, and load every configured source into the scratch gdb.
    temp_layers = []
    for path in utils.get_files(sources):
        fc_name = os.path.splitext(os.path.split(path)[1])[0]
        source = utils.read_json(path)
        fp = utils.download(source['url'])
        src = utils.extract(fp, source['filetype'], source['file'],
                            source.get("layer", None))
        temp = arcplus.transform_properties(src,
                                            os.path.join(temp_gdb, fc_name),
                                            source['properties'])
        arcpy.RepairGeometry_management(temp)
        temp_layers.append(temp)
    # Project everything to BC Albers, then merge into one layer.
    arcplus.project_all(temp_gdb, 3005)
    merged_fc = os.path.join(temp_gdb, "national_parks")
    arcpy.Merge_management(temp_layers, merged_fc)
    # repair that geometry too, just in case
    arcpy.RepairGeometry_management(merged_fc)
def clean_source_shps(self, shp_list):
    """
    After the data has been unzipped, repair geometry, remove fields,
    and add date and orig_fname
    :param shp_list: list of cleaned shapefiles ready to be appended to final output
    :return: list of single-part shapefile paths with normalized fields
    """
    cleaned_shp_list = []
    for shp in shp_list:
        shp_name = os.path.basename(shp).replace('-', '_')
        single_part_path = os.path.join(
            os.path.dirname(shp),
            shp_name.replace('.shp', '') + '_singlepart.shp')
        # unclear why this extra garbage is added to the filename, but it is
        # NOTE(review): shp[shp.index('2018')+5 : +7] assumes '2018' occurs in
        # the path followed by a separator then a 2-digit month — confirm the
        # upstream naming convention before touching this.
        if 'desmatamento' in shp and int(shp[shp.index('2018') + 5:shp.index('2018') + 7]) <= 7:
            shp = os.path.splitext(shp)[0] + '_01102018.shp'
            # sometimes the zip files have dashes after the month, sometimes
            # underscores; who can ever know why — retry with a dash.
            if not os.path.exists(shp):
                idx_2018 = shp.index('2018') + 4
                shp = shp[:idx_2018] + '-' + shp[idx_2018 + 1:]
        logging.info('Starting multipart to singlepart for ' + shp_name)
        arcpy.MultipartToSinglepart_management(shp, single_part_path)
        arcpy.RepairGeometry_management(single_part_path, "DELETE_NULL")
        # Must have one field before we delete all the other ones. So says arcgis anyway
        orig_oid_field = 'orig_oid'
        util.add_field_and_calculate(single_part_path, orig_oid_field, 'Text',
                                     '255', '!FID!', self.gfw_env)
        self.remove_all_fields_except(single_part_path,
                                      keep_field_list=[orig_oid_field])
        # Stamp the acquisition date (parsed from the filename) and provenance.
        imazon_date_str = self.get_date_from_filename(os.path.basename(shp))
        util.add_field_and_calculate(single_part_path, 'Date', 'DATE', "",
                                     imazon_date_str, self.gfw_env)
        util.add_field_and_calculate(single_part_path, 'date_alias', 'DATE',
                                     "", imazon_date_str, self.gfw_env)
        util.add_field_and_calculate(single_part_path, 'data_type', 'TEXT',
                                     "255", self.data_type(shp), self.gfw_env)
        util.add_field_and_calculate(single_part_path, 'orig_fname', 'TEXT',
                                     "255", shp_name, self.gfw_env)
        self.calculate_area_ha_eckert_iv(single_part_path)
        cleaned_shp_list.append(single_part_path)
    return cleaned_shp_list
def repairGeometry(newParcels, startTime):
    """Repair newParcels (DELETE_NULL), printing elapsed minutes before and after."""
    def _print_elapsed():
        # time.clock() kept to match the module's other timers.
        minutes = (time.clock() - startTime) / 60
        print('Time for operation:', str(round(minutes, 1)), 'minutes')

    _print_elapsed()
    print('Repairing Geometry - Orig data')
    arcpy.RepairGeometry_management(newParcels, "DELETE_NULL")
    _print_elapsed()
def clean_source_shps(self, shp_list):
    """
    After the data has been unzipped, repair geometry, remove fields, and add
    date and orig_fname
    :param shp_list: list of cleaned shapefiles ready to be appended to final output
    :return: list of single-part shapefile paths
    """
    cleaned_shp_list = []
    for shp in shp_list:
        shp_name = os.path.basename(shp).replace('-', '_')
        base_no_ext = shp_name.replace('.shp', '')
        single_part_path = os.path.join(os.path.dirname(shp),
                                        base_no_ext + '_singlepart.shp')
        logging.info('Starting multipart to singlepart for ' + shp_name)
        arcpy.MultipartToSinglepart_management(shp, single_part_path)
        arcpy.RepairGeometry_management(single_part_path, "DELETE_NULL")
        # Must have one field before we delete all the other ones. So says arcgis anyway
        orig_oid_field = 'orig_oid'
        util.add_field_and_calculate(single_part_path, orig_oid_field, 'Text',
                                     '255', '!FID!', self.gfw_env)
        self.remove_all_fields_except(single_part_path,
                                      keep_field_list=[orig_oid_field])
        # Stamp date (parsed from the filename) and provenance fields.
        imazon_date_str = self.get_date_from_filename(os.path.basename(shp))
        for fld_name, fld_type, fld_len, fld_val in (
                ('Date', 'DATE', "", imazon_date_str),
                ('date_alias', 'DATE', "", imazon_date_str),
                ('data_type', 'TEXT', "255", self.data_type(shp)),
                ('orig_fname', 'TEXT', "255", shp_name)):
            util.add_field_and_calculate(single_part_path, fld_name, fld_type,
                                         fld_len, fld_val, self.gfw_env)
        self.calculate_area_ha_eckert_iv(single_part_path)
        cleaned_shp_list.append(single_part_path)
    return cleaned_shp_list
def updateOverrides(fcs):
    """ loops through all feature classes and applies overrides to the geometry"""
    # For each FC: apply representation overrides (when the FC has
    # representations), then repair its geometry. Returns fcs unchanged.
    for fc in fcs:
        arcpy.env.overwriteOutput = True
        arcpy.env.addOutputsToMap = False
        desc = arcpy.Describe(fc)
        rep_name = ""
        if hasattr(desc, "representations"):
            reps = desc.representations
            for rep in reps:
                rep_name = rep.name
                arcpy.AddMessage("Applying Rep Overrides for " + str(fc))
                # NOTE(review): with no representations this loop never runs
                # and no override is applied — confirm that is intended.
                arcpy.UpdateOverride_cartography(fc, rep_name, "BOTH")
        arcpy.AddMessage("Repairing Geometry for " + str(fc))
        arcpy.RepairGeometry_management(fc)
    return fcs