def repair_geometry(input_file):
    """Repair geometry errors and report the error counts before and after.

    Parameters:
        input_file: path to the feature class to check and repair.

    Returns:
        A two-element list of strings: the number of geometry errors found
        before RepairGeometry ran, and the number found afterwards.
    """
    def _error_count(fc):
        # CheckGeometry writes one row per problem, so the row count of the
        # output table is the error count. Delete the count table afterwards
        # so repeated runs do not collide with a stale table.
        check = arcpy.CheckGeometry_management(fc)
        count = str(arcpy.GetCount_management(check))
        arcpy.Delete_management(check)
        return count

    # FIX: the original duplicated the check/count/delete sequence verbatim
    # (and misspelled the second local as "secound_count"); the helper keeps
    # the two passes identical by construction.
    first_count = _error_count(input_file)
    arcpy.RepairGeometry_management(input_file)  # Repair Geometry
    second_count = _error_count(input_file)
    return [first_count, second_count]
def sbdd_checkGeometry(thePre, myFL):
    """Run Check Geometry on a layer and repair it when errors are found.

    Relies on module-level globals: theFD (feature dataset path) and
    myFile (an open log file the summary line is written to).

    Parameters:
        thePre: layer-name prefix inside the feature dataset.
        myFL: layer name to check.
    """
    #check to see if check geometry has been run, if has not, run it
    arcpy.AddMessage(" Checking geometry: " + myFL)
    #arcpy.AddMessage(" theFD + thePre + myFL: " + theFD + thePre + myFL)
    geoCnt = int(arcpy.GetCount_management(theFD + thePre + myFL).getOutput(0))
    # FIX: str.rstrip() strips a trailing *character set*, not a literal
    # suffix -- rstrip("NATL_Broadband_Map") could eat extra trailing
    # characters from the path. Remove the exact suffix instead.
    if theFD.endswith("NATL_Broadband_Map"):
        theFGDB = theFD[:-len("NATL_Broadband_Map")]
    else:
        theFGDB = theFD
    # Drop any stale check-geometry table before re-running the check.
    if arcpy.Exists(theFGDB + myFL):
        arcpy.Delete_management(theFGDB + myFL)
    if not arcpy.Exists(theFGDB + myFL):
        arcpy.CheckGeometry_management(theFD + thePre + myFL, theFGDB + myFL)
    myCnt = int(arcpy.GetCount_management(theFGDB + myFL).getOutput(0))
    if myCnt > 0:
        #there is a geometry problem, we need to correct it
        arcpy.AddMessage(" FIXING geometry: " + myFL)
        arcpy.RepairGeometry_management(theFD + thePre + myFL)
        geoCnt = int(
            arcpy.GetCount_management(theFD + thePre + myFL).getOutput(0))
        myMsg = " Geometry FAILED and fixed: Layer now has " + \
            str(geoCnt) + " records. \n"
        myFile.write(myMsg)
    else:
        myMsg = " Geometry PASSED: Layer has " + str(geoCnt) + " records. \n"
        myFile.write(myMsg)
    del myMsg, myCnt, geoCnt, theFGDB
    return ()
def ShpFixGeometry(fileRoot, item, newTempDir, timeItem, type):
    """Check a shapefile's geometry and repair each feature class that the
    check table reports problems for.

    Relies on the module-level global ``resolution`` and the ``os`` module.
    NOTE: the parameter name ``type`` shadows the builtin; it is kept
    unchanged for interface compatibility.
    """
    # Check geometry
    arcpy.env.scratchWorkspace = newTempDir
    out_table = newTempDir + '\\' + 'outputTable'
    arcpy.CheckGeometry_management(
        fileRoot + '\\' + 'shapefile' + '_' + str(resolution) + '\\' + type +
        '_' + timeItem + '_' + str(resolution) + '\\' + item, out_table)
    # Repair geometry: collect the distinct CLASS values (the feature
    # classes with reported problems) from the check table.
    fcs = []
    # FIX: use the cursor as a context manager so its table lock is
    # released promptly (the original cursor was never closed).
    with arcpy.da.SearchCursor(out_table, ("CLASS")) as rows:
        for row in rows:
            if not row[0] in fcs:
                fcs.append(row[0])
    for fc in fcs:
        # print("Processing " + fc)
        lyr = 'temporary_layer'
        if arcpy.Exists(lyr):
            arcpy.Delete_management(lyr)
        tv = "cg_table_view"
        if arcpy.Exists(tv):
            arcpy.Delete_management(tv)
        # View only this class's error rows, join them onto the feature
        # layer by OID, then repair the layer.
        arcpy.MakeTableView_management(out_table, tv,
                                       ("\"CLASS\" = '%s'" % fc))
        arcpy.MakeFeatureLayer_management(fc, lyr)
        arcpy.AddJoin_management(lyr, arcpy.Describe(lyr).OIDFieldName, tv,
                                 "FEATURE_ID")
        arcpy.RemoveJoin_management(lyr, os.path.basename(out_table))
        arcpy.RepairGeometry_management(lyr)
def check_and_repair(in_file):
    """Check a feature class for geometry problems and repair it when any
    are found. The check results are written to the "CheckGeom" table.

    Parameters:
        in_file: path to the feature class to check.
    """
    # Check and repair geometry
    print("  Checking and repairing " + in_file)
    out_table = "CheckGeom"
    arcpy.CheckGeometry_management(in_file, out_table)
    # FIX: GetCount's indexed output is a *string*; the original compared
    # it with `> 0`, which raises TypeError on Python 3 (and is always
    # true on Python 2). Cast to int for a numeric comparison.
    num_errors = int(arcpy.GetCount_management(out_table)[0])
    print("  {} geometry problems found, see {} for details.".format(
        num_errors, out_table))
    if num_errors > 0:
        arcpy.RepairGeometry_management(in_file)
        print("  Finished repairing geometries ")
    return
def check_geometry(self):
    """Run CheckGeometry on self.inputFL, writing geometry_checks.dbf into
    self.outputLoc, and append an IUCNresult describing the outcome to
    self.resultList (always, via the finally clause)."""
    Printboth('Checking geometry...')
    result = IUCNresult('-----CHECK GEOMETRY-----')
    try:
        arcpy.CheckGeometry_management(
            self.inputFL, self.outputLoc + os.sep + 'geometry_checks.dbf')
    # FIX: the original bare `except:` also swallowed KeyboardInterrupt /
    # SystemExit; catch the geoprocessing failure specifically.
    except arcpy.ExecuteError:
        Printboth(arcpy.GetMessage(2))
        result.log('\t' + arcpy.GetMessage(2))
    else:
        result.log('\tPlease check \'geometry_checks.dbf\' in the same folder for detailed information')
    finally:
        self.resultList.append(result)
def dissolve1(xzkpath, dissolvepath, outputxzkpath1):
    """Dissolve polygons with identical attributes and consistent bsm, so
    that only polygons whose attributes changed end up outside the group
    number shared with their neighbouring polygons.

    Parameters:
        xzkpath: input feature class.
        dissolvepath: intended dissolve output path.
        outputxzkpath1: spatial-join output feature class.
    """
    # Check geometry first; results land in the named scratch table.
    arcpy.CheckGeometry_management(xzkpath, "dissolvepath_7_CheckGeometry")
    searchFields = ['cskbsm', 'dlbm', 'zldwdm', 'gdlx', 'tbxhdm', 'gdzzsxdm']
    arcpy.AddMessage("融合字段:" + ';'.join(searchFields))
    # NOTE(review): the two statements below reference `dissolvepath1`,
    # which is not a parameter (the parameter is `dissolvepath`). Either a
    # module-level global or a typo -- confirm against the rest of the file.
    arcpy.Dissolve_management(xzkpath, dissolvepath1,
                              "ZLDWDM;DLBM;GDLX;TBXHDM;GDZZSXDM;cskbsm", "#",
                              "SINGLE_PART", "UNSPLIT_LINES")
    # Drop the lower-case search fields from the dissolve result before the
    # join re-populates the attribute set.
    arcpyDeal.deleteFields(dissolvepath1, searchFields)
    # One-to-one spatial join; the field_mapping string pulls each listed
    # attribute from the original xzkpath features (First rule), and
    # concatenates TSTYBM values into tstybmlist (Join rule).
    arcpy.SpatialJoin_analysis(
        dissolvepath1, xzkpath, outputxzkpath1,
        join_operation="JOIN_ONE_TO_ONE", join_type="KEEP_ALL",
        match_option="CONTAINS",
        field_mapping=""" DLBM "DLBM" true true false 5 Text 0 0 ,First,#,%s,DLBM,-1,-1;
ZLDWDM "ZLDWDM" true true false 19 Text 0 0 ,First,#,%s,ZLDWDM,-1,-1;
GDLX "GDLX" true true false 2 Text 0 0 ,First,#,%s,GDLX,-1,-1;
TBXHDM "TBXHDM" true true false 4 Text 0 0 ,First,#,%s,TBXHDM,-1,-1;
GDZZSXDM "GDZZSXDM" true true false 8 Text 0 0 ,First,#,%s,GDZZSXDM,-1,-1;
LINKTBS "LINKTBS" true true false 254 Text 0 0 ,First,#,%s,LINKTBS,-1,-1;
TBXHMC "TBXHMC" true true false 20 Text 0 0 ,First,#,%s,TBXHMC,-1,-1;
GDZZSXMC "GDZZSXMC" true true false 20 Text 0 0 ,First,#,%s,GDZZSXMC,-1,-1;
CZCSXM "CZCSXM" true true false 4 Text 0 0 ,First,#,%s,CZCSXM,-1,-1;
TSTYBM "TSTYBM" true true false 100 Text 0 0 ,First,#,%s,TSTYBM,-1,-1;
SJDLBM "SJDLBM" true true false 100 Text 0 0 ,First,#,%s,SJDLBM,-1,-1;
OLDTAG "OLDTAG" true true false 20 Text 0 0 ,First,#,%s,OLDTAG,-1,-1;
JZSJ "JZSJ" true true false 8 Date 0 0 ,First,#,%s,JZSJ,-1,-1;
BSM "BSM" true true false 18 Text 0 0 ,First,#,%s,BSM,-1,-1;
ZZJZTB "ZZJZTB" true true false 1 Text 0 0 ,First,#,%s,ZZJZTB,-1,-1;
WJZLX "WJZLX" true true false 8 Text 0 0 ,First,#,%s,WJZLX,-1,-1;
cskmianji "cskmianji" true true false 8 Double 0 0 ,First,#,%s,cskmianji,-1,-1;
cskbsm "cskbsm" true true false 255 Text 0 0 ,First,#,%s,cskbsm,-1,-1;
cskzldwdm "cskzldwdm" true true false 255 Text 0 0 ,First,#,%s,cskzldwdm,-1,-1;
cskdlbm "cskdlbm" true true false 255 Text 0 0 ,First,#,%s,cskdlbm,-1,-1;
cskczcsxm "cskczcsxm" true true false 255 Text 0 0 ,First,#,%s,cskczcsxm,-1,-1;
tstybmlist "tstybmlist" true true false 2000 Text 0 0 ,Join,",",%s,TSTYBM,-1,-1;""" %
        (xzkpath, xzkpath, xzkpath, xzkpath, xzkpath, xzkpath, xzkpath,
         xzkpath, xzkpath, xzkpath, xzkpath, xzkpath, xzkpath, xzkpath,
         xzkpath, xzkpath, xzkpath, xzkpath, xzkpath, xzkpath, xzkpath,
         xzkpath))
def checkGeometry(table):
    """Run Check Geometry on *table* and return the number of errors found.

    The error table (<table>_Check) is kept only when errors exist so the
    operator can inspect it; otherwise it is removed.
    """
    errTable = table + "_Check"
    # Start from a clean slate: drop any leftover error table first.
    if arcpy.Exists(errTable):
        arcpy.Delete_management(errTable)
        gzSupport.addMessage("Deleted existing " + errTable)
    arcpy.CheckGeometry_management(table, errTable)
    count = int(arcpy.GetCount_management(errTable).getOutput(0))
    if count:
        gzSupport.addMessage(str(count) + " Errors located in " + errTable)
    else:
        gzSupport.addMessage("No Geometry Errors found")
        arcpy.Delete_management(errTable)
    return count
def GeometryCheck(): outreport = checkfile + "/GeomCheck" arcpy.env.workspace = checkfile fcs = [] # List all feature classes in feature datasets for fds in arcpy.ListDatasets(): print fds fcs += arcpy.ListFeatureClasses("*", "", fds) print fcs print "Running the check geometry tool on %i feature classes" % len(fcs) for fc in fcs: print fc outrep = outreport + fc arcpy.CheckGeometry_management(fc, outrep) print( str(arcpy.GetCount_management(outrep)) + " geometry problems were found.") print("See " + outrep + " for full details")
def update_acres(path_to_fc, acres_field_name='Acres'): ''' Look for an acres field, add if not found, update acres field, return total acres. Parameters: * path_to_fc: path to a feature class (must be a polygon) * acres_field_name: name of field to look for or create if not found. Return: * Total acres ''' # Check Geometry arcpy.CheckGeometry_management(path_to_fc, r'in_memory\check_geo_table') if int(arcpy.GetCount_management(r'in_memory\check_geo_table')[0]): arcpy.AddMessage('Error in feature class: {}'.format(path_to_fc)) return None # List fields in feature class field_names = [field.name for field in arcpy.ListFields(path_to_fc)] # Look for acres field, create if not found if not acres_field_name in field_names: print 'Adding field' arcpy.AddField_management(path_to_fc, acres_field_name, "DOUBLE") # Update acres print 'Updating acres' arcpy.CalculateField_management(path_to_fc, acres_field_name, "!shape.area@ACRES!", "PYTHON_9.3") try: acres_total = sum([ float(row[0]) for row in arcpy.da.SearchCursor(path_to_fc, acres_field_name) ]) except: acres_total = None print 'Total acres: ' + str(acres_total) return acres_total
def checkGeometry(table):
    """Run Check Geometry on *table*, report the result, and return the
    error count. Returns 0 when the check itself cannot be performed
    (the failure's traceback is logged via gzSupport)."""
    try:
        error_table = table + "_Check"
        # Remove any leftover error table before re-running the check.
        if arcpy.Exists(error_table):
            arcpy.Delete_management(error_table)
            gzSupport.addMessage("Deleted existing " + error_table)
        arcpy.CheckGeometry_management(table, error_table)
        count = int(arcpy.GetCount_management(error_table).getOutput(0))
        if count:
            gzSupport.addMessage(str(count) + " Errors located in " + error_table)
        else:
            # Clean result: no need to keep the empty error table around.
            gzSupport.addMessage("No Geometry Errors found")
            arcpy.Delete_management(error_table)
    except:
        # Deliberate catch-all: any failure is logged and reported as zero.
        gzSupport.showTraceback()
        gzSupport.addMessage(
            "Unable to perform geometry check, see error listed above")
        count = 0
    return count
#CHECK GEOMETRY # The workspace in which the feature classes will be checked outTable = theGDB + "/checkGeometryResult" if arcpy.Exists(outTable): arcpy.Delete_management(outTable) # A variable that will hold the list of all the feature classes # inside the geodatabase fcs = [] # List all standalone feature classes fcs = arcpy.ListFeatureClasses() print "Running the check geometry tool on %i feature classes" % len(fcs) arcpy.CheckGeometry_management(fcs, outTable) if (str(arcpy.GetCount_management(outTable))) <> '0': print( str(arcpy.GetCount_management(outTable)) + " geometry problems were found.") arcpy.AddMessage( str(arcpy.GetCount_management(outTable)) + " geometry problems were found.") arcpy.RepairGeometry_management(fcs) arcpy.Delete_management(outTable) else: print( str(arcpy.GetCount_management(outTable)) + " geometry problems were found. - - Deleting Table") arcpy.AddMessage(
def join_spatiallyrs(lyr1, lyr2, outlyr, joinfield = '', newname = '', newalias = '', method = 'INTERSECT'):
    ''' This function spatially joins two shapefiles. "lyr2" is the target shapefile.
    "lyr1" is the joining shapefile. "outlyr" is the name of the resulting file.
    The default method is 'INTERSECT', assuming the joining shapefile is polygon data.
    Alternatively, users could run the function using "WITHIN" for point joining data.
    The join operation is 'JOIN_ONE_TO_MANY' so that multiple join features
    intersecting/within the target parcel are all reported in the resulting shapefile.
    However, this means individual parcels will have multiple entries in the
    resulting attribute table.

    NOTE(review): despite the docstring, both SpatialJoin calls below pass
    'JOIN_ONE_TO_ONE' -- confirm which behaviour is intended.
    '''
    def findindex(table, fieldname):
        ''' Function from
        https://gis.stackexchange.com/questions/101540/finding-the-index-of-a-field-with-its-name-using-arcpy
        to find the index of a table's fields from the field name '''
        return [i.name for i in arcpy.ListFields(table)].index(fieldname)
    # Repair geometry of both input feature classes
    # Check for geometry problems
    outtable = AutoName('geomtable')
    arcpy.AddMessage('Checking ' + lyr1 + ' geometry...')
    print('Checking ' + lyr1 + ' geometry...')
    arcpy.CheckGeometry_management(lyr1, outtable)
    #Repair geometry problems
    # NOTE(review): the check table is deleted without being inspected --
    # RepairGeometry runs unconditionally.
    arcpy.AddMessage('Repairing ' + lyr1 + ' geometry...')
    print('Repairing ' + lyr1 + ' geometry...')
    arcpy.RepairGeometry_management(lyr1)
    arcpy.Delete_management(outtable)
    # Initiate a fieldmap object
    arcpy.AddMessage('Checking field maps')
    print('Checking field maps')
    fieldmappings = arcpy.FieldMappings()
    # Add two layer tables to the fieldmapping object
    fieldmappings.addTable(lyr2)
    fieldmappings.addTable(lyr1)
    if len(joinfield) < 1:
        # No meaningful join field: Just join them with no field method
        arcpy.AddMessage('Conducting spatial join')
        print('Conducting spatial join')
        arcpy.SpatialJoin_analysis(lyr2, lyr1, outlyr, 'JOIN_ONE_TO_ONE',
                                   'KEEP_ALL', method)
    else:
        # A join field was supplied: keep only the target's fields plus the
        # requested join field, then rename it on the output.
        targetfields = arcpy.ListFields(lyr2)
        joinfields = arcpy.ListFields(lyr1)
        keepfield = findindex(lyr1, joinfield)
        keepfieldprops = joinfields[keepfield]
        fieldlength = keepfieldprops.length
        fieldtype = keepfieldprops.type
        # reset fieldtype to add field-compatible equivalent
        # (Describe/ListFields types differ from AddField keywords)
        if fieldtype == 'Integer':
            fieldtype = 'LONG'
        elif fieldtype == 'String':
            fieldtype = 'TEXT'
        elif fieldtype == 'SmallInteger':
            fieldtype = 'SHORT'
        else:
            pass
        targetfields.append(joinfields[keepfield])
        keepers = list()
        for k in range(len(targetfields)):
            keepers.append(targetfields[k].name)
        for field in fieldmappings.fields:
            if field.name not in keepers:
                fieldmappings.removeFieldMap(
                    fieldmappings.findFieldMapIndex(field.name))
            else:
                # NOTE(review): this sets mergeRule on the *field* object
                # returned by fieldmappings.fields, not on a FieldMap --
                # it likely has no effect on the join; confirm intent.
                field.mergeRule = 'maximum'
        arcpy.AddMessage('Conducting spatial join')
        print('Conducting spatial join')
        arcpy.SpatialJoin_analysis(lyr2, lyr1, outlyr, 'JOIN_ONE_TO_ONE',
                                   'KEEP_ALL', fieldmappings, method)
        # Re-name field
        arcpy.AddField_management(outlyr, newname, field_type = fieldtype)
        arcpy.CalculateField_management(outlyr, newname,
                                        "!" + joinfield + "!", "PYTHON_9.3")
        arcpy.DeleteField_management(outlyr, joinfield)
    return()
# Batch geometry check: run Check Geometry over every feature class in the
# datasets of the current workspace, then collect the names of the feature
# classes needing repair from the results table.
fcsAll = []
fcsRepair = []
outTable = "CheckGeometry"
outTablePath = os.path.join(outputGDB, outTable)
#Build list of feature classes to test for valid geometry
for dataset in arcpy.ListDatasets():
    fcsAll += arcpy.ListFeatureClasses('*', '', dataset)
#Check geometry
print("Running the check geometry tool on {} feature classes".format(
    len(fcsAll)))
arcpy.AddMessage(
    "Running the check geometry tool on {} feature classes".format(
        len(fcsAll)))
arcpy.CheckGeometry_management(fcsAll, outTablePath)
geoErrorCount = arcpy.GetCount_management(outTablePath)[0]
print("{} geometry problems found, results table at {}.".format(
    geoErrorCount, outTablePath))
arcpy.AddMessage("{} geometry problems found, results table at {}.".format(
    geoErrorCount, outTablePath))
#Release lock
arcpy.ClearWorkspaceCache_management()
# FIX: GetCount's indexed output is a *string*; comparing it with `> 0`
# raises TypeError on Python 3 (and is always true on Python 2).
# Compare numerically; geoErrorCount itself stays a string for the
# messages above.
if int(geoErrorCount) > 0:
    #Loop through Geometry Error Table to get list of feature needing repair
    for row in arcpy.da.SearchCursor(outTablePath, ('CLASS')):
        if not row[0] in fcsRepair:
            fcsRepair.append(row[0])
def chkgeom(file):
    """Repair *file*'s geometry when Check Geometry reports any problems.

    The scratch error table is written to in_memory and always removed
    afterwards. NOTE: the parameter name shadows the Python 2 builtin
    ``file`` but is kept for interface compatibility.
    """
    error_table = "in_memory/tablewitherrors"
    arcpy.CheckGeometry_management(file, error_table)
    error_count = int(arcpy.GetCount_management(error_table)[0])
    if error_count > 0:
        arcpy.RepairGeometry_management(file)
    arcpy.Delete_management(error_table)
# Build the header row of the "geometry" worksheet (German column titles:
# running number, object class, object ID, problem), run Check Geometry,
# and copy the results into the sheet.
wsG.write(0, 0, "Laufende Nummer", grey)
wsG.write(0, 1, "Objektklasse", grey)
wsG.write(0, 2, "Objekt-ID", grey)
wsG.write(0, 3, "Problem", grey)
#set dim for columns of -----GEOMETRY SHEET-----
wsG.set_column(0, 0, 12)
wsG.set_column(1, 1, 70)
wsG.set_column(2, 2, 15)
wsG.set_column(3, 3, 40)
'''----------GEOMETRY CHECK----------'''
#path for the second excel (checkGeometry)
outTable = os.path.join(gdbPath, os.path.splitext(filename)[0])
print("Running the check geometry tool on {} feature classes".format(
    len(filename_path)))
checkResult = arcpy.CheckGeometry_management(filename_path, outTable)
print("{} geometry problems found, see {} for details.".format(
    arcpy.GetCount_management(outTable)[0], outTable))
#adding data to excel 2ndSheet (checKGeometry)
checkRow = 1
# NOTE(review): this cursor is never closed in the visible chunk, and
# checkRow is not incremented here -- presumably both happen in code past
# this view; confirm.
cursorTable = arcpy.da.SearchCursor(
    outTable, ["OBJECTID", "CLASS", "FEATURE_ID", "PROBLEM"])
for row in cursorTable:
    #getting the shapeName to column
    wsG.write(checkRow, 1, filename, style)
    #translating to german
    if row[3] in d_problems:
        wsG.write(checkRow, 3, d_problems[row[3]], style)
    else:
        # Unknown problem text: write it untranslated, highlighted pink.
        wsG.write(checkRow, 3, row[3], pink)
    # Fragment of a larger try-block (the `try:` begins before this view):
    # compute centroid coordinates, dissolve on them, then check/repair the
    # final feature class and clean up scratch data.
    arcpy.AddField_management(tempFC7, "YCOORD", "LONG")
    arcpy.CalculateField_management(tempFC7, "XCOORD",
                                    "!SHAPE.CENTROID!.split()[0]", "PYTHON")
    arcpy.CalculateField_management(tempFC7, "YCOORD",
                                    "!SHAPE.CENTROID!.split()[1]", "PYTHON")
    #Dissolve on XCoord, YCoord, & Area with Statistics SORT-First & CLASS-First
    arcpy.Dissolve_management(tempFC7, tempFC8,
                              ["XCOORD", "YCOORD", "SHAPE_Area"],
                              "CLASS FIRST", "MULTI_PART", "")
    #Re-Add CLASS Field
    arcpy.AddField_management(tempFC8, "CLASS", "TEXT", "", "", 20)
    arcpy.CalculateField_management(tempFC8, "CLASS", '!FIRST_CLASS!',
                                    "PYTHON_9.3")
    #Final Dissolve on CLASS Field
    arcpy.Dissolve_management(tempFC8, finalFC, "CLASS")
    #Check and Repair Geometry
    arcpy.CheckGeometry_management(finalFC, tempTblChkGeo)
    # NOTE(review): this compares the count *string* with "1"
    # lexicographically; it happens to behave like a numeric < 1 test for
    # non-negative counts, but an int() comparison would be clearer.
    if arcpy.GetCount_management(tempTblChkGeo)[0] < "1":
        arcpy.AddMessage(
            "The feature class does not contain any geomeotry errors")
    else:
        arcpy.RepairGeometry_management(finalFC)
    #Delete Temporary Feature Classes
    arcpy.env.workspace = scratchGDB
    fcTemp = arcpy.ListFeatureClasses("tempFC*", "")
    for fcTempDel in fcTemp:
        arcpy.Delete_management(fcTempDel, "")
except Exception as e:
    # Python 2 style: Exception.message attribute.
    print e.message
    arcpy.AddError(e.message)
# Run the per-municipality attribute process for every town, then merge
# the per-town outputs back into one feature class and repair its geometry.
muniparcelnames = list()
for k in np.arange(0, len(townnames)):
    muniname = townnames[k]
    muninamecaps = townnames_caps[k]
    print('Starting ' + muniname)
    # muni_addatts returns the name of the municipality-specific output.
    muniparcelnames.append(
        muni_addatts(parcels, townpolys, muniname, muninamecaps))
arcpy.AddMessage("Completed all municipalities")
''' Combine municipal-specific results into one feature class. '''
# Create an empty feature class with the desired schema
outfile = AutoName('Parcels_complete')
arcpy.CopyFeatures_management(muniparcelnames[0], outfile)
arcpy.DeleteRows_management(outfile)  # Empty the output file
# Append all municipal files onto empty feature class with appropriate schema
arcpy.AddMessage("Re-merging municipalities")
arcpy.Append_management(muniparcelnames, outfile, schema_type="TEST")
# The per-municipality intermediates are no longer needed.
for k in range(len(muniparcelnames)):
    arcpy.Delete_management(muniparcelnames[k])
# Repair geometry
# NOTE(review): the check table is created and deleted without being
# inspected -- RepairGeometry runs unconditionally; confirm intent.
outtable = AutoName('geomtable')
arcpy.AddMessage('Checking ' + outfile + ' geometry...')
arcpy.CheckGeometry_management(outfile, outtable)
#Repair geometry problems
arcpy.AddMessage('Repairing ' + outfile + ' geometry...')
arcpy.RepairGeometry_management(outfile)
arcpy.Delete_management(outtable)
# Copy the input feature class, project it to WGS84, check/repair its
# geometry, then iterate the unique values of fieldName to split it up.
OutputProject = DataPath + "\\Scratch.gdb\\" + InputFileName + "_Project"
OutputShapefile = DataPath + os.path.sep + "Shapefiles" + os.path.sep
OutputZipFiles = DataPath + os.path.sep + "ZipFiles" + os.path.sep
OutputFCLocation = DataPath + "\\MTUpdate20140930.gdb\\Provider_Coverage\\"
OutTable = DataPath + "\\MTUpdate20140930.gdb\\" + "tbl_CheckGeo_" + InputFileName
# Create a copy of the input FeatureClass
arcpy.FeatureClassToFeatureClass_conversion(InputDataset, OutputFCLocation,
                                            InputFileName, "")
# Process: Project the Input Dataset to WGS84
arcpy.Project_management(InputDataset, OutputProject, outCS,
                         "WGS_1984_(ITRF00)_To_NAD_1983")
#Check Geometry
# NOTE(review): the check results in OutTable are never read here --
# RepairGeometry runs unconditionally.
arcpy.CheckGeometry_management(OutputProject, OutTable)
#Repair Geometry
arcpy.RepairGeometry_management(OutputProject)
# Collect the distinct values of fieldName to drive the per-value selects.
values = [row[0] for row in arcpy.da.SearchCursor(OutputProject, (fieldName))]
uniqueValues = set(values)
uniqueValues2 = list(uniqueValues)
x = 0
for value in uniqueValues2:
    #Create the query
    # NOTE(review): the loop indexes uniqueValues2[x] instead of using
    # `value`; the x increment (and the rest of the loop body) lies past
    # this view -- the source is truncated here.
    query = fieldName + " = '" + uniqueValues2[x] + "'"
    #Execute the Select tool
def concave_hull(input_feature_class, output_feature_class, k_0=3, field_choice="", includenull=True):
    """Build k-nearest-neighbour concave hull polygons around the input
    points (optionally one hull per value of *field_choice*), write them to
    *output_feature_class*, then run Check Geometry on the result and hand
    it to polygon_to_polyline.

    Python 2 code (uses `<>`, `sys.exc_type`, `sys.exc_value`).
    Relies on the module-level function polygon_to_polyline.
    """
    try:
        import arcpy
        import itertools
        import math
        import os
        import sys
        import traceback
        import string
        # NOTE(review): this sets an attribute on the arcpy *module*;
        # the environment setting is arcpy.env.overwriteOutput -- confirm
        # this line has the intended effect.
        arcpy.overwriteOutput = True

        # Functions that consolidate reuable actions
        #

        # Function to return an OID list for k nearest eligible neighbours of a feature
        def kNeighbours(k, oid, pDict, excludeList=[]):
            hypotList = [
                math.hypot(pDict[oid][0] - pDict[id][0],
                           pDict[oid][1] - pDict[id][1])
                for id in pDict.keys() if id <> oid and id not in excludeList
            ]
            hypotList.sort()
            hypotList = hypotList[0:k]
            oidList = [
                id for id in pDict.keys()
                if math.hypot(pDict[oid][0] - pDict[id][0],
                              pDict[oid][1] - pDict[id][1]) in hypotList
                and id <> oid and id not in excludeList
            ]
            return oidList

        # Function to rotate a point about another point, returning a list [X,Y]
        def RotateXY(x, y, xc=0, yc=0, angle=0):
            x = x - xc
            y = y - yc
            xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
            yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
            return [xr, yr]

        # Function finding the feature OID at the rightmost angle from an origin OID, with respect to an input angle
        def Rightmost(oid, angle, pDict, oidList):
            origxyList = [pDict[id] for id in pDict.keys() if id in oidList]
            rotxyList = []
            for p in range(len(origxyList)):
                rotxyList.append(
                    RotateXY(origxyList[p][0], origxyList[p][1],
                             pDict[oid][0], pDict[oid][1], angle))
            minATAN = min([
                math.atan2((xy[1] - pDict[oid][1]), (xy[0] - pDict[oid][0]))
                for xy in rotxyList
            ])
            rightmostIndex = rotxyList.index([
                xy for xy in rotxyList
                if math.atan2((xy[1] - pDict[oid][1]),
                              (xy[0] - pDict[oid][0])) == minATAN
            ][0])
            return oidList[rightmostIndex]

        # Function to detect single-part polyline self-intersection
        # NOTE(review): the parameter is named `polyline` but the body reads
        # the outer-scope name `line`, and the local flag shadows the
        # function name -- confirm against the original script.
        def selfIntersects(polyline):
            lList = []
            selfIntersects = False
            for n in range(0, len(line.getPart(0)) - 1):
                lList.append(
                    arcpy.Polyline(
                        arcpy.Array(
                            [line.getPart(0)[n], line.getPart(0)[n + 1]])))
            for pair in itertools.product(lList, repeat=2):
                if pair[0].crosses(pair[1]):
                    selfIntersects = True
                    break
            return selfIntersects

        # Function to construct the Hull
        def createHull(pDict, outCaseField, lastValue, kStart, dictCount, includeNull):
            # Value of k must result in enclosing all data points; create condition flag
            enclosesPoints = False
            notNullGeometry = False
            k = kStart
            if dictCount > 1:
                pList = [arcpy.Point(xy[0], xy[1]) for xy in pDict.values()]
                mPoint = arcpy.Multipoint(arcpy.Array(pList), sR)
                minY = min([xy[1] for xy in pDict.values()])
                while not enclosesPoints and k <= 30:
                    arcpy.AddMessage("Finding hull for k = " + str(k))
                    # Find start point (lowest Y value)
                    startOID = [
                        id for id in pDict.keys() if pDict[id][1] == minY
                    ][0]
                    # Select the next point (rightmost turn from horizontal, from start point)
                    kOIDList = kNeighbours(k, startOID, pDict, [])
                    minATAN = min([
                        math.atan2(pDict[id][1] - pDict[startOID][1],
                                   pDict[id][0] - pDict[startOID][0])
                        for id in kOIDList
                    ])
                    nextOID = [
                        id for id in kOIDList
                        if math.atan2(pDict[id][1] - pDict[startOID][1],
                                      pDict[id][0] - pDict[startOID][0]) == minATAN
                    ][0]
                    # Initialise the boundary array
                    bArray = arcpy.Array(
                        arcpy.Point(pDict[startOID][0], pDict[startOID][1]))
                    bArray.add(
                        arcpy.Point(pDict[nextOID][0], pDict[nextOID][1]))
                    # Initialise current segment lists
                    currentOID = nextOID
                    prevOID = startOID
                    # Initialise list to be excluded from candidate consideration (start point handled additionally later)
                    excludeList = [startOID, nextOID]
                    # Build the boundary array - taking the closest rightmost point that does not cause a self-intersection.
                    steps = 2
                    while currentOID <> startOID and len(pDict) <> len(
                            excludeList):
                        try:
                            angle = math.atan2(
                                (pDict[currentOID][1] - pDict[prevOID][1]),
                                (pDict[currentOID][0] - pDict[prevOID][0]))
                            oidList = kNeighbours(k, currentOID, pDict,
                                                  excludeList)
                            nextOID = Rightmost(currentOID, 0 - angle, pDict,
                                                oidList)
                            pcArray = arcpy.Array([arcpy.Point(pDict[currentOID][0], pDict[currentOID][1]), \
                                arcpy.Point(pDict[nextOID][0], pDict[nextOID][1])])
                            while arcpy.Polyline(bArray, sR).crosses(
                                    arcpy.Polyline(pcArray, sR)) and len(oidList) > 0:
                                # arcpy.AddMessage("Rightmost point from " + str(currentOID) + " : " + str(nextOID) + " causes self intersection - selecting again")
                                excludeList.append(nextOID)
                                oidList.remove(nextOID)
                                oidList = kNeighbours(k, currentOID, pDict,
                                                      excludeList)
                                if len(oidList) > 0:
                                    nextOID = Rightmost(
                                        currentOID, 0 - angle, pDict, oidList)
                                    # arcpy.AddMessage("nextOID candidate: " + str(nextOID))
                                    pcArray = arcpy.Array([arcpy.Point(pDict[currentOID][0], pDict[currentOID][1]), \
                                        arcpy.Point(pDict[nextOID][0], pDict[nextOID][1])])
                            bArray.add(
                                arcpy.Point(pDict[nextOID][0], pDict[nextOID][1]))
                            prevOID = currentOID
                            currentOID = nextOID
                            excludeList.append(currentOID)
                            # arcpy.AddMessage("CurrentOID = " + str(currentOID))
                            steps += 1
                            if steps == 4:
                                excludeList.remove(startOID)
                        except ValueError:
                            arcpy.AddMessage(
                                "Zero reachable nearest neighbours at " +
                                str(pDict[currentOID]) + " , expanding search")
                            break
                    # Close the boundary and test for enclosure
                    bArray.add(
                        arcpy.Point(pDict[startOID][0], pDict[startOID][1]))
                    pPoly = arcpy.Polygon(bArray, sR)
                    if pPoly.length == 0:
                        break
                    else:
                        notNullGeometry = True
                    if mPoint.within(arcpy.Polygon(bArray, sR)):
                        enclosesPoints = True
                    else:
                        arcpy.AddMessage(
                            "Hull does not enclose data, incrementing k")
                        k += 1
                #
                if not mPoint.within(arcpy.Polygon(bArray, sR)):
                    arcpy.AddWarning(
                        "Hull does not enclose data - probable cause is outlier points"
                    )
            # Insert the Polygons
            if (notNullGeometry and includeNull == False) or includeNull:
                if outCaseField > " ":
                    insFields = [
                        outCaseField, "POINT_CNT", "ENCLOSED", "SHAPE@"
                    ]
                else:
                    insFields = ["POINT_CNT", "ENCLOSED", "SHAPE@"]
                rows = arcpy.da.InsertCursor(outFC, insFields)
                row = []
                if outCaseField > " ":
                    row.append(lastValue)
                row.append(dictCount)
                if notNullGeometry:
                    row.append(enclosesPoints)
                    row.append(arcpy.Polygon(bArray, sR))
                else:
                    row.append(-1)
                    row.append(None)
                rows.insertRow(row)
                del row
                del rows
            elif outCaseField > " ":
                arcpy.AddMessage("\nExcluded Null Geometry for case value " +
                                 str(lastValue) + "!")
            else:
                arcpy.AddMessage("\nExcluded Null Geometry!")

        # Main Body of the program.
        #
        #
        # Get the input feature class or layer
        inPoints = input_feature_class
        inDesc = arcpy.Describe(inPoints)
        inPath = os.path.dirname(inDesc.CatalogPath)
        sR = inDesc.spatialReference
        # Get k
        k = int(k_0)
        kStart = k
        # Get output Feature Class
        outFC = output_feature_class
        outPath = os.path.dirname(outFC)
        outName = os.path.basename(outFC)
        # Get case field and ensure it is valid
        caseField = field_choice
        if caseField > " ":
            fields = inDesc.fields
            for field in fields:
                # Check the case field type
                if field.name == caseField:
                    caseFieldType = field.type
                    if caseFieldType not in [
                            "SmallInteger", "Integer", "Single", "Double",
                            "String", "Date"
                    ]:
                        arcpy.AddMessage(
                            "\nThe Case Field named " + caseField +
                            " is not a valid case field type! The Case Field will be ignored!\n"
                        )
                        caseField = " "
                    else:
                        if caseFieldType in [
                                "SmallInteger", "Integer", "Single", "Double"
                        ]:
                            caseFieldLength = 0
                            caseFieldScale = field.scale
                            caseFieldPrecision = field.precision
                        elif caseFieldType == "String":
                            caseFieldLength = field.length
                            caseFieldScale = 0
                            caseFieldPrecision = 0
                        else:
                            caseFieldLength = 0
                            caseFieldScale = 0
                            caseFieldPrecision = 0
        # Define an output case field name that is compliant with the output feature class
        outCaseField = str.upper(str(caseField))
        if outCaseField == "ENCLOSED":
            outCaseField = "ENCLOSED1"
        if outCaseField == "POINT_CNT":
            outCaseField = "POINT_CNT1"
        if outFC.split(".")[-1] in ("shp", "dbf"):
            outCaseField = outCaseField[
                0:10]  # field names in the output are limited to 10 charaters!
        # Get Include Null Geometry Feature flag
        includeNull = includenull
        # Some housekeeping
        inDesc = arcpy.Describe(inPoints)
        sR = inDesc.spatialReference
        arcpy.env.OutputCoordinateSystem = sR
        oidName = str(inDesc.OIDFieldName)
        if inDesc.dataType == "FeatureClass":
            inPoints = arcpy.MakeFeatureLayer_management(inPoints)
        # Create the output
        arcpy.AddMessage("\nCreating Feature Class...")
        if '.SHP' in outName.upper():
            outName = outName[:-4]
        arcpy.AddMessage(outPath + "; " + outName)
        outFC = arcpy.CreateFeatureclass_management(outPath, outName,
                                                    "POLYGON", "#", "#", "#",
                                                    sR).getOutput(0)
        if caseField > " ":
            if caseFieldType in [
                    "SmallInteger", "Integer", "Single", "Double"
            ]:
                arcpy.AddField_management(outFC, outCaseField, caseFieldType,
                                          str(caseFieldScale),
                                          str(caseFieldPrecision))
            elif caseFieldType == "String":
                arcpy.AddField_management(outFC, outCaseField, caseFieldType,
                                          "", "", str(caseFieldLength))
            else:
                arcpy.AddField_management(outFC, outCaseField, caseFieldType)
        arcpy.AddField_management(outFC, "POINT_CNT", "Long")
        arcpy.AddField_management(outFC, "ENCLOSED", "SmallInteger")
        # Build required data structures
        arcpy.AddMessage("\nCreating data structures...")
        rowCount = 0
        caseCount = 0
        dictCount = 0
        pDict = {}  # dictionary keyed on oid with [X,Y] list values, no duplicate points
        if caseField > " ":
            fields = [caseField, 'OID@', 'SHAPE@X', 'SHAPE@Y']
            valueDict = {}
            with arcpy.da.SearchCursor(inPoints, fields) as searchRows:
                for searchRow in searchRows:
                    keyValue = searchRow[0]
                    if not keyValue in valueDict:
                        # assign a new keyValue entry to the dictionary storing a list of the first NumberField value and 1 for the first record counter value
                        valueDict[keyValue] = [[
                            searchRow[1], searchRow[2], searchRow[3]
                        ]]
                    # Sum the last summary of NumberField value with the current record and increment the record count when keyvalue is already in the dictionary
                    else:
                        valueDict[keyValue].append(
                            [searchRow[1], searchRow[2], searchRow[3]])
            for lastValue in sorted(valueDict):
                caseCount += 1
                for p in valueDict[lastValue]:
                    rowCount += 1
                    # Continue processing the current point subset.
                    if [p[1], p[2]] not in pDict.values():
                        pDict[p[0]] = [p[1], p[2]]
                        dictCount += 1
                createHull(pDict, outCaseField, lastValue, kStart, dictCount,
                           includeNull)
                # Reset variables for processing the next point subset.
                pDict = {}
                dictCount = 0
        else:
            fields = ['OID@', 'SHAPE@X', 'SHAPE@Y']
            for p in arcpy.da.SearchCursor(inPoints, fields):
                rowCount += 1
                if [p[1], p[2]] not in pDict.values():
                    pDict[p[0]] = [p[1], p[2]]
                    dictCount += 1
            lastValue = 0
            # Final create hull call and wrap up of the program's massaging
            createHull(pDict, outCaseField, lastValue, kStart, dictCount,
                       includeNull)
        arcpy.AddMessage("\n" + str(rowCount) + " points processed. " +
                         str(caseCount) + " case value(s) processed.")
        if caseField == " " and arcpy.GetParameterAsText(3) > " ":
            arcpy.AddMessage("\nThe Case Field named " +
                             arcpy.GetParameterAsText(3) +
                             " was not a valid field type and was ignored!")
        arcpy.AddMessage("\nFinished")
    # Error handling
    except:
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
            str(sys.exc_type) + ": " + str(sys.exc_value) + "\n"
        arcpy.AddError(pymsg)
        msgs = "GP ERRORS:\n" + arcpy.GetMessages(2) + "\n"
        arcpy.AddError(msgs)
    # Post-run check: a check table with exactly "2" rows signals failure.
    # NOTE(review): the output path is hard-coded; confirm it is valid on
    # the deployment machine.
    arcpy.CheckGeometry_management(output_feature_class,
                                   "C:/Studia/PPG_II/Egzamin/Wyniki/spr")
    if arcpy.GetCount_management(
            "C:/Studia/PPG_II/Egzamin/Wyniki/spr")[0] == "2":
        return "Error"
    else:
        return polygon_to_polyline(output_feature_class)
# Copy the YT layer into a fresh file GDB, run Check Geometry on it, and
# repair the geometry when the check table reports any rows. Progress is
# appended to logFile via myWrite.
myWrite(logFile, "01 - Creating a gdb...")
arcpy.CreateFileGDB_management(tempOutputDir, "YT.gdb")
myWrite(logFile, " Done\n")
layer = "YT"
inFile = InputDir + "/" + gdbName + "/" + tblName
myWrite(logFile, "02 - Making a layer from " + inFile + "...")
arcpy.MakeFeatureLayer_management(inFile, layer)
outFile = tempOutputDir + "/YT.gdb/" + layer
arcpy.CopyFeatures_management(layer, outFile)
myWrite(logFile, " Done\n")
inFile = tempOutputDir + "/YT.gdb/" + layer
outFile = checkgeoOutputDir + "/" + layer + ".dbf"
myWrite(logFile, "03 - Checking geometry of " + inFile + "...")
arcpy.CheckGeometry_management(inFile, outFile)
myWrite(logFile, " Done\n")
## Calculate number of rows to create only shp files and csv files when data exist
inFile = checkgeoOutputDir + "/" + layer + ".dbf"
# NOTE(review): count is the *string* form of the GetCount result, and the
# comparisons below are lexicographic ('10' > '0' holds, so this works for
# non-negative counts, but int() would be clearer).
count = str(arcpy.GetCount_management(inFile))
myWrite(logFile, "04 - Repairing geometry of for " + count + " polygons: ")
if count == '0':
    # No errors: the empty check table is not worth keeping.
    arcpy.Delete_management(inFile)
if count > '0':
    inFile = tempOutputDir + "/YT.gdb/" + layer
    arcpy.RepairGeometry_management(inFile, "KEEP_NULL")
    myWrite(logFile, count + " wrong geometry\n")
## Create AREA field and set its value
indbconnection = 'TEST_US' #arcpy.GetParameterAsText(0)#r'\\cabcvan1gis005\GISData\Connection to GMTESTC.sde'#arcpy.GetParameterAsText(0) #location of connection file infc = r'\\CABCVAN1FPR001\DATA\12CTESTING\US\_Federal\FUDS\2021_07_01\PYLOADER\Formerly_Used_Defense_Sites__FUDS__Public_Property_Boundaries.shp' #arcpy.GetParameterAsText(1)#r'C:\Users\JLoucks\Desktop\HFA_CO.shp'#arcpy.GetParameterAsText(1) This can be a shapefile or .gdb featureclass outfc = 'FUDS_PUB_PROPERTY_BOUND' #arcpy.GetParameterAsText(2)#'HFA_CO_test'#arcpy.GetParameterAsText(2) #name of table that will be imported to db scratch = r'C:\Users\JLoucks\Documents\JL\test2' #arcpy.env.scratchFolder#r'C:\Users\JLoucks\Documents\JL\test1' scratchgdb = os.path.join(scratch, 'scratch.gdb') arcpy.CreateFileGDB_management(scratch, 'scratch.gdb') geo_check_table = os.path.join(scratch, "checkGeometryResult") try: arcpy.AddMessage('Stripping M Z values and projecting to WGS84') infc = str(getspatial_ref(infc, outfc)) arcpy.AddMessage('Grabbing connection file') indbconnection = OracleCredential(indbconnection).get_sde_con_file() arcpy.AddMessage(indbconnection) arcpy.AddMessage('Checking geometry...') arcpy.CheckGeometry_management(infc, geo_check_table) if int(arcpy.GetCount_management(geo_check_table)[0]) > 0: arcpy.AddWarning( str(arcpy.GetCount_management(geo_check_table)[0]) + ' problems found with geometry, please check log here: ' + geo_check_table) else: arcpy.AddMessage('Geometry OK') if arcpy.Exists(os.path.join(indbconnection, outfc)): arcpy.AddWarning('Deleting existing FC...') arcpy.Delete_management(os.path.join(indbconnection, outfc)) arcpy.AddMessage('Copying Feature Class to DB') arcpy.FeatureClassToFeatureClass_conversion(infc, indbconnection, outfc, config_keyword='SDO_GEOMETRY')
def main():
    """Process every species folder under dataPath.

    For each folder: back up the zipped gdb, unzip it, copy each feature
    class aside, check/repair its geometry, dice it to max_vertices, and
    re-zip the cleaned gdb. Errors in one folder are logged and the loop
    moves on to the next folder.
    """
    for f in os.listdir(dataPath):
        if os.path.isdir(os.path.join(dataPath, f)):
            # For each data/<species dir>
            printlog('Processing Species Folder: ' + f)
            try:
                for sf in os.listdir(os.path.join(dataPath, f)):
                    if sf.endswith('.zip') and not sf.startswith('old'):
                        # Make a backup of the zipped gdb; probe for an unused
                        # backup name (old-, old1-, old2-, ...).
                        newSpeciesZip = os.path.join(dataPath, f, sf)
                        oldSpeciesZip = os.path.join(dataPath, f, 'old-' + sf)
                        zipcnt = 1
                        while os.path.exists(oldSpeciesZip):
                            oldSpeciesZip = os.path.join(
                                dataPath, f, 'old' + str(zipcnt) + '-' + sf)
                            zipcnt = zipcnt + 1
                        printlog('Backing up zip: ' + oldSpeciesZip)
                        os.rename(newSpeciesZip, oldSpeciesZip)

                        # The gdb is expected to unzip to <zip name without .zip>.
                        newGdbPath = newSpeciesZip.replace('.zip', '')
                        # Clear the gdb if it already exists.
                        if os.path.exists(newGdbPath):
                            printlog('Deleting: ' + newGdbPath)
                            arcpy.Delete_management(newGdbPath)

                        printlog('Unzipping: ' + newSpeciesZip)
                        with ZipFile(oldSpeciesZip) as toUnzip:
                            toUnzip.extractall(os.path.join(dataPath, f))

                        # Check for the expected gdb.
                        if os.path.exists(newGdbPath):
                            env.workspace = newGdbPath
                            featureClasses = arcpy.ListFeatureClasses()
                            for fc in featureClasses:
                                # Back up the layer in case dice fails, then
                                # clear the original.
                                oldFeatureClass = 'old_' + fc
                                printlog('Backing up layer: ' + fc)
                                arcpy.CopyFeatures_management(
                                    fc, oldFeatureClass)
                                arcpy.Delete_management(fc)
                                row_count = arcpy.GetCount_management(
                                    oldFeatureClass)
                                printlog('Original Feature Count: ' +
                                         str(row_count))

                                printlog('Checking for bad geometries in: ' + fc)
                                checkGeomPath = os.path.join(
                                    dataPath, f, fc + '_checkGeom.csv')
                                if os.path.exists(checkGeomPath):
                                    printlog('Deleting: ' + checkGeomPath)
                                    os.remove(checkGeomPath)
                                    # BUG FIX: the original removed the
                                    # companion .txt.xml unconditionally and
                                    # crashed when only the .csv was present.
                                    xmlPath = os.path.join(
                                        dataPath, f, fc + '_checkGeom.txt.xml')
                                    if os.path.exists(xmlPath):
                                        os.remove(xmlPath)
                                printlog('Look for geom errors in: ' +
                                         checkGeomPath)
                                arcpy.CheckGeometry_management(
                                    oldFeatureClass, checkGeomPath)
                                printlog(
                                    'Repairing any potentially bad geometries in: '
                                    + fc)
                                arcpy.RepairGeometry_management(oldFeatureClass)

                                # Dice it with 10K vertices making it new fc.
                                printlog('Dicing layer: ' + fc)
                                arcpy.Dice_management(oldFeatureClass, fc,
                                                      max_vertices)
                                # Clear the backup once to this point.
                                printlog(
                                    'Dicing successful, deleting backup layer')
                                arcpy.Delete_management(oldFeatureClass)
                                row_count = arcpy.GetCount_management(fc)
                                printlog('New Feature Count: ' + str(row_count))

                            # Re-zip the cleaned gdb with archive-relative paths.
                            printlog('Rezipping: ' + newSpeciesZip)
                            with ZipFile(newSpeciesZip, 'w') as toZip:
                                file_paths = get_all_file_paths(newGdbPath)
                                zipbasedir = os.path.dirname(newGdbPath)
                                for file in file_paths:
                                    toZip.write(file,
                                                file.replace(zipbasedir, ''))
                            printlog('Deleting: ' + newGdbPath)
                            arcpy.Delete_management(newGdbPath)
                            printlog('---------')
                        else:
                            printlog(
                                'ERROR: Zip did not contain expected gdb. Skipping to next folder.'
                            )
            # BUG FIX: bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception (still logged).
            except Exception:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                text = ''.join(
                    traceback.format_exception(exc_type, exc_value,
                                               exc_traceback))
                printlog(
                    'ERROR: Unexpected exception while processing %s.\nSkipping to next folder.\nYou will need to rename old-<name>.zip to <name>.zip if you want to process this folder again.\nException: %s'
                    % (f, text))
                printlog('---------')
    printlog('Finished!')
# Attribute fields carried into the DCN clone layers.
dzFields = ["PROVNAME", "DBANAME", "FRN", "TRANSTECH", "SPECTRUM",
            "MAXADDOWN", "MAXADUP"]
# Technology codes passed to createDCN for fiber and DSL/copper clones.
# NOTE(review): semantics of these string codes come from createDCN —
# confirm against the full script.
techFiber = "50"
techCopper = "30"

try:
    # Select the DCN Members Features from the Web_ProviderTecnology Feature Class
    arcpy.Select_analysis(inWebProvTech, tempFC1, provName)

    # Select the Fiber to the End User Features and Create the DCN Clone
    arcpy.Select_analysis(tempFC1, tempFC2, "\"TRANSTECH\" = 50")
    createDCN(tempFC2, techFiber)

    # Select the DSL and Copper Features and Create the DCN Clone
    arcpy.Select_analysis(
        tempFC1, tempFC3,
        "\"TRANSTECH\" = 10 OR \"TRANSTECH\" = 20 OR \"TRANSTECH\" = 30")
    createDCN(tempFC3, techCopper)

    # Check Geometry of the Final Feature Class and Repair if Errors Found
    arcpy.CheckGeometry_management(inWebProvTech, tempTblChkGeo)
    # BUG FIX: the original compared the record count as a string
    # (result[0] < "1"), a lexicographic comparison; compare integers.
    if int(arcpy.GetCount_management(tempTblChkGeo)[0]) < 1:
        arcpy.AddMessage("The feature class does not contain any geometry errors")
    else:
        arcpy.RepairGeometry_management(inWebProvTech)

    # Delete Temporary Feature Classes
    arcpy.env.workspace = scratchGDB
    fcTemp = arcpy.ListFeatureClasses("tempFC*", "")
    for fcTempDel in fcTemp:
        arcpy.Delete_management(fcTempDel, "")
except Exception as e:
    # Parenthesized so this is valid in both Python 2 and 3.
    print(e.message)
    arcpy.AddError(e.message)
def extract_then_load(pk, zfilepath, test_method=None):
    """Extract a zipped shapefile, validate and clean it, union all features
    into one polygon, and store the WKT in the database.

    pk          -- primary key handed through to the storage method
    zfilepath   -- path to the uploaded .zip containing one shapefile
    test_method -- optional replacement for store_geometry_for (testing hook)

    Returns (status, messages) where messages is a list of user-facing notes.
    Raises Exception on missing sidecar files, geometry errors, a non-polygon
    shape type, or an empty shapefile.
    """
    strip_excess = False
    stripz_output = None
    messages = []

    #: open zip file and get paths
    arcpy.AddMessage('uploaded {}.'.format(zfilepath))
    # BUG FIX: close the archive deterministically instead of leaking the
    # open file handle.
    with ZipFile(zfilepath) as zfile:
        zfilenames = zfile.namelist()
        # NOTE(review): assumes each member name has exactly one dot and the
        # first member carries the shapefile base name — confirm upstream.
        zfile_exts = [name.split('.')[1] for name in zfilenames]
        zfile_name = zfilenames[0].split('.')[0]
        zfile_folder = join(arcpy.env.scratchFolder, zfile_name)
        shapefile = join(zfile_folder, zfile_name + '.shp')

        arcpy.AddMessage('verify that all files are present')
        #: verify that all files are present
        for ext in required_files:
            if ext not in zfile_exts:
                raise Exception('Missing .{} file'.format(ext))

        zfile.extractall(zfile_folder)

    arcpy.AddMessage('validating geometry')
    #: validate geometry
    checkgeom_output = 'in_memory/checkgeometry'
    arcpy.CheckGeometry_management(shapefile, checkgeom_output)
    if int(arcpy.GetCount_management(checkgeom_output).getOutput(0)) > 0:
        with arcpy.da.SearchCursor(checkgeom_output, ['PROBLEM']) as scur:
            # BUG FIX: the original called scur.next() twice, so the logged
            # error and the raised error described two *different* rows (and
            # a single-problem table raised StopIteration). Fetch once.
            problem = next(scur)[0]
        arcpy.AddError('Geometry Error: {}'.format(problem))
        raise Exception('Geometry Error: {}'.format(problem))

    arcpy.AddMessage('validating geometry type')
    #: validate geometry type for category
    described = arcpy.Describe(shapefile)
    if described.shapeType != 'Polygon':
        arcpy.AddError(
            'Incorrect shape type of {}. Fire perimeters are polygons.'.format(
                described.shapeType))
        raise Exception(
            'Incorrect shape type of {}. Fire perimeters are polygons.'.format(
                described.shapeType))

    if described.hasZ or described.hasM:
        strip_excess = True

    arcpy.AddMessage('reprojecting if necessary')
    #: reproject if necessary
    reprojected_fc = None
    input_sr = described.spatialReference
    if input_sr.name != utm.name:
        #: Project doesn't support the in_memory workspace
        arcpy.AddMessage('Reprojected data from {} to {}'.format(
            input_sr.factoryCode, utm.factoryCode))
        messages.append('Reprojected data from {} to {}'.format(
            input_sr.factoryCode, utm.factoryCode))
        reprojected_fc = '{}/project'.format(arcpy.env.scratchGDB)
        shapefile = arcpy.Project_management(shapefile, reprojected_fc, utm)

    arcpy.AddMessage('Removing m and z if necessary')
    # Project already drops M/Z; only strip explicitly when no reprojection ran.
    if strip_excess and input_sr.name == utm.name:
        arcpy.AddMessage('Removing m and z')
        stripz_output = 'in_memory/stripz'
        shapefile = arcpy.management.CopyFeatures(shapefile, stripz_output)

    arcpy.AddMessage('unioning all shapes')
    #: union all shapes in shapefile
    mergedGeometry = None
    features = 0
    with arcpy.da.SearchCursor(shapefile, ['SHAPE@']) as scur:
        for shape, in scur:
            features = features + 1
            if mergedGeometry is None:
                mergedGeometry = shape
                continue
            mergedGeometry = mergedGeometry.union(shape)

    if features == 0:
        arcpy.AddError('Shapefile is empty')
        raise Exception('Shapefile is empty')
    if features > 1:
        arcpy.AddMessage('Unioned {} features into one.'.format(features))
        messages.append('Unioned {} features into one.'.format(features))

    arcpy.AddMessage('cleaning up temp data')
    #: delete temp data
    if reprojected_fc is not None and arcpy.Exists(reprojected_fc):
        arcpy.Delete_management(reprojected_fc)
    if stripz_output is not None and arcpy.Exists(stripz_output):
        arcpy.Delete_management(stripz_output)
    if arcpy.Exists(zfile_folder):
        arcpy.Delete_management(zfile_folder)

    arcpy.AddMessage('inserting geometry')
    #: insert geometry into database
    db_method = store_geometry_for
    if test_method is not None:
        db_method = test_method

    status, message = db_method(pk, mergedGeometry.WKT)

    arcpy.AddMessage('db response {}, {}'.format(status, message))
    if message is not None:
        messages.append(message)

    return (status, messages)