#spatial reference checking if cmp(arcpy.Describe(roadFC).spatialReference.name, "GCS_WGS_1984") != 0: raise Exception( "buslines spatialreference error. not equal to 'GCS_WGS_1984'") if cmp(arcpy.Describe(stopFC).spatialReference.name, "GCS_WGS_1984") != 0: raise Exception( "busstops spatialreference error. not equal to 'GCS_WGS_1984'") #fileds checking if len(arcpy.ListFields(roadFC)) != 9: raise Exception("buslines fields checking error") if len(arcpy.ListFields(stopFC)) != 4: raise Exception("busstops fields checking error") buslines = json.loads(open(jsonFile, 'r').read()) curLine = arcpy.InsertCursor(roadFC) curStop = arcpy.InsertCursor(stopFC) count = 0 for busline in buslines: lineArray = arcpy.Array() for point in busline["points"]: pnt = arcpy.Point() pnt.X = point["lng"] pnt.Y = point["lat"] lineArray.add(pnt) #buslines featureclass feat = curLine.newRow() feat.shape = lineArray feat.name = busline["name"] print busline["name"] feat.startTime = busline["startTime"]
def splitline (inFC,FCName,alongDist):
    """Split every line in `inFC` into segments no longer than `alongDist`.

    A copy of `inFC` named `FCName` is created in the current workspace,
    emptied, and refilled with the split segments; attribute values of each
    source row are copied onto every segment produced from it. Lines shorter
    than `alongDist` are inserted unchanged. Units of `alongDist` are the
    units of the feature class's coordinate system (planar distance).
    """
    OutDir = env.workspace
    outFCName = FCName
    outFC = OutDir+"/"+outFCName

    def distPoint(p1, p2):
        # Planar Euclidean distance between two arcpy Points.
        calc1 = p1.X - p2.X
        calc2 = p1.Y - p2.Y
        return math.sqrt((calc1**2)+(calc2**2))

    def midpoint(prevpoint,nextpoint,targetDist,totalDist):
        # Point on segment prevpoint->nextpoint at fraction targetDist/totalDist.
        newX = prevpoint.X + ((nextpoint.X - prevpoint.X) * (targetDist/totalDist))
        newY = prevpoint.Y + ((nextpoint.Y - prevpoint.Y) * (targetDist/totalDist))
        return arcpy.Point(newX, newY)

    def splitShape(feat,splitDist):
        # Walk the vertices of `feat`, accumulating distance; whenever the
        # running length would exceed splitDist, interpolate a cut point and
        # start a new arcpy.Array. Returns a list of vertex Arrays (segments).
        # Count the number of points in the current multipart feature
        #
        partcount = feat.partCount
        partnum = 0
        # Enter while loop for each part in the feature (if a singlepart feature
        # this will occur only once)
        #
        lineArray = arcpy.Array()
        while partnum < partcount:
            # Print the part number
            #
            #print "Part " + str(partnum) + ":"
            part = feat.getPart(partnum)
            #print part.count
            totalDist = 0
            pnt = part.next()
            pntcount = 0
            prevpoint = None
            # NOTE(review): shapelist is reset for every part, so for a true
            # multipart feature only the last part's segments survive — verify
            # against original intent.
            shapelist = []
            # Enter while loop for each vertex
            #
            while pnt:
                if not (prevpoint is None):
                    thisDist = distPoint(prevpoint,pnt)
                    maxAdditionalDist = splitDist - totalDist
                    print thisDist, totalDist, maxAdditionalDist
                    if (totalDist+thisDist)> splitDist:
                        # Segment boundary falls inside this edge: emit one or
                        # more interpolated cut points until the remainder fits.
                        while(totalDist+thisDist) > splitDist:
                            maxAdditionalDist = splitDist - totalDist
                            #print thisDist, totalDist, maxAdditionalDist
                            newpoint = midpoint(prevpoint,pnt,maxAdditionalDist,thisDist)
                            lineArray.add(newpoint)
                            shapelist.append(lineArray)
                            # Start the next segment at the cut point so
                            # segments share endpoints (no gaps).
                            lineArray = arcpy.Array()
                            lineArray.add(newpoint)
                            prevpoint = newpoint
                            thisDist = distPoint(prevpoint,pnt)
                            totalDist = 0
                        lineArray.add(pnt)
                        totalDist+=thisDist
                    else:
                        totalDist+=thisDist
                        lineArray.add(pnt)
                        #shapelist.append(lineArray)
                else:
                    # First vertex of the part.
                    lineArray.add(pnt)
                    totalDist = 0
                prevpoint = pnt
                pntcount += 1
                pnt = part.next()
                # If pnt is null, either the part is finished or there is an
                # interior ring
                #
                if not pnt:
                    pnt = part.next()
                    if pnt:
                        print "Interior Ring:"
            partnum += 1
        # Flush the trailing partial segment (needs >= 2 vertices).
        if (lineArray.count > 1):
            shapelist.append(lineArray)
        return shapelist

    # Replace any previous output, then clone schema+rows from the input.
    if arcpy.Exists(outFC):
        arcpy.Delete_management(outFC)
    arcpy.Copy_management(inFC,outFC)
    #origDesc = arcpy.Describe(inFC)
    #sR = origDesc.spatialReference
    #revDesc = arcpy.Describe(outFC)
    #revDesc.ShapeFieldName
    # Empty the copy — only schema is wanted; rows are re-inserted below.
    deleterows = arcpy.UpdateCursor(outFC)
    for iDRow in deleterows:
        deleterows.deleteRow(iDRow)
    # Release cursor locks; try/except guards the zero-row case where iDRow
    # was never bound.
    try:
        del iDRow
        del deleterows
    except:
        pass
    inputRows = arcpy.SearchCursor(inFC)
    outputRows = arcpy.InsertCursor(outFC)
    fields = arcpy.ListFields(inFC)
    numRecords = int(arcpy.GetCount_management(inFC).getOutput(0))
    OnePercentThreshold = numRecords // 100
    #printit(numRecords)
    iCounter = 0
    iCounter2 = 0
    for iInRow in inputRows:
        inGeom = iInRow.shape
        iCounter+=1
        iCounter2+=1
        # Progress reporting hook (currently commented out) every ~1% of rows.
        if (iCounter2 > (OnePercentThreshold+0)):
            #printit("Processing Record "+str(iCounter) + " of "+ str(numRecords))
            iCounter2=0
        if (inGeom.length > alongDist):
            # Long feature: split and write one row per segment, copying all
            # editable attribute values from the source row.
            shapeList = splitShape(iInRow.shape,alongDist)
            for itmp in shapeList:
                newRow = outputRows.newRow()
                for ifield in fields:
                    if (ifield.editable):
                        newRow.setValue(ifield.name,iInRow.getValue(ifield.name))
                newRow.shape = itmp
                outputRows.insertRow(newRow)
        else:
            # Short feature: pass through unchanged.
            outputRows.insertRow(iInRow)
    del inputRows
    del outputRows
# Script fragment: create an empty table in the ArcStormSurge geodatabase,
# add its attribute fields, then begin parsing an ADCIRC fort.63-style text
# file (node count / time step from the second header line).
# Python 2 code (print statement, file(), long()).
# NOTE: this excerpt ends just after the first newRow(); the read loop
# continues past this view.
print "Creating table in ArcStormSurge Geodatabase"
# Process: Create the empty table
arcpy.CreateTable_management(
    os.path.dirname(newTB),
    os.path.basename(newTB),
)
# Process: Add attribute fields to table
arcpy.AddField_management(newTB, "node", "long")
arcpy.AddField_management(newTB, "wl", "float")
arcpy.AddField_management(newTB, "input_time", "text")
arcpy.AddField_management(newTB, "time", "date")
# Describe the new feature class
# Create point and cursor objects
cur = arcpy.InsertCursor(newTB)
# Open the text file
# NOTE(review): `input` shadows the builtin, and the handle is never closed
# in this excerpt.
input = file(txtFile, "r")
print 'Reading the 63 File'
# Loop through the coordinate values
input.readline()
line = input.readline()
data = line.split()
# Second header line: data[0] = number of time steps?, data[1] = node count
# — TODO confirm against the fort.63 format spec.
nodes = long(data[1])
###
time_step = long(data[0])
###
row = cur.newRow()
# (fragment continues beyond this excerpt)
def compareGDBs(installGDB,compGDB):
    '''
    Parameters
    ----------
    installGDB = geodatabase to be compared against 'compGDB'
    compGDB = geodatabase that install GDB is compared against

    Returns
    -------
    MissingFDS (table) : which feature datasets are missing in the installGDB
        that are included in compGDB?
        - fields within MissingFDS Table
        1) INSTALLATION - name of installGDB
        2) FDS_MISSING - name of feature dataset missing
    MissingFC (table) : within feature datasets correctly included, which
        feature classes are missing?
        - fields within MissingData Table
        1) INSTALLATION - name of installGDB
        2) FDS - name of feature dataset for feature class being analyzed
        3) FC_MISSING- name of feature class missing
    MissingFields (table) : within the feature dataset/feature class combo
        correctly included, which fields are missing?
        - fields within MissingData Table
        1) INSTALLATION - name of installGDB
        2) FDS - name of feature dataset for field being analyzed
        3) FC- name of feature feature class for field being analyzed
        4) FIELD_MISSING - name of field missing from feature dataset/feature
           class that is included in the comparison GDB.
    MissingData (table) : within the feature dataset/feature class combo
        correctly included, what data is missing?
        - fields within MissingData Table
        1) INSTALLATION - name of installGDB
        2) FDS - name of feature dataset for field being analyzed
        3) FC- name of feature feature class for field being analyzed
        4) FIELD - name of field being analyzed
        5) FIELD_NONSDS - True or False? If the field is not included in
           compGDB == T
        6) EMPTY_FC - is this feature class empty? T/F
        7) NULL_FC_COUNT - the count of features with NULL values within field.
           NULL values are counted if cell equals any of following:
           [None, "None", "none", "NONE", "",99999,-99999, " ", "NA", "N/A",
           "n/a","NULL","Null","<NULL>","<Null>"]
        8) TBD_FC_COUNT - the count of features with TBD values within field.
           TBD values are counted if cell equals any of following:
           ["tbd","TBD","To be determined"]
        9) OTHER_FC_COUNT - the count of features with OTHER values within
           field. OTHER values are counted if cell equals any of following:
           [ "Other", "other", "OTHER"]
        10) NULL_VALUE_COUNTS - the individual counts of each unique entry for
            NULL cells. e.g.: " '' has 1 feature. ' ' has 1 feature. 'None'
            has 2 feature. "
        11) TBD_VALUE_COUNTS - the individual counts of each unique entry for
            TBD cells. e.g.: " 'tbd' has 1 feature. 'TBD' has 1 feature. 'To
            be determined' has 2 feature. "
        12) OTHER_VALUE_COUNTS - the individual counts of each unique entry
            for OTHER cells. e.g.: " 'OTHER' has 1 feature. 'other' has 1
            feature. 'Other' has 2 feature. "
        13) TOTAL_INDT_COUNT - total count of cells with INDETERMINANT values
            (i.e.: Null, TBD, or Other values) per field
        14) TOTAL_DET_COUNT - total count of cells with DETERMINANT values
            (i.e.: NOT Null, NOT TBD, or NOT Other values) per field
        15) POP_VALS_COUNT - total count of features POPULATED (either
            INDETERMINANT or DETERMINANT) within fields
        16) POP_VALS - the individual counts of each unique entry for
            DETERMINED (not null, tbd, or other) cells that are 'correctly'
            populated (i.e.: adheres to domain-contraint or text in non-domain
            contrained field) e.g.: "'BX Exchange' has 1 feature. 'Homestead
            Air Reserve Base' has 2 feature. U.S. 'Customs Ramp Area' has 1
            feature."
        17) INC_POP_VALS - the individual counts of each unique entry for
            DETERMINED (not null, tbd, or other) cells that are 'incorrectly'
            populated (i.e.: DOES NOT adhere to domain-contrained field)
            e.g.: "'9999' has 1 feature. '341' has 1 feature. '343' has 1
            feature. "

    Example
    -------
    compareGDBs(comparisonGDB.gdb,targetGDB.gdb)
    '''
    start_time = datetime.now()
    arcpy.env.workspace = installGDB
    installationName = os.path.splitext(os.path.basename(installGDB))[0]
    compName = os.path.splitext(os.path.basename(compGDB))[0]
    # remove any locks that might exist on the installation gdb
    arcpy.AddMessage("Removing any locks on " + installationName+".gdb")
    def clearWSLocks(inputWS):
        '''Attempts to clear locks on a workspace, returns stupid message.'''
        # Compact_management forces lock release as a side effect of the all().
        if all([arcpy.Exists(inputWS), arcpy.Compact_management(inputWS), arcpy.Exists(inputWS)]):
            return 'Workspace (%s) clear to continue...' % inputWS
        else:
            return '!!!!!!!! ERROR WITH WORKSPACE %s !!!!!!!!' % inputWS
    clearWSLocks(installGDB)
    # output table names, with comparison geodatabase name prepended
    missingFDTblName=compName+"_MissingFDS"
    missingFCTblName=compName+"_MissingFCs"
    missingFLDTblName=compName+"_MissingFields"
    nullTableName=compName+"_MissingData"
    # IF THE TABLE EXISTS, DELETE ROWS,
    # ELSE CREATE ERROR TABLE FOR EACH FEATURE DATASET IN COMPGDB
    ### TK put this table creation part in a loop
    # CREATE MISSING FEATURE DATASET TABLE
    if arcpy.Exists(os.path.join(installGDB,missingFDTblName)):
        arcpy.Delete_management(os.path.join(installGDB,missingFDTblName))
        arcpy.AddMessage (missingFDTblName + " Table already exists in " + os.path.splitext(os.path.basename(installGDB) )[0]+ ".gdb -- REPLACING")
        createMissingFDstbl(installGDB,missingFDTblName)
    else:
        createMissingFDstbl(installGDB,missingFDTblName)
    # CREATE MISSING FEATURE CLASS TABLE
    if arcpy.Exists(os.path.join(installGDB,missingFCTblName)):
        arcpy.Delete_management(os.path.join(installGDB,missingFCTblName))
        arcpy.AddMessage (missingFCTblName + " Table already exists in " + os.path.splitext(os.path.basename(installGDB) )[0]+ ".gdb -- REPLACING")
        createMissingFCtbl(installGDB,missingFCTblName)
    else:
        createMissingFCtbl(installGDB,missingFCTblName)
    # CREATE MISSING FIELD TABLE
    if arcpy.Exists(os.path.join(installGDB,missingFLDTblName)):
        arcpy.Delete_management(os.path.join(installGDB,missingFLDTblName))
        arcpy.AddMessage (missingFLDTblName + " Table already exists in " + os.path.splitext(os.path.basename(installGDB) )[0]+ ".gdb -- REPLACING")
        createMissingFLDtbl(installGDB,missingFLDTblName)
    else:
        createMissingFLDtbl(installGDB,missingFLDTblName)
    # CREATE NULL DATA TABLE
    if arcpy.Exists(os.path.join(installGDB,nullTableName)):
        arcpy.Delete_management(os.path.join(installGDB,nullTableName))
        arcpy.AddMessage (nullTableName + " Table already exists in " + os.path.splitext(os.path.basename(installGDB) )[0]+ ".gdb -- REPLACING")
        createNullTable(installGDB,nullTableName)
    else:
        createNullTable(installGDB,nullTableName)
    # Open an edit session so all inserts commit atomically at the end.
    edit = arcpy.da.Editor(arcpy.env.workspace)
    edit.startEditing(False, False)
    edit.startOperation()
    # WITHIN EACH REQUIRED FEATURE DATASET AND FEATURE CLASS THAT THE INSTALLATION HAS,
    # WHICH FIELDS ARE MISSING?
    nullTable = os.path.join(installGDB,nullTableName)
    nullrows = arcpy.InsertCursor(nullTable)
    # WITHIN EACH REQUIRED FEATURE DATASET AND FEATURE CLASS THAT THE INSTALLATION HAS,
    # WHICH FIELDS ARE MISSING?
    missFLDTable = os.path.join(installGDB,missingFLDTblName)
    fldrows = arcpy.InsertCursor(missFLDTable)
    # WITHIN THE FEATURE DATASETS THAT THE INSTALLATION HAS,
    # WHICH FEATURE CLASSES ARE MISSING?
    missFCTable = os.path.join(installGDB,missingFCTblName)
    fcrows = arcpy.InsertCursor(missFCTable)
    # WHICH FEATURE DATASETS ARE MISSING FROM THE INSTALLATION DATABASE COMPARED TO COMPARISON DATABASE
    missFDSTable = os.path.join(installGDB,missingFDTblName)
    fdrows = arcpy.InsertCursor(missFDSTable)
    ## THEN WORK ON MISSING DATA
    # Enumerate the comparison GDB; the install GDB is probed for each item.
    arcpy.env.workspace = compGDB
    theFDSs = list(arcpy.ListDatasets())
    for theFDS in theFDSs:
        arcpy.env.workspace = compGDB
        theFCs = list(arcpy.ListFeatureClasses(feature_dataset=theFDS))
        for theFC in theFCs:
            time_elapsed = datetime.now() - start_time
            arcpy.AddMessage('Time elapsed (hh:mm:ss.ms) {}'.format(time_elapsed))
            today = date.today()
            timenow = time.strftime('%I:%M:%S-%p')
            # NOTE(review): '%mm_%dd_%Y' renders literal 'm'/'d' after the
            # month/day numbers (e.g. '07m_04d_2020') — confirm intended.
            printDate = today.strftime('%mm_%dd_%Y')
            arcpy.AddMessage(": Comparing "+installationName + " to " +compName+" --- " + printDate + " at " + timenow + " --- Feature : " + theFDS + "//" + theFC )
            # CHECK FOR EXISTANCE OF REQUIRED FEATURE DATASET
            if arcpy.Exists(os.path.join(installGDB,str(theFDS).upper())):
                # CHECK FOR EXISTANCE OF REQUIRED FEATURE CLASS in FEATURE DATASET
                if arcpy.Exists(os.path.join(installGDB,str(theFDS).upper(),str(theFC).upper())):
                    # Comparison-side field names (geometry/OID fields excluded).
                    minFields = (fld.name.upper() for fld in arcpy.ListFields(os.path.join(compGDB,theFDS,theFC)) if fld.name not in ['Shape'.upper(), 'OBJECTID', 'Shape_Length'.upper(), 'Shape_Area'.upper()])
                    minFl = list(minFields)
                    minF = [x.upper() for x in minFl]
                    #reqDomains = (fld.domain for fld in arcpy.ListFields(os.path.join(compGDB,theFDS,theFC)) if str(fld.name) not in ['Shape', 'OBJECTID', 'Shape_Length', 'Shape_Area'])
                    # Install-side field names, same exclusions.
                    minFieldsInstall = (fld.name.upper() for fld in arcpy.ListFields(os.path.join(installGDB,theFDS,theFC)) if fld.name not in ['Shape'.upper(), 'OBJECTID', 'Shape_Length'.upper(), 'Shape_Area'.upper()])
                    minFlInstall_l = list(minFieldsInstall)
                    minFlInstall = [x.upper() for x in minFlInstall_l]
                    # CHECK FOR EXISTANCE OF REQUIRED FIELD in FEATURE CLASS
                    # (helper defined but apparently unused in this function)
                    def findField(fc, fi):
                        fieldnames = [field.name.upper() for field in arcpy.ListFields(fc)]
                        if fi.upper() in fieldnames:
                            return True
                        else:
                            return False
                    # IF required field exists....
                    for theFLD in minFlInstall:
                        arcpy.env.workspace = installGDB
                        row = nullrows.newRow()
                        ignoreFLD = ['Shape'.upper(), 'OBJECTID'.upper(), 'Shape_Length'.upper(), 'Shape_Area'.upper()]
                        if theFLD not in ignoreFLD:
                            if theFLD not in minF:
                                arcpy.AddMessage(theFLD + " *NOT* included in "+compName+"/"+theFC+" fields")
                                row.setValue("FIELD_NONSDS", "T")
                            else:
                                arcpy.AddMessage(theFLD + " included in "+compName+"/"+theFC+" fields")
                                row.setValue("FIELD_NONSDS", "F")
                            with arcpy.da.SearchCursor(os.path.join(installGDB,theFDS,theFC), theFLD.upper()) as cur:
                                row.setValue("FIELD", theFLD)
                                instFCFields = [(afld.name.upper(), afld) for afld in arcpy.ListFields(os.path.join(installGDB,theFDS,theFC))]
                                domains = arcpy.da.ListDomains()
                                idx = map(itemgetter(0), instFCFields).index(theFLD)
                                row.setValue("FDS", theFDS)
                                row.setValue("FC", theFC)
                                #CREATE SEARCH CURSOR ON FDS, FC, AND FIELDS TO BUILD LIST OF VALUES AND COUNTS
                                #with arcpy.da.SearchCursor(os.path.join(installGDB,"Recreation","RecArea_A"), str("recreationAreaType").upper()) as cur:
                                # NOTE(review): '"<null>""<Null>"' below is two
                                # adjacent string literals — Python concatenates
                                # them into '<null><Null>'; a comma is almost
                                # certainly missing.
                                nullValues = [None, "None", "none", "NONE", "","-99999","77777",77777, " ", "NA", "na", "N/A", "n/a","NULL","Null","<NULL>","null","<null>""<Null>"," ","  ","   ","    "]
                                otherValues = [ "Other", "other", "OTHER","88888",88888]
                                tbdValues = ["tbd","TBD","To be determined","Tbd",99999,"99999"]
                                #indtList = nullValues + otherValues+ tbdValues
                                ## GET TOTAL COUNT OF VALUES
                                # (genexp 'row' is scoped to the genexp; it
                                # does not clobber the insert-cursor row.)
                                countValues = collections.Counter(row[0] for row in cur)
                                sumValues = sum(collections.Counter(countValues).values())
                                # GET TOTAL COUNT OF 'NULL' VALUES for each NULL VALUE 'CODE'
                                countNulls = list((n[0], n[1]) for n in countValues.items() if n[0] in nullValues)
                                sumNulls = sum(n[1] for n in countNulls)
                                # GET TOTAL COUNT OF 'TBD' VALUES for each NULL VALUE 'CODE'
                                countTBD = list((n[0], n[1]) for n in countValues.items() if n[0] in tbdValues)
                                sumTBD = sum(n[1] for n in countTBD)
                                # GET TOTAL COUNT OF 'OTHER' VALUES for each NULL VALUE 'CODE'
                                countOthers = list((n[0], n[1]) for n in countValues.items() if n[0] in otherValues)
                                sumOther = sum(n[1] for n in countOthers)
                                sumIndt = sumNulls + sumTBD + sumOther
                                sumDetr = sumValues - sumIndt
                                #populate counts of populated values, nulls, tbds, and others
                                row.setValue("INSTALLATION",installationName)
                                row.setValue("POP_VALS_COUNT",sumValues)
                                row.setValue("NULL_FC_COUNT",sumNulls)
                                row.setValue("TBD_FC_COUNT",sumTBD)
                                row.setValue("OTHER_FC_COUNT",sumOther)
                                row.setValue("TOTAL_INDT_COUNT", sumIndt)
                                row.setValue("TOTAL_DET_COUNT", sumDetr)
                                if sumValues == 0:
                                    row.setValue("EMPTY_FC", "T")
                                else:
                                    row.setValue("EMPTY_FC", "F")
                                # Render per-category "<n> feature(s) are '<v>'"
                                # summary strings for OTHER/TBD/NULL buckets.
                                indtArray=[[countOthers,"OTHER_VALUE_COUNTS"],[countTBD,"TBD_VALUE_COUNTS"],[countNulls,"NULL_VALUE_COUNTS"]]
                                for indtCounts in indtArray:
                                    Strings = str()
                                    if not indtCounts[0]:
                                        # if list is empty, pass
                                        pass
                                    else:
                                        arcpy.AddMessage (indtCounts[0])
                                        for element in indtCounts[0]:
                                            if element[0] is None:
                                                value = "NULL"
                                            elif element[0] is int or type(element[0]) is float or type(element[0]) is int or type(element[0]) is datetime or type(element[0]) is tuple:
                                                #elif element[0] is not str:
                                                value =unicode(str(element[0]).encode('utf-8'), errors="ignore")
                                            else:
                                                value =unicode(element[0].encode('utf-8'), errors="ignore")
                                            count =str(element[1])
                                            if int(count) < 2 :
                                                valCount = count+" feature is '"+value+"'. "
                                                Strings = Strings + valCount
                                            else:
                                                valCount = count+" features are '"+value+"'. "
                                                Strings = Strings + valCount
                                        row.setValue(indtCounts[1], Strings)
                                # Resolve the field's domain (coded values or range).
                                domainName = map(itemgetter(1), instFCFields)[idx].domain
                                domainVals = []
                                domainRng = []
                                for domain in domains:
                                    if domain.name == domainName:
                                        if domain.domainType == 'CodedValue':
                                            domainVals = [val for val, desc in domain.codedValues.items()]
                                        elif domain.domainType == 'Range':
                                            domainRng = range(int(domain.range[0]), int((domain.range[1]+1)))
                                #for domain.name not in reqDomains:
                                #....
                                # get list of counts for each unique value in field
                                vals = sorted(countValues.items(), key=lambda x:x[1])
                                # set and remove all values that have TBD, OTHER, or NULL
                                vals = set(vals) - set(countTBD) - set(countOthers) - set(countNulls)
                                # get set back to list
                                vals = list(vals)
                                # create empty string to concatenate each value
                                ## valstr = 'correctly' populated values (either conforms to domain, or text in non-domain contrained field)
                                ## incvalstr = incorrectly populated values (values not in accordance with domain)
                                valstr = str()
                                incvalstr = str()
                                if not vals:
                                    pass
                                else:
                                    for v in vals:
                                        # OPEN TEXT FIELDS; NO DOMAIN CONSTRAINT
                                        if (domainVals == [] and domainRng == []) or (domainVals != [] and v[0] in domainVals) or (domainRng != [] and v[0] in domainRng):
                                            if v[0] is None:
                                                dom = "NULL"
                                            elif v[0] is int or type(v[0]) is float or type(v[0]) is int or type(v[0]) is datetime or type(v[0]) is tuple:
                                                #elif v[0] is not str:
                                                dom =unicode(str(v[0]).encode('utf-8'), errors="ignore")
                                            else:
                                                dom =unicode(v[0].encode('utf-8'), errors="ignore")
                                            val =str(v[1])
                                            # NOTE(review): 'val' is a str here, so
                                            # in Python 2 'val < 2' is ALWAYS False
                                            # (numbers sort before strings) — the
                                            # singular branch never runs; compare
                                            # int(val) < 2 as done above for the
                                            # indeterminate buckets.
                                            if val < 2:
                                                domCount = val+" feature is '"+dom+"'. "
                                                valstr = valstr + domCount +". "
                                            else:
                                                domCount = val+" features are '"+dom+"'. "
                                                valstr = valstr + domCount
                                        # INCORRECTLY POPULATED VALUES WITHIN DOMAIN CONSTRAINED FIELDS
                                        else:
                                            if v[0] is None:
                                                dom = "NULL"
                                            elif v[0] is int or type(v[0]) is float or type(v[0]) is int or type(v[0]) is datetime or type(v[0]) is tuple:
                                                # elif v[0] is not str:
                                                dom =unicode(str(v[0]).encode('utf-8'), errors="ignore")
                                            else:
                                                dom =unicode(v[0].encode('utf-8'), errors="ignore")
                                            val =str(v[1])
                                            # NOTE(review): same always-False str/int
                                            # comparison as above.
                                            if val < 2:
                                                domCount = val+" feature is '"+dom+"'. "
                                                incvalstr = incvalstr + domCount
                                            else:
                                                domCount = val+" features are '"+dom+"'. "
                                                incvalstr = incvalstr + domCount
                                # Guard against exceeding the gdb text-field limit.
                                if len(valstr) > 32766:
                                    valstr = "Unique value counts exceed field character limit -- not listed here."
                                elif len(incvalstr) > 32766:
                                    incvalstr = "Unique value counts exceed field character limit -- not listed here."
                                else:
                                    # remove last comma at end of value string
                                    # NOTE(review): rows whose strings exceed the
                                    # limit appear to skip the setValue/insertRow
                                    # below (indentation ambiguous in source) —
                                    # verify against the original file.
                                    row.setValue("POP_VALS",valstr)
                                    row.setValue("INC_POP_VALS",incvalstr)
                                    nullrows.insertRow(row)
                        else:
                            pass
                        del row
                    # Comparison-side fields absent from the install FC.
                    for mF in minF:
                        if mF.upper() not in minFlInstall:
                            fldrow = fldrows.newRow()
                            fldrow.setValue("FDS", theFDS)
                            fldrow.setValue("FC", theFC)
                            fldrow.setValue("FIELD_MISSING", mF)
                            fldrow.setValue("INSTALLATION", installationName)
                            fldrows.insertRow(fldrow)
                            del fldrow
                #required FEATURE CLASS does not exist
                else:
                    fcrow = fcrows.newRow()
                    fcrow.setValue("FDS", theFDS)
                    fcrow.setValue("FC_MISSING", theFC)
                    fcrow.setValue("INSTALLATION", installationName)
                    fcrows.insertRow(fcrow)
                    del fcrow
            #required FEATURE DATASET does not exist
            else:
                fdrow = fdrows.newRow()
                fdrow.setValue("FDS_MISSING", theFDS)
                fdrow.setValue("INSTALLATION", installationName)
                fdrows.insertRow(fdrow)
                del fdrow
    # Missing FDS is appended for each record in loop... remove duplicates
    columns_to_check=['FDS_MISSING','INSTALLATION']
    arcpy.DeleteIdentical_management(missFDSTable,fields=columns_to_check)
    columns_to_check=['FDS','FC_MISSING','INSTALLATION']
    arcpy.DeleteIdentical_management(missFCTable,fields=columns_to_check)
    columns_to_check=['FDS','INSTALLATION','FC','FIELD_MISSING']
    arcpy.DeleteIdentical_management(missFLDTable,fields=columns_to_check)
    # Commit the edit session and release the insert cursors.
    edit.stopOperation()
    edit.stopEditing(True)
    del nullrows
    del fdrows
    del fldrows
    del fcrows
    arcpy.AddMessage('Comparisons between ' + installationName + " & " + compName + ' Completed!')
    time_elapsed = datetime.now() - start_time
    arcpy.AddMessage('Time elapsed (hh:mm:ss.ms) {}'.format(time_elapsed))
# Script: import GPS drop-off points from a CSV into a new point shapefile
# that borrows the spatial reference of the NYC TAZ shapefile.
path = "U:\\Desktop\\nyct2000_16c"
arcpy.env.overwriteOutput = True
arcpy.env.workspace = path

# create empty shapefile, be of a point geometry type
out_path = path
out_name = 'By vehicle_Stateplane_dwell_without_charge.shp'

# create output
NYC_shapefile = "nyct2000.shp"
spRef = arcpy.Describe(NYC_shapefile).spatialReference  # same as NYC TAZ file
#arcpy.env.outputCoordinateSystem = spRef
arcpy.CreateFeatureclass_management(out_path, out_name, 'POINT', '', '', '', spRef)

# import GPS data: one point feature per CSV row
cursor = arcpy.InsertCursor(path + '\\' + out_name)  # use insert cursor
try:
    with open('By vehicle_Stateplane_dwell_without_charge.csv', 'rb') as f:
        reader = csv.DictReader(f)
        for row in reader:
            # Create the feature
            feature = cursor.newRow()
            # Add the point geometry to the feature
            vertex = arcpy.CreateObject("Point")
            # BUG FIX: csv.DictReader yields strings; coerce to float so the
            # point geometry is built from numeric coordinates.
            vertex.X = float(row['dropoff_longitude'])
            vertex.Y = float(row['dropoff_latitude'])
            feature.shape = vertex
            # write to shapefile
            cursor.insertRow(feature)
finally:
    # BUG FIX: release the insert cursor so the shapefile's schema lock is
    # dropped even if a row fails to convert/insert.
    del cursor
# ---------------------------------------------------------------------- Create final output
# Persist the projected station points, then (optionally) mirror their
# attributes into a delimited text file.
arcpy.CopyFeatures_management(pointsProj, outPoints)

# Create Txt file if selected and write attributes of station points
if text == True:
    AddMsgAndPrint("Creating Output text file:\n",0)
    AddMsgAndPrint("\t" + str(outTxt) + "\n",0)

    # Seed the file with a header row so arcpy can treat it as a table.
    t = open(outTxt, 'w')
    t.write("ID, STATION, X, Y, Z")
    t.close()

    # Read stations in ascending STATION order and append each one to the
    # text table.
    rows = arcpy.SearchCursor(outPoints, "", "", "STATION", "STATION" + " A")
    txtRows = arcpy.InsertCursor(outTxt)
    for row in rows:
        newRow = txtRows.newRow()
        newRow.ID = row.ID
        newRow.STATION = row.STATION
        newRow.X = row.POINT_X
        newRow.Y = row.POINT_Y
        newRow.Z = row.POINT_Z
        txtRows.insertRow(newRow)

    # Release cursor locks on the output files.
    del txtRows
    del newRow
def SLEM(Line, Distance, Output, TempFolder, TF):
    """Segment a polyline layer into fixed-length events along linear routes.

    Line       : input polyline feature class/layer.
    Distance   : target segment length (units of the data's coordinate system).
    Output     : path of the sorted, segmented output feature class.
    TempFolder : folder used by the AGO branch to export an attribute table.
    TF         : "true" (string) to delete intermediate scratch datasets.

    Returns the sorted output (`Sort`). The input's type is inferred from its
    fields — raw (k=0), UGO (k=1), sequenced UGO (k=2), or AGO (k=3) — and a
    near-identical CreateRoutes/event-table/MakeRouteEventLayer pipeline runs
    for each case.
    """
    CopyLine = arcpy.CopyFeatures_management(Line, "%ScratchWorkspace%\CopyLine")
    fieldnames = [f.name for f in arcpy.ListFields(CopyLine)]

    #/identification of the polyline type : raw, UGOs, sequenced UGOs, or AGOs
    k = 0
    if "Rank_AGO" in fieldnames:
        k = 3
    elif "Order_ID" in fieldnames:
        k = 2
    elif "Rank_UGO" in fieldnames:
        k = 1
    arcpy.AddMessage(k)

    ################################
    ########## Raw polyline ########
    ################################
    if k == 0:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # Rank_UGO seeded from the first listed field (presumably the OID) —
        # TODO confirm fieldnames[0] is the intended identifier.
        arcpy.CalculateField_management(CopyLine, "Rank_UGO", "[" + fieldnames[0] + "]", "VB", "")
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        # One event row per Distance-length slice of each route.
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Rank_UGO = line.Rank_UGO
                row.To_M = tempdistance + float(Distance)
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        #/creation of the route event layer
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(LineRoutes, "Rank_UGO", PointEventTEMP, "Rank_UGO LINE Distance To_M", "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        Sort = arcpy.Sort_management(Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")
        #/calculation of the "Distance" field
        # Two parallel cursors (rows2 one row ahead of rows1) accumulate the
        # running distance along each Rank_UGO group.
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Sort)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n == nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                # New route starts: reset the running distance.
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    ##################
    ###### UGO #######
    ##################
    if k == 1:
        # Same pipeline as k == 0, but the input already carries Rank_UGO.
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Rank_UGO = line.Rank_UGO
                row.To_M = tempdistance + float(Distance)
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        #/creation of the route event layer
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(LineRoutes, "Rank_UGO", PointEventTEMP, "Rank_UGO LINE Distance To_M", "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        Sort = arcpy.Sort_management(Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")
        #/calculation of the "Distance" field
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Sort)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n == nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    ################################
    ######### Sequenced UGO ########
    ################################
    if k == 2:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!Shape_Length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        arcpy.AddField_management(LineRoutes, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # Carry Order_ID over from the input, pairing routes with the input
        # rows sorted by Rank_UGO (parallel-cursor positional join).
        Sort = arcpy.Sort_management(Line, "%ScratchWorkspace%\\Sort", [["Rank_UGO", "ASCENDING"]])
        rows1 = arcpy.UpdateCursor(LineRoutes)
        rows2 = arcpy.SearchCursor(Sort)
        for line1 in rows1:
            line2 = rows2.next()
            line1.Order_ID = line2.Order_ID
            rows1.updateRow(line1)
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.To_M = tempdistance + float(Distance)
                row.Order_ID = line.Order_ID
                row.Rank_UGO = line.Rank_UGO
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(LineRoutes, "Rank_UGO", PointEventTEMP, "Rank_UGO LINE Distance To_M", "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        Sort = arcpy.Sort_management(Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")
        #/calculation of the "Distance" field
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        # NOTE(review): unlike k==0/k==1 this counts Split (not Sort) and
        # uses '>=' — verify the inconsistency is intentional.
        nrows = int(str(arcpy.GetCount_management(Split)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n >= nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    #############
    #### AGO ####
    #############
    if k == 3:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # Fallback for data whose shape field is named 'forme' (French data).
        try:
            arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")
        except:
            arcpy.CalculateField_management(CopyLine, "To_Measure", "!forme.length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_AGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        arcpy.AddField_management(LineRoutes, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(LineRoutes, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(LineRoutes, "AGO_Val", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        # Round-trip the attribute table through a semicolon-delimited text
        # export to recover Order_ID/Rank_UGO/Rank_AGO/AGO_Val per route.
        Ext.Export(CopyLine, TempFolder, "ExportTable")
        # NOTE(review): 'fichier' is never closed.
        fichier = open(TempFolder + "\\ExportTable.txt", 'r')
        Order_ID = []
        Rank_UGO = []
        Dist = []
        Rank_AGO = []
        AGO_Val = []
        head = fichier.readline().split('\n')[0].split(';')
        iOrder_ID = head.index("Order_ID")
        iRank_UGO = head.index("Rank_UGO")
        iRank_AGO = head.index("Rank_AGO")
        iAGO_Val = head.index("AGO_Val")
        for l in fichier:
            Order_ID.append(int(l.split('\n')[0].split(';')[iOrder_ID]))
            Rank_UGO.append(int(l.split('\n')[0].split(';')[iRank_UGO]))
            Rank_AGO.append(float(l.split('\n')[0].split(';')[iRank_AGO]))
            # Decimal comma -> decimal point (locale-formatted export).
            AGO_Val.append(float(l.split('\n')[0].split(';')[iAGO_Val].replace(',', '.')))
        # Positional write-back: assumes LineRoutes row order matches the
        # exported table order — TODO confirm.
        p = 0
        rows1 = arcpy.UpdateCursor(LineRoutes)
        for line1 in rows1:
            line1.Order_ID = Order_ID[p]
            line1.Rank_UGO = Rank_UGO[p]
            line1.Rank_AGO = Rank_AGO[p]
            line1.AGO_Val = AGO_Val[p]
            rows1.updateRow(line1)
            p += 1
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Distance_From_Start", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_AGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "AGO_Val", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Distance_From_Start = tempdistance
                row.To_M = tempdistance + float(Distance)
                row.Order_ID = line.Order_ID
                row.Rank_UGO = line.Rank_UGO
                row.Rank_AGO = line.Rank_AGO
                row.AGO_Val = line.AGO_Val
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(LineRoutes, "Rank_AGO", PointEventTEMP, "Rank_AGO LINE Distance_From_Start To_M", "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        # Rename Distance_From_Start to Distance (as a LONG copy) for output.
        arcpy.AddField_management(Split, "Distance", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(Split, "Distance", "!Distance_From_Start!", "PYTHON_9.3", "")
        arcpy.DeleteField_management(Split, ["To_M", "Distance_From_Start"])
        Sort = arcpy.Sort_management(Split, Output, [["Order_ID", "ASCENDING"], ["Rank_UGO", "ASCENDING"], ["Rank_AGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        UPD_SL.UpToDateShapeLengthField(Sort)
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    return Sort
def createThiessPolygonFast(CellShanXingPois, cellThiessFeature, cellThiessFinal, inOutField, ShengBianJie):
    """Build Thiessen (Voronoi) polygons for outdoor cell sites and clip them.

    Steps: select outdoor sites (inOutField == '室外'), create Thiessen
    polygons for them, clip with the province boundary (ShengBianJie), then
    copy each polygon into cellThiessFinal — oversized polygons are first
    intersected with a square cut window centred on the owning cell point.

    Relies on module-level globals: shapefieldname, rad, CutRad,
    spatialReference, XiaoQuFields.  (Assumes `rad` is the degree→radian
    factor and CutRad the half-window size in km — TODO confirm.)
    """
    # The print/logging strings below are runtime output and are kept as-is.
    print "过滤室外站生成泰森多边形"
    logging.info("过滤室外站生成泰森多边形")
    # Keep only outdoor ('室外') sites for the Voronoi construction.
    CellsOutPointsLTE = arcpy.Select_analysis(CellShanXingPois, "in_memory/CellsOutPointsLTE", inOutField + "='室外'")
    print "创建小区泰森多边形"
    logging.info("创建小区泰森多边形")
    cellThiessFeatureCacheLTE = arcpy.CreateThiessenPolygons_analysis(
        CellsOutPointsLTE, "in_memory/cellThiessFeatureCacheLTE", "ALL")
    print "将泰森多边形进行省边界切割"
    logging.info("将泰森多边形进行省边界切割")
    # Clip the Thiessen polygons to the province boundary.
    arcpy.Clip_analysis(cellThiessFeatureCacheLTE, ShengBianJie, cellThiessFeature, "")
    cellsThiessPolygonCurs = arcpy.SearchCursor(cellThiessFeature)
    cellThiessFinalInsertCur = arcpy.InsertCursor(cellThiessFinal)
    row = cellsThiessPolygonCurs.next()
    while row:
        currentThiessenPolygon = row.getValue(shapefieldname)
        finalCellThiessRow = cellThiessFinalInsertCur.newRow()
        # NOTE(review): the printed message says 0.001 but the area threshold
        # coded here is 0.0006 — confirm which is intended.
        if (row.getValue("Shape.STArea()") > 0.0006 or row.getValue("Shape.STLength()") > 0.05):
            print "面积大于0.001,执行裁剪"
            # Look up the cell point that generated this Thiessen polygon.
            ThiessenFID = row.ORIG_FID
            CurrentCellPointCur = arcpy.SearchCursor(
                CellShanXingPois, "ORIG_FID=" + repr(ThiessenFID))
            CurrentCellPoinRow = CurrentCellPointCur.next()
            CenterPoint = CurrentCellPoinRow.getValue(shapefieldname)
            CenterPointPnt = CenterPoint.getPart()
            # Metres-per-degree of longitude shrink with latitude (111 km per
            # degree at the equator, scaled by cos(lat)).
            rdloncos = 111 * math.cos(CenterPointPnt.Y * rad)
            # Build a closed 4-corner cut window around the cell point.
            pointSingleArray = arcpy.Array()
            for si in range(5):
                singleAngle = 90 * si
                if (singleAngle == 360):
                    # 5th vertex repeats the 1st so the ring is closed.
                    singleAngle = 0
                rslonEndSingle = CutRad * math.sin(singleAngle * rad)
                rslatEndSingle = CutRad * math.cos(singleAngle * rad)
                lonEndSingle = CenterPointPnt.X + (rslonEndSingle / rdloncos)
                latEndSingle = CenterPointPnt.Y + rslatEndSingle / 111
                pointEndSingle = arcpy.Point(lonEndSingle, latEndSingle)
                pointSingleArray.add(pointEndSingle)
            XiaoQuShanXingSingle = arcpy.Polygon(pointSingleArray, spatialReference)
            # Keep only the part of the Thiessen polygon inside the window.
            CELL_TSDBX_SINGLE = currentThiessenPolygon.intersect(
                XiaoQuShanXingSingle, 4)
            finalCellThiessRow.shape = CELL_TSDBX_SINGLE
            # Copy all attributes except the object ID.
            for XiaoQuField in XiaoQuFields:
                XQFieldName = XiaoQuField.name
                if (XQFieldName != "OBJECTID"):
                    finalCellThiessRow.setValue(XQFieldName, row.getValue(XQFieldName))
            finalCellThiessRow.Input_FID = row.Input_FID
            print "CI:" + str(row.CI), row.CITY_NAME
            cellThiessFinalInsertCur.insertRow(finalCellThiessRow)
        else:
            # Small polygon: copy the clipped Thiessen shape unchanged.
            finalCellThiessRow.shape = currentThiessenPolygon
            for XiaoQuField in XiaoQuFields:
                XQFieldName = XiaoQuField.name
                if (XQFieldName != "OBJECTID"):
                    finalCellThiessRow.setValue(XQFieldName, row.getValue(XQFieldName))
            finalCellThiessRow.Input_FID = row.Input_FID
            print "CI:" + str(row.CI), row.CITY_NAME
            cellThiessFinalInsertCur.insertRow(finalCellThiessRow)
        row = cellsThiessPolygonCurs.next()
    del cellsThiessPolygonCurs, cellThiessFinalInsertCur, CellsOutPointsLTE, cellThiessFeatureCacheLTE, row
arcpy.CalculateField_management(outshp, 'yCentroid', yExpression, "PYTHON_9.3") #3. Create a shapefile of centroid in_rows = arcpy.SearchCursor(outshp) outPointSHP = os.path.join(scratch, outPointFileName) #outPointSHP = r'in_memory/SiteMarker' point1 = arcpy.Point() array1 = arcpy.Array() arcpy.CreateFeatureclass_management(scratch, outPointFileName, "POINT", "", "DISABLED", "DISABLED", srGCS83) #arcpy.CreateFeatureclass_management('in_memory', 'SiteMarker', "POINT", "", "DISABLED", "DISABLED", srGCS83) cursor = arcpy.InsertCursor(outPointSHP) feat = cursor.newRow() for in_row in in_rows: # Set X and Y for start and end points point1.X = in_row.xCentroid point1.Y = in_row.yCentroid array1.add(point1) centerpoint = arcpy.Multipoint(array1) array1.removeAll() feat.shape = point1 cursor.insertRow(feat) del feat del cursor
# (fragment) Create the fan polygon feature class with its attribute fields,
# then insert one polygon feature per collected vertex path.
# NOTE(review): the cur.insertRow(feat) call is not visible in this
# fragment — it presumably follows immediately below.
paths.append(path)
if debug == True:
    arcpy.AddMessage("Points in path: " + str(len(path)))
if debug == True:
    arcpy.AddMessage("paths: " + str(paths))
arcpy.AddMessage("Creating target feature class ...")
# Output polygons are created in the web-Mercator spatial reference.
arcpy.CreateFeatureclass_management(os.path.dirname(tempFans), os.path.basename(tempFans), "Polygon", "#", "DISABLED", "DISABLED", webMercator)
arcpy.AddField_management(tempFans, "Range", "DOUBLE", "#", "#", "#", "Range (meters)")
arcpy.AddField_management(tempFans, "Bearing", "DOUBLE", "#", "#", "#", "Bearing (degrees)")
arcpy.AddField_management(tempFans, "Traversal", "DOUBLE", "#", "#", "#", "Traversal (degrees)")
arcpy.AddField_management(tempFans, "LeftAz", "DOUBLE", "#", "#", "#", "Left Bearing (degrees)")
arcpy.AddField_management(tempFans, "RightAz", "DOUBLE", "#", "#", "#", "Right Bearing (degrees)")
# schedule the temp feature class for cleanup later
deleteme.append(tempFans)
# take the points and add them into the output fc
arcpy.AddMessage("Building " + str(len(paths)) + " fans ...")
cur = arcpy.InsertCursor(tempFans)
for outPath in paths:
    lineArray = arcpy.Array()
    for vertex in outPath:
        pnt = arcpy.Point()
        pnt.X = vertex[0]
        pnt.Y = vertex[1]
        lineArray.add(pnt)
        del pnt
    feat = cur.newRow()
    feat.shape = lineArray
    feat.Range = maxRange
    feat.Bearing = initialBearing
    feat.Traversal = traversal
    feat.LeftAz = leftBearing
    feat.RightAz = rightBearing
import arcpy as arc
from arcpy import env

# Point the geoprocessor at the training geodatabase and allow overwrites.
env.workspace = "C:/EsriTraining/PythonGP10_0/Data/SanJuan.gdb"
env.overwriteOutput = True

# Build the Buffer_Distance table and its two attribute columns.
arc.CreateTable_management("C:/EsriTraining/PythonGP10_0/Data/SanJuan.gdb", "Buffer_Distance")
for field_name, field_type in (("ROUTE_TYPE", "TEXT"), ("DISTANCE", "SHORT")):
    arc.AddField_management("Buffer_Distance", field_name, field_type)

# Seed the table with a single record via the classic insert cursor API.
cursor = arc.InsertCursor("Buffer_Distance")
record = cursor.newRow()
record.setValue("ROUTE_TYPE", "Primary")
record.setValue("DISTANCE", 2000)
cursor.insertRow(record)
del cursor
del record
print("Finished Inserting")
import arcpy, os arcpy.CreateFolder_management(os.getcwd(), "TestFcBatch") in_folder = os.getcwd() + os.sep + "TestFcBatch" for i in range(20): arcpy.CreateTable_management(in_folder, "tt" + str(i) + ".dbf") arcpy.env.workspace = in_folder table_list = arcpy.ListTables() for table in table_list: print table arcpy.AddField_management(table, "TableName", "TEXT") rows = arcpy.InsertCursor(table) for i in range(1, 26): row = rows.newRow() row.setValue('TableName', table) rows.insertRow(row) del row del rows # arcpy.Delete_management(in_folder)
def betriebsstruktur(projektname):
    """Spread each commercial sub-area's starting business count over 30 years.

    For every row of Gewerbe_Teilflaechen the per-year factors are looked up
    (by settlement duration, field Dauer_Jahre) in the
    gewerbe_aufsiedlungsdauer parameter table, multiplied with the starting
    business count and written as anzahl_betriebe_jahr_1..30 to the
    Gewerbe_Betriebsstruktur table.

    projektname -- name of the user-defined project; selects the project FGDB.
    """
    import sys, os, arcpy

    # Resolve workspace paths relative to the tool installation directory.
    # (`join` is os.path.join, imported at module level.)
    base_path = str(sys.path[0]).split("2 Planungsprojekte analysieren")[0]
    workspace_projekt = join(
        base_path, '3 Benutzerdefinierte Projekte', projektname,
        'FGDB_11_Definition_Projekt_' + projektname + '.gdb')
    tabelle_gewerbe_teilflaechen = join(workspace_projekt, "Gewerbe_Teilflaechen")
    tabelle_gewerbe_betriebsstruktur = join(workspace_projekt, "Gewerbe_Betriebsstruktur")
    tabelle_parameter_aufsiedlungsdauer = join(
        base_path, '2_Tool', '11_Definition_Projekt',
        'FGDB_11_Definition_Projekt_Tool.gdb', 'gewerbe_aufsiedlungsdauer')

    tabelle_gewerbe_teilflaechen_Search = arcpy.SearchCursor(
        tabelle_gewerbe_teilflaechen)
    tabelle_gewerbe_betriebsstruktur_Insert = arcpy.InsertCursor(
        tabelle_gewerbe_betriebsstruktur)

    for row in tabelle_gewerbe_teilflaechen_Search:
        anzahl_betriebe_start = row.anzahlBetriebe
        aufsiedlungsdauer = row.aufsiedlungsdauer

        # Fetch the 30 per-year factors for this settlement duration.
        # The parameter fields are named Jahr1..Jahr30; read them generically
        # instead of 30 hand-written assignments.
        sql = "Dauer_Jahre = " + str(aufsiedlungsdauer)
        tabelle_parameter_aufsiedlungsdauer_Search = arcpy.SearchCursor(
            tabelle_parameter_aufsiedlungsdauer, sql)
        for row2 in tabelle_parameter_aufsiedlungsdauer_Search:
            jahr_faktoren = [row2.getValue("Jahr" + str(j)) for j in range(1, 31)]
        # NOTE(review): like the original code, this raises NameError if the
        # SQL filter matched no parameter row — confirm a match always exists.
        del row2, tabelle_parameter_aufsiedlungsdauer_Search

        # Compute and insert the per-year business counts
        # (fields anzahl_betriebe_jahr_1 .. anzahl_betriebe_jahr_30).
        row3 = tabelle_gewerbe_betriebsstruktur_Insert.newRow()
        for j, faktor in enumerate(jahr_faktoren, start=1):
            row3.setValue("anzahl_betriebe_jahr_" + str(j),
                          anzahl_betriebe_start * faktor)
        row3.projekt = projektname
        row3.teilflaeche = row.teilflaeche
        row3.branche = row.gewerbetyp
        tabelle_gewerbe_betriebsstruktur_Insert.insertRow(row3)
#Find all fiber lines that are in UIT DB that are not in GIS DB #Write them into a temp table in FGDB infile.write("CONDUIT DUCT BANKS IN UIT BUT NOT IN GIS:\n") arcpy.AddJoin_management(uitFiberLinesView, "ID", gisFiberLinesView, "conduit_bank", "KEEP_ALL") sc1 = arcpy.SearchCursor(uitFiberLinesView) print "Missing conduit duct banks:\n" for row1 in sc1: if row1.getValue("UUSD.DBO.Fiber_Lines.conduit_bank") == None: print row1.getValue("uitFiberLinesExport.ID") #Use insert cursor to write values in to temp table intcur1 = arcpy.InsertCursor(tempMissingLines) icur1 = intcur1.newRow() icur1.setValue("cb_id", row1.getValue("uitFiberLinesExport.ID")) icur1.setValue( "type", row1.getValue("uitFiberLinesExport.CONDUIT_ROUTE_TYPE")) intcur1.insertRow(icur1) del intcur1 del row1 del sc1 count1 = arcpy.GetCount_management(gisMissingLinesView).getOutput(0) #Compare this list with known missing conduit duct banks #And determine if there are missing CB that we aren't aware of print "\nThere are " + str(
def doConversion(fileToConvert):
    """Convert a VAW glacier-outline text file into a polyline shapefile.

    Each data line holds "x,y,z,code" where code 21 marks the first vertex
    of a polyline, 22 an intermediate vertex and 23 the last vertex.  The
    shapefile is written next to the input file with the same base name.
    NOTE(review): the sample header in the original comments showed
    space-separated values, but the parser splits on commas — confirm the
    actual input format.

    Returns the full path of the created shapefile, or None if an error was
    caught (errors are reported via arcpy.AddMessage).
    """
    # Kept for compatibility; not raised in this version of the script.
    class InputIndexError(Exception):
        pass

    badCodeCount = 0   # vertices whose code was not 21/22/23 (usually "20")
    firstBadLine = 0   # line number where a bad code first appeared
    try:
        # The shapefile is written in the same directory as the input file.
        currentWorkingDirectory = os.path.split(fileToConvert)[0]
        # everything of the file name up to the first "."
        processingName = (os.path.split(fileToConvert)[1]).split(".")[0]
        outShapefileName = processingName + ".shp"

        arcpy.AddMessage("Reading input text file...")
        # context manager ensures the file is closed even on error
        # (the original leaked the handle when an exception occurred)
        with open(fileToConvert, "r") as inTextFile:
            lines = inTextFile.readlines()

        arcpy.AddMessage("Prepare the shapefile for editing...")
        # define coordinate system (spatial reference), usually CH1903 LV03
        spatial_ref = arcpy.SpatialReference(
            r"C:\Program Files (x86)\ArcGIS\Desktop10.0\Coordinate Systems\Projected Coordinate Systems\National Grids\Europe\CH1903 LV03.prj"
        )  # Quick hack for ArcGIS 10.0
        arcpy.AddMessage("Working directory: " + currentWorkingDirectory)
        arcpy.AddMessage("Shapefile :" + outShapefileName)
        arcpy.CreateFeatureclass_management(currentWorkingDirectory,
                                            outShapefileName, "POLYLINE", "",
                                            "ENABLED", "ENABLED", spatial_ref)

        # Open an insert cursor for the new feature class
        cur = arcpy.InsertCursor(
            os.path.join(currentWorkingDirectory, outShapefileName))
        lineArray = arcpy.Array()
        pnt = arcpy.Point()

        for i, line in enumerate(lines, start=1):
            if i == 1:
                continue  # skip the header line
            arcpy.AddMessage("line " + str(i) + ": " + line)
            pnt.X, pnt.Y, pnt.Z, indextemp = line.split(",")
            # parse the vertex code once; 21 = first, 22 = middle, 23 = last
            # (membership test replaces the original fragile XOR chains)
            code = float(indextemp)
            if code in (21, 22, 23):
                lineArray.add(pnt)
            else:
                # remember unexpected codes for the summary message below
                badCodeCount += 1
                if badCodeCount == 1:
                    firstBadLine = i
            if code == 23:
                # last vertex: flush the collected points as one polyline
                feat = cur.newRow()
                feat.shape = lineArray
                cur.insertRow(feat)
                lineArray.removeAll()

        # delete the cursor to unlock the feature class
        del cur
        arcpy.AddMessage("Process finished successfully.")
        if badCodeCount:
            arcpy.AddMessage(
                "*** BE AWARE THAT " + str(badCodeCount) +
                " VERTICES HAVE CODE UNEQUAL TO 21/22/23 (FIRST TIME IN LINE: "
                + str(firstBadLine) + ")")
            arcpy.AddMessage(
                "*** This shouln't be the case if you are processing polylines! -> Check input file for possible reasons."
            )
        return os.path.join(currentWorkingDirectory, outShapefileName)
    except Exception as e:
        # report the error through the geoprocessing message stream
        # (str(e) instead of the deprecated e.message)
        arcpy.AddMessage(str(e))
# then turning the set back into a list checked=[] for j in myList: if j not in checked: checked.append(j) myList = checked del checked arcpy.DeleteRows_management(fc1) for xd in myList: arcpy.AddMessage(xd) rows = arcpy.InsertCursor(fc1) row = rows.newRow() row.Area_Name = xd rows.insertRow(row) del rows del row domTable = fc1 codeField = "Area_Name" descField = "Area_Name" dWorkspace = workspc domName = "Area_Names" domDesc = "Search area names"
arcpy.CreateFeatureclass_management(os.path.dirname(outputPolygons), os.path.basename(outputPolygons), "POLYGON", "#", "#", "#", sr) #Add ID field arcpy.AddMessage("Adding ID field ...") arcpy.AddField_management(outputPolygons, inputIDFieldName, "LONG") ellipseCount = arcpy.GetCount_management(inputPolylines) arcpy.AddMessage("Opening cursors ...") #Open Search cursor on polyline lineShapeFieldName = arcpy.Describe(inputPolylines).shapeFieldName inRows = arcpy.SearchCursor(inputPolylines) #Open Insert cursor on polygons outRows = arcpy.InsertCursor(outputPolygons, sr) for inRow in inRows: #Read polyline geometry as point collection inFeat = inRow.getValue(lineShapeFieldName) #Get ID inID = inRow.getValue(inputIDFieldName) arcpy.AddMessage("Building ellipse " + str(inID + 1) + " of " + str(ellipseCount) + " from row " + str(inID)) if debug == True: arcpy.AddMessage(str(inID)) #Set point collection to polygon feature outFeat = outRows.newRow() outFeat.shape = inFeat #Set ID outFeat.setValue(inputIDFieldName, inID) outRows.insertRow(outFeat)
arcpy.CreateTable_management(input_geodatabase, table_name) # Add Z column to table arcpy.AddZInformation_3d(CrossSections, 'Z_MIN', 'NO_FILTER') # Get the Z_min value arcpy.AddSurfaceInformation_3d(CrossSections, DEM, "Z_MIN") # Extract XS_IDs XS_IDs = [row[0] for row in arcpy.da.SearchCursor(CrossSections, "XS_ID")] # Extract Min_Z Values Min_Zs = [row[0] for row in arcpy.da.SearchCursor(CrossSections, "Z_min")] # Insert values into Stage Data Table zipped = zip(XS_IDs, Min_Zs) arcpy.AddField_management(table_name, "XS_ID", "TEXT") arcpy.AddField_management(table_name, "MIN_Z_Value", "FLOAT") # Use insert cursor to add rows rows = arcpy.InsertCursor(table_name) for i in zipped: row = rows.newRow() row.setValue("XS_ID", i[0]) row.setValue("MIN_Z_Value", i[1]) rows.insertRow(row) del row del rows arcpy.AddMessage("") arcpy.AddMessage("Script finished")
import arcpy

# Append a single placeholder hospital record to the shapefile.
cursor = arcpy.InsertCursor(
    r"C:\Users\Me\Desktop\GIS Programming\Training\Data\Hospitals.shp")
new_row = cursor.newRow()
new_row.setValue("NAME", "Ron")
new_row.setValue("ADDRESS", "Test")
cursor.insertRow(new_row)

# Release the row and the cursor to drop the schema lock.
del new_row
del cursor
# (fragment) Close each fan path back to its center, then write all fans out
# as polygon features with Range/Bearing attributes.
# NOTE(review): `range` is a variable defined outside this fragment and
# shadows the Python builtin — renaming it would be safer.
x = centerPointX + (range * math.cos(math.radians(d)))
y = centerPointY + (range * math.sin(math.radians(d)))
path.append([x, y])
path.append([centerPointX, centerPointY])  # add last point
paths.append(path)
arcpy.AddMessage("Creating target feature class ...")
arcpy.CreateFeatureclass_management(os.path.dirname(outFeature), os.path.basename(outFeature), "Polygon", "#", "DISABLED", "DISABLED", webMercator)
arcpy.AddField_management(outFeature, "Range", "DOUBLE")
arcpy.AddField_management(outFeature, "Bearing", "DOUBLE")
arcpy.AddMessage("Buiding " + str(len(paths)) + " fans ...")
cur = arcpy.InsertCursor(outFeature)
for outPath in paths:
    # Collect the path's vertices into an Array for the polygon shape.
    lineArray = arcpy.Array()
    for vertex in outPath:
        pnt = arcpy.Point()
        pnt.X = vertex[0]
        pnt.Y = vertex[1]
        lineArray.add(pnt)
        del pnt
    feat = cur.newRow()
    feat.shape = lineArray
    feat.Range = range
    feat.Bearing = initialBearing
    cur.insertRow(feat)
    del lineArray
    del feat
row.MapUnit = row.Label row.Name = msRows[matchRow][1] row.Age = msRows[matchRow][2] row.Description = msRows[matchRow][3] row.ParagraphStyle = msRows[matchRow][4] row.HierarchyKey = msRows[matchRow][5] dmuRows.updateRow(row) i = i + 1 del row del dmuRows # open insertion cursor on DMU table # step through msRows. If msRow number not in msRowsMatched, # insert row in DMU addMsgAndPrint('Adding new rows to DescriptionOfMapUnits') dmuRows = arcpy.InsertCursor(gdb + '/DescriptionOfMapUnits') for i in range(len(msRows)): if not i in msRowsMatched: addMsgAndPrint(' ' + str(msRows[i])[:40] + '...') row = dmuRows.newRow() row.Label = msRows[i][0] row.MapUnit = msRows[i][0] #addMsgAndPrint(str(i)+' '+str(msRows[i][0])[:20]) row.Name = msRows[i][1] row.Age = msRows[i][2] row.Description = msRows[i][3] row.ParagraphStyle = msRows[i][4] row.HierarchyKey = msRows[i][5] try: dmuRows.insertRow(row) except:
kol2 = "RFTOMSCH" #------------------------------------------------------- arcpy.AddMessage("\n >>> START PROFIELNAAM BEPALEN... <<<") # describe de geometry lineDesc = arcpy.Describe(inLine) shapefieldname = lineDesc.ShapeFieldName # cursor lines = arcpy.UpdateCursor(inLine) for line in lines: # lijn uitlezen en begin en eindpunt selecteren. lineGeom = line.getValue(shapefieldname) endPoints = lineGeom #, lineGeom.lastPoint arcpy.AddMessage("\n Punt: " + str(line.OBJECTID)) uitPNT = arcpy.CreateFeatureclass_management("in_memory", "BeginEindPNT", "POINT", "", "", "", inLine) ptRows = arcpy.InsertCursor(uitPNT) ptRow = ptRows.newRow() ptRow.shape = endPoints ptRows.insertRow(ptRow) # Nu locatie op route zoeken tol = "5 Meters" # Zoekafstand 5 meter tbl = "locate_points" props = "RID POINT MEASPnt" # uitvoer kolommen # Execute LocateFeaturesAlongRoutes Mtabel = arcpy.LocateFeaturesAlongRoutes_lr(uitPNT, inRT, Rkol, tol, tbl, props) meas = arcpy.SearchCursor(Mtabel) VanTot = [] for r in meas: naam = r.RID dijkpl = naam.split("_")[1]
# (fragment) Spatially join stop buffers to routes, then record every
# (RouteNum, UTAStopID) pair in the routes table.
arcpy.SpatialJoin_analysis(bufferMergeFL, routesFL, spatialJoin, "JOIN_ONE_TO_MANY", "", "", "INTERSECT")
sc = arcpy.SearchCursor(spatialJoin)
print "Spatial Join :"
for row in sc:
    #Get Field Values
    lineAbbr = row.LineAbbr
    stopID = row.UTAStopID
    # skip joined rows that matched no route
    if lineAbbr != None:
        print str(stopID) + " - " + str(lineAbbr)
        #Use insert cursor to write values to the routes table
        # NOTE(review): an InsertCursor is opened and released per row here;
        # opening it once outside the loop would be cheaper — verify intent.
        intcur = arcpy.InsertCursor(routesTable)
        icur = intcur.newRow()
        icur.setValue("RouteNum", lineAbbr)
        icur.setValue("UTAStopID", stopID)
        intcur.insertRow(icur)
        del intcur
del row
del sc
#Add URL to routes table
fields = ("RouteNum", "Website")
edit.startEditing(False, True)  #Multi-User mode must be set to true or error will occur
edit.startOperation()
with arcpy.da.UpdateCursor(routesTable, fields) as cursor:
def RotateFeatureClass(inputFC, outputFC, angle=0, pivot_point=None):
    """Rotate Feature Class

    inputFC     Input features
    outputFC    Output feature class
    angle       Angle to rotate, in degrees
    pivot_point X,Y coordinates (as space-separated string)
                Default is lower-left of inputFC

    As the output feature class no longer has a "real" xy locations,
    after rotation, it no coordinate system defined.

    Returns the pivot point used, formatted "x y", or None.
    """

    def RotateXY(x, y, xc=0, yc=0, angle=0, units="DEGREES"):
        """Rotate an xy cooordinate about a specified origin

        x,y      xy coordinates
        xc,yc    center of rotation
        angle    angle
        units    "DEGREES" (default) or "RADIANS"
        """
        import math
        x = x - xc
        y = y - yc
        # make angle clockwise (like Rotate_management)
        angle = angle * -1
        if units == "DEGREES":
            angle = math.radians(angle)
        xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
        yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
        return xr, yr

    # temp names for cleanup
    env_file = None
    lyrFC, lyrTmp, lyrOut = [None] * 3  # layers
    tmpFC = None  # temp dataset
    Row, Rows, oRow, oRows = [None] * 4  # cursors

    try:
        # process parameters
        try:
            xcen, ycen = [float(xy) for xy in pivot_point.split()]
            pivot_point = xcen, ycen
        except:
            # if pivot point was not specified, get it from
            # the lower-left corner of the feature class
            ext = arcpy.Describe(inputFC).extent
            xcen, ycen = ext.XMin, ext.YMin
            pivot_point = xcen, ycen
        angle = float(angle)

        # set up environment: snapshot current settings so they can be
        # restored in the finally block
        env_file = arcpy.CreateScratchName("xxenv", ".xml", "file",
                                           os.environ["TEMP"])
        arcpy.SaveSettings(env_file)

        # Disable any GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")
        WKS = env.workspace
        if not WKS:
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS

        # Disable GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")

        # get feature class properties
        lyrFC = "lyrFC"
        arcpy.MakeFeatureLayer_management(inputFC, lyrFC)
        dFC = arcpy.Describe(lyrFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType
        FID = dFC.OIDFieldName

        # create temp feature class
        tmpFC = arcpy.CreateScratchName("xxfc", "", "featureclass")
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC), shpType)
        lyrTmp = "lyrTmp"
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)

        # set up id field (used to join later)
        TFID = "XXXX_FID"
        arcpy.AddField_management(lyrTmp, TFID, "LONG")
        arcpy.DeleteField_management(lyrTmp, "ID")

        # rotate the feature class coordinates
        # only points, polylines, and polygons are supported
        # open read and write cursors
        Rows = arcpy.SearchCursor(lyrFC, "", "", "%s;%s" % (shpField, FID))
        oRows = arcpy.InsertCursor(lyrTmp)
        arcpy.AddMessage("Opened search cursor")
        if shpType == "Point":
            # points: rotate each point about the pivot and copy it over
            for Row in Rows:
                shp = Row.getValue(shpField)
                pnt = shp.getPart()
                pnt.X, pnt.Y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                oRow = oRows.newRow()
                oRow.setValue(shpField, pnt)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        elif shpType in ["Polyline", "Polygon"]:
            # lines/polygons: rebuild each geometry part by part, rotating
            # every vertex; a null point inside a part separates rings
            parts = arcpy.Array()
            rings = arcpy.Array()
            ring = arcpy.Array()
            for Row in Rows:
                shp = Row.getValue(shpField)
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1:
                        rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1
                # if only one, remove nesting
                if len(parts) == 1:
                    parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                parts.removeAll()
                oRow = oRows.newRow()
                oRow.setValue(shpField, shp)
                oRow.setValue(TFID, Row.getValue(FID))
                oRows.insertRow(oRow)
        else:
            #raise Exception, "Shape type {0} is not supported".format(shpType) #UPDATE
            raise Exception("Shape type {0} is not supported".format(shpType))

        del oRow, oRows  # close write cursor (ensure buffer written)
        oRow, oRows = None, None  # restore variables for cleanup

        # join attributes, and copy to output
        arcpy.AddJoin_management(lyrTmp, TFID, lyrFC, FID)
        env.qualifiedFieldNames = False
        # NOTE(review): Merge of a single joined layer acts as a copy here;
        # CopyFeatures is the more common choice — confirm intent.
        arcpy.Merge_management(lyrTmp, outputFC)
        lyrOut = "lyrOut"
        arcpy.MakeFeatureLayer_management(outputFC, lyrOut)

        # drop temp fields 2,3 (TFID, FID)
        # NOTE(review): this assumes the temp fields land at positions 2-3 of
        # the output field list — verify for non-shapefile workspaces.
        fnames = [f.name for f in arcpy.ListFields(lyrOut)]
        dropList = ";".join(fnames[2:4])
        arcpy.DeleteField_management(lyrOut, dropList)

    #except MsgError, xmsg: #UPDATE
    except MsgError as xmsg:
        arcpy.AddError(str(xmsg))
    except arcpy.ExecuteError:
        # surface the full geoprocessing error stack
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo.strip())
        arcpy.AddError(arcpy.GetMessages())
        numMsg = arcpy.GetMessageCount()
        for i in range(0, numMsg):
            arcpy.AddReturnMessage(i)
    #except Exception, xmsg: #UPDATE
    except Exception as xmsg:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo + str(xmsg))
    finally:
        # reset environment
        if env_file:
            arcpy.LoadSettings(env_file)
        # Clean up temp files
        for f in [lyrFC, lyrTmp, lyrOut, tmpFC, env_file]:
            try:
                if f:
                    arcpy.Delete_management(f)
            except:
                pass
        # delete cursors
        # NOTE(review): `del c` unbinds only the loop variable, not the
        # Row/Rows/oRow/oRows names themselves.
        try:
            for c in [Row, Rows, oRow, oRows]:
                del c
        except:
            pass

    # return pivot point
    try:
        pivot_point = "{0} {1}".format(*pivot_point)
    except:
        pivot_point = None
    return pivot_point
def main():
    '''Main RLOS (Radial Line Of Sight).

    Drives the full viewshed workflow on the module-level inputs
    (``observers``, ``input_surface``, ``output_rlos``,
    ``RADIUS2_to_infinity`` etc. are set by the script tool wrapper):

    1. find the observers' minimum bounding rectangle and its center;
    2. recenter a World Azimuthal Equidistant projection on that center;
    3. clip/resample the surface to the analysis area, project surface
       and observers to the recentered AZED;
    4. run Spatial Analyst Viewshed, convert to polygons, clip to the
       visibility buffer, and set the tool output.

    All scratch datasets are queued in ``delete_me`` and removed at the end.
    Errors are reported through arcpy messaging.
    '''
    try:
        # get/set initial environment
        env.overwriteOutput = True
        installInfo = arcpy.GetInstallInfo("desktop")  # NOTE(review): unused below

        # get observer's vibility modifier maximums
        obsMaximums = maxVizModifiers(observers)
        removeSPOT = obsMaximums['REMOVE_SPOT']
        if removeSPOT is True:
            arcpy.AddMessage("Observer SPOT is <NULL>, deleteing field ...")
            arcpy.DeleteField_management(observers, "SPOT")

        # Do a Minimum Bounding Geometry (MBG) on the input observers
        observers_mbg = os.path.join(env.scratchWorkspace, "observers_mbg")
        delete_me.append(observers_mbg)
        arcpy.AddMessage("Finding observer's minimum bounding envelope ...")
        # ENVELOPE would be better but would make it ArcInfo-only.
        arcpy.MinimumBoundingGeometry_management(observers, observers_mbg,
                                                 "RECTANGLE_BY_AREA")

        # Now find the center of the (MBG)
        arcpy.AddMessage("Finding center of observers ...")
        mbgCenterPoint = os.path.join(env.scratchWorkspace, "mbgCenterPoint")
        mbgExtent = arcpy.Describe(observers_mbg).extent
        mbgSR = arcpy.Describe(observers_mbg).spatialReference
        # FIX: halve the extent span — without "/ 2.0" these evaluated to
        # XMax/YMax, i.e. a corner, not the center of the envelope.
        mbgCenterX = mbgExtent.XMin + (mbgExtent.XMax - mbgExtent.XMin) / 2.0
        mbgCenterY = mbgExtent.YMin + (mbgExtent.YMax - mbgExtent.YMin) / 2.0
        arcpy.CreateFeatureclass_management(os.path.dirname(mbgCenterPoint),
                                            os.path.basename(mbgCenterPoint),
                                            "POINT", "#", "DISABLED",
                                            "DISABLED", mbgSR)
        mbgShapeFieldName = arcpy.Describe(mbgCenterPoint).ShapeFieldName
        rows = arcpy.InsertCursor(mbgCenterPoint)
        feat = rows.newRow()
        feat.setValue(mbgShapeFieldName, arcpy.Point(mbgCenterX, mbgCenterY))
        rows.insertRow(feat)
        del rows
        delete_me.append(mbgCenterPoint)

        # Get the maximum radius of the observers
        maxRad = obsMaximums['RADIUS2']  # NOTE(review): unused below
        maxOffset = obsMaximums['OFFSETA']
        horizonDistance = 0.0
        z_factor = float(zfactor(observers))  # NOTE(review): unused below
        if RADIUS2_to_infinity is True:
            # if going to infinity what we really need is the distance to the
            # horizon based on height/elevation
            arcpy.AddMessage("Finding horizon distance ...")
            result = arcpy.GetCellValue_management(
                input_surface, str(mbgCenterX) + " " + str(mbgCenterY))
            centroid_elev = result.getOutput(0)
            R2 = float(centroid_elev) + float(maxOffset)
            # length, in meters, of semimajor axis of WGS_1984 spheroid.
            R = 6378137.0
            # straight-line distance to the horizon for an eye height of R2
            horizonDistance = math.sqrt(math.pow((R + R2), 2) - math.pow(R, 2))
            arcpy.AddMessage(str(horizonDistance) + " meters.")
            horizonExtent = (str(mbgCenterX - horizonDistance) + " " +
                             str(mbgCenterY - horizonDistance) + " " +
                             str(mbgCenterX + horizonDistance) + " " +
                             str(mbgCenterY + horizonDistance))  # NOTE(review): unused below
            # since we are doing infinity we can drop the RADIUS2 field
            arcpy.AddMessage(
                "Analysis to edge of surface, dropping RADIUS2 field ...")
            arcpy.DeleteField_management(observers, "RADIUS2")
        else:
            pass

        # reset center of AZED using Lat/Lon of MBG center point
        # Project point to WGS 84
        arcpy.AddMessage("Recentering Azimuthal Equidistant to centroid ...")
        mbgCenterWGS84 = os.path.join(env.scratchWorkspace, "mbgCenterWGS84")
        arcpy.Project_management(mbgCenterPoint, mbgCenterWGS84, GCS_WGS_1984)
        arcpy.AddXY_management(mbgCenterWGS84)
        pointx = 0.0
        pointy = 0.0
        shapeField = arcpy.Describe(mbgCenterWGS84).ShapeFieldName
        rows = arcpy.SearchCursor(mbgCenterWGS84)
        for row in rows:
            feat = row.getValue(shapeField)
            pnt = feat.getPart()
            pointx = pnt.X
            pointy = pnt.Y
        del row
        del rows

        # write new central meridian and latitude of origin...
        # FIX: concatenate with triple-quote breaks so the computed
        # pointx/pointy values are actually embedded in the WKT; the
        # previous single-quote form left "+ str(pointx) +" as literal text.
        strAZED = '''PROJCS["World_Azimuthal_Equidistant",
        GEOGCS["GCS_WGS_1984",
        DATUM["D_WGS_1984",
        SPHEROID["WGS_1984",6378137.0,298.257223563]],
        PRIMEM["Greenwich",0.0],
        UNIT["Degree",0.0174532925199433]],
        PROJECTION["Azimuthal_Equidistant"],
        PARAMETER["False_Easting",0.0],
        PARAMETER["False_Northing",0.0],
        PARAMETER["Central_Meridian",''' + str(pointx) + '''],
        PARAMETER["Latitude_Of_Origin",''' + str(pointy) + '''],
        UNIT["Meter",1.0],
        AUTHORITY["ESRI",54032]]'''
        delete_me.append(mbgCenterWGS84)

        # Clip the input surface to the maximum visibilty range and extract
        # it to a 1000 x 1000 raster
        # if going to infinity then clip to horizion extent
        surf_extract = os.path.join(env.scratchWorkspace, "surf_extract")
        if RADIUS2_to_infinity is True:
            mbgBuffer = os.path.join(env.scratchWorkspace, "mbgBuffer")
            arcpy.Buffer_analysis(observers_mbg, mbgBuffer, horizonDistance)
            delete_me.append(mbgBuffer)
            surfaceSR = arcpy.Describe(input_surface).spatialReference
            mbgBufferPrj = os.path.join(env.scratchWorkspace, "mbgBufferPrj")
            arcpy.Project_management(mbgBuffer, mbgBufferPrj, surfaceSR)
            delete_me.append(mbgBufferPrj)
            mbgBufferPrjExtent = arcpy.Describe(mbgBufferPrj).extent
            cellSize = max(
                float(mbgBufferPrjExtent.width) / 1000.0,
                float(mbgBufferPrjExtent.height) / 1000.0)
            env.cellSize = cellSize
            arcpy.AddMessage(
                "Clipping and resampling surface to analysis area with " +
                str(cellSize) + " meter cell size ...")
            arcpy.Clip_management(input_surface, "#", surf_extract,
                                  mbgBufferPrj)
        else:
            # buffer MBG by max RADIUS 2 + 10%
            mbgBuffer = os.path.join(env.scratchWorkspace, "mbgBuffer")
            arcpy.Buffer_analysis(observers_mbg, mbgBuffer,
                                  obsMaximums['RADIUS2'])
            delete_me.append(mbgBuffer)
            # project buffer to surface SR
            surfaceSR = arcpy.Describe(input_surface).spatialReference
            mbgBufferPrj = os.path.join(env.scratchWorkspace, "mbgBufferPrj")
            arcpy.Project_management(mbgBuffer, mbgBufferPrj, surfaceSR)
            delete_me.append(mbgBufferPrj)
            # clip surface to projected buffer
            arcpy.Clip_management(input_surface, "#", surf_extract,
                                  mbgBufferPrj)
        delete_me.append(surf_extract)

        # Project surface to the new AZED
        extract_prj = os.path.join(env.scratchWorkspace, "extract_prj")
        arcpy.AddMessage("Projecting surface ...")
        arcpy.ProjectRaster_management(surf_extract, extract_prj, strAZED)
        delete_me.append(extract_prj)

        # Project observers to the new AZED
        obs_prj = os.path.join(env.scratchWorkspace, "obs_prj")
        arcpy.AddMessage("Projecting observers ...")
        arcpy.Project_management(observers, obs_prj, strAZED)
        delete_me.append(obs_prj)

        # Project the MBG buffer to AZED
        obs_buf = os.path.join(env.scratchWorkspace, "obs_buf")
        arcpy.Project_management(mbgBufferPrj, obs_buf, strAZED)
        delete_me.append(obs_buf)

        # Finally ... run Viewshed
        arcpy.AddMessage("Calculating Viewshed ...")
        vshed = os.path.join(env.scratchWorkspace, "vshed")
        delete_me.append(vshed)
        outVshed = sa.Viewshed(extract_prj, obs_prj, 1.0, "CURVED_EARTH",
                               terrestrial_refractivity_coefficient)
        outVshed.save(vshed)

        # Raster To Polygon
        arcpy.AddMessage("Converting to polygons ...")
        ras_poly = os.path.join(env.scratchWorkspace, "ras_poly")
        arcpy.RasterToPolygon_conversion(vshed, ras_poly, polygon_simplify)
        delete_me.append(ras_poly)

        # clip output polys to buffer
        if RADIUS2_to_infinity is not True:
            out_buf = os.path.join(env.scratchWorkspace, "out_buf")
            arcpy.Buffer_analysis(obs_prj, out_buf, "RADIUS2")
            delete_me.append(out_buf)
            arcpy.Clip_analysis(ras_poly, out_buf, output_rlos)
        else:
            arcpy.CopyFeatures_management(ras_poly, output_rlos)

        # set output
        arcpy.SetParameter(2, output_rlos)

        # cleanup
        arcpy.AddMessage("Removing scratch datasets:")
        for ds in delete_me:
            arcpy.AddMessage(str(ds))
            arcpy.Delete_management(ds)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        # print msgs #UPDATE2to3
        print(msgs)
    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        # Concatenate information together concerning the error into a
        # message string
        pymsg = ("PYTHON ERRORS:\nTraceback info:\n" + tbinfo +
                 "\nError Info:\n" + str(sys.exc_info()[1]))
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
        # Print Python error messages for use in Python / Python Window
        # print pymsg + "\n" #UPDATE2to3
        print(pymsg + "\n")
        # print msgs #UPDATE2to3
        print(msgs)
# Build the station table schema: one LONG station-distance field plus
# X/Y/Z coordinate fields to be filled in later.
arcpy.AddField_management(stationTable, "STATION", "LONG", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
arcpy.AddField_management(stationTable, "POINT_X", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
arcpy.AddField_management(stationTable, "POINT_Y", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
arcpy.AddField_management(stationTable, "POINT_Z", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")

# Calculate location for each station along the line.
# For every line feature, insert one row at the full line length plus one
# row per station at multiples of `interval` (0, interval, 2*interval, ...).
rows = arcpy.SearchCursor(lineTemp)
# FIX: open the insert cursor once, outside the feature loop — the original
# re-created an InsertCursor for every line feature, leaking open cursors
# and table locks until garbage collection.
stationRows = arcpy.InsertCursor(stationTable)
row = rows.next()
while row:
    stations = row.NO_STATIONS
    length = int(row.LENGTH_FT)
    # end-of-line station at the full length
    newRow = stationRows.newRow()
    newRow.ID = row.ID
    newRow.STATION = length
    stationRows.insertRow(newRow)
    # intermediate stations at regular intervals
    currentStation = 0
    while currentStation < stations:
        newRow = stationRows.newRow()
        newRow.ID = row.ID
        newRow.STATION = currentStation * interval
        stationRows.insertRow(newRow)
        currentStation = currentStation + 1
    row = rows.next()
# release cursors so the table lock is dropped promptly
del stationRows
del rows
def getSnapMetres(coord, up):
    """Snap a coordinate outward to a whole-kilometre boundary.

    Truncates ``coord`` to kilometres, then steps one kilometre up when
    ``up == True`` or one kilometre down otherwise, returning metres.
    """
    km = int(coord / 1000)
    step = 1 if up == True else -1
    return (km + step) * 1000


# --- constants -------------------------------------------------------------
PRIORITY_FIELDNAME = "Priority"
ALL_SPECIES_FC = "Ranges"
SPECIES_TABLE = "SpeciesData"
EXTENT_FC = "in_memory\\RangesFC"
outputTable = r"E:\cottaan\My Documents\ArcGIS\SpeciesRangeTable.dbf"
rows = arcpy.InsertCursor(outputTable)

# --- input parameters ------------------------------------------------------
speciesFL = arcpy.GetParameterAsText(0)
tmpRaster = r"E:\cottaan\My Documents\ArcGIS\tmpRaster"
scratchFC = r"E:\cottaan\My Documents\ArcGIS\tmp.shp"

# --- geoprocessing environment ---------------------------------------------
arcpy.env.overwriteOutput = True
arcpy.env.rasterStatistics = None
arcpy.env.outputCoordinateSystem = "Coordinate Systems/Projected Coordinate Systems/World/WGS 1984 Web Mercator.prj"
arcpy.env.compression = "PackBits"

# Add the priority field if the species feature class lacks it.
if not arcpy.ListFields(speciesFL, PRIORITY_FIELDNAME):
    arcpy.AddMessage("Adding priority field to species feature class")
def create_thiessen_points(study_area, side_length, output_fc):
    """Creates points spaced such that Thiessen polygons will be hexagons.

    Arguments:
    study_area -- feature class defining area of interest
    side_length -- length of regular hexagon side
    output_fc -- name and location of output feature class

    Remarks:
    Generates a staggered lattice of points (odd rows shifted half a
    column) so that the Thiessen polygons built from them form regular
    hexagons with the requested side length.  Emits an arcpy error and
    returns early when the study area is empty or the side length is
    not positive.
    """
    # --- validate inputs ---
    feature_total = int(str(arcpy.GetCount_management(study_area)))
    if feature_total == 0:
        arcpy.AddError('Error: No features found in ' + str(study_area))
        return
    side_length = float(side_length)
    if side_length <= 0:
        arcpy.AddError('Error: Hexagon side length must be greater than zero.')
        return

    # --- point spacing for the hexagonal lattice ---
    col_step = 3.0 * side_length                  # horizontal spacing
    row_step = side_length / 2.0 * math.sqrt(3.0)  # vertical spacing
    stagger = col_step / 2                         # odd-row horizontal shift

    # Get the extent of the study area.
    # If in ArcMap, resolve the layer to its source feature class so we use
    # feature coordinates, not map coordinates.
    desc = arcpy.Describe(study_area)
    if desc.dataType == "FeatureLayer":
        desc = arcpy.Describe(desc.featureClass.catalogPath)
    extent = desc.extent

    # Pad the extent and derive row/column counts (extra margin to be safe).
    x_lo = extent.XMin - col_step
    y_lo = extent.YMin - row_step * 3.0
    x_hi = extent.XMax + col_step
    y_hi = extent.YMax + row_step * 3.0
    total_rows = int((y_hi - y_lo) / row_step) + 1
    total_cols = int((x_hi - x_lo) / col_step) + 2

    # --- create the output feature class ---
    spatial_ref = desc.spatialReference
    arcpy.CreateFeatureclass_management(os.path.dirname(output_fc),
                                        os.path.basename(output_fc),
                                        "POINT", "", "", "", spatial_ref)

    # --- populate output features ---
    arcpy.AddMessage('Creating ' + str(total_rows * total_cols) + ' points...')
    cursor = arcpy.InsertCursor(output_fc)
    feature = None
    try:
        y = y_lo
        for row_idx in range(total_rows):
            # stagger every other row by half a column
            x = x_lo - stagger / 2
            if row_idx % 2 != 0:
                x += stagger
            for _ in range(total_cols):
                feature = cursor.newRow()
                vertex = arcpy.Point()
                vertex.X = x
                vertex.Y = y
                feature.shape = vertex
                cursor.insertRow(feature)
                x += col_step
            y += row_step
    finally:
        # release row/cursor objects so the output is unlocked
        if feature:
            del feature
        if cursor:
            del cursor
def segment_created():
    """Insert one 3-D polyline per spreadsheet row into JLYWS\\piperack_polyline.

    Reads endpoint coordinates (x1,y1,z1 / x2,y2,z2) from the module-level
    xlrd sheet ``table`` (rows 1..nrows-1; row 0 holds the headers), skips
    rows with any blank coordinate, and copies every spreadsheet column
    whose header matches a feature-class field name onto the new row
    (Excel dates are converted to "YYYY-MM-DD" strings).  Progress is
    reported through the arcpy progressor.
    """
    arcpy.SetProgressor("step", "Creating PipeRackFrame...", 0, nrows - 1, 1)
    cur = arcpy.InsertCursor(r'JLYWS\piperack_polyline')
    fldname = [
        field.name for field in arcpy.ListFields(r'JLYWS\piperack_polyline')
    ]
    for i in range(1, nrows):
        arcpy.SetProgressorLabel("Execute {} line".format(i))
        L1 = table.cell(i, getColumnIndex(table, "x1")).value
        B1 = table.cell(i, getColumnIndex(table, "y1")).value
        H1 = table.cell(i, getColumnIndex(table, "z1")).value
        L2 = table.cell(i, getColumnIndex(table, "x2")).value
        B2 = table.cell(i, getColumnIndex(table, "y2")).value
        H2 = table.cell(i, getColumnIndex(table, "z2")).value
        # skip incomplete rows: a segment needs both endpoints
        if '' in [L1, L2, B1, B2, H1, H2]:
            continue
        arcpy.AddMessage(str(L1) + "\t" + str(B1) + "\t" + str(H1))
        # FIX: the second message previously repeated point 1 verbatim;
        # log the second endpoint instead.
        arcpy.AddMessage(str(L2) + "\t" + str(B2) + "\t" + str(H2))
        arcpy.AddMessage('\n')
        row = cur.newRow()
        # round X/Y to 8 decimals; assigning the Array to .shape lets arcpy
        # build the polyline geometry
        array = arcpy.Array([
            arcpy.Point(round(float(L1), 8), round(float(B1), 8), float(H1)),
            arcpy.Point(round(float(L2), 8), round(float(B2), 8), float(H2))
        ])
        row.shape = array
        # copy attribute values for every header that matches a field name
        for fldn in fldname:
            for j in range(0, ncols):
                if fldn == (table.cell(0, j).value.strip()).upper():
                    if table.cell(i, j).ctype == 3:
                        # Excel date cell -> "YYYY-MM-DD" string
                        date = xlrd.xldate_as_tuple(table.cell(i, j).value, 0)
                        tt = datetime.datetime.strftime(
                            datetime.datetime(*date), "%Y-%m-%d")
                        row.setValue(fldn, tt)
                    else:
                        try:
                            row.setValue(fldn, table.cell(i, j).value)
                        except Exception as e:
                            # best-effort: report and keep going
                            # (consistent with the file's 2to3 updates)
                            print(e)
                            arcpy.AddMessage(e)
        array.removeAll()
        cur.insertRow(row)
        arcpy.SetProgressorPosition()
    del cur
    arcpy.ResetProgressor()
    arcpy.AddMessage("Created {} piperackframe .".format(nrows - 1))
def split_shp_based_on_comparation(shp, f1, f2, inEpsg, outWork):
    """
    Split shp in two datasets:
    - shp_equal with the features with the same value in the attributes
      in f1 and f2;
    - shp_dif with the features with different values in the attributes
      in f1 and f2.

    f1 and f2 may each be a field name (str) or a list of field names;
    when lists, they are compared position by position and must have the
    same length.

    Raises ValueError on invalid or mismatched f1/f2.
    """
    import os
    from gesri.df.lyr import feat_lyr
    from gesri.df.prop.feat import get_gtype
    from gesri.df.prop.fld import lst_flds
    from gesri.df.mng.featcls import create_feat_class
    from gesri.df.mng.fld import copy_fields

    # normalize f1/f2 to lists (isinstance is subclass-safe, unlike type ==)
    f1 = [f1] if isinstance(f1, str) else f1 if isinstance(f1, list) else None
    f2 = [f2] if isinstance(f2, str) else f2 if isinstance(f2, list) else None
    if not f1 or not f2:
        raise ValueError('f1 and f2 values are not valid')
    if len(f1) != len(f2):
        raise ValueError('f1 and f2 should have the same length')

    arcpy.env.overwriteOutput = True

    # Create outputs (same geometry type and CRS as the input)
    inGeom = get_gtype(shp)
    equalShp = create_feat_class(
        os.path.join(
            outWork, '{}_equal{}'.format(
                os.path.splitext(os.path.basename(shp))[0],
                os.path.splitext(shp)[1])), inGeom, inEpsg)
    equalLyr = feat_lyr(equalShp)
    difShp = create_feat_class(
        os.path.join(
            outWork, '{}_dif{}'.format(
                os.path.splitext(os.path.basename(shp))[0],
                os.path.splitext(shp)[1])), inGeom, inEpsg)
    difLyr = feat_lyr(difShp)

    # Copy Fields
    inLyr = feat_lyr(shp)
    fields = lst_flds(inLyr)
    copy_fields(inLyr, equalLyr)
    copy_fields(inLyr, difLyr)

    # Read inputs and write in the outputs
    cursorRead = arcpy.SearchCursor(inLyr)
    cursorEqual = arcpy.InsertCursor(equalLyr)
    cursorDif = arcpy.InsertCursor(difLyr)
    line = cursorRead.next()
    while line:
        val_1 = [line.getValue(f) for f in f1]
        val_2 = [line.getValue(f) for f in f2]
        # decide the destination once instead of re-comparing on insert
        target = cursorEqual if val_1 == val_2 else cursorDif
        new_row = target.newRow()
        new_row.Shape = line.Shape
        for field in fields:
            # auto-managed fields are created by the output feature class
            if field in ('FID', 'Shape', 'ID'):
                continue
            new_row.setValue(field, line.getValue(field))
        target.insertRow(new_row)
        line = cursorRead.next()