arcpy.env.overwriteOutput = True # User input inLine = sys.argv[1] # in 10m profielpunten VAKkol = "PROFIELNAAM" # PROFIELnaam inRT = sys.argv[2] # in routesysteem Rkol = "ROUTE_ID" # de route kolom opbouw: 47_144 traj = sys.argv[3] # Normtrajectnummer kol1 = "AFSTAND_TOT_DIJKPAAL" kol2 = "RFTOMSCH" #------------------------------------------------------- arcpy.AddMessage("\n >>> START PROFIELNAAM BEPALEN... <<<") # describe de geometry lineDesc = arcpy.Describe(inLine) shapefieldname = lineDesc.ShapeFieldName # cursor lines = arcpy.UpdateCursor(inLine) for line in lines: # lijn uitlezen en begin en eindpunt selecteren. lineGeom = line.getValue(shapefieldname) endPoints = lineGeom #, lineGeom.lastPoint arcpy.AddMessage("\n Punt: " + str(line.OBJECTID)) uitPNT = arcpy.CreateFeatureclass_management("in_memory", "BeginEindPNT", "POINT", "", "", "", inLine) ptRows = arcpy.InsertCursor(uitPNT) ptRow = ptRows.newRow() ptRow.shape = endPoints ptRows.insertRow(ptRow) # Nu locatie op route zoeken tol = "5 Meters" # Zoekafstand 5 meter tbl = "locate_points" props = "RID POINT MEASPnt" # uitvoer kolommen
# Export SLK / mean results to a shapefile copy and plot them.
# (Script fragment; `cal`, `lectura`, `cont`, the directory variables and the
# limit variables are defined earlier in the file.)
media = cal.getSL_K_Media()
medio = cal.getSL_KPuntoMedio()
# Create a copy of the shapefile so it can be modified.
arcpy.CopyFeatures_management(PointsDir, SaveDir)
# Replace the backslashes with forward slashes.
SaveDirr = SaveDir.replace("\\", "/")
# Add the new fields to the shapefile.
for campo in nuevosCampos:
    arcpy.AddField_management(SaveDirr + ".shp", campo, "DOUBLE", "", "", "",
                              "", "NULLABLE")
# Update the shapefile: values come from parallel lists indexed by `cont`
# (assumes `cont` enters aligned with the cursor's row order — TODO confirm).
cursor = arcpy.UpdateCursor(SaveDirr + ".shp")
for row in cursor:
    row.setValue(campos[0], orden_X[cont])
    row.setValue(campos[1], orden_Y[cont])
    row.setValue(campos[2], orden_Z[cont])
    row.setValue("SLK", SLK[cont])
    row.setValue("DIST", Distancia[cont])
    row.setValue("P_MEDIO", medio[cont])
    row.setValue("SLK/MEDIA", media[cont])
    cont = cont + 1
    cursor.updateRow(row)
# Create the plotting object and draw the graph.
grafica = GraficarG()
grafica.graficarGRF(lectura.get_Zcoor(), lectura.get_Dist(), cal.get_SL_K(),
                    lim1_In, lim1_Sp, lim2_In, lim2_Sp)
# Build one stops feature class per GTFS direction and stamp each stop with
# the route/direction it belongs to.  (Fragment: the `except:` below closes a
# `try:` opened earlier in the file, so the loop sits one level in.)
    for direction in stoplist:
        stops = stoplist[direction]
        outputname = outStopsname
        if direction != None:
            outputname += str(direction)
        outStops = os.path.join(outGDB, outputname)
        outStops, outStopList = BBB_SharedFunctions.MakeStopsFeatureClass(
            outStops, stops)
        # Add a route_id and direction_id field and populate it
        arcpy.management.AddField(outStops, "route_id", "TEXT")
        arcpy.management.AddField(outStops, "direction_id", "TEXT")
        fields = ["route_id", "direction_id"]
        if BBB_SharedFunctions.ArcVersion == "10.0":
            # 10.0 lacks arcpy.da; fall back to the legacy cursor.
            cursor = arcpy.UpdateCursor(outStops)
            for row in cursor:
                row.setValue("route_id", route_id)
                row.setValue("direction_id", direction)
                cursor.updateRow(row)
            del cursor
        else:
            with arcpy.da.UpdateCursor(outStops, fields) as cursor:
                for row in cursor:
                    row[0] = route_id
                    row[1] = direction
                    cursor.updateRow(row)
except:
    arcpy.AddError("Error creating feature class of GTFS stops.")
    raise
# Scratch-dataset names used by the model's per-row iteration; each pass
# deletes the previous iteration's intermediates.  (Python 2 fragment:
# note the `print n` statement; cut off mid-loop.)
c = "c.tif"
d = "d.tif"
dR = "dReclassify.tif"
e = "e.shp"
elyr = r"F:\example\elyr.lyr"
f = "f.shp"
f1 = "f1.shp"
g = "g.shp"
gg = "gg.shp"
Input_true_raster_or_constant_value = "1"
Input_false_raster_or_constant_value = "0"

# Process: Iterate Row Selection
cursor = arcpy.UpdateCursor(inFeatures)
n = 0
arcpy.Delete_management(g, "")
#arcpy.Delete_management(g, "")
for row in cursor:
    # Clear every intermediate from the previous iteration.
    # NOTE(review): `a` and `b` are defined earlier in the file — not visible here.
    arcpy.Delete_management(a, "")
    arcpy.Delete_management(c, "")
    arcpy.Delete_management(d, "")
    arcpy.Delete_management(e, "")
    arcpy.Delete_management(f, "")
    arcpy.Delete_management(f1, "")
    arcpy.Delete_management(b, "")
    arcpy.Delete_management(elyr, "")
    arcpy.Delete_management(dR, "")
    print n
#Lasketaan pisteiden etäisyydet tieverkostosta ############################################### msg("Lasketaan kävelyaika ja matka tieverkostosta") arcpy.SetProgressorLabel( "KOKONAISMATKAKETJUN LASKENTA...Lasketaan kävelyaika ja matka tieverkostosta..." ) Aloitus() arcpy.Near_analysis(Origins, LiikenneElementit, "500 Meters", "NO_LOCATION", "NO_ANGLE") arcpy.Near_analysis(Destinations, LiikenneElementit, "500 Meters", "NO_LOCATION", "NO_ANGLE") #Lasketaan kävelyaika ja matka tieverkostosta: OrigReader = arcpy.UpdateCursor(Origins) DestReader = arcpy.UpdateCursor(Destinations) for row in OrigReader: row.Kavely_O_T = row.NEAR_DIST / Kavelynopeus row.KavelDistO = row.NEAR_DIST OrigReader.updateRow(row) del row for row in DestReader: row.Kavely_T_D = row.NEAR_DIST / Kavelynopeus row.KavelDistD = row.NEAR_DIST DestReader.updateRow(row) del row del OrigReader
import arcpy

arcpy.env.overwriteOutput = True

# Tool parameters: two feature classes, a TIME value for each, and the
# intersect output.
inFeature1 = arcpy.GetParameterAsText(0)
inFeature2 = arcpy.GetParameterAsText(1)
inFeatureTime_1 = arcpy.GetParameterAsText(2)
inFeatureTime_2 = arcpy.GetParameterAsText(3)


def _stamp_time_field(feature_class, time_value):
    """Ensure a DOUBLE field named TIME exists and set it on every row.

    Bug fix: ListFields returns Field objects, so the original test
    `"TIME" not in arcpy.ListFields(...)` was always True and AddField ran
    on every execution even when the field already existed.  Compare field
    *names* instead.
    """
    field_names = [f.name for f in arcpy.ListFields(feature_class)]
    if "TIME" not in field_names:
        arcpy.AddField_management(feature_class, "TIME", "DOUBLE", "", "", "25",
                                  "", "NULLABLE", "REQUIRED", "")
    cursor = arcpy.UpdateCursor(feature_class)
    for row in cursor:
        row.setValue("TIME", time_value)
        cursor.updateRow(row)
    del cursor


_stamp_time_field(inFeature1, inFeatureTime_1)
_stamp_time_field(inFeature2, inFeatureTime_2)

# Intersect the two time-stamped inputs, keeping all attributes.
inFeatures = [inFeature1, inFeature2]
outFeatures = arcpy.GetParameterAsText(4)
arcpy.Intersect_analysis(inFeatures, outFeatures, "ALL")
def updateValue(fc, field, value):
    """Write *value* into *field* for every row of feature class *fc*."""
    rows = arcpy.UpdateCursor(fc)
    for record in rows:
        record.setValue(field, value)
        rows.updateRow(record)
def onClick(self):
    """Convert closed polylines in the current workspace's shapefile into a
    new <name>_Polygon feature class, copying attribute values across and
    skipping (and recording) features that are not closed rings.
    NOTE(review): indentation reconstructed from a collapsed source line.
    """

    def getCoor(WKT):
        # Strip the "MULTILINESTRING ((" prefix (18 chars) and trailing "))"
        # to leave the bare coordinate list.
        return WKT[18:len(WKT) - 2]

    def IsEnclosed(WKT):
        # A ring is closed when the first X,Y pair equals the last X,Y pair.
        coordinates = WKT.split(" ")
        if (coordinates[0].strip(",") == coordinates[-2].strip(",")) & (
                coordinates[1].strip(",") == coordinates[-1].strip(",")):
            return True
        else:
            return False

    def PolylintToPolygon(WKT):
        # Re-wrap the coordinate list as a single-ring multipolygon.
        return arcpy.FromWKT("MULTIPOLYGON (((" + WKT + ")))")

    # Split the workspace path into parent folder + dataset name.
    input_fc = arcpy.env.workspace
    fc1 = input_fc.split('\\')
    name1 = fc1[-1]
    name = name1 + '.shp'
    path = ''
    for i in range(len(fc1[0:-1])):
        path += fc1[i]
        if i != len(fc1[0:-1]) - 1:
            path += '\\'
    arcpy.env.workspace = path
    target = name1 + "_Polygon"
    arcpy.CreateFeatureclass_management(arcpy.env.workspace, target, "POLYGON")

    # Mirror the source's attribute schema (minus FID/Shape) onto the target.
    fNamTyp = {}
    for field in arcpy.ListFields(name1):
        if field.baseName in ["FID", "Shape"]:
            continue
        else:
            fNamTyp[field.baseName] = field.type
    for key in fNamTyp:
        arcpy.AddField_management(target, key, fNamTyp[key])

    # First pass: cache every row's attribute values, in cursor order.
    fc = arcpy.env.workspace + '\\' + name
    cursor = None
    cursor = arcpy.SearchCursor(fc)
    fValues = []
    for road in cursor:
        row = {}
        for key in fNamTyp:
            row[key] = road.getValue(key)
        fValues.append(row)

    # Second pass: insert a polygon for each closed polyline; remember the
    # indices of features that were skipped so attributes stay aligned.
    cursorT = arcpy.da.InsertCursor(target, ["SHAPE@"])
    cursor = None
    cursor = arcpy.SearchCursor(fc)
    NonLineRing = []
    index = -1
    for road in cursor:
        index += 1
        record = road.getValue("Shape")
        WKT = getCoor(record.WKT)
        if IsEnclosed(WKT):
            try:
                cursorT.insertRow([PolylintToPolygon(WKT)])
            except:
                # Geometry that cannot be converted is silently skipped.
                continue
        else:
            NonLineRing.append(index)
            continue
    cursor = None
    cursorT = None

    # Third pass: copy the cached attribute values onto the new polygons,
    # skipping the indices recorded above.
    cursorT = arcpy.UpdateCursor(target)
    for key in fNamTyp:
        index = -1
        with arcpy.da.UpdateCursor(target, key) as cursor:
            for row in cursor:
                index += 1
                if index in NonLineRing:
                    continue
                else:
                    row[0] = fValues[index][key]
                cursor.updateRow(row)
    # NOTE(review): typo in original — `cusorT` leaves cursorT unreleased.
    cusorT = None
def unitid_dissolve(filename):
    """Populate UnitID from UnitID_FS, apply layer-type-specific record
    filtering (deleting rows in place), then repair and dissolve *filename*.

    Relies on module-level globals: layerType, in_workspace, copy_to_gdb,
    and (for the Pro path) intersectFeatureClass.
    Returns the name of the dissolved feature class.
    NOTE(review): indentation reconstructed from a collapsed source line.
    """
    arcpy.AddMessage(" ____________________________________________________________________")
    arcpy.AddMessage("Updating UnitID field from intersection")
    cur = arcpy.UpdateCursor(filename)
    field = "UnitID_FS"
    fieldrank = "GRANK_FIRE"
    fieldforest = "FORESTNAME"
    fieldother = "Type"
    fieldspecies = "SNAME_FIRE"
    # Per-reason deletion counters for the summary messages below.
    plant0512num = 0
    ranaboyliinum = 0
    cohosalmnum = 0
    ranamuscosanum = 0
    unprotforestnum = 0
    csvfile = in_workspace + "\\csv_tables\AllMerge_SummaryTable.csv"
    if layerType == "CNDDB":
        arcpy.AddMessage("csv File: " + csvfile)
        arcpy.AddMessage("NOTE: Code will operate differently for csv in Pro vs 10.x!!!!!")
        arcpy.AddMessage("Version of Python: " + sys.version)
        if sys.version_info[0] < 3:
            # uncomment when using arcgis 10.3
            with open(csvfile, 'rb') as f:
                reader = csv.reader(f)
                selectionList = list(reader)
        else:
            # use when using arcgis pro
            with open(csvfile) as f:
                reader = csv.reader(f)
                selectionList = list(reader)
    # populating UnitID field with UnitID_FS field
    for row in cur:
        speciesname = row.getValue(fieldspecies)
        forestname = row.getValue(fieldforest)
        row.UnitID = row.getValue(field)
        cur.updateRow(row)
        if layerType == "Wildlife_Observations":
            if speciesname == "Oncorhynchus kisutch" \
                    and str(row.getValue(field)) == "0516":
                cur.deleteRow(row)
                cohosalmnum += 1
                arcpy.AddMessage(
                    "Deleting row for Oncorhynchus kisutch because forest not protected, found in " + forestname)
        elif layerType == "Critical_Habitat_Polygons":
            if speciesname == "Rana muscosa" \
                    and str(row.getValue(field)) != "0501" \
                    and str(row.getValue(field)) != "0512" \
                    and str(row.getValue(field)) != "0502" \
                    and str(row.getValue(field)) != "0507":
                cur.deleteRow(row)
                ranamuscosanum += 1
                arcpy.AddMessage(
                    "Deleting row for Rana muscosa because not Southern forest species, found in " + forestname)
        # Used for filtering out records in CNDDB
        elif layerType == "CNDDB":
            # Used for deleting all the plant records in San Bernardino for CNDDB
            if str(row.getValue(field)) == "0512" \
                    and row.getValue(fieldrank) != "Sensitive" \
                    and row.getValue(fieldother) == "PLANT":
                cur.deleteRow(row)
                plant0512num += 1
                arcpy.AddMessage("deleted a row for 0512 Plant: " + speciesname)
            # Used for deleting all the Rana boylii not in the following three forests
            elif str(row.getValue(field)) != "0507" \
                    and str(row.getValue(field)) != "0513" \
                    and str(row.getValue(field)) != "0515" \
                    and speciesname == "Rana boylii":
                cur.deleteRow(row)
                ranaboyliinum += 1
                arcpy.AddMessage("deleted a row for Rana boylii in forest: " + forestname)
            # elif (str(row.getValue(field)) == "0508" \
            #         or str(row.getValue(field)) == "0514" \
            #         or str(row.getValue(field)) == "0510" \
            #         or str(row.getValue(field)) == "0505") \
            #         and speciesname == "Rana muscosa":
            #     cur.deleteRow(row)
            #     arcpy.AddMessage("deleted a row for Rana muscosa in forest: " + forestname)
            else:
                # Used for deleting all the species selected not in a particular forest
                for item in selectionList:
                    if item[0].startswith(speciesname) \
                            and speciesname != "Rana boylii" \
                            and speciesname != "Rana muscosa":
                        # Empty forest column means "protected everywhere".
                        if item[3] == "":
                            break
                        elif item[3] != forestname.upper():
                            cur.deleteRow(row)
                            unprotforestnum += 1
                            arcpy.AddMessage("deleted row for " + speciesname +
                                             " because found in " + forestname)
    del cur
    # running export to gdb just for datasets that required additional filtering
    # others were ran prior to this function
    if layerType == "CNDDB":
        arcpy.AddMessage("Total records deleted because they were Plants from San Bernardino : " + str(plant0512num))
        arcpy.AddMessage("Total records deleted because they were Rana boylii not in target forests : " + str(ranaboyliinum))
        arcpy.AddMessage("Total records deleted because they were species found in unprotected forests : " + str(unprotforestnum))
        copy_to_gdb("Interim", filename)
    elif layerType == "Wildlife_Observations":
        arcpy.AddMessage("Total records deleted because they were Oncorhynchus kisutch in STF : " + str(cohosalmnum))
        copy_to_gdb("Interim", filename)
    elif layerType == "Critical_Habitat_Polygons":
        arcpy.AddMessage("Total records deleted because they were Rana muscosa not in southern forests : " + str(ranamuscosanum))
        copy_to_gdb("Interim", filename)
    arcpy.AddMessage("Repairing Geometry ......")
    arcpy.RepairGeometry_management(filename)
    arcpy.AddMessage("Dissolving Features")
    dissolveFeatureClass = filename + "_dissolved"
    dissolvefields = ["UnitID", "GRANK_FIRE", "SNAME_FIRE", "CNAME_FIRE",
                      "SOURCEFIRE", "BUFFT_FIRE", "BUFFM_FIRE", "CMNT_FIRE",
                      "INST_FIRE"]
    if layerType != "Critical_Habitat_Lines" and layerType != "Critical_Habitat_Polygons" and layerType != "NOAA_ESU":
        dissolvefields.append("BUFF_DIST")
    if sys.version_info[0] < 3:
        arcpy.Dissolve_management(filename, dissolveFeatureClass,
                                  dissolvefields, "", "SINGLE_PART")
    else:
        # NOTE(review): Pro path dissolves intersectFeatureClass (a global),
        # not `filename` — confirm this is intentional.
        arcpy.PairwiseDissolve_analysis(intersectFeatureClass,
                                        dissolveFeatureClass, dissolvefields)
    arcpy.AddMessage("Repairing Dissolved Geometry ......")
    arcpy.RepairGeometry_management(filename)
    arcpy.AddMessage("Dissolve and Repair complete")
    arcpy.AddMessage(" ____________________________________________________________________")
    return dissolveFeatureClass
# Write per-stop headway statistics into every feature class in FCList.
# (Fragment: the arcpy.da branch is cut off mid-loop at the end of this chunk.)
try:
    # Prepare the fields we're going to add to the feature classes
    ending = "_" + dayshort + "_" + start_time_pretty + "_" + end_time_pretty
    fields_to_fill = ["NumTrips" + ending, "NumTripsPerHr" + ending,
                      "MaxWaitTime" + ending, "AvgHeadway" + ending]
    fields_to_read = ["stop_id", "route_id", "direction_id"] + fields_to_fill
    field_type_dict = {"NumTrips" + ending: "Short",
                       "NumTripsPerHr" + ending: "Double",
                       "MaxWaitTime" + ending: "Short",
                       "AvgHeadway" + ending: "Short"}
    for FC in FCList:
        # We probably need to add new fields for our calculations, but if the field
        # is already there, don't add it because we'll overwrite it.
        for field in fields_to_fill:
            if field not in FieldNames[FC]:
                arcpy.management.AddField(FC, field, field_type_dict[field])
        if BBB_SharedFunctions.ArcVersion == "10.0":
            # Legacy cursor for 10.0 (no arcpy.da module).
            cur2 = arcpy.UpdateCursor(FC, "", "", ";".join(fields_to_read))
            for row in cur2:
                rtpairtuple = (row.getValue("route_id"),
                               row.getValue("direction_id"))
                stop = row.getValue("stop_id")
                NumTrips, NumTripsPerHr, MaxWaitTime, AvgHeadway = \
                    RetrieveStatsForStop(stop, rtpairtuple)
                row.setValue("NumTrips" + ending, NumTrips)
                row.setValue("NumTripsPerHr" + ending, NumTripsPerHr)
                row.setValue("MaxWaitTime" + ending, MaxWaitTime)
                row.setValue("AvgHeadway" + ending, AvgHeadway)
                cur2.updateRow(row)
        else:
            # For everything 10.1 and forward
            cur2 = arcpy.da.UpdateCursor(FC, fields_to_read)
            for row in cur2:
                rtpairtuple = (row[1], row[2])  # (route_id, direction_id)
                stop = row[0]
# Basically repeats the above loop, but does so to populate the geodb # is the "yes"/"no" values for fc in fcs: # In order to flag duplicates, you need to create # a field. First, list all the fields in each feature class fieldList = arcpy.ListFields(fc) # Add the field to the feature class, # if it does not already exist UpdateField = "Duplicate" if not UpdateField in fieldList: arcpy.AddField_management(fc, UpdateField, "text", "1") # To populate the field "Duplicate". Use the Update Cursor. rows = arcpy.UpdateCursor(fc) # Move to the first row row = rows.next() # Make a list of all the address values in the Address field # loop through the list till it reaches an end while row <> None: # Declare the field to search (i.e. row.___). addressString = str(row.Address) # Ignore any blank Address record if addressString != ' ' and addressString in addressDictionary.keys( ): # check if there are duplicates by the dictionary value (aka count) if addressDictionary[addressString] > 1:
# Process the wind-parameter feature class in OBJECTID-ranged subsets of
# `length_id` rows each.  (Fragment: cut off inside the row loop.)
fileCount_start = math.ceil(start_id / length_id)
fileCount_end = math.ceil(end_id / length_id)
fileCount = numpy.arange(fileCount_start, fileCount_end + 1)
for fc in fileCount:
    print("Processing all parameters subset " + str(fc) + " from object ID " +
          str(start_id) + " to" + str(start_id + length_id - 1))
    # Layer restricted to the current OBJECTID window.
    allParameters_subset = arcpy.MakeFeatureLayer_management(
        allParameters, "allParameters_" + str(fc),
        ' "OBJECTID" >= ' + str(start_id) + ' AND ' + '"OBJECTID" <= ' +
        str(start_id + length_id - 1))
    #allParameters_subset = arcpy.CopyFeatures_management("allParameters_" + str(fc), "in_memory\\params")
    c = 0
    cursor = arcpy.UpdateCursor(allParameters_subset)
    for row in cursor:
        # print("row =" + str(c))
        # Per-cell Weibull wind parameters and atmospheric inputs.
        FID = row.getValue("OBJECTID")
        K = row.getValue("weibull_shape_K")
        A = row.getValue("weibull_scale_A")
        wind_speed = row.getValue("wind_speed")
        air_density = row.getValue("air_density")
        #### If calculating air density from elevation and temperature data ####
        # elev_m = row.getValue("demgadm_m")
        # temp_C = row.getValue("tmean_Cx10") / 10   # since worldClim data has units of deg C*10
        # temp_K = temp_C + 273.15                   # deg Kelvin
        # airDensity = (Po / (R * temp_K)) * math.exp((-g * elev_m) / (R * temp_K))  ## kg/m3
        # air density = std atmsospheric pressure (at sea level) / (gas constant*Temp) *
        # exp(-gravitational acc * elevation (m)/ (gas constant*temp))
## expression = "round(!SHAPE_Length! * 3.280839896,1)" ## else: ## AddMsgAndPrint("\tXY Units are unknown, unable to proceed. Exiting...\n",2) ## sys.exit() #arcpy.CalculateField_management(lineTemp, "LENGTH_FT", expression, "VB", "") expression = "round(!Shape.Length@feet!,1)" arcpy.CalculateField_management(lineTemp, "LENGTH_FT", expression, "PYTHON_9.3") del expression # Calculate number of stations / remainder AddMsgAndPrint("\nCalculating the number of stations...", 0) AddMsgAndPrint("\n\tStation Point interval: " + str(interval) + " Feet", 0) rows = arcpy.UpdateCursor(lineTemp) row = rows.next() while row: row.ID = row.OBJECTID if row.LENGTH_FT < interval: AddMsgAndPrint("\tThe Length of line " + str(row.ID) + " is less ", 2) AddMsgAndPrint( "\tthan the specified interval of " + str(interval) + " feet.", 2) AddMsgAndPrint( "\tChoose a lower interval or supply a longer line. Exiting...\n", 2) sys.exit() exp = row.LENGTH_FT / interval - 0.5 + 1 row.NO_STATIONS = str(round(exp))
arcpy.Delete_management("in_memory") # Add a field which will be used to add the grid labels arcpy.AddMessage("Adding field for labeling the grid") gridField = "Grid" arcpy.AddField_management(tempSort, gridField, "TEXT") # Number the fields arcpy.AddMessage("Numbering the grids") letterIndex = 1 secondLetterIndex = 1 letter = 'A' secondLetter = 'A' number = 1 lastY = -9999 cursor = arcpy.UpdateCursor(tempSort) for row in cursor: yPoint = row.getValue("SHAPE").firstPoint.Y if (lastY != yPoint) and (lastY != -9999): letterIndex += 1 letter = ColIdxToXlName(letterIndex) if (labelStyle != "Numeric"): number = 1 secondLetter = 'A' secondLetterIndex = 1 lastY = yPoint if (labelStyle == "Alpha-Numeric"): row.setValue(gridField, str(letter) + str(number)) elif (labelStyle == "Alpha-Alpha"): row.setValue(gridField, str(letter) + str(secondLetter))
# Sort the grid upper-left first, label cells "A1", "A2", ... advancing the
# letter on each new row, then add the result to the map document.
arcpy.AddMessage("Sorting the grid for labeling")
arcpy.Sort_management(tempOutput, outputFeatureClass,
                      [["Shape", "ASCENDING"]], "UL")
arcpy.Delete_management("in_memory")

# Add a field which will be used to add the grid labels
arcpy.AddMessage("Adding field for labeling the grid")
gridField = "Grid"
arcpy.AddField_management(outputFeatureClass, gridField, "TEXT")

# Number the fields
arcpy.AddMessage("Numbering the grids")
letter = 'A'
number = 1
lastY = -9999   # sentinel: no previous row seen yet
cursor = arcpy.UpdateCursor(outputFeatureClass)
for row in cursor:
    yPoint = row.getValue("SHAPE").firstPoint.Y
    # New Y value -> next grid row: bump the letter, reset the number.
    if (lastY != yPoint) and (lastY != -9999):
        letter = chr(ord(letter) + 1)
        number = 1
    lastY = yPoint
    row.setValue(gridField, str(letter) + str(number))
    cursor.updateRow(row)
    number += 1

# Get and label the output feature
#TODO: Pro updates for arcpy.mapping
layerToAdd = arcpy.mapping.Layer(outputFeatureClass)
arcpy.mapping.AddLayer(df, layerToAdd, "AUTO_ARRANGE")
# For each parcel, sum inverse-square distances (1/d^2) to the POI layer and
# join the total back onto the parcels as an accessibility weight.
fcparcel = arcpy.GetParameterAsText(0)
poilayer = arcpy.GetParameterAsText(1)
addfieldname = arcpy.GetParameterAsText(2)

# Scratch datasets for the intermediate distance table and its summary.
tempData = arcpy.env.scratchGDB + os.path.sep + "parcelpoint"
temptable = arcpy.env.scratchGDB + os.path.sep + "outtable"
temptable2 = arcpy.env.scratchGDB + os.path.sep + "outtable2"

arcpy.FeatureToPoint_management(fcparcel, tempData)
arcpy.PointDistance_analysis(tempData, poilayer, temptable)
# Drop leftovers from a previous run before recomputing.
arcpy.DeleteField_management(fcparcel, "SUM_ca")
arcpy.DeleteField_management(temptable, "ca")
arcpy.AddField_management(temptable, "ca", "FLOAT")
cur = arcpy.UpdateCursor(temptable)
for row in cur:
    newdistance = row.getValue("DISTANCE")
    if newdistance != 0:
        # Inverse-square distance weight.
        row.setValue("ca", 1.0 / newdistance**2)
        cur.updateRow(row)
    else:
        # Coincident point: avoid division by zero.
        row.setValue("ca", 0)
        cur.updateRow(row)
# Sum the weights per parcel and join the total back on.
arcpy.Statistics_analysis(temptable, temptable2, [["ca", "SUM"]], "INPUT_FID")
arcpy.JoinField_management(fcparcel, "FID", temptable2, "INPUT_FID",
                           ["SUM_ca"])
arcpy.DeleteField_management(fcparcel, addfieldname)
arcpy.AddField_management(fcparcel, addfieldname, "FLOAT")
def main():
    """Extract, dissolve, and renumber per-TSA boundary features.

    Command line: tsaNums root year bnds.  For each TSA listed in tsaNums,
    selects its features from `bnds`, dissolves on TSA_NUMBER, writes a
    `bnd` feature class into the TSA's wrk dataset, and numbers its rows
    in a new bnd_fid field.  Relies on module-level helpers
    fileListToArray() and delFC().
    """
    logFileName = "T:/getBnd.log"
    tsaNums = sys.argv[1]
    root = sys.argv[2]
    year = str(sys.argv[3])
    bnds = sys.argv[4]
    tsas = []
    fileListToArray(tsas, tsaNums)
    arcpy.env.overwriteOutput = True
    bnd = "bnd"
    layer = "xx00001_layer"
    # Fix: the original used the Python-2-only `file()` builtin and closed the
    # log only on success; open() + a context manager works on both Python 2
    # and 3 and always closes the handle.
    with open(logFileName, "w") as logFile:
        for tsa in tsas:
            tsaNum = tsa.split("tsa")[-1]
            rootTSAgdb = root + "\\" + tsa + "\\" + tsa + "_" + year + ".gdb"
            rootTSAgdbWrk = rootTSAgdb + "\\wrk"
            rootTSAgdbBnd = rootTSAgdbWrk + "\\bnd"
            scratch00001 = rootTSAgdb + "\\wrk\\xx00001"
            scratch00002 = rootTSAgdb + "\\wrk\\xx00002"
            # Clear any leftovers from a previous run.
            delFC(scratch00001)
            delFC(scratch00002)
            delFC(rootTSAgdbBnd)
            arcpy.AddMessage("Making feature layer from %s..." % bnds)
            arcpy.MakeFeatureLayer_management(
                bnds, layer, "\"TSA_NUMBER\" = '" + tsaNum + "'", "",
                "TSA_NUMBER TSA_NUMBER VISIBLE")
            arcpy.AddMessage("Coping features to %s..." % (scratch00001))
            arcpy.CopyFeatures_management(layer, scratch00001)
            arcpy.AddMessage("Dissolving on TSA_NUMBER...")
            arcpy.Dissolve_management(scratch00001, scratch00002,
                                      "TSA_NUMBER;included", "", "SINGLE_PART")
            arcpy.AddMessage("Converting to %s/%s..." % (rootTSAgdbWrk, bnd))
            fieldMapping = ("tsa_number tsa_number True True False 3 String 0 0 ,First,#,"
                            + rootTSAgdbWrk + "/" + scratch00002
                            + ",TSA_NUMBER,-1,-1")
            arcpy.FeatureClassToFeatureClass_conversion(
                scratch00002, rootTSAgdbWrk, bnd, "", fieldMapping)
            arcpy.AddMessage("Populating bnd_fid in " + rootTSAgdbWrk + "/" +
                             bnd + "...")
            arcpy.AddField_management(rootTSAgdbBnd, "bnd_fid", "LONG")
            # Number the rows 1..N; iterate the cursor directly instead of the
            # legacy next()/while idiom.
            rows = arcpy.UpdateCursor(rootTSAgdbBnd)
            n = 1
            for row in rows:
                row.bnd_fid = n
                rows.updateRow(row)
                n = n + 1
            del rows
            # Report the number of features tagged (was Python-2
            # `print n - 1`; placement after the loop reconstructed).
            print(n - 1)
            # cleanup
            delFC(scratch00001)
            delFC(scratch00002)
def SLEM(Line, Distance, Output, TempFolder, TF):
    """Split Line Every Meters: segment each polyline of *Line* into pieces of
    length *Distance* using linear referencing (routes + line events).

    The input type is detected from its fields and handled by one of four
    branches: raw polylines (k=0), UGOs (k=1), sequenced UGOs with Order_ID
    (k=2), or AGOs with Rank_AGO (k=3).  *Output* receives the sorted split
    result; temporary datasets are removed when TF == "true".  Returns the
    sorted output (the result of arcpy.Sort_management).
    NOTE(review): indentation reconstructed from collapsed source lines.
    """
    CopyLine = arcpy.CopyFeatures_management(Line, "%ScratchWorkspace%\CopyLine")
    fieldnames = [f.name for f in arcpy.ListFields(CopyLine)]

    #/identification of the polyline type : raw, UGOs, sequenced UGOs, or AGOs
    k = 0
    if "Rank_AGO" in fieldnames:
        k = 3
    elif "Order_ID" in fieldnames:
        k = 2
    elif "Rank_UGO" in fieldnames:
        k = 1
    arcpy.AddMessage(k)

    ################################
    ########## Raw polyline ########
    ################################
    if k == 0:
        #/shaping of the segmented result
        # Rank_UGO is seeded from the first field (the OID) so each line
        # becomes its own route.
        arcpy.AddField_management(CopyLine, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "Rank_UGO", "[" + fieldnames[0] + "]", "VB", "")
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")

        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes",
                                           "TWO_FIELDS", "From_Measure", "To_Measure")

        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

        UPD_SL.UpToDateShapeLengthField(LineRoutes)

        # One event row per Distance-step along each route.
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Rank_UGO = line.Rank_UGO
                row.To_M = tempdistance + float(Distance)
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents

        #/creation of the route event layer
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_UGO", PointEventTEMP, "Rank_UGO LINE Distance To_M",
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP,
                                              "%ScratchWorkspace%\\Split",
                                              "", "0", "0", "0")
        Sort = arcpy.Sort_management(
            Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")

        #/calculation of the "Distance" field
        # Two parallel cursors: rows2 runs one row ahead of rows1 so each
        # row's Distance is the previous row's Distance + length (reset to 0
        # at each new Rank_UGO).
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Sort)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n == nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1

        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    ##################
    ###### UGO #######
    ##################
    if k == 1:
        #/shaping of the segmented result
        # Rank_UGO already exists; only the measure fields are added.
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")

        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes",
                                           "TWO_FIELDS", "From_Measure", "To_Measure")

        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

        UPD_SL.UpToDateShapeLengthField(LineRoutes)

        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Rank_UGO = line.Rank_UGO
                row.To_M = tempdistance + float(Distance)
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents

        #/creation of the route event layer
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_UGO", PointEventTEMP, "Rank_UGO LINE Distance To_M",
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP,
                                              "%ScratchWorkspace%\\Split",
                                              "", "0", "0", "0")
        Sort = arcpy.Sort_management(
            Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")

        #/calculation of the "Distance" field
        # Same look-ahead two-cursor pattern as the k == 0 branch.
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Sort)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n == nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1

        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    ################################
    ######### Sequenced UGO ########
    ################################
    if k == 2:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!Shape_Length!", "PYTHON_9.3", "")

        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes",
                                           "TWO_FIELDS", "From_Measure", "To_Measure")

        # Carry Order_ID over to the routes by walking both datasets in
        # Rank_UGO order (cursor order is assumed to match the sort).
        arcpy.AddField_management(LineRoutes, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        Sort = arcpy.Sort_management(Line, "%ScratchWorkspace%\\Sort",
                                     [["Rank_UGO", "ASCENDING"]])
        rows1 = arcpy.UpdateCursor(LineRoutes)
        rows2 = arcpy.SearchCursor(Sort)
        for line1 in rows1:
            line2 = rows2.next()
            line1.Order_ID = line2.Order_ID
            rows1.updateRow(line1)

        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

        UPD_SL.UpToDateShapeLengthField(LineRoutes)

        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.To_M = tempdistance + float(Distance)
                row.Order_ID = line.Order_ID
                row.Rank_UGO = line.Rank_UGO
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents

        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_UGO", PointEventTEMP, "Rank_UGO LINE Distance To_M",
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP,
                                              "%ScratchWorkspace%\\Split",
                                              "", "0", "0", "0")
        Sort = arcpy.Sort_management(
            Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")

        #/calculation of the "Distance" field
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Split)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n >= nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1

        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    #############
    #### AGO ####
    #############
    if k == 3:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "VB", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # Shape field may be named "forme" (French locale data) — try both.
        try:
            arcpy.CalculateField_management(CopyLine, "To_Measure",
                                            "!shape.length!", "PYTHON_9.3", "")
        except:
            arcpy.CalculateField_management(CopyLine, "To_Measure",
                                            "!forme.length!", "PYTHON_9.3", "")

        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_AGO", "%ScratchWorkspace%\\LineRoutes",
                                           "TWO_FIELDS", "From_Measure", "To_Measure")
        arcpy.AddField_management(LineRoutes, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(LineRoutes, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(LineRoutes, "AGO_Val", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

        UPD_SL.UpToDateShapeLengthField(LineRoutes)

        # Export the attribute table to text and read the identifying columns
        # back, since the routes lost them.
        Ext.Export(CopyLine, TempFolder, "ExportTable")
        # NOTE(review): `fichier` is never closed — resource leak in original.
        fichier = open(TempFolder + "\\ExportTable.txt", 'r')
        Order_ID = []
        Rank_UGO = []
        Dist = []
        Rank_AGO = []
        AGO_Val = []
        head = fichier.readline().split('\n')[0].split(';')
        iOrder_ID = head.index("Order_ID")
        iRank_UGO = head.index("Rank_UGO")
        iRank_AGO = head.index("Rank_AGO")
        iAGO_Val = head.index("AGO_Val")
        for l in fichier:
            Order_ID.append(int(l.split('\n')[0].split(';')[iOrder_ID]))
            Rank_UGO.append(int(l.split('\n')[0].split(';')[iRank_UGO]))
            Rank_AGO.append(float(l.split('\n')[0].split(';')[iRank_AGO]))
            AGO_Val.append(
                float(l.split('\n')[0].split(';')[iAGO_Val].replace(',', '.')))
        # Write the exported values back, relying on matching row order.
        p = 0
        rows1 = arcpy.UpdateCursor(LineRoutes)
        for line1 in rows1:
            line1.Order_ID = Order_ID[p]
            line1.Rank_UGO = Rank_UGO[p]
            line1.Rank_AGO = Rank_AGO[p]
            line1.AGO_Val = AGO_Val[p]
            rows1.updateRow(line1)
            p += 1

        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Distance_From_Start", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_AGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "AGO_Val", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Distance_From_Start = tempdistance
                row.To_M = tempdistance + float(Distance)
                row.Order_ID = line.Order_ID
                row.Rank_UGO = line.Rank_UGO
                row.Rank_AGO = line.Rank_AGO
                row.AGO_Val = line.AGO_Val
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents

        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_AGO", PointEventTEMP,
            "Rank_AGO LINE Distance_From_Start To_M",
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP,
                                              "%ScratchWorkspace%\\Split",
                                              "", "0", "0", "0")
        arcpy.AddField_management(Split, "Distance", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(Split, "Distance",
                                        "!Distance_From_Start!", "PYTHON_9.3", "")
        arcpy.DeleteField_management(Split, ["To_M", "Distance_From_Start"])
        Sort = arcpy.Sort_management(
            Split, Output,
            [["Order_ID", "ASCENDING"], ["Rank_UGO", "ASCENDING"],
             ["Rank_AGO", "ASCENDING"], ["Distance", "ASCENDING"]])

        UPD_SL.UpToDateShapeLengthField(Sort)

        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)

    return Sort
def splitline (inFC,FCName,alongDist):
    """Split every line of inFC into pieces no longer than alongDist.

    A copy of inFC is created as <env.workspace>/<FCName>; each source row
    whose geometry is longer than alongDist is replaced by one output row
    per split piece (all editable attributes copied over), shorter lines
    are copied through unchanged.
    NOTE(review): distances are planar (Pythagoras in map units) -- assumes
    the data is projected, not geographic; confirm.
    """

    # Output location: current workspace plus the caller-supplied name.
    OutDir = env.workspace
    outFCName = FCName
    outFC = OutDir+"/"+outFCName

    def distPoint(p1, p2):
        # Planar Euclidean distance between two arcpy Points.
        calc1 = p1.X - p2.X
        calc2 = p1.Y - p2.Y
        return math.sqrt((calc1**2)+(calc2**2))

    def midpoint(prevpoint,nextpoint,targetDist,totalDist):
        # Point located targetDist along the segment prevpoint->nextpoint,
        # where totalDist is the full segment length (linear interpolation).
        newX = prevpoint.X + ((nextpoint.X - prevpoint.X) * (targetDist/totalDist))
        newY = prevpoint.Y + ((nextpoint.Y - prevpoint.Y) * (targetDist/totalDist))
        return arcpy.Point(newX, newY)

    def splitShape(feat,splitDist):
        # Walk the vertices of one (possibly multipart) polyline and return
        # a list of arcpy.Array vertex arrays, each at most splitDist long.
        # Count the number of parts in the current multipart feature
        #
        partcount = feat.partCount
        partnum = 0
        # Enter while loop for each part in the feature (if a singlepart
        # feature this will occur only once)
        #
        lineArray = arcpy.Array()

        while partnum < partcount:
            # Print the part number
            #
            #print "Part " + str(partnum) + ":"
            part = feat.getPart(partnum)
            #print part.count

            totalDist = 0            # distance accumulated since the last cut

            pnt = part.next()
            pntcount = 0

            prevpoint = None
            shapelist = []           # completed pieces

            # Enter while loop for each vertex
            #
            while pnt:
                if not (prevpoint is None):
                    thisDist = distPoint(prevpoint,pnt)
                    maxAdditionalDist = splitDist - totalDist

                    print thisDist, totalDist, maxAdditionalDist

                    if (totalDist+thisDist)> splitDist:
                        # This segment crosses one or more cut points:
                        # insert interpolated points until the remainder
                        # fits within splitDist.
                        while(totalDist+thisDist) > splitDist:
                            maxAdditionalDist = splitDist - totalDist
                            #print thisDist, totalDist, maxAdditionalDist
                            newpoint = midpoint(prevpoint,pnt,maxAdditionalDist,thisDist)
                            lineArray.add(newpoint)
                            shapelist.append(lineArray)

                            # Start the next piece at the cut point.
                            lineArray = arcpy.Array()
                            lineArray.add(newpoint)
                            prevpoint = newpoint
                            thisDist = distPoint(prevpoint,pnt)
                            totalDist = 0

                        lineArray.add(pnt)
                        totalDist+=thisDist
                    else:
                        totalDist+=thisDist
                        lineArray.add(pnt)
                        #shapelist.append(lineArray)
                else:
                    # First vertex of the part: start the current piece.
                    lineArray.add(pnt)
                    totalDist = 0

                prevpoint = pnt
                pntcount += 1

                pnt = part.next()

                # If pnt is null, either the part is finished or there is an
                # interior ring
                #
                if not pnt:
                    pnt = part.next()
                    if pnt:
                        print "Interior Ring:"
            partnum += 1

        # Flush the final, possibly shorter-than-splitDist piece.
        # NOTE(review): shapelist is re-initialised per part, so for a true
        # multipart feature only the last part's pieces survive -- confirm
        # inputs are singlepart.
        if (lineArray.count > 1):
            shapelist.append(lineArray)

        return shapelist

    # Recreate the output as a copy of the input, then empty it so only the
    # schema is kept.
    if arcpy.Exists(outFC):
        arcpy.Delete_management(outFC)

    arcpy.Copy_management(inFC,outFC)

    #origDesc = arcpy.Describe(inFC)
    #sR = origDesc.spatialReference

    #revDesc = arcpy.Describe(outFC)
    #revDesc.ShapeFieldName

    deleterows = arcpy.UpdateCursor(outFC)
    for iDRow in deleterows:
        deleterows.deleteRow(iDRow)

    # Release cursor locks (the names are absent when the copy had no rows).
    try:
        del iDRow
        del deleterows
    except:
        pass

    inputRows = arcpy.SearchCursor(inFC)
    outputRows = arcpy.InsertCursor(outFC)
    fields = arcpy.ListFields(inFC)

    numRecords = int(arcpy.GetCount_management(inFC).getOutput(0))
    OnePercentThreshold = numRecords // 100
    #printit(numRecords)

    iCounter = 0       # records processed so far
    iCounter2 = 0      # records since the last (disabled) progress tick

    for iInRow in inputRows:
        inGeom = iInRow.shape
        iCounter+=1
        iCounter2+=1
        # Progress-reporting hook, currently disabled.
        if (iCounter2 > (OnePercentThreshold+0)):
            #printit("Processing Record "+str(iCounter) + " of "+ str(numRecords))
            iCounter2=0

        if (inGeom.length > alongDist):
            # Long line: one output row per split piece, copying every
            # editable attribute value from the source row.
            shapeList = splitShape(iInRow.shape,alongDist)

            for itmp in shapeList:
                newRow = outputRows.newRow()
                for ifield in fields:
                    if (ifield.editable):
                        newRow.setValue(ifield.name,iInRow.getValue(ifield.name))
                newRow.shape = itmp
                outputRows.insertRow(newRow)
        else:
            # Short line: copy the row through unchanged.
            outputRows.insertRow(iInRow)

    # Release cursor locks on the input/output feature classes.
    del inputRows
    del outputRows
minCalcStr = minCalcStr[:-1] + ")" maxCalcStr = maxCalcStr[:-1] + ")" rangeCalcStr = "!{}! - !{}!".format(maxUpliftFld,minUpliftFld) #Apply the calcStrings for min/max/range msg("...calculating min uplift") arcpy.CalculateField_management(upliftTable,minUpliftFld,minCalcStr,"PYTHON") msg("...calculating max uplift") arcpy.CalculateField_management(upliftTable,maxUpliftFld,maxCalcStr,"PYTHON") msg("...calculating uplift range") msg(rangeCalcStr) arcpy.CalculateField_management(upliftTable,rngUpliftFld,rangeCalcStr,"PYTHON") #Create the update cursor to calculate highCount and lowCount msg("Computing counts above and below thresholds") records = arcpy.UpdateCursor(upliftTable) rec = records.next() while rec: #Initialize the values hiCount = 0.0 loCount = 0.0 #Loop through uplift fields and count # spp above and below thresholds for fld in upliftFlds: if rec.getValue(fld) > highThreshold: hiCount += 1 if rec.getValue(fld) < lowThreshold: loCount += 1 #Update the values in the high and low columns rec.setValue(hiCountFld,hiCount / fldCount) rec.setValue(loCountFld,loCount / fldCount) #Committ updates records.updateRow(rec) #Move to the next record
# --- Erase Data
# Overlay priority: Class 3 beats Class 2 beats Class 1, so each lower
# class is erased by every higher class before being tagged with its value.
# --- Erase Class 2 From Class 1
arcpy.Erase_analysis(in_features="Class1_Eliminate",
                     erase_features="Class2_Eliminate",
                     out_feature_class="Class1_Erase_Class2",
                     cluster_tolerance="")
# --- Erase Class 3 From New Class 1
arcpy.Erase_analysis(in_features="Class1_Erase_Class2",
                     erase_features="Class3_Eliminate",
                     out_feature_class="Class1_Erase_Class3",
                     cluster_tolerance="")
print("Class 2 and 3 Is Overwritten Class 1")

# --- Erase Class 3 From Class 2
arcpy.Erase_analysis(in_features="Class2_Eliminate",
                     erase_features="Class3_Eliminate",
                     out_feature_class="Class2_Erase_Class3",
                     cluster_tolerance="")
print("Class 3 Is Overwritten Class 2")

# --- Give Class 1 Value 1
# FIX: pre-bind `feature` so the `del` below cannot raise NameError when
# the cursor yields no rows.
feature = None
features = arcpy.UpdateCursor("Class1_Erase_Class3")
for feature in features:
    if feature.ORIG_FID == 1:
        # NOTE(review): writes 1 over an ORIG_FID that is already 1 --
        # presumably ORIG_FID doubles as the class code; confirm intent.
        feature.ORIG_FID = 1
        features.updateRow(feature)
del feature, features  # release the cursor's lock on the feature class

# --- Give Class 2 Value 2
feature = None
features = arcpy.UpdateCursor("Class2_Erase_Class3")
for feature in features:
    if feature.ORIG_FID == 1:
        feature.ORIG_FID = 2
        features.updateRow(feature)
del feature, features  # release the cursor's lock on the feature class

# --- Give Class 3 Value 3
Pts = arcpy.FeatureVerticesToPoints_management(Out, "%ScratchWorkspace%\\Pts", "BOTH_ENDS") arcpy.AddXY_management(Pts) #/extraction and calculation of the topologic metrics # Ajout Aurelie arcpy.CheckOutExtension("3D") # Fin Ajout Aurelie arcpy.AddSurfaceInformation_3d(Out, DEM, "Z_MEAN", "BILINEAR") arcpy.AddSurfaceInformation_3d(Pts, DEM, "Z", "BILINEAR") arcpy.AddField_management(Out, "Slope", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") arcpy.AddField_management(Out, "Slope3D", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") ncurrentstep+=1 arcpy.AddMessage("Calculating metrics - Step " + str(ncurrentstep) + "/" + str(nstep)) rows1 = arcpy.UpdateCursor(Out) rows2 = arcpy.SearchCursor(Pts) rows3 = arcpy.SearchCursor(Pts) line2 = rows2.next() line3 = rows3.next() line3 = rows3.next() for line1 in rows1 : line1.Z_Up = line2.Z line1.Z_Down = line3.Z line1.Slope = (line1.Z_Up - line1.Z_Down) / (((line3.POINT_X-line2.POINT_X)**2 + (line3.POINT_Y-line2.POINT_Y)**2)**0.5) line1.Slope3D = (line1.Z_Up - line1.Z_Down) / line1.Shape_Length
def one2N(shp):
    """Expand each building-footprint feature of *shp* into one feature per
    storey, inserted into the (module-level) ``newShp`` feature class.

    Reads total height / storey height / storey count from the fields named
    in the module-level ``parms`` dict (falling back to "zg", "cg", "cs"),
    normalises the three values on the source row, then inserts one copy of
    the row per storey with cumulative heights.
    NOTE(review): relies on globals ``parms`` and ``newShp`` defined
    elsewhere in this script -- confirm they are initialised before calling.
    """
    # List all fields of the source shapefile.
    fieldObjList = arcpy.ListFields(shp)
    fieldArr = []
    # Collect the current SHP attribute field names.
    for f in fieldObjList:
        fieldArr.append(f.name)
    # Core field definitions: cg = storey height, cs = storey count,
    # zg = total height (aliases are the original Chinese labels).
    coreFieldlist = [
        {"fieldName": "cg", "fieldAlias": u"层高"},
        {"fieldName": "cs", "fieldAlias": u"层数"},
        {"fieldName": "zg", "fieldAlias": u"总高度"}]
    # Create any core field that does not exist on the source yet.
    for c in coreFieldlist:
        fieldPrecision = 99
        if not c["fieldName"] in fieldArr:
            fieldName = c["fieldName"]
            fieldAlias = c["fieldAlias"]
            arcpy.AddField_management(shp, fieldName, "LONG", fieldPrecision, "", "", fieldAlias, "NULLABLE")
            print u"已生成" + fieldName + u"字段"
        # Mirror each core field on the output feature class.
        arcpy.AddField_management(newShp, c["fieldName"], "LONG", fieldPrecision, "", "", c["fieldAlias"], "NULLABLE")
    # Iterate over every source feature.
    cursor = arcpy.UpdateCursor(shp)
    # Total number of source features (string from GetCount).
    features_cnt = arcpy.GetCount_management(shp).getOutput(0)
    print u"要素总数:{0}".format(features_cnt)
    # 1-based index of the feature being processed (progress display only).
    feature_index = 1
    for row in cursor:
        # Resolve the three core parameter field names, defaulting to the
        # literal field names when a parms entry is falsy.
        parms["zg"] = parms["zg"] if(parms["zg"]) else "zg"
        parms["cg"] = parms["cg"] if(parms["cg"]) else "cg"
        parms["cs"] = parms["cs"] if(parms["cs"]) else "cs"
        _name = row.getValue('name')
        # Building total height (0 when missing).
        _zg = row.getValue(parms["zg"]) if(row.getValue(parms["zg"])) else 0
        # Storey height (defaults to 3 when missing).
        _cg = row.getValue(parms["cg"]) if(row.getValue(parms["cg"])) else 3
        # Storey count (derived from total height / storey height when missing).
        _cs = int(row.getValue(parms["cs"])) if(row.getValue(parms["cs"])) else int(_zg / _cg)
        # Write the normalised total height back.
        row.setValue("zg", _zg)
        # Write the normalised storey height back.
        row.setValue("cg", _cg)
        # Write the normalised storey count back.
        row.setValue("cs", _cs)
        # Commit the row update.
        cursor.updateRow(row)
        print u"【{0} 总高度 {1} 米,层高 {2} 米,楼高 {3} 层】".format(_name, _zg, _cg, _cs)
        # Replicate the feature once per storey.
        # NOTE(review): AddField and InsertCursor are re-executed for every
        # source row; AddField on an already-existing "name" field would
        # normally fail from the second row on -- confirm intended.
        arcpy.AddField_management(newShp, "name", "TEXT", fieldPrecision, "", "", "名称", "NULLABLE")
        # Insert cursor for the output feature class.
        insert_cursor = arcpy.InsertCursor(newShp)
        # For each storey, insert a copy with the cumulative height.
        for x in range(0, _cs):
            # NOTE(review): rowX aliases row (no copy is made); the same row
            # object is mutated and inserted each iteration.
            rowX = row
            rowX.setValue("name", _name)
            rowX.setValue("cg", _cg)
            rowX.setValue("zg", (x+1) * _cg)
            rowX.setValue("cs", (x+1))
            insert_cursor.insertRow(rowX)
            print u"总体进度 {0} / {1} ,正在为 {2} 要素生成楼栋体,当前楼栋进度:{3} / {4} ".format(feature_index,
                features_cnt, _name, x+1, _cs)
        feature_index = feature_index + 1
    return
msRowNameDict = {} msRowDescDict = {} msRowMapUnitDict = {} for i in range(len(msRows)): sType = styleType(msRows[i][4]) if sType == 'Unit': msRowLabelDict[msRows[i][0]] = i msRowMapUnitDict[msRows[i][0]] = i if sType == 'Heading': msRowNameDict[msRows[i][1]] = i if sType == 'Headnote': msRowDescDict[msRows[i][3][0:20]] = i # open updateCursor on DMU table msRowsMatched = [] dmuRows = arcpy.UpdateCursor(gdb + '/DescriptionOfMapUnits') # step through DMU table, trying to match label or Name or Description # if DMU row matches msRow, append msRow number to msRowsMatched addMsgAndPrint('Updating any matching rows in DescriptionOfMapUnits') i = 1 for row in dmuRows: matchRow = -1 addMsgAndPrint(row.Label) if row.Label != '' and row.Label in msRowLabelDict: matchRow = msRowLabelDict[row.Label] elif row.Name != '' and row.Name in msRowNameDict: matchRow = msRowNameDict[row.Name] elif row.MapUnit != '' and row.MapUnit in msRowMapUnitDict: matchRow = msRowNameDict[row.MapUnit] elif row.Description != None and row.Description != '' and row.Description[ 0:20] in msRowDescDict:
for i in xrange( 0, length + NUM, NUM): # assuming units are in meters for feature spatial reference point = row[0].positionAlongLine(i) points.append(point) j = j + 1 GullyIDList.append(j) arcpy.CopyFeatures_management(points, PointsDir) k = 0 for row in arcpy.da.SearchCursor(PolylineDir, ["ID"]): # [NumOfPoints,GullyID] GullyIDList[k] = [GullyIDList[k], row[0]] k = k + 1 arcpy.AddField_management(PointsDir, "Gully_ID", "LONG", 3, "", "", "", "NULLABLE") cursor = arcpy.UpdateCursor(PointsDir) #the ID of points k1 = 0 #the ID of the gully points belong to k2 = 0 k = GullyIDList[k2][0] arcpy.AddMessage(GullyIDList) for row in cursor: k1 = k1 + 1 if k1 > k: #update k2, the gully ID k2 = k2 + 1 #update k k = GullyIDList[k2][0] + k row.setValue("Gully_ID", GullyIDList[k2][1]) cursor.updateRow(row)
def _categorize_feature_class(
    self,
    feature_class,
    raster,
    class_value_field,
    class_name_field,
    confidence_field,
    cell_size,
    coordinate_system,
    predict_function,
    batch_size,
    overwrite
):
    """Classify every polygon of *feature_class* with this model and write
    the results into attribute fields.

    Two modes:
      * ``raster`` given  -- one image chip per polygon is exported with
        ExportTrainingDataForDeepLearning and classified in batches.
      * ``raster`` is None -- images are read from the feature class's
        attachment table (``<name>__ATTACH``) and ``predict_function``
        reduces the per-attachment predictions to one class per feature.

    ``class_value_field`` (LONG), ``class_name_field`` (TEXT) and, when
    given, ``confidence_field`` (DOUBLE) are created on the feature class;
    pre-existing fields are only replaced when ``overwrite`` is True.

    Returns True on completion.  Raises Exception when the input is not a
    polygon feature class or an output field exists and overwrite is False.
    """
    import arcpy
    arcpy.env.overwriteOutput = overwrite

    # Fall back to model defaults when the caller did not choose.
    if batch_size is None:
        batch_size = self._data.batch_size
    if predict_function is None:
        predict_function = _prediction_function

    # Normalisation constants (ImageNet statistics) as tensors.
    norm_mean = torch.tensor(imagenet_stats[0])
    norm_std = torch.tensor(imagenet_stats[1])

    fcdesc = arcpy.Describe(feature_class)
    oid_field = fcdesc.OIDFieldName
    if not (fcdesc.dataType == 'FeatureClass' and fcdesc.shapeType == 'Polygon'):
        e = Exception(f"The specified FeatureClass at '{feature_class}' is not valid, it should be Polygon FeatureClass")
        raise(e)

    fields = arcpy.ListFields(feature_class)
    field_names = [f.name for f in fields]

    # (Re)create the output fields, honouring `overwrite`.
    if class_value_field in field_names:
        if not overwrite:
            e = Exception(f"The specified class_value_field '{class_value_field}' already exists in the target FeatureClass, please specify a different name or set `overwrite=True`")
            raise(e)
        arcpy.DeleteField_management(feature_class, [class_value_field])
    arcpy.AddField_management(feature_class, class_value_field, "LONG")

    if class_name_field in field_names:
        if not overwrite:
            e = Exception(f"The specified class_name_field '{class_name_field}' already exists in the target FeatureClass, please specify a different name or set `overwrite=True`")
            raise(e)
        arcpy.DeleteField_management(feature_class, [class_name_field])
    arcpy.AddField_management(feature_class, class_name_field, "TEXT")

    if confidence_field is not None:
        if confidence_field in field_names:
            if not overwrite:
                e = Exception(f"The specified confidence_field '{confidence_field}' already exists in the target FeatureClass, please specify a different name or set `overwrite=True`")
                raise(e)
            arcpy.DeleteField_management(feature_class, [confidence_field])
        arcpy.AddField_management(feature_class, confidence_field, "DOUBLE")

    if raster is not None:
        # Arcpy Environment to export data
        arcpy.env.cellSize = cell_size
        arcpy.env.outputCoordinateSystem = coordinate_system
        arcpy.env.cartographicCoordinateSystem = coordinate_system

        # Pick a temporary-id field name that does not collide with any
        # existing field.
        tempid_field = _tempid_field = 'f_fcuid'
        i = 1
        while tempid_field in field_names:
            tempid_field = _tempid_field + str(i)
            i += 1
        arcpy.AddField_management(feature_class, tempid_field, "LONG")
        # Stamp each polygon with its OID so exported chips can be traced
        # back to their source feature.
        arcpy.CalculateField_management(feature_class, tempid_field, f"!{oid_field}!")

        temp_folder = arcpy.env.scratchFolder
        temp_datafldr = os.path.join(temp_folder, 'categorize_features_' + str(int(time.time())))
        result = arcpy.ia.ExportTrainingDataForDeepLearning(
            in_raster=raster,
            out_folder=temp_datafldr,
            in_class_data=feature_class,
            image_chip_format="TIFF",
            tile_size_x=self._data.chip_size,
            tile_size_y=self._data.chip_size,
            stride_x=0,
            stride_y=0,
            output_nofeature_tiles="ALL_TILES",
            metadata_format="Labeled_Tiles",
            start_index=0,
            class_value_field=tempid_field,
            buffer_radius=0,
            in_mask_polygons=None,
            rotation_angle=0
        )

        # cleanup
        arcpy.DeleteField_management(feature_class, [tempid_field])

        image_list = ImageList.from_folder(os.path.join(temp_datafldr, 'images'))

        def get_id(imagepath):
            # The Labeled_Tiles XML label stores the temp id in its
            # <name> tag; the label file shares the chip's basename.
            with open(os.path.join(temp_datafldr, 'labels', os.path.basename(imagepath)[:-3] + 'xml')) as f:
                return(int(f.read().split('<name>')[1].split('<')[0]))

        for i in range(0, len(image_list), batch_size):
            # Get Temporary Ids
            tempids = [get_id(f) for f in image_list.items[i:i + batch_size]]

            # Get Image batch
            image_batch = torch.stack([im.data for im in image_list[i:i + batch_size]])
            image_batch = normalize(image_batch, mean=norm_mean, std=norm_std)

            # Get Predictions
            predicted_classes, predictions_conf = self._predict_batch(image_batch)

            # Update Feature Class
            where_clause = f"{oid_field} IN ({','.join(str(e) for e in tempids)})"
            update_cursor = arcpy.UpdateCursor(
                feature_class,
                where_clause=where_clause,
                sort_fields=f"{oid_field} A"
            )
            for row in update_cursor:
                row_tempid = row.getValue(oid_field)
                ui = tempids.index(row_tempid)
                classvalue = self._data.classes[predicted_classes[ui]]
                row.setValue(class_value_field, classvalue)
                row.setValue(class_name_field, self._data.class_mapping[classvalue])
                if confidence_field is not None:
                    row.setValue(confidence_field, predictions_conf[ui])
                update_cursor.updateRow(row)

            # Remove Locks
            del row
            del update_cursor

        # Cleanup
        arcpy.Delete_management(temp_datafldr)
        shutil.rmtree(temp_datafldr, ignore_errors=True)
    else:
        # Attachment mode: classify images stored in the feature class's
        # attachment table.
        feature_class_attach = feature_class + '__ATTACH'
        nrows = arcpy.GetCount_management(feature_class_attach)[0]
        store = {}
        for i in range(0, int(nrows), batch_size):
            attachment_ids = []
            rel_objectids = []
            image_batch = []
            # Get Image Batch
            # NOTE(review): the attachment table is re-scanned from the top
            # for every batch window (quadratic cursor reads) -- correct,
            # but slow on large tables.
            with arcpy.da.SearchCursor(feature_class_attach, ['ATTACHMENTID', 'REL_OBJECTID', 'DATA']) as search_cursor:
                for c, item in enumerate(search_cursor):
                    if c >= i and c < i + batch_size:
                        attachment_ids.append(item[0])
                        rel_objectids.append(item[1])
                        attachment = item[-1]
                        im = open_image(io.BytesIO(attachment.tobytes()))  # Read Bytes
                        im = im.resize(self._data.chip_size)  # Resize
                        image_batch.append(im.data)  # Convert to tensor
                        del item
                        del attachment
                        #del im
            image_batch = torch.stack(image_batch)
            image_batch = normalize(image_batch, mean=norm_mean, std=norm_std)

            # Get Predictions and save to store, keyed by the parent
            # feature's OID (REL_OBJECTID).
            predicted_classes, predictions_conf = self._predict_batch(image_batch)
            for ai in range(len(attachment_ids)):
                if store.get(rel_objectids[ai]) is None:
                    store[rel_objectids[ai]] = []
                store[rel_objectids[ai]].append([predicted_classes[ai], predictions_conf[ai]])

        # Update Feature Class
        update_cursor = arcpy.UpdateCursor(feature_class)
        for row in update_cursor:
            row_oid = row.getValue(oid_field)
            # NOTE(review): raises KeyError for a feature with no
            # attachments (no entry in `store`) -- confirm every feature
            # carries at least one attachment.
            max_prediction_class, max_prediction_value = predict_function(store[row_oid])
            if max_prediction_class is not None:
                classvalue = self._data.classes[max_prediction_class]
                classname = self._data.class_mapping[classvalue]
            else:
                # predict_function may abstain; clear both fields.
                classvalue = None
                classname = None
            row.setValue(class_value_field, classvalue)
            row.setValue(class_name_field, classname)
            if confidence_field is not None:
                row.setValue(confidence_field, max_prediction_value)
            update_cursor.updateRow(row)

        # Remove Locks
        del row
        del update_cursor

    return True
# We get the OBJECTID of the new first and end points from the UDPts to store it into the output shp. # This enable to get the NextDown and NextUp information ncurrentstep += 1 arcpy.AddMessage("Storing the new first point - Step " + str(ncurrentstep) + "/" + str(nstep)) FPt = arcpy.CopyFeatures_management(firstPt, "%ScratchWorkspace%\\FPt") EPt = arcpy.CopyFeatures_management(endPt, "%ScratchWorkspace%\\EPt") MakeFPt = arcpy.MakeFeatureLayer_management( FPt, "%ScratchWorkspace%\\MakeFPt", "", "", "ORIG_FID ORIG_FID VISIBLE NONE") MakeEPt = arcpy.MakeFeatureLayer_management( EPt, "%ScratchWorkspace%\\MakeEPt", "", "", "ORIG_FID ORIG_FID VISIBLE NONE") rowsOut = arcpy.UpdateCursor(InputFCMTS) lineOut = rowsOut.next() SelecStart = arcpy.SelectLayerByLocation_management( MakeUDPts, "ARE_IDENTICAL_TO", MakeFPt, "", "NEW_SELECTION") rowsStart = arcpy.SearchCursor(SelecStart) lineStart = rowsStart.next() lineOut.NextUpID = lineStart.OBJECTID lineOut.From_X = lineStart.POINT_X rowsOut.updateRow(lineOut) SelecEnd = arcpy.SelectLayerByLocation_management(MakeUDPts, "ARE_IDENTICAL_TO", MakeEPt, "", "NEW_SELECTION") rowsEnd = arcpy.SearchCursor(SelecEnd)
desc = arcpy.Describe(dirname) if hasattr(desc, "datasetType") and desc.datasetType == 'FeatureDataset': dirname = os.path.dirname(dirname) print "Directory Name: " + str(dirname) print "Description: " + str(desc) #dfcOutput = "DFC_RESULT" #dfcResult = arcpy.Describe(updateFeatures).catalogPath + "\\DFC_RESULT" #dfcOutput = arcpy.Describe(updateFeatures).catalogPath + "\\DFC_RESULT" dfcOutput = dirname + "\\DFC_WeberToWeber" print "begin converting nulls to emtpy" # convert nulls to empty in both the update fc and basefeatures fc list = [updateFeatures, baseFeatures] for item in list: rows = arcpy.UpdateCursor(item) for row in rows: if row.PREDIR == ' ' or row.PREDIR == None or row.PREDIR is None: row.PREDIR = "" if row.S_NAME == ' ' or row.S_NAME == None or row.S_NAME is None: row.S_NAME = "" if row.STREETTYPE == ' ' or row.STREETTYPE == None or row.STREETTYPE is None: row.STREETTYPE = "" if row.SUFDIR == ' ' or row.SUFDIR == None or row.SUFDIR is None: row.SUFDIR = "" if row.ALIAS == ' ' or row.ALIAS == None or row.ALIAS is None: row.ALIAS = "" if row.ACS_ALIAS == ' ' or row.ACS_ALIAS == None or row.ACS_ALIAS is None: row.ACS_ALIAS = "" if row.SUFFIX_911 == ' ' or row.SUFFIX_911 == None or row.SUFFIX_911 is None: row.SUFFIX_911 = ""
def ImportIntAtt(Intersections, TrafficControl, Routes, RouteID, BMP, EMP, AttTable, Fields, Output, OutputTable): def FindAngle(O, P): import math if P[0] == O[0]: if P[1] == O[1]: #arcpy.AddWarning(str(O) + str(P)) return 0 #1 else: if P[1] > O[1]: return 90 #2 if P[1] < O[1]: return 270 #3 else: if P[1] == O[1]: if P[0] > O[0]: return 0 #4 else: return 180 #5 else: if (P[0] - O[0]) > 0 and (P[1] - O[1]) > 0: return math.degrees( math.atan((P[1] - O[1]) / (P[0] - O[0]))) #6 elif (P[0] - O[0]) > 0 and (P[1] - O[1]) < 0: return 360 - math.degrees( math.atan(-(P[1] - O[1]) / (P[0] - O[0]))) #7 elif (P[0] - O[0]) < 0 and (P[1] - O[1]) > 0: return 180 - math.degrees( math.atan(-(P[1] - O[1]) / (P[0] - O[0]))) #8 elif (P[0] - O[0]) < 0 and (P[1] - O[1]) < 0: return 180 + math.degrees( math.atan((P[1] - O[1]) / (P[0] - O[0]))) def FindClosestPoint(PolylineList, IntPoint): n = len(PolylineList) Dist0 = ((PolylineList[0][0] - IntPoint[0])**2 + (PolylineList[0][1] - IntPoint[1])**2)**0.5 Distn = ((PolylineList[n - 1][0] - IntPoint[0])**2 + (PolylineList[n - 1][1] - IntPoint[1])**2)**0.5 if Dist0 <= Distn: return [PolylineList[0], PolylineList[1]] else: return [PolylineList[n - 1], PolylineList[n - 2]] Buffer = "80 Feet" Tolerance = "10 Feet" Int = common.CreateOutPath(MainFile=Output, appendix='Int', Extension='') arcpy.Intersect_analysis(in_features=Routes, out_feature_class=Int, join_attributes="ALL", cluster_tolerance="-1 Unknown", output_type="POINT") SPJ = common.CreateOutPath(MainFile=Output, appendix='SPJ', Extension='') arcpy.SpatialJoin_analysis(target_features=Int, join_features=Intersections, out_feature_class=SPJ, join_operation="JOIN_ONE_TO_ONE", join_type="KEEP_COMMON", match_option="CLOSEST", search_radius=Buffer, distance_field_name="") arcpy.DeleteIdentical_management(in_dataset=SPJ, fields=arcpy.Describe(SPJ).ShapeFieldName, xy_tolerance="", z_tolerance="0") OrgFields = [f.name for f in arcpy.ListFields(Intersections)] arcpy.DeleteField_management(SPJ, [ 
f.name for f in arcpy.ListFields(SPJ) if not f.required and not f.name in OrgFields ]) arcpy.SpatialJoin_analysis(target_features=SPJ, join_features=TrafficControl, out_feature_class=Output, join_operation="JOIN_ONE_TO_ONE", join_type="KEEP_COMMON", match_option="CLOSEST", search_radius=Buffer, distance_field_name="") OrgFields.extend(['TRAF_CONT', 'LEG_COUNT', 'PeerGroup_CH2M_TJM']) arcpy.DeleteField_management(Output, [ f.name for f in arcpy.ListFields(Output) if not f.required and not f.name in OrgFields ]) EventTable = common.CreateOutPath(MainFile=Output, appendix='EventTable', Extension='') arcpy.LocateFeaturesAlongRoutes_lr(in_features=Output, in_routes=Routes, route_id_field=RouteID, radius_or_tolerance=Tolerance, out_table=EventTable, out_event_properties=" ".join( [RouteID, "POINT", "MP"]), route_locations="ALL", in_fields="FIELDS", m_direction_offsetting="M_DIRECTON") # Milepost Correction EMPDict = { r.getValue('INVENTORY'): r.getValue('Shape').lastPoint.M for r in arcpy.SearchCursor(Routes) } r = 0 uc = arcpy.UpdateCursor(EventTable) for r in uc: inv = r.getValue('INVENTORY') MP = r.getValue('MP') if MP < 0: r.setValue('MP', 0) uc.updateRow(r) if MP > EMPDict[inv]: r.setValue('MP', EMPDict[inv]) uc.updateRow(r) del uc, r AllF = [f.name for f in arcpy.ListFields(AttTable)] MF = [f for f in Fields if not f in AllF] if not MF == []: print(str(MF) + ' not found in ' + AttTable) IRIS_Diss = common.CreateOutPath(MainFile=Output, appendix='diss', Extension='') arcpy.DissolveRouteEvents_lr( in_events=AttTable, in_event_properties=' '.join([RouteID, 'LINE', BMP, EMP]), dissolve_field=';'.join(Fields), out_table=IRIS_Diss, out_event_properties=' '.join([RouteID, 'LINE', BMP, EMP]), dissolve_type="DISSOLVE", build_index="INDEX") arcpy.OverlayRouteEvents_lr( in_table=EventTable, in_event_properties=' '.join([RouteID, 'POINT', 'MP']), overlay_table=IRIS_Diss, overlay_event_properties=' '.join([RouteID, 'LINE', BMP, EMP]), overlay_type="INTERSECT", 
out_table=OutputTable, out_event_properties=' '.join([RouteID, 'POINT', 'MP']), in_fields="FIELDS", build_index="INDEX") common.AddField(Output, [ fields_SC.intr.AADT_Major, fields_SC.intr.AADT_Minor, fields_SC.crash.ABuffer, fields_SC.crash.BBuffer ]) arcpy.AddField_management(OutputTable, 'ApprType', 'TEXT') #arcpy.AddField_management(OutputTable,'ApprDeg','Double') Approach = {r.getValue('SiteID'): [] for r in arcpy.SearchCursor(Output)} OID = arcpy.Describe(OutputTable).OIDFieldName for r in arcpy.SearchCursor(OutputTable): k = r.getValue('SiteID') if k in Approach.keys(): Approach[k].append({ 'OID': r.getValue(OID), 'INV': r.getValue('INVENTORY'), 'AADT': common.GetIntVal(r, 'AADT'), 'Lanes': common.GetIntVal(r, 'LNS', 2), 'Urban': r.getValue('URBAN'), 'SurfWid': common.GetFloatVal(r, 'SURF_WTH', 24), 'MedWid': common.GetFloatVal(r, 'MED_WTH') }) for k in Approach.keys(): AADT = [i['AADT'] for i in Approach[k]] INV = [i['INV'] for i in Approach[k]] major_i = AADT.index(max(AADT)) major_inv = INV[major_i] for i, appr in enumerate(Approach[k]): if appr['AADT'] == max(AADT) or appr['INV'] == major_inv: Approach[k][i].update({'ApprType': 'Major'}) else: Approach[k][i].update({'ApprType': 'Minor'}) UC = arcpy.UpdateCursor(OutputTable) for r in UC: k = r.getValue('SiteID') o = r.getValue(OID) Type = '' for appr in Approach[k]: if appr['OID'] == o: Type = appr['ApprType'] r.setValue('ApprType', Type) UC.updateRow(r) UC = arcpy.UpdateCursor(Output) for r in UC: k = r.getValue('SiteID') try: r.setValue( fields_SC.intr.AADT_Major['name'], max([ appr['AADT'] for appr in Approach[k] if appr['ApprType'] == 'Major' ])) except: r.setValue(fields_SC.intr.AADT_Major['name'], 0) try: r.setValue( fields_SC.intr.AADT_Minor['name'], max([ appr['AADT'] for appr in Approach[k] if appr['ApprType'] == 'Minor' ])) except: r.setValue(fields_SC.intr.AADT_Minor['name'], 0) try: W_Major = max([ appr['SurfWid'] + appr['MedWid'] for appr in Approach[k] if appr['ApprType'] == 'Major' ]) 
except: W_Major = 24 try: W_Minor = max([ appr['SurfWid'] + appr['MedWid'] for appr in Approach[k] if appr['ApprType'] == 'Minor' ]) except: W_Minor = 24 ABuffer = max(1.2 * (W_Major**2 + W_Minor**2)**0.5, 50) r.setValue(fields_SC.crash.ABuffer['name'], ABuffer) r.setValue(fields_SC.crash.BBuffer['name'], max(ABuffer, 250)) AADT = [i['AADT'] for i in Approach[k]] major_i = AADT.index(max(AADT)) LaneMajor = [i['Lanes'] for i in Approach[k]][0] UC.updateRow(r) arcpy.Delete_management(Int) arcpy.Delete_management(EventTable) arcpy.Delete_management(SPJ) arcpy.Delete_management(IRIS_Diss)
# Finish configuring the field maps: the NHD id field keeps the name
# 'NHD_ID', and the Strahler order takes the maximum of joined candidates.
fmid_name.name = 'NHD_ID'
fmid_name.aliasName = 'NHD_ID'
fmid.outputField = fmid_name
fmstrahler.mergeRule = 'Max'
fms.addFieldMap(fmcon)
fms.addFieldMap(fmid)
fms.addFieldMap(fmstrahler)

# Spatially join the drainage points onto the CSI lakes using the field
# mappings assembled above.
join = "%s" % (basename)
arcpy.SpatialJoin_analysis(csilakes, "drainpts", join, '', '', fms)

# Assign Headwater Lakes a value of zero in the Strahler field.
hwfield = "Strahler"
cursor = arcpy.UpdateCursor(join, """"Connection" = 'Headwater'""")
for row in cursor:
    # Change to zero
    row.setValue(hwfield, 0)
    cursor.updateRow(row)
# Release the cursor's lock before opening the next one.
del row
del cursor

# Assign Isolated Lakes a value of -3 in the Strahler field.
seepfield = "Strahler"
cursor = arcpy.UpdateCursor(join, """"Connection" = 'Isolated'""")
for row in cursor:
    # Change to neg 3
    row.setValue(seepfield, -3)
    cursor.updateRow(row)