def reportExtensionForQCGDB(singlePartPointErrors, singlePartLineErrors):
    """Append single-part error counts to the errors report CSV.

    When BOTH the single-part point and line error feature classes exist,
    writes one "<featureClassName>, <count>" row for each to the end of the
    module-level errorReportCSV file. When either is missing, nothing is
    written.

    singlePartPointErrors -- path to the single-part point errors feature class
    singlePartLineErrors  -- path to the single-part line errors feature class
    """
    # Get a count for the singlepart features (if they exist)
    # and append the count data to the end of the errorReportCSV.
    if Exists(singlePartPointErrors) and Exists(singlePartLineErrors):
        singlePartPointFeaturesName = returnFeatureClass(singlePartPointErrors)
        singlePartPointErrorsResult = GetCount_management(singlePartPointErrors)
        singlePartPointErrorsCount = int(
            singlePartPointErrorsResult.getOutput(0))
        singlePartLineFeaturesName = returnFeatureClass(singlePartLineErrors)
        singlePartLineErrorsResult = GetCount_management(singlePartLineErrors)
        singlePartLineErrorsCount = int(
            singlePartLineErrorsResult.getOutput(0))
        try:
            # Append mode so earlier report sections are preserved.
            with open(errorReportCSV, 'a') as fHandle:
                fHandle.write(singlePartPointFeaturesName + ', ' +
                              str(singlePartPointErrorsCount) + '\n')
                fHandle.write(singlePartLineFeaturesName + ', ' +
                              str(singlePartLineErrorsCount) + '\n')
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit are
        # not silently swallowed; file problems surface as IOError/OSError.
        except (IOError, OSError):
            print("There was an error writing to the file.")
    else:
        print("The Single Part output was not found.")
        print(
            "Will not add the Single Part information to the errors report csv."
        )
def routesSourceCreation():
    """Build the routes source centerlines feature class.

    Copies inputCenterlines to routesSourceCenterlines (replacing any
    previous output), adds the SourceRouteId / SourceFromMeasure /
    SourceToMeasure fields, then runs the key calculation, ramp replacement,
    and local route reduction steps. All inputs and outputs are module-level
    path/flag names.
    """
    env.workspace = returnGDBOrSDEPath(routesSourceCenterlines)
    env.overwriteOutput = 1
    # Remove a pre-existing output so the copy below starts clean.
    if Exists(routesSourceCenterlines):
        Delete_management(routesSourceCenterlines)
    # Create a new file for the output.
    print("Making a copy of " + returnFeatureClass(inputCenterlines) +
          " called " + returnFeatureClass(routesSourceCenterlines) + ".")
    CopyFeatures_management(inputCenterlines, routesSourceCenterlines)
    print("Adding fields to " +
          returnFeatureClass(routesSourceCenterlines) + ".")
    #Addfields:
    # SourceRouteId (Text, 50)
    AddField_management(routesSourceCenterlines, "SourceRouteId", "TEXT", "",
                        "", 50, "SourceRouteId", nullable)
    # SourceFromMeasure (Double)
    AddField_management(routesSourceCenterlines, "SourceFromMeasure",
                        "DOUBLE", "", "", "", "SourceFromMeasure", nullable)
    # SourceToMeasure (Double)
    AddField_management(routesSourceCenterlines, "SourceToMeasure", "DOUBLE",
                        "", "", "", "SourceToMeasure", nullable)
    # Idiom fix: test the flag's truthiness instead of comparing '== True'.
    if useNewFieldLogic:
        KDOTKeyCalculation_NewFieldLogic()
    else:
        TranscendFieldCalculation()
    TranscendRampReplacement()
    # NOTE(review): the key calculation is run a second time here, presumably
    # so the features appended by TranscendRampReplacement() also get keyed —
    # confirm this duplication is intentional.
    if useNewFieldLogic:
        KDOTKeyCalculation_NewFieldLogic()
    else:
        TranscendFieldCalculation()
    LocalRouteReduction()
def moveLocalErrorsToSQL(prefixKeyName):
    """Copy error feature classes from the local GDB to the SQL/SDE workspace.

    For each feature class in the module-level prefixesToMoveErrorsFor list,
    deletes any existing copy in the SDE workspace and copies the local FC
    there under the name '<prefixKeyName>_<baseName>'. Feature classes whose
    base name contains 'selfintclassification' already embed the prefix, so
    they keep their base name unchanged.

    prefixKeyName -- prefix used to namespace the copied feature classes
    """
    print("Moving errors from the Local gdb to SQL for the prefix key name of: "
          + str(prefixKeyName) + ".")
    for errorItemFC in prefixesToMoveErrorsFor:
        errorItemFCBaseName = returnFeatureClass(errorItemFC)
        sqlPath = returnGDBOrSDEPath(dailyProcessSDESourceCenterlinesLocation)
        errorItemCopyName = prefixKeyName + '_' + errorItemFCBaseName
        errorItemSQLLocation = os.path.join(sqlPath, errorItemCopyName)
        lowerstrErrorItemFCBaseName = str(errorItemFCBaseName).lower()
        # 'in' replaces the clunkier find(...) >= 0 idiom.
        if 'selfintclassification' in lowerstrErrorItemFCBaseName:
            # Already includes the prefix name in the basename, so just use
            # the full BaseName for the class.
            errorItemSQLLocation = os.path.join(sqlPath, errorItemFCBaseName)
        if Exists(errorItemSQLLocation):
            try:
                Delete_management(errorItemSQLLocation)
            # Narrowed from a bare except; deletion is best-effort because
            # the copy below reports its own failure.
            except Exception:
                print("Could not delete the FC at: " +
                      str(errorItemSQLLocation) + ".")
                print("Please make sure that the FC does not have any locks on it and try again.")
        try:
            CopyFeatures_management(errorItemFC, errorItemSQLLocation)
        # Narrowed from a bare except.
        except Exception:
            print("Could not copy from the FC at: " + str(errorItemFC))
            print("to the FC at: " + str(errorItemSQLLocation) + ".")
            print("Please make sure that the FC to copy from exists")
            print("and that the FC to copy to is not locked.")
def reportExtensionForRAndHCheck(featuresToCheck):
    """Append Roads & Highways non-monotonic check results to the report CSV.

    When featuresToCheck exists, counts its features (via a temporary feature
    layer) and appends a header row plus a "<name>, <count>" row to the
    module-level errorReportCSV. Otherwise only reports that nothing was
    found.

    featuresToCheck -- path to the R&H non-monotonicity check output
    """
    if Exists(featuresToCheck):
        featuresName = returnFeatureClass(featuresToCheck)
        errorsFromRAndH = 'RAndHErrorsAsFeatureLayer'
        MakeFeatureLayer_management(featuresToCheck, errorsFromRAndH)
        errorsFromRAndHResult = GetCount_management(errorsFromRAndH)
        errorsFromRAndHCount = int(errorsFromRAndHResult.getOutput(0))
        print("Roads & Highways Non-Monotonic Check output was found.")
        print(
            "Extending the errors report with information from the Roads & Highways Non-Monotonicity Check."
        )
        # Append mode so earlier report sections are preserved.
        with open(errorReportCSV, 'a') as fHandle:
            fHandle.write('\n' + 'Roads & Highways checks follow: ' + '\n')
            fHandle.write(featuresName + ', ' + str(errorsFromRAndHCount) +
                          '\n')
        # (Removed a block of long-dead commented-out code that used to add
        # ReviewUser/ReviewInfo fields to the error output feature class.)
    else:
        print("No Roads & Highways Non-Monotonic Check output found.")
        print("Will not add additional information to the errors report csv.")
def KDOTKeyCalculation_Modified():
    """Populate SourceRouteId and the source measure fields by route class.

    Runs three selection/field-calculate passes on a feature layer made from
    routesSourceCenterlines:
      1. State routes (prefix I/U/K): key from StateKey1, measures from
         STATE_BEGIN_MP / STATE_END_MP.
      2. Non-state routes with an LRSKEY: key from LRSKEY, measures from
         NON_STATE_BEGIN_MP / NON_STATE_END_MP.
      3. County routes ('C') whose key does not already end in 'W0': key
         rebuilt as the first 11 characters of LRSKEY plus 'W0' (VB
         expression).
    Order matters: pass 3 overwrites the key assigned in pass 2 for the
    selected 'C' routes.
    """
    # Until the KDOT process is included here,
    # this defaults to the Transcend process.
    #FieldPopulation with selections and FieldCalculate:
    MakeFeatureLayer_management(routesSourceCenterlines, featureLayer)
    tempDesc = Describe(featureLayer)
    print("Calculating values for new LRS and measure fields in " +
          returnFeatureClass(tempDesc.catalogPath) + ".")
    try:
        del tempDesc
    except:
        pass
    # Select LRS_ROUTE_PREFIX IN ('I', 'U', 'K')
    selectionQuery = """ "LRS_ROUTE_PREFIX" IN ('I', 'U', 'K') """
    SelectLayerByAttribute_management(featureLayer, "NEW_SELECTION",
                                      selectionQuery)
    # SourceRouteId = StateKey1
    CalculateField_management(featureLayer, "SourceRouteId", "!StateKey1!",
                              "PYTHON_9.3")
    # SourceFromMeasure = STATE_BEGIN_MP
    CalculateField_management(featureLayer, "SourceFromMeasure",
                              "!STATE_BEGIN_MP!", "PYTHON_9.3")
    # SourceToMeasure = STATE_END_MP
    CalculateField_management(featureLayer, "SourceToMeasure",
                              "!STATE_END_MP!", "PYTHON_9.3")
    # Select LRS_ROUTE_PREFIX NOT IN ('I', 'U', 'K') AND LRSKEY IS NOT NULL
    selectionQuery = """ "LRS_ROUTE_PREFIX" NOT IN ('I', 'U', 'K') AND "LRSKEY" IS NOT NULL """
    SelectLayerByAttribute_management(featureLayer, "NEW_SELECTION",
                                      selectionQuery)
    # SourceRouteId = LRSKEY
    CalculateField_management(featureLayer, "SourceRouteId", "!LRSKEY!",
                              "PYTHON_9.3")
    # SourceFromMeasure = NON_STATE_BEGIN_MP
    CalculateField_management(featureLayer, "SourceFromMeasure",
                              "!NON_STATE_BEGIN_MP!", "PYTHON_9.3")
    # SourceToMeasure = NON_STATE_END_MP
    CalculateField_management(featureLayer, "SourceToMeasure",
                              "!NON_STATE_END_MP!", "PYTHON_9.3")
    # Select LRS_ROUTE_PREFIX IN ('C') AND LRSKEY NOT LIKE '%W0'
    selectionQuery = """ "LRS_ROUTE_PREFIX" IN ('C') AND "LRSKEY" NOT LIKE '%W0' """
    SelectLayerByAttribute_management(featureLayer, "NEW_SELECTION",
                                      selectionQuery)
    # SourceRouteID = left([LRSKEY], 11) & "W0"  # This is the VB version.
    # Python version would be calcExpression1 = "!LRSKEY![0:11] + 'W0'"
    # NOTE(review): field name is spelled 'SourceRouteID' here vs
    # 'SourceRouteId' above — arcpy field names are case-insensitive, so
    # presumably these hit the same field; confirm.
    calcExpression1 = 'Left([LRSKEY] ,11 ) & "W0"'
    CalculateField_management(featureLayer, "SourceRouteID", calcExpression1,
                              "VB")
def localRouteNumbering():
    """Assign LRS key sub-part numbers to local route features.

    For each distinct county code found in KDOT_COUNTY_L/KDOT_COUNTY_R,
    selects that county's local-route features, and for each unique LABEL
    walks the features in spatially-sorted order, writing:
      LRS_ROUTE_NUM      -- per-label number within the county (5 chars)
      LRS_UNIQUE_IDENT   -- hundreds part of the gap counter
      LRS_UNIQUE_IDENT1  -- ones/tens part of the gap counter (2 chars)
    The gap counter increments whenever consecutive sorted features do not
    share an endpoint within currentXYTolerance.

    Relies on module-level names: routeFeaturesFC, daSearchCursor,
    daUpdateCursor, addDistanceAndSort, polylineStartEndPointsMatch,
    currentXYTolerance.
    """
    tempDesc = Describe(routeFeaturesFC)
    print("Calculating LRS Key sub-parts for features in " +
          returnFeatureClass(tempDesc.catalogPath) + ".")
    OIDFieldName = tempDesc.OIDFieldName
    try:
        del tempDesc
    except:
        pass
    # ReAdd the fields that we're interested in, in the correct order.
    # sortPointDist will be appended later, making it [-1] and 'LRS_UNIQUE_IDENT1' will be [-2].
    fieldsToUse = [
        OIDFieldName, 'SHAPE@', 'LABEL', 'LRS_ROUTE_PREFIX', 'LRS_ROUTE_NUM',
        'LRS_UNIQUE_IDENT', 'LRS_UNIQUE_IDENT1'
    ]
    currentFields = fieldsToUse
    # Index of the geometry token ('SHAPE@') within each cursor row.
    shapeTokenPosition = 1
    # Collect the distinct county codes from both sides of the road.
    uniqueCountyCodeDict = dict()
    countyCodeFieldsList = ['KDOT_COUNTY_L', 'KDOT_COUNTY_R']
    newCursor = daSearchCursor(routeFeaturesFC, countyCodeFieldsList)
    for cursorRow in newCursor:
        uniqueCountyCodeDict[str(cursorRow[0])] = 1
        uniqueCountyCodeDict[str(cursorRow[1])] = 1
    try:
        del newCursor
    except:
        pass
    uniqueCountyCodeList = list()
    for uniqueCountyCode in uniqueCountyCodeDict.keys():
        uniqueCountyCodeList.append(uniqueCountyCode)
    try:
        del uniqueCountyCodeDict
    except:
        pass
    # Null county codes were stringified to 'None' above; drop that entry.
    try:
        uniqueCountyCodeList.remove('None')
    except:
        pass
    sortedUniqueCountyCodes = sorted(
        uniqueCountyCodeList
    )  # No need to specify a key since it's one column.
    for uniqueCountyCodeItem in sortedUniqueCountyCodes:
        print('Selecting features based on countyCode: ' +
              str(uniqueCountyCodeItem) + '.')
        # NOTE(review): routeFeaturesList is assigned but never used.
        routeFeaturesList = list()
        uniqueLabelDict = dict()
        # This should include a check for LRS_PREFIX = 'L' when the KDOT_LRS_KEY IS NULL, instead of taking everything that has a NULL
        # KDOT LRS_KEY. Need another parenthesis to group the condition inside the current parenthesis.
        selectionQuery1 = """ KDOT_COUNTY_L = '""" + str(
            uniqueCountyCodeItem
        ) + """' AND ((KDOT_LRS_KEY IS NULL AND LRS_PREFIX = 'L') OR KDOT_LRS_KEY LIKE '%L%') """
        selectionQuery2 = """ KDOT_COUNTY_L IS NULL AND KDOT_COUNTY_R = '""" + str(
            uniqueCountyCodeItem
        ) + """' AND ((KDOT_LRS_KEY IS NULL AND LRS_PREFIX = 'L') OR KDOT_LRS_KEY LIKE '%L%') """
        labelField = ['LABEL']
        # Gather the distinct LABEL values for this county: left-side match
        # first, then right-side-only match.
        newCursor = daSearchCursor(routeFeaturesFC, labelField,
                                   selectionQuery1)
        for cursorRow in newCursor:
            uniqueLabelDict[str(cursorRow[0])] = 1
        try:
            del newCursor
        except:
            pass
        newCursor = daSearchCursor(routeFeaturesFC, labelField,
                                   selectionQuery2)
        for cursorRow in newCursor:
            uniqueLabelDict[str(cursorRow[0])] = 1
        try:
            del newCursor
        except:
            pass
        countyLocalNumber = 0
        # Narrow the features that are looked at further.
        ### Change this to just give you the features instead of ###
        ### cursoring them back out. ###
        ### Figure out a way to create dicts/lists that store the features
        ### in the way that you want them instead of having to run another
        ### separate pair of selects after this.
        for uniqueLabelKey in uniqueLabelDict.keys():
            # NOTE(review): 'is not' compares identity, not equality — this
            # only skips 'None' when string interning makes the objects
            # identical. Should probably be != 'None'; left as-is here.
            if uniqueLabelKey is not 'None':
                # Labels with single quotes cause problems in selections.
                if str.find(uniqueLabelKey, "'") > -1:
                    # So, escape them by replacing individual single quotes with double single quotes.
                    uniqueLabelKey = str.replace(uniqueLabelKey, "'", "''")
                else:
                    pass
                print("Using the LABEL field value of: " +
                      str(uniqueLabelKey) + ".")
                countyLocalNumber += 1
                selectionQuery3 = selectionQuery1 + """ AND LABEL = '""" + str(
                    uniqueLabelKey) + """' """
                selectionQuery4 = selectionQuery2 + """ AND LABEL = '""" + str(
                    uniqueLabelKey) + """' """
                labeledRouteFeaturesList = list()
                firstCounter = 0
                newCursor = daSearchCursor(routeFeaturesFC, currentFields,
                                           selectionQuery3)
                for cursorRow in newCursor:
                    firstCounter += 1
                    labeledRouteFeaturesList.append(list(cursorRow))
                try:
                    del newCursor
                except:
                    pass
                print("FirstCounter found : " + str(firstCounter) +
                      " segments.")
                secondCounter = 0
                newCursor = daSearchCursor(routeFeaturesFC, currentFields,
                                           selectionQuery4)
                for cursorRow in newCursor:
                    secondCounter += 1
                    labeledRouteFeaturesList.append(list(cursorRow))
                try:
                    del newCursor
                except:
                    pass
                print("SecondCounter found : " + str(secondCounter) +
                      " segments.")
                # Appends a sort distance as each row's last token and orders
                # the rows spatially.
                sortedLabeledRouteFeaturesList = addDistanceAndSort(
                    labeledRouteFeaturesList, shapeTokenPosition)
                del labeledRouteFeaturesList
                labelUniqueNumber = 0
                previousFeatureGeom = None
                outputFeaturesDict = dict()
                for sortedLabeledRouteFeatureItem in sortedLabeledRouteFeaturesList:
                    if previousFeatureGeom == None:
                        # This is the first feature of this label.
                        # Don't need to check for incrementing the labelUniqueNumber.
                        # Just assign the current countyLocalNumber to this feature.
                        # Then, set the previousFeatureGeom to this feature's shape.
                        # NOTE(review): unlike the else branch, this branch
                        # never writes the [-2]/[-3]/[-4] field values —
                        # confirm the first feature of a label is keyed
                        # elsewhere, or this is a gap.
                        previousFeatureGeom = sortedLabeledRouteFeatureItem[
                            shapeTokenPosition]
                    else:
                        # Check to see if this feature's firstpoint or lastpoint are
                        # a match for the previous feature's firstpoint or lastpoint.
                        thisFeatureGeom = sortedLabeledRouteFeatureItem[
                            shapeTokenPosition]
                        ## This part needs work because it always fails. :(.
                        ## Create a function to check the arrays for relevant matching values instead.
                        ## And let it "match" when there are points that are within the feature tolerance
                        ## of one another. The non-matching is most likely a problem with floating point
                        ## math and equalities. -- See videolog lookup C# for ideas, if needed.
                        # Change this to look at math.abs(firstPoint.X - other.firstPoint.X) < 2*Epsilon,
                        # and math.abs(firstPoint.Y - other.firstPoint.Y) < 2*Epsilon
                        # Since each Point is a Python object and they won't have the same
                        # identity in Python when it performs a comparison on them.
                        # The only time that this will work correctly is when you have
                        # two variable names referencing the same object
                        # (in the same memory location).
                        if polylineStartEndPointsMatch(
                                thisFeatureGeom, previousFeatureGeom,
                                currentXYTolerance) == True:
                            # The feature is contiguous without a gap. The labelUniqueNumber doesn't need to be incremented.
                            # Assign the county code as it's routeNumber and the labelUniqueNumber as its unique ID.
                            pass
                        else:
                            # If not, increment the labelUniqueNumber by 1
                            # prior to assignment on this feature.
                            labelUniqueNumber += 1
                        # If greater than 99, then you have to split it so that part of it goes into LRS_UNIQUE_IDENT
                        # and part of it goes into LRS_UNIQUE_IDENT1.
                        if labelUniqueNumber > 99:
                            onesAndTens = labelUniqueNumber % 100
                            # NOTE(review): under Python 3 '/' yields a float
                            # ('1.0' after str()); use '//' if this ever runs
                            # on Py3. Fine under Python 2 int division.
                            hundredsAndThousands = labelUniqueNumber / 100
                            pass
                        else:
                            onesAndTens = labelUniqueNumber
                            hundredsAndThousands = 0
                        sortedLabeledRouteFeatureItem[-2] = str(
                            onesAndTens).zfill(2)  ## 2 chars
                        sortedLabeledRouteFeatureItem[-3] = str(
                            hundredsAndThousands).zfill(
                                1
                            )  ## 2 chars # Should only be 1char, but /shrug
                        sortedLabeledRouteFeatureItem[-4] = str(
                            countyLocalNumber).zfill(5)  ## 5 chars
                        # Then, set the previousFeatureGeom to this feature's shape.
                        previousFeatureGeom = sortedLabeledRouteFeatureItem[
                            shapeTokenPosition]
                    print(
                        "Adding a feature to the outputFeaturesDict with a countyLocalNumber of: "
                        + str(countyLocalNumber) +
                        " and a labelUniqueNumber of: " +
                        str(labelUniqueNumber) + ".")
                    # Keyed by OID; [:-1] strips the appended sort distance so
                    # the row again matches currentFields for updateRow below.
                    outputFeaturesDict[sortedLabeledRouteFeatureItem[
                        0]] = sortedLabeledRouteFeatureItem[:-1]
                # Write the computed rows back for both selections.
                newCursor = daUpdateCursor(routeFeaturesFC, currentFields,
                                           selectionQuery3)
                for cursorRow in newCursor:
                    if cursorRow[0] in outputFeaturesDict.keys():
                        newCursor.updateRow(outputFeaturesDict[cursorRow[0]])
                try:
                    del newCursor
                except:
                    pass
                newCursor = daUpdateCursor(routeFeaturesFC, currentFields,
                                           selectionQuery4)
                for cursorRow in newCursor:
                    if cursorRow[0] in outputFeaturesDict.keys():
                        newCursor.updateRow(outputFeaturesDict[cursorRow[0]])
                try:
                    del newCursor
                except:
                    pass
                try:
                    del sortedLabeledRouteFeaturesList
                except:
                    pass
            else:
                pass
    #Cleanup
    # NOTE(review): the deletion attempt is duplicated; harmless (the bare
    # except absorbs the second NameError) but redundant.
    try:
        del sortedLabeledRouteFeaturesList
    except:
        pass
    try:
        del sortedLabeledRouteFeaturesList
    except:
        pass
def main():
    """Calculate the target LRS key component fields.

    Ensures the three target key fields exist on fcToCalculateTargetKeysIn,
    then maps each LRS route prefix letter (from SourceRouteId position 4 or
    LRS_ROUTE_PREFIX) to its numeric target code, copies the route number,
    seeds LRS_UNIQUE_IDENT from the target unique id, and finally runs the
    ramp/duplicate/concatenation post-processing steps. All layer and field
    names are module-level.
    """
    tempDesc = Describe(fcToCalculateTargetKeysIn)
    print("Parsing the LRS values in " +
          returnFeatureClass(tempDesc.catalogPath) +
          " to figure out what the LRS_ROUTE_PREFIX should be.")
    currentFieldObjects = tempDesc.fields
    try:
        del tempDesc
    except:
        pass
    currentFieldNames = [x.name for x in currentFieldObjects]
    # The three near-identical add-field-if-missing branches are folded into
    # one data-driven loop: (fieldName, textLength), in the original order.
    fieldsToEnsure = [(targetRouteNumChanged, 10), (targetCountyLRSKey, 13),
                      (targetStateLRSKey, 9)]
    for targetFieldName, targetFieldLength in fieldsToEnsure:
        if targetFieldName not in currentFieldNames:
            AddField_management(fcToCalculateTargetKeysIn, targetFieldName,
                                "TEXT", "", "", targetFieldLength,
                                targetFieldName, nullable)
            print("The " + str(targetFieldName) + " field was added to the " +
                  str(fcToCalculateTargetKeysIn) + " layer.")
        else:
            print("The " + str(targetFieldName) +
                  " field will not be added to the " +
                  str(fcToCalculateTargetKeysIn) +
                  " layer, because it already exists.")
    MakeFeatureLayer_management(fcToCalculateTargetKeysIn,
                                targetKeyCalculationLayer)
    routePrefixTarget = "ROUTE_PREFIX_TARGET"
    lrsRouteNumSource = "LRS_ROUTE_NUM"
    lrsRouteNumTarget = "LRS_ROUTE_NUM_TARGET"
    lrsUniqueSource = "LRS_UNIQUE_IDENT"
    ##lrsUniqueSource2 = "LRS_UNIQUE_IDENT1"
    lrsUniqueTarget = "LRS_UNIQUE_TARGET"
    # County source to county target.
    # The twelve near-identical select/calculate blocks are replaced by one
    # ordered (prefixLetter, numericCode) mapping; each pass selects rows
    # whose SourceRouteId has the letter in position 4 or whose
    # LRS_ROUTE_PREFIX matches, and stamps the numeric target code.
    prefixCodePairs = [('I', '1'), ('U', '2'), ('K', '3'), ('X', '4'),
                       ('R', '5'), ('M', '5'), ('C', '5'), ('L', '6'),
                       ('P', '7'), ('A', '8'), ('O', '8'), ('Q', '8')]
    for prefixLetter, prefixCode in prefixCodePairs:
        selectionQuery = (""" SourceRouteId LIKE '___""" + prefixLetter +
                          """%' OR LRS_ROUTE_PREFIX = '""" + prefixLetter +
                          """' """)
        SelectLayerByAttribute_management(targetKeyCalculationLayer,
                                          "NEW_SELECTION", selectionQuery)
        CalculateField_management(targetKeyCalculationLayer,
                                  routePrefixTarget, "'" + prefixCode + "'",
                                  "PYTHON_9.3")
    # For every Prefix:
    # NOTE(review): the where clause is ignored by CLEAR_SELECTION, and
    # SWITCH after a clear selects everything — presumably intentional so
    # the two calculations below apply to all features; confirm.
    SelectLayerByAttribute_management(targetKeyCalculationLayer,
                                      "CLEAR_SELECTION", selectionQuery)
    SelectLayerByAttribute_management(targetKeyCalculationLayer,
                                      "SWITCH_SELECTION", selectionQuery)
    CalculateField_management(targetKeyCalculationLayer, lrsRouteNumTarget,
                              "!" + str(lrsRouteNumSource) + "!",
                              "PYTHON_9.3")
    # Using the 2 characters for unique id. Local routes with more than 2
    # characters in unique id are probably errors.
    CalculateField_management(targetKeyCalculationLayer, lrsUniqueSource,
                              "!" + str(uniqueIdTarget) + "!", "PYTHON_9.3")
    # Since the above calculation might not be accurate for ramps, use this
    # one instead.
    calculateRampUniqueIdValues()
    # Function that looks at the M and C routes and renumbers them if there
    # are conflicts between an M route and an R route, or between a C route
    # and an R/M route.
    duplicateCheckForMAndC()
    # Calculate the full target route keys from their pieces.
    concatFullTargetKeys()
def routeCreation():
    """Create LRS routes plus Start/End calibration points.

    Rebuilds the output file GDB, runs CreateRoutes_lr from
    createRoutesInputFC into createRoutesOutputFC, adds F_Date/T_Date
    fields, generates start and end calibration points from the input
    centerlines (showing only the needed fields), merges them into a single
    RouteId/Measure point class, and dissolves the merged points. All paths
    and layer names are module-level.
    """
    env.workspace = returnGDBOrSDEPath(createRoutesOutputFC)
    env.overwriteOutput = 1
    '''
    # Need to match what Transcend used. -- Done.
    routeId = 'SourceRouteId'
    measureSource = 'TWO_FIELDS'
    fMeasureField = 'SourceFromMeasure'
    tMeasureField = 'SourceToMeasure'
    coordinatePriority = 'UPPER_LEFT'
    measureFactor = 1
    measureOffset = 0
    ignoreGaps = True
    buildIndex = True
    '''
    # Modifications for County Target Network.
    routeId = 'TargetCountyLRSKey'
    measureSource = 'Shapelength'
    # Bug fix: these two assignments were commented out while the
    # CreateRoutes_lr call below still references the names, which raised a
    # NameError. Restored here; with a length-based measureSource the tool
    # does not use the from/to field values.
    fMeasureField = 'SourceFromMeasure'
    tMeasureField = 'SourceToMeasure'
    coordinatePriority = 'LOWER_LEFT'
    measureFactor = 1
    measureOffset = 0
    ignoreGaps = True
    buildIndex = True
    routesOutputGDB = returnGDBOrSDEPath(createRoutesOutputFC)
    routesOutputGDBName = returnGDBOrSDEName(routesOutputGDB)
    # Need to implement a new path function to get the GDB's folder.
    routesOutputGDBFolder = mainFolder
    # Recreate the output GDB from scratch so the run starts clean.
    if Exists(routesOutputGDB):
        Delete_management(routesOutputGDB)
    CreateFileGDB_management(routesOutputGDBFolder, routesOutputGDBName)
    print("Creating the lrs routes.")
    # CreateRoutes_lr GP Tool
    CreateRoutes_lr(createRoutesInputFC, routeId, createRoutesOutputFC,
                    measureSource, fMeasureField, tMeasureField,
                    coordinatePriority, measureFactor, measureOffset,
                    ignoreGaps, buildIndex)
    print("Adding date fields to " +
          returnFeatureClass(createRoutesOutputFC) + ".")
    # F_Date (Date): stamped with today's date via a Python code block.
    AddField_management(createRoutesOutputFC, "F_Date", "DATE", "", "", "",
                        "F_Date", nullable)
    pyDateExpression = '''def pyFindTheDate():
    import time
    return time.strftime("%Y/%m/%d")'''
    CalculateField_management(createRoutesOutputFC, "F_Date",
                              "pyFindTheDate()", "PYTHON_9.3",
                              pyDateExpression)
    # T_Date (Date)
    AddField_management(createRoutesOutputFC, "T_Date", "DATE", "", "", "",
                        "T_Date", nullable)
    # ---- Add route calibration point creation steps for Start & End points. ----
    MakeFeatureLayer_management(createRoutesInputFC, 'tempFeatureLayer')
    # Remove previous calibration point outputs so they can be recreated.
    if Exists(startCalibrationPoints):
        Delete_management(startCalibrationPoints)
    if Exists(endCalibrationPoints):
        Delete_management(endCalibrationPoints)
    # Create 2 fieldInfo objects. Turn off all the fields in each one.
    featureDesc = Describe('tempFeatureLayer')
    if featureDesc.dataType == "FeatureLayer":
        # NOTE(review): if Describe.fieldInfo returns the same cached object
        # on both accesses, these two names alias each other — confirm arcpy
        # returns a fresh FieldInfo per property access.
        fieldInfo_For_Start_CP_Fields = featureDesc.fieldInfo
        fieldInfo_For_End_CP_Fields = featureDesc.fieldInfo
        # Use the count property to iterate through all the fields.
        for index in range(0, fieldInfo_For_Start_CP_Fields.count):
            fieldInfo_For_Start_CP_Fields.setVisible(index, 'HIDDEN')
            fieldInfo_For_End_CP_Fields.setVisible(index, 'HIDDEN')
    # NOTE(review): if the describe above is NOT a FeatureLayer, the
    # fieldInfo names below are undefined (NameError) — the guard assumes
    # MakeFeatureLayer always yields a FeatureLayer here.
    # Turn on the needed fields for the start calibration points.
    visibile_Fields_For_Start_CP_Layer = [routeId, 'SourceFromMeasure']
    for visibile_Field in visibile_Fields_For_Start_CP_Layer:
        tempIndex = fieldInfo_For_Start_CP_Fields.findFieldByName(
            visibile_Field)
        fieldInfo_For_Start_CP_Fields.setVisible(tempIndex, 'VISIBLE')
    # Create a feature layer that only shows the needed fields.
    MakeFeatureLayer_management(createRoutesInputFC,
                                featureLayerCL_For_Start_CP, "", "",
                                fieldInfo_For_Start_CP_Fields)
    # Use that feature layer to create the 1st calibration point set.
    FeatureVerticesToPoints_management(featureLayerCL_For_Start_CP,
                                       startCalibrationPoints, "START")
    # Turn on the needed fields for the end calibration points.
    visibile_Fields_For_End_CP_Layer = [routeId, 'SourceToMeasure']
    for visibile_Field in visibile_Fields_For_End_CP_Layer:
        tempIndex = fieldInfo_For_End_CP_Fields.findFieldByName(
            visibile_Field)
        fieldInfo_For_End_CP_Fields.setVisible(tempIndex, 'VISIBLE')
    # Create a feature layer that only shows the needed fields.
    MakeFeatureLayer_management(createRoutesInputFC,
                                featureLayerCL_For_End_CP, "", "",
                                fieldInfo_For_End_CP_Fields)
    # Use that feature layer to create the 2nd calibration point set.
    FeatureVerticesToPoints_management(featureLayerCL_For_End_CP,
                                       endCalibrationPoints, "END")
    # ---- Merge the Start & End calibration points. ----
    if Exists(mergedCalibrationPoints):
        Delete_management(mergedCalibrationPoints)
    # RoutesSource_Start_CP.SourceRouteId to CalPts_Merge.RouteId
    # RoutesSource_End_CP.SourceRouteId to CalPts_Merge.RouteId
    mcp_Field1 = FieldMap()
    mcp_Field1.addInputField(startCalibrationPoints, routeId)
    mcp_Field1.addInputField(endCalibrationPoints, routeId)
    mcp_Field1_OutField = mcp_Field1.outputField
    mcp_Field1_OutField.name = 'RouteId'
    mcp_Field1_OutField.aliasName = 'RouteId'
    mcp_Field1_OutField.type = 'String'
    mcp_Field1_OutField.length = 50
    mcp_Field1.outputField = mcp_Field1_OutField
    # RoutesSource_Start_CP.SourceFromMeasure to CalPts_Merge.Measure
    mcp_Field2 = FieldMap()
    mcp_Field2.addInputField(startCalibrationPoints, 'SourceFromMeasure')
    mcp_Field2.addInputField(endCalibrationPoints, 'SourceToMeasure')
    mcp_Field2_OutField = mcp_Field2.outputField
    mcp_Field2_OutField.name = 'Measure'
    mcp_Field2_OutField.aliasName = 'Measure'
    mcp_Field2_OutField.type = 'Double'
    mcp_Field2.outputField = mcp_Field2_OutField
    # Create a fieldMappings object for the layer merge.
    calibrationPointsMappings = FieldMappings()
    calibrationPointsMappings.addFieldMap(mcp_Field1)
    calibrationPointsMappings.addFieldMap(mcp_Field2)
    # Merge the points together into a single feature class.
    inputMergeLayers = [startCalibrationPoints, endCalibrationPoints]
    Merge_management(inputMergeLayers, mergedCalibrationPoints,
                     calibrationPointsMappings)
    MakeFeatureLayer_management(mergedCalibrationPoints, 'tempMergedPoints')
    dissolveFields = ["RouteId", "Measure"]
    print('Dissolving points.')
    Dissolve_management('tempMergedPoints', dissolvedCalibrationPoints,
                        dissolveFields, "#", "SINGLE_PART")
def TranscendRampReplacement():
    """Swap ramp features for Interchange_Ramps data.

    Deletes the 'X'-prefix ramp features (those that carry a Ramps_LRSKey)
    from routesSourceCenterlines, then appends repaired copies of the
    Interchange_Ramps features in their place, mapping LRS_KEY and the
    county logmiles onto the centerline schema.
    """
    MakeFeatureLayer_management(routesSourceCenterlines,
                                routesSourceFeatureLayer)
    SelectLayerByAttribute_management(routesSourceFeatureLayer,
                                      "CLEAR_SELECTION")
    # Pick out the existing ramp features that have a replacement key.
    selectionQuery = """ "LRS_ROUTE_PREFIX" = 'X' AND "Ramps_LRSKey" IS NOT NULL AND "Ramps_LRSKey" <> '' """
    SelectLayerByAttribute_management(routesSourceFeatureLayer,
                                      "NEW_SELECTION", selectionQuery)
    rampCountResult = GetCount_management(routesSourceFeatureLayer)
    rampCount = int(rampCountResult.getOutput(0))
    print('Selected ' + str(rampCount) + ' ramp features to be replaced.')
    if rampCount > 0:
        print("Deleting those ramp features from the " +
              returnFeatureClass(routesSourceCenterlines) + " layer.")
        DeleteFeatures_management(routesSourceFeatureLayer)
    else:
        print("No features selected. Skipping feature deletion.")
    # Remove the matching routes to prepare for the Interchange_Ramps information.
    ## After error matching is achieved, use replace geometry and replace attributes to not lose data
    ## from using the less effective method of:
    ## deleting the old Interchange_Ramps information, then re-adding with append.
    # A stale repair copy would block the CopyFeatures below, so clear it.
    if Exists(interchangeRampFCRepairCopy):
        Delete_management(interchangeRampFCRepairCopy)
    # Work on a copy because RepairGeometry modifies its input.
    CopyFeatures_management(interchangeRampFC, interchangeRampFCRepairCopy)
    # Repair deletes features with null geometry (2 expected, until
    # Shared.Interchange_Ramp is fixed).
    print("Repairing ramp geometry in the " +
          returnFeatureClass(interchangeRampFCRepairCopy) + " layer.")
    RepairGeometry_management(interchangeRampFCRepairCopy, "DELETE_NULL")
    # Build the three field maps from a (sourceField, targetField) table
    # instead of three hand-unrolled FieldMap blocks:
    #   LRS_KEY          -> LRSKEY
    #   BEG_CNTY_LOGMILE -> NON_STATE_BEGIN_MP
    #   END_CNTY_LOGMILE -> NON_STATE_END_MP
    rampFieldPairs = [("LRS_KEY", 'LRSKEY'),
                      ("BEG_CNTY_LOGMILE", 'NON_STATE_BEGIN_MP'),
                      ("END_CNTY_LOGMILE", 'NON_STATE_END_MP')]
    interchangeRampsMappings = FieldMappings()
    for rampSourceField, centerlineTargetField in rampFieldPairs:
        rampFieldMap = FieldMap()
        rampFieldMap.addInputField(interchangeRampFCRepairCopy,
                                   rampSourceField)
        mappedOutputField = rampFieldMap.outputField
        mappedOutputField.name = centerlineTargetField
        rampFieldMap.outputField = mappedOutputField
        interchangeRampsMappings.addFieldMap(rampFieldMap)
    print("Appending the features from " +
          returnFeatureClass(interchangeRampFCRepairCopy) + " into " +
          returnFeatureClass(routesSourceCenterlines) + ".")
    # NO_TEST schema type: the field mappings above handle the differences.
    Append_management([interchangeRampFCRepairCopy], routesSourceCenterlines,
                      "NO_TEST", interchangeRampsMappings)
checkRoutes, dissolvedCalibrationPoints as checkPointsPath, gdbBaseName) from pathFunctions import (returnGDBOrSDEPath, returnFeatureClass, returnGDBOrSDEName) #---Testing---# #---Testing---# rbjDiffTextLocation1 = reviewerBatchJobOutput[:-4] + '_DiffText.txt' testingRbjFile = r'C:\GIS\Geodatabases\KHUB\SourceChecks_09C_Prime.rbj' rbjDiffTextLocation2 = testingRbjFile[:-4] + '_DiffText.txt' #---Testing---# #---Testing---# RoutesMatch = r'RoutesTemplateForReplacement' RoutesUpdate = returnFeatureClass(checkRoutes) #RoutesUpdate = r'RoutesTemplateForReplacement' CalPtsMatch = r'CalPtsTemplateForReplacement' CalPtsUpdate = returnFeatureClass(checkPointsPath) #CalPtsUpdate = r'CalPtsTemplateForReplacement' GeodatabaseMatch = r'GeodatabaseTemplateForReplacement' GeodatabaseUpdate = workspaceToReview BatchJobNameTagMatch = r'BatchJobName' BatchJobNameTextUpdate = reviewerBatchJobOutput BrowseNameTagMatch = r'BrowseName' BrowseNameTextUpdate = gdbBaseName def main(): #---Testing---#
def changedFeaturesImport(sourceFeatures, targetFeatures):
    """Replace features in a copy of targetFeatures with the matching
    features from sourceFeatures, matched on the unique route key field
    (module-level setting uniqueKeyFieldToUse).

    Keys are read from sourceFeatures, batched into "IN (...)" selection
    queries of roughly 2000 keys apiece, and each batch is handed to the
    module-level helper featureReplacement, which performs the actual swap.
    Also ensures the ReviewUser/ReviewInfo fields exist on the copy so edit
    information can flow through to future edit sessions and error checks.
    """
    # Planned overall process:
    # 1.) Make an in_memory copy of the centerlines.
    #### A field map would have to be created for each dissolved feature layer, so it's not really worth it.
    # 2.) Get a list of all of the unique Keys
    # 3.) Loop through the list of unique Keys
    # 4.) For each Key, select all the features with that Key.
    # 5.) Count selected features.
    # 6.) Make a new layer or dissolved layer from this selection.
    # 7.) Count the number of dissolved features.
    # 8a.) If the number of dissolved features is 0, then append the error to the error file
    #      and go on to the next Key in the loop.
    # 8b.) Else, spatially select features in the original feature class with 'SHARE_A_LINE_SEGMENT_WITH'.
    # 9.) From the spatial select, reselect features that have the same Key.
    # 10.) Count to make sure that at least one feature is selected.
    # 11.) If so, delete that feature.
    # 12.) Cursor the features out of the dissolve layer.
    # 13.) Insert the features from the dissolve layer into the in_memory copy of the centerlines.
    # 14.) When the loop is complete, save the in_memory copy of the centerlines
    #      to a gdb on disk.

    # Won't work for shapefiles. Hopefully you're not using those though.
    targetFeaturesCopy = targetFeatures + '_Copy'

    # Drop any leftover name bindings from a previous run in this
    # interpreter session before rebuilding the layer names.
    try:
        del targetFeatureLayer
    except:
        pass
    targetFeatureLayer = returnFeatureClass(targetFeaturesCopy) + '_FL'
    try:
        del sourceFeatureLayer
    except:
        pass
    sourceFeatureLayer = returnFeatureClass(sourceFeatures) + '_FL'

    # Perform cleanup to prevent object creation collisions.
    # BUG FIX: the loop below previously iterated the misspelled name
    # 'layersOrFCstoRemove', which raised a NameError at runtime.
    layersOrFCsToRemove = [targetFeaturesCopy, targetFeatureLayer, sourceFeatureLayer]
    for layerOrFCItem in layersOrFCsToRemove:
        if Exists(layerOrFCItem):
            try:
                Delete_management(layerOrFCItem)
            except:
                pass

    # 1a.) Make a copy of the simplified and flipped centerlines to modify with dissolves.
    CopyFeatures_management(targetFeatures, targetFeaturesCopy)

    # 1b.) Add the ReviewUser and ReviewInfo fields to the copy if they do not already exist.
    # That way, the information from the edits can flow through to future edit sessions
    # and error checks.
    tfcDesc = Describe(targetFeaturesCopy)
    tfcFields = tfcDesc.fields
    tfcFieldNames = [x.name for x in tfcFields]

    # Check for ReviewUser field in the targetFeaturesCopy, then add it if missing.
    if 'ReviewUser' not in tfcFieldNames:
        # ReviewUser (String, 50)
        AddField_management(targetFeaturesCopy, 'ReviewUser', "TEXT", "", "", 50,
                            'ReviewUser', nullable)

    # Check for ReviewInfo field in the targetFeaturesCopy, then add it if missing.
    if 'ReviewInfo' not in tfcFieldNames:
        # ReviewInfo (String, 250)
        AddField_management(targetFeaturesCopy, 'ReviewInfo', "TEXT", "", "", 250,
                            'ReviewInfo', nullable)

    # (Removed a dead 'sourceSelectionQuery' string build that was never
    # used; its immediate [:-2] slice produced malformed SQL anyway.)

    MakeFeatureLayer_management(targetFeaturesCopy, targetFeatureLayer)
    MakeFeatureLayer_management(sourceFeatures, sourceFeatureLayer)

    # 2.) Get a list of all of the unique Keys in the source.
    ############ Modify this process to only get a list of Keys that have more than one feature.
    ############ everything else can be skipped for the purpose of dissolving.
    uniqueKeyFieldList = [str(uniqueKeyFieldToUse)]
    newCursor = daSearchCursor(sourceFeatureLayer, uniqueKeyFieldList)
    uniqueKeysDict = dict()
    for cursorRow in newCursor:
        uniqueKeysDict[str(cursorRow[0])] = 1
    try:
        del newCursor
    except:
        pass

    # list() wrapper keeps Python 2 behavior and makes .remove() safe on
    # Python 3, where dict.keys() returns a view.
    uniqueKeysList = list(uniqueKeysDict.keys())
    try:
        uniqueKeysList.remove('None')
    except:
        print("Could not remove 'None' from the list of uniqueKeys since it was not a part of the list.")

    print("Unique Key list creation successful.")
    print('Found ' + str(len(uniqueKeysList)) + ' unique Keys in the changed features.')

    # Use multiSelection instead: batch many keys per query rather than
    # issuing one selection per key.
    multiSelectionQuery = ''' "''' + str(uniqueKeyFieldToUse) + '''" IS NOT NULL AND "''' + str(uniqueKeyFieldToUse) + '''" IN ('''
    multiCounter = 0

    # 3.) Loop through the list of unique Keys
    for uniqueKeyItem in uniqueKeysList:
        # 4.) For groups of 2000 Keys, select all the features with those Keys.
        if multiCounter <= 1999:
            multiSelectionQuery += """'""" + str(uniqueKeyItem) + """'""" + """, """
            multiCounter += 1
        else:
            # Add the current item
            multiSelectionQuery += """'""" + str(uniqueKeyItem) + """'""" + """, """
            # Then, remove the trailing ", " and add a closing parenthesis.
            multiSelectionQuery = multiSelectionQuery[:-2] + ''') '''
            featureReplacement(sourceFeatureLayer, targetFeatureLayer, multiSelectionQuery)
            # Start a fresh batch.
            multiSelectionQuery = ''' "''' + str(uniqueKeyFieldToUse) + '''" IS NOT NULL AND "''' + str(uniqueKeyFieldToUse) + '''" IN ('''
            multiCounter = 0

    # After the for loop, process anything remaining which was unselected in
    # the previous multiSelectionQuery steps.
    # BUG FIX: only run the remainder batch when it actually holds keys; an
    # empty batch previously produced a malformed "... IN) " query.
    if multiCounter > 0:
        # Remove the trailing ", " and add a closing parenthesis.
        multiSelectionQuery = multiSelectionQuery[:-2] + """) """
        featureReplacement(sourceFeatureLayer, targetFeatureLayer, multiSelectionQuery)
def _updateKeysForSelection(selectionQuery, countyCode, fieldsToUse):
    """Rebuild the concatenated LRS key for every feature matching
    selectionQuery, via an update cursor over the module-level feature
    class fcWithLocalRoutesToDissolveAndMeasure."""
    newCursor = daUpdateCursor(fcWithLocalRoutesToDissolveAndMeasure,
                               fieldsToUse, selectionQuery)
    for cursorRow in newCursor:
        # Change each cursorRow to a list, then pass the list to a function
        # that concats the parts into the LRS key field.
        cursorListItem = list(cursorRow)
        concattedKeyListItem = concatTheLRSParts(cursorListItem, countyCode)
        # Receive the list back and use it to update the row.
        newCursor.updateRow(concattedKeyListItem)
    try:
        del newCursor
    except:
        pass


def main():
    """Concatenate LRS keys, dissolve, and add 0-to-shapelength measures
    for local ('L' prefix) routes, one county code at a time.

    Works county-by-county: collects the distinct KDOT_COUNTY_L/R codes,
    then for each code updates the concatenated key field, dissolves on the
    key, and calculates measures for both the left-county and the
    right-county-only selections.
    """
    # Do this by county.
    # Get a list of all of the available county numbers.
    # Then create an updateCursor for each county, using
    # a selection that looks at the LRS_COUNTY_PRE and LRS_PREFIX or existing KDOT_LRS_KEY.
    tempDesc = Describe(fcWithLocalRoutesToDissolveAndMeasure)
    print("Updating the concatenated LRS Key Field and start/end measures for selected features in " +
          returnFeatureClass(tempDesc.catalogPath) + ".")
    OIDFieldName = str(tempDesc.OIDFieldName)
    try:
        del tempDesc
    except:
        pass

    # Collect the distinct county codes found on either side of the road.
    uniqueCountyCodeDict = dict()
    countyCodeFieldsList = ['KDOT_COUNTY_L', 'KDOT_COUNTY_R']
    newCursor = daSearchCursor(fcWithLocalRoutesToDissolveAndMeasure, countyCodeFieldsList)
    for cursorRow in newCursor:
        uniqueCountyCodeDict[str(cursorRow[0])] = 1
        uniqueCountyCodeDict[str(cursorRow[1])] = 1
    try:
        del newCursor
    except:
        pass

    uniqueCountyCodeList = list(uniqueCountyCodeDict.keys())
    try:
        del uniqueCountyCodeDict
    except:
        pass
    # Null codes were stringified to 'None' above; drop them if present.
    try:
        uniqueCountyCodeList.remove('None')
    except:
        pass

    # No need to specify a key since it's one column.
    sortedUniqueCountyCodes = sorted(uniqueCountyCodeList)

    for uniqueCountyCodeItem in sortedUniqueCountyCodes:
        print('Selecting features based on countyCode: ' + str(uniqueCountyCodeItem) + '.')
        listOfFieldsToUse = [
            OIDFieldName, 'LRS_COUNTY_PRE', 'LRS_ROUTE_PREFIX', 'LRS_ROUTE_NUM',
            'LRS_ROUTE_SUFFIX', 'LRS_UNIQUE_IDENT', 'LRS_UNIQUE_IDENT1', lrsKeyToUse
        ]
        # Modified from the original localroutenumbering sql to include an exception for '%W%' routes,
        # because I don't think that those were included in the localroutenumbering, even though
        # they should have been.
        selectionQuery1 = """ KDOT_COUNTY_L = '""" + str(
            uniqueCountyCodeItem
        ) + """' AND (((KDOT_LRS_KEY IS NULL AND LRS_ROUTE_PREFIX = 'L') OR KDOT_LRS_KEY LIKE '%L%') AND NOT KDOT_LRS_KEY LIKE '%W%') """
        selectionQuery2 = """ KDOT_COUNTY_L IS NULL AND KDOT_COUNTY_R = '""" + str(
            uniqueCountyCodeItem
        ) + """' AND (((KDOT_LRS_KEY IS NULL AND LRS_ROUTE_PREFIX = 'L') OR KDOT_LRS_KEY LIKE '%L%') AND NOT KDOT_LRS_KEY LIKE '%W%') """

        # Update the concatenated LRS key for both selections (the second
        # query catches features whose left county is null).
        _updateKeysForSelection(selectionQuery1, uniqueCountyCodeItem, listOfFieldsToUse)
        _updateKeysForSelection(selectionQuery2, uniqueCountyCodeItem, listOfFieldsToUse)

        # At this point, all of the local features in a county should have had
        # their LRS keys updated.
        # What we need to do now is to dissolve them based on LRS key.
        dissolveBasedOnLocalRouteKeys(fcWithLocalRoutesToDissolveAndMeasure, selectionQuery1)
        dissolveBasedOnLocalRouteKeys(fcWithLocalRoutesToDissolveAndMeasure, selectionQuery2)

        # At this point, all of the local features should be dissolved into
        # single part lines.
        # Go ahead and add the measures based on 0-to-shapelength.
        calculateMeasuresForLocalRoutes(fcWithLocalRoutesToDissolveAndMeasure, selectionQuery1)
        calculateMeasuresForLocalRoutes(fcWithLocalRoutesToDissolveAndMeasure, selectionQuery2)
def mainProcessFeatureSimplification(inputFeatures, maxCount, outputFeatures):
    """Simplify inputFeatures in parallel by splitting them into mirror
    ("reflection") file gdbs of at most maxCount features each, running the
    simplification subprocess on each mirror, then appending the per-mirror
    results into outputFeatures.

    NOTE(review): this is Python 2 code (integer division below, and a
    print statement near the end); porting to Python 3 needs more than a
    mechanical change.
    """
    # Split the input features into intermediary features:
    # Add each intermediary feature class to a list and
    # pass one feature class of the intermediary features
    # to each subprocess.
    # When all of the subprocesses complete, use the
    # list of the intermediary feature classes to append
    # the data into the output features.
    countResult = GetCount_management(inputFeatures)
    intCount = int(countResult.getOutput(0))
    # debug print
    print("Counted " + str(intCount) + " features in the " + inputFeatures + " feature class.")

    # Clamp the batch size: cap at 15000, and push anything under 2000 up
    # to 7000 (the 7000 figure is as-found; presumably a tuned default --
    # TODO confirm).
    if maxCount > 15000:
        maxCount = 15000
    elif maxCount < 2000:
        maxCount = 7000
    else:
        pass

    # NOTE(review): Python 2 integer division plus the +1 means an exact
    # multiple of maxCount creates one extra (empty) mirror gdb. Harmless
    # for the final append, but wasteful -- confirm before changing.
    neededMirrors = intCount / maxCount + 1
    # debug print
    print("Will create " + str(neededMirrors) + " reflection gdbs.")

    infoForSubprocess = list()
    gdbToCreateList = list()
    for countItem in xrange(0, neededMirrors):
        gdbMirrorName = mirrorBaseName + '_' + '0' + str(countItem) + '.gdb'
        gdbMirrorFullPath = os.path.join(mainFolder, gdbMirrorName)
        gdbToCreateList.append(gdbMirrorFullPath)
        # Remove any leftover mirror gdb from a previous run, best-effort.
        try:
            if Exists(gdbMirrorFullPath):
                try:
                    Delete_management(gdbMirrorFullPath)
                except:
                    pass
            else:
                pass
        except:
            pass
        CreateFileGDB_management(mainFolder, gdbMirrorName)

        # do a selection on the input features here
        # then copyfeatures to get the selected features
        # output to the target gdb.
        if Exists(simplifyTempLayer):
            try:
                Delete_management(simplifyTempLayer)
            except:
                pass
        else:
            pass
        MakeFeatureLayer_management(inputFeatures, simplifyTempLayer)

        # Partition by OBJECTID range: [countItem*maxCount, (countItem+1)*maxCount).
        # NOTE(review): assumes OBJECTIDs run roughly 1..intCount; gaps from
        # prior deletes could push high OIDs past the last range and drop
        # features -- verify against the input data.
        currentSelectMin = int(countItem * maxCount)
        currentSelectMax = int((countItem + 1) * maxCount)
        dynSelectClause = """"OBJECTID" >= """ + str(currentSelectMin) + """ AND "OBJECTID" < """ + str(currentSelectMax) + """"""
        SelectLayerByAttribute_management(simplifyTempLayer, "NEW_SELECTION", dynSelectClause)
        selectedSimplifyFeatures = os.path.join(gdbMirrorFullPath, simplifyInputName)
        CopyFeatures_management(simplifyTempLayer, selectedSimplifyFeatures)

        # One work item per mirror gdb for the subprocess pool.
        subprocessInfoItem = [mainFolder, gdbMirrorFullPath, simplifyAlgorithm, simplifyDistance]
        infoForSubprocess.append(subprocessInfoItem)

    # Predivide the list of data driven pages that each process needs to run
    # and pass it as a list of exportItems.
    coreCount = mp.cpu_count()

    # To support running this on the slow AR60, reduce the coreCount used to try to keep
    # this script from crashing there.
    if coreCount >= 3 and useMultithreading == True:
        coreCount = coreCount - 1
        print("Starting a multi-threaded job which will use (up to) " + str(coreCount) + " cores at once.")
        workPool = mp.Pool(processes=coreCount)
        # Note: This is a different usage of the word map than the one generally used in GIS.
        workPool.map(subProcessFeatureSimplification, infoForSubprocess)
        print("Multi-threaded job's done!")
        # Sleeps around close()/join() give arcpy time to release gdb locks
        # held by the workers -- presumably empirical; confirm if reducing.
        print("Waiting a few moments before closing down the worker processes...")
        time.sleep(20)
        workPool.close()
        time.sleep(20)
        workPool.join()
        print("Worker processes closed.")
    else:
        # Don't use multithreading here.
        print("Using the single threaded process for feature simplification.")
        print("This will be slower than the multi-threaded version,")
        print("but it should also be less likely to crash on slower machines")
        print("or those with low core counts.")
        for singleThreadedProcessInfoListItem in infoForSubprocess:
            singleThreadedProcessForSlowMachines(singleThreadedProcessInfoListItem)
        print("Waiting a few moments before continuing to the next part of the script...")
        time.sleep(20)

    # Delete the output target prior to recreating it and appending data into it.
    if Exists(outputFeatures):
        try:
            Delete_management(outputFeatures)
        except:
            pass
    else:
        pass

    # Need the gdb and fc name here from outputFeatures.
    outGDB = returnGDBOrSDEPath(outputFeatures)
    outGDBName = returnGDBOrSDEName(outGDB)
    outGDBFolder = returnGDBOrSDEFolder(outGDB)
    outFCName = returnFeatureClass(outputFeatures)
    if not Exists(outGDB):
        CreateFileGDB_management(outGDBFolder, outGDBName)

    # Use the inputFeatures as a template.
    CreateFeatureclass_management(outGDB, outFCName, "", inputFeatures)

    # Gather each mirror's result feature class for the append.
    appendOutputFCList = list()
    for gdbToCreate in gdbToCreateList:
        appendOutputFC = os.path.join(gdbToCreate, 'simplificationOutput')
        appendOutputFCList.append(appendOutputFC)

    # Do appends here, then sleep again for a bit.
    # Shouldn't need a field mapping since they should all be the same.
    Append_management(appendOutputFCList, outputFeatures, "NO_TEST")
    print "Waiting a few moments to be sure that all of the locks have been removed prior to deleting the reflection gdbs..."
    time.sleep(20)

    # Then remove the mirror gdbs, best-effort.
    for gdbToCreate in gdbToCreateList:
        try:
            if Exists(gdbToCreate):
                try:
                    Delete_management(gdbToCreate)
                except:
                    pass
            else:
                pass
        except:
            pass