class CIIMSDev(object):
    # Script-style "class": every statement below executes once at class
    # definition time; nothing is instantiated later.  Relies on the
    # module-level names DCL, conns, shutil, Exists, Delete_management and
    # XYFC being defined before this body runs.

    # Source: CIIMS crossing view in the dev SDE connection file.
    srcdb = r'sdedev_ciims.sde'
    # Refresh the source connection file: remove any stale copy, then re-copy.
    # NOTE(review): the Exists() check uses 'Database Connections/<db>' while
    # the delete uses DCL + escaped backslashes — confirm both resolve to the
    # same file on disk.
    if Exists(r'Database Connections/'+srcdb):
        Delete_management(DCL+r'\\Database Connections\\'+srcdb)
    shutil.copy(conns+"/"+srcdb, DCL+"/"+srcdb)
    srcschema = 'CIIMS'
    srctbl = 'CIIMS_VWCROSSINGGIS3'
    source = r'Database Connections/'+srcdb +'/'+srcschema+'.'+srctbl
    # Field names read from the source table.
    Lat = "CROSSINGLATITUDE"
    Long = "CROSSINGLONGITUDE"
    loaded = "LOADDATE"
    # Destination: the same dev SDE; the connection file is only copied when
    # it does not already exist (unlike the source refresh above).
    dstdb = r'sdedev_ciims.sde'
    if Exists(r'Database Connections/'+dstdb):
        pass
    else:
        shutil.copy(conns+"/"+dstdb, DCL+"/"+dstdb)
    dstschema = 'CIIMS'
    dstfd = 'CIIMS'
    dstfc = 'Static_Crossings'
    dst = r'Database Connections/'+dstdb+'/'+dstschema+'.'+dstfd+'/'+dstschema+'.'+dstfc
    # NAD83 geographic coordinate system WKT handed to MakeXYEventLayer via XYFC.
    GCS = "GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision"
    # Rebuild the destination feature class from the source lat/long table.
    XYFC(source, dst, Lat, Long, GCS, loaded)
def reportExtensionForQCGDB(singlePartPointErrors, singlePartLineErrors):
    """Append singlepart error feature counts to the error report CSV.

    When both singlepart error feature classes exist, gets a feature
    count for each and appends one "<name>, <count>" row per class to
    the module-level errorReportCSV.  Otherwise just reports that the
    singlepart output was not found.
    """
    if Exists(singlePartPointErrors) and Exists(singlePartLineErrors):
        singlePartPointFeaturesName = returnFeatureClass(singlePartPointErrors)
        singlePartPointErrorsResult = GetCount_management(singlePartPointErrors)
        singlePartPointErrorsCount = int(singlePartPointErrorsResult.getOutput(0))
        singlePartLineFeaturesName = returnFeatureClass(singlePartLineErrors)
        singlePartLineErrorsResult = GetCount_management(singlePartLineErrors)
        singlePartLineErrorsCount = int(singlePartLineErrorsResult.getOutput(0))
        try:
            with open(errorReportCSV, 'a') as fHandle:
                fHandle.write(singlePartPointFeaturesName + ', ' +
                              str(singlePartPointErrorsCount) + '\n')
                fHandle.write(singlePartLineFeaturesName + ', ' +
                              str(singlePartLineErrorsCount) + '\n')
        # Narrowed from a bare except: only file I/O failures are expected here.
        except (IOError, OSError):
            print("There was an error writing to the file.")
    else:
        print("The Single Part output was not found.")
        print(
            "Will not add the Single Part information to the errors report csv."
        )
def complete(self):
    """Validate that every dataset referenced by the configuration exists.

    Walks self.parameters (mapping: parameter-group name -> list of
    config keys) and, for each string item, resolves it through
    self.test_config and checks the target with Exists().  Returns True
    when everything is accessible; on failure, logs the error and raises
    Exit().
    """
    try:
        for dict_item in self.parameters:
            for item in self.parameters[dict_item]:
                # NOTE(review): non-string items are silently skipped.
                if isinstance(item, str):
                    if dict_item == self.param_groups[1]:
                        # Config value is expected to be a full path.
                        if not Exists(self.test_config[item]):
                            raise InaccessibleData(
                                "'{0}' does not exist or is inaccessible.".
                                format(item))
                    elif dict_item == self.param_groups[0]:
                        # Config value is relative to the dashboard database.
                        if not Exists(
                                self.test_config['dashboard_database'] +
                                os.sep + self.test_config[item]):
                            raise InaccessibleData(
                                "'{0}' does not exist or is inaccessible.".
                                format(item))
                    else:
                        # Item belongs to an unrecognized parameter group.
                        raise ConfigFileIssue(
                            "There is an issue with the config file. ({0})".
                            format(item))
        return True
    except (InaccessibleData, ConfigFileIssue) as e:
        # e.message is Python 2 only.
        self.logs.error(e.message)
        raise Exit()
def _deleteQCFeatureClassIfExists(errorFeatureClass):
    """Delete an existing QC error feature class, warning when locked."""
    if Exists(errorFeatureClass):
        try:
            Delete_management(errorFeatureClass)
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt pass through.
        except Exception:
            print("The feature class at: " + str(errorFeatureClass) +
                  " already exists and could not be deleted.")


def lineErrorsExportToQCGDB():
    """Export data reviewer line errors to the QC GDB.

    Copies REVTABLELINE out of the reviewer session GDB, joins the main
    reviewer table onto the copy, then explodes the multipart result
    into a singlepart feature class.
    """
    rev_table_main = os.path.join(reviewerSessionGDB, "REVTABLEMAIN")
    rev_table_line = os.path.join(reviewerSessionGDB, "REVTABLELINE")
    print("Exporting the data reviewer lines.")
    # Clear any previous outputs (delete-if-exists logic deduplicated
    # into the helper above).
    _deleteQCFeatureClassIfExists(multipart_line_errors)
    _deleteQCFeatureClassIfExists(single_part_line_errors)
    Copy_management(rev_table_line, multipart_line_errors)
    JoinField_management(multipart_line_errors, rev_join_field1,
                         rev_table_main, rev_join_field2)
    MultipartToSinglepart_management(multipart_line_errors,
                                     single_part_line_errors)
class ACCESSPERMDev(object):
    # Script-style "class": every statement executes once at class definition
    # time.  Relies on module-level DCL, conns, shutil, Exists,
    # Delete_management, datetime and XYFC.
    print "access permit points: GO "+ str(datetime.datetime.now())
    # Source: access point test table reached through an ODC connection file.
    srcdb = r'ATLASPROD.odc'
    # Refresh the connection file: remove any stale copy, then re-copy.
    # NOTE(review): the Exists() check and the delete use differently built
    # paths — confirm they resolve to the same file.
    if Exists(r'Database Connections/'+srcdb):
        Delete_management(DCL+r'\\Database Connections\\'+srcdb)
    shutil.copy(conns+"/"+srcdb, DCL+"/"+srcdb)
    srcschema = 'KDOT'
    srctbl = 'KGATE_ACCESSPOINTS_TEST'
    source = r'Database Connections/'+srcdb +'/'+srcschema+'.'+srctbl
    print source
    # Field names read from the source table.
    Lat = "GPS_LATITUDE"
    Long = "GPS_LONGITUDE"
    loaded = "LOAD_DATE"
    # Destination: GISTEST SDE; only copy the connection file when missing.
    dstdb = 'GISTEST.sde'
    if Exists(r'Database Connections/'+dstdb):
        print dstdb +" exists"
        pass
    else:
        shutil.copy(conns+"/"+dstdb, DCL+"/"+dstdb)
    dstschema = 'SHARED'
    dstfc = 'ACCESS_POINTS'
    dst = r'Database Connections/'+dstdb+'/'+dstschema+'.'+dstfc
    print source
    print dst
    # NAD83 geographic coordinate system WKT handed to MakeXYEventLayer via XYFC.
    GCS = "GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision"
    # Rebuild the destination feature class from the source lat/long table.
    XYFC(source, dst, Lat, Long, GCS, loaded)
def MergeCatchment(self, inbasin):
    """Merge a delineated basin into the global catchment polygon.

    Erases the downstream catchment from the (projected) input basin,
    rebuilds the global basin polygon from the result, and keeps only
    the largest polygon produced.  Returns True on success, False on
    any error; temp feature classes/rasters in the scratch workspace
    are always deleted.
    """
    dstemp = None
    try:
        catchments = Config()["catchment"]
        env.workspace = self._TempLocation
        resultworkspace = os.path.join(self._WorkspaceDirectory,
                                       self.WorkspaceID + '.gdb', "Layers")
        downstreamcatchment = os.path.join(resultworkspace,
                                           catchments["downstream"])
        # NOTE(review): Describe() runs before the Exists() guard below, so a
        # missing catchment raises here first — confirm that ordering is intended.
        dsDesc = Describe(downstreamcatchment)
        if not Exists(downstreamcatchment):
            raise Exception("downstream catchment doesn't exist")
        sr = dsDesc.spatialReference
        # Reproject the incoming basin into the downstream catchment's SR.
        wrkingbasin = self.ProjectFeature(inbasin, sr)
        #strip downstream catchment from mask
        dstemp = Erase_analysis(wrkingbasin, downstreamcatchment, "dstemp")
        #remove any weird verticies associated with Erase
        globalbasin = os.path.join(resultworkspace, catchments["global"])
        FeatureToPolygon_management(dstemp, globalbasin,
                                    cluster_tolerance="50 Meters",
                                    attributes="ATTRIBUTES", label_features="")
        #Delete multipolys created by Featuretopolygon
        # Find the OID of the largest polygon by Shape_Area (Py2 iteritems).
        maxOid = max({
            key: value
            for (key, value) in arcpy.da.SearchCursor(
                globalbasin, ['OID@', 'Shape_Area'])
        }.iteritems(), key=operator.itemgetter(1))[0]
        # Delete every polygon except the largest one.
        with arcpy.da.UpdateCursor(globalbasin, 'OID@') as cursor:
            for row in cursor:
                if row[0] != maxOid:
                    cursor.deleteRow()
        #https://gis.stackexchange.com/questions/152481/how-to-delete-selected-rows-using-arcpy
        if not Exists(globalbasin):
            raise Exception("Failed to create basin " + GetMessages())
        return True
    except:
        tb = traceback.format_exc()
        self._sm("Merge basin Error " + tb, "ERROR")
        return False
    finally:
        #cleanup
        for fs in arcpy.ListFeatureClasses():
            arcpy.Delete_management(fs)
        for rs in arcpy.ListRasters():
            arcpy.Delete_management(rs)
def routesSourceCreation():
    """Create the routes source centerlines copy and its LRS fields.

    Deletes any previous output, copies inputCenterlines to
    routesSourceCenterlines, adds the SourceRouteId/SourceFromMeasure/
    SourceToMeasure fields, then runs the key-calculation chain.
    """
    env.workspace = returnGDBOrSDEPath(routesSourceCenterlines)
    env.overwriteOutput = 1
    # Remove the previous output, if any (dead else branch dropped).
    if Exists(routesSourceCenterlines):
        Delete_management(routesSourceCenterlines)
    # Create a new file for the output.
    print("Making a copy of " + returnFeatureClass(inputCenterlines) +
          " called " + returnFeatureClass(routesSourceCenterlines) + ".")
    CopyFeatures_management(inputCenterlines, routesSourceCenterlines)
    print("Adding fields to " + returnFeatureClass(routesSourceCenterlines) + ".")
    # SourceRouteId (Text, 50)
    AddField_management(routesSourceCenterlines, "SourceRouteId", "TEXT", "",
                        "", 50, "SourceRouteId", nullable)
    # SourceFromMeasure (Double)
    AddField_management(routesSourceCenterlines, "SourceFromMeasure", "DOUBLE",
                        "", "", "", "SourceFromMeasure", nullable)
    # SourceToMeasure (Double)
    AddField_management(routesSourceCenterlines, "SourceToMeasure", "DOUBLE",
                        "", "", "", "SourceToMeasure", nullable)
    # `== True` comparisons replaced with plain truth tests.
    if useNewFieldLogic:
        KDOTKeyCalculation_NewFieldLogic()
    else:
        TranscendFieldCalculation()
    TranscendRampReplacement()
    # NOTE(review): the original ran this identical if/else a second time
    # after TranscendRampReplacement(); preserved as-is — confirm whether
    # the repeated calculation is intentional.
    if useNewFieldLogic:
        KDOTKeyCalculation_NewFieldLogic()
    else:
        TranscendFieldCalculation()
    LocalRouteReduction()
def FileChecker(ShapeFileDate, CheckLayer): #runnext = False SourceFileTxt = str(ShapeFileDate.replace("-", "_")) print ShapeFileDate selectdate = "Sourcefile = '" + SourceFileTxt + "'" print selectdate print CheckLayer if Exists("Check1"): Delete_management("Check1") try: MakeFeatureLayer_management(CheckLayer, "Check1", selectdate) except: MakeTableView_management(CheckLayer, "Check1", selectdate) CheckExistence = GetCount_management("Check1") print CheckExistence if int(str(CheckExistence)) > 0: print "This source file info is already in the target feature" runnext = False elif int(str(CheckExistence)) == 0: print 'the information is new for this source file and will be added.' runnext = True else: print 'something isnt working here' print runnext return runnext
def AnnualizeData(YearToAnnualize): annualLayer = gdb + "\KTRIPS.SDE.Ktrips_Annual" currentyYear = gdb + "\KTRIPS.SDE.Ktrips_CurrentYear" SelectYear = YearSelTest CalcYear = str(int(YearSelTest - 1)) YearSelTest = "TripYear = '" + SelectYear + "'" if Exists("Check1"): Delete_management("Check1") MakeFeatureLayer_management(annualLayer, "Check1", YearSelTest) CheckExistence = GetCount_management("Check1") print CheckExistence if int(str(CheckExistence)) > 0: print "This source file info is already in the target feature" runnext = False elif int(str(CheckExistence)) == 0: print 'the information is new for this source file and will be added.' runnext = True Append_management(currentyYear, annualLayer, "NO_TEST", "#") CalculateField_management(annualLayer, "TripYear", CalcYear, "PYTHON_9.3") TruncateTable_management(currentyYear) else: print 'something isnt working here' print runnext pass
def appender_DWBI_OLD(ShapeFileDate): print "appending the modeled data" env.workspace = repo filename1 = r"DWBI_SEGS" #C:\Workspace\pydot\sde_connections_10.3\sde@KTRIPS_sqlgiprod.sde\KTRIPS.SDE.KTRIPS_ROUTES enterprisedbRoutes = gdb + r"\KTRIPS.SDE.KTRIPS_ROUTE_Segments" print enterprisedbRoutes #Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#") if Exists(filename1): MakeTableView_management(filename1, "AppendCheck", "#", "#", "#") AddJoin_management("AppendCheck", "PRMT_ID", enterprisedbRoutes, "PRMT_ID", join_type="KEEP_COMMON") recordsTest = str(GetCount_management("AppendCheck")) RemoveJoin_management("AppendCheck") if recordsTest == '0': print recordsTest + " of these records exist, appending now" Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#") else: print recordsTest + " records already have been appended" else: print "there was a problem, " + str(filename1) + " could not be found" pass
def FileTransfer(ShapeFileDate):
    """Download the monthly shapefile components from the KTRIPS FTP site.

    Derives the shapefile base name from ShapeFileDate (minus its 4-char
    suffix) and fetches the .dbf/.prj/.shx/.shp parts, skipping any that
    already exist locally.
    """
    import os, sys
    try:
        from SECURE_FILES.KTRIPS_FTP_config import (ftpHost, ftpUser, ftpPass)
    except:
        # NOTE(review): a failed credential import is swallowed here, so the
        # print/FTP calls below would raise NameError — confirm the config
        # module is always importable in production.
        pass
    print ftpHost, ftpPass, ftpUser
    ftp = FTP(ftpHost)  # connect to host, default port
    ftp.login(ftpUser, ftpPass)  # user, passwd @
    monthlyfile = ShapeFileDate[:-4]
    filename1 = repo + r"/" + ShapeFileDate
    ftp.retrlines('LIST')  # list directory contents
    suffixtypes = ['.dbf', '.prj', '.shx', '.shp']
    for filetype in suffixtypes:
        # filename1 is rebound per component file here.
        filename1 = monthlyfile + filetype
        print filename1
        if Exists(repo + "/" + filename1):
            print "file already transferred"
            exist = "yes"
        else:
            exist = "newload"
            # Download the missing component in binary mode.
            with open(filename1, "wb") as newfile:
                ftp.retrbinary('RETR %s' % filename1, newfile.write)
    ftp.quit()
    print "File Transfer Completed"
def moveLocalErrorsToSQL(prefixKeyName):
    """Copy each prefix's error feature classes from the local GDB to SQL."""
    print("Moving errors from the Local gdb to SQL for the prefix key name of: " + str(prefixKeyName) + ".")
    for localErrorFC in prefixesToMoveErrorsFor:
        baseName = returnFeatureClass(localErrorFC)
        sqlWorkspace = returnGDBOrSDEPath(dailyProcessSDESourceCenterlinesLocation)
        # SelfIntClassification outputs already embed the prefix in their
        # base name; every other class gets the prefix prepended here.
        if 'selfintclassification' in str(baseName).lower():
            sqlTargetFC = os.path.join(sqlWorkspace, baseName)
        else:
            sqlTargetFC = os.path.join(sqlWorkspace,
                                       prefixKeyName + '_' + baseName)
        if Exists(sqlTargetFC):
            try:
                Delete_management(sqlTargetFC)
            except:
                print("Could not delete the FC at: " + str(sqlTargetFC) + ".")
                print("Please make sure that the FC does not have any locks on it and try again.")
        try:
            CopyFeatures_management(localErrorFC, sqlTargetFC)
        except:
            print("Could not copy from the FC at: " + str(localErrorFC))
            print("to the FC at: " + str(sqlTargetFC) + ".")
            print("Please make sure that the FC to copy from exists")
            print("and that the FC to copy to is not locked.")
def modeler(ShapeFileDate): env.workspace = repo filename1 = repo + r"/" + ShapeFileDate SourceFileTxt = str(ShapeFileDate.replace("-", "_")) print "modeling the data schema" if Exists(filename1): Generalize_edit(filename1, "60 Feet") AddIndex_management(filename1, "PRMT_ID", "", "NON_UNIQUE", "NON_ASCENDING") AddField_management(filename1, "SourceFile", "TEXT") AddField_management(filename1, "Tonnage", "Double") AddField_management(filename1, "WidthFt", "Double") AddField_management(filename1, "HeightFt", "Double") AddField_management(filename1, "LengthFt", "Double") MakeTableView_management(filename1, "Ton_Calc", "#", "#", "#") CalculateField_management("Ton_Calc", "SourceFile", "'" + SourceFileTxt + "'", "PYTHON_9.3", "#") CalculateField_management("Ton_Calc", "Tonnage", "40", "PYTHON_9.3", "#") CalculateField_management("Ton_Calc", "LengthFt", "Round([LENGTH] /12,2)", "VB", "#") CalculateField_management("Ton_Calc", "HeightFt", "Round([HEIGHT] /12,2)", "VB", "#") CalculateField_management("Ton_Calc", "WidthFt", "Round([WIDTH] /12,2)", "VB", "#") MakeTableView_management(filename1, "ActualTon_Calc", """"GVW" >80000""", "#", "#") CalculateField_management("ActualTon_Calc", "Tonnage", "!GVW!/2000", "PYTHON_9.3", "#")
def reportExtensionForRAndHCheck(featuresToCheck):
    """Append the R&H non-monotonic check count to the errors report CSV."""
    if not Exists(featuresToCheck):
        print("No Roads & Highways Non-Monotonic Check output found.")
        print("Will not add additional information to the errors report csv.")
        return
    checkedFeaturesName = returnFeatureClass(featuresToCheck)
    rAndHErrorsLayer = 'RAndHErrorsAsFeatureLayer'
    MakeFeatureLayer_management(featuresToCheck, rAndHErrorsLayer)
    rAndHErrorsCount = int(GetCount_management(rAndHErrorsLayer).getOutput(0))
    print("Roads & Highways Non-Monotonic Check output was found.")
    print(
        "Extending the errors report with information from the Roads & Highways Non-Monotonicity Check."
    )
    with open(errorReportCSV, 'a') as fHandle:
        fHandle.write('\n' + 'Roads & Highways checks follow: ' + '\n')
        fHandle.write(checkedFeaturesName + ', ' + str(rAndHErrorsCount) + '\n')
def setupEnv():
    """Build the city connecting link (CCL) layers and event tables.

    Creates views/layers over the resolution table, the county and state
    LRS routes and the city limits, then locates, overlays and dissolves
    the CCL events into the CITY_CONNECTING_LINK_* outputs.  Relies on
    module-level resolve, connection0/1, cntyroutelyr, stateroutelyr,
    citylimits and laneclass.
    """
    print "run at "+ str(datetime.datetime.now())
    # Only active (not-yet-ended) resolutions participate.
    rsel = "ENDDATE IS NULL"
    MakeTableView_management(resolve, "CCL_Resolution_tbl", rsel)
    # CCL_LRS = city number + the county-route portion of the LRS key.
    CalculateField_management("CCL_Resolution_tbl", "CCL_LRS", 'str(!CITYNUMBER!)+str(!LRS_KEY![3:14])', "PYTHON" )
    # City numbers below 100 need a leading zero to keep the key width fixed.
    MakeTableView_management(connection1+"CCL_Resolution", "CCL_Resolution_tbl10", 'CITYNUMBER<100')
    CalculateField_management("CCL_Resolution_tbl10", "CCL_LRS", '"0"+str(!CITYNUMBER!)+str(!LRS_KEY![3:14])', "PYTHON")
    MakeFeatureLayer_management(cntyroutelyr, "cmlrs")
    MakeFeatureLayer_management(stateroutelyr, "smlrs")
    MakeFeatureLayer_management(citylimits, "CityLimits", "TYPE IN ( 'CS', 'ON')")
    # NOTE(review): this locates the raw citylimits feature class, not the
    # filtered "CityLimits" layer made above — confirm that is intended.
    LocateFeaturesAlongRoutes_lr(citylimits,"cmlrs","LRS_KEY","0 Feet",connection1+"GIS_CITY","LRS_KEY LINE Beg_CMP End_CMP","FIRST","DISTANCE","NO_ZERO","FIELDS","M_DIRECTON")
    MakeRouteEventLayer_lr("cmlrs","LRS_KEY","CCL_Resolution_tbl","LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE","City_Connecting_Links","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT")
    MakeTableView_management(connection1+"GIS_CITY", "GIS_CITY")
    MakeTableView_management(laneclass, "LaneClass")
    MakeRouteEventLayer_lr("cmlrs","LRS_KEY","GIS_CITY","LRS_KEY LINE BEG_CMP END_CMP","GIS_BASED_CCL","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT")
    # Intersect resolution extents with lane classification extents.
    OverlayRouteEvents_lr(connection1+"CCL_Resolution","LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE",laneclass,"LRS_KEY LINE BCMP ECMP","INTERSECT",connection1+"CCL_LANE_CLASS_OVERLAY","LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE","NO_ZERO","FIELDS","INDEX")
    print "create Route Layer specific to City Connecting Link locations"
    FeatureClassToFeatureClass_conversion("City_Connecting_Links", connection0, "CITY_CONNECTING_LINK_CENTERLINE")
    # Re-reference the CCL centerlines against the state LRS.
    LocateFeaturesAlongRoutes_lr(connection1+"CITY_CONNECTING_LINK_CENTERLINE",stateroutelyr,"LRS_ROUTE","0 Meters",connection1+"CCL_STATE_LRS_tbl","LRS_ROUTE LINE BEG_STATE_LOGMILE END_STATE_LOGMILE","FIRST","DISTANCE","ZERO","FIELDS","M_DIRECTON")
    MakeRouteEventLayer_lr("smlrs", "LRS_ROUTE",connection1+"CCL_STATE_LRS_tbl","LRS_ROUTE LINE BEG_STATE_LOGMILE END_STATE_LOGMILE","CCL_STATE_LRS","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT")
    FeatureClassToFeatureClass_conversion("CCL_STATE_LRS", connection0, "CITY_CONNECTING_LINK_STATEREF")
    # Replace the previous state-referenced dissolve output, if present.
    if Exists(connection1+"CITY_CONNECTING_LINK_STATE"):
        Delete_management(connection1+"CITY_CONNECTING_LINK_STATE")
    Dissolve_management(connection1+"CITY_CONNECTING_LINK_STATEREF",connection1+"CITY_CONNECTING_LINK_STATE","LRS_ROUTE;CITY;CITYNUMBER;DESCRIPTION;CCL_LRS","BEG_STATE_LOGMILE MIN;END_STATE_LOGMILE MAX","MULTI_PART","UNSPLIT_LINES")
    Dissolve_management(connection1+"CITY_CONNECTING_LINK_STATEREF",connection1+"CITY_CONNECTING_LINK_STATE_D","CCL_LRS","BEG_STATE_LOGMILE MIN;END_STATE_LOGMILE MAX","MULTI_PART","DISSOLVE_LINES")
    print "processes to Create the layer that will be used to create a new LRS for city connecting links"
def HighwayCalc():
    """Pull out State Highways to preserve KDOT LRS Key (CANSYS FORMAT -
    non directional CRAD) for the KDOT primary route"""
    # Rebuild the sorted road-alias table from scratch.
    if Exists(gdb + "\RoadAlias_Sort"):
        Delete(gdb + "\RoadAlias_Sort")
    else:
        pass
    Sort_management(Alias, gdb + "\RoadAlias_Sort",
                    "KDOT_CODE ASCENDING;KDOT_ROUTENAME ASCENDING", "UR")
    #Remember to check the primary route heirarchy calculates correctly where US rides US and I rides I
    # Processed lowest-to-highest priority so later classes (U, then I)
    # overwrite earlier ones where routes overlap.
    Heriarchy = ["K", "U", "I"]
    for routeClass in Heriarchy:
        rideselect = "KDOT_PREFIX LIKE '" + routeClass + "%'"
        print rideselect, routeClass
        # TableView/JoinTbl/CalcField/removeJoin are module-level aliases of
        # the corresponding arcpy tools; lyr is the module-level target layer.
        TableView(gdb + "\RoadAlias_Sort", "RoadAlias_Sort", rideselect)
        JoinTbl(lyr, "SEGID", "RoadAlias_Sort", "SEGID", "KEEP_COMMON")
        CalcField(lyr, lyr + ".KDOTPreType", "!RoadAlias_Sort.KDOT_PREFIX!",
                  "PYTHON_9.3", "#")
        CalcField(
            lyr, lyr + ".Soundex",
            "!RoadAlias_Sort.KDOT_PREFIX!+!RoadAlias_Sort.KDOT_ROUTENAME!",
            "PYTHON_9.3", "#")
        CalcField(lyr, "KDOT_ADMO", "'S'", "PYTHON_9.3", "#")
        CalcField(lyr, "PreCode", "0", "PYTHON_9.3", "#")
        removeJoin(lyr)
        # Rebuild the route ID / LRS key from its component fields.
        CalcField(
            lyr, "RID",
            "str(!KDOT_COUNTY_R!)+str(!KDOT_COUNTY_L!)+str(!KDOT_CITY_R!)+str(!KDOT_CITY_L!)+str(!PreCode!) + !Soundex! + str(!SuffCode!)+str(!UniqueNo!)+str(!TDirCode!)",
            "PYTHON_9.3", "#")
        CalcField(lyr, "LRSKEY", "str(!RID!)", "PYTHON_9.3", "#")
def XYFC(source, dst, Lat, Long, GCS, loaded): if Exists("FCtbl"): Delete_management("FCtbl") else: pass if Exists("FC_Layer"): Delete_management("FC_Layer") else: pass print "start XYFC "+ str(datetime.datetime.now()) MakeTableView_management(source, 'FCtbl', "#", "#", "") MakeXYEventLayer_management("FCtbl",Long, Lat,"FC_Layer", GCS,"#") TruncateTable_management(dst) Append_management("FC_Layer",dst,"NO_TEST","#","#") CalculateField_management(dst, loaded,"datetime.datetime.now( )","PYTHON_9.3","#") print "XYFC complete for " +str(dst)+ " at " + str(datetime.datetime.now())
def StreetNetworkCheck():
    """removes street centerlines from the topology and creates geometric network,
    then checks geometric network connectivity"""
    from arcpy import (VerifyAndRepairGeometricNetworkConnectivity_management,
                       RemoveFeatureClassFromTopology_management,
                       CreateGeometricNetwork_management,
                       FindDisconnectedFeaturesInGeometricNetwork_management)
    print gdb
    env.workspace = gdb
    # Assumes the first feature dataset in the GDB holds the NG911 data.
    fd = ListDatasets("*", "Feature")
    fdNG = fd[0]
    print fd[0]
    topo = gdb + "/" + fdNG + "/NG911_Topology"
    #topo = ListDatasets("*")
    #TOPO CANNOT GET TO WORK BY LIST FUNCTOINS in V10.2.2
    geonet = gdb + "/" + fdNG + "/RoadCenterlineGeoNet"
    print topo
    if Exists(geonet):
        print "Street Geometric Network Already Exists"
    else:
        # Centerlines must leave the topology before they can join a
        # geometric network.
        try:
            RemoveFeatureClassFromTopology_management(topo, "RoadCenterline")
        except:
            print "could not remove road centerlines from topology"
        CreateGeometricNetwork_management(gdb + "/" + fdNG,
                                          "RoadCenterlineGeoNet",
                                          "RoadCenterline SIMPLE_EDGE NO",
                                          "#", "#", "#", "#", "#")
        FindDisconnectedFeaturesInGeometricNetwork_management(
            gdb + "/" + fdNG + "/RoadCenterline", "Roads_Disconnected")
    # Verify-only connectivity check; results go to a per-GDB log file
    # named <gdb name>_Centerline.log next to the GDB.
    StreetLogfile = os.path.join(
        os.path.dirname(currentPathSettings.gdbPath),
        os.path.basename(currentPathSettings.gdbPath)[:-4] + "_Centerline.log")
    VerifyAndRepairGeometricNetworkConnectivity_management(
        geonet, StreetLogfile, "VERIFY_ONLY", "EXHAUSTIVE_CHECK",
        "0, 0, 10000000, 10000000")
def RecordResults(resultType, values, gdb):
    """Insert check-result rows into the appropriate results table.

    resultType selects the table ("template" or "fieldValues"); the
    table is created with its standard fields when missing, and every
    row in values is inserted.
    """
    from os.path import join
    from arcpy import Exists, CreateTable_management, AddField_management
    from arcpy.da import InsertCursor
    if resultType == "template":
        tbl = "TemplateCheckResults"
    elif resultType == "fieldValues":
        tbl = "FieldValuesCheckResults"
    table = join(gdb, tbl)
    fieldList = []
    if not Exists(table):
        # First run: create the table and its fields from the field-info spec.
        CreateTable_management(gdb, tbl)
        for fieldSpec in getAddFieldInfo(table):
            AddField_management(fieldSpec[0], fieldSpec[1], fieldSpec[2],
                                fieldSpec[3], fieldSpec[4], fieldSpec[5])
            fieldList.append(fieldSpec[1])
    if not fieldList:
        # Table already existed; look up its field list instead.
        fieldList = getResultsFieldList(table)
    cursor = InsertCursor(table, fieldList)
    for row in values:
        cursor.insertRow(row)
    del cursor
def calculateMeasuresForLocalRoutes(routesToMeasure, subsetSelectionQuery):
    """Calculate start/end measures for routes matching the query.

    Selects features in routesToMeasure with subsetSelectionQuery; when
    at least one is selected, sets the start measure to 0 and the end
    measure to the shape length in miles (3 decimal places).
    """
    measuresLayer = 'FCAsFeatureLayer_Measures'
    if Exists(measuresLayer):
        Delete_management(measuresLayer)
    MakeFeatureLayer_management(routesToMeasure, measuresLayer)
    SelectLayerByAttribute_management(measuresLayer, 'CLEAR_SELECTION')
    SelectLayerByAttribute_management(measuresLayer, 'NEW_SELECTION',
                                      subsetSelectionQuery)
    selectedCount = int(GetCount_management(measuresLayer).getOutput(0))
    print('There were ' + str(selectedCount) +
          ' features selected in the fcAsFeatureLayerForMeasuring layer.')
    if selectedCount >= 1:
        CalculateField_management(measuresLayer, startMeasure, 0, "PYTHON_9.3")
        # Shape_Length is in feet; convert to miles, rounded to 3 places.
        CalculateField_management(
            measuresLayer, endMeasure,
            'float("{0:.3f}".format(!Shape_Length! / 5280.00))', "PYTHON_9.3")
    else:
        print("Not calculating due to lack of selected features.")
def UpdateLocalFileGDB():
    """Rebuild the dated local KhubRoadCenterlines file GDB from SDE."""
    import datetime, time
    from arcpy import FeatureClassToFeatureClass_conversion, CreateFileGDB_management, Exists, Delete_management
    from KhubCode25.KhubCode25Config import (
        localProProjectPath, localProFileGDBWorkspace, prodDataSourceSDE,
        devDataSourceSDE, dbname, dbownername, countylines, devorprod)
    # Pick the SDE connection that matches the configured environment.
    if devorprod == 'prod':
        database = prodDataSourceSDE
    else:
        database = devDataSourceSDE
    print("running on " + devorprod)
    dateStamp = datetime.datetime.now().strftime("%Y%m%d")
    gdbBaseName = 'KhubRoadCenterlines' + dateStamp
    localfilegdb = localProFileGDBWorkspace + '\\' + gdbBaseName + '.gdb'
    # Replace any GDB already built today.
    if Exists(localfilegdb):
        print(localfilegdb + " exists and will be deleted")
        Delete_management(localfilegdb)
        time.sleep(1)
    CreateFileGDB_management(localProFileGDBWorkspace, gdbBaseName, "CURRENT")
    # Copy each source feature class out of the SDE into the new GDB.
    for sourceFCName in ('All_Road_Centerlines', 'All_Road_Centerlines_D1',
                         'MARKUP_POINT', 'All_Roads_Stitch_Points',
                         'Videolog_CURRENT_LANETRACE',
                         'Videolog_CURRENT_RAMPTRACE', 'HPMS_RAMPS'):
        sourceFCPath = (localProProjectPath + '/' + database + "/" + dbname +
                        "." + dbownername + "." + sourceFCName)
        FeatureClassToFeatureClass_conversion(sourceFCPath, localfilegdb,
                                              sourceFCName)
    FeatureClassToFeatureClass_conversion(
        localProProjectPath + '/' + countylines, localfilegdb,
        "SHARED_COUNTY_LINES")
def RouteCheck(RID): #when running this function, pass the RID/LRS KEY Value into the function to update the desired RID #RID is structured '030C0011800W0' #Class should be L, C, or RM print "what route number should be updated?" #RID = '030C0011800W0' Class = RID[3] if Class in ("R", "M"): Class = "RM" else: pass print Class tablename = Class+RID RIDExp = "RID = '"+RID+"'" print "Updating route "+ str(RID) if Exists("UpdateGISPROD"): pass else: AddTable = Class+"_NON_STATE_EVENTS" MakeTableView_management(r"C:/temp/Nusys_Check.gdb/"+AddTable, tablename+"_view","#") TableToTable_conversion(tablename+"_view", "in_memory", tablename, RIDExp) RecordCount = str(GetCount_management(tablename)) if RecordCount = '0': print "No Records to Calculate"
def RemoveGpHistory_fc(out_xml_dir): remove_gp_history_xslt = r"C:\GIS\metadataremoval\removeGeoprocessingHistory.xslt" print "Trying to remove out_xml_dir/metadtaTempFolder..." if Exists(out_xml_dir): Delete_management(out_xml_dir) else: pass os.mkdir(out_xml_dir) env.workspace = out_xml_dir ClearWorkspaceCache_management() try: print "Starting xml conversion." name_xml = "CMLRS_LAM.xml" #Process: XSLT Transformation XSLTransform_conversion(gdb_93_CMLRS, remove_gp_history_xslt, name_xml, "") print("Completed xml conversion on %s") % (gdb_93_CMLRS) # Process: Metadata Importer MetadataImporter_conversion(name_xml, gdb_93_CMLRS) except: print("Could not complete xml conversion on %s") % (gdb_93_CMLRS) endTime = datetime.datetime.now() ScriptStatusLogging('Cansys_CMLRS_Transfer', 'SharedSDEProd.gdb\SHARED_CANSYS_CMLRS', scriptFailure, startTime, endTime, "Could not complete xml conversion on " + gdb_93_CMLRS, pythonLogTable) # Reraise the error to stop execution and prevent a success message # from being inserted into the table. raise
def execute(self, parameters, messages):
    """Reproject all datasets from one geodatabase into another."""
    from_db = parameters[reproject_from_db].valueAsText
    to_db = parameters[reproject_to_db].valueAsText
    projection = parameters[reproject_projection].valueAsText
    skip_empty = parameters[reproject_skip_empty].valueAsText
    AddMessage('Tool received parameters: {}'.format(', '.join(
        [p.valueAsText for p in parameters])))
    from arcpy import env, Exists
    # The tool UI delivers booleans as the strings 'true'/'false'.
    env.skipEmpty = (skip_empty == 'true')
    # Bail out early when the projection file cannot be found.
    if not Exists(projection):
        AddMessage('Projection file {} does not exist'.format(projection))
        return
    # Setting the output coordinate system is sufficient: outputs are
    # projected automatically as they are written.
    env.skipAttach = True
    env.outputCoordinateSystem = projection
    # Create the datasets, applying the per-layer callback (none here).
    Geodatabase.process_datasets(from_db, to_db, None, None, None)
def preparingSourceCountyData():
    """Prepare the county LRS/ARNOLD source data for route building.

    Copies the source, strips fields not in the configured keep-list,
    splits out state routes / rows with null route ids and rows with
    null measures, then defaults remaining null measures (from = 0,
    to = shape length in miles).
    """
    print("Starting the preparingSourceCountyData function!")
    # Remove the previous output copy, if any.
    if Exists(preRouteSourceCRML):
        try:
            Delete_management(preRouteSourceCRML)
        except:
            print("Could not delete the features located at: " +
                  str(preRouteSourceCRML) + ".")
    else:
        pass
    # Make a copy
    CopyFeatures_management(routesSourceCountyLRSArnold, preRouteSourceCRML)
    # Remove unnecessary fields
    preRouteSourceCRMLDescription = Describe(preRouteSourceCRML)
    preRouteSourceCRMLOIDFieldName = preRouteSourceCRMLDescription.OIDFieldName
    preRouteSourceCRMLShapeFieldName = preRouteSourceCRMLDescription.shapeFieldName
    # OID and shape fields must always survive the field cleanup.
    preRouteSourceCRMLShapeAndOIDFieldNames = [
        preRouteSourceCRMLOIDFieldName, preRouteSourceCRMLShapeFieldName
    ]
    preRouteSourceCRMLFieldObjectsList = ListFields(preRouteSourceCRML)
    preRouteSourceFieldNames = [
        x.name for x in preRouteSourceCRMLFieldObjectsList
    ]
    # Keep configured fields (module-level preRouteSourceCRMLFields) plus
    # OID/shape; everything else is deleted.
    fieldNamesToKeep = [
        y for y in preRouteSourceFieldNames if y in preRouteSourceCRMLFields
        or y in preRouteSourceCRMLShapeAndOIDFieldNames
    ]
    fieldNamesToRemove = [
        z for z in preRouteSourceFieldNames if z not in fieldNamesToKeep
    ]
    for fieldNameItem in fieldNamesToRemove:
        DeleteField_management(preRouteSourceCRML, fieldNameItem)
    print("Done deleting unnecessary fields.")
    MakeFeatureLayer_management(preRouteSourceCRML, fcAsFeatureLayerLG)
    # State routes and rows without a route id are copied aside, then removed.
    selectionQueryL1 = """ SourceRouteId IS NULL OR LRS_ROUTE_PREFIX IN ('I', 'U', 'K') """
    SelectLayerByAttribute(fcAsFeatureLayerLG, NEW_SELECTION_CONST,
                           selectionQueryL1)
    CopyFeatures_management(fcAsFeatureLayerLG, stateRoutesAndNullRouteIDs)
    DeleteRows_management(fcAsFeatureLayerLG)
    # Rows missing either measure are copied aside for review (not removed).
    selectionQueryL2 = """ SourceFromMeasure IS NULL OR SourceToMeasure IS NULL """
    SelectLayerByAttribute(fcAsFeatureLayerLG, NEW_SELECTION_CONST,
                           selectionQueryL2)
    CopyFeatures_management(fcAsFeatureLayerLG, preRouteSourceNoMeasures)
    # Default any null from-measure to 0.
    selectionQueryL3 = """ SourceFromMeasure IS NULL """
    SelectLayerByAttribute(fcAsFeatureLayerLG, NEW_SELECTION_CONST,
                           selectionQueryL3)
    CalculateField_management(fcAsFeatureLayerLG, "SourceFromMeasure", "0",
                              PYTHON_9_3_CONST)
    # Default any null to-measure to the feature's length in miles.
    selectionQueryL4 = """ SourceToMeasure IS NULL """
    SelectLayerByAttribute(fcAsFeatureLayerLG, NEW_SELECTION_CONST,
                           selectionQueryL4)
    CalculateField_management(fcAsFeatureLayerLG, "SourceToMeasure",
                              "!SHAPE.LENGTH@MILES!", PYTHON_9_3_CONST)
def copyfromstaged(lyrlist, admin, fdset, fcoutpath): for lyr in lyrlist: print(fcoutpath + admin + '.' + lyr) if Exists(fcoutpath + admin + '.' + lyr): DeleteFeatures_management(fcoutpath + admin + '.' + lyr) Append_management(ws + "/" + tempgdb + '/' + lyr, fcoutpath + admin + '.' + lyr, "NO_TEST", "#") print "updated " + lyr + " in " + fcoutpath else: FeatureClassToFeatureClass_conversion( ws + "/" + tempgdb + '/' + lyr, fcoutpath, lyr) print "copied new " + lyr + " feature class to " + fcoutpath print " Check roles and privleges on this " + lyr + " at " + fcoutpath try: CalculateField_management(fcoutpath + admin + '.' + lyr, "NETWORKDATE", "datetime.datetime.now( )", "PYTHON_9.3", "#") print "copy date field updated" except: AddField_management(fcoutpath + admin + '.' + lyr, "NETWORKDATE", "DATE") CalculateField_management(fcoutpath + admin + '.' + lyr, "NETWORKDATE", "datetime.datetime.now( )", "PYTHON_9.3", "#") print "copy date field added and updated" pass return
def RemoveGpHistory_fc(out_xml_dir):
    """Strip geoprocessing history from the KanDrive conditions metadata.

    Exports the dataset's metadata through a history-removal XSLT into a
    scratch folder, then imports the cleaned record back.  On failure,
    logs to the script-status table and re-raises.
    """
    remove_gp_history_xslt = r"D:\kandrive\harvesters\scheduled-tasks\metadataremoval\removeGeoprocessingHistory.xslt"
    print "Trying to remove out_xml_dir/metadtaTempFolder..."
    # Recreate the scratch folder used for the exported XML.
    if Exists(out_xml_dir):
        Delete_management(out_xml_dir)
    else:
        pass
    os.mkdir(out_xml_dir)
    env.workspace = out_xml_dir
    ClearWorkspaceCache_management()
    try:
        print "Starting xml conversion."
        name_xml = "RCRS_LAM.xml"
        #Process: XSLT Transformation
        XSLTransform_conversion(kanDriveSpatialConditions,
                                remove_gp_history_xslt, name_xml, "")
        # Python 2 print statement: the %-formatting binds to the string
        # expression before printing.
        print("Completed xml conversion on %s") % (kanDriveSpatialConditions)
        # Process: Metadata Importer
        MetadataImporter_conversion(name_xml, kanDriveSpatialConditions)
    except:
        print("Could not complete xml conversion on %s") % (
            kanDriveSpatialConditions)
        endTime = datetime.datetime.now()
        ScriptStatusLogging(
            'KanDrive_Spatial_Conditions_Update',
            'kandrive_spatial.DBO.Conditions', scriptFailure, startTime,
            endTime,
            "Could not complete xml conversion on " + kanDriveSpatialConditions,
            pythonLogTable)
        # Reraise the error to stop execution and prevent a success message
        # from being inserted into the table.
        raise
def setupQCGDB():
    """Recreate the QC file geodatabase from scratch."""
    print("Setting up the QC GDB.")
    # Drop the previous QC GDB, if one exists, then create a fresh one.
    if Exists(errorFeaturesQCGDB):
        Delete_management(errorFeaturesQCGDB)
    CreateFileGDB_management(mainFolder, errorFeaturesQCGDBName)
def Maintenance():
    """Re-reference maintenance agreement events onto the CCL LRS.

    Builds county-LRS maintenance events from the maintenance table and
    locates them along the city connecting link routes, replacing the
    previous MAINTENANCE_CCL output.
    """
    print "reference maintenance agreement table"
    MakeTableView_management(maintenance, "Maint_tview")
    # County-LRS events from the maintenance table.
    MakeRouteEventLayer_lr(cntyroutelyr,"LRS_KEY", "Maint_tview","LRSKEY LINE BEGMILEPOST END_MP","Maintenance_Events_CNTY","#","ERROR_FIELD","ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT")
    # Replace the previous CCL-referenced maintenance table, if present.
    if Exists(connection1+"MAINTENANCE_CCL"):
        Delete_management(connection1+"MAINTENANCE_CCL")
    LocateFeaturesAlongRoutes_lr("Maintenance_Events_CNTY",connection1+"CCL_LRS_ROUTE",NewRouteKey,"1 Feet",connection1+"MAINTENANCE_CCL","CCL_LRS LINE CCL_BEGIN CCL_END","ALL","DISTANCE","ZERO","FIELDS","M_DIRECTON")
    print "show lane classification referenced to city connecting link LRS"
def _write_triangulation_output(polys, out_fc, xtent):
    """Write polys to out_fc, clipping to xtent when one is given.

    Helper extracted from _tri_tool: both the Delaunay and Voronoi
    branches previously duplicated this delete/copy/clip sequence.
    """
    if Exists(out_fc):
        Delete(out_fc)
    CopyFeatures(polys, "in_memory/temp")
    MakeFeatureLayer("in_memory/temp", "temp")
    if xtent not in ("", None):
        Clip("temp", xtent, out_fc, None)
    else:
        CopyFeatures("temp", out_fc)


def _tri_tool():
    """Triangulation for tool.

    Reads the input feature class, Delaunay/Voronoi switch, output path
    and optional clip extent from sys.argv, builds the triangulation
    polygons, and writes them to the output (clipped when an extent is
    supplied).
    """
    in_fc = sys.argv[1]
    tri_type = sys.argv[2]
    out_fc = sys.argv[3]
    xtent = sys.argv[4]
    desc = Describe(in_fc)
    # NOTE(review): dict-style access on a Describe result — confirm this
    # arcpy version supports it (attribute access is the classic form).
    SR = desc['spatialReference']
    flds = ['SHAPE@X', 'SHAPE@Y']
    allpnts = False
    z = FeatureClassToNumPyArray(in_fc, flds, "", SR, allpnts)
    # Repack the XY columns into a plain (n, 2) float64 array.
    a = np.zeros((z.shape[0], 2), dtype='<f8')
    a[:, 0] = z['SHAPE@X']
    a[:, 1] = z['SHAPE@Y']
    #
    if tri_type == 'Delaunay':
        tweet("Delaunay... clip extent {}".format(xtent))
        t = tri_pnts(a, True)  # must be a list of list of points
        polys = poly(t, SR)
    else:
        tweet("Voronoi... clip extent {}".format(xtent))
        # Surround the points with a distant ring so every cell closes.
        c = infinity_circle(a, fac=10)
        aa = np.vstack((a, c))
        v = vor_pnts(aa, testing=False)
        polys = poly([v], SR)
    _write_triangulation_output(polys, out_fc, xtent)