def LoadMonthlyStats(ShapeFileDate):
    # Append one month's KTRIPS summary rows (and the matching intersection
    # points) to the enterprise monthly-sum tables, but only when rows from
    # this source file have not already been loaded.
    # ShapeFileDate: source file name; the slicing below assumes a fixed
    # layout like "<12 chars>MM-DD-YYYY.ext" -- TODO confirm against callers.
    env.overwriteOutput = 1
    # e.g. "MM-DD-YYYY" -> "MMDDYYYY"
    SourceFileFGB = str(ShapeFileDate[12:-4].replace("-", ""))
    # reorder to year+month ("YYYYMM") -- presumably chars 2:6 are the year
    # and 0:2 the month; verify against an actual file name.
    SourceFileINT = SourceFileFGB[2:6] + SourceFileFGB[0:2]
    infileMonthly = "in_memory\LRS" + SourceFileFGB
    IntersectionMonthly = "in_memory\IntptKTRIPS" + SourceFileINT
    # full name with dashes swapped for underscores; this is the value the
    # enterprise table stores in its SourceFile field.
    SourceFileTxt = str(ShapeFileDate[:-4].replace("-", "_"))
    MakeTableView_management(infileMonthly, "AppendCheckMo", "#", "#", "#")
    #Config this
    enterpriseDBMonthly = gdb + "\KTRIPS.SDE.KTRIPS_MonthlySum"
    #enterpriseDBMonthly = gdb+r"\INTERMODAL.DBO.KTRIPS_MonthlySum"
    #inputfc = r"C:\input.shp"
    outputView = "AppendCheckMonthly"
    fieldname = "SourceFile"
    fieldvalue = SourceFileTxt
    # BuildWhereClause is a module-level helper defined elsewhere in this file.
    whereclause = str(
        BuildWhereClause(enterpriseDBMonthly, fieldname, fieldvalue))
    MakeTableView_management(enterpriseDBMonthly, outputView, whereclause)
    # GetCount_management returns a Result object; str() yields the count text.
    recordsTest = str(GetCount_management(outputView))
    if recordsTest == '0':
        # No rows with this SourceFile yet -> safe to append both datasets.
        print recordsTest + " of these records existed and will be appended right now"
        Append_management(infileMonthly, enterpriseDBMonthly,
                          schema_type="NO_TEST", field_mapping="#", subtype="")
        # The intersection points go to the companion "...Intr" table.
        Append_management(IntersectionMonthly, enterpriseDBMonthly + "Intr",
                          schema_type="NO_TEST", field_mapping="#", subtype="")
    else:
        print recordsTest + " records already have been appended"
def MakeRouteLayers(OpEnvironmentMode):
    # Truncate-and-reload the CANSYS node tables (CRND/SRND/NSND) in the
    # enterprise geodatabase, then dynamically segment the MAP_EXTRACT event
    # table against the CRND and SRND routes and reload the CMLRS and SMLRS
    # layers.  OpEnvironmentMode is accepted but never read here -- all
    # configuration comes from EXOR_GIS_CONFIG.OpEnvironment; presumably the
    # parameter is vestigial -- TODO confirm.
    from EXOR_GIS_CONFIG import OpEnvironment
    OpRunIn = OpEnvironment.OpRunInRoutes  # @UndefinedVariable
    OpRunOut = OpEnvironment.OpRunOut  # @UndefinedVariable
    #adm=OpEnvironment.adm  # @UndefinedVariable
    Owner = OpEnvironment.Owner  # @UndefinedVariable
    DB = OpEnvironment.DB  # @UndefinedVariable
    env.workspace = OpRunIn
    env.overwriteOutput = True
    print OpRunIn
    #combine the connection, db, and owner to the destination path for
    #enterprise geodatabase output
    OpRunFullOut = OpRunOut + r"/" + DB + "." + Owner + "."
    print "Updating CRND"
    #add the Map Extract Event Table limited to primary direction into memory
    TruncateTable_management(OpRunFullOut + "CRND")
    Append_management("CRND", OpRunFullOut + "CRND", "NO_TEST")
    print "Updating SRND"
    TruncateTable_management(OpRunFullOut + "SRND")
    Append_management("SRND", OpRunFullOut + "SRND", "NO_TEST")
    print "Updating NSND"
    TruncateTable_management(OpRunFullOut + "NSND")
    Append_management("NSND", OpRunFullOut + "NSND", "NO_TEST")
    # NOTE(review): GetCount_management returns a Result object, not an int;
    # under Python 2 "Result > 0" compares by type and is effectively always
    # true -- confirm whether int(...getOutput(0)) was intended.
    if GetCount_management("MAP_EXTRACT") > 0:
        # "DIRECTION < 3" keeps the primary-direction events only -- presumably.
        MakeTableView_management("MAP_EXTRACT", "V_MV_MAP_EXTRACT",
                                 "DIRECTION < 3")
        #Add the CRND CANSYS route layer, dynseg the event table,
        #truncate and load to CMLRS
        MakeFeatureLayer_management("CRND", "CRND")
        MakeRouteEventLayer_lr("CRND", "NE_UNIQUE", "MAP_EXTRACT", "NQR_DESCRIPTION LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "CMLRS1", "", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT")
        try:
            print "truncation and appending the CMLRS"
            TruncateTable_management(OpRunFullOut + "CMLRS")
            Append_management("CMLRS1", OpRunFullOut + "CMLRS", "NO_TEST")
        except:
            # Fall back to a full overwrite when truncate fails
            # (e.g. a schema lock on the target).
            print "could not truncate, overwriting CMLRS"
            FeatureClassToFeatureClass_conversion("CMLRS1", OpRunOut, "CMLRS", "#", "#", "#")
        #except:
        #    print "could not update the CMLRS"
        MakeFeatureLayer_management("SRND", "SRND")
        MakeRouteEventLayer_lr("SRND", "NE_UNIQUE", "MAP_EXTRACT", "STATE_NQR_DESCRIPTION LINE BEG_STATE_LOGMILE END_STATE_LOGMILE", out_layer="SMLRS1", offset_field="", add_error_field="ERROR_FIELD", add_angle_field="NO_ANGLE_FIELD", angle_type="NORMAL", complement_angle="ANGLE", offset_direction="LEFT", point_event_type="POINT")
        try:
            print "truncation and appending the SMLRS"
            TruncateTable_management(OpRunFullOut + "SMLRS")
            Append_management("SMLRS1", OpRunFullOut + "SMLRS", "NO_TEST")
        except:
            print "could not truncate, overwriting SMLRS"
            FeatureClassToFeatureClass_conversion("SMLRS1", OpRunOut, "SMLRS", "#", "#", "#")
        #except:
        #    print "could not update the SMLRS"
        print "Route Layers Updated"
    else:
        print "the map extract is unreliable and was not exported"
def TruncateAndAppend(mxd, TargetLT, TargetST): lyrs = mapping.ListLayers(mxd) print "Updating data for " +str(lyrs[0]) TruncateTable_management(TargetST) Append_management(lyrs[0], TargetST, "NO_TEST", "#") print "Updating data for " +str(lyrs[1]) TruncateTable_management(TargetLT) Append_management(lyrs[1], TargetLT, "NO_TEST", "#")
def UpdateSniceBoundaries():
    # Refresh the shared SNICE area and district boundary tables from their
    # in_memory staging copies: truncate both targets, then reload them.
    # The sub-area table is intentionally left untouched for now.
    from arcpy import TruncateTable_management, Append_management
    subarea = r'Database Connections\GISPROD_Shared.sde\SHARED.KDOT_SNICE_SUBAREA'
    area = r'Database Connections\GISPROD_Shared.sde\SHARED.KDOT_SNICE_AREAS'
    district = r'Database Connections\GISPROD_Shared.sde\SHARED.KDOT_SNICE_Districts'
    #TruncateTable_management(subarea)
    #Append_management("in_memory/KDOT_SNICE_SubArea", subarea, "NO_TEST")
    for boundaryTarget in (area, district):
        TruncateTable_management(boundaryTarget)
    Append_management("in_memory/KDOT_SNICE_Area", area, "NO_TEST")
    Append_management("in_memory/KDOT_SNICE_District", district, "NO_TEST")
def LoadARegion(): from arcpy import Append_management, ListFeatureClasses, ListDatasets, env, ListTables #importGDB = r"//gisdata/planning/Cart/projects/Conflation/GIS_DATA/GEO_COMM/REGION3_20151002/REGION3_20151002.gdb" #importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION4_20151021\REGION4_20151021\REGION4_20151021.gdb" importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION5_20151211\REGION5_20151211.gdb" LoadTarget = r"\\gisdata\planning\Cart\projects\Conflation\Workflow\conflation_sqlgis_geo.sde\Conflation.GEO." env.workspace = importGDB ### There are no tables in the conflated dataset products - handle similarly and separately skiplist = [ 'Stitch_Lines', 'RoadCenterlines', 'Overlaps_Gaps_MunicipalBoundary', 'Overlaps_Gaps_FIRE', 'Overlaps_Gaps_LAW', 'Overlaps_Gaps_PSAP', 'Overlaps_Gaps_ESZ', 'Overlaps_Gaps_EMS', 'Overlaps_Gaps_CountyBoundary', 'Overlaps_Gaps_AuthoritativeBoundary' ] tables = ListTables() for table in tables: print table target = LoadTarget + table print target datasets = ListDatasets("*") for fd in datasets: print fd featureClasses = ListFeatureClasses("*", "All", fd) for fc in featureClasses: print fc if fc in skiplist: print 'skipping' else: target = LoadTarget + fd + "/" + fc print "loading to " + target Append_management(fc, target, schema_type="NO_TEST")
def AddInsert(fc, layer_name, newtbl, workspace):
    # Insert newly-reported crossing rows into the crossings feature class.
    # fc: target feature class; layer_name: name to give its feature layer;
    # newtbl: source table holding the new rows' lat/long columns;
    # workspace: workspace used for the edit session that stamps LOADDATE.
    # Skips all work when the source table is empty.
    MakeTableView_management(newtbl, "NEWROWS_View", "#", "#", "#")
    addcount = int(GetCount_management("NEWROWS_View").getOutput(0))
    if addcount == 0:
        print "no new records"
        pass
    else:
        MakeFeatureLayer_management(fc, layer_name)
        # Plot the new rows as XY points (WGS84 horizontal + NAVD88 vertical).
        MakeXYEventLayer_management(
            "NEWROWS_View", "CROSSINGLONGITUDE", "CROSSINGLATITUDE",
            "NEWROWS_Layer",
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],VERTCS['NAVD_1988',VDATUM['North_American_Vertical_Datum_1988'],PARAMETER['Vertical_Shift',0.0],PARAMETER['Direction',1.0],UNIT['Meter',1.0]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
            "#")
        # Persist the event layer to a temp shapefile.  Shapefile field names
        # are capped at 10 characters, hence CROSSINGLA/CROSSINGLO here.
        FeatureClassToFeatureClass_conversion(
            "NEWROWS_Layer", "D:/Temp", "LOADTHIS1.shp", "#",
            """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGID,-1,-1;CROSSINGLA "CROSSINGLA" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLATITUDE,-1,-1;CROSSINGLO "CROSSINGLO" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLONGITUDE,-1,-1;CROSSINGTY "CROSSINGTY" true true false 2 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGTYPE,-1,-1""",
            "#")
        # Map the truncated shapefile names back onto the full target fields.
        Append_management(
            "D:/Temp/LOADTHIS1.shp", layer_name, "NO_TEST",
            """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGID,-1,-1;CROSSINGLATITUDE "CROSSINGLATITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGLA,-1,-1;CROSSINGLONGITUDE "CROSSINGLONGITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGLO,-1,-1;CROSSINGTYPE "CROSSINGTYPE" true true false 2 Text 0 0 ,First,#,D:/Temp/LOADTHIS1.shp,CROSSINGTY,-1,-1;LOADDATE "LOADDATE" true true false 36 Date 0 0 ,First,#""",
            "#")
        Delete_management("D:/Temp/LOADTHIS1.shp", "#")
        updatelyr = layer_name + "new"
        # Rows just appended are the only ones without a LOADDATE yet.
        MakeFeatureLayer_management(layer_name, updatelyr, "LOADDATE IS NULL")
        # Stamp LOADDATE inside an edit session on the target workspace.
        with da.Editor(workspace) as edit:
            CalculateField_management(updatelyr, "LOADDATE", "datetime.datetime.now( )", "PYTHON_9.3", "#")
        del fc, layer_name, newtbl, workspace, updatelyr
        print "new rows inserted into Static_Crossings"
def appender_DWBI_OLD(ShapeFileDate): print "appending the modeled data" env.workspace = repo filename1 = r"DWBI_SEGS" #C:\Workspace\pydot\sde_connections_10.3\sde@KTRIPS_sqlgiprod.sde\KTRIPS.SDE.KTRIPS_ROUTES enterprisedbRoutes = gdb + r"\KTRIPS.SDE.KTRIPS_ROUTE_Segments" print enterprisedbRoutes #Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#") if Exists(filename1): MakeTableView_management(filename1, "AppendCheck", "#", "#", "#") AddJoin_management("AppendCheck", "PRMT_ID", enterprisedbRoutes, "PRMT_ID", join_type="KEEP_COMMON") recordsTest = str(GetCount_management("AppendCheck")) RemoveJoin_management("AppendCheck") if recordsTest == '0': print recordsTest + " of these records exist, appending now" Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#") else: print recordsTest + " records already have been appended" else: print "there was a problem, " + str(filename1) + " could not be found" pass
def AnnualizeData(YearToAnnualize): annualLayer = gdb + "\KTRIPS.SDE.Ktrips_Annual" currentyYear = gdb + "\KTRIPS.SDE.Ktrips_CurrentYear" SelectYear = YearSelTest CalcYear = str(int(YearSelTest - 1)) YearSelTest = "TripYear = '" + SelectYear + "'" if Exists("Check1"): Delete_management("Check1") MakeFeatureLayer_management(annualLayer, "Check1", YearSelTest) CheckExistence = GetCount_management("Check1") print CheckExistence if int(str(CheckExistence)) > 0: print "This source file info is already in the target feature" runnext = False elif int(str(CheckExistence)) == 0: print 'the information is new for this source file and will be added.' runnext = True Append_management(currentyYear, annualLayer, "NO_TEST", "#") CalculateField_management(annualLayer, "TripYear", CalcYear, "PYTHON_9.3") TruncateTable_management(currentyYear) else: print 'something isnt working here' print runnext pass
def copyfromstaged(lyrlist, admin, fdset, fcoutpath): for lyr in lyrlist: print(fcoutpath + admin + '.' + lyr) if Exists(fcoutpath + admin + '.' + lyr): DeleteFeatures_management(fcoutpath + admin + '.' + lyr) Append_management(ws + "/" + tempgdb + '/' + lyr, fcoutpath + admin + '.' + lyr, "NO_TEST", "#") print "updated " + lyr + " in " + fcoutpath else: FeatureClassToFeatureClass_conversion( ws + "/" + tempgdb + '/' + lyr, fcoutpath, lyr) print "copied new " + lyr + " feature class to " + fcoutpath print " Check roles and privleges on this " + lyr + " at " + fcoutpath try: CalculateField_management(fcoutpath + admin + '.' + lyr, "NETWORKDATE", "datetime.datetime.now( )", "PYTHON_9.3", "#") print "copy date field updated" except: AddField_management(fcoutpath + admin + '.' + lyr, "NETWORKDATE", "DATE") CalculateField_management(fcoutpath + admin + '.' + lyr, "NETWORKDATE", "datetime.datetime.now( )", "PYTHON_9.3", "#") print "copy date field added and updated" pass return
def LoadFinalStreets(inFile, inFeatureclass, inTable):
    # Aggregate finished county file geodatabases into the enterprise GDB:
    # for every file GDB matching inFile found in the "final" directory,
    # append each table matching inTable (alias tables) and each polyline
    # feature class matching inFeatureclass (centerlines).
    # inFile: workspace name wildcard; inFeatureclass: feature class
    # wildcard; inTable: table wildcard.
    LoadThis = inFeatureclass
    FromThis = inFile
    LoadTable = inTable
    # Owner-qualified target prefix, e.g. <EntDB>/<EDBName>.<EDBO>
    targetpath = currentPathSettings.EntDB + '/' + currentPathSettings.EDBName + '.' + currentPathSettings.EDBO
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    #list the file geodatabases in the final directory
    workspace = ListWorkspaces(FromThis, "FileGDB")
    for ws in workspace:
        print ws
        env.workspace = ws
        #print the tables in the geodatabase
        tablelist = ListTables(LoadTable)
        print "tables"
        for table in tablelist:
            print " " + table
            print "loading " + ws + "/" + table + " to " + targetpath + table
            #CalledUpon(ws)
            #Try to load/append the rows in the alias table to the
            #aggregated geodatabase
            try:
                Append_management(ws + "/" + table, targetpath + "." + table, "NO_TEST", "#")
            except:
                print 'there was a problem loading alias table for' + ws
        #print the feature classes stored in feature datasets
        Datasets = ListDatasets("*")
        for fd in Datasets:
            #print fd
            #print "feature classes - Polygon"
            #FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            #for fc in FCListPoly:
            #    print " "+fc
            #print "feature classes - Lines"
            FCListLine = ListFeatureClasses(LoadThis, "Polyline", fd)
            for fc in FCListLine:
                #print " "+fc
                print "loading " + ws + "/" + fc + " to " + targetpath + '.' + currentPathSettings.EFD + "." + fc
                try:
                    Append_management(
                        fc, targetpath + '.' + currentPathSettings.EFD + "/" + fc,
                        "NO_TEST", "#")
                except:
                    print 'there was a problem loading centerlines for' + ws
def RouteMaker():
    # Build county-keyed routes twice -- once from the unsplit state-system
    # lines into the CountyRoute feature class, once from the unsplit
    # classification lines into memory -- then append the in-memory result
    # onto CountyRoute.
    from arcpy import CreateRoutes_lr, Append_management
    routeJobs = (
        (SSoutput + "Unsplit", RouteOutput + "CountyRoute"),
        (ClassOutput + "Unsplit", "in_memory/RMCRoute"),
    )
    for sourceLines, routeTarget in routeJobs:
        CreateRoutes_lr(sourceLines, "CountyKey1", routeTarget, "TWO_FIELDS",
                        "MIN_F_CNTY_2", "MAX_T_CNTY_2", "UPPER_LEFT", "1",
                        "0", "IGNORE", "INDEX")
    Append_management("RMCRoute", RouteOutput + "CountyRoute", "TEST", "", "")
def AppendIt(): for fc in KSdb_fc_list: fcappendfrom = source + fd + r'/' + fc fcappendto = final + fd2 + r'/' + fc tempfc = temp + r'/' + fc if Exists(tempfc): Delete_management(tempfc) else: pass FeatureClassToFeatureClass_conversion(fcappendfrom, temp, fc) Append_management(tempfc, fcappendto, "NO_TEST", "#", "#") print "appended " + str(fc) for tbl in KSdb_Tbl_list: tblappendfrom = source + r'/' + tbl tblappendto = final + tbl Append_management(tblappendfrom, tblappendto, "NO_TEST", "#", "#") print "appended " + str(tbl)
def AppendAddedRows():
    # Load the located FMIS event rows into the FMIS_LOAD table, remapping
    # the event-table field names onto the target schema.
    #separating append into a separate function may be a good idea,
    #but now the function doesn't know what the 'FMIS event table' is
    #altered the append to incorporate the SYSTEM CODE, we are now appending
    #the NHS_VN code to the table as NHS_VN, derive SYSTEM_CODE in Oracle
    Append_management(
        "FIMS_EventTable", FMIS_LOAD, "NO_TEST",
        """ ROUTE_ID "ROUTE_ID" true true false 14 Text 0 0 ,First,#,FIMS_EventTable,RID,-1,-1; BEG_CNTY_MP "BEG_CNTY_MP" true true false 8 Double 10 38 ,First,#,FIMS_EventTable,CNTY_BEG,-1,-1; END_CNTY_MP "END_CNTY_MP" true true false 8 Double 10 38 ,First,#,FIMS_EventTable,CNTY_END,-1,-1; CONGRESSIONAL_DISTRICT "CONGRESSIONAL_DISTRICT" true true false 50 Text 0 0 ,First,#,FIMS_EventTable,DISTRICT_1,-1,-1; URBAN_ID "URBAN_ID" true true false 10 Text 0 0 ,First,#,FIMS_EventTable,ID_1,-1,-1; FUN_CLASS "FUN_CLASS" true true false 3 Text 0 0 ,First,#,FIMS_EventTable,F_SYSTEM_V,-1,-1; NHS_VN "NHS_VN" true true false 10 Text 0 0 ,First,#,FIMS_EventTable,NHS_VN,-1,-1; PROJECT_NUMBER "PROJECT_NUMBER" true true false 15 Text 0 0 ,First,#,FIMS_EventTable,PROJECT_ID,-1,-1; COUNTY "COUNTY" true true false 3 Text 0 0 ,First,#,FIMS_EventTable,COUNTY_NUMBER,-1,-1""",
        "#")
def XYFC(source, dst, Lat, Long, GCS, loaded): if Exists("FCtbl"): Delete_management("FCtbl") else: pass if Exists("FC_Layer"): Delete_management("FC_Layer") else: pass print "start XYFC "+ str(datetime.datetime.now()) MakeTableView_management(source, 'FCtbl', "#", "#", "") MakeXYEventLayer_management("FCtbl",Long, Lat,"FC_Layer", GCS,"#") TruncateTable_management(dst) Append_management("FC_Layer",dst,"NO_TEST","#","#") CalculateField_management(dst, loaded,"datetime.datetime.now( )","PYTHON_9.3","#") print "XYFC complete for " +str(dst)+ " at " + str(datetime.datetime.now())
def AnnualStats(ShapeFileDate): env.overwriteOutput = 1 #SourceFileTxt = str(ShapeFileDate[12:-4].replace("-", "")) #infileMonthly = yeardb+r"\Kansas\LRS"+SourceFileTxt qyear = ShapeFileDate[16:20] #try: # Delete_management(yeardb+"/KTRIPS_MonthlySum_Statistics") # Delete_management(yeardb+"/RunningTotal") #except: # print "nothing deleted" #sumfile = gdb+"\INTERMODAL.DBO.KTRIPS_MonthlySum" sumfile = gdb + "\KTRIPS.SDE.KTRIPS_MonthlySum" whereclause = str(BuildWhereClauseLike(sumfile, "SourceFile", qyear)) if Exists("ThisYearMonthly"): Delete_management("ThisYearMonthly") MakeFeatureLayer_management(sumfile, "ThisYearMonthly", whereclause, "#", "#") Statistics_analysis("ThisYearMonthly", "in_memory/KTRIPS_MonthlySum_Statistics", "Join_Count SUM;Tonnage SUM", "LRS_KEY;BEG_CNTY_LOGMILE;END_CNTY_LOGMILE") AddField_management("in_memory/KTRIPS_MonthlySum_Statistics", "TonMiles", "DOUBLE", "#", "#", "#", "#", "NULLABLE", "NON_REQUIRED", "#") CalculateField_management( "in_memory/KTRIPS_MonthlySum_Statistics", "TonMiles", "!SUM_Tonnage! /(!END_CNTY_LOGMILE! - !BEG_CNTY_LOGMILE!)", "PYTHON_9.3", "#") if Exists("KTRIPS_RunningTotal_CurentYear"): Delete_management("KTRIPS_RunningTotal_CurentYear") MakeRouteEventLayer_lr(cansys, "LRS_KEY", "in_memory/KTRIPS_MonthlySum_Statistics", "LRS_KEY LINE BEG_CNTY_LOGMILE END_CNTY_LOGMILE", "KTRIPS_RunningTotal_CurentYear", "#", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT") CurrentYrStat = gdb + "\KTRIPS.SDE.Ktrips_CurrentYear" TruncateTable_management(CurrentYrStat) Append_management("KTRIPS_RunningTotal_CurentYear", CurrentYrStat, "NO_TEST", "#", "") print "annual Stats have been recalculated from the latest Monthly Statistics" Delete_management("in_memory/KTRIPS_MonthlySum_Statistics")
def AddInsert(fc, layer_name, newtbl):
    # Insert new crossing rows from the lat/long table `newtbl` into the
    # crossing feature class `fc` by way of a temp shapefile.
    # NOTE(review): unlike the 4-argument AddInsert variant in this file,
    # this version does not skip when there are zero new rows and does not
    # stamp LOADDATE afterwards -- confirm which variant callers expect,
    # since the later definition shadows the earlier one at module level.
    MakeFeatureLayer_management(fc, layer_name)
    MakeTableView_management(newtbl, "NEWROWS_View", "#", "#", "#")
    # Plot the new rows as XY points (WGS84 horizontal + NAVD88 vertical).
    MakeXYEventLayer_management(
        "NEWROWS_View", "CROSSINGLONGITUDE", "CROSSINGLATITUDE",
        "NEWROWS_Layer",
        "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],VERTCS['NAVD_1988',VDATUM['North_American_Vertical_Datum_1988'],PARAMETER['Vertical_Shift',0.0],PARAMETER['Direction',1.0],UNIT['Meter',1.0]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;0.001;0.001;IsHighPrecision",
        "#")
    # Persist the event layer to a temp shapefile.  Shapefile field names
    # are capped at 10 characters, hence CROSSINGLA/CROSSINGLO here.
    FeatureClassToFeatureClass_conversion(
        "NEWROWS_Layer", "D:/Temp", "LOADTHIS.shp", "#",
        """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGID,-1,-1;CROSSINGLA "CROSSINGLA" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLATITUDE,-1,-1;CROSSINGLO "CROSSINGLO" true true false 8 Double 10 38 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGLONGITUDE,-1,-1;CROSSINGTY "CROSSINGTY" true true false 2 Text 0 0 ,First,#,Database Connections/sdedev_ciims.sde/CIIMS.NEWROWS_Features,CROSSINGTYPE,-1,-1""",
        "#")
    # Map the truncated shapefile names back onto the full target fields.
    Append_management(
        "D:/Temp/LOADTHIS.shp", layer_name, "NO_TEST",
        """CROSSINGID "CROSSINGID" true false false 30 Text 0 0 ,First,#,D:/Temp/LOADTHIS.shp,CROSSINGID,-1,-1;CROSSINGLATITUDE "CROSSINGLATITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS.shp,CROSSINGLA,-1,-1;CROSSINGLONGITUDE "CROSSINGLONGITUDE" true true false 8 Double 10 38 ,First,#,D:/Temp/LOADTHIS.shp,CROSSINGLO,-1,-1;CROSSINGTYPE "CROSSINGTYPE" true true false 2 Text 0 0 ,First,#,D:/Temp/LOADTHIS.shp,CROSSINGTY,-1,-1;LOADDATE "LOADDATE" true true false 36 Date 0 0 ,First,#""",
        "#")
    Delete_management("D:/Temp/LOADTHIS.shp", "#")
    del fc, layer_name, newtbl
    print "new rows inserted into Static_Crossings"
def LoadAliasTables(): from arcpy import Append_management, env, ListTables, ListWorkspaces, CalculateField_management importFolder = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\R1\Alias3" LoadTarget = r"Database Connections\Conflation2012_sde.sde\Conflation.SDE.RoadAlias" env.workspace = importFolder GDBList = [] for gdb in ListWorkspaces("*", "FileGDB"): GDBList.append(gdb) for geodatabase in GDBList: env.workspace = geodatabase tables = ListTables("RoadAlias") for table in tables: print table CalculateField_management(table, "SEGID", expression="""[STEWARD]&" "& [SEGID]""", expression_type="VB", code_block="") Append_management(table, LoadTarget, schema_type="NO_TEST") print geodatabase
def CityLimitsMod_OLD():
    # Older SDEPROD variant of CityLimitsMod: rebuild GIS.CITY_LIMITS_1 from
    # the KDOR tax polygons plus the KDOT ADD/SUBTRACT modification shapes,
    # bouncing SDEPROD connections before the temp overwrite, then derive
    # the city outline lines.
    env.overwriteOutput = True
    env.workspace = mainFeaturesLocation
    modfile = r'Database Connections\SDEPROD_GIS.sde\GIS.CITY_LIMITS_MODS_KDOT_1'
    taxfile = r'Database Connections\SDEPROD_GIS.sde\GIS.CITY_LIMITS_KDOR_1'
    cityLimitsAdd = "CITY_LIMITS_MODS_ADD"
    cityLimitsSub = "CITY_LIMITS_MODS_SUBTRACT"
    # Stamp LOAD_DATE as YYYYMMDD on every modification row.
    CalculateField_management(
        modfile, "LOAD_DATE",
        "str(datetime.datetime.now( ))[0:4]+str(datetime.datetime.now( ))[5:7]+str(datetime.datetime.now( ))[8:10]",
        "PYTHON_9.3", "#")
    # Split the modification shapes into ADD and SUBTRACT layers.
    MakeFeatureLayer_management(modfile, cityLimitsAdd, "MODTYPE = 'ADD'", "#", "#")
    MakeFeatureLayer_management(modfile, cityLimitsSub, "MODTYPE = 'SUBTRACT'", "#", "#")
    KDOTcity = r'Database Connections\SDEPROD_GIS.sde\GIS.CITY_LIMITS_1'
    CityTemp = r'Database Connections\SDEPROD_GIS.sde\GIS.CITY_TEMP'
    CityOutlines = r'Database Connections\SDEPROD_GIS.sde\GIS.CITY_LIMITS_LN_1'
    # Kick other sessions off SDEPROD so the delete/overwrite can get its
    # locks; both helpers are defined elsewhere in this module.
    DisconnectHarshly_SDEPROD()
    AllowConnections_SDEPROD()
    if Exists(CityTemp):
        print "CityTemp exists"
        Delete_management(CityTemp)
    else:
        print "I don't think CityTemp exists."
        pass
    # Instead of erase, might try a union, then selection for only the features
    # that do not have their centers within the 2nd feature class,
    # then output the selected features.
    # MakeFeatureLayer, Union, SelectByLocation, CopyFeatures.
    # Carve the SUBTRACT shapes out of the tax polygons, then add the
    # ADD shapes back in.
    Erase_analysis(taxfile, cityLimitsSub, CityTemp, "0 Feet")
    Append_management(cityLimitsAdd, CityTemp, "NO_TEST", "#", "#")
    # One multipart feature per city, keeping the newest LOAD_DATE.
    Dissolve_management(
        CityTemp, KDOTcity,
        "CITYNUMBER;CITY;COUNTY;DIST;TYPE;POPCENSUS;POPCURRENT;ID1",
        "LOAD_DATE MAX", "MULTI_PART", "DISSOLVE_LINES")
    FeatureToLine_management(KDOTcity, CityOutlines, "0.01 Feet", "ATTRIBUTES")
def CityLimitsMod():
    # Rebuild the KDOT city limits: stamp the modification shapes, carve the
    # 'SUBTRACT' shapes out of the KDOR tax polygons, merge the 'ADD' shapes
    # back in, dissolve to one feature per city, and refresh the outlines.
    from arcpy import MakeFeatureLayer_management, Erase_analysis, Append_management, env, CalculateField_management, Dissolve_management, FeatureToLine_management
    env.overwriteOutput = 1
    modfile = r'Database Connections\SDEPROD_GIS.sde\GIS.Administrative_Boundary\GIS.CITY_LIMITS_MODS_KDOT'
    taxfile = r'Database Connections\SDEPROD_GIS.sde\GIS.Administrative_Boundary\GIS.CITY_LIMITS_KDOR'
    # Stamp LOAD_DATE as YYYYMMDD on every modification row.
    CalculateField_management(
        modfile, "LOAD_DATE",
        "str(datetime.datetime.now( ))[0:4]+str(datetime.datetime.now( ))[5:7]+str(datetime.datetime.now( ))[8:10]",
        "PYTHON_9.3", "#")
    # Split the modification shapes into ADD and SUBTRACT layers.
    for modLayer, modClause in (("CITY_LIMITS_MODS_ADD", "MODTYPE = 'ADD'"),
                                ("CITY_LIMITS_MODS_SUBTRACT", "MODTYPE = 'SUBTRACT'")):
        MakeFeatureLayer_management(modfile, modLayer, modClause, "#", "#")
    KDOTcity = r'Database Connections\SDEPROD_GIS.sde\GIS.Administrative_Boundary\GIS.CITY_LIMITS'
    CityTemp = r'Database Connections\SDEPROD_GIS.sde\GIS.Administrative_Boundary\GIS.TempCity'
    Erase_analysis(taxfile, "CITY_LIMITS_MODS_SUBTRACT", CityTemp, "#")
    Append_management("CITY_LIMITS_MODS_ADD", CityTemp, "NO_TEST", "#", "#")
    # One multipart feature per city, keeping the newest LOAD_DATE.
    Dissolve_management(
        CityTemp, KDOTcity,
        "CITYNUMBER;CITY;COUNTY;DIST;TYPE;POPCENSUS;POPCURRENT;ID1",
        "LOAD_DATE MAX", "MULTI_PART", "DISSOLVE_LINES")
    CityOutlines = r"Database Connections/SDEPROD_GIS.sde/GIS.Administrative_Boundary/GIS.CITY_LIMITS_LN"
    FeatureToLine_management("GIS.CITY_LIMITS", CityOutlines, "0.01 Feet", "ATTRIBUTES")
Milepost "Railroad Milepost" true true false 8 Double 0 0 ,First,#; Quadrants "Quadrants Blocks" true true false 4 Long 0 0 ,First,#; Illumination "Crossing Illumination" true true false 1 Text 0 0 ,First,#; PowerAvail "Commercial Power Available" true true false 1 Text 0 0 ,First,#; Horn "Wayside Horn" true true false 1 Text 0 0 ,First,#; Gouge "Gouge Marks" true true false 1 Text 0 0 ,First,#; PostedNo "Crossing Number Posted" true true false 1 Text 0 0 ,First,#; PublicCrossing "Open to Public" true true false 1 Text 0 0 ,First,#; QuadGates "Four Quad Gates Present" true true false 1 Text 0 0 ,First,#; Quiet "Quiet Zone" true true false 1 Text 0 0 ,First,#; DownRoad "Track Down Road" true true false 1 Text 0 0 ,First,#"""''' # Append_management("KGATE_CROSSINGS", "RailCrossing", "NO_TEST", field_mapping, subtype="") #AddField_management("KGATE_CROSSINGS", "Subtype_field", "SHORT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") #SelectLayerByAttribute_management(in_layer_or_view="KGATE_CROSSINGS", selection_type="NEW_SELECTION", where_clause=""""CROSSING_TYPE" = 'Active, Crossbuck only'""") #CalculateField_management(in_table="KGATE_CROSSINGS", field="Subtype_field", expression="3", expression_type="VB", code_block="") #SelectLayerByAttribute_management(in_layer_or_view="KGATE_CROSSINGS", selection_type="NEW_SELECTION", where_clause=""""CROSSING_TYPE" = 'Active, Flashing lights with gates'""") #CalculateField_management(in_table="KGATE_CROSSINGS", field="Subtype_field", expression="1", expression_type="VB", code_block="") #SelectLayerByAttribute_management(in_layer_or_view="KGATE_CROSSINGS", selection_type="NEW_SELECTION", where_clause=""""CROSSING_TYPE" = 'Active, Flashing lights, no gates'""") #CalculateField_management(in_table="KGATE_CROSSINGS", field="Subtype_field", expression="2", expression_type="VB", code_block="") #SelectLayerByAttribute_management(in_layer_or_view="KGATE_CROSSINGS", selection_type="NEW_SELECTION", where_clause=""""CROSSING_TYPE" = 
'Active, No Flashing lights, gates, or crossbuck'""") #CalculateField_management(in_table="KGATE_CROSSINGS", field="Subtype_field", expression="4", expression_type="VB", code_block="")
def TranscendRampReplacement():
    # Replace the ramp routes (LRS_ROUTE_PREFIX 'X') in the routes-source
    # centerlines with fresh geometry from the Interchange_Ramp feature
    # class: delete the matched ramps, repair a copy of the ramp source,
    # and append it back in with key/measure fields remapped via FieldMaps.
    MakeFeatureLayer_management(routesSourceCenterlines, routesSourceFeatureLayer)
    SelectLayerByAttribute_management(routesSourceFeatureLayer, "CLEAR_SELECTION")
    selectionQuery = """ "LRS_ROUTE_PREFIX" = 'X' AND "Ramps_LRSKey" IS NOT NULL AND "Ramps_LRSKey" <> '' """
    SelectLayerByAttribute_management(routesSourceFeatureLayer, "NEW_SELECTION", selectionQuery)
    countResult = GetCount_management(routesSourceFeatureLayer)
    intCount = int(countResult.getOutput(0))
    print('Selected ' + str(intCount) + ' ramp features to be replaced.')
    if intCount > 0:
        print("Deleting those ramp features from the " + returnFeatureClass(routesSourceCenterlines) + " layer.")
        # Deletes only the selected (ramp) features from the layer.
        DeleteFeatures_management(routesSourceFeatureLayer)
    else:
        print("No features selected. Skipping feature deletion.")
    # Remove the matching routes to prepare for the Interchange_Ramps information.
    ## After error matching is achieved, use replace geometry and replace attributes to not lose data
    ## from using the less effective method of:
    ## deleting the old Interchange_Ramps information, then re-adding with append.
    # Add the Interchange_Ramps information.
    # Checking to see if the copy for repairing already exists.
    # If so, remove it.
    if Exists(interchangeRampFCRepairCopy):
        Delete_management(interchangeRampFCRepairCopy)
    else:
        pass
    # Create a new file for the copy for repairing since repair modifies the input.
    CopyFeatures_management(interchangeRampFC, interchangeRampFCRepairCopy)
    # Repairs the geometry, modifies input.
    # Deletes features with null geometry (2 expected, until Shared.Interchange_Ramp is fixed).
    print("Repairing ramp geometry in the " + returnFeatureClass(interchangeRampFCRepairCopy) + " layer.")
    RepairGeometry_management(interchangeRampFCRepairCopy, "DELETE_NULL")
    # Create a fieldmapping object so that the Interchange_Ramps can be
    # correctly imported with append.
    appendInputs = [interchangeRampFCRepairCopy]
    appendTarget = routesSourceCenterlines
    schemaType = "NO_TEST"
    # Interchange_Ramp.LRS_KEY to RoutesSource_Test.LRSKEY
    fm_Field1 = FieldMap()
    fm_Field1.addInputField(interchangeRampFCRepairCopy, "LRS_KEY")
    fm_Field1_OutField = fm_Field1.outputField
    fm_Field1_OutField.name = 'LRSKEY'
    fm_Field1.outputField = fm_Field1_OutField
    # Interchange_Ramp.BEG_CNTY_LOGMILE to RoutesSource_Test.NON_STATE_BEGIN_MP
    fm_Field2 = FieldMap()
    fm_Field2.addInputField(interchangeRampFCRepairCopy, "BEG_CNTY_LOGMILE")
    fm_Field2_OutField = fm_Field2.outputField
    fm_Field2_OutField.name = 'NON_STATE_BEGIN_MP'
    fm_Field2.outputField = fm_Field2_OutField
    # Interchange_Ramp.END_CNTY_LOGMILE to RoutesSource_Test.NON_STATE_END_MP
    fm_Field3 = FieldMap()
    fm_Field3.addInputField(interchangeRampFCRepairCopy, "END_CNTY_LOGMILE")
    fm_Field3_OutField = fm_Field3.outputField
    fm_Field3_OutField.name = 'NON_STATE_END_MP'
    fm_Field3.outputField = fm_Field3_OutField
    # Collect the field maps into a FieldMappings object for Append.
    interchangeRampsMappings = FieldMappings()
    interchangeRampsMappings.addFieldMap(fm_Field1)
    interchangeRampsMappings.addFieldMap(fm_Field2)
    interchangeRampsMappings.addFieldMap(fm_Field3)
    print("Appending the features from " + returnFeatureClass(interchangeRampFCRepairCopy) + " into " + returnFeatureClass(routesSourceCenterlines) + ".")
    Append_management(appendInputs, appendTarget, schemaType, interchangeRampsMappings)
def DWBIappender(ShapeFileDate):
    # Append DWBI-modeled route segments to the enterprise segment table,
    # loading only the (PRMT_ID, LEG_NO) pairs not already present.  The
    # new rows are selected in OBJECTID batches of at most 999 so the
    # "OBJECTID IN (...)" clause stays under provider expression limits.
    # ShapeFileDate is accepted but not read here -- TODO confirm vestigial.
    print "appending the modeled data"
    env.workspace = repo
    filename1 = r"DWBI_SEGS"
    #C:\Workspace\pydot\sde_connections_10.3\sde@KTRIPS_sqlgiprod.sde\KTRIPS.SDE.KTRIPS_ROUTES
    enterprisedbRoutes = gdb + r"\KTRIPS.SDE.KTRIPS_ROUTE_Segments"
    print enterprisedbRoutes
    ##enterprisedbRoutes = gdb+"\KTRIPS_ROUTES"
    if Exists(filename1):
        shapeRouteDict = dict()
        # Create a searchCursor here to build a dictionary
        # of tuples from the filename1 shapefile.
        newCursor = SearchCursor(filename1, ["OBJECTID", "PRMT_ID", "LEG_NO"])
        for newRow in newCursor:
            shapeRouteDict[newRow[0]] = (newRow[1], newRow[2])
        try:
            del newCursor
        except:
            pass
        existingRouteTupleList = list()
        # Then, create another searchCursor here to
        # get another list of tuples from the enterprisedbRoutes
        # feature class so that you can compare the two
        # and select the routes in filename1 which
        # do not exist in enterprisedbRoutes.
        newCursor = SearchCursor(enterprisedbRoutes, ["PRMT_ID", "LEG_NO"])
        for newRow in newCursor:
            existingRouteTupleList.append(newRow)
        try:
            del newCursor
        except:
            pass
        listContainer = list()
        featureIDList = list()
        featureIDListCounter = 0
        # Walk the dictionary: every key (OBJECTID) whose (PRMT_ID, LEG_NO)
        # tuple is not already in the enterprise table must be appended.
        # The keys are collected into sub-lists of at most 999 ids so the
        # later selection whereClause stays a manageable size; full
        # sub-lists are pushed into listContainer and a fresh one started.
        for featureIDAsKey in shapeRouteDict.keys():
            if shapeRouteDict[featureIDAsKey] not in existingRouteTupleList and featureIDListCounter <= 998:
                featureIDList.append(featureIDAsKey)
                featureIDListCounter += 1
            elif shapeRouteDict[featureIDAsKey] not in existingRouteTupleList and featureIDListCounter > 998:
                featureIDListCounter = 0
                listContainer.append(featureIDList)
                featureIDList = list()
                featureIDList.append(featureIDAsKey)
                featureIDListCounter += 1
            else:
                pass
        # Don't forget the final, possibly partial, sub-list.
        listContainer.append(featureIDList)
        try:
            del shapeRouteDict, existingRouteTupleList, featureIDList
        except:
            pass
        print "The DWBI Segment List Container has " + str(
            len(listContainer)) + " list(s) within it."
        shapefileAsLyr = 'loadedShapefileAsLyr'
        MakeTableView_management(filename1, shapefileAsLyr)
        # Each sublist in the container list is
        # used to create a whereClause and select
        # from the features, then append the
        # selected features to the target feature class.
        try:
            for featureIDList in listContainer:
                if len(featureIDList) > 0:
                    selectionClause = '"OBJECTID" IN ( '
                    for featureID in featureIDList:
                        selectionClause += str(featureID) + ','
                    # Remove the last comma
                    selectionClause = selectionClause[:-1] + ')'
                    SelectLayerByAttribute_management(shapefileAsLyr, "NEW_SELECTION", selectionClause)
                    selectionCount = GetCount_management(shapefileAsLyr)
                    # NOTE(review): selectionCount is a Result object, not an
                    # int; "Result > 0" is effectively always true under
                    # Python 2 -- confirm int(...getOutput(0)) was intended.
                    if selectionCount > 0:
                        print str(
                            selectionCount
                        ) + " new records found, and will be appended right now."
                        Append_management(shapefileAsLyr, enterprisedbRoutes, "NO_TEST", "#")
                    else:
                        pass
                # This elif is only true when nothing has been added to any of the featureIDLists.
                elif len(listContainer) == 1 and len(featureIDList) == 0:
                    print "All available records have already been appended."
                else:
                    pass
        except:
            print "The selectionClause that may be causing the failure: " + selectionClause
            raise  # Reraises the previous exception
    else:
        print "There was a problem. " + str(filename1) + " could not be found."
    try:
        del listContainer, shapefileAsLyr
    except:
        pass
def appender_OLD(ShapeFileDate):
    # DEPRECATED (per the _OLD suffix): straight append of a monthly routes
    # shapefile into the enterprise KTRIPS routes feature class, with no
    # duplicate checking.  Relies on module-level globals: repo (shapefile
    # folder), gdb (SDE connection path), and arcpy's env/Append_management.
    env.workspace = repo
    # NOTE(review): r"\\" is a raw string of TWO backslashes, so the path gets
    # a doubled separator; Windows tolerates this, but confirm it is intended.
    filename1 = repo + r"\\" + ShapeFileDate
    # NOTE(review): "\K" is not a recognized escape so the literal survives
    # as-is, but a raw string would be safer here.
    enterprisedbRoutes = gdb + "\KTRIPS.SDE.KTRIPS_ROUTES"
    # NO_TEST: schema differences are ignored; fields are matched by name.
    Append_management(filename1, enterprisedbRoutes, "NO_TEST", "#")
    # Start of a commented-out (triple-quoted string) code block; it continues
    # past this point in the original file.
    '''if Exists(filename1):
def FIMS_GIS():
    """Locate CPMS/FMIS projects along the HPMS route network and load the
    results into the FMIS load table.

    Pipeline: build layers/table views from module-level globals (CPMSlyr,
    CountyLyr, HPMSlyr, MPOLyr, CONGDistlyr, ws, deltbl, newtbl, FMIS_PROJ,
    FMIS_LOAD), join new projects to CPMS, locate them along HPMS routes,
    intersect with the District/County/MPO polygon union, dissolve, re-locate
    the dissolved mileage, and append to FMIS_LOAD with an explicit field map.
    All inputs/outputs come from module scope; nothing is returned.
    """
    #arcpy.MakeFeatureLayer_management(CPMSlyr, 'CPMS', ProjectSelect)
    MakeFeatureLayer_management(CPMSlyr, 'CPMS')
    MakeFeatureLayer_management(CountyLyr, 'County')
    MakeFeatureLayer_management(HPMSlyr, 'HPMS')
    MakeFeatureLayer_management(MPOLyr, 'MPO')
    MakeFeatureLayer_management(CONGDistlyr, 'CONG')
    MakeFeatureLayer_management(ws + "/Polygons", 'Polygons')
    MakeTableView_management(deltbl, 'DeleteView')
    MakeTableView_management(newtbl, 'InsertView')
    #make the polygon analysis layer for Districts, Counties, and MPOs
    #arcpy.Union_analysis("CONG #;MPO #;County #",ws+"/Polygons","ALL","1 feet","GAPS")
    # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
    # The following inputs are layers or table views: "CPMS", "CPMS.CPMS_STAGING_TMP"
    MakeTableView_management(FMIS_PROJ, "CPMS_STAGING_TMP")
    # KEEP_COMMON: inner join, so only CPMS projects present in InsertView
    # (the new-projects table) are carried forward.
    AddJoin_management("CPMS", "PROJECT_ID", "InsertView", "PROJECT_NUMBER",
                       "KEEP_COMMON")
    Output_Event_Table_Properties = 'RID LINE CNTY_BEG CNTY_END'
    outtblH = ws + "/FIMS_EventTableLines"
    if Exists(outtblH):
        Delete_management(outtblH)
    print "locating CPMS to HPMS route"
    # NOTE(review): "M_DIRECTON" looks like a typo of M_DIRECTION -- confirm
    # against the tool's expected keyword before changing.
    LocateFeaturesAlongRoutes_lr('CPMS', 'HPMS', "Route_ID", "0 miles",
                                 outtblH, Output_Event_Table_Properties,
                                 "FIRST", "DISTANCE", "NO_ZERO", "FIELDS",
                                 "M_DIRECTON")
    #the 30 foot tolerance we allowed here also created a bunch of 30' segments at project intersections. Those should be handled.
    #...Or the locate tolerance changed to 0
    #cleansel = "RID <> CRND_RTE"
    #is thisnot the right way to handle this, because it will delete the short segments crossing the GIS county boundary? not really
    #selection statement deleted non-state highway system might be better
    #MakeTableView_management(outtblH, "cleanup", cleansel)
    #DeleteRows_management("cleanup")
    # Turn the located event table back into line geometry along HPMS.
    MakeRouteEventLayer_lr("HPMS", "Route_ID", ws + "/FIMS_EventTableLines",
                           "rid LINE CNTY_BEG CNTY_END",
                           "FIMS_EventTableLineLyr", "#", "ERROR_FIELD",
                           "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT",
                           "POINT")
    print "Intersection routes to areas"
    # Attach District/County/MPO polygon attributes to each project segment.
    Intersect_analysis("FIMS_EventTableLineLyr #;HPMS #;Polygons #",
                       ws + "/FMIS_Data", "ALL", "#", "LINE")
    if Exists(ws + "/HPMS_DataD"):
        Delete_management(ws + "/HPMS_DataD")
    # Dissolve to one record per project/system/district/county/urban id.
    Dissolve_management(
        ws + "/FMIS_Data", ws + "/HPMS_DataD",
        "PROJECT_ID;F_SYSTEM_V;NHS_VN;DISTRICT_1;COUNTY_NUMBER;ID_1", "#",
        "MULTI_PART", "UNSPLIT_LINES")
    # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
    print "locating processed data to HPMS mileage"
    if Exists(ws + "/FIMS_EventTable"):
        Delete_management(ws + "/FIMS_EventTable")
    LocateFeaturesAlongRoutes_lr(ws + "/HPMS_DataD", "HPMS", "Route_ID",
                                 "0 miles", ws + "/FIMS_EventTable",
                                 "RID LINE CNTY_BEG CNTY_END", "FIRST",
                                 "DISTANCE", "NO_ZERO", "FIELDS",
                                 "M_DIRECTON")
    #MaleTableView(ws+"/FIMS_EventTable", )
    # Explicit field map renames event-table columns to the FMIS_LOAD schema
    # (RID->ROUTE_ID, CNTY_BEG->BEG_CNTY_MP, etc.).  NO_TEST skips schema
    # validation, so the field map is the only thing keeping columns aligned.
    Append_management(
        ws + "/FIMS_EventTable", FMIS_LOAD, "NO_TEST",
        """ROUTE_ID "ROUTE_ID" true true false 14 Text 0 0 ,First,#,FIMS_EventTable,RID,-1,-1;BEG_CNTY_MP "BEG_CNTY_MP" true true false 8 Double 10 38 ,First,#,FIMS_EventTable,CNTY_BEG,-1,-1;END_CNTY_MP "END_CNTY_MP" true true false 8 Double 10 38 ,First,#,FIMS_EventTable,CNTY_END,-1,-1;CONGRESSIONAL_DISTRICT "CONGRESSIONAL_DISTRICT" true true false 50 Text 0 0 ,First,#,FIMS_EventTable,DISTRICT_1,-1,-1;URBAN_ID "URBAN_ID" true true false 10 Text 0 0 ,First,#,FIMS_EventTable,ID_1,-1,-1;FUN_CLASS "FUN_CLASS" true true false 3 Text 0 0 ,First,#,FIMS_EventTable,F_SYSTEM_V,-1,-1;SYSTEM_CODE "SYSTEM_CODE" true true false 10 Text 0 0 ,First,#,FIMS_EventTable,NHS_VN,-1,-1;PROJECT_NUMBER "PROJECT_NUMBER" true true false 15 Text 0 0 ,First,#,FIMS_EventTable,PROJECT_ID,-1,-1;COUNTY "COUNTY" true true false 3 Text 0 0 ,First,#,FIMS_EventTable,COUNTY_NUMBER,-1,-1""",
        "#")
    print "Rows appended to CPMS Load Table CPMS_HPMS_FMIS_DATA"
''' Created on Aug 13, 2014 Truncate and Append LRS elements from the Gateway MXD to static features classes in the Gateway SQL server GEodatabase Moved to Production on Aug 20 2014 @author: kyleg ''' from arcpy import mapping, Append_management, TruncateTable_management GDB = r'D:\HNTB_GATEWAY\ProductionMOT\SQL54_GATEWAY15.sde' mxd = mapping.MapDocument( r'D:\HNTB_GATEWAY\ProductionMOT\2014111401_GatewayExec.mxd') lyrs = mapping.ListLayers(mxd) #D:\HNTB_GATEWAY\ProductionMOT\SQL54_GATEWAY15.sde\Gateway2015.GATEWAY_SPATIAL.LongTermApproved TargetLT = r"D:\HNTB_GATEWAY\ProductionMOT\SQL54_GATEWAY15.sde\Gateway2015.GATEWAY_SPATIAL.LongTermApproved" TargetST = r'D:\HNTB_GATEWAY\ProductionMOT\SQL54_GATEWAY15.sde\Gateway2015.Gateway_Spatial.ShortTermApproved' print lyrs[0] TruncateTable_management(TargetST) Append_management(lyrs[0], TargetST, "NO_TEST", "#") print lyrs[1] TruncateTable_management(TargetLT) Append_management(lyrs[1], TargetLT, "NO_TEST", "#") #if __name__ == '__main__': # pass
def DissolveNonDirectionalItems(OpEnvironmentMode):
    """Dissolve each primary-direction feature class in the input workspace
    and truncate-and-reload (or recreate) its counterpart in the enterprise
    geodatabase.

    Dissolve fields are every field on the item except FCFieldsIgnore, plus
    the global LRS key/county/district fields; milepost extents are
    summarized (BSMP/BCMP MIN, ESMP/ECMP MAX).  If the target exists it is
    refreshed in place; on a schema-change ExecuteError (or if it does not
    exist) readonly users are disconnected and the target is recreated, then
    re-granted to readonly.

    NOTE(review): OpEnvironmentMode is never used, and OpEnvironment is not
    imported here (unlike sibling functions) -- it must already be in module
    scope.  Confirm both are intentional.
    """
    OpRunIn = OpEnvironment.OpRunInSum  # @UndefinedVariable
    OpRunOut = OpEnvironment.OpRunOut  # @UndefinedVariable
    adm = OpEnvironment.adm  # @UndefinedVariable
    Owner = OpEnvironment.Owner  # @UndefinedVariable
    DB = OpEnvironment.DB  # @UndefinedVariable
    env.workspace = OpRunIn
    env.overwriteOutput = True
    print OpRunIn
    #combine the connection, db, and owner to the destination path for enterprise geodatabase output
    OpRunFullOut = OpRunOut + "/" + DB + "." + Owner + "."
    print OpRunFullOut
    FCList = ListFeatureClasses()
    print "dissolving items in the primary direction"
    FCGlobalFieldsDissolve = [
        "LRS_KEY", "COUNTY_CD", "COUNTY_NAME", "DISTRICT"
    ]
    FCGlobalFieldsSummarize = "BSMP MIN;ESMP MAX;BCMP MIN;ECMP MAX"
    # Directional / geometry / milepost fields excluded from the dissolve key.
    FCFieldsIgnore = [
        "OBJECTID", "CRND_RTE", "LANE_DIRECTION", "DIV_UNDIV", "SHAPE",
        "SHAPE.STLength()", "BSMP", "ESMP", "BCMP", "ECMP", "OLD_FUN_CLASS",
        "FUN_DT"
    ]
    for Item in FCList:
        # Output name drops the first two characters of the source name
        # (presumably a directional prefix -- confirm).
        ItemOut = Item[2:]
        ItemDissolveFields = []
        print ItemOut
        fields = ListFields(Item)
        for field in fields:
            if field.name not in FCFieldsIgnore:
                #print " "+field.name
                ItemDissolveFields.append(field.name)
        dissolvelist = ItemDissolveFields + FCGlobalFieldsDissolve
        DissolveFields = ';'.join(dissolvelist)
        if Exists(OpRunFullOut + ItemOut):
            try:
                # Fast path: dissolve to in_memory, then truncate/reload the
                # existing target so grants and schema are preserved.
                print "feature class " + str(
                    ItemOut) + " exists and will be updated"
                Dissolve_management(Item, "in_memory/" + ItemOut,
                                    DissolveFields, FCGlobalFieldsSummarize,
                                    "MULTI_PART", "DISSOLVE_LINES")
                TruncateTable_management(OpRunFullOut + ItemOut)
                Append_management("in_memory/" + ItemOut,
                                  OpRunFullOut + ItemOut, "NO_TEST", "#")
                Delete_management("in_memory/" + ItemOut)
                print "feature class " + str(
                    ItemOut) + " was successfully updated"
            except ExecuteError:
                # Schema drift: drop and recreate the target, kicking readonly
                # users off first so the delete can acquire its lock.
                print "update failed because the schema has changed from what existed"
                #need to add locking
                DisconnectUser(adm, "readonly")
                AcceptConnections(adm, True)
                Delete_management(OpRunFullOut + ItemOut)
                print "recreating the dissolved feature class for " + str(
                    ItemOut)
                Dissolve_management(Item, OpRunFullOut + ItemOut,
                                    DissolveFields, FCGlobalFieldsSummarize,
                                    "MULTI_PART", "DISSOLVE_LINES")
                ChangePrivileges_management(OpRunFullOut + ItemOut,
                                            "readonly", "GRANT", "AS_IS")
            except:
                # Deliberate catch-all so one bad item does not stop the run.
                print "another error happened on updating the feature class"
        else:
            # Target absent: create it fresh and grant readonly access.
            print "feature class " + str(
                ItemOut) + " will be created or overwritten"
            DisconnectUser(adm, "readonly")
            AcceptConnections(adm, True)
            Dissolve_management(Item, OpRunFullOut + ItemOut, DissolveFields,
                                FCGlobalFieldsSummarize, "MULTI_PART",
                                "DISSOLVE_LINES")
            ChangePrivileges_management(OpRunFullOut + ItemOut, "readonly",
                                        "GRANT", "AS_IS")
        # Best-effort cleanup of any leftover in_memory copy.
        try:
            Delete_management("in_memory/" + Item)
        except:
            pass
# --- CDRS -> WICHWAY SPATIAL load -------------------------------------------
# Tags staged CDRS project segments as Closed/Planned, then truncate-and-
# reloads the WICHWAY CDRS feature class with an explicit field map, and
# finally stamps a LoadDate.  Uses module-level globals: stagews (staging
# file gdb) and sdeWichwayCDRS (target SDE feature class).
# ALERT_STATUS semantics inferred from the layer definitions below:
# 2 + FEA_CLOSED=1 -> "Closed", 1 -> "Planned" -- confirm against source.
MakeFeatureLayer_management(stagews + "\CDRS_Project", "ClosedRoutes",
                            '"ALERT_STATUS" = 2 AND "FEA_CLOSED" = 1')
CalculateField_management("ClosedRoutes", "STATUS", '"Closed"', "PYTHON_9.3",
                          "#")
MakeFeatureLayer_management(stagews + "\CDRS_Project", "PlannedRoutes",
                            '"ALERT_STATUS" = 1')
CalculateField_management("PlannedRoutes", "STATUS", '"Planned"',
                          "PYTHON_9.3", "#")
print str(
    datetime.datetime.now()) + " truncating CDRS segments in WICHWAY SPATIAL"
TruncateTable_management(sdeWichwayCDRS)
print str(datetime.datetime.now()) + " appending CDRS segments"
# NO_TEST append: the field map below is the only schema alignment, renaming
# staged CDRS columns (BEG_STATE_LOGMILE -> BeginMP, etc.) to the WICHWAY
# schema.  Unmapped targets (TrafficRouting, X, Y, LoadDate) load as null.
Append_management(
    "CDRS_Project", sdeWichwayCDRS, "NO_TEST",
    """RouteName "RouteName" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,RouteName,-1,-1;BeginMP "BeginMP" true true false 8 Double 8 38 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,BEG_STATE_LOGMILE,-1,-1;EndMP "EndMP" true true false 8 Double 8 38 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,END_STATE_LOGMILE,-1,-1;County "County" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,BEG_COUNTY_NAME,-1,-1;StartDate "StartDate" true true false 36 Date 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,ALERT_DATE,-1,-1,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,COMP_DATE,-1,-1;CompDate "CompDate" true true false 36 Date 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,COMP_DATE,-1,-1;AlertType "AlertType" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,ALERT_TYPE_TXT,-1,-1;AlertDescription "AlertDescription" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,ALERT_DESC_TXT,-1,-1;HeightLimit "HeightLimit" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,VERT_RESTRICTION,-1,-1;WidthLimit "WidthLimit" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,WIDTH_RESTRICTION,-1,-1;TrafficRouting "TrafficRouting" true true false 50 Text 0 0 ,First,#;TimeDelay "TimeDelay" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,TIME_DELAY_TXT,-1,-1;Comments "Comments" true true false 4000 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,PUBLIC_COMMENT,-1,-1;DetourType "DetourType" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,KANROAD_CDRS_WZ_DETAIL_DETOUR_TYPE_TXT,-1,-1;DetourDescription "DetourDescription" true true false 1500 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,KANROAD_CDRS_WZ_DETAIL_DETOUR_DESC,-1,-1;ContactName "ContactName" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,CONTACT_NAME,-1,-1;ContactPhone "ContactPhone" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,CONTACT_PHONE,-1,-1;ContactEmail "ContactEmail" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,CONTACT_EMAIL,-1,-1;WebLink "WebLink" true true false 500 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,ALERT_HYPERLINK,-1,-1;X "X" true true false 8 Double 8 38 ,First,#;Y "Y" true true false 8 Double 8 38 ,First,#;AlertStatus "AlertStatus" true true false 4 Long 0 10 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,Alert_Status_I,-1,-1;FeaClosed "FeaClosed" true true false 4 Long 0 10 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,FEA_CLOSED,-1,-1;Status "Status" true true false 50 Text 0 0 ,First,#,D:/wichway/harvesters/python/CDRS.gdb/CDRS_Project,STATUS,-1,-1;LoadDate "LoadDate" true true false 36 Date 0 0 ,First,#;SHAPE_STLength__ "SHAPE_STLength__" true false false 8 Double 8 38 ,First,#;Shape.STLength() "Shape.STLength()" false false true 0 Double 0 0 ,First,#""",
    "#")
try:
    # Stamp every loaded row with the load timestamp; the expression string
    # is evaluated server-side by the field calculator, not here.
    MakeFeatureLayer_management(sdeWichwayCDRS, "loaded", "#")
    CalculateField_management("loaded", "LoadDate",
                              "datetime.datetime.now( )", "PYTHON_9.3", "#")
    print str(datetime.datetime.now()) + " It Ran, time for lunch"
except:
    # Best-effort: the load itself succeeded even if the stamp failed.
    print str(datetime.datetime.now()
              ) + " It Ran, but didn't calc the LoadDate field"
def extendAndIntersectRoadFeatures(
        quarterOrHalf):
    """Extend every road segment well past the county buffer, insert the
    extensions into a line feature class, and intersect them with the county
    border buffer to build the per-county road-name "rosette" points.

    quarterOrHalf: "quarter" or "half" (case-insensitive) -- selects the _Q
    or _H set of module-level feature classes/names and the extension
    distance.  Any other value raises Exception.

    Relies on many module-level globals (createdExtensionLines_Q/_H,
    countyRoadNameRosette_Q/_H, temp rosette names, countyBorderFeature_Q/_H,
    inMemGDB, sqlGdbLocation, spatialReferenceProjection, getRoadLinesList,
    getBorderFeatureList, and the Arcgis* aliases).  No return value; output
    is written to the rosette feature class.
    """
    # Place the operations that extend each road line segment by a certain distance here.
    # Should extend all the features that exist in the post-erase dataset. Might be more difficult
    # to calculate the angle of these lines accurately, but it should be easier to figure out
    # than trying to get the lines to split correctly with the buggy SplitLineAtPoint tool.
    if quarterOrHalf.lower() == "quarter":
        extensionLinesTextName = "createdExtensionLines_Q"
        createdExtensionLines = createdExtensionLines_Q
        # 9000 ft increase for _Q version.
        # Must be larger than the county bufferDistance (20000)
        extensionDistance = 31176
        # NOTE(review): duplicate assignment (already set above); harmless.
        extensionLinesTextName = "createdExtensionLines_Q"
        countyRoadNameRosette = countyRoadNameRosette_Q
        rosetteTextName = "countyRoadNameRosette_Q"
        tempRoadNameRosette = tempRoadNameRosette_Q
        tempRosetteTextName = "tempRoadNameRosette_Q"
        tempRoadNameRosetteSP = tempRoadNameRosetteSinglePoint_Q
        tempRosetteSPTextName = "tempRoadNameRosetteSinglePoint_Q"
        countyBorderFeature = countyBorderFeature_Q
    elif quarterOrHalf.lower() == "half":
        extensionLinesTextName = "createdExtensionLines_H"
        createdExtensionLines = createdExtensionLines_H
        # Must be larger than the county bufferDistance (11000)
        extensionDistance = 22176
        # NOTE(review): duplicate assignment (already set above); harmless.
        extensionLinesTextName = "createdExtensionLines_H"
        countyRoadNameRosette = countyRoadNameRosette_H
        rosetteTextName = "countyRoadNameRosette_H"
        tempRoadNameRosette = tempRoadNameRosette_H
        tempRosetteTextName = "tempRoadNameRosette_H"
        tempRoadNameRosetteSP = tempRoadNameRosetteSinglePoint_H
        tempRosetteSPTextName = "tempRoadNameRosetteSinglePoint_H"
        countyBorderFeature = countyBorderFeature_H
    else:
        print "quarterOrHalf variable not correctly defined."
        raise (Exception("quarterOrHalf value error."))
    print "Starting to extend and intersect road features."
    # Rebuild the extension-lines feature class from scratch each run.
    if Exists(createdExtensionLines):
        Delete_management(createdExtensionLines)
    else:
        pass
    CreateFeatureclass_management(inMemGDB, extensionLinesTextName,
                                  "POLYLINE", "", "", "",
                                  spatialReferenceProjection)
    # Add a column for roadname called roadNameForSplit.
    AddField_management(createdExtensionLines, "roadNameForSplit", "TEXT", "",
                        "", "55")
    # Add a column which stores the angle to display a label called called LabelAngle.
    AddField_management(createdExtensionLines, "LabelAngle", "DOUBLE", "", "",
                        "")  # Change to double.
    # Add a column which stores the County Number.
    AddField_management(createdExtensionLines, "County_Number", "DOUBLE", "",
                        "", "")
    roadLinesToInsertList = list()
    roadLinesList = getRoadLinesList()
    # Each roadLinesItem: [?, geometry, road name, county number] -- indices
    # 1..3 are the only ones read here.
    for roadLinesItem in roadLinesList:
        roadNameToUse = roadLinesItem[2]
        countyNumber = roadLinesItem[3]
        linePointsArray = ArcgisArray()
        firstPointTuple = (roadLinesItem[1].firstPoint.X,
                           roadLinesItem[1].firstPoint.Y)
        lastPointTuple = (roadLinesItem[1].lastPoint.X,
                          roadLinesItem[1].lastPoint.Y)
        # Make this a two-step process.
        # Might be as simple as
        # adding _1 to the end of the first set of variables,
        # adding _2 to the end of the second set of variables,
        # then making the extensions in both directions
        # and creating a new line that has the endpoints
        # from both sides as it's first and last point.
        # if necessary, could add the other points in between
        # but probably not necessary just for generating
        # an intersection point.
        # Direction 1: bearing from last point toward first point.
        yValue_1 = -(lastPointTuple[1] - firstPointTuple[1]
                     )  # made y value negative
        xValue_1 = lastPointTuple[0] - firstPointTuple[0]
        lineDirectionAngle_1 = math.degrees(math.atan2(
            xValue_1, yValue_1))  # reversed x and y
        lineDirectionAngle_1 = -(((lineDirectionAngle_1 + 180) % 360) - 180
                                 )  # correction for certain quadrants
        #print "lineDirectionAngle: " + str(lineDirectionAngle_1)
        origin_x_1 = firstPointTuple[0]
        origin_y_1 = firstPointTuple[1]
        # Direction 2: the opposite bearing, anchored at the last point.
        yValue_2 = -(firstPointTuple[1] - lastPointTuple[1]
                     )  # made y value negative
        xValue_2 = firstPointTuple[0] - lastPointTuple[0]
        lineDirectionAngle_2 = math.degrees(math.atan2(
            xValue_2, yValue_2))  # reversed x and y
        lineDirectionAngle_2 = -(((lineDirectionAngle_2 + 180) % 360) - 180
                                 )  # correction for certain quadrants
        #print "lineDirectionAngle: " + str(lineDirectionAngle_2)
        origin_x_2 = lastPointTuple[0]
        origin_y_2 = lastPointTuple[1]
        # Project each endpoint outward by extensionDistance along its bearing.
        (disp_x_1, disp_y_1) = (
            extensionDistance * math.sin(math.radians(lineDirectionAngle_1)),
            extensionDistance * math.cos(math.radians(lineDirectionAngle_1)))
        (end_x_1, end_y_1) = (origin_x_1 + disp_x_1, origin_y_1 + disp_y_1)
        (disp_x_2, disp_y_2) = (
            extensionDistance * math.sin(math.radians(lineDirectionAngle_2)),
            extensionDistance * math.cos(math.radians(lineDirectionAngle_2)))
        (end_x_2, end_y_2) = (origin_x_2 + disp_x_2, origin_y_2 + disp_y_2)
        # The new two-point line spans the two projected endpoints, i.e. the
        # original segment extended in both directions.
        startPoint = ArcgisPoint()
        endPoint = ArcgisPoint()
        startPoint.ID = 0
        startPoint.X = end_x_1
        startPoint.Y = end_y_1
        endPoint.ID = 1
        endPoint.X = end_x_2
        endPoint.Y = end_y_2
        linePointsArray.add(startPoint)
        linePointsArray.add(endPoint)
        newLineFeature = ArcgisPolyLine(linePointsArray)
        # Need to create an extension for both ends of the line and add them
        # to the array.
        #newLineFeature = createdExtensionLinesCursor.newRow()
        #newLineFeature.SHAPE = linePointsArray
        # Label angle: first positive bearing wins; "0" (string) otherwise.
        lineDirectionOutput = "0"
        if lineDirectionAngle_1 > 0:
            lineDirectionOutput = lineDirectionAngle_1
        elif lineDirectionAngle_2 > 0:
            lineDirectionOutput = lineDirectionAngle_2
        else:
            pass
        roadLinesToInsertList.append(
            [newLineFeature, roadNameToUse, lineDirectionOutput,
             countyNumber])
        #createdExtensionLinesCursor.insertRow([newLineFeature, roadNameToUse, lineDirectionOutput])
        if "newLineFeature" in locals():
            del newLineFeature
        else:
            pass
    # Consider building this as a separate list and then just looping
    # through the list to put it into the cursor instead
    # of doing logic and inserting into the cursor at the same place.
    #start editing session
    #newEditingSession = daEditor(sqlGdbLocation)
    #newEditingSession.startEditing()
    #newEditingSession.startOperation()
    # Bulk-insert all prepared extension lines.
    createdExtensionLinesCursor = daInsertCursor(
        createdExtensionLines,
        ["SHAPE@", "roadNameForSplit", "LabelAngle", "County_Number"])
    for roadLinesToInsertItem in roadLinesToInsertList:
        createdExtensionLinesCursor.insertRow(roadLinesToInsertItem)
    # End editing session
    #newEditingSession.stopOperation()
    #newEditingSession.stopEditing(True)
    if "createdExtensionLinesCursor" in locals():
        del createdExtensionLinesCursor
    else:
        pass
    # Remove the previous countyRoadNameRosette so that it can be recreated.
    if Exists(rosetteTextName):
        Delete_management(rosetteTextName)
    else:
        pass
    CreateFeatureclass_management(sqlGdbLocation, rosetteTextName, "POINT",
                                  "", "", "", spatialReferenceProjection)
    AddField_management(countyRoadNameRosette, "roadNameForSplit", "TEXT", "",
                        "", "55")
    AddField_management(countyRoadNameRosette, "LabelAngle", "DOUBLE", "", "",
                        "")  # Change to double.
    AddField_management(countyRoadNameRosette, "County_Number", "DOUBLE", "",
                        "", "")
    AddField_management(countyRoadNameRosette, "COUNTY_NAME", "TEXT", "", "",
                        "55")
    # Now then, need to check for the existence
    # of and delete the point intersection layer
    # if it exists.
    # Then, recreate it and the proper fields.
    inMemoryCountyBorderExtension = "aCountyBorderExtensionBuffer"
    inMemoryExtensionLines = "aLoadedExtensionLines"
    # Best-effort cleanup of layers left over from a previous run.
    try:
        Delete_management(inMemoryCountyBorderExtension)
    except:
        pass
    try:
        Delete_management(inMemoryExtensionLines)
    except:
        pass
    # Temporary layer, use CopyFeatures_management to persist to disk.
    MakeFeatureLayer_management(
        countyBorderFeature,
        inMemoryCountyBorderExtension)  # County Border extension feature
    # Temporary layer, use CopyFeatures_management to persist to disk.
    MakeFeatureLayer_management(
        createdExtensionLines, inMemoryExtensionLines)  # Line extension feature
    borderFeatureList = getBorderFeatureList(quarterOrHalf)
    # Sort by county number (index 3) so counties are processed in order.
    borderFeatureList = sorted(borderFeatureList,
                               key=lambda feature: feature[3])
    for borderFeature in borderFeatureList:
        borderFeatureName = borderFeature[2]
        borderFeatureNumber = borderFeature[3]
        print "borderFeatureName: " + str(
            borderFeatureName) + " & borderFeatureNumber: " + str(
                int(borderFeatureNumber))
        countyBorderWhereClause = ' "COUNTY_NUMBER" = ' + str(
            int(borderFeatureNumber)) + ' '
        SelectLayerByAttribute_management(inMemoryCountyBorderExtension,
                                          "NEW_SELECTION",
                                          countyBorderWhereClause)
        countyBorderSelectionCount = GetCount_management(
            inMemoryCountyBorderExtension)
        print "County Borders Selected: " + str(countyBorderSelectionCount)
        # Had to single-quote the borderFeatureNumber because it is stored as a string in the table.
        # Unsingle quoted because it was changed to a float.
        extensionLinesWhereClause = ' "COUNTY_NUMBER" = ' + str(
            int(borderFeatureNumber)) + ' '
        SelectLayerByAttribute_management(inMemoryExtensionLines,
                                          "NEW_SELECTION",
                                          extensionLinesWhereClause)
        extensionLineSelectionCount = GetCount_management(
            inMemoryExtensionLines)
        print "Extension Lines Selected: " + str(extensionLineSelectionCount)
        # Clear the per-county temp outputs before re-intersecting.
        if Exists(tempRosetteTextName):
            Delete_management(tempRosetteTextName)
        else:
            pass
        if Exists(tempRosetteSPTextName):
            Delete_management(tempRosetteSPTextName)
        else:
            pass
        # Intersect only the current county's selected border and lines.
        Intersect_analysis(
            [inMemoryCountyBorderExtension, inMemoryExtensionLines],
            tempRoadNameRosette, "ALL", "", "POINT")
        # Intersect to an output temp layer.
        # Next, need to loop through all of the counties.
        # Get the county number and use it to select
        # a county extension buffer in the county
        # extension buffers layer.
        # Then, use the county number to select
        # all of the lines for that county
        # in the extension lines layer.
        # Then, export those to a temp layer in the fgdb.
        # Change multipoint to singlepoint.
        # Was working until I moved from gisprod to sdedev for the data source.
        # not sure why. Check to make sure projections match.
        # ^ Fixed.
        try:
            # Run the tool to create a new fc with only singlepart features
            MultipartToSinglepart_management(tempRoadNameRosette,
                                             tempRoadNameRosetteSP)
            # Check if there is a different number of features in the output
            # than there was in the input
            inCount = int(
                GetCount_management(tempRoadNameRosette).getOutput(0))
            outCount = int(
                GetCount_management(tempRoadNameRosetteSP).getOutput(0))
            if inCount != outCount:
                print "Found " + str(outCount - inCount) + " multipart features."
                #print "inCount, including multipart = " + str(inCount)
                #print "outCount, singlepart only = " + str(outCount)
            else:
                print "No multipart features were found"
        except ExecuteError:
            print GetMessages()
        except Exception as e:
            print e
        print "Appending the temp point layer to the county point intersection layer."
        # Accumulate this county's singlepart points into the rosette.
        Append_management([tempRoadNameRosetteSP], countyRoadNameRosette,
                          "NO_TEST")
    # K, worked correctly. Just need to change LabelAngle to a float and it might be what
    # I want.
    print "Done adding points to the countyRoadNameRosette feature class."
def compute_adjacency_list(input_points, input_network, id_attribute,
                           impedance_attribute, accumulator_attributes,
                           search_radius, output_location, adj_dbf_name):
    """Build a network-distance adjacency list for a set of points and save it as a dbf.

    |input_points|: point shape file marking entity (e.g. building) locations
    |input_network|: street network in which |input_points| is located
    |id_attribute|: the name of attribute that distinguishes between input points
    |impedance_attribute|: distance between neighboring nodes will be based on
        this attribute
    |accumulator_attributes|: distance between neighboring nodes will also be
        recorded for these attributes
    |search_radius|: the maximum extent for centrality computation
    |output_location|: adjacency list dbf will be saved here
    |adj_dbf_name|: the name of the adjacency list dbf

    Side effects: may add and populate a barrier-cost field on |input_points|;
    creates (and later deletes) an auxiliary directory of intermediate arcpy
    datasets under |output_location|; writes the final dbf to
    |output_location|/|adj_dbf_name|.

    NOTE: Python 2 / arcgisscripting style throughout (``cursor.next()``,
    old-style UpdateCursor rows).
    """
    # Number of points in |input_points|
    input_point_count = int(GetCount_management(input_points).getOutput(0))

    # Make a directory to store all auxiliary files
    auxiliary_dir = join(output_location, AUXILIARY_DIR_NAME)
    if not Exists(auxiliary_dir):
        mkdir(auxiliary_dir)

    # Record the edge and junction source names of |input_network|
    junction_feature, edge_feature = network_features(input_network)

    # Calculate network locations if not already calculated.  A single sample
    # row is inspected: if it already carries all network-location fields we
    # assume the whole feature class does.
    test_input_point = UpdateCursor(input_points).next()
    locations_calculated = all(
        row_has_field(test_input_point, field)
        for field in NETWORK_LOCATION_FIELDS)
    if not locations_calculated:
        calculate_network_locations(input_points, input_network)

    # Calculate barrier cost per input point if not already calculated
    barrier_costs_calculated = row_has_field(test_input_point,
                                             trim(BARRIER_COST_FIELD))
    if not barrier_costs_calculated:
        AddMessage(BARRIER_COST_COMPUTATION_STARTED)
        # Add |BARRIER_COST_FIELD| column in |input_points|
        AddField_management(in_table=input_points,
                            field_name=trim(BARRIER_COST_FIELD),
                            field_type="DOUBLE",
                            field_is_nullable="NON_NULLABLE")
        # Initialize a dictionary to store the frequencies of
        # (SnapX, SnapY) values
        xy_count = {}
        # A method to retrieve a (SnapX, SnapY) pair for a row in
        # |input_points|
        get_xy = lambda row: (row.getValue(trim("SnapX")),
                              row.getValue(trim("SnapY")))
        # First pass: count how many input points share each snapped
        # network location.
        barrier_pre_progress = Progress_Bar(input_point_count, 1,
                                            BARRIER_COST_PRE_PROCESSING)
        rows = UpdateCursor(input_points)
        for row in rows:
            snap_xy = get_xy(row)
            if snap_xy in xy_count:
                xy_count[snap_xy] += 1
            else:
                xy_count[snap_xy] = 1
            barrier_pre_progress.step()
        # Populate |BARRIER_COST_FIELD|, this will be used in OD matrix
        # computation.  Co-located points split the barrier cost evenly so
        # the total barrier cost per snapped location stays BARRIER_COST.
        barrier_progress = Progress_Bar(input_point_count, 1,
                                        BARRIER_COST_COMPUTATION)
        rows = UpdateCursor(input_points)
        for row in rows:
            barrier_cost = BARRIER_COST / xy_count[get_xy(row)]
            row.setValue(trim(BARRIER_COST_FIELD), barrier_cost)
            rows.updateRow(row)
            barrier_progress.step()
        AddMessage(BARRIER_COST_COMPUTATION_FINISHED)

    # Necessary files (all intermediates live in |auxiliary_dir|)
    od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
    od_cost_matrix_lines = join(od_cost_matrix_layer, OD_COST_MATRIX_LINES)
    temp_adj_dbf_name = TEMP_ADJACENCY_DBF_NAME(adj_dbf_name)
    temp_adj_dbf = join(output_location, temp_adj_dbf_name)
    adj_dbf = join(output_location, adj_dbf_name)
    partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
    polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
    raster = join(auxiliary_dir, RASTER_NAME)
    polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
    input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)

    # Make sure none of these files already exists
    for path in [od_cost_matrix_layer, temp_adj_dbf, adj_dbf, partial_adj_dbf,
                 polygons, raster, polygons_layer, input_points_layer,
                 od_cost_matrix_lines]:
        delete(path)

    # Cutoff radius for OD matrix computation.  Inflated by the barrier costs
    # baked into the impedances; they are subtracted back out below.
    cutoff_radius = 2 * BARRIER_COST + min(search_radius, BARRIER_COST / 2)

    # Compute OD matrix
    MakeODCostMatrixLayer_na(in_network_dataset=input_network,
                             out_network_analysis_layer=od_cost_matrix_layer,
                             impedance_attribute=impedance_attribute,
                             default_cutoff=str(cutoff_radius),
                             accumulate_attribute_name=accumulator_attributes,
                             UTurn_policy="ALLOW_UTURNS",
                             hierarchy="NO_HIERARCHY",
                             output_path_shape="NO_LINES")

    # Determine raster cell size.
    # NOTE(review): Python 2 integer division — if |input_point_count| exceeds
    # OD_MATRIX_ENTRIES, |points_per_raster_cell| becomes 0 and the next line
    # raises ZeroDivisionError.  Confirm intended input sizes.
    points_per_raster_cell = OD_MATRIX_ENTRIES / input_point_count
    raster_cell_count = max(1, input_point_count / points_per_raster_cell)
    input_points_extent = Describe(input_points).Extent
    raster_cell_area = (input_points_extent.width *
                        input_points_extent.height / raster_cell_count)
    raster_cell_size = int(sqrt(raster_cell_area))

    # Construct |raster| from |input_points|
    PointToRaster_conversion(in_features=input_points,
                             value_field=id_attribute,
                             out_rasterdataset=raster,
                             cell_assignment="MOST_FREQUENT",
                             priority_field="NONE",
                             cellsize=str(raster_cell_size))
    # Construct |polygons| from |raster|; each polygon batches nearby points
    # so the OD matrix can be solved one cell at a time.
    RasterToPolygon_conversion(in_raster=raster,
                               out_polygon_features=polygons,
                               simplify="NO_SIMPLIFY",
                               raster_field="VALUE")

    # Export empty |od_cost_matrix_lines| to |temp_adj_dbf| to start the
    # adjacency list (schema only; rows are appended per solve below).
    TableToTable_conversion(in_rows=od_cost_matrix_lines,
                            out_path=output_location,
                            out_name=temp_adj_dbf_name)

    # Construct |polygons_layer| and |input_points_layer|
    for (feature, layer) in [(polygons, polygons_layer),
                             (input_points, input_points_layer)]:
        MakeFeatureLayer_management(in_features=feature, out_layer=layer)

    def add_locations(sub_layer, field_mappings=""):
        """Load |input_points_layer| into a sub layer of the OD matrix layer.

        |sub_layer|: one of "Origins", "Destinations", "Barrier Points"
        |field_mappings|: field mappings in addition to those for "Name" and
            "CurbApproach"
        """
        AddLocations_na(in_network_analysis_layer=od_cost_matrix_layer,
                        sub_layer=sub_layer,
                        in_table=input_points_layer,
                        field_mappings=("Name %s #; CurbApproach # 0; %s" %
                                        (id_attribute, field_mappings)),
                        search_tolerance=SEARCH_TOLERANCE,
                        search_criteria=("%s SHAPE; %s SHAPE;" %
                                         (junction_feature, edge_feature)),
                        append="CLEAR",
                        snap_to_position_along_network="SNAP",
                        snap_offset=SNAP_OFFSET)

    # OD cost matrix destinations
    AddMessage(ADDING_DESTINATIONS_STARTED)
    # NOTE(review): called with only |in_layer| — presumably used to clear any
    # prior selection so every point is loaded; confirm the intended default
    # arguments of SelectLayerByLocation here.
    SelectLayerByLocation_management(in_layer=input_points_layer)
    add_locations("Destinations")
    AddMessage(ADDING_DESTINATIONS_FINISHED)

    # OD cost matrix point barriers
    AddMessage(ADDING_BARRIERS_STARTED)
    add_locations("Point Barriers",
                  ("FullEdge # 0; BarrierType # 2;"
                   "Attr_%s %s #;" % (impedance_attribute,
                                      trim(BARRIER_COST_FIELD))))
    AddMessage(ADDING_BARRIERS_FINISHED)

    # Compute adjacency list, one raster cell at a time
    progress = Progress_Bar(raster_cell_count, 1, STEP_1)
    rows = UpdateCursor(polygons)
    for row in rows:
        # Select the current polygon
        SelectLayerByAttribute_management(in_layer_or_view=polygons_layer,
                                          selection_type="NEW_SELECTION",
                                          where_clause="FID = %s" %
                                          str(row.FID))
        # Origins: only the points falling in the current polygon
        SelectLayerByLocation_management(in_layer=input_points_layer,
                                         select_features=polygons_layer)
        add_locations("Origins")
        # Solve OD Cost matrix
        Solve_na(in_network_analysis_layer=od_cost_matrix_layer,
                 ignore_invalids="SKIP")
        # Add origin and destination fields to the adjacency list dbf;
        # the solver encodes them as "origin - destination" in Name.
        for (index, field) in [(0, ORIGIN_ID_FIELD_NAME),
                               (1, DESTINATION_ID_FIELD_NAME)]:
            CalculateField_management(in_table=od_cost_matrix_lines,
                                      field=field,
                                      expression="!Name!.split(' - ')[%d]" %
                                      index,
                                      expression_type="PYTHON")
        # Record actual distance between neighboring nodes: strip the two
        # barrier costs (origin side + destination side) added during routing.
        distance_field = "Total_%s" % impedance_attribute
        CalculateField_management(in_table=od_cost_matrix_lines,
                                  field=distance_field,
                                  expression="!%s! - 2 * %d" %
                                  (distance_field, BARRIER_COST),
                                  expression_type="PYTHON")
        # Append result to |temp_adj_dbf|
        TableToTable_conversion(in_rows=od_cost_matrix_lines,
                                out_path=auxiliary_dir,
                                out_name=PARTIAL_ADJACENCY_LIST_NAME)
        Append_management(inputs=partial_adj_dbf,
                          target=temp_adj_dbf,
                          schema_type="TEST")
        progress.step()

    # Copy data from |temp_adj_dbf| to |adj_dbf|
    Rename_management(in_data=temp_adj_dbf, out_data=adj_dbf)

    # Clean up all intermediates, including the auxiliary directory itself
    for path in [od_cost_matrix_layer, partial_adj_dbf, polygons, raster,
                 polygons_layer, input_points_layer, auxiliary_dir]:
        delete(path)
datetime.datetime.now()) + " reformatting the Route name for US routes" AddField_management("CDRS_DETAIL", "RouteName", "TEXT", "#", "10") CalculateField_management("CDRS_DETAIL", "RouteName", """Left([ROUTE], 1) & "-"&Mid( [ROUTE], 2, 9)""", "VB", "#") MakeFeatureLayer_management("CDRS_DETAIL", "USRoutes", "ROUTE LIKE 'U%'") CalculateField_management("USRoutes", "RouteName", """Left([ROUTE], 1) & "S-"&Mid( [ROUTE], 2, 9)""", "VB", "#") print str(datetime.datetime.now()) + " reprojection processing" DefineProjection_management("CDRS_DETAIL", labmertCC) Project_management( "CDRS_DETAIL", "CDRS_Project", "PROJCS['WGS_1984_Web_Mercator_Auxiliary_Sphere',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator_Auxiliary_Sphere'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],PARAMETER['Auxiliary_Sphere_Type',0.0],UNIT['Meter',1.0]]", "WGS_1984_(ITRF00)_To_NAD_1983", "PROJCS['NAD_83_Kansas_Lambert_Conformal_Conic_Meters',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['false_easting',0.0],PARAMETER['false_northing',0.0],PARAMETER['central_meridian',-98.0],PARAMETER['standard_parallel_1',38.0],PARAMETER['standard_parallel_2',39.0],PARAMETER['scale_factor',1.0],PARAMETER['latitude_of_origin',38.5],UNIT['Meter',1.0]]" ) print str(datetime.datetime.now()) + " truncating CDRS segments" TruncateTable_management(sdeConnectionString) print str(datetime.datetime.now()) + " appending CDRS segments" Append_management( "CDRS_Project", sdeConnectionString, "NO_TEST", """RouteName "RouteName" true true false 50 Text 0 0 ,First,#,CDRS_Project,RouteName,-1,-1;BeginMP "BeginMP" true true false 8 Double 8 38 ,First,#;EndMP 
"EndMP" true true false 8 Double 8 38 ,First,#,CDRS_Project,EndMP,-1,-1;County "County" true true false 50 Text 0 0 ,First,#,CDRS_Project,County,-1,-1;StartDate "StartDate" true true false 36 Date 0 0 ,First,#,CDRS_Project,StartDate,-1,-1;CompDate "CompDate" true true false 36 Date 0 0 ,First,#,CDRS_Project,CompDate,-1,-1;AlertType "AlertType" true true false 50 Text 0 0 ,First,#,CDRS_Project,AlertType,-1,-1;AlertDescription "AlertDescription" true true false 50 Text 0 0 ,First,#,CDRS_Project,AlertDescription,-1,-1;HeightLimit "HeightLimit" true true false 50 Text 0 0 ,First,#,CDRS_Project,HeightLimit,-1,-1;WidthLimit "WidthLimit" true true false 50 Text 0 0 ,First,#,CDRS_Project,WidthLimit,-1,-1;TrafficRouting "TrafficRouting" true true false 50 Text 0 0 ,First,#;TimeDelay "TimeDelay" true true false 50 Text 0 0 ,First,#,CDRS_Project,TimeDelay,-1,-1;Comments "Comments" true true false 1073741822 Text 0 0 ,First,#,CDRS_Project,Comments,-1,-1;DetourType "DetourType" true true false 50 Text 0 0 ,First,#,CDRS_Project,DetourType,-1,-1;ContactName "ContactName" true true false 50 Text 0 0 ,First,#,CDRS_Project,ContactName,-1,-1;ContactPhone "ContactPhone" true true false 50 Text 0 0 ,First,#,CDRS_Project,ContactPhone,-1,-1;ContactEmail "ContactEmail" true true false 50 Text 0 0 ,First,#,CDRS_Project,ContactEmail,-1,-1;X "X" true true false 8 Double 8 38 ,First,#;Y "Y" true true false 8 Double 8 38 ,First,#;AlertStatus "AlertStatus" true true false 4 Long 0 10 ,First,#,CDRS_Project,AlertStatus,-1,-1;FeaClosed "FeaClosed" true true false 4 Long 0 10 ,First,#,CDRS_Project,FeaClosed,-1,-1;Status "Status" true true false 50 Text 0 0 ,First,#;WebLink "WebLink" true true false 255 Text 0 0 ,First,#,CDRS_Project,WebLink,-1,-1;DetourDescription "DetourDescription" true true false 500 Text 0 0 ,First,#,CDRS_Project,DetourDescription,-1,-1;SHAPE.STLength() "SHAPE.STLength()" false false true 0 Double 0 0 ,First,#""", "#") print str(datetime.datetime.now()) + " It Ran, time for 
lunch" #ran on AR52 in about 1.5 min