def CITY (): itemname = "CITY" cantbl = itemname+"_ln_1" domname0 = 'CITY_TYPE' domstate = """IAL_DOMAIN = 'CITY_TYPE' AND IAL_END_DATE is NULL""" domfields = "CT_CITY_NBRR_INCORPORATED_CITY" domtbl = itemname+"_"+domfields disfields = domfields domname = "CT_CITY_NBR" MakeTableView_management("Database Connections/ATLASPROD.odc/V_NM_CTY",domname) TableToTable_conversion(domname, wsouttbl, domname+"R", "#") MakeTableView_management(wsouttbl+"//"+cantbl,itemname+"PD",""""NE_OWNER" IN ( 'EB' , 'NB' )""","#") DissolveRouteEvents_lr(itemname+"PD","LRS_ROUTE LINE Beg_State_Logmile End_State_Logmile", "CITY_NBR", wsouttbl+"//"+itemname+"_SD","LRS_ROUTE LINE Beg_State_Logmile End_State_Logmile","CONCATENATE","INDEX") AddIndex_management(wsouttbl+"//"+itemname+"_SD","CITY_NBR","CITY_NBR","NON_UNIQUE","NON_ASCENDING") AddField_management(wsouttbl+"//"+domname+"R", "CITY_NBR", "FLOAT", "#", "#", "#") CalculateField_management(wsouttbl+"//"+domname+"R", "CITY_NBR", "[CITY_NUMBER]", "VB") MakeTableView_management(wsouttbl+"//"+itemname+"_SD", itemname+"_view", "#") AddJoin_management(itemname+"_view", "CITY_NBR", wsouttbl+"//"+domname+"R", "CITY_NBR", "KEEP_ALL") TableToTable_conversion(itemname+"_view", wsouttbl, itemname+"_EVENT", "#") AssignDomainToField_management(wsouttbl+"//"+itemname+"_EVENT",domfields,domname0) DeleteField_management(wsouttbl+"//"+itemname+"_EVENT","CTYR_OBJECTID;CTYR_IIT_NE_ID;CTYR_IIT_INV_TYPE;CTYR_IIT_PRIMARY_KEY;CTYR_IIT_START_DATE;CTYR_IIT_DATE_CREATED;CTYR_IIT_DATE_MODIFIED;CTYR_IIT_CREATED_BY;CTYR_IIT_MODIFIED_BY;CTYR_IIT_ADMIN_UNIT;CTYR_IIT_NOTE;CTYR_IIT_PEO_INVENT_BY_ID;CTYR_NAU_UNIT_CODE;CTYR_IIT_END_DATE;CTYR_CITY_NBR") MakeRouteEventLayer_lr(wsouttbl+"//SMLRS","LRS_ROUTE",wsouttbl+"//"+itemname+"_EVENT","LRS_ROUTE LINE Beg_State_Logmile End_State_Logmile",itemname+"_ITEM","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT") FeatureClassToFeatureClass_conversion(itemname+"_ITEM", wsouttbl, itemname) 
DeleteField_management(wsouttbl+"//"+itemname,"CTYR_OBJECTID;CTYR_IIT_NE_ID;CTYR_IIT_INV_TYPE;CTYR_IIT_PRIMARY_KEY;CTYR_IIT_START_DATE;CTYR_IIT_DATE_CREATED;CTYR_IIT_DATE_MODIFIED;CTYR_IIT_CREATED_BY;CTYR_IIT_MODIFIED_BY;CTYR_IIT_ADMIN_UNIT;CTYR_IIT_NOTE;CTYR_IIT_PEO_INVENT_BY_ID;CTYR_NAU_UNIT_CODE;CTYR_IIT_END_DATE;CTYR_CITY_NBR") print "we have cities"
def UpdateGISPROD_NON_STATE_Check(): print "what route number should be updated?" try: RID = GetParameterAsText(0) except: RID = '460C0387500W0' RIDExp = "RID = '" + RID + "'" MakeTableView_management(r"C:\temp\Nusys_Check.gdb\NON_STATE_EVENTS", "UpdateGISPROD", "#", "in_memory") # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script # The following inputs are layers or table views: "UpdateGISPROD" TableToTable_conversion(in_rows="UpdateGISPROD", out_path="in_memory", out_name="updatetblh", RIDExp, "#", config_keyword="") AddIndex_management("updatetblh", "FID_NON_STATE_SYSTEM", "ID2", "UNIQUE", "ASCENDING") MakeFeatureLayer_management(NonState, "NonStateAll") AddJoin_management("NonStateAll", "ID2", "updatetblh", "FID_NON_STATE_SYSTEM", "KEEP_COMMON") print "Check the numbers one more time, and review" print "start Edit session on NON_STATE_SYSTEM now and type /'Calc()/' if it all looks good "
def RouteCheck(RID): #when running this function, pass the RID/LRS KEY Value into the function to update the desired RID #RID is structured '030C0011800W0' #Class should be L, C, or RM print "what route number should be updated?" #RID = '030C0011800W0' Class = RID[3] if Class in ("R", "M"): Class = "RM" else: pass print Class tablename = Class+RID RIDExp = "RID = '"+RID+"'" print "Updating route "+ str(RID) if Exists("UpdateGISPROD"): pass else: AddTable = Class+"_NON_STATE_EVENTS" MakeTableView_management(r"C:/temp/Nusys_Check.gdb/"+AddTable, tablename+"_view","#") TableToTable_conversion(tablename+"_view", "in_memory", tablename, RIDExp) RecordCount = str(GetCount_management(tablename)) if RecordCount = '0': print "No Records to Calculate"
# NOTE(review): INTR() below survives only as flattened source.  The huge
# TableToTable_conversion field-mapping literal (a triple-quoted string) spans
# the stored line boundaries, and the DissolveRouteEvents_lr event spec
# ("LRS_Key POINT / Cnty_Logmile") is a plain string broken across a line
# boundary, so this text will not parse until the original line breaks are
# restored.  Left byte-identical on purpose; comments could not be interleaved
# because string literals cross every internal line boundary.
# Purpose (from the visible calls): copies INTR_pt_1 -> INTR_pt_2 with an
# explicit field mapping, dissolves the intersection point events on
# LRS_Key/Cnty_Logmile, assigns the intersection-control and turn-lane
# domains, locates the events on the CMLRS routes, and exports the layers in
# lyrlist to the temp geodatabase.
# NOTE(review): AssignDomainToField_management is given itemdom/itemdom1
# ('IN_INTRSCTN_TYPE'/'TURN_LANE') rather than newdomname/newdomname1 - verify
# which domain names actually exist in the target geodatabase.
def INTR (): TableToTable_conversion("INTR_pt_1",wsouttbl,"INTR_pt_2","#","""LRS_KEY "LRS_KEY" true true false 13 Text 0 0 ,First,#,INTR_pt_1,LRS_KEY,-1,-1;Cnty_Logmile "Cnty_Logmile" true true false 8 Double 0 0 ,First,#,INTR_pt_1,Cnty_Logmile,-1,-1;Distance "Distance" true true false 8 Double 0 0 ,First,#,INTR_pt_1,Distance,-1,-1;IIT_NE_ID "IIT_NE_ID" true false false 4 Long 0 0 ,First,#,INTR_pt_1,IIT_NE_ID,-1,-1;IIT_INV_TYPE "IIT_INV_TYPE" true false false 4 Text 0 0 ,First,#,INTR_pt_1,IIT_INV_TYPE,-1,-1;IIT_PRIMARY_KEY "IIT_PRIMARY_KEY" true false false 50 Text 0 0 ,First,#,INTR_pt_1,IIT_PRIMARY_KEY,-1,-1;IIT_START_DATE "IIT_START_DATE" true false false 8 Date 0 0 ,First,#,INTR_pt_1,IIT_START_DATE,-1,-1;IIT_DATE_CREATED "IIT_DATE_CREATED" true false false 8 Date 0 0 ,First,#,INTR_pt_1,IIT_DATE_CREATED,-1,-1;IIT_DATE_MODIFIED "IIT_DATE_MODIFIED" true false false 8 Date 0 0 ,First,#,INTR_pt_1,IIT_DATE_MODIFIED,-1,-1;IIT_CREATED_BY "IIT_CREATED_BY" true false false 30 Text 0 0 ,First,#,INTR_pt_1,IIT_CREATED_BY,-1,-1;IIT_MODIFIED_BY "IIT_MODIFIED_BY" true false false 30 Text 0 0 ,First,#,INTR_pt_1,IIT_MODIFIED_BY,-1,-1;IIT_ADMIN_UNIT "IIT_ADMIN_UNIT" true false false 4 Long 0 0 ,First,#,INTR_pt_1,IIT_ADMIN_UNIT,-1,-1;IIT_DESCR "IIT_DESCR" true true false 40 Text 0 0 ,First,#,INTR_pt_1,IIT_DESCR,-1,-1;IIT_NOTE "IIT_NOTE" true true false 40 Text 0 0 ,First,#,INTR_pt_1,IIT_NOTE,-1,-1;IIT_PEO_INVENT_BY_ID "IIT_PEO_INVENT_BY_ID" true true false 4 Long 0 0 ,First,#,INTR_pt_1,IIT_PEO_INVENT_BY_ID,-1,-1;NAU_UNIT_CODE "NAU_UNIT_CODE" true true false 10 Text 0 0 ,First,#,INTR_pt_1,NAU_UNIT_CODE,-1,-1;IIT_END_DATE "IIT_END_DATE" true true false 8 Date 0 0 ,First,#,INTR_pt_1,IIT_END_DATE,-1,-1;INTRSCTN_NAME "INTRSCTN_NAME" true true false 50 Text 0 0 ,First,#,INTR_pt_1,INTRSCTN_NAME,-1,-1;ON_STATE_NONSTATE "ON_STATE_NONSTATE" true true false 16 Text 0 0 ,First,#,INTR_pt_1,ON_STATE_NONSTATE,-1,-1;INTRSCTN_DESC "INTRSCTN_DESC" true true false 200 Text 0 0 
,First,#,INTR_pt_1,INTRSCTN_DESC,-1,-1;INTRSCTN_ID "INTRSCTN_ID" true false false 50 Text 0 0 ,First,#,INTR_pt_1,INTRSCTN_ID,-1,-1;TFO_IND "TFO_IND" true true false 1 Text 0 0 ,First,#,INTR_pt_1,TFO_IND,-1,-1;CART_NODE_ID "CART_NODE_ID" true true false 4 Long 0 0 ,First,#,INTR_pt_1,CART_NODE_ID,-1,-1;LEFT_TURN_LN "LEFT_TURN_LN" true true false 4 Long 0 0 ,First,#,INTR_pt_1,LEFT_TURN_LN,-1,-1;RIGHT_TURN_LN "RIGHT_TURN_LN" true true false 4 Long 0 0 ,First,#,INTR_pt_1,RIGHT_TURN_LN,-1,-1;INTERSECTION_CONTROL "INTERSECTION_CONTROL" true true false 4 Long 0 0 ,First,#,INTR_pt_1,INTERSECTION_CONTROL,-1,-1;PER_GREEN_TIME "PER_GREEN_TIME" true true false 4 Double 0 0 ,First,#,INTR_pt_1,PER_GREEN_TIME,-1,-1;LONGITUDE "LONGITUDE" true true false 4 Float 0 0 ,First,#,INTR_pt_1,LONGITUDE,-1,-1;LATITUDE "LATITUDE" true true false 4 Float 0 0 ,First,#,INTR_pt_1,LATITUDE,-1,-1;NM_NE_ID_IN "NM_NE_ID_IN" true false false 4 Long 0 0 ,First,#,INTR_pt_1,NM_NE_ID_IN,-1,-1;NM_NE_ID_OF "NM_NE_ID_OF" true false false 4 Long 0 0 ,First,#,INTR_pt_1,NM_NE_ID_OF,-1,-1;NM_TYPE "NM_TYPE" true false false 4 Text 0 0 ,First,#,INTR_pt_1,NM_TYPE,-1,-1;NM_OBJ_TYPE "NM_OBJ_TYPE" true false false 4 Text 0 0 ,First,#,INTR_pt_1,NM_OBJ_TYPE,-1,-1;NM_BEGIN_MP "NM_BEGIN_MP" true false false 8 Double 0 0 ,First,#,INTR_pt_1,NM_BEGIN_MP,-1,-1;NM_START_DATE "NM_START_DATE" true false false 8 Date 0 0 ,First,#,INTR_pt_1,NM_START_DATE,-1,-1;NM_END_DATE "NM_END_DATE" true true false 8 Date 0 0 ,First,#,INTR_pt_1,NM_END_DATE,-1,-1;NM_END_MP "NM_END_MP" true false false 8 Double 0 0 ,First,#,INTR_pt_1,NM_END_MP,-1,-1;NM_SLK "NM_SLK" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NM_SLK,-1,-1;NM_CARDINALITY "NM_CARDINALITY" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NM_CARDINALITY,-1,-1;NM_ADMIN_UNIT "NM_ADMIN_UNIT" true false false 4 Long 0 0 ,First,#,INTR_pt_1,NM_ADMIN_UNIT,-1,-1;NM_DATE_CREATED "NM_DATE_CREATED" true true false 8 Date 0 0 ,First,#,INTR_pt_1,NM_DATE_CREATED,-1,-1;NM_DATE_MODIFIED 
"NM_DATE_MODIFIED" true true false 8 Date 0 0 ,First,#,INTR_pt_1,NM_DATE_MODIFIED,-1,-1;NM_MODIFIED_BY "NM_MODIFIED_BY" true true false 30 Text 0 0 ,First,#,INTR_pt_1,NM_MODIFIED_BY,-1,-1;NM_CREATED_BY "NM_CREATED_BY" true true false 30 Text 0 0 ,First,#,INTR_pt_1,NM_CREATED_BY,-1,-1;NM_SEQ_NO "NM_SEQ_NO" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NM_SEQ_NO,-1,-1;NM_SEG_NO "NM_SEG_NO" true true false 4 Long 0 0 ,First,#,INTR_pt_1,NM_SEG_NO,-1,-1;NM_TRUE "NM_TRUE" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NM_TRUE,-1,-1;NM_END_SLK "NM_END_SLK" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NM_END_SLK,-1,-1;NM_END_TRUE "NM_END_TRUE" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NM_END_TRUE,-1,-1;NE_ID "NE_ID" true false false 4 Long 0 0 ,First,#,INTR_pt_1,NE_ID,-1,-1;NE_UNIQUE "NE_UNIQUE" true false false 30 Text 0 0 ,First,#,INTR_pt_1,NE_UNIQUE,-1,-1;NE_TYPE "NE_TYPE" true false false 4 Text 0 0 ,First,#,INTR_pt_1,NE_TYPE,-1,-1;NE_NT_TYPE "NE_NT_TYPE" true false false 4 Text 0 0 ,First,#,INTR_pt_1,NE_NT_TYPE,-1,-1;NE_DESCR "NE_DESCR" true false false 240 Text 0 0 ,First,#,INTR_pt_1,NE_DESCR,-1,-1;NE_LENGTH "NE_LENGTH" true true false 8 Double 0 0 ,First,#,INTR_pt_1,NE_LENGTH,-1,-1;NE_ADMIN_UNIT "NE_ADMIN_UNIT" true false false 4 Long 0 0 ,First,#,INTR_pt_1,NE_ADMIN_UNIT,-1,-1;NE_DATE_CREATED "NE_DATE_CREATED" true false false 8 Date 0 0 ,First,#,INTR_pt_1,NE_DATE_CREATED,-1,-1;NE_DATE_MODIFIED "NE_DATE_MODIFIED" true false false 8 Date 0 0 ,First,#,INTR_pt_1,NE_DATE_MODIFIED,-1,-1;NE_MODIFIED_BY "NE_MODIFIED_BY" true false false 30 Text 0 0 ,First,#,INTR_pt_1,NE_MODIFIED_BY,-1,-1;NE_CREATED_BY "NE_CREATED_BY" true false false 30 Text 0 0 ,First,#,INTR_pt_1,NE_CREATED_BY,-1,-1;NE_START_DATE "NE_START_DATE" true false false 8 Date 0 0 ,First,#,INTR_pt_1,NE_START_DATE,-1,-1;NE_END_DATE "NE_END_DATE" true true false 8 Date 0 0 ,First,#,INTR_pt_1,NE_END_DATE,-1,-1;NE_GTY_GROUP_TYPE "NE_GTY_GROUP_TYPE" true true false 4 Text 0 0 
,First,#,INTR_pt_1,NE_GTY_GROUP_TYPE,-1,-1;NE_OWNER "NE_OWNER" true true false 4 Text 0 0 ,First,#,INTR_pt_1,NE_OWNER,-1,-1;NE_NAME_1 "NE_NAME_1" true true false 80 Text 0 0 ,First,#,INTR_pt_1,NE_NAME_1,-1,-1;NE_NAME_2 "NE_NAME_2" true true false 80 Text 0 0 ,First,#,INTR_pt_1,NE_NAME_2,-1,-1;NE_PREFIX "NE_PREFIX" true true false 4 Text 0 0 ,First,#,INTR_pt_1,NE_PREFIX,-1,-1;NE_NUMBER "NE_NUMBER" true true false 8 Text 0 0 ,First,#,INTR_pt_1,NE_NUMBER,-1,-1;NE_SUB_TYPE "NE_SUB_TYPE" true true false 2 Text 0 0 ,First,#,INTR_pt_1,NE_SUB_TYPE,-1,-1;NE_GROUP "NE_GROUP" true true false 30 Text 0 0 ,First,#,INTR_pt_1,NE_GROUP,-1,-1;NE_NO_START "NE_NO_START" true true false 4 Long 0 0 ,First,#,INTR_pt_1,NE_NO_START,-1,-1;NE_NO_END "NE_NO_END" true true false 4 Long 0 0 ,First,#,INTR_pt_1,NE_NO_END,-1,-1;NE_SUB_CLASS "NE_SUB_CLASS" true true false 4 Text 0 0 ,First,#,INTR_pt_1,NE_SUB_CLASS,-1,-1;NE_NSG_REF "NE_NSG_REF" true true false 240 Text 0 0 ,First,#,INTR_pt_1,NE_NSG_REF,-1,-1;NE_VERSION_NO "NE_VERSION_NO" true true false 240 Text 0 0 ,First,#,INTR_pt_1,NE_VERSION_NO,-1,-1""","#") #convert EXOR domains to SDE Database domains - intersection type itemname = 'INTR' itemdom = 'IN_INTRSCTN_TYPE' #IN INTRSCTN_TYPE, TURN_LANE newdomname = 'INTRSCTN_TYPE' itemsel = """"IAL_DOMAIN" = '"""+itemdom+"'" itemdescr = 'Intersection Type' #convert EXOR domains to SDE Database domains - turn lanes type itemdom1 = 'TURN_LANE' #IN INTRSCTN_TYPE, TURN_LANE newdomname1 = 'INTERSECTION_TURN_LANE' itemsel1 = """"IAL_DOMAIN" = '"""+itemdom1+"'" itemdescr1 = 'Turn Lane Type' MakeTableView_management(itemname+"_pt_2", itemname+"PD","#","#") print ("Intersections are located on a directional basis in some cases, so primary and secondary directions are allowed until advised differently") DissolveRouteEvents_lr(itemname+"PD","LRS_Key POINT 
Cnty_Logmile","IIT_NE_ID;INTRSCTN_NAME;ON_STATE_NONSTATE;INTRSCTN_DESC;INTRSCTN_ID;TFO_IND;CART_NODE_ID;LEFT_TURN_LN;RIGHT_TURN_LN;INTERSECTION_CONTROL;PER_GREEN_TIME;NE_OWNER",wsouttbl+"//"+itemname+"_EVENT","LRS_Key POINT Cnty_Logmile","DISSOLVE","INDEX") AssignDomainToField_management(itemname+"_EVENT","INTERSECTION_CONTROL",itemdom) AssignDomainToField_management(itemname+"_EVENT","LEFT_TURN_LN",itemdom1) AssignDomainToField_management(itemname+"_EVENT","RIGHT_TURN_LN",itemdom1) MakeRouteEventLayer_lr("CMLRS","LRS_KEY",itemname+"_EVENT","LRS_Key POINT Cnty_Logmile",itemname+"_ITEM","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT") #arcpy.FeatureClassToFeatureClass_conversion(itemname+"_ITEM", wsouttbl, itemname) lyrlist = ["INTR"] for lyr in lyrlist: lyrin = lyr+"_ITEM"# FeatureClassToFeatureClass_conversion(lyrin, ws+"\\"+tempgdb,lyr) #23 sec
def load_csv(csv):
    """Import a CSV into the ArcMap scratch geodatabase.

    Use for temporary files only.

    csv -- path to (or table view of) the CSV to import
    Returns the full path of the imported table.
    """
    # so() supplies a randomly-named target inside the scratch file gdb.
    target = so("csv", "random", "fgdb")
    out_dir, out_name = os.path.split(target)
    TableToTable_conversion(in_rows=csv, out_path=out_dir, out_name=out_name)
    return target
def UAB(): itemname = "UAB" cantbl = itemname+"_ln_1" domname = "UABR" MakeTableView_management("Database Connections/ATLASPROD.odc/V_NM_UABR",domname) TableToTable_conversion(domname, wsouttbl, domname+"R", "#") MakeTableView_management(wsouttbl+"//"+cantbl,itemname+"PD",""""NE_OWNER" IN ( 'EB' , 'NB' )""","#") DissolveRouteEvents_lr(itemname+"PD","LRS_ROUTE LINE Beg_State_Logmile End_State_Logmile", "CITY_NBR", wsouttbl+"//"+itemname+"_SD","LRS_ROUTE LINE Beg_State_Logmile End_State_Logmile","CONCATENATE","INDEX") AddIndex_management(wsouttbl+"//"+itemname+"_SD","CITY_NBR","CITY_NBR","NON_UNIQUE","NON_ASCENDING") AddField_management(wsouttbl+"//"+domname+"R", "CITY_NBR", "FLOAT", "#", "#", "#") CalculateField_management(wsouttbl+"//"+domname+"R", "CITY_NBR", "[CITY_NUMBER]", "VB") MakeTableView_management(wsouttbl+"//"+itemname+"_SD", itemname+"_view", "#") AddJoin_management(itemname+"_view", "CITY_NBR", wsouttbl+"//"+domname+"R", "CITY_NBR", "KEEP_ALL") TableToTable_conversion(itemname+"_view", wsouttbl, domname+"J", "#") MakeRouteEventLayer_lr(wsouttbl+"//SMLRS","LRS_ROUTE",wsouttbl+"//"+domname+"J","LRS_ROUTE LINE Beg_State_Logmile End_State_Logmile",itemname+"_ITEM","#","ERROR_FIELD","NO_ANGLE_FIELD","NORMAL","ANGLE","LEFT","POINT") FeatureClassToFeatureClass_conversion(itemname+"_ITEM", wsouttbl, itemname) print "we have UABs"
def RouteCheck(RID): #when running this function, pass the RID/LRS KEY Value into the function to update the desired RID #RID is structured '030C0011800W0' #Class should be L, C, or RM print "what route number should be updated?" #RID = '030C0011800W0' Class = RID[3] if Class in ("R", "M"): Class = "RM" else: pass print RID RID_ = RID.replace('-', '_') RIDExp = "RID = '" + RID + "'" tablename = Class + RID_ print RIDExp print "Updating route " + str(RID) + " in table " + str(RID_) if Exists("UpdateGISPROD"): print "this exists" pass else: AddTable = Class + "P_NON_STATE_EVENTS" MakeTableView_management(r"in_memory/" + AddTable, tablename + "_view", "#") MakeFeatureLayer_management( NonState, "NonStateUpdate", "((LRS_KEY LIKE '%C%' OR LRS_ROUTE_PREFIX = 'C') AND (MILEAGE_COUNTED = -1)) OR (LRS_DIR_OF_TRAVEL = 'P' and SURFACE = 'Propose')" ) TableToTable_conversion(tablename + "_view", "in_memory", tablename, RIDExp) if str(GetCount_management(tablename)) == '0': print "No Records to Calculate" else: try: RemoveJoin_management("NonStateUpdate") except: print "no NonStateUpdate, creating the NonStateSystem layer" MakeFeatureLayer_management( NonState, "NonStateUpdate", "(MILEAGE_COUNTED = -1 OR SURFACE = 'Propose')") AddJoin_management("NonStateUpdate", "ID2", tablename, "FID_NON_STATE_SYSTEM", "KEEP_COMMON") print "Check the numbers one more time, and review" print "start Edit session on NonStateUpdate now and type RouteCalc(RID) if it all looks good" print "RteChk script completed successfully"
def ExportCANPLyrs(): print "exporting initialized at " + str(datetime.datetime.now()) destConnection = r"D:\SQL61_GIS_CANSYS.sde" TableToTable_conversion("MAP_EXTRACT", r"Database Connections/SQL61_GIS_CANSYS.sde", "Map_Extract", "#", "#", "#") for lyr in mapping.ListLayers(mxd): if lyr.name in CANPlist: try: lyrname = lyr.name[11:] print lyrname + " exporting..." outlyrname = "V_" + lyrname outlyrobj = destConnection + "\\GIS_CANSYS.DBO." + outlyrname FeatureClassToFeatureClass_conversion(lyr, destConnection, outlyrname, "#", "#", "#") ChangePrivileges_management(outlyrobj, "readonly", "GRANT", "AS_IS") print lyrname + " exported to " + outlyrname + " " + str( datetime.datetime.now()) except ExecuteError: msgs = GetMessages(2) AddError(msgs) print msgs pass except (RuntimeError, TypeError, NameError): print "TypeError on item" + lyr.name pass except: tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] # Concatenate information together concerning the error into a message string # pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str( sys.exc_info()[1]) msgs = "ArcPy ERRORS:\n" + GetMessages(2) + "\n" print pymsg + "\n" print msgs else: #print lyr.name +" was not in the export list and will be skipped" pass
def copy_tables(input_ws, output_ws, foreach_table = None):
    """Copy every table from one database to another, or hand each table
    to a callback.

    input_ws -- the input database
    output_ws -- the output database
    foreach_table -- optional callable(input_ws, output_ws, table); when
        supplied it is invoked for each table instead of the default copy
    """
    from arcpy import env, ListTables, AddMessage, AddWarning, \
        TableToGeodatabase_conversion, GetCount_management, \
        TableToTable_conversion
    from os.path import join

    env.workspace = input_ws
    for tbl in ListTables():
        AddMessage('Processing table: {}'.format(tbl))
        # Honour workspace-level skip flags (set elsewhere on arcpy.env -
        # skipAttach/skipEmpty are not standard env properties; confirm they
        # are assigned before this runs).
        if env.skipAttach and '_attach' in tbl.lower():
            AddWarning('Skipping attachments table {}'.format(tbl))
            continue
        if env.skipEmpty and int(GetCount_management(tbl)[0]) == 0:
            AddWarning('Skipping because table is empty: {} (empty)'.format(tbl))
            continue
        try:
            if foreach_table:
                foreach_table(input_ws, output_ws, tbl)
            else:
                target = join(output_ws, get_name(tbl))
                delete_existing(target)
                TableToTable_conversion(tbl, output_ws, get_name(tbl))
        except Exception as err:
            # Best-effort copy: report the failure and continue with the rest.
            AddWarning('Error on table: {} - {}'.format(tbl, err))
# Staging copy of the CANSYS network into the SQL61_GIS_CANSYS database.
STAGEDB = r"Database Connections\SQL61_GIS_CANSYS.sde"
MResolution = 0.0005
MTolerance = 0.001
# Control the MXD for processing.
mxd = mapping.MapDocument("CURRENT")
env.MTolerance = MTolerance
env.MResolution = MResolution
env.overwriteOutput = "True"
# Copy network layers from CANSYS to the staging area; the SRND and CRND
# layers carry full history.
FeatureClassToFeatureClass_conversion(
    r"Database Connections\CANP.sde\NM3.NM_NLT_SRND_SRND_SDO", STAGEDB, "SRND")
FeatureClassToFeatureClass_conversion(
    r"Database Connections\CANP.sde\NM3.NM_NLT_CRND_CRND_SDO", STAGEDB, "CRND")
TableToTable_conversion(r'Database Connections\CANP.sde\KDOT.MV_MAP_EXTRACT',
                        STAGEDB, "Map_Extract")
TableToTable_conversion(r'Database Connections\CANP.sde\NM3.NM_ELEMENTS',
                        STAGEDB, "ELEMENTS")
# MEMBERS takes a long time to copy.
TableToTable_conversion(r'Database Connections\CANP.sde\NM3.NM_MEMBERS',
                        STAGEDB, "MEMBERS")
TableToTable_conversion(
    r'Database Connections\CANP_Report_User.sde\NM3.NM_INV_ITEMS',
    STAGEDB, "ITEMS_ALL")
# To copy the SECT layer it must be joined first so RSE_HE_ID is preserved.
MakeFeatureLayer_management(
    r"Database Connections\CANP.sde\NM3.NM3_NET_MAP_SECT_TABLE", "NM_SECT")
# NOTE(review): the copy above wrote the table as "ELEMENTS", but this view
# reads STAGEDB + r"\Elements_ALL" - confirm the staged table name.
MakeTableView_management(STAGEDB + r"\Elements_ALL", "ELEMENTS")
AddJoin_management("NM_SECT", "RSE_HE_ID", "ELEMENTS", "NE_ID", "KEEP_COMMON")
FeatureClassToFeatureClass_conversion("NM_SECT", STAGEDB, "SECT")
# NOTE(review): RouteFix() below is flattened source; the statement nesting
# (which calls sit inside the else/try branches) cannot be recovered reliably
# from this text, so it is left byte-identical.  From the visible calls it
# stages one route's <Class>P_NON_STATE_EVENTS rows into in_memory (RID "000C"
# selects all rows via where-clause "#") and joins them to the
# "NonStateUpdate" layer for review before calculation.
def RouteFix(RID): #when running this function, pass the RID/LRS KEY Value into the function to update the desired RID #RID is structured '030C0011800W0' #Class should be L, C, or RM print "what route number should be updated?" #RID = '030C0011800W0' Class = RID[3] if Class in ("R", "M"): Class = "RM" else: pass print RID RID_ = RID.replace('-', '_') tablename = Class + RID_ if RID == "000C": RIDExp = "#" else: RIDExp = "RID = '" + RID + "'" print "Updating route " + str(RID) if Exists("UpdateGISPROD"): pass else: AddTable = Class + "P_NON_STATE_EVENTS" MakeTableView_management("in_memory/" + AddTable, tablename + "_view", "#") TableToTable_conversion(tablename + "_view", "in_memory", tablename, RIDExp) if str(GetCount_management(tablename)) == '0': print "No Records to Calculate" else: try: RemoveJoin_management("NonStateUpdate") except: print "no NonStateUpdate, creating the NonStateSystem layer" #AddIndex_management("updatetblh", "FID_NON_STATE_SYSTEM", "ID2", "UNIQUE", "ASCENDING") MakeFeatureLayer_management( NonState, "NonStateUpdate", "(MILEAGE_COUNTED = -1 OR SURFACE = 'Propose')") AddJoin_management("NonStateUpdate", "ID2", tablename, "FID_NON_STATE_SYSTEM", "KEEP_COMMON") print "Check the numbers one more time, and review" print "start Edit session on NonStateUpdate now and type RouteCalc(RID) if it all looks good" print "RteChk script completed successfully & RteCalc script starting..." 
# NOTE(review): the statements below re-derive Class/tablename from RID and
# calculate the adjusted begin/end logmiles and length from the joined event
# table back onto SHARED.NON_STATE_SYSTEM; since they end by printing
# "RteFix script completed successfully" they appear to be the tail of
# RouteFix() (or the body of a RouteCalc(RID) helper whose def line was lost
# in the flattening) - confirm against the original source before running.
Class = RID[3] if Class in ("R", "M"): Class = "RM" else: pass print RID tablename = Class + RID SelectLayerByAttribute_management("NonStateUpdate", "NEW_SELECTION", "1=1") CalculateField_management("NonStateUpdate", "SHARED.NON_STATE_SYSTEM.LRS_BEG_CNTY_LOGMILE", "[" + tablename + ".NEW_BEGLOG]", "VB", "") CalculateField_management("NonStateUpdate", "SHARED.NON_STATE_SYSTEM.LRS_END_CNTY_LOGMILE", "[" + tablename + ".NEW_ENDLOG]", "VB", "") CalculateField_management("NonStateUpdate", "SHARED.NON_STATE_SYSTEM.LENGTH", "[" + tablename + ".AdjLength]", "VB", "") print "adjusted begin and end logmiles were recalculated to the source GISPROD database" print "end your edit session" #Delete_management("in_memory/"+tablename)" print "RteFix script completed successfully"
# NOTE(review): CRB() below is flattened source (many original physical lines
# per stored line); it is left byte-identical because reconstructing the exact
# statement layout is risky - e.g. the assignment 'calcfield =
# "CNPRouteShapeLength."' near the end continues onto the next stored line
# ("+ outfield"), so the text as stored will not parse until the original
# line breaks are restored.
# Purpose (from the visible calls): builds shape-length-measured routes for
# the classified primary (CP) and non-primary (CNP) C non-state segments,
# locates the original segments along them, and computes adjusted begin/end
# measures for later reconciliation back into GISPROD.
def CRB(): print "querying the shared.NON_STATE_SYSTEM to obtain only urban classified primary C routes with mileage that should be counted and for resolution segments." MakeFeatureLayer_management( NonState, "NonStateCP", "((LRS_KEY LIKE '%C%' OR LRS_ROUTE_PREFIX = 'C') AND (MILEAGE_COUNTED = -1)) OR (LRS_DIR_OF_TRAVEL = 'P' and SURFACE = 'Propose')" ) print "querying the shared.NON_STATE_SYSTEM to obtain only urban classified NonPrimary C routes with mileage that should be counted and for resolution segments." MakeFeatureLayer_management( NonState, "NonStateCNP", "(LRS_KEY LIKE '%C%' OR LRS_ROUTE_PREFIX = 'C') AND (MILEAGE_COUNTED = 0) AND (LRS_DIR_OF_TRAVEL = 'S') and (COUNTY_NUMBER <> 0)" ) print "shared.Non_State_System has unique IDS that we desire to keep as persistent IDs for comparison with GeoMedia, so we are spatially intersecting the state boundary to keep them as is." Buffer_analysis(StateBnd, "State_Boundary_1Mile", "5280 Feet", "FULL", "ROUND", "NONE", "", "PLANAR") MakeFeatureLayer_management("State_Boundary_1Mile", "StateBnd") Intersect_analysis("NonStateCP #;StateBnd #", "Non_State_Classified_Primary", "ALL", "-1 Unknown", "LINE") Intersect_analysis("NonStateCNP #;StateBnd #", "Non_State_Classified_NonPrimary", "ALL", "-1 Unknown", "LINE") NonStateCP_fx = r'Non_State_Classified_Primary' NonStateCNP_fx = r'Non_State_Classified_NonPrimary' MakeFeatureLayer_management(NonStateCP_fx, "Non_State_Classified_Primary") MakeFeatureLayer_management(NonStateCNP_fx, "Non_State_Classified_NonPrimary") CP_ET = 'CP_NON_STATE_EVENTS' CNP_ET = 'CNP_NON_STATE_EVENTS' MakeFeatureLayer_management(NUSYS, "Nusys_Extract", "NSEC_SUB_CLASS <> 'R'") print "creating primary C Non_State_Routes by the Shape Length" MakeFeatureLayer_management( "Non_State_Classified_Primary", "BackwardSegsCP", "LRS_BACKWARDS = -1 AND (MILEAGE_COUNTED = -1 OR (LRS_DIR_OF_TRAVEL = 'P' and SURFACE = 'Propose'))" ) FlipLine_edit("BackwardSegsCP") 
# Build primary-C routes measured by shape length (BCM=0, ECM=length in
# miles), then locate the original segments along them as CP_NON_STATE_EVENTS
# and add the adjusted-measure working fields.
Dissolve_management("Non_State_Classified_Primary", "CPRouteShapeLength", "NQR_DESCRIPTION", "", "MULTI_PART", "DISSOLVE_LINES") AddField_management("CPRouteShapeLength", "BCM", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CPRouteShapeLength", "BCM", "0", "VB", "") AddField_management("CPRouteShapeLength", "ECM", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CPRouteShapeLength", "ECM", "!Shape.length@miles!", "Python") CreateRoutes_lr("CPRouteShapeLength", "NQR_DESCRIPTION", destdb + "\CP_ShapeLengthRoute", "TWO_FIELDS", "BCM", "ECM", "UPPER_LEFT", "1", "0", "IGNORE", "INDEX") #Flip them back to the original direction FlipLine_edit(in_features="BackwardSegsCP") LocateFeaturesAlongRoutes_lr("Non_State_Classified_Primary", "CP_ShapeLengthRoute", "NQR_DESCRIPTION", "0 Feet", "CP_NON_STATE_EVENTS", "RID LINE FMEAS TMEAS", "FIRST", "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON") AddField_management("CP_NON_STATE_EVENTS", "AdjBegin", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "AdjEnd", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "CHG_BEGLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "CHG_ENDLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "NEW_BEGLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "NEW_ENDLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "AdjLength", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CP_NON_STATE_EVENTS", "CHANGE", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CP_NON_STATE_EVENTS", "AdjBegin", "round( [FMEAS] , 3 )", "VB", code_block="") 
# Round the located measures, compare them against the LRS county logmiles
# for the CP events, then repeat the route-building for the non-primary
# C routes (dissolved on LRS_KEY instead of NQR_DESCRIPTION).
CalculateField_management("CP_NON_STATE_EVENTS", "AdjEnd", "round( [TMEAS] , 3 )", "VB", code_block="") CalculateField_management("CP_NON_STATE_EVENTS", "CHG_BEGLOG", "[AdjBegin] - [LRS_BEG_CNTY_LOGMILE]", "VB", code_block="") CalculateField_management("CP_NON_STATE_EVENTS", "CHG_ENDLOG", "[AdjEnd] - [LRS_END_CNTY_LOGMILE]", "VB", code_block="") CalculateField_management("CP_NON_STATE_EVENTS", "NEW_BEGLOG", "[AdjBegin]", "VB", code_block="") CalculateField_management("CP_NON_STATE_EVENTS", "NEW_ENDLOG", "[AdjEnd]", "VB", code_block="") CalculateField_management("CP_NON_STATE_EVENTS", "AdjLength", "[AdjEnd] - [AdjBegin]", "VB", code_block="") CalculateField_management("CP_NON_STATE_EVENTS", "CHANGE", "abs([LENGTH] - [AdjLength])", "VB", code_block="") MakeRouteEventLayer_lr("CP_ShapeLengthRoute", "NQR_DESCRIPTION", "CP_NON_STATE_EVENTS", "RID LINE FMEAS TMEAS", "CP_LRS_Review_Events", "", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT") print "CP-Rte Builder script completed successfully" print "creating NonPrimary C Non_State_Routes by the Shape Length" MakeFeatureLayer_management( "Non_State_Classified_NonPrimary", "BackwardSegsCNP", "LRS_BACKWARDS = -1 AND LRS_DIR_OF_TRAVEL = 'S' and COUNTY_NUMBER <> 0" ) FlipLine_edit("BackwardSegsCNP") Dissolve_management("Non_State_Classified_NonPrimary", "CNPRouteShapeLength", "LRS_KEY", "", "MULTI_PART", "DISSOLVE_LINES") AddField_management("CNPRouteShapeLength", "BCM", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CNPRouteShapeLength", "BCM", "0", "VB", "") AddField_management("CNPRouteShapeLength", "ECM", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CNPRouteShapeLength", "ECM", "!Shape.length@miles!", "Python") CreateRoutes_lr("CNPRouteShapeLength", "LRS_KEY", destdb + "\CNP_ShapeLengthRoute", "TWO_FIELDS", "BCM", "ECM", "UPPER_LEFT", "1", "0", "IGNORE", "INDEX") #Flip them back to the original direction 
# Same adjusted-measure fields and calculations for the CNP events.
FlipLine_edit(in_features="BackwardSegsCNP") LocateFeaturesAlongRoutes_lr("Non_State_Classified_NonPrimary", "CNP_ShapeLengthRoute", "LRS_KEY", "0 Feet", "CNP_NON_STATE_EVENTS", "RID LINE FMEAS TMEAS", "FIRST", "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON") AddField_management("CNP_NON_STATE_EVENTS", "AdjBegin", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "AdjEnd", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "CHG_BEGLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "CHG_ENDLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "NEW_BEGLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "NEW_ENDLOG", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "AdjLength", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNP_NON_STATE_EVENTS", "CHANGE", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CNP_NON_STATE_EVENTS", "AdjBegin", "round( [FMEAS] , 3 )", "VB", "") CalculateField_management("CNP_NON_STATE_EVENTS", "AdjEnd", "round( [TMEAS] , 3 )", "VB", "") CalculateField_management("CNP_NON_STATE_EVENTS", "CHG_BEGLOG", "[AdjBegin] - [LRS_BEG_CNTY_LOGMILE]", "VB", "") CalculateField_management("CNP_NON_STATE_EVENTS", "CHG_ENDLOG", "[AdjEnd] - [LRS_END_CNTY_LOGMILE]", "VB", "") CalculateField_management("CNP_NON_STATE_EVENTS", "NEW_BEGLOG", "[AdjBegin]", "VB", "") CalculateField_management("CNP_NON_STATE_EVENTS", "NEW_ENDLOG", "[AdjEnd]", "VB", "") CalculateField_management("CNP_NON_STATE_EVENTS", "AdjLength", "[AdjEnd] - [AdjBegin]", "VB", code_block="") CalculateField_management("CNP_NON_STATE_EVENTS", "CHANGE", "abs([LENGTH] - [AdjLength])", "VB", code_block="") 
# Locate each CNP route's begin/end point on the CP routes to transfer
# measures into Pbeg/Pend; note the dangling 'calcfield = "CNPRouteShapeLength."'
# at the end of this chunk continues onto the next line ("+ outfield").
MakeRouteEventLayer_lr("CNP_ShapeLengthRoute", "LRS_KEY", "CNP_NON_STATE_EVENTS", "RID LINE FMEAS TMEAS", "CNP_LRS_Review_Events", "", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT") AddField_management("CNPRouteShapeLength", "PersistentID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNPRouteShapeLength", "Pbeg", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNPRouteShapeLength", "Pend", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") CalculateField_management("CNPRouteShapeLength", "PersistentID", "!ID2!", "PYTHON_9.3", "") TableToTable_conversion("CNPRouteShapeLength", "in_memory", "CNP_Events", "") endpoints = ["beg", "end"] for pos in endpoints: out_event = "CNP_Events_" + pos print out_event out_lyr = "CNP_Events_Features_" + pos print out_lyr outfield = "P" + pos print outfield if pos == "beg": print "Will locate begin point" routesettings = "LRS_KEY POINT BCM" else: print "locating end point" routesettings = "LRS_KEY POINT ECM" MakeRouteEventLayer_lr("CNP_ShapeLengthRoute", "LRS_KEY", "CNP_Events", routesettings, out_event, "", "ERROR_FIELD", "ANGLE_FIELD", "NORMAL", "ANGLE", "LEFT", "POINT") LocateFeaturesAlongRoutes_lr(out_event, "CP_ShapeLengthRoute", "NQR_DESCRIPTION", "500 Feet", out_lyr, "RID POINT MEAS", "ALL", "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON") AddJoin_management(out_lyr, "PersistentID", "CNPRouteShapeLength", "PersistentID", "KEEP_ALL") selexp = out_lyr + ".RID <> CNPRouteShapeLength.LRS_KEY" print selexp SelectLayerByAttribute_management(out_lyr, "NEW_SELECTION", selexp) DeleteRows_management(out_lyr) RemoveJoin_management(out_lyr) AddJoin_management("CNPRouteShapeLength", "PersistentID", out_lyr, "PersistentID", "KEEP_ALL") #expression = "[CNP_Events_Features_Begin.MEAS]" expression = "[" + out_lyr + ".MEAS]" print expression calcfield = "CNPRouteShapeLength." 
+ outfield #CNPRouteShapeLength.Pbeg CalculateField_management("CNPRouteShapeLength", calcfield, expression, "VB", "") RemoveJoin_management("CNPRouteShapeLength", "") #test flipped routes and calculate mileage and flip flag AddField_management("CNPRouteShapeLength", "FlipTest", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNPRouteShapeLength", "Adj_Beg", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") AddField_management("CNPRouteShapeLength", "Adj_End", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") SelectLayerByAttribute_management("CNPRouteShapeLength", "NEW_SELECTION", '"Pbeg" > "Pend"') CalculateField_management("CNPRouteShapeLength", "FlipTest", "1", "VB", "") CalculateField_management("CNPRouteShapeLength", "Adj_Beg", "!Pend!", "Python", "") CalculateField_management("CNPRouteShapeLength", "Adj_End", "!Pbeg!", "Python", "") SelectLayerByAttribute_management("CNPRouteShapeLength", "NEW_SELECTION", '"Pbeg" < "Pend"') CalculateField_management("CNPRouteShapeLength", "FlipTest", "0", "VB", "") CalculateField_management("CNPRouteShapeLength", "Adj_Beg", "!Pbeg!", "Python", "") CalculateField_management("CNPRouteShapeLength", "Adj_End", "!Pend!", "Python", "") SelectLayerByAttribute_management("CNPRouteShapeLength", "NEW_SELECTION", '"Adj_Beg" <= 0.003') CalculateField_management("CNPRouteShapeLength", "Adj_Beg", "0", "Python", "") print "CNP-Rte Builder script completed successfully"
def CalcUsingSQLserver(DBPassword):
    """Push calibration tables to SQL Server and run measure-update SQL there.

    Copies the eight calibration tables from the local file GDB into the
    dev/prod SDE geodatabase, then connects over a system ODBC DSN (pyodbc)
    and runs a batch of T-SQL UPDATEs that set county_log_begin/_end on
    All_Road_Centerlines and All_Road_Centerlines_D1.

    DBPassword -- SQL Server password for the schema owner account
                  (collected up front so the run is unattended).
    """
    #for ArcGIS Pro python 3.6
    #Propy will not load pymssql package for some reason
    #new plan on 3/13/2018 - use pyodbc and system DSN to connect to SQL Server, because it works.
    #another option to test someday might be http://desktop.arcgis.com/en/arcmap/latest/analyze/arcpy-classes/arcsdesqlexecute.htm
    import datetime
    startDateTime = datetime.datetime.now()
    print("starting SQL calculation at " + str(startDateTime) + ", it should take a couple minutes to refresh tables and calculate measures")
    #Calibration process completed in 0:03:36.252839 hours, minutes, seconds
    import getpass  # NOTE(review): imported but not used in this function
    fileformatDateStr = startDateTime.strftime("%Y%m%d")
    from KhubCode25.KhubCode25Config import devorprod, dbname, dbownername, localProFileGDBWorkspace, devDataSourceSDE, prodDataSourceSDE, KDOTConnections, devSqlDSN, prodSqlDSN
    # NOTE(review): these two arcpy names are imported but not used below.
    from arcpy import SelectLayerByAttribute_management, FlipLine_edit
    username = dbownername
    #use Collect Password upfront so we dont need to wait for the sequence to run before entering this password
    dbpassword = DBPassword
    # Pick the data source and ODBC DSN based on the configured environment.
    if devorprod == 'prod':
        database = prodDataSourceSDE
        dsn = prodSqlDSN
        print("running on " + devorprod)
    else:
        database = devDataSourceSDE
        dsn = devSqlDSN
        print("running on " + devorprod)
    import pyodbc  # @UnresolvedImport for pydev/eclipse
    from arcpy import Delete_management, TableToTable_conversion, Exists
    # Dated local file GDB holding the freshly built calibration tables.
    localfilegdb = localProFileGDBWorkspace + '\\' + 'KhubRoadCenterlinesTemp' + fileformatDateStr + '.gdb'
    connectionstring = 'DSN=' + dsn + ';UID=' + username + ';PWD=' + dbpassword
    # NOTE(review): this prints the password to stdout -- consider masking.
    print(connectionstring)
    sdegdb = KDOTConnections + r'\\' + database + r'\\' + dbname + "." + dbownername
    CalTables = ["START_COUNTY", "END_COUNTY", "END_D1_COUNTY", "START_D1_COUNTY", "START_D1_C_D", "END_D1_C_D", "START_D1_RM_D", "END_D1_RM_D"]
    # Replace each calibration table in SDE with the local copy.
    for table in CalTables:
        if Exists(sdegdb + "." + table):
            Delete_management(sdegdb + "." + table)
        else:
            pass
        print("copying table " + str(table))
        TableToTable_conversion(localfilegdb + "\\" + table, KDOTConnections + r'\\' + database, table)
    print("connecting to SQL server...")
    cnxn = pyodbc.connect(connectionstring)
    cursor = cnxn.cursor()
    # Batch of UPDATEs joining each centerline row to its calibration measure;
    # the substring() predicates match on the LRS key prefix.
    querystringSetStateSystemMiles = """
    update [sde].[All_Road_Centerlines] set [county_log_begin] = bm.[MEAS_COUNTY]
    from [sde].[All_Road_Centerlines] r
    JOIN (select distinct [KDOT_STATIC_ID1], [COUNTY_LRS], [MEAS_COUNTY], [KDOT_LRS_KEY]
    from [sde].[START_COUNTY] bm
    where 1=1
    and substring([COUNTY_LRS],0,10) =substring([KDOT_LRS_KEY],0,10)) bm
    on r.[KDOT_STATIC_ID1] = bm.[KDOT_STATIC_ID1]

    update [sde].[All_Road_Centerlines] set [county_log_end] = em.[MEAS_COUNTY]
    from [sde].[All_Road_Centerlines] r
    JOIN (select distinct [COUNTY_LRS], [MEAS_COUNTY], [KDOT_STATIC_ID1], [KDOT_LRS_KEY]
    from [sde].[END_COUNTY] em
    where 1=1
    and substring([COUNTY_LRS],0,10) = substring([KDOT_LRS_KEY],0,10)) em
    on r.[KDOT_STATIC_ID1] = em.[KDOT_STATIC_ID1]

    update [sde].[All_Road_Centerlines_D1] set [county_log_begin] = bm.[MEAS_COUNTY]
    from [sde].[All_Road_Centerlines_D1] r
    JOIN (select distinct [KDOT_STATIC_ID2], [COUNTY_LRS], [MEAS_COUNTY], [KDOT_LRS_KEY]
    from [sde].[START_D1_COUNTY] bm
    where 1=1
    and substring([COUNTY_LRS],0,10) =substring([KDOT_LRS_KEY],0,10)) bm
    on r.[KDOT_STATIC_ID2] = bm.[KDOT_STATIC_ID2]

    update [sde].[All_Road_Centerlines_D1] set [county_log_end] = em.[MEAS_COUNTY]
    from [sde].[All_Road_Centerlines_D1] r
    JOIN (select distinct [COUNTY_LRS], [MEAS_COUNTY], [KDOT_STATIC_ID2], [KDOT_LRS_KEY]
    from [sde].[END_D1_COUNTY] em
    where 1=1
    and substring([COUNTY_LRS],0,10) = substring([KDOT_LRS_KEY],0,10)) em
    on r.[KDOT_STATIC_ID2] = em.[KDOT_STATIC_ID2]

    update [sde].[All_Road_Centerlines_D1] set [county_log_begin] = bm.[C_MEAS]
    from [sde].[All_Road_Centerlines_D1] r
    JOIN (select distinct [FIRST_KDOT_STATIC_ID2], [LRS_KEY], [C_MEAS], [KDOT_LRS_KEY]
    from [sde].[START_D1_C_D]bm
    where 1=1
    and substring([LRS_KEY],0,9) =substring([KDOT_LRS_KEY],0,9)) bm
    on r.[KDOT_STATIC_ID2] = bm.[FIRST_KDOT_STATIC_ID2]

    update [sde].[All_Road_Centerlines_D1] set [county_log_end] = em.[C_MEAS]
    from [sde].[All_Road_Centerlines_D1] r
    JOIN (select distinct [FIRST_KDOT_STATIC_ID2], [LRS_KEY], [C_MEAS], [KDOT_LRS_KEY]
    from [sde].[END_D1_C_D]em
    where 1=1
    and substring([LRS_KEY],0,9) =substring([KDOT_LRS_KEY],0,9)) em
    on r.[KDOT_STATIC_ID2] = em.[FIRST_KDOT_STATIC_ID2]

    update [sde].[All_Road_Centerlines_D1] set [county_log_begin] = bm.[RM_MEAS]
    from [sde].[All_Road_Centerlines_D1] r
    JOIN (select distinct [FIRST_KDOT_STATIC_ID2], [LRS_KEY], [RM_MEAS], [KDOT_LRS_KEY]
    from [sde].[START_D1_RM_D]bm
    where 1=1
    and substring([LRS_KEY],0,9) =substring([KDOT_LRS_KEY],0,9)) bm
    on r.[KDOT_STATIC_ID2] = bm.[FIRST_KDOT_STATIC_ID2]

    update [sde].[All_Road_Centerlines_D1] set [county_log_end] = em.[RM_MEAS]
    from [sde].[All_Road_Centerlines_D1] r
    JOIN (select distinct [FIRST_KDOT_STATIC_ID2], [LRS_KEY], [RM_MEAS], [KDOT_LRS_KEY]
    from [sde].[END_D1_RM_D]em
    where 1=1
    and substring([LRS_KEY],0,9) =substring([KDOT_LRS_KEY],0,9)) em
    on r.[KDOT_STATIC_ID2] = em.[FIRST_KDOT_STATIC_ID2]
    """
    cursor.execute(querystringSetStateSystemMiles)
    cursor.execute("COMMIT")
    cursor.close()
    del cursor
    # F:\Cart\projects\Conflation\SQL\SQLServer\Conflation2012\KHUB is where that SQL query lives, its the State System Calibration 2 SQL file
    print('Calibration process completed in {} hours, minutes, seconds.'.format(datetime.datetime.now() - startDateTime))
    print("calculated using sql server, need to incorporate flipping directions based on D1 calibration")
def TnA():
    """Refresh KanDrive construction (CDRS) segments from the Oracle source.

    Copies the CDRS alert routes and work-zone detail table from Oracle into
    in_memory, joins them, derives display fields (RouteName, STATUS,
    Alert_Status_I), then truncates the target SDE feature class and reloads
    it row by row.

    Relies on module-level names: env, stagews, sdeCDRS, sdeCDRSWZ,
    sdeKandriveConstruction, lambertCC, starttime, daSearchCursor,
    daInsertCursor, datetime, sys and the arcpy geoprocessing functions.
    Any exception is reported and re-raised; cursors are cleaned up in
    the finally block.
    """
    try:
        env.workspace = stagews
        #copying oracle tables to memory
        print str(datetime.datetime.now()) + ' copying oracle tables to memory'
        # Only alerts that are not status 3 (closed-out) are carried over.
        FeatureClassToFeatureClass_conversion(sdeCDRS,"in_memory","Construction","#","ALERT_STATUS <> 3")
        MakeQueryTable_management(sdeCDRSWZ,"wz1","USE_KEY_FIELDS","KANROAD.CDRS_WZ_DETAIL.CDRS_WZ_DETAIL_ID", """KANROAD.CDRS_WZ_DETAIL.CDRS_WZ_DETAIL_ID #;KANROAD.CDRS_WZ_DETAIL.CDRS_DETOUR_TYPE_ID #; KANROAD.CDRS_WZ_DETAIL.WORK_ZONE_DESC #;KANROAD.CDRS_WZ_DETAIL.WORK_ZONE_SPEED_RESTRIC #; KANROAD.CDRS_WZ_DETAIL.DETOUR_TYPE_TXT #;KANROAD.CDRS_WZ_DETAIL.DETOUR_SPEED_RESTRIC #; KANROAD.CDRS_WZ_DETAIL.DETOUR_DESC #""", "#")
        TableToTable_conversion("wz1", 'in_memory', 'wz')
        #Joining the Oracle CDRS WZ table
        print str(datetime.datetime.now()) + " Joining the Oracle CDRS WZ table"
        MakeFeatureLayer_management("Construction", "ConstJoin")
        AddJoin_management("ConstJoin","CDRS_WZ_DETAIL_ID","wz","KANROAD_CDRS_WZ_DETAIL_CDRS_WZ_DETAIL_ID","KEEP_ALL")
        FeatureClassToFeatureClass_conversion("ConstJoin","in_memory","CDRS","#",'ConstJoin.ALERT_STATUS < 3', "#")
        #reformatting the Route name for US routes
        print str(datetime.datetime.now()) + " reformatting the Route name for US routes"
        AddField_management("CDRS", "RouteName", "TEXT", "#", "10")
        # Field-calc expression: route class letter + route number with leading zeros stripped.
        routenamed = '!Construction_BEG_LRS_ROUTE![0:1] +str(!Construction_BEG_LRS_ROUTE![3:6]).lstrip("0")' # calculation expression
        #Calculate the Route names for User Display
        print routenamed
        CalculateField_management("CDRS", "RouteName", routenamed, "PYTHON_9.3","#")
        AddField_management("CDRS", "STATUS", "TEXT", "#", "10")
        AddField_management("CDRS", "Alert_Status_I", "LONG", "#", "#")
        CalculateField_management("CDRS", "Alert_Status_I", '!Construction_ALERT_STATUS!', "PYTHON_9.3", "#")
        #Assigning projection for KanRoad CDRS Alert Route Layer
        print str(datetime.datetime.now()) + " Assigning projection for KanRoad CDRS Alert Route Layer"
        DefineProjection_management("CDRS", lambertCC)
        #reformatting the Route name for US routes
        print str(datetime.datetime.now()) + " reformatting the Route name for US routes"
        # Label STATUS from the alert status codes (2 = active, 1 = planned;
        # active + FEA_CLOSED overrides to "Closed").
        MakeFeatureLayer_management("CDRS", "ACTIVERoutes", '"Construction_ALERT_STATUS" = 2')
        CalculateField_management("ACTIVERoutes","STATUS",'"Active"',"PYTHON_9.3","#")
        MakeFeatureLayer_management("CDRS", "ClosedRoutes", '"Construction_ALERT_STATUS" = 2 AND "Construction_FEA_CLOSED" = 1')
        CalculateField_management("ClosedRoutes","STATUS",'"Closed"',"PYTHON_9.3","#")
        MakeFeatureLayer_management("CDRS", "PlannedRoutes", '"Construction_ALERT_STATUS" = 1')
        CalculateField_management("PlannedRoutes","STATUS",'"Planned"',"PYTHON_9.3","#")
        #copying joined oracle tables to memory for loading in Wichway Schema
        print str(datetime.datetime.now()) + " copying joined oracle tables to memory for loading in Wichway Schema"
        FeatureClassToFeatureClass_conversion(sdeKandriveConstruction, "in_memory", "CDRS_Segments", "#", "#")
        #delete rows in the destination feature class
        DeleteRows_management("CDRS_Segments")
        ###############################################################################################################
        # Maintainability information:
        # If you need to add another field to transfer between the two, just add it to the searchCursorFields and the
        # insertCursorFields lists and make sure that it is in the same position in the list order for both of
        # them.
        # Besides 'LoadDate', the order does not matter, so long as each field name in the
        # searchCursorFields has a counterpart in the insertCursorFields and vice versa.
        # 'LoadDate' should always be last for the insertCursorFields as it is appended to each row after all
        # of the other items from the searchCursorFields.
        ###############################################################################################################
        featuresToTransfer = list()
        # searchCursorFields go to "in_memory\CDRS". (Input table)
        searchCursorFields = ['SHAPE@', 'RouteName', 'Construction_BEG_STATE_LOGMILE', 'Construction_END_STATE_LOGMILE', 'Construction_BEG_COUNTY_NAME', 'Construction_ALERT_DATE', 'Construction_COMP_DATE', 'Construction_ALERT_TYPE_TXT', 'Construction_ALERT_DESC_TXT', 'Construction_VERT_RESTRICTION', 'Construction_WIDTH_RESTRICTION', 'Construction_TIME_DELAY_TXT', 'Construction_PUBLIC_COMMENT', 'wz_KANROAD_CDRS_WZ_DETAIL_DETOUR_TYPE_TXT', 'wz_KANROAD_CDRS_WZ_DETAIL_DETOUR_DESC', 'Construction_CONTACT_NAME', 'Construction_CONTACT_PHONE', 'Construction_CONTACT_EMAIL', 'Construction_ALERT_HYPERLINK', 'Alert_Status_I', 'Construction_FEA_CLOSED', 'STATUS', 'Construction_ALERT_DIREC_TXT', 'Construction_BEG_LONGITUDE', 'Construction_BEG_LATITUDE']
        # insertCursorFields go to sdeKandriveConstruction. (Output table)
        insertCursorFields = ['SHAPE@', 'RouteName', 'BeginMP', 'EndMP', 'County', 'StartDate', 'CompDate', 'AlertType', 'AlertDescription', 'HeightLimit', 'WidthLimit', 'TimeDelay', 'Comments', 'DetourType', 'DetourDescription', 'ContactName', 'ContactPhone', 'ContactEmail', 'WebLink', 'AlertStatus', 'FeaClosed', 'Status', 'AlertDirectTxt', 'X', 'Y', 'LoadDate']
        cdrsSearchCursor = daSearchCursor(r"in_memory\CDRS", searchCursorFields, """ "Alert_Status_I" <> 3""")
        # Buffer every source row, appending the run timestamp as LoadDate.
        for cdrsCursorItem in cdrsSearchCursor:
            featureItem = list(cdrsCursorItem)
            featureItem.append(starttime)
            featuresToTransfer.append(featureItem)
        ##Debug
        for feature in featuresToTransfer:
            print feature
        ##
        RemoveJoin_management("ConstJoin", "wz")
        #truncating CDRS segments in WICHWAY SPATIAL
        print str(datetime.datetime.now()) + " truncating CDRS segments in WICHWAY SPATIAL"
        TruncateTable_management(sdeKandriveConstruction)
        cdrsInsertCursor = daInsertCursor(sdeKandriveConstruction, insertCursorFields)
        for cdrsFeature in featuresToTransfer:
            insertOID = cdrsInsertCursor.insertRow(cdrsFeature)
            print "Inserted a row with the OID of: " + str(insertOID)
    except:
        # Report the first exception argument, then re-raise so the caller sees it.
        print "An error occurred."
        errorItem = sys.exc_info()[1]
        print errorItem.args[0]
        try:
            del errorItem
        except:
            pass
        raise
    finally:
        # Release cursor locks whether or not the load succeeded.
        try:
            del cdrsSearchCursor
        except:
            pass
        try:
            del cdrsInsertCursor
        except:
            pass
#written by Kyle 10/11/12 # Description # Create Domain, select distinct values in field, append to domain, and assign domain to field # from arcpy import SearchCursor, TableToTable_conversion, TableToDomain_management, env, AssignDomainToField_management, RegisterWithGeodatabase_management env.overwriteOutput = 1 fcName = r'Database Connections\Collect16_SDE.sde\Collect16.sde.CIIMS_CROSSINGDATA_DOMAINVAL' DomainGDB = r'C:\temp\CrossingDomainTables.gdb' DataTable = r'Database Connections\Collect16_SDE.sde\Collect16.SDE.CIIMS_CROSSINGDATA' EnterpriseGDB = r'Database Connections\Collect16_SDE.sde' fldName = 'CrossingData' #RegisterWithGeodatabase_management(EnterpriseGDB+r"/Collect16.sde.CIIMS_CROSSINGDATA_DOMAINVAL") myList = set([row.getValue(fldName) for row in SearchCursor(fcName, fields=fldName)]) print myList for attrib in myList: sels = str(fldName.strip())+" = '"+ attrib+"'" print sels outname = 'd'+str(attrib) print outname TableToTable_conversion(fcName,DomainGDB,outname,sels) TableToDomain_management(DomainGDB+"/"+outname,"DESCRIPTIONS","DESCRIPTIONS",EnterpriseGDB, outname, attrib, "REPLACE") AssignDomainToField_management(DataTable, attrib, outname, subtype_code="") print "All Cansys Domains copied to temp mdb as coded text values"
#Set the workspace database connections EXOR_PROD = "Database Connections\CANP.odc\:" STAGEDB = r"Database Connections\SQL61_GIS_CANSYS.sde" MResolution = 0.0005 MTolerance = 0.001 #Control the MXD for processing mxd = mapping.MapDocument("CURRENT") env.MTolerance = MTolerance env.MResolution = MResolution #Copy Network Layers from CANSYS to Staging Area. SRND and CRND layers have full history FeatureClassToFeatureClass_conversion(r"Database Connections\CANP.sde\NM3.NM_NLT_SRND_SRND_SDO", STAGEDB, "SRND" ) FeatureClassToFeatureClass_conversion(r"Database Connections\CANP.sde\NM3.NM_NLT_CRND_CRND_SDO", STAGEDB, "CRND" ) TableToTable_conversion(r'Database Connections\CANP.sde\KDOT.MV_MAP_EXTRACT', STAGEDB, "Map_Extract") TableToTable_conversion(r'Database Connections\CANP.sde\NM3.NM_ELEMENTS_ALL', STAGEDB, "ELEMENTS_ALL") TableToTable_conversion(r'Database Connections\CANP.sde\NM3.NM_MEMBERS_ALL', STAGEDB, "MEMBERS_ALL") #Members takes a long time to copy #To copy the SECT layer it must be joined to preserve RSE_HE_ID MakeFeatureLayer_management(r"Database Connections\CANP.sde\NM3.NM3_NET_MAP_SECT_TABLE", "NM_SECT" ) MakeTableView_management(STAGEDB+r"\Elements_ALL", "ELEMENTS") AddJoin_management("NM_SECT", "RSE_HE_ID", "ELEMENTS", "NE_ID", "KEEP_COMMON") FeatureClassToFeatureClass_conversion("NM_SECT", STAGEDB, "SECT" ) #Remove the CANP Prod layers for df in mapping.ListDataFrames(mxd): for lyr in mapping.ListLayers(mxd, "", df): mapping.RemoveLayer(df, lyr) RefreshActiveView()
def compute_adjacency_list(input_points, input_network, id_attribute, impedance_attribute, accumulator_attributes, search_radius, output_location, adj_dbf_name):
    """Build a network adjacency list dbf for |input_points| on |input_network|.

    |input_points|: point shape file marking entity (e.g. building) locations
    |input_network|: street network in which |input_points| is located
    |id_attribute|: the name of attribute that distinguishes between input points
    |impedance_attribute|: distance between neighboring nodes will be based on this attribute
    |accumulator_attributes|: distance between neighboring nodes will also be recorded for these attributes
    |search_radius|: the maximum extent for centrality computation
    |output_location|: adjacency list dbf will be saved here
    |adj_dbf_name|: the name of the adjacency list dbf

    Uses an OD cost matrix solved one raster cell of origins at a time, with a
    large per-point "barrier cost" added so co-located points can be separated;
    that cost is subtracted back out before the rows are appended to the dbf.
    Python 2 only (UpdateCursor(...).next(), integer division below).
    """
    # Number of points in |input_points|
    input_point_count = int(GetCount_management(input_points).getOutput(0))
    # Make a directory to store all auxiliary files
    auxiliary_dir = join(output_location, AUXILIARY_DIR_NAME)
    if not Exists(auxiliary_dir):
        mkdir(auxiliary_dir)
    # Record the edge and junction source names of |input_network|
    junction_feature, edge_feature = network_features(input_network)
    # Calculate network locations if not already calculated
    test_input_point = UpdateCursor(input_points).next()
    locations_calculated = all(row_has_field(test_input_point, field) for field in NETWORK_LOCATION_FIELDS)
    if not locations_calculated:
        calculate_network_locations(input_points, input_network)
    # Calculate barrier cost per input point if not already calculated
    barrier_costs_calculated = row_has_field(test_input_point, trim(BARRIER_COST_FIELD))
    if not barrier_costs_calculated:
        AddMessage(BARRIER_COST_COMPUTATION_STARTED)
        # Add |BARRIER_COST_FIELD| column in |input_points|
        AddField_management(in_table=input_points, field_name=trim(BARRIER_COST_FIELD), field_type="DOUBLE", field_is_nullable="NON_NULLABLE")
        # Initialize a dictionary to store the frequencies of (SnapX, SnapY) values
        xy_count = {}
        # A method to retrieve a (SnapX, SnapY) pair for a row in |input_points|
        get_xy = lambda row: (row.getValue(trim("SnapX")), row.getValue(trim("SnapY")))
        # First pass: count how many points snap to each network location.
        barrier_pre_progress = Progress_Bar(input_point_count, 1, BARRIER_COST_PRE_PROCESSING)
        rows = UpdateCursor(input_points)
        for row in rows:
            snap_xy = get_xy(row)
            if snap_xy in xy_count:
                xy_count[snap_xy] += 1
            else:
                xy_count[snap_xy] = 1
            barrier_pre_progress.step()
        # Populate |BARRIER_COST_FIELD|, this will be used in OD matrix computation
        # (the shared barrier cost is split evenly among co-located points).
        barrier_progress = Progress_Bar(input_point_count, 1, BARRIER_COST_COMPUTATION)
        rows = UpdateCursor(input_points)
        for row in rows:
            barrier_cost = BARRIER_COST / xy_count[get_xy(row)]
            row.setValue(trim(BARRIER_COST_FIELD), barrier_cost)
            rows.updateRow(row)
            barrier_progress.step()
        AddMessage(BARRIER_COST_COMPUTATION_FINISHED)
    # Necessary files
    od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
    od_cost_matrix_lines = join(od_cost_matrix_layer, OD_COST_MATRIX_LINES)
    temp_adj_dbf_name = TEMP_ADJACENCY_DBF_NAME(adj_dbf_name)
    temp_adj_dbf = join(output_location, temp_adj_dbf_name)
    adj_dbf = join(output_location, adj_dbf_name)
    partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
    polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
    raster = join(auxiliary_dir, RASTER_NAME)
    polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
    input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)
    # Make sure none of these files already exists
    for path in [od_cost_matrix_layer, temp_adj_dbf, adj_dbf, partial_adj_dbf, polygons, raster, polygons_layer, input_points_layer, od_cost_matrix_lines]:
        delete(path)
    # Cutoff radius for OD matrix computation (barrier costs inflate every path
    # by up to 2 * BARRIER_COST, so the cutoff must absorb that).
    cutoff_radius = 2 * BARRIER_COST + min(search_radius, BARRIER_COST / 2)
    # Compute OD matrix
    MakeODCostMatrixLayer_na(in_network_dataset=input_network, out_network_analysis_layer=od_cost_matrix_layer, impedance_attribute=impedance_attribute, default_cutoff=str(cutoff_radius), accumulate_attribute_name=accumulator_attributes, UTurn_policy="ALLOW_UTURNS", hierarchy="NO_HIERARCHY", output_path_shape="NO_LINES")
    # Determine raster cell size
    # NOTE(review): Python 2 integer division -- if input_point_count exceeds
    # OD_MATRIX_ENTRIES, points_per_raster_cell becomes 0 and the next line
    # raises ZeroDivisionError. Confirm inputs are always smaller than that.
    points_per_raster_cell = OD_MATRIX_ENTRIES / input_point_count
    raster_cell_count = max(1, input_point_count / points_per_raster_cell)
    input_points_extent = Describe(input_points).Extent
    raster_cell_area = (input_points_extent.width * input_points_extent.height / raster_cell_count)
    raster_cell_size = int(sqrt(raster_cell_area))
    # Construct |raster| from |input_points|
    PointToRaster_conversion(in_features=input_points, value_field=id_attribute, out_rasterdataset=raster, cell_assignment="MOST_FREQUENT", priority_field="NONE", cellsize=str(raster_cell_size))
    # Construct |polygons| from |raster|
    RasterToPolygon_conversion(in_raster=raster, out_polygon_features=polygons, simplify="NO_SIMPLIFY", raster_field="VALUE")
    # Export empty |od_cost_matrix_lines| to |temp_dbf| to start adjacency list
    TableToTable_conversion(in_rows=od_cost_matrix_lines, out_path=output_location, out_name=temp_adj_dbf_name)
    # Construct |polygons_layer| and |input_points_layer|
    for (feature, layer) in [(polygons, polygons_layer), (input_points, input_points_layer)]:
        MakeFeatureLayer_management(in_features=feature, out_layer=layer)
    def add_locations(sub_layer, field_mappings=""):
        """Load |input_points_layer| into one sub layer of the OD matrix.

        |sub_layer|: one of "Origins", "Destinations", "Barrier Points"
        |field_mappings|: field mappings in addition to those for "Name" and "CurbApproach"
        """
        AddLocations_na(in_network_analysis_layer=od_cost_matrix_layer, sub_layer=sub_layer, in_table=input_points_layer, field_mappings=("Name %s #; CurbApproach # 0; %s" % (id_attribute, field_mappings)), search_tolerance=SEARCH_TOLERANCE, search_criteria=("%s SHAPE; %s SHAPE;" % (junction_feature, edge_feature)), append="CLEAR", snap_to_position_along_network="SNAP", snap_offset=SNAP_OFFSET)
    # OD cost matrix destinations
    AddMessage(ADDING_DESTINATIONS_STARTED)
    SelectLayerByLocation_management(in_layer=input_points_layer)
    add_locations("Destinations")
    AddMessage(ADDING_DESTINATIONS_FINISHED)
    # OD cost matrix point barriers
    AddMessage(ADDING_BARRIERS_STARTED)
    add_locations("Point Barriers", ("FullEdge # 0; BarrierType # 2;" "Attr_%s %s #;" % (impedance_attribute, trim(BARRIER_COST_FIELD))))
    AddMessage(ADDING_BARRIERS_FINISHED)
    # Compute adjacency list, one raster cell at a time
    progress = Progress_Bar(raster_cell_count, 1, STEP_1)
    rows = UpdateCursor(polygons)
    for row in rows:
        # Select the current polygon
        SelectLayerByAttribute_management(in_layer_or_view=polygons_layer, selection_type="NEW_SELECTION", where_clause="FID = %s" % str(row.FID))
        # Origins: only the input points falling inside this polygon.
        SelectLayerByLocation_management(in_layer=input_points_layer, select_features=polygons_layer)
        add_locations("Origins")
        # Solve OD Cost matrix
        Solve_na(in_network_analysis_layer=od_cost_matrix_layer, ignore_invalids="SKIP")
        # Add origin and destination fields to the adjacency list dbf
        # (the solver's Name field is "<origin> - <destination>").
        for (index, field) in [(0, ORIGIN_ID_FIELD_NAME), (1, DESTINATION_ID_FIELD_NAME)]:
            CalculateField_management(in_table=od_cost_matrix_lines, field=field, expression="!Name!.split(' - ')[%d]" % index, expression_type="PYTHON")
        # Record actual distance between neighboring nodes
        # (subtract the two barrier crossings added per path).
        distance_field = "Total_%s" % impedance_attribute
        CalculateField_management(in_table=od_cost_matrix_lines, field=distance_field, expression="!%s! - 2 * %d" % (distance_field, BARRIER_COST), expression_type="PYTHON")
        # Append result to |temp_adj_dbf|
        TableToTable_conversion(in_rows=od_cost_matrix_lines, out_path=auxiliary_dir, out_name=PARTIAL_ADJACENCY_LIST_NAME)
        Append_management(inputs=partial_adj_dbf, target=temp_adj_dbf, schema_type="TEST")
        progress.step()
    # Copy data from |temp_adj_dbf| to |adj_dbf|
    Rename_management(in_data=temp_adj_dbf, out_data=adj_dbf)
    # Clean up
    for path in [od_cost_matrix_layer, partial_adj_dbf, polygons, raster, polygons_layer, input_points_layer, auxiliary_dir]:
        delete(path)
def OffsetDirectionMatrix2(offsetOptions): """Update the accidentDataWithOffsetOutput geometry with data from geocodedFeatures. Keyword arguments to be included in the options class: gdbLocation -- The gdb where the outputWithOffsetLocations feature class resides. accidentDataAtIntersections -- A point feature class containing geocoded accident information. accidentDataWithOffsetOutput -- A point feature class with the same structure as the geocodedFeatuers AND an "isOffset" row of type "TEXT" with length of at least 5. whereClauseInUse -- Whether or not the script will use a where clause. Boolean value. roadsFeaturesLocation -- The path to the local roads centerline feature class. aliasTable -- The path to the roads alias table for the roads centerline feature class. maxDegreesDifference -- The number of degrees that a potentially matching accident offset location can be from the direction specified. If this is set to -1, the check will be skipped and no matching accident offset locations will be rejected, even if they are in the opposite direction from where the accident record says they should be. I.e. the accident could be offset to the North when the accident record says that it should be South of the intersection when this check is skipped. XYFieldList -- The list of fields to use from the copy of the geocoded accidents feature class after that copy has had POINT_X and POINT_Y fields added and calculated. """ ########################################################################### ## Function overview: ## For each row in the feature class of accidents that have been geolocated ## to an intersection: ########################################################################### # Make sure that the Status for the point is not 'U' -- Unlocated. # Might take care of test for 'U' points before getting to this # step in the process, but if not, be sure to test for it here. # Create/calculate intersection X & Y field named POINT_X and POINT_Y. 
# Then, calculate those fields. # Then, create a buffer. # Then, select the On_Road in the roads layer. # Then, intersect the buffer with the roads layer to create an offset # points layer. # Then, split the offset points from potential multipart points to # singlepart points. ########################################################################### # Then, use the "SHAPE@XY" token to access the X & Y of the individual # offset points and compare them to the X & Y values in the POINT_X and # POINT_Y fields, which hold the values for the related roads' intersection # that the accidents were geolocated to. # Then, test the intersected points to find the best one for the given # direction. ########################################################################### # Then, append the information for that point into a list. # Then, delete the buffer and intersection layer. # Repeat for each other row... ########################################################################### # When all the rows are finished, # Append the attribute information for the # related accident into each offset point's row. # Lastly, write the data for all the offset point rows # into the output layer. ########################################################################### # Maximum angle difference code confirmed to be working. -- 2015-03-18 # 771/771 manually checked look good (for the information given) using # UpdateKdotNameInCenterline(), Where Clause for selection, and # Maximum Angle Difference. # Locates 771/862 non-'U' points without the modified versions of # ON_ROAD_NAME/AT_ROAD/AT_ROAD_DIRECTION/AT_ROAD_DIST_FEET labeled fields # and 803/862 with them. 
########################################################################### AddMessage("The value of the useKDOTFields option is: " + str(offsetOptions.useKDOTFields)) roadsToIntersect = offsetOptions.roadsFeaturesLocation roadsAliasTable = offsetOptions.aliasTable geocodedFeatures = offsetOptions.accidentDataAtIntersections outputWithOffsetLocations = offsetOptions.accidentDataWithOffsetOutput whereClauseFlag = offsetOptions.whereClauseInUse maximumDegreesDifference = offsetOptions.maxDegreesDifference KDOTFieldUse = offsetOptions.useKDOTFields AddMessage("The value for KDOTFieldUse is: " + str(KDOTFieldUse)) if str(KDOTFieldUse).lower() == 'false': featuresWithXYFieldList = offsetOptions.NonKDOTXYFieldList AddMessage("Using nonKDOTXYFieldList.") else: featuresWithXYFieldList = offsetOptions.KDOTXYFieldList geodatabaseLocation = getGDBLocationFromFC(outputWithOffsetLocations) env.workspace = geodatabaseLocation env.overwriteOutput = True geocodedWhereClause = "STATUS <> 'U'" featuresWithXY = 'geocodedWithXY' geocodedLocXY = r'in_memory\geocodedFeatures_Loc_XY' # Changed this to an in_memory location also. # Scratch data locations intermediateAccidentBuffer = r'in_memory\intermediateAccidentBuffer' intermediateAccidentIntersect = r'in_memory\intermediateAccidentIntersect' intermediateAccidentIntersectSinglePart = r'in_memory\intermediateAccidentIntersectSinglePart' # Added 2016-09-06 after the Wichita Area points started processing. Slowly. intermediateRoadsToIntersect = r'in_memory\roadsToIntersect' intermediateRoadsAliasTable = r'in_memory\roadsAliasTable' descSpatialReference = Describe(geocodedFeatures).spatialReference # Make a feature layer of geocodedFeatures using a where clause to restrict to those points # which have been located to an intersection, then add XY to it. 
MakeFeatureLayer_management(geocodedFeatures, featuresWithXY, geocodedWhereClause) CopyFeatures_management(featuresWithXY, geocodedLocXY) AddXY_management(geocodedLocXY) roadsAsFeatureLayer = 'ConflatedRoadsFeatureLayer' # Roads copied to memory. CopyFeatures_management(roadsToIntersect, intermediateRoadsToIntersect) MakeFeatureLayer_management(intermediateRoadsToIntersect, roadsAsFeatureLayer) # Use Point_X & Point_Y for the geolocated intersection location. # Use shape tokens for the x & y of the points which # result from intersecting the buffer & road geometries. geocodedAccidentsList = list() singlePartOffsetAccidentsList = list() print "The path of the geocodedFeatures used is: " + geocodedFeatures #AddMessage("The field names used in the search cursor are:") #for fieldListItem in featuresWithXYFieldList: # AddMessage(fieldListItem) accidentsCursor = SearchCursor(geocodedLocXY, featuresWithXYFieldList) for accidentRow in accidentsCursor: geocodedAccidentsList.append(accidentRow) try: del accidentsCursor except: pass print 'whereClauseFlag is: ' + str(whereClauseFlag) print 'Starting the offset process...' accCounter = -1 env.outputCoordinateSystem = descSpatialReference if whereClauseFlag == True: # Don't need to create a relate or a join. # Just need to do a select on the would-be joined/related table # to get the SEGIDs, then use those to do a select # for the GCIDs the conflation roads. # Try using table to table here instead of copy features. # For some reason, arcpy doesn't like this table when it's in the # ar63 FGDBs. 
TableToTable_conversion(roadsAliasTable, 'in_memory', 'roadsAliasTable') # == intermediateRoadsAliasTable #CopyFeatures_management(roadsAliasTable, intermediateRoadsAliasTable) roadsAliasTableView = MakeTableView_management(intermediateRoadsAliasTable, 'roadsAliasTableView') for geocodedAccident in geocodedAccidentsList: accCounter += 1 print 'Working on geocodedAccident #' + str(accCounter) # Create a point here with the x & y from the geocodedAccident, # add the coordinate system, OBJECTID, and AccidentID # from the geocodedAccident layer. # Then, create a buffer with it. #if geocodedAccident[2] is not None and geocodedAccident[3] is not None: tempPoint = Point(geocodedAccident[2], geocodedAccident[3]) #print "\t " + str(tempPoint.X) + ", " + str(tempPoint.Y) tempPointGeometry = PointGeometry(tempPoint, descSpatialReference) accidentDistanceOffset = geocodedAccident[7] accidentClusterTolerance = 1 try: ##################### # Offsetting while using a WhereClause follows: ##################### if accidentDistanceOffset is not None: # In Python it's None, whereas in an ArcGIS table it's <null> accidentDistanceOffset = int(accidentDistanceOffset) if accidentDistanceOffset != 0: Buffer_analysis(tempPointGeometry, intermediateAccidentBuffer, accidentDistanceOffset) firstRoadName = str(geocodedAccident[5]) if firstRoadName is not None: firstRoadName = firstRoadName.upper() else: firstRoadName = 'NotAValidRoad' secondRoadName = str(geocodedAccident[8]) if secondRoadName is not None: secondRoadName = secondRoadName.upper() else: secondRoadName = 'NotAValidRoad' thirdRoadName = ParseMatchAddr(geocodedAccident[9]) if thirdRoadName is not None: thirdRoadName = thirdRoadName.upper() else: thirdRoadName = 'NotAValidRoad' roadNamesList = [firstRoadName, secondRoadName, thirdRoadName] aliasIDsList = getAliasIDs(roadNamesList, roadsAliasTableView) aliasIDsLength = len(aliasIDsList) if aliasIDsLength != 0: aliasIDsString = """(""" for x in xrange(aliasIDsLength): if (x != 
(aliasIDsLength - 1)): aliasIDsString += """'""" + aliasIDsList[x] + """',""" else: aliasIDsString += """'""" + aliasIDsList[x] + """')""" streetWhereClause = (""" "RD" = '""" + firstRoadName + """'""" + """ OR """ + """ "LABEL" = '""" + firstRoadName + """'""" + """ OR """ + """ "RD" = '""" + secondRoadName + """'""" + """ OR """ + """ "LABEL" = '""" + secondRoadName + """'""" + """ OR """ + """ "RD" = '""" + thirdRoadName + """'""" + """ OR """ + """ "LABEL" = '""" + thirdRoadName + """'""" + """ OR GCID IN """ + aliasIDsString) else: #Without the aliasIDs. streetWhereClause = (""" "RD" = '""" + firstRoadName + """'""" + """ OR """ + """ "LABEL" = '""" + firstRoadName + """'""" + """ OR """ + """ "RD" = '""" + secondRoadName + """'""" + """ OR """ + """ "LABEL" = '""" + secondRoadName + """'""" + """ OR """ + """ "RD" = '""" + thirdRoadName + """'""" + """ OR """ + """ "LABEL" = '""" + thirdRoadName + """'""") SelectLayerByAttribute_management(roadsAsFeatureLayer, "NEW_SELECTION", streetWhereClause) selectionCount = str(int(GetCount_management(roadsAsFeatureLayer).getOutput(0))) if int(selectionCount) != 0: featuresToIntersect = [roadsAsFeatureLayer, intermediateAccidentBuffer] Intersect_analysis(featuresToIntersect, intermediateAccidentIntersect, "ALL", "", "POINT") if int(str(GetCount_management(intermediateAccidentIntersect))) > 0: MultipartToSinglepart_management(intermediateAccidentIntersect, intermediateAccidentIntersectSinglePart) singlePartsCursor = SearchCursor(intermediateAccidentIntersectSinglePart, ['SHAPE@XY']) for singlePart in singlePartsCursor: singlePartListItem = [singlePart[0], geocodedAccident[2], geocodedAccident[3], geocodedAccident[4], geocodedAccident[6], geocodedAccident[0]] singlePartOffsetAccidentsList.append(singlePartListItem) try: del singlePartsCursor except: pass else: pass try: del intermediateAccidentIntersect except: pass else: pass #print 'Zero road segments selected. Will not attempt to offset.' 
else: pass #print 'AT_ROAD_DIST_FEET is 0. Will not attempt to offset. else: pass #print 'AT_ROAD_DIST_FEET is null. Will not attempt to offset.' except: # Need to log the warnings with details so that I know what's wrong with them. print "WARNING:" print "An error occurred which prevented the accident point with Acc_Key: " + str(geocodedAccident[4]) print "from being buffered and/or offset properly." errorItem = sys.exc_info()[1] errorStatement = str(errorItem.args[0]) print errorStatement try: del errorItem except: pass elif whereClauseFlag == False: for geocodedAccident in geocodedAccidentsList: # Create a point here with the x & y from the geocodedAccident, # add the coordinate system, OBJECTID, and AccidentID # from the geocodedAccident layer. # Then, create a buffer with it. #if geocodedAccident[2] is not None and geocodedAccident[3] is not None: tempPoint = Point(geocodedAccident[2], geocodedAccident[3]) #print "\t " + str(tempPoint.X) + ", " + str(tempPoint.Y) tempPointGeometry = PointGeometry(tempPoint, descSpatialReference) accidentDistanceOffset = geocodedAccident[7] ##accidentClusterTolerance = 2 try: ##################### # Offsetting while not using a WhereClause follows: ##################### if accidentDistanceOffset is not None: if int(accidentDistanceOffset) != 0: accidentDistanceOffset = int(accidentDistanceOffset) Buffer_analysis(tempPointGeometry, intermediateAccidentBuffer, accidentDistanceOffset) featuresToIntersect = [roadsAsFeatureLayer, intermediateAccidentBuffer] Intersect_analysis(featuresToIntersect, intermediateAccidentIntersect, "ALL", "", "POINT") if int(str(GetCount_management(intermediateAccidentIntersect))) > 0: MultipartToSinglepart_management(intermediateAccidentIntersect, intermediateAccidentIntersectSinglePart) singlePartsCursor = SearchCursor(intermediateAccidentIntersectSinglePart, ['SHAPE@XY']) for singlePart in singlePartsCursor: singlePartListItem = [singlePart[0], geocodedAccident[2], geocodedAccident[3], 
geocodedAccident[4], geocodedAccident[6], geocodedAccident[0]] singlePartOffsetAccidentsList.append(singlePartListItem) try: del singlePartsCursor except: pass try: del intermediateAccidentIntersect except: pass else: pass else: pass # Need to change this to being offset to the intersection, i.e. no movement, but # considered to be correctly offset all the same. #print 'AT_ROAD_DIST_FEET is 0. Will not attempt to offset.' else: pass #print 'AT_ROAD_DIST_FEET is null. Will not attempt to offset.' except: print "WARNING:" print "An error occurred which prevented the accident point with Acc_Key: " + str(geocodedAccident[4]) print "from being buffered and/or offset properly." errorItem = sys.exc_info()[1] errorStatement = str(errorItem.args[0]) print errorStatement try: del errorItem except: pass else: print 'Please set the whereClauseFlag to either (boolean) True or False.' #pass offsetDictionaryByAccidentKey = dict() listContainer = list() # Group the rows by accident_key for further analysis, # and add them to the dictionary/list/list data structure. for singlePartOffsetItem in singlePartOffsetAccidentsList: if singlePartOffsetItem[3] in offsetDictionaryByAccidentKey.keys(): listContainer = offsetDictionaryByAccidentKey[singlePartOffsetItem[3]] listContainer.append(singlePartOffsetItem) offsetDictionaryByAccidentKey[singlePartOffsetItem[3]] = listContainer else: listContainer = list() listContainer.append(singlePartOffsetItem) offsetDictionaryByAccidentKey[singlePartOffsetItem[3]] = listContainer updateListValues = list() for accidentKey in offsetDictionaryByAccidentKey.keys(): # accidentKey will be a unique accident key from the table listContainer = offsetDictionaryByAccidentKey[accidentKey] updateList = [-1, -1, -1, "False"] try: # Get the AT_ROAD_KDOT_DIRECTION/AT_ROAD_DIRECTION from the first (0th) entry. 
directionToTest = listContainer[0][4] if directionToTest is not None: directionToTest = str(directionToTest).upper() updateList = findTheMostInterestingRow(listContainer, directionToTest, maximumDegreesDifference) if updateList[0] != -1: updateListValues.append(updateList) else: # -1 is not a valid Acc_Key. Slight optimization for the next for loop that uses this list so that # it doesn't have to be re-checked each time through the list for each accident in the table. pass else: print 'Direction to test is null.' except: pass accidentUpdateCursorFields = ['ACCIDENT_KEY', 'Shape@XY', 'isOffset'] accidentUpdateCursor = UpdateCursor(outputWithOffsetLocations, accidentUpdateCursorFields) for cursorItem in accidentUpdateCursor: for updateListItem in updateListValues: if cursorItem[0] == updateListItem[0]: if str(cursorItem[2]).upper() == 'TRUE': # Don't make any changes if true. AddMessage('The accident point with Acc_Key: ' + str(cursorItem[0]) + ' is already offset.') else: # Otherwise, offset the point. editableCursorItem = list(cursorItem) #AddMessage('Found a matching cursorItem with an Accident_Key of ' + str(cursorItem[0]) + ".") editableCursorItem[1] = (updateListItem[1], updateListItem[2]) editableCursorItem[2] = updateListItem[3] #AddMessage(str(editableCursorItem)) accidentUpdateCursor.updateRow(editableCursorItem) else: pass