sas3_log = os.path.join(MHN.temp_dir, sas3_name + '.log')
sas3_lst = os.path.join(MHN.temp_dir, sas3_name + '.lst')
bus_route_csv = os.path.join(MHN.temp_dir, 'bus_route.csv')
bus_itin_csv = os.path.join(MHN.temp_dir, 'bus_itin.csv')
# Input to gtfs_collapse_routes.py (called by gtfs_reformat_feed.sas):
oneline_itin_txt = os.path.join(MHN.temp_dir, 'oneline_itin.txt')
# Output of gtfs_collapse_routes.py:
feed_groups_txt = os.path.join(MHN.temp_dir, 'feed_groups.txt')
missing_links_csv = os.path.join(MHN.out_dir, 'missing_bus_links.csv')
# Input to shortest_path.py (called by generate_transit_files_2.sas):
link_dict_txt = os.path.join(MHN.out_dir, 'link_dictionary.txt')
# Output of shortest_path.py:
short_path_txt = os.path.join(MHN.out_dir, 'short_path.txt')
path_errors_txt = os.path.join(MHN.temp_dir, 'path_errors.txt')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Remove any leftovers from a previous run so stale diagnostics are never
# mistaken for current output.
for leftover in (sas1_log, sas1_lst, sas2_log, sas2_lst, sas3_log, sas3_lst,
                 bus_route_csv, bus_itin_csv, oneline_itin_txt,
                 feed_groups_txt, missing_links_csv, link_dict_txt,
                 short_path_txt, path_errors_txt):
    MHN.delete_if_exists(leftover)
# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
overlap_year_csv = os.path.join(MHN.temp_dir, "overlap_year.csv")
overlap_transact_csv = os.path.join(MHN.temp_dir, "overlap_transact.csv")
overlap_network_csv = os.path.join(MHN.temp_dir, "overlap_network.csv")
sas1_log = os.path.join(MHN.temp_dir, sas1_name + ".log")
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + ".lst")
# sas2_log & sas2_lst are scenario-dependent, defined below

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Remove leftovers from any previous run before writing new diagnostics.
for leftover in (overlap_year_csv, overlap_transact_csv, overlap_network_csv,
                 sas1_log, sas1_lst):
    MHN.delete_if_exists(leftover)

# -----------------------------------------------------------------------------
# Write tollsys.flag file if desired.
# -----------------------------------------------------------------------------
if create_tollsys_flag:
    arcpy.AddMessage("\nGenerating tollsys.flag file...")
    tollsys_flag = os.path.join(hwy_path, "tollsys.flag")
    MHN.write_arc_flag_file(tollsys_flag, '"TOLLSYS" = 1')
MHN = MasterHighwayNetwork(mhn_gdb_path) #arcpy.AddWarning('\nCurrently updating {0}.'.format(MHN.gdb)) # ----------------------------------------------------------------------------- # Set diagnostic output locations. # ----------------------------------------------------------------------------- bad_arcs_shp = os.path.join(MHN.temp_dir, 'bad_arcs.shp') duplicate_nodes_shp = os.path.join(MHN.temp_dir, 'duplicate_nodes.shp') overlapping_nodes_shp = os.path.join(MHN.temp_dir, 'overlapping_nodes.shp') # ----------------------------------------------------------------------------- # Clean up old temp files, if necessary. # ----------------------------------------------------------------------------- MHN.delete_if_exists(bad_arcs_shp) MHN.delete_if_exists(duplicate_nodes_shp) MHN.delete_if_exists(overlapping_nodes_shp) # ----------------------------------------------------------------------------- # Check arcs for all required attributes. # ----------------------------------------------------------------------------- arcpy.AddMessage('\nValidating edits:') # Make a copy of the unmodified arcs. temp_arcs = os.path.join(MHN.mem, 'temp_arcs') arcpy.CopyFeatures_management(MHN.arc, temp_arcs) # Set null values to 0 or a space. MHN.set_nulls_to_zero(temp_arcs, ['ANODE','BNODE','DIRECTIONS','TYPE1','TYPE2','THRULANES1','THRULANES2',
# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
year_csv = os.path.join(MHN.temp_dir, 'year.csv')
transact_csv = os.path.join(MHN.temp_dir, 'transact.csv')
network_csv = os.path.join(MHN.temp_dir, 'network.csv')
update_link_csv = os.path.join(MHN.temp_dir, 'update_link.csv')  # SAS output
flag_node_csv = os.path.join(MHN.temp_dir, 'flag_node.csv')  # SAS output

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Wipe any leftovers from a previous run.
for leftover in (sas1_log, sas1_lst, year_csv, transact_csv, network_csv,
                 update_link_csv, flag_node_csv):
    MHN.delete_if_exists(leftover)

# -----------------------------------------------------------------------------
# Write data relevant to specified year and pass to SAS for processing.
# -----------------------------------------------------------------------------
arcpy.AddMessage('\nPreparing {0} network attributes...'.format(build_year))

# Export coding for highway projects completed by scenario year.
hwyproj_id_field = MHN.route_systems[MHN.hwyproj][1]
# ----------------------------------------------------------------------------- # Set diagnostic output locations. # ----------------------------------------------------------------------------- tipid_all_csv = os.path.join(MHN.temp_dir, 'tipid_all.csv') early_scenarios_csv = os.path.join(MHN.out_dir, 'early_transit_scenarios.csv') late_scenarios_csv = os.path.join(MHN.out_dir, 'late_transit_scenarios.csv') unknown_trans_ids_csv = os.path.join(MHN.out_dir, 'unknown_transit_tipids.csv') in_year_not_mhn_txt = os.path.join(MHN.out_dir, 'in_year_not_mhn.txt') in_mhn_not_year_txt = os.path.join(MHN.out_dir, 'in_mhn_not_year.txt') # ----------------------------------------------------------------------------- # Clean up old temp/output files, if necessary. # ----------------------------------------------------------------------------- MHN.delete_if_exists(tipid_all_csv) MHN.delete_if_exists(early_scenarios_csv) MHN.delete_if_exists(in_year_not_mhn_txt) MHN.delete_if_exists(in_mhn_not_year_txt) # ----------------------------------------------------------------------------- # Merge codable Conformed project years with codable Exempt project years, # and check for duplicates with different completion years. # ----------------------------------------------------------------------------- with open(tipid_all_csv, 'w') as merged: with open(tipid_conformed_csv, 'r') as conformed: for line in conformed: if int(line.split(',')[1]) > MHN.base_year: merged.write(line) with open(tipid_exempt_csv, 'r') as exempt:
# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
year_csv = os.path.join(MHN.temp_dir, 'year.csv')
transact_csv = os.path.join(MHN.temp_dir, 'transact.csv')
network_csv = os.path.join(MHN.temp_dir, 'network.csv')
future_itin_csv = os.path.join(MHN.temp_dir, 'future_itin.csv')
future_route_csv = os.path.join(MHN.temp_dir, 'future_route.csv')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Wipe any leftovers from a previous run.
for leftover in (sas1_log, sas1_lst, year_csv, transact_csv, network_csv,
                 future_itin_csv, future_route_csv):
    MHN.delete_if_exists(leftover)

# -----------------------------------------------------------------------------
# Verify that all projects have a non-zero, non-null completion year.
# -----------------------------------------------------------------------------
invalid_hwyproj = MHN.get_yearless_hwyproj()
if invalid_hwyproj:
    bad_ids = ', '.join(invalid_hwyproj)
    MHN.die('The following highway projects have no completion year: {0}'.format(bad_ids))
# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
year_csv = os.path.join(MHN.temp_dir, 'year.csv')
transact_csv = os.path.join(MHN.temp_dir, 'transact.csv')
network_csv = os.path.join(MHN.temp_dir, 'network.csv')
future_itin_csv = os.path.join(MHN.temp_dir, 'future_itin.csv')
future_route_csv = os.path.join(MHN.temp_dir, 'future_route.csv')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Delete anything left over from a previous run.
for stale in (sas1_log, sas1_lst, year_csv, transact_csv, network_csv,
              future_itin_csv, future_route_csv):
    MHN.delete_if_exists(stale)

# -----------------------------------------------------------------------------
# Verify that all projects have a non-zero, non-null completion year.
# -----------------------------------------------------------------------------
invalid_hwyproj = MHN.get_yearless_hwyproj()
if invalid_hwyproj:
    MHN.die(
        'The following highway projects have no completion year: {0}'.format(
            ', '.join(invalid_hwyproj)))
MHN.die("{0} doesn't exist!".format(tipid_uncodable_csv)) # ----------------------------------------------------------------------------- # Set diagnostic output locations. # ----------------------------------------------------------------------------- tipid_all_csv = os.path.join(MHN.temp_dir, 'tipid_all.csv') early_scenarios_csv = os.path.join(MHN.out_dir, 'early_transit_scenarios.csv') in_year_not_mhn_txt = os.path.join(MHN.out_dir, 'in_year_not_mhn.txt') in_mhn_not_year_txt = os.path.join(MHN.out_dir, 'in_mhn_not_year.txt') # ----------------------------------------------------------------------------- # Clean up old temp/output files, if necessary. # ----------------------------------------------------------------------------- MHN.delete_if_exists(tipid_all_csv) MHN.delete_if_exists(early_scenarios_csv) MHN.delete_if_exists(in_year_not_mhn_txt) MHN.delete_if_exists(in_mhn_not_year_txt) # ----------------------------------------------------------------------------- # Merge codable Conformed project years with codable Exempt project years, # and check for duplicates with different completion years. # ----------------------------------------------------------------------------- with open(tipid_all_csv, 'w') as merged: with open(tipid_conformed_csv, 'r') as conformed: for line in conformed: if int(line.split(',')[1]) > MHN.base_year: merged.write(line) with open(tipid_exempt_csv, 'r') as exempt:
# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
year_csv = os.path.join(MHN.temp_dir, 'year.csv')
transact_csv = os.path.join(MHN.temp_dir, 'transact.csv')
network_csv = os.path.join(MHN.temp_dir, 'network.csv')
update_link_csv = os.path.join(MHN.temp_dir, 'update_link.csv')  # SAS output
flag_node_csv = os.path.join(MHN.temp_dir, 'flag_node.csv')  # SAS output

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Delete anything left over from a previous run.
for stale in (sas1_log, sas1_lst, year_csv, transact_csv, network_csv,
              update_link_csv, flag_node_csv):
    MHN.delete_if_exists(stale)

# -----------------------------------------------------------------------------
# Write data relevant to specified year and pass to SAS for processing.
# -----------------------------------------------------------------------------
arcpy.AddMessage('\nPreparing {0} network attributes...'.format(build_year))

# Export coding for highway projects completed by scenario year.
hwyproj_id_field = MHN.route_systems[MHN.hwyproj][1]
year_attr = [hwyproj_id_field, 'COMPLETION_YEAR']
sas2_name = 'generate_highway_files_2'

# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
overlap_year_csv = os.path.join(MHN.temp_dir, 'overlap_year.csv')
overlap_transact_csv = os.path.join(MHN.temp_dir, 'overlap_transact.csv')
overlap_network_csv = os.path.join(MHN.temp_dir, 'overlap_network.csv')
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
# sas2_log & sas2_lst are scenario-dependent, defined below

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Remove leftovers from any previous run before writing new diagnostics.
for leftover in (overlap_year_csv, overlap_transact_csv, overlap_network_csv,
                 sas1_log, sas1_lst):
    MHN.delete_if_exists(leftover)

# -----------------------------------------------------------------------------
# Write tollsys.flag file, if desired.
# -----------------------------------------------------------------------------
if create_tollsys_flag or abm_output:
    arcpy.AddMessage('\nGenerating tollsys.flag file...')
    tollsys_flag = os.path.join(hwy_path, 'tollsys.flag')
    MHN.write_arc_flag_file(tollsys_flag, '"TOLLSYS" = 1')

# -----------------------------------------------------------------------------
# Generate any scenario-independent, ABM-specific files, if desired.
network_csv = os.path.join(MHN.temp_dir, 'network.csv')
nodes_csv = os.path.join(MHN.temp_dir, 'nodes.csv')
header_csv = os.path.join(MHN.temp_dir, 'header.csv')
itin_csv = os.path.join(MHN.temp_dir, 'itin.csv')
# Input to shortest_path.py (called by import_gtfs_bus_routes_2.sas):
link_dict_txt = os.path.join(MHN.out_dir, 'link_dictionary.txt')
# Output of shortest_path.py:
short_path_txt = os.path.join(MHN.out_dir, 'short_path.txt')
path_err_txt = os.path.join(MHN.out_dir, 'path_errors.txt')
hold_check_csv = os.path.join(MHN.out_dir, 'hold_check.csv')
hold_times_csv = os.path.join(MHN.out_dir, 'hold_times.csv')
routes_processed_csv = os.path.join(MHN.out_dir, 'routes_processed.csv')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Remove any leftovers from a previous run.
for leftover in (sas1_log, sas1_lst, transact_csv, network_csv, nodes_csv,
                 header_csv, itin_csv, link_dict_txt, short_path_txt,
                 path_err_txt, hold_check_csv, hold_times_csv,
                 routes_processed_csv):
    MHN.delete_if_exists(leftover)

# -----------------------------------------------------------------------------
sas1_name = 'import_highway_projects_2'

# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
mhn_links_csv = os.path.join(MHN.temp_dir, 'mhn_links.csv')
projects_csv = os.path.join(MHN.temp_dir, 'projects.csv')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Remove any leftovers from a previous run.
for leftover in (sas1_log, sas1_lst, mhn_links_csv, projects_csv):
    MHN.delete_if_exists(leftover)

# -----------------------------------------------------------------------------
# Use SAS program to validate coding before import.
# -----------------------------------------------------------------------------
arcpy.AddMessage('\nValidating coding in {0}...'.format(xls))
mhn_links_attr = ['ANODE', 'BNODE', 'BASELINK']
mhn_links_query = ''' "BASELINK" IN ('0', '1') '''  # Ignore BASELINK > 1
mhn_links_view = MHN.make_skinny_table_view(
    MHN.arc, 'mhn_links_view', mhn_links_attr, mhn_links_query)
MHN.write_attribute_csv(mhn_links_view, mhn_links_csv, mhn_links_attr)
sas1_sas = os.path.join(MHN.prog_dir, sas1_name + '.sas')
network_csv = os.path.join(MHN.temp_dir, 'network.csv')
nodes_csv = os.path.join(MHN.temp_dir, 'nodes.csv')
header_csv = os.path.join(MHN.temp_dir, 'header.csv')
itin_csv = os.path.join(MHN.temp_dir, 'itin.csv')
# Input to shortest_path.py (called by import_gtfs_bus_routes_2.sas):
link_dict_txt = os.path.join(MHN.out_dir, 'link_dictionary.txt')
# Output of shortest_path.py:
short_path_txt = os.path.join(MHN.out_dir, 'short_path.txt')
path_err_txt = os.path.join(MHN.out_dir, 'path_errors.txt')
hold_check_csv = os.path.join(MHN.out_dir, 'hold_check.csv')
hold_times_csv = os.path.join(MHN.out_dir, 'hold_times.csv')
routes_processed_csv = os.path.join(MHN.out_dir, 'routes_processed.csv')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Delete anything left over from a previous run.
for stale in (sas1_log, sas1_lst, transact_csv, network_csv, nodes_csv,
              header_csv, itin_csv, link_dict_txt, short_path_txt,
              path_err_txt, hold_check_csv, hold_times_csv,
              routes_processed_csv):
    MHN.delete_if_exists(stale)

# -----------------------------------------------------------------------------
mhn_gdb_path = arcpy.GetParameterAsText(0) # MHN geodatabase MHN = MasterHighwayNetwork(mhn_gdb_path) #arcpy.AddWarning('\nCurrently updating {0}.'.format(MHN.gdb)) # ----------------------------------------------------------------------------- # Set diagnostic output locations. # ----------------------------------------------------------------------------- bad_arcs_shp = os.path.join(MHN.temp_dir, 'bad_arcs.shp') duplicate_nodes_shp = os.path.join(MHN.temp_dir, 'duplicate_nodes.shp') overlapping_nodes_shp = os.path.join(MHN.temp_dir, 'overlapping_nodes.shp') # ----------------------------------------------------------------------------- # Clean up old temp files, if necessary. # ----------------------------------------------------------------------------- MHN.delete_if_exists(bad_arcs_shp) MHN.delete_if_exists(duplicate_nodes_shp) MHN.delete_if_exists(overlapping_nodes_shp) # ----------------------------------------------------------------------------- # Check arcs for all required attributes. # ----------------------------------------------------------------------------- arcpy.AddMessage('\nValidating edits:') # Make a copy of the unmodified arcs. temp_arcs = os.path.join(MHN.mem, 'temp_arcs') arcpy.CopyFeatures_management(MHN.arc, temp_arcs) # Set null values to 0 or a space. MHN.set_nulls_to_zero(temp_arcs, [ 'ANODE', 'BNODE', 'DIRECTIONS', 'TYPE1', 'TYPE2', 'THRULANES1',
MHN = MasterHighwayNetwork(mhn_gdb_path)
xls = arcpy.GetParameterAsText(1)  # Spreadsheet containing project coding
sas1_name = 'import_highway_projects_2'

# -----------------------------------------------------------------------------
# Set diagnostic output locations.
# -----------------------------------------------------------------------------
sas1_log = os.path.join(MHN.temp_dir, sas1_name + '.log')
sas1_lst = os.path.join(MHN.temp_dir, sas1_name + '.lst')
mhn_links_csv = os.path.join(MHN.temp_dir, 'mhn_links.csv')
projects_csv = os.path.join(MHN.temp_dir, 'projects.csv')

# -----------------------------------------------------------------------------
# Clean up old temp files, if necessary.
# -----------------------------------------------------------------------------
# Delete anything left over from a previous run.
for stale in (sas1_log, sas1_lst, mhn_links_csv, projects_csv):
    MHN.delete_if_exists(stale)

# -----------------------------------------------------------------------------
# Use SAS program to validate coding before import.
# -----------------------------------------------------------------------------
arcpy.AddMessage('\nValidating coding in {0}...'.format(xls))
mhn_links_attr = ['ANODE', 'BNODE', 'BASELINK']
mhn_links_query = ''' "BASELINK" IN ('0', '1') '''  # Ignore BASELINK > 1
mhn_links_view = MHN.make_skinny_table_view(
    MHN.arc, 'mhn_links_view', mhn_links_attr, mhn_links_query)
MHN.write_attribute_csv(mhn_links_view, mhn_links_csv, mhn_links_attr)
sas1_sas = os.path.join(MHN.prog_dir, sas1_name + '.sas')