def save_network(urbanaccess_network=None, dir=config.settings.data_folder,
                 filename=None, overwrite_key=False, overwrite_hdf5=False):
    """
    Write an urbanaccess_network's integrated nodes and edges to node and
    edge tables in a HDF5 file.

    Parameters
    ----------
    urbanaccess_network : object
        urbanaccess_network object with net_edges and net_nodes DataFrames
    dir : string, optional
        directory to save hdf5 file
    filename : string
        name of the hdf5 file to save with .h5 extension
    overwrite_key : bool, optional
        if true any existing table with the specified key name will be
        overwritten
    overwrite_hdf5 : bool, optional
        if true any existing hdf5 file with the specified name in the
        specified directory will be overwritten

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If no urbanaccess_network is supplied, or its net_edges or
        net_nodes DataFrame is empty.
    """
    # Validate with an explicit exception instead of assert: assert is
    # stripped under ``python -O``, and the previous or-chained condition
    # both passed for non-None networks with empty frames and dereferenced
    # attributes on None before failing.
    if urbanaccess_network is None or urbanaccess_network.net_edges.empty \
            or urbanaccess_network.net_nodes.empty:
        raise ValueError('Either no urbanaccess_network specified or '
                         'net_edges or net_nodes are empty.')

    df_to_hdf5(data=urbanaccess_network.net_edges, key='edges',
               overwrite_key=overwrite_key, dir=dir,
               filename=filename, overwrite_hdf5=overwrite_hdf5)
    df_to_hdf5(data=urbanaccess_network.net_nodes, key='nodes',
               overwrite_key=overwrite_key, dir=dir, filename=filename,
               overwrite_hdf5=overwrite_hdf5)
def save_network(urbanaccess_network, filename,
                 dir=config.settings.data_folder,
                 overwrite_key=False, overwrite_hdf5=False):
    """
    Persist an urbanaccess_network's integrated nodes and edges as two
    tables ('edges' and 'nodes') inside a single HDF5 file.

    Parameters
    ----------
    urbanaccess_network : object
        urbanaccess_network object with net_edges and net_nodes DataFrames
    filename : string
        name of the hdf5 file to save with .h5 extension
    dir : string, optional
        directory to save hdf5 file
    overwrite_key : bool, optional
        if true any existing table with the specified key name will be
        overwritten
    overwrite_hdf5 : bool, optional
        if true any existing hdf5 file with the specified name in the
        specified directory will be overwritten

    Returns
    -------
    None
    """
    # Guard clause: refuse to write anything unless both frames are present
    # and non-empty.
    network_unusable = (
        urbanaccess_network is None
        or urbanaccess_network.net_edges.empty
        or urbanaccess_network.net_nodes.empty)
    if network_unusable:
        raise ValueError('Either no urbanaccess_network specified or '
                         'net_edges or net_nodes are empty.')

    # Both tables are written with identical options; only key and data vary.
    tables = (('edges', urbanaccess_network.net_edges),
              ('nodes', urbanaccess_network.net_nodes))
    for hdf5_key, table in tables:
        df_to_hdf5(data=table, key=hdf5_key,
                   overwrite_key=overwrite_key, dir=dir,
                   filename=filename, overwrite_hdf5=overwrite_hdf5)
def save_processed_gtfs_data(gtfsfeeds_dfs=None,
                             dir=config.settings.data_folder,
                             filename=None):
    """
    Write the DataFrames in a gtfsfeeds_dfs object to a HDF5 file.

    Parameters
    ----------
    gtfsfeeds_dfs : object
        gtfsfeeds_dfs object
    dir : string, optional
        directory to save hdf5 file
    filename : string
        name of the hdf5 file to save with .h5 extension

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If gtfsfeeds_dfs is None or any of the required DataFrames
        (stops, routes, trips, stop_times, calendar, stop_times_int)
        is empty.
    """
    # Validate with an explicit exception instead of assert: assert is
    # stripped under ``python -O``, and the previous or-chained condition
    # both passed for non-None objects with empty frames and dereferenced
    # attributes on None before failing.
    if gtfsfeeds_dfs is None or gtfsfeeds_dfs.stops.empty or \
            gtfsfeeds_dfs.routes.empty or gtfsfeeds_dfs.trips.empty or \
            gtfsfeeds_dfs.stop_times.empty or \
            gtfsfeeds_dfs.calendar.empty or \
            gtfsfeeds_dfs.stop_times_int.empty:
        raise ValueError('gtfsfeeds_dfs is missing one of the required '
                         'dataframes.')

    df_to_hdf5(data=gtfsfeeds_dfs.stops, key='stops', overwrite_key=False,
               dir=dir, filename=filename, overwrite_hdf5=False)
    df_to_hdf5(data=gtfsfeeds_dfs.routes, key='routes', overwrite_key=False,
               dir=dir, filename=filename, overwrite_hdf5=False)
    df_to_hdf5(data=gtfsfeeds_dfs.trips, key='trips', overwrite_key=False,
               dir=dir, filename=filename, overwrite_hdf5=False)
    df_to_hdf5(data=gtfsfeeds_dfs.stop_times, key='stop_times',
               overwrite_key=False, dir=dir, filename=filename,
               overwrite_hdf5=False)
    df_to_hdf5(data=gtfsfeeds_dfs.calendar, key='calendar',
               overwrite_key=False, dir=dir, filename=filename,
               overwrite_hdf5=False)
    df_to_hdf5(data=gtfsfeeds_dfs.stop_times_int, key='stop_times_int',
               overwrite_key=False, dir=dir, filename=filename,
               overwrite_hdf5=False)

    # headways and calendar_dates are optional: write them only when
    # populated.
    if not gtfsfeeds_dfs.headways.empty:
        df_to_hdf5(data=gtfsfeeds_dfs.headways, key='headways',
                   overwrite_key=False, dir=dir, filename=filename,
                   overwrite_hdf5=False)
    if not gtfsfeeds_dfs.calendar_dates.empty:
        df_to_hdf5(data=gtfsfeeds_dfs.calendar_dates, key='calendar_dates',
                   overwrite_key=False, dir=dir, filename=filename,
                   overwrite_hdf5=False)
def save_processed_gtfs_data(gtfsfeeds_dfs, filename,
                             dir=config.settings.data_folder):
    """
    Write the DataFrames in a gtfsfeeds_dfs object to a HDF5 file.

    Parameters
    ----------
    gtfsfeeds_dfs : object
        gtfsfeeds_dfs object
    filename : string
        name of the hdf5 file to save with .h5 extension
    dir : string, optional
        directory to save hdf5 file

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If gtfsfeeds_dfs is None or any of the required DataFrames
        (stops, routes, trips, stop_times, calendar, stop_times_int)
        is empty.
    """
    # Required tables, in the order they are written to the hdf5 file.
    # Built after the None check below would be unsafe, so guard None first
    # and use any() for the emptiness test (resolves the previous TODO).
    if gtfsfeeds_dfs is None:
        raise ValueError('gtfsfeeds_dfs is missing one of the required '
                         'dataframes.')
    required = (('stops', gtfsfeeds_dfs.stops),
                ('routes', gtfsfeeds_dfs.routes),
                ('trips', gtfsfeeds_dfs.trips),
                ('stop_times', gtfsfeeds_dfs.stop_times),
                ('calendar', gtfsfeeds_dfs.calendar),
                ('stop_times_int', gtfsfeeds_dfs.stop_times_int))
    if any(df.empty for _, df in required):
        raise ValueError('gtfsfeeds_dfs is missing one of the required '
                         'dataframes.')

    for key, df in required:
        df_to_hdf5(data=df, key=key, overwrite_key=False, dir=dir,
                   filename=filename, overwrite_hdf5=False)

    # headways and calendar_dates are optional: write them only when
    # populated. Use truthiness of ``not df.empty`` rather than the
    # unidiomatic ``.empty is False`` identity comparison.
    optional = (('headways', gtfsfeeds_dfs.headways),
                ('calendar_dates', gtfsfeeds_dfs.calendar_dates))
    for key, df in optional:
        if not df.empty:
            df_to_hdf5(data=df, key=key, overwrite_key=False, dir=dir,
                       filename=filename, overwrite_hdf5=False)