### Example 1
def main():
    """Driver: load the SF sparse road network and run the ABM for the
    configured day/hour slots, logging per-slot and total run times."""
    logging.basicConfig(filename=absolute_path + '/sf_abm_mp.log',
                        level=logging.DEBUG)
    log = logging.getLogger('main')
    log.info('{} \n\n'.format(datetime.datetime.now()))

    run_start = time.time()

    ### Read initial graph
    #global g_igraph
    #g_igraph = igraph.Graph.Read_Pickle(absolute_path+'/../data_repo/data/sf/network_graph.pkl')

    global g
    mtx_path = absolute_path + '/../data_repo/data/sf/network_sparse.mtx'
    g = interface.readgraph(bytes(mtx_path, encoding='utf-8'))

    for day in [1]:
        for hour in range(9, 10):

            log.info('*************** DY{} HR{} ***************'.format(
                day, hour))

            slot_start = time.time()
            edge_volume = one_step(day, hour)
            slot_end = time.time()
            log.info('DY{}_HR{}: running time {}'.format(
                day, hour, slot_end - slot_start))

            ### Update graph
            # volume_array = np.zeros(g.ecount())
            # volume_array[list(edge_volume.keys())] = np.array(list(edge_volume.values()))*400 ### 400 is the factor to scale Uber/Lyft trip # to total car trip # in SF.
            # g.es['volume'] = volume_array
            # logger.info('DY{}_HR{}: max link volume {}'.format(day, hour, max(volume_array)))
            # g.es['t_new'] = fft_array*(1.2+0.78*(volume_array/capacity_array)**4) ### BPR and (colak, 2015)
            # g.es['weight'] = g.es['t_new']

            #write_geojson(g, day, hour)

    run_end = time.time()
    log.info('total run time is {} seconds \n\n\n\n\n'.format(run_end -
                                                              run_start))
### Example 2
def network(network_file_edges=None,
            network_file_nodes=None,
            simulation_outputs=None,
            cf_files=None,
            scen_nm=''):
    """Build the simulation road network for one scenario.

    Reads the edge/node CSVs (paths relative to the module-level ``work_dir``
    global — TODO confirm it is set before this is called), applies lane and
    speed assumptions by road type, optionally applies counterflow lane
    changes, writes the modified edge table and a sparse travel-time matrix
    under ``scratch_dir``, and builds the shortest-path graph plus Node/Link
    objects.

    Args:
        network_file_edges: CSV of edges with at least 'edge_id_igraph',
            'start_igraph', 'end_igraph', 'type', 'lanes', 'maxmph',
            'length', 'geometry' (WKT) columns.
        network_file_nodes: CSV of nodes with 'node_id_igraph', 'node_osmid',
            'lon', 'lat' columns.
        simulation_outputs: subdirectory under ``scratch_dir`` for outputs.
        cf_files: optional list of counterflow CSVs ('edge_id_igraph',
            'along' columns). Defaults to no counterflow.
        scen_nm: scenario name suffix for output filenames.

    Returns:
        (g, nodes, links): the interface graph, list of Node objects
        (real + virtual), and list of Link objects (virtual + real).
    """
    logger = logging.getLogger("bk_evac")

    # Was `cf_files=[]`: a mutable default is shared across calls; use None
    # as the sentinel and create a fresh list per call instead.
    if cf_files is None:
        cf_files = []

    links_df0 = pd.read_csv(work_dir + network_file_edges)
    links_df0 = gpd.GeoDataFrame(
        links_df0, crs='epsg:4326',
        geometry=links_df0['geometry'].map(loads))  #.to_crs(3857)
    ### lane assumptions
    ### leave to OSM specified values for motorway and trunk
    links_df0['lanes'] = np.where(
        links_df0['type'].isin([
            'primary', 'primary_link', 'secondary', 'secondary_link',
            'tertiary', 'tertiary_link'
        ]), 2, links_df0['lanes'])
    links_df0['lanes'] = np.where(
        links_df0['type'].isin(['residential', 'unclassified']), 1,
        links_df0['lanes'])
    ### speed assumptions
    links_df0['maxmph'] = np.where(
        links_df0['type'].isin(['primary', 'primary_link']), 55,
        links_df0['maxmph'])
    links_df0['maxmph'] = np.where(
        links_df0['type'].isin(
            ['secondary', 'secondary_link', 'tertiary', 'tertiary_link']), 25,
        links_df0['maxmph'])
    links_df0['maxmph'] = np.where(
        links_df0['type'].isin(['residential', 'unclassified']), 25 * 0.8,
        links_df0['maxmph'])

    if cf_files:
        ### read counterflow links
        cf_links = [pd.read_csv(work_dir + cf_file) for cf_file in cf_files]
        cf_links_df = pd.concat(cf_links)
        ### along counterflow direction: double the lanes
        cf_links_id = cf_links_df.loc[cf_links_df['along'] == 1,
                                      'edge_id_igraph']
        links_df0['lanes'] = np.where(
            links_df0['edge_id_igraph'].isin(cf_links_id),
            links_df0['lanes'] * 2, links_df0['lanes'])
        ### opposite counterflow direction: effectively close the link
        ### (0 lanes, near-zero speed so fft becomes huge)
        opcf_links_id = cf_links_df.loc[cf_links_df['along'] == 0,
                                        'edge_id_igraph']
        links_df0['lanes'] = np.where(
            links_df0['edge_id_igraph'].isin(opcf_links_id), 0,
            links_df0['lanes'])
        links_df0['maxmph'] = np.where(
            links_df0['edge_id_igraph'].isin(opcf_links_id), 0.0000001,
            links_df0['maxmph'])

    # Free-flow time: length / (maxmph / 2.237); 2.237 mph per m/s, so this
    # assumes 'length' is in meters and yields seconds — TODO confirm units.
    links_df0['fft'] = links_df0['length'] / links_df0['maxmph'] * 2.237
    # 1900 veh/hr per lane: standard saturation-flow assumption.
    links_df0['capacity'] = 1900 * links_df0['lanes']
    links_df0 = links_df0[[
        'edge_id_igraph', 'start_igraph', 'end_igraph', 'lanes', 'capacity',
        'maxmph', 'fft', 'length', 'geometry'
    ]]
    links_df0.to_csv(scratch_dir + simulation_outputs +
                     '/modified_network_edges_{}.csv'.format(scen_nm),
                     index=False)
    # sys.exit(0)

    nodes_df0 = pd.read_csv(work_dir + network_file_nodes)
    nodes_df0 = gpd.GeoDataFrame(
        nodes_df0,
        crs='epsg:4326',
        geometry=[Point(xy) for xy in zip(nodes_df0['lon'], nodes_df0['lat'])
                  ])  #.to_crs(3857)
    nodes_df0['x'] = nodes_df0['geometry'].apply(lambda x: x.x)
    nodes_df0['y'] = nodes_df0['geometry'].apply(lambda x: x.y)

    ### Convert to mtx. Distinct names for the sparse indices so they are not
    ### shadowed by the itertuples loop variable below (the original reused
    ### `row` for both).
    wgh = links_df0['fft']
    row_idx = links_df0['start_igraph']
    col_idx = links_df0['end_igraph']
    assert max(
        np.max(row_idx) + 1,
        np.max(col_idx) + 1
    ) == nodes_df0.shape[
        0], 'nodes and links dimension do not match, row {}, col {}, nodes {}'.format(
            np.max(row_idx), np.max(col_idx), nodes_df0.shape[0])
    g_coo = ssparse.coo_matrix((wgh, (row_idx, col_idx)),
                               shape=(nodes_df0.shape[0], nodes_df0.shape[0]))
    logging.info("({}, {}), {}".format(g_coo.shape[0], g_coo.shape[1],
                                       len(g_coo.data)))
    sio.mmwrite(
        scratch_dir + simulation_outputs +
        '/network_sparse_{}.mtx'.format(scen_nm), g_coo)
    # g_coo = sio.mmread(absolute_path+'/outputs/network_sparse.mtx'.format(folder))
    g = interface.readgraph(
        bytes(scratch_dir + simulation_outputs +
              '/network_sparse_{}.mtx'.format(scen_nm),
              encoding='utf-8'))

    ### Create link and node objects
    nodes = []
    links = []
    for row in nodes_df0.itertuples():
        real_node = Node(row.node_id_igraph, row.x, row.y, 'real',
                         row.node_osmid)
        virtual_node = real_node.create_virtual_node()
        virtual_link = real_node.create_virtual_link()
        nodes.append(real_node)
        nodes.append(virtual_node)
        links.append(virtual_link)
    for row in links_df0.itertuples():
        real_link = Link(row.edge_id_igraph, row.lanes, row.length, row.fft,
                         row.capacity, 'real', row.start_igraph,
                         row.end_igraph, row.geometry)
        links.append(real_link)

    return g, nodes, links
### Example 3
import sys
import scipy.sparse
import scipy.io as sio
import numpy as np
import time
import os

sys.path.insert(0, '/Users/bz247')
from sp import interface

### Benchmark the priority-queue shortest-path implementation on the SF network.
absolute_path = os.path.dirname(os.path.abspath(__file__))
graph_file = absolute_path + '/../0_network/data/sf/network_sparse.mtx'

### Time the PQ
print('########### Priority Queue SP #############')
g_pq = interface.readgraph(bytes(graph_file, encoding='utf-8'))
ta0 = time.time()
try:
    sp_pq = g_pq.dijkstra(1, 20)
except Exception:
    # Was a bare `except:`, which also swallows SystemExit/KeyboardInterrupt.
    # Also: the original fell through with `sp_pq` unbound, so the route()
    # call below crashed with a NameError instead of the intended message.
    # Exit explicitly when no route is found.
    print('route not found')
    sys.exit(1)
ta1 = time.time()
route_pq = sp_pq.route(20)
ta2 = time.time()
path_pq = [vertex[1] for vertex in route_pq]
ta3 = time.time()
# Label fixed: the query above is dijkstra(1, 20), not "1020".
print('PQ: distance 1-->20: ', sp_pq.distance(20))
print(
    'PQ: total time {}, dijkstra() {}, route() {}, vertex list {}, \n'.format(
        ta3 - ta0, ta1 - ta0, ta2 - ta1, ta3 - ta2))