Example 1
def PrepSet(point_set, gdf_sub):
    '''
    Prepares a small DataFrame for a given origin/destination set, expressed as 'item: nearest node ID'.
    '''
    # snap the point set to the network, then keep only the nearest-node ID column
    Prepared_point_set, gdf_node_pos2, gdf_new = net_p.prepare_newOD(point_set['file'], gdf_sub)
    Prepared_point_set = Prepared_point_set['Node']
    return Prepared_point_set
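
A minimal usage sketch, assuming net_p is the TU Delft network-preparation module imported by these scripts, gdf_sub is the GeoDataFrame of the largest connected subgraph (built in Example 2), and the point-set dictionary below is hypothetical:

# hypothetical point set; replace the path with a real Shapefile of origins or destinations
clinics_set = {'name': 'clinics', 'file': './input/clinics.shp', 'scalar_column': None}

clinic_nodes = PrepSet(clinics_set, gdf_sub)
print(clinic_nodes.head())   # pandas Series mapping each point to its nearest network node ID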
Example 2
# Keep only the largest connected subgraph (the component with the most links)
# NOTE: connected_component_subgraphs() was removed in NetworkX 2.4;
# this loop assumes an older NetworkX release.
len_old = 0
for g in nx.connected_component_subgraphs(G_tograph):
    if len(list(g.edges())) > len_old:
        G1 = g
        len_old = len(list(g.edges()))
G_sub = G1.copy()

# print('number of connected components is', nx.number_connected_components(G_sub))
print(nx.info(G_sub))

# Save the simplified transport network back into a GeoDataFrame
gdf_sub = net_p.graph_to_df(G_sub)

# assign the OD points to the closest nodes of the largest subgraph:
gdf_points2, gdf_node_pos2, gdf_new = net_p.prepare_newOD(centroid, gdf_sub)
G2_multi = net_p.gdf_to_simplified_multidigraph(gdf_node_pos2,
                                                gdf_new,
                                                simplify=False)
G2 = net_p.multigraph_to_graph(G2_multi)
gdf2 = net_p.graph_to_df(G2)
allNode = G2.nodes()
allEdge = G2.edges()
od = gdf_points2['Node']

################### traffic flow matrix ####################################################
#read OD demand matrix

import scipy.io

mat = scipy.io.loadmat(r'./input/MZ_inputs/traffic_matrix.mat')
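
scipy.io.loadmat returns a dict keyed by the variable names stored in the .mat file, alongside metadata entries such as '__header__'. A short sketch for inspecting the file and extracting the demand matrix; the variable name 'OD' is an assumption, so check mat.keys() for the actual name stored in traffic_matrix.mat:

import numpy as np

# list the stored variables, skipping the '__...__' metadata keys
print([key for key in mat if not key.startswith('__')])

OD_matrix = np.asarray(mat['OD'])   # 'OD' is a hypothetical variable name
print(OD_matrix.shape)              # expect one row and one column per OD node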
Example 3
def main(adminIsPoint=False):

    ## Define filepath
    path = os.path.realpath(
        os.path.abspath(
            os.path.split(inspect.getfile(inspect.currentframe()))[0]))
    path = os.path.split(path)[0]

    ## Define dashboard path. This .xlsm contains the settings for the criticality script
    dash = os.path.join(path, r'dashboard.xlsm')
    # NOTE: pandas >= 0.21 renamed the 'sheetname' keyword to 'sheet_name'
    ctrl = pd.read_excel(dash, sheetname="AGGREGATE", index_col=0)

    ## Define the operative district. This value names the sub-folder under the input and runtime directories from which files are drawn
    district = ctrl['Weight'].loc['DISTRICT']

    ## Add logging
    logging.basicConfig(filename=os.path.join(path, 'runtime', district,
                                              "PCS_Criticality_log.log"),
                        level=logging.INFO,
                        format="%(asctime)s-%(levelname)s: %(message)s")
    logging.info("Starting Criticality Process")
    print "Running: Criticality Analysis on %s. Do not interrupt" % district

    ## Path Settings
    # outputs
    outpath = os.path.join(path, 'Outputs', '%s' % district)

    # ensure folders exist
    runtime = os.path.join(path, r'PCS\Criticality\runtime\%s\\' % district)
    for d in [outpath, runtime]:
        if not os.path.isdir(d):
            os.mkdir(d)

    ## Input file setting
    # location of road network
    NETWORK_IN = os.path.join(path, r'runtime\%s\\' % district)

    # location of OD
    OD_IN = os.path.join(path, 'PCS\Criticality\input', '%s' % district)

    # location of administrative boundaries file
    DATA_IN = os.path.join(path, 'PCS\Criticality\Vietnam_Data_Layers')
    inAdmin = os.path.join(DATA_IN, 'Poverty_Communes_2009.shp')

    # road network import. Must be a .csv including geometry information of roads.
    inNetworkFile = os.path.join(NETWORK_IN, 'Network.csv')

    # set WGS 84 coordinate reference system
    crs_in = {'init': 'epsg:4326'}

    # ensure folders exist
    for d in [outpath, runtime, OD_IN]:
        if not os.path.isdir(d):
            os.mkdir(d)

    # error checking - Check input data existence
    for curFile in [dash, inNetworkFile, inAdmin, DATA_IN, OD_IN, NETWORK_IN]:
        if not os.path.exists(curFile):
            logging.error("No input found: %s" % curFile)
            raise ValueError("No input found: %s" % curFile)

    # import input dataframes - road network and control dashboard
    inNetwork = pd.read_csv(inNetworkFile)
    ctrldf = pd.read_excel(dash, sheetname="CRITICALITY", index_col='COL_ID')

    #Inputs
    network = os.path.join(runtime, 'Network.shp')

    ## Network Preparation
    # set default iri value as the mean iri of roads for which iri exists.
    fillvalue = inNetwork['iri_med'].mean()

    # fill iri value where missing
    inNetwork['TC_iri_med'] = inNetwork['iri_med'].fillna(fillvalue)

    # set cost of traversing segment according to length and IRI, per settings in the excel dashboard
    inNetwork['total_cost'] = inNetwork['length'] * (
        ctrldf['Base_cost_km'][0] +
        (ctrldf['IRI_Coeff'][0] * inNetwork['TC_iri_med']))

    # convert the pandas DataFrame to a GeoDataFrame
    ginNetwork = gpd.GeoDataFrame(inNetwork,
                                  crs=crs_in,
                                  geometry=inNetwork['Line_Geometry'].map(
                                      shapely.wkt.loads))

    # set up Shapefile of road network
    ginNetwork.to_file(network, driver='ESRI Shapefile')
    logging.info("Successfully loaded data")

    # Generate admin boundary centroids
    if not adminIsPoint:
        prepareAdminCentroids(ginNetwork, inAdmin, crs_in,
                              os.path.join(OD_IN, 'adm_centroids.shp'))
        logging.info("Created admin centroids")

    # define a function that loads origin files into a dictionary; parameters are controlled from the Excel dashboard
    def makeOrigin(n, ctrldf):
        origindict = {
            'name':
            ctrldf['OName'][n],
            'file':
            os.path.join(path, 'PCS', 'Criticality', 'input', district,
                         '%s.shp' % ctrldf['OName'][n]),
            'scalar_column':
            ctrldf['OScalar'][n]
        }
        return origindict

    # define a function that loads destination files into a dictionary; parameters are controlled from the Excel dashboard
    def makeDestination(n, ctrldf):
        destdict = {
            'name':
            ctrldf['DName'][n],
            'file':
            os.path.join(path, 'PCS', 'Criticality', 'input', district,
                         '%s.shp' % ctrldf['DName'][n]),
            'penalty':
            ctrldf['DPenalty'][n],
            'importance':
            ctrldf['DImportance'][n],
            'annual':
            ctrldf['DAnnual'][n],
            'scalar_column':
            ctrldf['DScalar'][n]
        }
        return destdict

    # load origins and destinations into dictionary, create dictionaries of each set
    origin_1, origin_2, origin_3, origin_4, origin_5 = makeOrigin(
        0, ctrldf), makeOrigin(1, ctrldf), makeOrigin(2, ctrldf), makeOrigin(
            3, ctrldf), makeOrigin(4, ctrldf)
    originlist = {
        '%s' % ctrldf['OName'][0]: origin_1,
        '%s' % ctrldf['OName'][1]: origin_2,
        '%s' % ctrldf['OName'][2]: origin_3,
        '%s' % ctrldf['OName'][3]: origin_4,
        '%s' % ctrldf['OName'][4]: origin_5,
    }
    destination_1, destination_2, destination_3, destination_4, destination_5 = makeDestination(
        0, ctrldf), makeDestination(1, ctrldf), makeDestination(
            2, ctrldf), makeDestination(3, ctrldf), makeDestination(4, ctrldf)
    destinationlist = {
        '%s' % ctrldf['DName'][0]: destination_1,
        '%s' % ctrldf['DName'][1]: destination_2,
        '%s' % ctrldf['DName'][2]: destination_3,
        '%s' % ctrldf['DName'][3]: destination_4,
        '%s' % ctrldf['DName'][4]: destination_5,
    }
    logging.debug("Opened origins and destinations")

    # Preparation of network via TU Delft code
    gdf_points, gdf_node_pos, gdf = net_p.prepare_centroids_network(
        origin_1['file'], network)

    # Create Networkx MultiGraph object from the GeoDataFrame
    G = net_p.gdf_to_simplified_multidigraph(gdf_node_pos, gdf, simplify=False)

    # Change the MultiGraph object to Graph object to reduce computation cost
    G_tograph = net_p.multigraph_to_graph(G)
    logging.debug(
        'Loaded road network: number of connected components is: %d' %
        nx.number_connected_components(G_tograph))

    # Observe the properties of the Graph object
    logging.debug(nx.info(G_tograph))

    # Take only the largest subgraph with all connected links
    len_old = 0
    for g in nx.connected_component_subgraphs(G_tograph):
        if len(list(g.edges())) > len_old:
            G1 = g
            len_old = len(list(g.edges()))
    G_sub = G1.copy()

    logging.debug(nx.info(G_sub))

    # Save the simplified transport network into a GeoDataFrame
    gdf_sub = net_p.graph_to_df(G_sub)
    blank, gdf_node_pos2, gdf_new = net_p.prepare_newOD(
        origin_1['file'], gdf_sub)

    #Road Network Graph prep
    G2_multi = net_p.gdf_to_simplified_multidigraph(gdf_node_pos2,
                                                    gdf_new,
                                                    simplify=False)

    # Dump files to runtime if dump = 1
    Filedump(gdf_new, 'Road_Lines', runtime)
    Filedump(gdf_node_pos2, 'Road_Nodes', runtime)
    G2 = net_p.multigraph_to_graph(G2_multi)
    gdf2 = net_p.graph_to_df(G2)
    nLink = len(G2.edges())

    # open empty lists
    Outputs, cost_list, iso_list = [], [], []

    ## Run the calculateOD function for each combination of origins and destinations specified in the control excel
    # append all outputs to the Outputs, cost_list and iso_list objects just created
    for z in ctrldf.index:
        if (((ctrldf['ComboO'][z]) != 0) & ((ctrldf['ComboD'][z]) != 0) &
            (pd.notnull(ctrldf['ComboO'][z])) &
            (pd.notnull(ctrldf['ComboD'][z]))):
            Q = int(ctrldf['ComboNumber'][z])
            logging.info(
                'Computing | combination %s as origin and %s as destination ' %
                (ctrldf['ComboO'][z], ctrldf['ComboD'][z]))
            xx = calculateOD(originlist['%s' % ctrldf['ComboO'][z]],
                             destinationlist['%s' % ctrldf['ComboD'][z]], Q,
                             gdf_sub, G2, nLink, gdf2, runtime, ctrldf)
            Outputs.append(xx)
            cost_list.append("Social_Cost_%s" % Q)
            iso_list.append("Isolated_Trips_%s" % Q)

    # drop unnecessary columns
    Output = inNetwork.drop(["geometry", 'TC_iri_med', 'total_cost'], axis=1)

    # for each object in the Outputs list:
    for o_d_calc in range(0, len(Outputs)):

        # Merge the objects together. This creates multiple columns showing each scenario
        Output = Output.merge(Outputs[o_d_calc]['summary'],
                              how='left',
                              on='ID')

    # sum across the relevant columns - the 'Social_Cost' columns generated above in calculateOD for each O-D file combo
    Output['Cost_total'] = Output[cost_list].sum(axis=1)

    # sum across the relevant columns - the 'Isolated_Trips' columns generated above in calculateOD for each O-D file combo
    Output['Iso_total'] = Output[iso_list].sum(axis=1)

    # Generate an overall criticality score for each road based on user input weights between isolated trips and disrupted trips
    Output['CRIT_SCORE'] = (
        ctrldf['Disrupt_Weight'][0] * Output['Cost_total'] +
        ctrldf['Isolate_Weight'][0] * Output['Iso_total'])

    # min-max normalize the score across all roads
    Output['CRIT_SCORE'] = (
        (Output['CRIT_SCORE'] - Output['CRIT_SCORE'].min()) /
        (Output['CRIT_SCORE'].max() - Output['CRIT_SCORE'].min()))
    logging.info("Calculated PCS Criticality")
    FileOut(Output, 'criticality_output', outpath)
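
The largest-component loop used in these examples relies on nx.connected_component_subgraphs(), which was removed in NetworkX 2.4, and nx.info(), which was removed in NetworkX 3.0. A minimal sketch of an equivalent selection on current NetworkX, assuming G_tograph is the undirected Graph built above:

import networkx as nx

# keep the connected component with the most edges, as the original loop does
components = (G_tograph.subgraph(c) for c in nx.connected_components(G_tograph))
G_sub = max(components, key=lambda g: g.number_of_edges()).copy()
print(G_sub.number_of_nodes(), G_sub.number_of_edges())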
Example 4
def main(adminIsPoint=False):
    path = os.path.realpath(
        os.path.abspath(
            os.path.split(inspect.getfile(inspect.currentframe()))[0]))
    path = os.path.split(path)[0]
    dash = os.path.join(path, r'dashboard.xlsm')
    ctrl = pd.read_excel(dash, sheetname="AGGREGATE", index_col=0)
    district = ctrl['Weight'].loc['DISTRICT']

    logging.basicConfig(filename=os.path.join(path, 'runtime', district,
                                              "PCS_Criticality_log.log"),
                        level=logging.INFO,
                        format="%(asctime)s-%(levelname)s: %(message)s")
    logging.info("Starting Criticality Process")
    print "Running: Criticality Analysis on %s. Do not interrupt" % district
    # Path Settings
    outpath = os.path.join(path, 'Outputs', '%s' % district)
    runtime = os.path.join(path, r'PCS\Criticality\runtime\%s\\' % district)
    for d in [outpath, runtime]:
        if not os.path.isdir(d):
            os.mkdir(d)
    NETWORK_IN = os.path.join(path, r'runtime\%s\\' % district)
    OD_IN = os.path.join(path, 'PCS\Criticality\input', '%s' % district)
    DATA_IN = os.path.join(path, 'PCS\Criticality\Vietnam_Data_Layers')
    inAdmin = os.path.join(DATA_IN, 'Poverty_Communes_2009.shp')
    inNetworkFile = os.path.join(NETWORK_IN, 'Network.csv')

    crs_in = {'init': 'epsg:4326'}  #WGS 84

    #Create folders for analysis
    for d in [outpath, runtime, OD_IN]:
        if not os.path.isdir(d):
            os.mkdir(d)
    #Error checking - Check input data
    for curFile in [dash, inNetworkFile, inAdmin, DATA_IN, OD_IN, NETWORK_IN]:
        if not os.path.exists(curFile):
            logging.error("No input found: %s" % curFile)
            raise ValueError("No input found: %s" % curFile)

    inNetwork = pd.read_csv(inNetworkFile)
    ctrldf = pd.read_excel(dash, sheetname="CRITICALITY", index_col='COL_ID')
    #Inputs
    network = os.path.join(runtime, 'Network.shp')

    #Network Prep
    fillvalue = inNetwork['iri_med'].mean()
    inNetwork['TC_iri_med'] = inNetwork['iri_med'].fillna(fillvalue)
    inNetwork['total_cost'] = inNetwork['length'] * (
        ctrldf['Base_cost_km'][0] +
        (ctrldf['IRI_Coeff'][0] * inNetwork['TC_iri_med']))
    ginNetwork = gpd.GeoDataFrame(inNetwork,
                                  crs=crs_in,
                                  geometry=inNetwork['Line_Geometry'].map(
                                      shapely.wkt.loads))
    ginNetwork.to_file(network, driver='ESRI Shapefile')
    logging.info("Successfully loaded data")
    if not adminIsPoint:
        prepareAdminCentroids(ginNetwork, inAdmin, crs_in,
                              os.path.join(OD_IN, 'adm_centroids.shp'))
        logging.info("Created admin centroids")

    def makeOrigin(n, ctrldf):
        origindict = {
            'name':
            ctrldf['OName'][n],
            'file':
            os.path.join(path, 'PCS', 'Criticality', 'input', district,
                         '%s.shp' % ctrldf['OName'][n]),
            'scalar_column':
            ctrldf['OScalar'][n]
        }
        return origindict

    def makeDestination(n, ctrldf):
        destdict = {
            'name':
            ctrldf['DName'][n],
            'file':
            os.path.join(path, 'PCS', 'Criticality', 'input', district,
                         '%s.shp' % ctrldf['DName'][n]),
            'penalty':
            ctrldf['DPenalty'][n],
            'importance':
            ctrldf['DImportance'][n],
            'annual':
            ctrldf['DAnnual'][n],
            'scalar_column':
            ctrldf['DScalar'][n]
        }
        return destdict

    origin_1, origin_2, origin_3, origin_4, origin_5 = makeOrigin(
        0, ctrldf), makeOrigin(1, ctrldf), makeOrigin(2, ctrldf), makeOrigin(
            3, ctrldf), makeOrigin(4, ctrldf)
    originlist = {
        '%s' % ctrldf['OName'][0]: origin_1,
        '%s' % ctrldf['OName'][1]: origin_2,
        '%s' % ctrldf['OName'][2]: origin_3,
        '%s' % ctrldf['OName'][3]: origin_4,
        '%s' % ctrldf['OName'][4]: origin_5,
    }
    destination_1, destination_2, destination_3, destination_4, destination_5 = makeDestination(
        0, ctrldf), makeDestination(1, ctrldf), makeDestination(
            2, ctrldf), makeDestination(3, ctrldf), makeDestination(4, ctrldf)
    destinationlist = {
        '%s' % ctrldf['DName'][0]: destination_1,
        '%s' % ctrldf['DName'][1]: destination_2,
        '%s' % ctrldf['DName'][2]: destination_3,
        '%s' % ctrldf['DName'][3]: destination_4,
        '%s' % ctrldf['DName'][4]: destination_5,
    }
    logging.debug("Opened origins and destinations")
    # Preparation of network
    gdf_points, gdf_node_pos, gdf = net_p.prepare_centroids_network(
        origin_1['file'], network)
    # Create Networkx MultiGraph object from the GeoDataFrame
    G = net_p.gdf_to_simplified_multidigraph(gdf_node_pos, gdf, simplify=False)
    # Change the MultiGraph object to Graph object to reduce computation cost
    G_tograph = net_p.multigraph_to_graph(G)
    logging.debug(
        'Loaded road network: number of connected components is: %d' %
        nx.number_connected_components(G_tograph))
    # Observe the properties of the Graph object
    logging.debug(nx.info(G_tograph))
    # Take only the largest subgraph with all connected links
    len_old = 0
    for g in nx.connected_component_subgraphs(G_tograph):
        if len(list(g.edges())) > len_old:
            G1 = g
            len_old = len(list(g.edges()))
    G_sub = G1.copy()

    logging.debug(nx.info(G_sub))

    # Save the simplified transport network into a GeoDataFrame
    gdf_sub = net_p.graph_to_df(G_sub)
    blank, gdf_node_pos2, gdf_new = net_p.prepare_newOD(
        origin_1['file'], gdf_sub)

    #Road Network Graph prep
    G2_multi = net_p.gdf_to_simplified_multidigraph(gdf_node_pos2,
                                                    gdf_new,
                                                    simplify=False)
    Filedump(gdf_new, 'Road_Lines', runtime)
    Filedump(gdf_node_pos2, 'Road_Nodes', runtime)
    G2 = net_p.multigraph_to_graph(G2_multi)
    gdf2 = net_p.graph_to_df(G2)
    nLink = len(G2.edges())

    Outputs, cost_list, iso_list = [], [], []

    for z in ctrldf.index:
        if (((ctrldf['ComboO'][z]) != 0) & ((ctrldf['ComboD'][z]) != 0) &
            (pd.notnull(ctrldf['ComboO'][z])) &
            (pd.notnull(ctrldf['ComboD'][z]))):
            Q = int(ctrldf['ComboNumber'][z])
            logging.info(
                'Computing | combination %s as origin and %s as destination ' %
                (ctrldf['ComboO'][z], ctrldf['ComboD'][z]))
            xx = calculateOD(originlist['%s' % ctrldf['ComboO'][z]],
                             destinationlist['%s' % ctrldf['ComboD'][z]], Q,
                             gdf_sub, G2, nLink, gdf2, runtime, ctrldf)
            Outputs.append(xx)
            cost_list.append("Social_Cost_%s" % Q)
            iso_list.append("Isolated_Trips_%s" % Q)

    Output = inNetwork.drop(["geometry", 'TC_iri_med', 'total_cost'], axis=1)
    for o_d_calc in range(0, len(Outputs)):
        Output = Output.merge(Outputs[o_d_calc]['summary'],
                              how='left',
                              on='ID')

    Output['Cost_total'] = Output[cost_list].sum(axis=1)
    Output['Iso_total'] = Output[iso_list].sum(axis=1)
    Output['CRIT_SCORE'] = (
        ctrldf['Disrupt_Weight'][0] * Output['Cost_total'] +
        ctrldf['Isolate_Weight'][0] * Output['Iso_total'])
    Output['CRIT_SCORE'] = (
        (Output['CRIT_SCORE'] - Output['CRIT_SCORE'].min()) /
        (Output['CRIT_SCORE'].max() - Output['CRIT_SCORE'].min()))
    logging.info("Calculated PCS Criticality")
    FileOut(Output, 'criticality_output', outpath)
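
One caveat on the final min-max normalization in both versions of main(): if every road receives the same CRIT_SCORE, the denominator is zero and the whole column becomes NaN. A small sketch of a guarded replacement for that normalization step, reusing the Output DataFrame and column name from the examples above:

score_min = Output['CRIT_SCORE'].min()
score_range = Output['CRIT_SCORE'].max() - score_min
if score_range > 0:
    Output['CRIT_SCORE'] = (Output['CRIT_SCORE'] - score_min) / score_range
else:
    # all roads scored identically; fall back to a constant instead of dividing by zero
    Output['CRIT_SCORE'] = 0.0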