Example #1
def dual_graph() -> nx.MultiGraph:
    """
    Returns
    -------
    nx.MultiGraph
        A dual `NetworkX` `MultiGraph` for `pytest` tests.
    """
    G_dual = mock_graph()
    G_dual = graphs.nX_simple_geoms(G_dual)
    G_dual = graphs.nX_to_dual(G_dual)
    return G_dual
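
# A minimal consumption sketch: it assumes the fixture above is registered via
# a @pytest.fixture decorator in conftest.py and that mock_graph() wraps
# cityseer.tools.mock.mock_graph
import networkx as nx


def test_dual_graph_fixture(dual_graph):
    # pytest injects the fixture's return value by parameter name
    assert isinstance(dual_graph, nx.MultiGraph)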
Example #2
# NOTE: the head of this first call was truncated in the source excerpt;
# labels=True and the output path are reconstructed by analogy with the
# unlabelled call below and should be treated as assumptions
plot.plot_nX(G, labels=True, plot_geoms=True, path='images/graph.png',
             dpi=150)  # WITH LABELS

#
#
# GRAPH MODULE
plot.plot_nX(G, plot_geoms=True, path='images/graph_simple.png',
             dpi=150)  # NO LABELS

G_simple = graphs.nX_simple_geoms(G)
G_decomposed = graphs.nX_decompose(G_simple, 100)
plot.plot_nX(G_decomposed,
             plot_geoms=True,
             path='images/graph_decomposed.png',
             dpi=150)

G_dual = graphs.nX_to_dual(G_simple)
plot.plot_nX_primal_or_dual(G_simple,
                            G_dual,
                            plot_geoms=True,
                            path='images/graph_dual.png',
                            dpi=150)

# graph cleanup examples
lng, lat = -0.13396079424572427, 51.51371088849723
G_utm = mock.make_buffered_osm_graph(lng, lat, 1250)
easting, northing, _zone, _letter = utm.from_latlon(lat, lng)
buffer = 750
min_x = easting - buffer
max_x = easting + buffer
min_y = northing - buffer
max_y = northing + buffer
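
# A hedged sketch of how these extents might feed a cleanup plot: the
# x_lim / y_lim cropping parameters follow cityseer's plot.plot_nX signature
# (assumed here), and the output path is illustrative
plot.plot_nX(G_utm,
             plot_geoms=True,
             x_lim=(min_x, max_x),
             y_lim=(min_y, max_y),
             path='images/graph_cleanup.png',
             dpi=150)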
Example #3
def test_graph_maps_from_nX(diamond_graph):
    # test maps vs. networkX
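    # the diamond_graph fixture describes four nodes and five 100m edges:
    # node 0 at the base, nodes 1 and 2 at mid-height, node 3 at the apex,
    # i.e. two equilateral triangles sharing the 1-2 edge (per the bearing
    # assertions below)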
    G_test = diamond_graph.copy()
    G_test_dual = graphs.nX_to_dual(G_test)
    for G, is_dual in zip((G_test, G_test_dual), (False, True)):
        # set some random 'live' statuses
        for n in G.nodes():
            G.nodes[n]['live'] = bool(np.random.randint(0, 2))

        # generate test maps
        node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
        # debug plot
        # plot.plot_graphs(primal=G)
        # plot.plot_graph_maps(node_uids, node_data, edge_data)

        # run check (this checks node to edge maps internally)
        checks.check_network_maps(node_data, edge_data, node_edge_map)

        # check lengths
        assert len(node_uids) == len(node_data) == G.number_of_nodes()
        # directed edge entries: two per undirected edge
        assert len(edge_data) == G.number_of_edges() * 2

        # check node maps (idx and label match in this case...)
        for n_label in node_uids:
            n_idx = node_uids.index(n_label)
            assert node_data[n_idx][0] == G.nodes[n_label]['x']
            assert node_data[n_idx][1] == G.nodes[n_label]['y']
            assert node_data[n_idx][2] == G.nodes[n_label]['live']

        # check edge maps (idx and label match in this case...)
        for start, end, length, angle, imp_fact, start_bear, end_bear in edge_data:
            # print(f'elif (start, end) == ({start}, {end}):')
            # print(f'assert (length, angle, imp_fact, start_bear, end_bear) == ({length}, {angle}, {imp_fact}, {start_bear}, {end_bear})')
            if not is_dual:
                if (start, end) == (0.0, 1.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 120.0, 120.0)
                elif (start, end) == (0.0, 2.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 60.0, 60.0)
                elif (start, end) == (1.0, 0.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -60.0, -60.0)
                elif (start, end) == (1.0, 2.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 0.0, 0.0)
                elif (start, end) == (1.0, 3.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 60.0, 60.0)
                elif (start, end) == (2.0, 0.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -120.0, -120.0)
                elif (start, end) == (2.0, 1.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 180.0, 180.0)
                elif (start, end) == (2.0, 3.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 120.0, 120.0)
                elif (start, end) == (3.0, 1.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -120.0, -120.0)
                elif (start, end) == (3.0, 2.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -60.0, -60.0)
                else:
                    raise KeyError('Unmatched edge.')
            else:
                s_label = node_uids[int(start)]
                e_label = node_uids[int(end)]
                print(s_label, e_label)  # debug context for the assertions below
                if (start, end) == (0.0, 1.0):  # 0_1 0_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -60.0, 60.0)
                elif (start, end) == (0.0, 2.0):  # 0_1 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 120.0, 0.0)
                elif (start, end) == (0.0, 3.0):  # 0_1 1_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, 120.0, 60.0)
                elif (start, end) == (1.0, 0.0):  # 0_2 0_1
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -120.0, 120.0)
                elif (start, end) == (1.0, 2.0):  # 0_2 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 60.0, 180.0)
                elif (start, end) == (1.0, 4.0):  # 0_2 2_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, 60.0, 120.0)
                elif (start, end) == (2.0, 0.0):  # 1_2 0_1
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 180.0, -60.0)
                elif (start, end) == (2.0, 1.0):  # 1_2 0_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 0.0, -120.0)
                elif (start, end) == (2.0, 3.0):  # 1_2 1_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 180.0, 60.0)
                elif (start, end) == (2.0, 4.0):  # 1_2 2_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 0.0, 120.0)
                elif (start, end) == (3.0, 0.0):  # 1_3 0_1
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, -120.0, -60.0)
                elif (start, end) == (3.0, 2.0):  # 1_3 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -120.0, 0.0)
                elif (start, end) == (3.0, 4.0):  # 1_3 2_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 60.0, -60.0)
                elif (start, end) == (4.0, 1.0):  # 2_3 0_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, -60.0, -120.0)
                elif (start, end) == (4.0, 2.0):  # 2_3 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -60.0, 180.0)
                elif (start, end) == (4.0, 3.0):  # 2_3 1_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 120.0, -120.0)
                else:
                    raise KeyError('Unmatched edge.')
    # check that missing geoms throw an error
    G_test = diamond_graph.copy()
    for s, e, k in G_test.edges(keys=True):
        # delete the geom from the first edge and break
        del G_test[s][e][k]['geom']
        break
    with pytest.raises(KeyError):
        graphs.graph_maps_from_nX(G_test)

    # check that non-LineString geoms throw an error
    G_test = diamond_graph.copy()
    for s, e, k in G_test.edges(keys=True):
        G_test[s][e][k]['geom'] = geometry.Point([G_test.nodes[s]['x'], G_test.nodes[s]['y']])
    with pytest.raises(TypeError):
        graphs.graph_maps_from_nX(G_test)

    # check that missing node keys throw an error
    G_test = diamond_graph.copy()
    for k in ['x', 'y']:
        for n in G_test.nodes():
            # delete key from first node and break
            del G_test.nodes[n][k]
            break
        with pytest.raises(KeyError):
            graphs.graph_maps_from_nX(G_test)

    # check that invalid imp_factors are caught
    G_test = diamond_graph.copy()
    # corrupt imp_factor value and break
    for corrupt_val in [-1, -np.inf, np.nan]:
        for s, e, k in G_test.edges(keys=True):
            G_test[s][e][k]['imp_factor'] = corrupt_val
            break
        with pytest.raises(ValueError):
            graphs.graph_maps_from_nX(G_test)
Example #4
def test_nX_to_dual(primal_graph, diamond_graph):
    # check that missing geoms throw an error
    G = diamond_graph.copy()
    del G[0][1][0]['geom']
    with pytest.raises(KeyError):
        graphs.nX_to_dual(G)

    # check that non-LineString geoms throw an error
    G = diamond_graph.copy()
    for s, e, k in G.edges(keys=True):
        G[s][e][k]['geom'] = geometry.Point([G.nodes[s]['x'], G.nodes[s]['y']])
    with pytest.raises(TypeError):
        graphs.nX_to_dual(G)

    # check that missing node keys throw an error
    for k in ['x', 'y']:
        G = diamond_graph.copy()
        for n in G.nodes():
            # delete key from first node and break
            del G.nodes[n][k]
            break
        # check that missing key throws an error
        with pytest.raises(KeyError):
            graphs.nX_to_dual(G)

    # test dual
    G = diamond_graph.copy()
    G_dual = graphs.nX_to_dual(G)
    # from cityseer.tools import plot
    # plot.plot_nX_primal_or_dual(primal_graph=G, dual_graph=G_dual, plot_geoms=True, labels=True, node_size=80)

    assert G_dual.number_of_nodes() == 5
    assert G_dual.number_of_edges() == 8
    # the new dual nodes have three edges each, except for the midspan node, which now has four edges
    for n in G_dual.nodes():
        if n == '1_2':
            assert nx.degree(G_dual, n) == 4
        else:
            assert nx.degree(G_dual, n) == 3
    for start, end, d in G_dual.edges(data=True):
        # the new geoms should also be 100m length (split 50m x 2)
        assert round(d['geom'].length) == 100
        # check the starting and ending bearings per diamond graph
        if (G_dual.nodes[start]['x'], G_dual.nodes[start]['y']) == d['geom'].coords[0]:
            s_x, s_y = d['geom'].coords[0]
            m_x, m_y = d['geom'].coords[1]
            e_x, e_y = d['geom'].coords[-1]
        else:
            s_x, s_y = d['geom'].coords[-1]
            m_x, m_y = d['geom'].coords[1]
            e_x, e_y = d['geom'].coords[0]
        start_bearing = np.rad2deg(np.arctan2(m_y - s_y, m_x - s_x)).round()
        end_bearing = np.rad2deg(np.arctan2(e_y - m_y, e_x - m_x)).round()
        if (start, end) == ('0_1', '0_2'):
            assert (start_bearing, end_bearing) == (-60, 60)
        elif (start, end) == ('0_1', '1_2'):
            assert (start_bearing, end_bearing) == (120, 0)
        elif (start, end) == ('0_1', '1_3'):
            assert (start_bearing, end_bearing) == (120, 60)
        elif (start, end) == ('0_2', '1_2'):
            assert (start_bearing, end_bearing) == (60, 180)
        elif (start, end) == ('0_2', '2_3'):
            assert (start_bearing, end_bearing) == (60, 120)
        elif (start, end) == ('1_2', '1_3'):
            assert (start_bearing, end_bearing) == (180, 60)
        elif (start, end) == ('1_2', '2_3'):
            assert (start_bearing, end_bearing) == (0, 120)
        elif (start, end) == ('1_3', '2_3'):
            assert (start_bearing, end_bearing) == (60, -60)

    # complexify the geoms to check with and without kinks, and in mixed forward and reverse directions
    # see if any issues arise
    G = primal_graph.copy()
    for i, (s, e, k, d) in enumerate(G.edges(data=True, keys=True)):
        # add a kink to every second geom
        if i % 2 == 0:
            geom = d['geom']
            start = geom.coords[0]
            end = geom.coords[-1]
            # bump the new midpoint coordinates
            mid = list(geom.centroid.coords[0])
            mid[0] += 10
            mid[1] -= 10
            # append 3d coord to check behaviour on 3d data
            kinked_3d_geom = []
            for n in [start, mid, end]:
                n = list(n)
                n.append(10)
                kinked_3d_geom.append(n)
            G[s][e][k]['geom'] = geometry.LineString(kinked_3d_geom)
        # flip every third geom
        if i % 3 == 0:
            flipped_coords = np.fliplr(d['geom'].coords.xy)
            G[s][e][k]['geom'] = geometry.LineString([[x, y] for x, y in zip(flipped_coords[0], flipped_coords[1])])
    G_dual = graphs.nX_to_dual(G)
    # from cityseer.tools import plot
    # plot.plot_nX_primal_or_dual(primal_graph=G, dual_graph=G_dual, plot_geoms=True, labels=True, node_size=80)
    # expected counts after casting the kinked / flipped primal graph to dual
    assert G_dual.number_of_nodes() == 79
    assert G_dual.number_of_edges() == 155
    for s, e in G_dual.edges():
        assert G_dual.number_of_edges(s, e) == 1
Example #5
def test_aggregate_landuses_categorical_components(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # set parameters
    betas = np.array([0.02, 0.01, 0.005, 0.0025])
    distances = networks.distance_from_beta(betas)
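    # distance_from_beta inverts beta = -log(w_min) / d: with cityseer's
    # documented default min threshold weight of ~0.01831564 (= exp(-4)) this
    # yields d = 4 / beta, i.e. 200, 400, 800 and 1600m for the betas above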
    qs = np.array([0, 1, 2])
    mock_categorical = mock.mock_categorical_data(len(data_map))
    landuse_classes, landuse_encodings = layers.encode_categorical(mock_categorical)
    mock_matrix = np.full((len(landuse_classes), len(landuse_classes)), 1)
    # set the keys - add shuffling to be sure various orders work
    hill_keys = np.arange(4)
    np.random.shuffle(hill_keys)
    non_hill_keys = np.arange(3)
    np.random.shuffle(non_hill_keys)
    ac_keys = np.array([1, 2, 5])
    np.random.shuffle(ac_keys)
    # generate
    mu_data_hill, mu_data_other, ac_data, ac_data_wt = data.aggregate_landuses(node_data,
                                                                               edge_data,
                                                                               node_edge_map,
                                                                               data_map,
                                                                               distances,
                                                                               betas,
                                                                               landuse_encodings=landuse_encodings,
                                                                               qs=qs,
                                                                               mixed_use_hill_keys=hill_keys,
                                                                               mixed_use_other_keys=non_hill_keys,
                                                                               accessibility_keys=ac_keys,
                                                                               cl_disparity_wt_matrix=mock_matrix,
                                                                               angular=False)
    # hill
    hill = mu_data_hill[np.where(hill_keys == 0)][0]
    hill_branch_wt = mu_data_hill[np.where(hill_keys == 1)][0]
    hill_pw_wt = mu_data_hill[np.where(hill_keys == 2)][0]
    hill_disp_wt = mu_data_hill[np.where(hill_keys == 3)][0]
    # non hill
    shannon = mu_data_other[np.where(non_hill_keys == 0)][0]
    gini = mu_data_other[np.where(non_hill_keys == 1)][0]
    raos = mu_data_other[np.where(non_hill_keys == 2)][0]
    # access non-weighted
    ac_1_nw = ac_data[np.where(ac_keys == 1)][0]
    ac_2_nw = ac_data[np.where(ac_keys == 2)][0]
    ac_5_nw = ac_data[np.where(ac_keys == 5)][0]
    # access weighted
    ac_1_w = ac_data_wt[np.where(ac_keys == 1)][0]
    ac_2_w = ac_data_wt[np.where(ac_keys == 2)][0]
    ac_5_w = ac_data_wt[np.where(ac_keys == 5)][0]
    # test manual metrics against all nodes
    mu_max_unique = len(landuse_classes)
    # test against various distances
    for d_idx in range(len(distances)):
        dist_cutoff = distances[d_idx]
        beta = betas[d_idx]
        for src_idx in range(len(primal_graph)):
            reachable_data, reachable_data_dist, tree_preds = data.aggregate_to_src_idx(src_idx,
                                                                                        node_data,
                                                                                        edge_data,
                                                                                        node_edge_map,
                                                                                        data_map,
                                                                                        dist_cutoff)
            # counts of each class type (array length per max unique classes - not just those within max distance)
            cl_counts = np.full(mu_max_unique, 0)
            # nearest of each class type (likewise)
            cl_nearest = np.full(mu_max_unique, np.inf)
            # aggregate
            a_1_nw = 0
            a_2_nw = 0
            a_5_nw = 0
            a_1_w = 0
            a_2_w = 0
            a_5_w = 0
            # iterate reachable
            for data_idx, (reachable, data_dist) in enumerate(zip(reachable_data, reachable_data_dist)):
                if not reachable:
                    continue
                cl = landuse_encodings[data_idx]
                # double check distance is within threshold
                assert data_dist <= dist_cutoff
                # update the class counts
                cl_counts[cl] += 1
                # if distance is nearer, update the nearest distance array too
                if data_dist < cl_nearest[cl]:
                    cl_nearest[cl] = data_dist
                # aggregate accessibility codes
                if cl == 1:
                    a_1_nw += 1
                    a_1_w += np.exp(-beta * data_dist)
                elif cl == 2:
                    a_2_nw += 1
                    a_2_w += np.exp(-beta * data_dist)
                elif cl == 5:
                    a_5_nw += 1
                    a_5_w += np.exp(-beta * data_dist)
            # assertions
            assert ac_1_nw[d_idx, src_idx] == a_1_nw
            assert ac_2_nw[d_idx, src_idx] == a_2_nw
            assert ac_5_nw[d_idx, src_idx] == a_5_nw

            assert ac_1_w[d_idx, src_idx] == a_1_w
            assert ac_2_w[d_idx, src_idx] == a_2_w
            assert ac_5_w[d_idx, src_idx] == a_5_w

            assert hill[0, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 0)
            assert hill[1, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 1)
            assert hill[2, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 2)

            assert hill_branch_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 0, beta)
            assert hill_branch_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 1, beta)
            assert hill_branch_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 2, beta)

            assert hill_pw_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 0, beta)
            assert hill_pw_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 1, beta)
            assert hill_pw_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 2, beta)

            assert hill_disp_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 0)
            assert hill_disp_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 1)
            assert hill_disp_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 2)

            assert shannon[d_idx, src_idx] == diversity.shannon_diversity(cl_counts)
            assert gini[d_idx, src_idx] == diversity.gini_simpson_diversity(cl_counts)
            assert raos[d_idx, src_idx] == diversity.raos_quadratic_diversity(cl_counts, mock_matrix)

    # check that angular is passed-through
    # actual angular tests happen in test_shortest_path_tree()
    # here the emphasis is simply on checking that the angular instruction gets chained through

    # setup dual data
    G_dual = graphs.nX_to_dual(primal_graph)
    node_labels_dual, node_data_dual, edge_data_dual, node_edge_map_dual = graphs.graph_maps_from_nX(G_dual)
    data_dict_dual = mock.mock_data_dict(G_dual, random_seed=13)
    data_uids_dual, data_map_dual = layers.data_map_from_dict(data_dict_dual)
    data_map_dual = data.assign_to_network(data_map_dual, node_data_dual, edge_data_dual, node_edge_map_dual, 500)
    mock_categorical = mock.mock_categorical_data(len(data_map_dual))
    landuse_classes_dual, landuse_encodings_dual = layers.encode_categorical(mock_categorical)
    mock_matrix = np.full((len(landuse_classes_dual), len(landuse_classes_dual)), 1)

    mu_hill_dual, mu_other_dual, ac_dual, ac_wt_dual = data.aggregate_landuses(node_data_dual,
                                                                               edge_data_dual,
                                                                               node_edge_map_dual,
                                                                               data_map_dual,
                                                                               distances,
                                                                               betas,
                                                                               landuse_encodings_dual,
                                                                               qs=qs,
                                                                               mixed_use_hill_keys=hill_keys,
                                                                               mixed_use_other_keys=non_hill_keys,
                                                                               accessibility_keys=ac_keys,
                                                                               cl_disparity_wt_matrix=mock_matrix,
                                                                               angular=True)

    mu_hill_dual_sidestep, mu_other_dual_sidestep, ac_dual_sidestep, ac_wt_dual_sidestep = \
        data.aggregate_landuses(node_data_dual,
                                edge_data_dual,
                                node_edge_map_dual,
                                data_map_dual,
                                distances,
                                betas,
                                landuse_encodings_dual,
                                qs=qs,
                                mixed_use_hill_keys=hill_keys,
                                mixed_use_other_keys=non_hill_keys,
                                accessibility_keys=ac_keys,
                                cl_disparity_wt_matrix=mock_matrix,
                                angular=False)

    assert not np.allclose(mu_hill_dual, mu_hill_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(mu_other_dual, mu_other_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(ac_dual, ac_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(ac_wt_dual, ac_wt_dual_sidestep, atol=0.001, rtol=0)
Example #6
async def centrality_dual(db_config, nodes_table, links_table, city_pop_id,
                          distances):
    logger.info(f'Loading graph for city: {city_pop_id} '
                f'derived from table: {nodes_table}')
    G = await postGIS_to_networkX(db_config, nodes_table, links_table,
                                  city_pop_id)
    if len(G) == 0:
        return
    logger.info('Casting to dual')
    G = graphs.nX_to_dual(G)  # convert to dual
    logger.info('Generating node map and edge map')
    N = networks.NetworkLayerFromNX(G, distances=distances)
    # the round trip graph is needed for the generated lengths, angles, etc.
    logger.info('Making round-trip graph')
    G_round_trip = N.to_networkX()

    db_con = await asyncpg.connect(**db_config)
    # ADD DUAL GRAPH'S VERTICES TABLE
    logger.info('Preparing dual nodes table')
    nodes_table_name_only = nodes_table
    if '.' in nodes_table_name_only:
        nodes_table_name_only = nodes_table_name_only.split('.')[-1]
    await db_con.execute(f'''
    -- add dual nodes table
    CREATE TABLE IF NOT EXISTS {nodes_table}_dual (
        id text PRIMARY KEY,
        city_pop_id int,
        within bool,
        geom geometry(Point, 27700)
    );
    CREATE INDEX IF NOT EXISTS geom_idx_{nodes_table_name_only}_dual
        ON {nodes_table}_dual USING GIST (geom);
    CREATE INDEX IF NOT EXISTS city_pop_idx_{nodes_table_name_only}_dual
        ON {nodes_table}_dual (city_pop_id);
    ''')

    logger.info('Preparing dual nodes data')
    dual_nodes_data = []
    for n, d in G_round_trip.nodes(data=True):
        dual_nodes_data.append([
            n,
            city_pop_id,
            d['live'],  # within
            d['x'],
            d['y']
        ])

    logger.info('Writing dual nodes to DB')
    await db_con.executemany(
        f'''
        INSERT INTO {nodes_table}_dual (id, city_pop_id, within, geom)
        VALUES ($1, $2, $3, ST_SetSRID(ST_MakePoint($4, $5), 27700))
        ON CONFLICT DO NOTHING;
    ''', dual_nodes_data)

    logger.info('Preparing dual edges table')
    links_table_name_only = links_table
    if '.' in links_table_name_only:
        links_table_name_only = links_table_name_only.split('.')[-1]
    await db_con.execute(f'''
    -- add dual links table
    CREATE TABLE IF NOT EXISTS {links_table}_dual (
      id text PRIMARY KEY,
      parent_id text,
      city_pop_id int,
      node_a text,
      node_b text,
      distance real,
      angle real,
      impedance_factor real,
      geom geometry(Linestring, 27700)
    );
    CREATE INDEX IF NOT EXISTS city_pop_idx_{links_table_name_only}_dual 
        ON {links_table}_dual (city_pop_id);
    CREATE INDEX IF NOT EXISTS geom_idx_{links_table_name_only}_dual 
        ON {links_table}_dual USING GIST (geom);
    ''')

    # prepare the dual edges and nodes tables
    logger.info('Preparing data for dual edges table')
    dual_edge_data = []
    parent_primal_counter = {}
    for s, e, d in G_round_trip.edges(data=True):
        # number each of the new dual edges sequentially
        # based on the original parent primal node
        primal_parent = d['parent_primal_node']
        if primal_parent not in parent_primal_counter:
            parent_primal_counter[primal_parent] = 1
        else:
            parent_primal_counter[primal_parent] += 1
        label = f'{primal_parent}_{parent_primal_counter[primal_parent]}'
        # add the data tuple
        dual_edge_data.append(
            (label, primal_parent, city_pop_id, s, e, d['length'],
             d['angle_sum'], d['imp_factor'], d['geom'].wkb_hex))

    logger.info('Writing dual edges to DB')
    await db_con.executemany(
        f'''
    INSERT INTO {links_table}_dual (
        id,
        parent_id,
        city_pop_id,
        node_a,
        node_b,
        distance,
        angle,
        impedance_factor,
        geom)
    VALUES ($1, $2, $3, $4, $5, $6, $7, $8, ST_SetSRID($9::geometry, 27700))
    ON CONFLICT DO NOTHING;
    ''', dual_edge_data)
    await db_con.close()

    logger.info('Calculating centrality paths and centralities '
                'for shortest-path heuristics')
    start = time.localtime()
    measures = [
        'node_density', 'node_harmonic', 'node_beta', 'node_betweenness',
        'node_betweenness_beta'
    ]
    N.node_centrality(measures=measures)
    time_duration = datetime.timedelta(seconds=time.mktime(time.localtime()) -
                                       time.mktime(start))
    logger.info(f'Algo duration: {time_duration}')
    logger.info('Calculating centrality paths and centralities '
                'for simplest-path heuristics')
    start = time.localtime()
    angular_measures = ['node_harmonic_angular', 'node_betweenness_angular']
    N.node_centrality(measures=angular_measures, angular=True)
    time_duration = datetime.timedelta(seconds=time.mktime(time.localtime()) -
                                       time.mktime(start))
    logger.info(f'Algo duration: {time_duration}')
    db_con = await asyncpg.connect(**db_config)
    # check that the columns exist
    # do this separately to control the order in which the columns are added (by theme instead of distance)
    for measure in measures:
        # prepend with "c_"
        c_measure = f'c_{measure}'
        await db_con.execute(f'''
        ALTER TABLE {nodes_table}_dual 
            ADD COLUMN IF NOT EXISTS {c_measure} real[];
        ''')
    for ang_measure in angular_measures:
        c_ang_measure = f'c_{ang_measure}'
        await db_con.execute(f'''
        ALTER TABLE {nodes_table}_dual
            ADD COLUMN IF NOT EXISTS {c_ang_measure} real[];
        ''')
    # writing to the database is quite slow, so write all distances at once
    logger.info('Prepping data for database')
    metrics = N.metrics_to_dict()
    bulk_data = []
    for k, v in metrics.items():
        # first check that this is a live node
        # (i.e. within the original city boundary)
        if not v['live']:
            continue
        # start node data list - initialise with node label
        node_data = [k]
        # pack centrality path data
        for measure in measures:
            inner_data = []
            for d in distances:
                inner_data.append(v['centrality'][measure][d])
            node_data.append(inner_data)
        # pack simplest path data
        for ang_measure in angular_measures:
            inner_ang_data = []
            for d in distances:
                inner_ang_data.append(v['centrality'][ang_measure][d])
            node_data.append(inner_ang_data)
        bulk_data.append(node_data)
    logger.info('Writing data back to database')
    await db_con.executemany(
        f'''
     UPDATE {nodes_table}_dual
         SET
             c_node_density = $2,
             c_node_harmonic = $3,
             c_node_beta = $4,
             c_node_betweenness = $5,
             c_node_betweenness_beta = $6,
             c_node_harmonic_angular = $7,
             c_node_betweenness_angular = $8
         WHERE id = $1
     ''', bulk_data)
    await db_con.close()
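
# A hedged invocation sketch for centrality_dual: the connection keys follow
# asyncpg.connect, while the table names, city id, and distances are
# illustrative assumptions
import asyncio

db_config = {'host': 'localhost', 'port': 5432, 'user': 'postgres',
             'password': 'postgres', 'database': 'analysis'}
asyncio.run(centrality_dual(db_config,
                            'analysis.nodes',
                            'analysis.links',
                            city_pop_id=1,
                            distances=[200, 400, 800, 1600]))
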
def test_local_centrality(diamond_graph):
    """
    manual checks for all methods against diamond graph
    measures_data is multidimensional in the form of measure_keys x distances x nodes
    """
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        diamond_graph)
    # generate dual
    diamond_graph_dual = graphs.nX_to_dual(diamond_graph)
    node_uids_dual, node_data_dual, edge_data_dual, node_edge_map_dual = graphs.graph_maps_from_nX(
        diamond_graph_dual)
    # setup distances and betas
    distances = np.array([50, 150, 250])
    betas = networks.beta_from_distance(distances)

    # NODE SHORTEST
    # set the keys - add shuffling to be sure various orders work
    node_keys = [
        'node_density', 'node_farness', 'node_cycles', 'node_harmonic',
        'node_beta', 'node_betweenness', 'node_betweenness_beta'
    ]
    np.random.shuffle(node_keys)  # in place
    measure_keys = tuple(node_keys)
    measures_data = centrality.local_node_centrality(node_data, edge_data,
                                                     node_edge_map, distances,
                                                     betas, measure_keys)
    # node density
    # additive nodes
    m_idx = node_keys.index('node_density')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [2, 3, 3, 2],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [3, 3, 3, 3],
                       atol=0.001,
                       rtol=0)
    # node farness
    # additive distances
    m_idx = node_keys.index('node_farness')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [200, 300, 300, 200],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [400, 300, 300, 400],
                       atol=0.001,
                       rtol=0)
    # node cycles
    # additive cycles
    m_idx = node_keys.index('node_cycles')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [1, 2, 2, 1],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [2, 2, 2, 2],
                       atol=0.001,
                       rtol=0)
    # node harmonic
    # additive 1 / distances
    m_idx = node_keys.index('node_harmonic')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0.02, 0.03, 0.03, 0.02],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0.025, 0.03, 0.03, 0.025],
                       atol=0.001,
                       rtol=0)
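    # worked check for node 0 at the 250m threshold:
    # nodes 1 and 2 lie 100m away and node 3 lies 200m away (via 1 or 2),
    # so 1 / 100 + 1 / 100 + 1 / 200 = 0.025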
    # node beta
    # additive exp(-beta * dist)
    m_idx = node_keys.index('node_beta')
    # beta = 0.08
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    # beta = 0.02666667
    assert np.allclose(measures_data[m_idx][1],
                       [0.1389669, 0.20845035, 0.20845035, 0.1389669],
                       atol=0.001,
                       rtol=0)
    # beta = 0.016
    assert np.allclose(measures_data[m_idx][2],
                       [0.44455525, 0.6056895, 0.6056895, 0.44455522],
                       atol=0.001,
                       rtol=0)
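    # worked check for node 0 at the 250m threshold (beta = 0.016):
    # 2 * exp(-0.016 * 100) + exp(-0.016 * 200) = 0.4445552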
    # node betweenness
    # additive 1 per node en route
    m_idx = node_keys.index('node_betweenness')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    # takes first out of multiple equidistant routes
    assert np.allclose(measures_data[m_idx][2], [0, 1, 0, 0],
                       atol=0.001,
                       rtol=0)
    # node betweenness beta
    # additive exp(-beta * dist) en route
    m_idx = node_keys.index('node_betweenness_beta')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)  # beta = 0.08
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)  # beta = 0.02666667
    # takes first out of multiple equidistant routes
    # beta evaluated over 200m distance from 3 to 0 via node 1
    assert np.allclose(measures_data[m_idx][2],
                       [0, 0.0407622, 0, 0])  # beta = 0.016

    # NODE SIMPLEST
    node_keys_angular = ['node_harmonic_angular', 'node_betweenness_angular']
    np.random.shuffle(node_keys_angular)  # in place
    measure_keys = tuple(node_keys_angular)
    measures_data = centrality.local_node_centrality(node_data,
                                                     edge_data,
                                                     node_edge_map,
                                                     distances,
                                                     betas,
                                                     measure_keys,
                                                     angular=True)
    # node harmonic angular
    # additive 1 / (1 + (to_imp / 180))
    m_idx = node_keys_angular.index('node_harmonic_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [2, 3, 3, 2],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [2.75, 3, 3, 2.75],
                       atol=0.001,
                       rtol=0)
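    # worked check for node 0 at the 250m threshold:
    # nodes 1 and 2 join without a turn, contributing 1 each, while node 3
    # arrives via a single 60 deg turn: 1 / (1 + 60 / 180) = 0.75
    # total: 2.75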
    # node betweenness angular
    # additive 1 per node en simplest route
    m_idx = node_keys_angular.index('node_betweenness_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 1, 0, 0],
                       atol=0.001,
                       rtol=0)

    # NODE SIMPLEST ON DUAL
    node_keys_angular = ['node_harmonic_angular', 'node_betweenness_angular']
    np.random.shuffle(node_keys_angular)  # in place
    measure_keys = tuple(node_keys_angular)
    measures_data = centrality.local_node_centrality(node_data_dual,
                                                     edge_data_dual,
                                                     node_edge_map_dual,
                                                     distances,
                                                     betas,
                                                     measure_keys,
                                                     angular=True)
    # node_uids_dual = ('0_1', '0_2', '1_2', '1_3', '2_3')
    # node harmonic angular
    # additive 1 / (1 + (to_imp / 180))
    m_idx = node_keys_angular.index('node_harmonic_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [1.95, 1.95, 2.4, 1.95, 1.95],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [2.45, 2.45, 2.4, 2.45, 2.45],
                       atol=0.001,
                       rtol=0)
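    # worked check for '0_1' at the 150m threshold: '0_2' and '1_2' join
    # across 120 deg turns, giving 2 * 1 / (1 + 120 / 180) = 1.2, and '1_3'
    # joins across a 60 deg turn, giving 0.75, for a total of 1.95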
    # node betweenness angular
    # additive 1 per node en simplest route
    m_idx = node_keys_angular.index('node_betweenness_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 0, 0, 1, 1],
                       atol=0.001,
                       rtol=0)

    # SEGMENT SHORTEST
    segment_keys = [
        'segment_density', 'segment_harmonic', 'segment_beta',
        'segment_betweenness'
    ]
    np.random.shuffle(segment_keys)  # in place
    measure_keys = tuple(segment_keys)
    measures_data = centrality.local_segment_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys,
                                                        angular=False)
    # segment density
    # additive segment lengths
    m_idx = segment_keys.index('segment_density')
    assert np.allclose(measures_data[m_idx][0], [100, 150, 150, 100],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [400, 500, 500, 400],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [500, 500, 500, 500],
                       atol=0.001,
                       rtol=0)
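    # worked check at the 50m threshold:
    # node 0 reaches 50m into each of its two adjoining segments: 2 * 50 = 100
    # nodes 1 and 2 adjoin three segments each: 3 * 50 = 150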
    # segment harmonic
    # segments are potentially approached from two directions
    # i.e. along respective shortest paths to intersection of shortest routes
    # i.e. in this case, the midpoint of the middle segment is apportioned in either direction
    # additive log(b) - log(a) + log(d) - log(c)
    # nearer distance capped at 1m to avert negative numbers
    m_idx = segment_keys.index('segment_harmonic')
    assert np.allclose(measures_data[m_idx][0],
                       [7.824046, 11.736069, 11.736069, 7.824046],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1],
                       [10.832201, 15.437371, 15.437371, 10.832201],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2],
                       [11.407564, 15.437371, 15.437371, 11.407565],
                       atol=0.001,
                       rtol=0)
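    # worked check for node 0 at the 50m threshold (nearer end capped at 1m):
    # 2 * (log(50) - log(1)) = 7.824046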
    # segment beta
    # additive (np.exp(-beta * b) - np.exp(-beta * a)) / -beta + (np.exp(-beta * d) - np.exp(-beta * c)) / -beta
    # beta = 0 resolves to b - a and avoids division through zero
    m_idx = segment_keys.index('segment_beta')
    assert np.allclose(measures_data[m_idx][0],
                       [24.542109, 36.813164, 36.813164, 24.542109],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1],
                       [77.46391, 112.358284, 112.358284, 77.46391],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2],
                       [133.80205, 177.43903, 177.43904, 133.80205],
                       atol=0.001,
                       rtol=0)
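    # worked check for node 0 at the 50m threshold (beta = 0.08):
    # 2 * (exp(-0.08 * 50) - exp(0)) / -0.08 = 24.542109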
    # segment betweenness
    # similar formulation to segment beta: start and end segment of each betweenness pair assigned to intervening nodes
    # distance thresholds are computed using the inside edges of the segments
    # so if the segments are touching, they will count up to the threshold distance...
    m_idx = segment_keys.index('segment_betweenness')
    assert np.allclose(measures_data[m_idx][0], [0, 24.542109, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 69.78874, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 99.76293, 0, 0],
                       atol=0.001,
                       rtol=0)
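    # note: the 50m value at node 1 (24.542109) mirrors node 0's segment_beta
    # value at 50m, reflecting the shared start / end segment formulation above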

    # SEGMENT SIMPLEST ON PRIMAL (NO DOUBLE COUNTING)
    segment_keys_angular = [
        'segment_harmonic_hybrid', 'segment_betweeness_hybrid'
    ]
    np.random.shuffle(segment_keys_angular)  # in place
    measure_keys = tuple(segment_keys_angular)
    measures_data = centrality.local_segment_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys,
                                                        angular=True)
    # segment harmonic hybrid
    # additive segment lengths divided through angular impedance
    # (f - e) / (1 + (ang / 180))
    m_idx = segment_keys_angular.index('segment_harmonic_hybrid')
    assert np.allclose(measures_data[m_idx][0], [100, 150, 150, 100],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [305, 360, 360, 305],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [410, 420, 420, 410],
                       atol=0.001,
                       rtol=0)
    # segment betweenness hybrid
    # additive segment lengths / (1 + (ang / 180)) en simplest route
    m_idx = segment_keys_angular.index('segment_betweeness_hybrid')
    assert np.allclose(measures_data[m_idx][0], [0, 75, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 150, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 150, 0, 0],
                       atol=0.001,
                       rtol=0)