Example 1
def test_Network_Layer_From_nX(primal_graph):
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    x_arr = node_data[:, 0]
    y_arr = node_data[:, 1]
    betas = np.array([0.04, 0.02])
    distances = networks.distance_from_beta(betas)

    # test the NetworkLayerFromNX class
    for d, b in zip([distances, None], [None, betas]):
        for angular in [True, False]:
            N = networks.NetworkLayerFromNX(primal_graph, distances=d, betas=b)
            assert np.allclose(N.uids, node_uids, atol=0.001, rtol=0)
            assert np.allclose(N._node_data, node_data, atol=0.001, rtol=0)
            assert np.allclose(N._edge_data, edge_data, atol=0.001, rtol=0)
            assert np.allclose(N.distances, distances, atol=0.001,
                               rtol=0)  # inferred automatically when only betas provided
            assert np.allclose(N.betas, betas, atol=0.001,
                               rtol=0)  # inferred automatically when only distances provided
            assert N._min_threshold_wt == checks.def_min_thresh_wt
            assert np.allclose(N.node_x_arr, x_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_y_arr, y_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_live_arr, node_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_lengths_arr, edge_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_angles_arr, edge_data[:, 3], atol=0.001, rtol=0)
            assert np.allclose(N.edge_impedance_factors_arr, edge_data[:, 4], atol=0.001, rtol=0)
            assert np.allclose(N.edge_in_bearings_arr, edge_data[:, 5], atol=0.001, rtol=0)
            assert np.allclose(N.edge_out_bearings_arr, edge_data[:, 6], atol=0.001, rtol=0)

    # check alternate min_threshold_wt gets passed through successfully
    alt_min = 0.02
    alt_distances = networks.distance_from_beta(betas, min_threshold_wt=alt_min)
    N = networks.NetworkLayerFromNX(primal_graph, betas=betas, min_threshold_wt=alt_min)
    assert np.allclose(N.distances, alt_distances, atol=0.001, rtol=0)

    # check for malformed signatures
    with pytest.raises(TypeError):
        networks.NetworkLayerFromNX('boo', distances=distances)
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph)  # no betas or distances
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph, distances=None, betas=None)
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph, distances=[])
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph, betas=[])
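For context, the distance/beta inference asserted above can be reproduced with a short standalone sketch. The decay relationship and the default min_threshold_wt value are assumptions based on how these tests use networks.distance_from_beta, not a verbatim copy of the library internals:

# a minimal sketch: with spatial weights decaying as exp(-beta * d), the distance
# threshold is where the weight falls to min_threshold_wt, i.e. d = -ln(w_min) / beta
import numpy as np

def distance_from_beta_sketch(betas, min_threshold_wt=0.01831563888873418):
    # hypothetical stand-in for networks.distance_from_beta
    betas = np.asarray(betas, dtype=float)
    return -np.log(min_threshold_wt) / betas

print(distance_from_beta_sketch([0.04, 0.02]))  # -> [100. 200.]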
Example 2
    def __init__(
            self,
            networkX_multigraph: nx.MultiGraph,
            distances: list | tuple | np.ndarray | None = None,
            betas: list | tuple | np.ndarray | None = None,
            min_threshold_wt: float = checks.def_min_thresh_wt
    ) -> NetworkLayer:
        """
        Directly transposes a `networkX` `MultiGraph` into a `NetworkLayer`. This class simplifies the conversion of
        a `networkX` `MultiGraph` by calling [`graph_maps_from_nX`](/tools/graphs/#graph_maps_from_nx) internally.
        Methods and properties are inherited from the parent [`NetworkLayer`](#class-networklayer) class.

        Parameters
        ----------
        networkX_multigraph
            A `networkX` `MultiGraph`.
            
            `x` and `y` node attributes are required. The `live` node attribute is optional, but recommended. See
            [`NetworkLayer`](#class-networklayer) for more information about what these attributes represent.
        distances
            See [`NetworkLayer`](#class-networklayer).
        betas
            See [`NetworkLayer`](#class-networklayer).
        min_threshold_wt
            See [`NetworkLayer`](#class-networklayer).

        Returns
        -------
        NetworkLayer
            A `NetworkLayer`.
        """
        node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
            networkX_multigraph)
        super().__init__(node_uids, node_data, edge_data, node_edge_map,
                         distances, betas, min_threshold_wt)
        # keep reference to networkX graph
        self.networkX_multigraph = networkX_multigraph
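A minimal usage sketch for the class above; the mock and graphs helpers are the same cityseer tools exercised elsewhere in these examples, though the exact preparation steps here are illustrative assumptions:

from cityseer.metrics import networks
from cityseer.tools import graphs, mock

G = mock.mock_graph()  # toy networkX MultiGraph with x / y node attributes
G = graphs.nX_simple_geoms(G)  # add straight-line edge geoms
# providing only distances: the betas are inferred (and vice versa)
N = networks.NetworkLayerFromNX(G, distances=[200, 400])
print(N.betas)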
Example 3
def test_Network_Layer(primal_graph):
    # manual graph maps for comparison
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    x_arr = node_data[:, 0]
    y_arr = node_data[:, 1]
    betas = [0.02, 0.005]
    distances = networks.distance_from_beta(betas)

    # test the NetworkLayer class
    for d, b in zip([distances, None], [None, betas]):
        for angular in [True, False]:
            N = networks.NetworkLayer(node_uids,
                                      node_data,
                                      edge_data,
                                      node_edge_map,
                                      distances=d,
                                      betas=b)
            assert np.allclose(N.uids, node_uids, atol=0.001, rtol=0)
            assert np.allclose(N._node_data, node_data, atol=0.001, rtol=0)
            assert np.allclose(N._edge_data, edge_data, atol=0.001, rtol=0)
            assert np.allclose(N.distances, distances, atol=0.001,
                               rtol=0)  # inferred automatically when only betas provided
            assert np.allclose(N.betas, betas, atol=0.001,
                               rtol=0)  # inferred automatically when only distances provided
            assert N._min_threshold_wt == checks.def_min_thresh_wt
            assert np.allclose(N.node_x_arr, x_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_y_arr, y_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_live_arr, node_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_lengths_arr, edge_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_angles_arr, edge_data[:, 3], atol=0.001, rtol=0)
            assert np.allclose(N.edge_impedance_factors_arr, edge_data[:, 4], atol=0.001, rtol=0)
            assert np.allclose(N.edge_in_bearings_arr, edge_data[:, 5], atol=0.001, rtol=0)
            assert np.allclose(N.edge_out_bearings_arr, edge_data[:, 6], atol=0.001, rtol=0)

    # test round-trip graph to and from NetworkLayer
    N = networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances=distances)
    G_round_trip = N.to_networkX()
    # graph_maps_from_nX generates implicit live (all True) and weight (all 1) attributes if missing
    # i.e. can't simply check that all nodes equal, so check properties manually
    for n, d in primal_graph.nodes(data=True):
        assert n in G_round_trip
        assert G_round_trip.nodes[n]['x'] == d['x']
        assert G_round_trip.nodes[n]['y'] == d['y']
    # edges can be checked en masse
    assert G_round_trip.edges == primal_graph.edges
    # check alternate min_threshold_wt gets passed through successfully
    alt_min = 0.02
    alt_distances = networks.distance_from_beta(betas, min_threshold_wt=alt_min)
    N = networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              betas=betas,
                              min_threshold_wt=alt_min)
    assert np.allclose(N.distances, alt_distances, atol=0.001, rtol=0)
    # check for malformed signatures
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids[:-1],
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data[:, :-1],
                              edge_data,
                              node_edge_map,
                              distances)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data[:, :-1],
                              node_edge_map,
                              distances)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map)  # no betas or distances
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances=None,
                              betas=None)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances=[])
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              betas=[])
Example 4
def test_graph_maps_from_nX(diamond_graph):
    # test maps vs. networkX
    G_test = diamond_graph.copy()
    G_test_dual = graphs.nX_to_dual(G_test)
    for G, is_dual in zip((G_test, G_test_dual), (False, True)):
        # set some random 'live' statuses
        for n in G.nodes():
            G.nodes[n]['live'] = bool(np.random.randint(0, 2))

        # generate test maps
        node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
        # debug plot
        # plot.plot_graphs(primal=G)
        # plot.plot_graph_maps(node_uids, node_data, edge_data)

        # run check (this checks node to edge maps internally)
        checks.check_network_maps(node_data, edge_data, node_edge_map)

        # check lengths
        assert len(node_uids) == len(node_data) == G.number_of_nodes()
        # edge count is doubled: one directed entry per direction
        assert len(edge_data) == G.number_of_edges() * 2

        # check node maps (idx and label match in this case...)
        for n_label in node_uids:
            n_idx = node_uids.index(n_label)
            assert node_data[n_idx][0] == G.nodes[n_label]['x']
            assert node_data[n_idx][1] == G.nodes[n_label]['y']
            assert node_data[n_idx][2] == G.nodes[n_label]['live']

        # check edge maps (idx and label match in this case...)
        for start, end, length, angle, imp_fact, start_bear, end_bear in edge_data:
            # print(f'elif (start, end) == ({start}, {end}):')
            # print(f'assert (length, angle, imp_fact, start_bear, end_bear) == ({length}, {angle}, {imp_fact}, {start_bear}, {end_bear})')
            if not is_dual:
                if (start, end) == (0.0, 1.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 120.0, 120.0)
                elif (start, end) == (0.0, 2.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 60.0, 60.0)
                elif (start, end) == (1.0, 0.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -60.0, -60.0)
                elif (start, end) == (1.0, 2.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 0.0, 0.0)
                elif (start, end) == (1.0, 3.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 60.0, 60.0)
                elif (start, end) == (2.0, 0.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -120.0, -120.0)
                elif (start, end) == (2.0, 1.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 180.0, 180.0)
                elif (start, end) == (2.0, 3.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, 120.0, 120.0)
                elif (start, end) == (3.0, 1.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -120.0, -120.0)
                elif (start, end) == (3.0, 2.0):
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 0.0, 1.0, -60.0, -60.0)
                else:
                    raise KeyError('Unmatched edge.')
            else:
                # for debugging the uid labels behind the edge indices
                # print(node_uids[int(start)], node_uids[int(end)])
                if (start, end) == (0.0, 1.0):  # 0_1 0_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -60.0, 60.0)
                elif (start, end) == (0.0, 2.0):  # 0_1 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 120.0, 0.0)
                elif (start, end) == (0.0, 3.0):  # 0_1 1_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, 120.0, 60.0)
                elif (start, end) == (1.0, 0.0):  # 0_2 0_1
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -120.0, 120.0)
                elif (start, end) == (1.0, 2.0):  # 0_2 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 60.0, 180.0)
                elif (start, end) == (1.0, 4.0):  # 0_2 2_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, 60.0, 120.0)
                elif (start, end) == (2.0, 0.0):  # 1_2 0_1
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 180.0, -60.0)
                elif (start, end) == (2.0, 1.0):  # 1_2 0_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 0.0, -120.0)
                elif (start, end) == (2.0, 3.0):  # 1_2 1_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 180.0, 60.0)
                elif (start, end) == (2.0, 4.0):  # 1_2 2_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 0.0, 120.0)
                elif (start, end) == (3.0, 0.0):  # 1_3 0_1
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, -120.0, -60.0)
                elif (start, end) == (3.0, 2.0):  # 1_3 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -120.0, 0.0)
                elif (start, end) == (3.0, 4.0):  # 1_3 2_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 60.0, -60.0)
                elif (start, end) == (4.0, 1.0):  # 2_3 0_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 60.0, 1.0, -60.0, -120.0)
                elif (start, end) == (4.0, 2.0):  # 2_3 1_2
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, -60.0, 180.0)
                elif (start, end) == (4.0, 3.0):  # 2_3 1_3
                    assert (length, angle, imp_fact, start_bear, end_bear) == (100.0, 120.0, 1.0, 120.0, -120.0)
                else:
                    raise KeyError('Unmatched edge.')
    # check that missing geoms throw an error
    G_test = diamond_graph.copy()
    for s, e, k in G_test.edges(keys=True):
        # delete geom from first edge and break
        del G_test[s][e][k]['geom']
        break
    with pytest.raises(KeyError):
        graphs.graph_maps_from_nX(G_test)

    # check that non-LineString geoms throw an error
    G_test = diamond_graph.copy()
    for s, e, k in G_test.edges(keys=True):
        G_test[s][e][k]['geom'] = geometry.Point([G_test.nodes[s]['x'], G_test.nodes[s]['y']])
    with pytest.raises(TypeError):
        graphs.graph_maps_from_nX(G_test)

    # check that missing node keys throw an error
    G_test = diamond_graph.copy()
    for k in ['x', 'y']:
        for n in G_test.nodes():
            # delete key from first node and break
            del G_test.nodes[n][k]
            break
        with pytest.raises(KeyError):
            graphs.graph_maps_from_nX(G_test)

    # check that invalid imp_factors are caught
    G_test = diamond_graph.copy()
    # corrupt imp_factor value and break
    for corrupt_val in [-1, -np.inf, np.nan]:
        for s, e, k in G_test.edges(keys=True):
            G_test[s][e][k]['imp_factor'] = corrupt_val
            break
        with pytest.raises(ValueError):
            graphs.graph_maps_from_nX(G_test)
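For reference, the column layout that the node and edge map assertions above rely on, restated as a small self-contained snippet (the layout is derived from this test itself rather than from separate documentation):

import numpy as np

# node_data columns: 0 = x, 1 = y, 2 = live
# edge_data columns: 0 = start node idx, 1 = end node idx, 2 = length,
#                    3 = angle sum, 4 = imp_factor, 5 = in bearing, 6 = out bearing
toy_edge = np.array([0.0, 1.0, 100.0, 0.0, 1.0, 120.0, 120.0])
start, end, length, angle, imp_fact, start_bear, end_bear = toy_edge
print(f'{int(start)} -> {int(end)}: {length}m at bearing {start_bear}')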
Example 5
def test_nX_from_graph_maps(primal_graph):
    # also see test_networks.test_to_networkX for tests on implementation via Network layer

    # check round trip to and from graph maps results in same graph
    # explicitly set live params for equality checks
    # graph_maps_from_nX generates these implicitly if missing
    for n in primal_graph.nodes():
        primal_graph.nodes[n]['live'] = bool(np.random.randint(0, 2))

    # test directly from and to graph maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data, node_edge_map)
    assert list(G_round_trip.nodes) == list(primal_graph.nodes)
    assert list(G_round_trip.edges) == list(primal_graph.edges)

    # check with metrics dictionary
    N = networks.NetworkLayerFromNX(primal_graph, distances=[500, 1000])

    N.node_centrality(measures=['node_harmonic'])
    data_dict = mock.mock_data_dict(primal_graph)
    landuse_labels = mock.mock_categorical_data(len(data_dict))
    D = layers.DataLayerFromDict(data_dict)
    D.assign_to_network(N, max_dist=400)
    D.compute_landuses(landuse_labels,
                       mixed_use_keys=['hill', 'shannon'],
                       accessibility_keys=['a', 'c'],
                       qs=[0, 1])
    metrics_dict = N.metrics_to_dict()
    # without backbone
    G_round_trip_data = graphs.nX_from_graph_maps(node_uids,
                                                  node_data,
                                                  edge_data,
                                                  node_edge_map,
                                                  metrics_dict=metrics_dict)
    for uid, metrics in metrics_dict.items():
        assert G_round_trip_data.nodes[uid]['metrics'] == metrics
    # with backbone
    G_round_trip_data = graphs.nX_from_graph_maps(node_uids,
                                                  node_data,
                                                  edge_data,
                                                  node_edge_map,
                                                  networkX_multigraph=primal_graph,
                                                  metrics_dict=metrics_dict)
    for uid, metrics in metrics_dict.items():
        assert G_round_trip_data.nodes[uid]['metrics'] == metrics

    # test with decomposed
    G_decomposed = graphs.nX_decompose(primal_graph, decompose_max=20)
    # set live explicitly
    for n in G_decomposed.nodes():
        G_decomposed.nodes[n]['live'] = bool(np.random.randint(0, 2))
    node_uids_d, node_data_d, edge_data_d, node_edge_map_d = graphs.graph_maps_from_nX(G_decomposed)

    G_round_trip_d = graphs.nX_from_graph_maps(node_uids_d, node_data_d, edge_data_d, node_edge_map_d)
    assert list(G_round_trip_d.nodes) == list(G_decomposed.nodes)
    for n, iter_node_data in G_round_trip_d.nodes(data=True):
        assert n in G_decomposed
        assert iter_node_data['live'] == G_decomposed.nodes[n]['live']
        assert iter_node_data['x'] == G_decomposed.nodes[n]['x']
        assert iter_node_data['y'] == G_decomposed.nodes[n]['y']
    assert G_round_trip_d.edges == G_decomposed.edges

    # error checks for when using backbone graph:
    # mismatching numbers of nodes
    corrupt_G = primal_graph.copy()
    corrupt_G.remove_node(0)
    with pytest.raises(ValueError):
        graphs.nX_from_graph_maps(node_uids,
                                  node_data,
                                  edge_data,
                                  node_edge_map,
                                  networkX_multigraph=corrupt_G)
    # mismatching node uid
    with pytest.raises(KeyError):
        corrupt_node_uids = list(node_uids)
        corrupt_node_uids[0] = 'boo'
        graphs.nX_from_graph_maps(corrupt_node_uids,
                                  node_data,
                                  edge_data,
                                  node_edge_map,
                                  networkX_multigraph=primal_graph)
    # missing edge
    with pytest.raises(KeyError):
        corrupt_primal_graph = primal_graph.copy()
        corrupt_primal_graph.remove_edge(0, 1)
        graphs.nX_from_graph_maps(node_uids,
                                  node_data,
                                  edge_data,
                                  node_edge_map,
                                  networkX_multigraph=corrupt_primal_graph)
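Reading the attached metrics back off the round-trip graph is straightforward; this short continuation reuses the variables from the test body above, so it is illustrative rather than standalone:

# each round-trip node carries a 'metrics' dict mirroring the computed measures
for uid in node_uids[:3]:
    print(uid, sorted(G_round_trip_data.nodes[uid]['metrics'].keys()))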
Example 6
def test_local_aggregator_numerical_components(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # for debugging
    # from cityseer.tools import plot
    # plot.plot_graph_maps(node_uids, node_data, edge_data, data_map)
    # set parameters - use a large enough distance such that simple non-weighted checks can be run for max, mean, variance
    betas = np.array([0.00125])
    distances = networks.distance_from_beta(betas)
    mock_numerical = mock.mock_numerical_data(len(data_dict), num_arrs=2, random_seed=0)
    # compute
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.aggregate_stats(node_data,
                             edge_data,
                             node_edge_map,
                             data_map,
                             distances,
                             betas,
                             numerical_arrays=mock_numerical,
                             angular=False)
    # non-connected portions of the graph will have different stats
    # manual data plots from test_assign_to_network() show which nodes the data points are assigned to
    # connected graph is from 0 to 48 -> assigned data points are all except 5, 8, 9, 17, 18, 29, 33, 38, 48
    connected_nodes_idx = list(range(49))
    # and the respective data assigned to the connected portion of the graph
    connected_data_idx = [i for i in range(len(data_dict)) if i not in [5, 8, 9, 17, 18, 29, 33, 38, 48]]
    # isolated node = 49 -> assigned no data points
    # isolated nodes = 50 & 51 -> assigned data points = 17, 33
    # isolated loop = 52, 53, 54, 55 -> assigned data points = 5, 8, 9, 18, 29, 38, 48
    isolated_nodes_idx = [52, 53, 54, 55]
    isolated_data_idx = [5, 8, 9, 18, 29, 38, 48]
    for stats_idx in range(len(mock_numerical)):
        for d_idx in range(len(distances)):
            # max
            assert np.isnan(stats_max[stats_idx, d_idx, 49])
            assert np.allclose(stats_max[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].max(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_max[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].max(), atol=0.001, rtol=0)
            assert np.allclose(stats_max[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].max(), atol=0.001, rtol=0)
            # min
            assert np.isnan(stats_min[stats_idx, d_idx, 49])
            assert np.allclose(stats_min[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].min(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_min[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].min(), atol=0.001, rtol=0)
            assert np.allclose(stats_min[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].min(), atol=0.001, rtol=0)
            # sum
            assert stats_sum[stats_idx, d_idx, 49] == 0
            assert np.allclose(stats_sum[stats_idx, d_idx, [50, 51]],
                               mock_numerical[stats_idx, [17, 33]].sum(), atol=0.001, rtol=0)
            assert np.allclose(stats_sum[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].sum(), atol=0.001, rtol=0)
            assert np.allclose(stats_sum[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].sum(), atol=0.001, rtol=0)
            # mean
            assert np.isnan(stats_mean[stats_idx, d_idx, 49])
            assert np.allclose(stats_mean[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].mean(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_mean[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].mean(), atol=0.001, rtol=0)
            assert np.allclose(stats_mean[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].mean(), atol=0.001, rtol=0)
            # variance
            assert np.isnan(stats_variance[stats_idx, d_idx, 49])
            assert np.allclose(stats_variance[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].var(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_variance[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].var(), atol=0.001, rtol=0)
            assert np.allclose(stats_variance[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].var(), atol=0.001, rtol=0)
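The manual checks above work because the chosen distance is large enough that every data point within a component is reachable, so plain numpy reductions serve as ground truth. A self-contained illustration, where the exp(-beta * d) weighting for the *_wt variants is an assumption mirroring the weighted accessibility checks elsewhere in these tests:

import numpy as np

vals = np.array([3.0, 1.0, 4.0, 1.5])
# with all points reachable, the unweighted aggregates collapse to simple reductions
print(vals.sum(), vals.mean(), vals.var(), vals.max(), vals.min())
# the weighted variants additionally scale each value by exp(-beta * distance)
beta, dists = 0.00125, np.array([100.0, 200.0, 300.0, 400.0])
print((vals * np.exp(-beta * dists)).sum())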
Example 7
def test_local_agg_time(primal_graph):
    """
    Timing tests for landuse and stats aggregations
    """
    if 'GITHUB_ACTIONS' in os.environ:
        return
    os.environ['CITYSEER_QUIET_MODE'] = '1'

    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # use an infinite distance so that distance thresholds aren't encountered
    distances = np.array([np.inf])
    betas = networks.beta_from_distance(distances)
    qs = np.array([0, 1, 2])
    mock_categorical = mock.mock_categorical_data(len(data_map))
    landuse_classes, landuse_encodings = layers.encode_categorical(mock_categorical)
    mock_numerical = mock.mock_numerical_data(len(data_dict), num_arrs=2, random_seed=0)

    def assign_wrapper():
        data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)

    # prime the function
    assign_wrapper()
    iters = 20000
    # time and report - roughly 5.675s
    func_time = timeit.timeit(assign_wrapper, number=iters)
    print(f'assign_wrapper: {func_time} for {iters} iterations')
    assert func_time < 10

    def landuse_agg_wrapper():
        mu_data_hill, mu_data_other, ac_data, ac_data_wt = data.aggregate_landuses(node_data,
                                                                                   edge_data,
                                                                                   node_edge_map,
                                                                                   data_map,
                                                                                   distances,
                                                                                   betas,
                                                                                   mixed_use_hill_keys=np.array([0, 1]),
                                                                                   landuse_encodings=landuse_encodings,
                                                                                   qs=qs,
                                                                                   angular=False)

    # prime the function
    landuse_agg_wrapper()
    iters = 20000
    # time and report - roughly 10.10s
    func_time = timeit.timeit(landuse_agg_wrapper, number=iters)
    print(f'landuse_agg_wrapper: {func_time} for {iters} iterations')
    assert func_time < 15

    def stats_agg_wrapper():
        # compute
        data.aggregate_stats(node_data,
                             edge_data,
                             node_edge_map,
                             data_map,
                             distances,
                             betas,
                             numerical_arrays=mock_numerical,
                             angular=False)

    # prime the function
    stats_agg_wrapper()
    iters = 20000
    # time and report - roughly 4.96s
    func_time = timeit.timeit(stats_agg_wrapper, number=iters)
    print(f'stats_agg_wrapper: {func_time} for {iters} iterations')
    assert func_time < 10
Example 8
def test_aggregate_landuses_categorical_components(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # set parameters
    betas = np.array([0.02, 0.01, 0.005, 0.0025])
    distances = networks.distance_from_beta(betas)
    qs = np.array([0, 1, 2])
    mock_categorical = mock.mock_categorical_data(len(data_map))
    landuse_classes, landuse_encodings = layers.encode_categorical(mock_categorical)
    mock_matrix = np.full((len(landuse_classes), len(landuse_classes)), 1)
    # set the keys - add shuffling to be sure various orders work
    hill_keys = np.arange(4)
    np.random.shuffle(hill_keys)
    non_hill_keys = np.arange(3)
    np.random.shuffle(non_hill_keys)
    ac_keys = np.array([1, 2, 5])
    np.random.shuffle(ac_keys)
    # generate
    mu_data_hill, mu_data_other, ac_data, ac_data_wt = data.aggregate_landuses(node_data,
                                                                               edge_data,
                                                                               node_edge_map,
                                                                               data_map,
                                                                               distances,
                                                                               betas,
                                                                               landuse_encodings=landuse_encodings,
                                                                               qs=qs,
                                                                               mixed_use_hill_keys=hill_keys,
                                                                               mixed_use_other_keys=non_hill_keys,
                                                                               accessibility_keys=ac_keys,
                                                                               cl_disparity_wt_matrix=mock_matrix,
                                                                               angular=False)
    # hill
    hill = mu_data_hill[np.where(hill_keys == 0)][0]
    hill_branch_wt = mu_data_hill[np.where(hill_keys == 1)][0]
    hill_pw_wt = mu_data_hill[np.where(hill_keys == 2)][0]
    hill_disp_wt = mu_data_hill[np.where(hill_keys == 3)][0]
    # non hill
    shannon = mu_data_other[np.where(non_hill_keys == 0)][0]
    gini = mu_data_other[np.where(non_hill_keys == 1)][0]
    raos = mu_data_other[np.where(non_hill_keys == 2)][0]
    # access non-weighted
    ac_1_nw = ac_data[np.where(ac_keys == 1)][0]
    ac_2_nw = ac_data[np.where(ac_keys == 2)][0]
    ac_5_nw = ac_data[np.where(ac_keys == 5)][0]
    # access weighted
    ac_1_w = ac_data_wt[np.where(ac_keys == 1)][0]
    ac_2_w = ac_data_wt[np.where(ac_keys == 2)][0]
    ac_5_w = ac_data_wt[np.where(ac_keys == 5)][0]
    # test manual metrics against all nodes
    mu_max_unique = len(landuse_classes)
    # test against various distances
    for d_idx in range(len(distances)):
        dist_cutoff = distances[d_idx]
        beta = betas[d_idx]
        for src_idx in range(len(primal_graph)):
            reachable_data, reachable_data_dist, tree_preds = data.aggregate_to_src_idx(src_idx,
                                                                                        node_data,
                                                                                        edge_data,
                                                                                        node_edge_map,
                                                                                        data_map,
                                                                                        dist_cutoff)
            # counts of each class type (array length per max unique classes - not just those within max distance)
            cl_counts = np.full(mu_max_unique, 0)
            # nearest of each class type (likewise)
            cl_nearest = np.full(mu_max_unique, np.inf)
            # aggregate
            a_1_nw = 0
            a_2_nw = 0
            a_5_nw = 0
            a_1_w = 0
            a_2_w = 0
            a_5_w = 0
            # iterate reachable
            for data_idx, (reachable, data_dist) in enumerate(zip(reachable_data, reachable_data_dist)):
                if not reachable:
                    continue
                cl = landuse_encodings[data_idx]
                # double check distance is within threshold
                assert data_dist <= dist_cutoff
                # update the class counts
                cl_counts[cl] += 1
                # if distance is nearer, update the nearest distance array too
                if data_dist < cl_nearest[cl]:
                    cl_nearest[cl] = data_dist
                # aggregate accessibility codes
                if cl == 1:
                    a_1_nw += 1
                    a_1_w += np.exp(-beta * data_dist)
                elif cl == 2:
                    a_2_nw += 1
                    a_2_w += np.exp(-beta * data_dist)
                elif cl == 5:
                    a_5_nw += 1
                    a_5_w += np.exp(-beta * data_dist)
            # assertions
            assert ac_1_nw[d_idx, src_idx] == a_1_nw
            assert ac_2_nw[d_idx, src_idx] == a_2_nw
            assert ac_5_nw[d_idx, src_idx] == a_5_nw

            assert ac_1_w[d_idx, src_idx] == a_1_w
            assert ac_2_w[d_idx, src_idx] == a_2_w
            assert ac_5_w[d_idx, src_idx] == a_5_w

            assert hill[0, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 0)
            assert hill[1, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 1)
            assert hill[2, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 2)

            assert hill_branch_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 0, beta)
            assert hill_branch_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 1, beta)
            assert hill_branch_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 2, beta)

            assert hill_pw_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 0, beta)
            assert hill_pw_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 1, beta)
            assert hill_pw_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 2, beta)

            assert hill_disp_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 0)
            assert hill_disp_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 1)
            assert hill_disp_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 2)

            assert shannon[d_idx, src_idx] == diversity.shannon_diversity(cl_counts)
            assert gini[d_idx, src_idx] == diversity.gini_simpson_diversity(cl_counts)
            assert raos[d_idx, src_idx] == diversity.raos_quadratic_diversity(cl_counts, mock_matrix)

    # check that angular is passed-through
    # actual angular tests happen in test_shortest_path_tree()
    # here the emphasis is simply on checking that the angular instruction gets chained through

    # setup dual data
    G_dual = graphs.nX_to_dual(primal_graph)
    node_labels_dual, node_data_dual, edge_data_dual, node_edge_map_dual = graphs.graph_maps_from_nX(G_dual)
    data_dict_dual = mock.mock_data_dict(G_dual, random_seed=13)
    data_uids_dual, data_map_dual = layers.data_map_from_dict(data_dict_dual)
    data_map_dual = data.assign_to_network(data_map_dual, node_data_dual, edge_data_dual, node_edge_map_dual, 500)
    mock_categorical = mock.mock_categorical_data(len(data_map_dual))
    landuse_classes_dual, landuse_encodings_dual = layers.encode_categorical(mock_categorical)
    mock_matrix = np.full((len(landuse_classes_dual), len(landuse_classes_dual)), 1)

    mu_hill_dual, mu_other_dual, ac_dual, ac_wt_dual = data.aggregate_landuses(node_data_dual,
                                                                               edge_data_dual,
                                                                               node_edge_map_dual,
                                                                               data_map_dual,
                                                                               distances,
                                                                               betas,
                                                                               landuse_encodings_dual,
                                                                               qs=qs,
                                                                               mixed_use_hill_keys=hill_keys,
                                                                               mixed_use_other_keys=non_hill_keys,
                                                                               accessibility_keys=ac_keys,
                                                                               cl_disparity_wt_matrix=mock_matrix,
                                                                               angular=True)

    mu_hill_dual_sidestep, mu_other_dual_sidestep, ac_dual_sidestep, ac_wt_dual_sidestep = \
        data.aggregate_landuses(node_data_dual,
                                edge_data_dual,
                                node_edge_map_dual,
                                data_map_dual,
                                distances,
                                betas,
                                landuse_encodings_dual,
                                qs=qs,
                                mixed_use_hill_keys=hill_keys,
                                mixed_use_other_keys=non_hill_keys,
                                accessibility_keys=ac_keys,
                                cl_disparity_wt_matrix=mock_matrix,
                                angular=False)

    assert not np.allclose(mu_hill_dual, mu_hill_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(mu_other_dual, mu_other_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(ac_dual, ac_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(ac_wt_dual, ac_wt_dual_sidestep, atol=0.001, rtol=0)
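The hill assertions above defer to cityseer's diversity module; as a reference point, the standard Hill number of order q is a known, library-independent formula, sketched here (the q == 1 limit is exp of Shannon entropy):

import numpy as np

def hill_diversity_sketch(counts, q):
    # standard Hill number: (sum p_i^q) ** (1 / (1 - q)) for q != 1
    probs = counts[counts > 0] / counts.sum()
    if q == 1:
        return float(np.exp(-(probs * np.log(probs)).sum()))
    return float(((probs ** q).sum()) ** (1 / (1 - q)))

cl_counts = np.array([4, 2, 2, 0])
print(hill_diversity_sketch(cl_counts, 0))  # richness: 3.0 classes present
print(hill_diversity_sketch(cl_counts, 2))  # inverse Simpson: ~2.67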
Example 9
def test_assign_to_network(primal_graph):
    # create additional dead-end scenario
    primal_graph.remove_edge(14, 15)
    primal_graph.remove_edge(15, 28)
    # G = graphs.nX_auto_edge_params(G)
    G = graphs.nX_decompose(primal_graph, 50)
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
    # generate data
    data_dict = mock.mock_data_dict(G, random_seed=25)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    # override data point locations for test cases vis-a-vis isolated nodes and isolated edges
    data_map[18, :2] = [701200, 5719400]
    data_map[39, :2] = [700750, 5720025]
    data_map[26, :2] = [700400, 5719525]
    # assignments visually confirmed in plots
    data_map_1600 = data_map.copy()
    data_map_1600 = data.assign_to_network(data_map_1600,
                                           node_data,
                                           edge_data,
                                           node_edge_map,
                                           max_dist=1600)
    targets = np.array([
        [0, 164, 163],
        [1, 42, 241],
        [2, 236, 235],
        [3, 48, 262],
        [4, 211, 212],
        [5, 236, 235],
        [6, 58, 57],
        [7, 72, 5],
        [8, 75, 76],
        [9, 92, 9],
        [10, 61, 62],
        [11, 96, 13],
        [12, 0, 59],
        [13, 98, 99],
        [14, 203, 202],
        [15, 121, 120],
        [16, 48, 262],
        [17, 2, 70],
        [18, 182, 183],
        [19, 158, 157],
        [20, 83, 84],
        [21, 2, np.nan],
        [22, 171, 170],
        [23, 266, 52],
        [24, 83, 84],
        [25, 88, 11],
        [26, 49, np.nan],
        [27, 19, 138],
        [28, 134, 135],
        [29, 262, 46],
        [30, 78, 9],
        [31, 188, 189],
        [32, 180, 181],
        [33, 95, 94],
        [34, 226, 225],
        [35, 110, 111],
        [36, 39, 228],
        [37, 158, 25],
        [38, 88, 87],
        [39, 263, np.nan],
        [40, 120, 121],
        [41, 146, 21],
        [42, 10, 97],
        [43, 119, 118],
        [44, 82, 5],
        [45, 11, 88],
        [46, 100, 99],
        [47, 138, 19],
        [48, 14, np.nan],
        [49, 106, 105]
    ])
    # for debugging
    # from cityseer.tools import plot
    # plot.plot_graph_maps(node_uids, node_data, edge_data, data_map)
    # assignment map includes data x, data y, nearest assigned, next nearest assigned
    assert np.allclose(data_map_1600[:, 2:],
                       targets[:, 1:],
                       equal_nan=True,
                       atol=0,
                       rtol=0)
    # max distance of 0 should return all NaN
    data_map_test_0 = data_map.copy()
    data_map_test_0 = data.assign_to_network(data_map_test_0,
                                             node_data,
                                             edge_data,
                                             node_edge_map,
                                             max_dist=0)
    assert np.all(np.isnan(data_map_test_0[:, 2]))
    assert np.all(np.isnan(data_map_test_0[:, 3]))
    # max distance of 2000 should return no NaN for nearest
    # there will be some NaN for next nearest
    data_map_test_2000 = data_map.copy()
    data_map_test_2000 = data.assign_to_network(data_map_test_2000,
                                                node_data,
                                                edge_data,
                                                node_edge_map,
                                                max_dist=2000)
    assert not np.any(np.isnan(data_map_test_2000[:, 2]))
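The assignment map layout assumed by the assertions above, shown on a toy array (coordinate and index values are hypothetical):

import numpy as np

# columns: data x, data y, nearest assigned node idx, next-nearest assigned node idx
toy_map = np.array([
    [700600.0, 5719200.0, 12.0, 59.0],   # two candidate assignments found
    [701200.0, 5719400.0, 49.0, np.nan], # no next-nearest within max_dist
])
print(np.isnan(toy_map[:, 3]))  # -> [False  True]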
Example 10
def test_aggregate_to_src_idx(primal_graph):
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # generate data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    for max_dist in [400, 750]:
        # in this case, use same assignment max dist as search max dist
        data_map_temp = data_map.copy()
        data_map_temp = data.assign_to_network(data_map_temp,
                                               node_data,
                                               edge_data,
                                               node_edge_map,
                                               max_dist=max_dist)
        for angular in [True, False]:
            for netw_src_idx in range(len(node_data)):
                # aggregate to src...
                reachable_data, reachable_data_dist, tree_preds = data.aggregate_to_src_idx(netw_src_idx,
                                                                                            node_data,
                                                                                            edge_data,
                                                                                            node_edge_map,
                                                                                            data_map_temp,
                                                                                            max_dist,
                                                                                            angular=angular)
                # for debugging
                # from cityseer.tools import plot
                # plot.plot_graph_maps(node_uids, node_data, edge_data, data_map)
                # compare to manual checks on distances:
                netw_x_arr = node_data[:, 0]
                netw_y_arr = node_data[:, 1]
                data_x_arr = data_map_temp[:, 0]
                data_y_arr = data_map_temp[:, 1]
                # get the network distances
                tree_map, tree_edges = centrality.shortest_path_tree(edge_data,
                                                                     node_edge_map,
                                                                     netw_src_idx,
                                                                     max_dist=max_dist,
                                                                     angular=angular)
                tree_dists = tree_map[:, 2]
                # verify distances vs. the max
                for d_idx in range(len(data_map_temp)):
                    # check the integrity of the distances and classes
                    reachable = reachable_data[d_idx]
                    reachable_dist = reachable_data_dist[d_idx]
                    # get the distance via the nearest assigned index
                    nearest_dist = np.inf
                    # if a nearest node has been assigned
                    if np.isfinite(data_map_temp[d_idx, 2]):
                        # get the index for the assigned network node
                        netw_idx = int(data_map_temp[d_idx, 2])
                        # if this node is within the cutoff distance:
                        if tree_dists[netw_idx] < max_dist:
                            # get the distances from the data point to the assigned network node
                            d_d = np.hypot(data_x_arr[d_idx] - netw_x_arr[netw_idx],
                                           data_y_arr[d_idx] - netw_y_arr[netw_idx])
                            # and add it to the network distance path from the source to the assigned node
                            n_d = tree_dists[netw_idx]
                            nearest_dist = d_d + n_d
                    # also get the distance via the next nearest assigned index
                    next_nearest_dist = np.inf
                    # if a nearest node has been assigned
                    if np.isfinite(data_map_temp[d_idx, 3]):
                        # get the index for the assigned network node
                        netw_idx = int(data_map_temp[d_idx, 3])
                        # if this node is within the radial cutoff distance:
                        if tree_dists[netw_idx] < max_dist:
                            # get the distances from the data point to the assigned network node
                            d_d = np.hypot(data_x_arr[d_idx] - netw_x_arr[netw_idx],
                                           data_y_arr[d_idx] - netw_y_arr[netw_idx])
                            # and add it to the network distance path from the source to the assigned node
                            n_d = tree_dists[netw_idx]
                            next_nearest_dist = d_d + n_d
                    # now check distance integrity
                    if np.isinf(reachable_dist):
                        assert not reachable
                        assert nearest_dist > max_dist and next_nearest_dist > max_dist
                    else:
                        assert reachable
                        assert reachable_dist <= max_dist
                        if nearest_dist < next_nearest_dist:
                            assert reachable_dist == nearest_dist
                        else:
                            assert reachable_dist == next_nearest_dist
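A standalone sketch of the distance composition this test verifies: the reachable distance to a data point is the euclidean offset from the point to its assigned network node plus the network distance from the source to that node (the helper name here is hypothetical):

import numpy as np

def composed_dist(data_xy, node_xy, network_dist):
    d_d = np.hypot(data_xy[0] - node_xy[0], data_xy[1] - node_xy[1])
    return d_d + network_dist

# a 3-4-5 triangle offset (50m) plus 150m along the network
print(composed_dist((0.0, 0.0), (30.0, 40.0), 150.0))  # -> 200.0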
Example 11
def test_aggregate_landuses_signatures(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # set parameters
    betas = np.array([0.02, 0.01, 0.005, 0.0025])
    distances = networks.distance_from_beta(betas)
    qs = np.array([0, 1, 2])
    mock_categorical = mock.mock_categorical_data(len(data_map))
    landuse_classes, landuse_encodings = layers.encode_categorical(mock_categorical)
    # check that empty land_use encodings are caught
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                mixed_use_hill_keys=np.array([0]))
    # check that unequal land_use encodings vs data map lengths are caught
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                landuse_encodings=landuse_encodings[:-1],
                                mixed_use_other_keys=np.array([0]))
    # check that providing no metrics keys is caught
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                landuse_encodings=landuse_encodings)
    # check that missing qs are caught when hill keys are provided
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                mixed_use_hill_keys=np.array([0]),
                                landuse_encodings=landuse_encodings)
    # check that problematic mixed use and accessibility keys are caught
    for mu_h_key, mu_o_key, ac_key in [
        # negatives
        ([-1], [1], [1]),
        ([1], [-1], [1]),
        ([1], [1], [-1]),
        # out of range
        ([4], [1], [1]),
        ([1], [3], [1]),
        ([1], [1], [max(landuse_encodings) + 1]),
        # duplicates
        ([1, 1], [1], [1]),
        ([1], [1, 1], [1]),
        ([1], [1], [1, 1])]:
        with pytest.raises(ValueError):
            data.aggregate_landuses(node_data,
                                    edge_data,
                                    node_edge_map,
                                    data_map,
                                    distances,
                                    betas,
                                    landuse_encodings,
                                    qs=qs,
                                    mixed_use_hill_keys=np.array(mu_h_key),
                                    mixed_use_other_keys=np.array(mu_o_key),
                                    accessibility_keys=np.array(ac_key))
    for h_key, o_key in (([3], []), ([], [2])):
        # check that missing matrix is caught for disparity weighted indices
        with pytest.raises(ValueError):
            data.aggregate_landuses(node_data,
                                    edge_data,
                                    node_edge_map,
                                    data_map,
                                    distances,
                                    betas,
                                    landuse_encodings=landuse_encodings,
                                    qs=qs,
                                    mixed_use_hill_keys=np.array(h_key),
                                    mixed_use_other_keys=np.array(o_key))
        # check that non-square disparity matrix is caught
        mock_matrix = np.full((len(landuse_classes), len(landuse_classes)), 1)
        with pytest.raises(ValueError):
            data.aggregate_landuses(node_data,
                                    edge_data,
                                    node_edge_map,
                                    data_map,
                                    distances,
                                    betas,
                                    landuse_encodings=landuse_encodings,
                                    qs=qs,
                                    mixed_use_hill_keys=np.array(h_key),
                                    mixed_use_other_keys=np.array(o_key),
                                    cl_disparity_wt_matrix=mock_matrix[:-1])
Example 12
def test_local_centrality_time(primal_graph):
    """
    Keep in mind there are several extraneous variables:
    e.g. may be fairly dramatic differences in timing on larger graphs and larger search distances

    originally based on node_harmonic and node_betweenness:
    OLD VERSION with trim maps:
    Timing: 10.490865555 for 10000 iterations
    version with numba typed list - faster and removes arcane full vs. trim maps workflow
    8.24 for 10000 iterations
    version with node_edge_map Dict - tad slower but worthwhile for cleaner and more intuitive code
    8.88 for 10000 iterations
    version with shortest path tree algo simplified to nodes and non-angular only
    8.19 for 10000 iterations

    if reducing floating precision
    float64 - 17.881911942000002
    float32 - 13.612861239

    notes:
    - Segments of unreachable code used to add to timing: this seems to have been fixed in more recent versions of numba
    - Separating the logic into functions results in ever so slightly slower times...
      though this may be due to function setup at invocation (x10000) which wouldn't be incurred in real scenarios...?
    - Tests on using a List(Dict('x', 'y', etc.) structure proved almost four times slower, so sticking with arrays
    - Experiments with golang proved too complex re: bindings...
    """

    if 'GITHUB_ACTIONS' in os.environ:
        return
    os.environ['CITYSEER_QUIET_MODE'] = '1'
    # load the test graph
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        primal_graph)
    # use an infinite distance threshold (i.e. zero beta) so that no distance cutoffs are encountered
    distances = np.array([np.inf])
    betas = networks.beta_from_distance(distances)

    def node_cent_wrapper():
        centrality.local_node_centrality(node_data,
                                         edge_data,
                                         node_edge_map,
                                         distances,
                                         betas,
                                         ('node_harmonic', 'node_betweenness'),
                                         angular=False,
                                         progress_proxy=None)

    # prime the function
    node_cent_wrapper()
    iters = 20000
    # time and report - roughly 6.37s on 4.2GHz i7
    func_time = timeit.timeit(node_cent_wrapper, number=iters)
    print(f'node_cent_wrapper: {func_time} for {iters} iterations')
    assert func_time < 10

    def segment_cent_wrapper():
        centrality.local_segment_centrality(
            node_data,
            edge_data,
            node_edge_map,
            distances,
            betas, ('segment_harmonic', 'segment_betweenness'),
            angular=False,
            progress_proxy=None)

    # prime the function
    segment_cent_wrapper()
    iters = 20000
    # time and report - roughly 9.36s on 4.2GHz i7
    func_time = timeit.timeit(segment_cent_wrapper, number=iters)
    print(f'segment_cent_wrapper: {func_time} for {iters} iterations')
    assert func_time < 13
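
    # Optional sketch: timeit.repeat (stdlib) gives a best-of-N reading that is
    # less sensitive to background noise than a single timeit.timeit run:
    best = min(timeit.repeat(segment_cent_wrapper, number=1000, repeat=3))
    print(f'segment_cent_wrapper best-of-3: {best} for 1000 iterations')
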
def test_decomposed_local_centrality(primal_graph):
    # centralities on the original nodes within the decomposed network should equal those from the non-decomposed workflow
    betas = np.array([0.02, 0.01, 0.005, 0.0008, 0.0])
    distances = networks.distance_from_beta(betas)
    node_measure_keys = ('node_density', 'node_farness', 'node_cycles',
                         'node_harmonic', 'node_beta', 'node_betweenness',
                         'node_betweenness_beta')
    segment_measure_keys = ('segment_density', 'segment_harmonic',
                            'segment_beta', 'segment_betweenness')
    # test a decomposed graph
    G_decomposed = graphs.nX_decompose(primal_graph, 20)
    # graph maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        primal_graph)  # generate node and edge maps
    node_uids_decomp, node_data_decomp, edge_data_decomp, node_edge_map_decomp = graphs.graph_maps_from_nX(
        G_decomposed)
    # non-decomposed case
    node_measures_data = centrality.local_node_centrality(node_data,
                                                          edge_data,
                                                          node_edge_map,
                                                          distances,
                                                          betas,
                                                          node_measure_keys,
                                                          angular=False)
    # decomposed case
    node_measures_data_decomposed = centrality.local_node_centrality(
        node_data_decomp,
        edge_data_decomp,
        node_edge_map_decomp,
        distances,
        betas,
        node_measure_keys,
        angular=False)
    # node
    d_range = len(distances)
    m_range = len(node_measure_keys)
    assert node_measures_data.shape == (m_range, d_range, len(primal_graph))
    assert node_measures_data_decomposed.shape == (m_range, d_range,
                                                   len(G_decomposed))
    # with increasing decomposition:
    # - node-based measures will not match
    # - closeness-based segment measures will match: these measure to the cut endpoints per distance threshold
    # - betweenness-based segment measures won't match: they don't measure to the cut endpoints
    # segment versions
    segment_measures_data = centrality.local_segment_centrality(
        node_data,
        edge_data,
        node_edge_map,
        distances,
        betas,
        segment_measure_keys,
        angular=False)
    segment_measures_data_decomposed = centrality.local_segment_centrality(
        node_data_decomp,
        edge_data_decomp,
        node_edge_map_decomp,
        distances,
        betas,
        segment_measure_keys,
        angular=False)
    m_range = len(segment_measure_keys)
    assert segment_measures_data.shape == (m_range, d_range, len(primal_graph))
    assert segment_measures_data_decomposed.shape == (m_range, d_range,
                                                      len(G_decomposed))
    for m_idx in range(m_range):
        for d_idx in range(d_range):
            match = np.allclose(
                segment_measures_data[m_idx][d_idx],
                # compare only the original nodes (decomposed nodes are appended after them)
                segment_measures_data_decomposed[m_idx][d_idx][:len(primal_graph)],
                atol=0.1,
                rtol=0)  # relax precision
            if m_idx in (0, 1, 2):  # only the closeness-based segment measures should match
                assert match
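
# Sketch of the expectation stated above (assumption: illustrative helper, not
# part of the original tests): closeness-based segment measures computed on
# progressively decomposed graphs should agree on the original (leading) nodes.
def _segment_closeness_stability(primal_graph, distances, betas):
    closeness_keys = ('segment_density', 'segment_harmonic', 'segment_beta')
    results = []
    for dec_dist in (40, 20):
        G_dec = graphs.nX_decompose(primal_graph, dec_dist)
        _, nd, ed, nem = graphs.graph_maps_from_nX(G_dec)
        data = centrality.local_segment_centrality(nd, ed, nem, distances,
                                                   betas, closeness_keys,
                                                   angular=False)
        # keep only the original nodes, which precede the decomposed nodes
        results.append(data[:, :, :len(primal_graph)])
    assert np.allclose(results[0], results[1], atol=0.1, rtol=0)
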
def test_local_centrality(diamond_graph):
    """
    manual checks for all methods against diamond graph
    measures_data is multidimensional in the form of measure_keys x distances x nodes
    """
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        diamond_graph)
    # generate dual
    diamond_graph_dual = graphs.nX_to_dual(diamond_graph)
    node_uids_dual, node_data_dual, edge_data_dual, node_edge_map_dual = graphs.graph_maps_from_nX(
        diamond_graph_dual)
    # setup distances and betas
    distances = np.array([50, 150, 250])
    betas = networks.beta_from_distance(distances)

    # NODE SHORTEST
    # set the keys - add shuffling to be sure various orders work
    node_keys = [
        'node_density', 'node_farness', 'node_cycles', 'node_harmonic',
        'node_beta', 'node_betweenness', 'node_betweenness_beta'
    ]
    np.random.shuffle(node_keys)  # in place
    measure_keys = tuple(node_keys)
    measures_data = centrality.local_node_centrality(node_data, edge_data,
                                                     node_edge_map, distances,
                                                     betas, measure_keys)
    # node density
    # additive nodes
    m_idx = node_keys.index('node_density')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [2, 3, 3, 2],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [3, 3, 3, 3],
                       atol=0.001,
                       rtol=0)
    # node farness
    # additive distances
    m_idx = node_keys.index('node_farness')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [200, 300, 300, 200],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [400, 300, 300, 400],
                       atol=0.001,
                       rtol=0)
    # node cycles
    # additive cycles
    m_idx = node_keys.index('node_cycles')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [1, 2, 2, 1],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [2, 2, 2, 2],
                       atol=0.001,
                       rtol=0)
    # node harmonic
    # additive 1 / distances
    m_idx = node_keys.index('node_harmonic')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0.02, 0.03, 0.03, 0.02],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0.025, 0.03, 0.03, 0.025],
                       atol=0.001,
                       rtol=0)
    # node beta
    # additive exp(-beta * dist)
    m_idx = node_keys.index('node_beta')
    # beta = 0.0
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    # beta = 0.02666667
    assert np.allclose(measures_data[m_idx][1],
                       [0.1389669, 0.20845035, 0.20845035, 0.1389669],
                       atol=0.001,
                       rtol=0)
    # beta = 0.016
    assert np.allclose(measures_data[m_idx][2],
                       [0.44455525, 0.6056895, 0.6056895, 0.44455522],
                       atol=0.001,
                       rtol=0)
    # node betweenness
    # additive 1 per node en route
    m_idx = node_keys.index('node_betweenness')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    # takes first out of multiple equidistant routes
    assert np.allclose(measures_data[m_idx][2], [0, 1, 0, 0],
                       atol=0.001,
                       rtol=0)
    # node betweenness beta
    # additive exp(-beta * dist) en route
    m_idx = node_keys.index('node_betweenness_beta')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)  # beta = 0.08
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)  # beta = 0.02666667
    # takes first out of multiple equidistant routes
    # beta evaluated over 200m distance from 3 to 0 via node 1
    assert np.allclose(measures_data[m_idx][2],
                       [0, 0.0407622, 0, 0],
                       atol=0.001,
                       rtol=0)  # beta = 0.016
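
    # Hand check (sketch; distances inferred from the farness asserts above):
    # at the 250m threshold (beta = 0.016), corner node 0 reaches nodes 1 and 2
    # at 100m and node 3 at 200m:
    assert np.isclose(1 / 100 + 1 / 100 + 1 / 200, 0.025)  # node harmonic
    assert np.isclose(2 * np.exp(-0.016 * 100) + np.exp(-0.016 * 200),
                      0.44455, atol=0.001)  # node beta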

    # NODE SIMPLEST
    node_keys_angular = ['node_harmonic_angular', 'node_betweenness_angular']
    np.random.shuffle(node_keys_angular)  # in place
    measure_keys = tuple(node_keys_angular)
    measures_data = centrality.local_node_centrality(node_data,
                                                     edge_data,
                                                     node_edge_map,
                                                     distances,
                                                     betas,
                                                     measure_keys,
                                                     angular=True)
    # node harmonic angular
    # additive 1 / (1 + (to_imp / 180))
    m_idx = node_keys_angular.index('node_harmonic_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [2, 3, 3, 2],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [2.75, 3, 3, 2.75],
                       atol=0.001,
                       rtol=0)
    # node betweenness angular
    # additive 1 per node en simplest route
    m_idx = node_keys_angular.index('node_betweenness_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 1, 0, 0],
                       atol=0.001,
                       rtol=0)
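
    # Hand check (sketch; the 60 degree turn is inferred from the diamond's
    # equilateral geometry): at 250m, node 0 reaches nodes 1 and 2 head-on
    # (1.0 each) and node 3 via a single 60 degree turn at node 1:
    assert np.isclose(1 + 1 + 1 / (1 + 60 / 180), 2.75)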

    # NODE SIMPLEST ON DUAL
    node_keys_angular = ['node_harmonic_angular', 'node_betweenness_angular']
    np.random.shuffle(node_keys_angular)  # in place
    measure_keys = tuple(node_keys_angular)
    measures_data = centrality.local_node_centrality(node_data_dual,
                                                     edge_data_dual,
                                                     node_edge_map_dual,
                                                     distances,
                                                     betas,
                                                     measure_keys,
                                                     angular=True)
    # node_uids_dual = ('0_1', '0_2', '1_2', '1_3', '2_3')
    # node harmonic angular
    # additive 1 / (1 + (to_imp / 180))
    m_idx = node_keys_angular.index('node_harmonic_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [1.95, 1.95, 2.4, 1.95, 1.95],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [2.45, 2.45, 2.4, 2.45, 2.45],
                       atol=0.001,
                       rtol=0)
    # node betweenness angular
    # additive 1 per node en simplest route
    m_idx = node_keys_angular.index('node_betweenness_angular')
    assert np.allclose(measures_data[m_idx][0], [0, 0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 0, 0, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 0, 0, 1, 1],
                       atol=0.001,
                       rtol=0)

    # SEGMENT SHORTEST
    segment_keys = [
        'segment_density', 'segment_harmonic', 'segment_beta',
        'segment_betweenness'
    ]
    np.random.shuffle(segment_keys)  # in place
    measure_keys = tuple(segment_keys)
    measures_data = centrality.local_segment_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys,
                                                        angular=False)
    # segment density
    # additive segment lengths
    m_idx = segment_keys.index('segment_density')
    assert np.allclose(measures_data[m_idx][0], [100, 150, 150, 100],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [400, 500, 500, 400],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [500, 500, 500, 500],
                       atol=0.001,
                       rtol=0)
    # segment harmonic
    # segments are potentially approached from two directions
    # i.e. along respective shortest paths to intersection of shortest routes
    # i.e. in this case, the midpoint of the middle segment is apportioned in either direction
    # additive log(b) - log(a) + log(d) - log(c)
    # the nearer distance is capped at 1m to avoid negative log values
    m_idx = segment_keys.index('segment_harmonic')
    assert np.allclose(measures_data[m_idx][0],
                       [7.824046, 11.736069, 11.736069, 7.824046],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1],
                       [10.832201, 15.437371, 15.437371, 10.832201],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2],
                       [11.407564, 15.437371, 15.437371, 11.407565],
                       atol=0.001,
                       rtol=0)
    # segment beta
    # additive (np.exp(-beta * b) - np.exp(-beta * a)) / -beta + (np.exp(-beta * d) - np.exp(-beta * c)) / -beta
    # beta = 0 resolves to b - a and avoids division through zero
    m_idx = segment_keys.index('segment_beta')
    assert np.allclose(measures_data[m_idx][0],
                       [24.542109, 36.813164, 36.813164, 24.542109],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1],
                       [77.46391, 112.358284, 112.358284, 77.46391],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2],
                       [133.80205, 177.43903, 177.43904, 133.80205],
                       atol=0.001,
                       rtol=0)
    # segment betweenness
    # similar formulation to segment beta: start and end segment of each betweenness pair assigned to intervening nodes
    # distance thresholds are computed using the inside edges of the segments
    # so if the segments are touching, they will count up to the threshold distance...
    m_idx = segment_keys.index('segment_betweenness')
    assert np.allclose(measures_data[m_idx][0], [0, 24.542109, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 69.78874, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 99.76293, 0, 0],
                       atol=0.001,
                       rtol=0)
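
    # Cross-check (sketch): each segment-beta term is the closed-form integral
    # of exp(-beta * x) over a segment span [a, b]; verify one span numerically:
    beta_chk, a_chk, b_chk = 0.016, 0.0, 100.0
    closed_form = (np.exp(-beta_chk * b_chk) - np.exp(-beta_chk * a_chk)) / -beta_chk
    x_chk = np.linspace(a_chk, b_chk, 10001)
    assert np.isclose(closed_form, np.trapz(np.exp(-beta_chk * x_chk), x_chk), atol=0.001)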

    # SEGMENT SIMPLEST ON PRIMAL (NO DOUBLE COUNTING)
    segment_keys_angular = [
        'segment_harmonic_hybrid', 'segment_betweeness_hybrid'
    ]
    np.random.shuffle(segment_keys_angular)  # in place
    measure_keys = tuple(segment_keys_angular)
    measures_data = centrality.local_segment_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys,
                                                        angular=True)
    # segment harmonic hybrid
    # additive segment lengths divided through angular impedance
    # (f - e) / (1 + (ang / 180))
    m_idx = segment_keys_angular.index('segment_harmonic_hybrid')
    assert np.allclose(measures_data[m_idx][0], [100, 150, 150, 100],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [305, 360, 360, 305],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [410, 420, 420, 410],
                       atol=0.001,
                       rtol=0)
    # segment betweenness hybrid
    # additive segment lengths / (1 + (ang / 180)) for segments en route
    m_idx = segment_keys_angular.index('segment_betweeness_hybrid')
    assert np.allclose(measures_data[m_idx][0], [0, 75, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][1], [0, 150, 0, 0],
                       atol=0.001,
                       rtol=0)
    assert np.allclose(measures_data[m_idx][2], [0, 150, 0, 0],
                       atol=0.001,
                       rtol=0)
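
# Sketch of the hybrid formulation named above (illustrative only): segment
# length is divided through angular impedance rather than a distance-decay
# curve, so straight continuations count in full and turns are penalised.
def _hybrid_contribution(e, f, ang):
    return (f - e) / (1 + ang / 180)  # span length scaled by the turn penalty

assert _hybrid_contribution(0, 100, 0) == 100  # straight-on: full length
assert abs(_hybrid_contribution(0, 100, 60) - 75) < 1e-9  # one 60 degree turn
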
def test_shortest_path_tree(primal_graph, dual_graph):
    node_uids_p, node_data_p, edge_data_p, node_edge_map_p = graphs.graph_maps_from_nX(
        primal_graph)
    # prepare round-trip graph for checks
    G_round_trip = graphs.nX_from_graph_maps(node_uids_p, node_data_p,
                                             edge_data_p, node_edge_map_p)
    # prepare dual graph
    node_uids_d, node_data_d, edge_data_d, node_edge_map_d = graphs.graph_maps_from_nX(
        dual_graph)
    assert len(node_uids_d) > len(node_uids_p)
    # test all shortest paths against networkX version of dijkstra
    for max_dist in [0, 500, 2000, np.inf]:
        for src_idx in range(len(primal_graph)):
            # check shortest path maps
            tree_map, tree_edges = centrality.shortest_path_tree(
                edge_data_p,
                node_edge_map_p,
                src_idx,
                max_dist=max_dist,
                angular=False)
            tree_preds_p = tree_map[:, 1]
            tree_short_dists_p = tree_map[:, 2]
            # compare against networkx dijkstra
            nx_dist, nx_path = nx.single_source_dijkstra(G_round_trip,
                                                         src_idx,
                                                         weight='length',
                                                         cutoff=max_dist)
            for j in range(len(primal_graph)):
                if j in nx_path:
                    assert find_path(j, src_idx, tree_preds_p) == nx_path[j]
                    assert np.allclose(tree_short_dists_p[j],
                                       nx_dist[j],
                                       atol=0.001,
                                       rtol=0)
    # compare angular simplest paths for a selection of targets on primal vs. dual
    # remember, this is angular change not distance travelled
    # can be compared from primal to dual in this instance because edge segments are straight
    # i.e. same amount of angular change whether primal or dual graph
    # plot.plot_nX_primal_or_dual(primal=primal_graph, dual=dual_graph, labels=True, node_size=80)
    p_source_idx = node_uids_p.index(0)
    primal_targets = (15, 20, 37)
    dual_sources = ('0_1', '0_16', '0_31')
    dual_targets = ('13_15', '17_20', '36_37')
    for p_target, d_source, d_target in zip(primal_targets, dual_sources,
                                            dual_targets):
        p_target_idx = node_uids_p.index(p_target)
        d_source_idx = node_uids_d.index(
            d_source)  # dual source index changes depending on direction
        d_target_idx = node_uids_d.index(d_target)
        tree_map_p, tree_edges_p = centrality.shortest_path_tree(
            edge_data_p,
            node_edge_map_p,
            p_source_idx,
            max_dist=np.inf,
            angular=True)
        tree_simpl_dists_p = tree_map_p[:, 3]
        tree_map_d, tree_edges_d = centrality.shortest_path_tree(
            edge_data_d,
            node_edge_map_d,
            d_source_idx,
            max_dist=np.inf,
            angular=True)
        tree_simpl_dists_d = tree_map_d[:, 3]
        assert np.allclose(tree_simpl_dists_p[p_target_idx],
                           tree_simpl_dists_d[d_target_idx],
                           atol=0.001,
                           rtol=0)
    # angular impedance should take a simpler but longer path - test basic case on dual
    # the same source and target are used for both the simplest and shortest path checks
    src_idx = node_uids_d.index('11_6')
    target = node_uids_d.index('39_40')
    # SIMPLEST PATH: get simplest path tree using angular impedance
    tree_map, tree_edges = centrality.shortest_path_tree(
        edge_data_d, node_edge_map_d, src_idx, max_dist=np.inf,
        angular=True)  # ANGULAR = TRUE
    # find path
    tree_preds = tree_map[:, 1]
    path = find_path(target, src_idx, tree_preds)
    path_transpose = [node_uids_d[n] for n in path]
    # takes the 1597m route via the long outside segment
    assert path_transpose == [
        '11_6', '11_14', '10_14', '10_43', '43_44', '40_44', '39_40'
    ]
    # SHORTEST PATH:
    # get shortest path tree using non angular impedance
    tree_map, tree_edges = centrality.shortest_path_tree(
        edge_data_d, node_edge_map_d, src_idx, max_dist=np.inf,
        angular=False)  # ANGULAR = FALSE
    # find path
    tree_preds = tree_map[:, 1]
    path = find_path(target, src_idx, tree_preds)
    path_transpose = [node_uids_d[n] for n in path]
    # takes the 1345m shorter route
    assert path_transpose == [
        '11_6', '6_7', '3_7', '3_4', '1_4', '0_1', '0_31', '31_32', '32_34',
        '34_37', '37_39', '39_40'
    ]
    # NO SIDESTEPS - explicit check that sidesteps are prevented
    src_idx = node_uids_d.index('10_43')
    target = node_uids_d.index('10_5')
    tree_map, tree_edges = centrality.shortest_path_tree(edge_data_d,
                                                         node_edge_map_d,
                                                         src_idx,
                                                         max_dist=np.inf,
                                                         angular=True)
    # find path
    tree_preds = tree_map[:, 1]
    path = find_path(target, src_idx, tree_preds)
    path_transpose = [node_uids_d[n] for n in path]
    # print(path_transpose)
    assert path_transpose == ['10_43', '10_5']
    # WITH SIDESTEPS - set angular flag to False
    # manually overwrite the distance impedances with the angular impedances for this test
    # (the angular flag must be False, else the simplest-path logic prevents the sidestep)
    edge_data_d_temp = edge_data_d.copy()
    # angular impedances at index 3 are copied over the distance impedances at index 2
    edge_data_d_temp[:, 2] = edge_data_d_temp[:, 3]
    tree_map, tree_edges = centrality.shortest_path_tree(edge_data_d_temp,
                                                         node_edge_map_d,
                                                         src_idx,
                                                         max_dist=np.inf,
                                                         angular=False)
    # find path
    tree_preds = tree_map[:, 1]
    path = find_path(target, src_idx, tree_preds)
    path_transpose = [node_uids_d[n] for n in path]
    assert path_transpose == ['10_43', '10_14', '10_5']
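
# Minimal sketch of a predecessor-map walk consistent with how find_path is
# used above (assumption: the actual helper is defined elsewhere in this
# test module):
def _walk_preds(target_idx, src_idx, tree_preds):
    path = [target_idx]
    while path[-1] != src_idx:
        path.append(int(tree_preds[path[-1]]))  # step to the predecessor
    return list(reversed(path))  # ordered from source to target
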
def test_local_node_centrality(primal_graph):
    """
    Also tested indirectly via test_networks.test_compute_centrality

    Test centrality methods against NetworkX where possible - i.e. harmonic closeness and betweenness
    Note that NetworkX's improved closeness is not the same as the derivation used in this package
    NetworkX doesn't support maximum distance cutoffs, so run on the whole graph (low beta / high distance)
    """
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        primal_graph)
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data,
                                             node_edge_map)
    # needs sufficiently small betas (i.e. large distance thresholds) so that cutoffs aren't encountered
    betas = np.array([0.02, 0.01, 0.005, 0.0008, 0.0])
    distances = networks.distance_from_beta(betas)
    # set the keys - add shuffling to be sure various orders work
    measure_keys = [
        'node_density', 'node_farness', 'node_cycles', 'node_harmonic',
        'node_beta', 'node_betweenness', 'node_betweenness_beta'
    ]
    np.random.shuffle(measure_keys)  # in place
    measure_keys = tuple(measure_keys)
    # generate the measures
    measures_data = centrality.local_node_centrality(node_data, edge_data,
                                                     node_edge_map, distances,
                                                     betas, measure_keys)
    node_density = measures_data[measure_keys.index('node_density')]
    node_farness = measures_data[measure_keys.index('node_farness')]
    node_cycles = measures_data[measure_keys.index('node_cycles')]
    node_harmonic = measures_data[measure_keys.index('node_harmonic')]
    node_beta = measures_data[measure_keys.index('node_beta')]
    node_betweenness = measures_data[measure_keys.index('node_betweenness')]
    node_betweenness_beta = measures_data[measure_keys.index(
        'node_betweenness_beta')]
    # improved closeness is derived after the fact
    improved_closeness = node_density / node_farness / node_density

    # test node density
    # node density count doesn't include self-node
    # main connected component == 49 reachable nodes
    # isolated looping component == 3
    # isolated edge == 1
    # isolated node == 0
    for n in node_density[4]:  # infinite distance, so no cutoff effects
        assert n in [49, 3, 1, 0]

    # test harmonic closeness vs NetworkX
    nx_harm_cl = nx.harmonic_centrality(G_round_trip, distance='length')
    nx_harm_cl = np.array([v for v in nx_harm_cl.values()])
    assert np.allclose(nx_harm_cl, node_harmonic[4], atol=0.001, rtol=0)

    # test betweenness vs NetworkX
    # set endpoint counting to false and do not normalise
    # nx node centrality NOT implemented for MultiGraph
    G_non_multi = nx.Graph()  # don't change to MultiGraph!!!
    G_non_multi.add_nodes_from(G_round_trip.nodes())
    for s, e, k, d in G_round_trip.edges(keys=True, data=True):
        assert k == 0
        G_non_multi.add_edge(s, e, **d)
    nx_betw = nx.betweenness_centrality(G_non_multi,
                                        weight='length',
                                        endpoints=False,
                                        normalized=False)
    nx_betw = np.array([v for v in nx_betw.values()])
    # nx betweenness gives 0.5 instead of 1 for all disconnected looping component nodes
    # nx presumably takes equidistant routes into account, in which case only the fraction is aggregated
    assert np.allclose(nx_betw[:52],
                       node_betweenness[4][:52],
                       atol=0.001,
                       rtol=0)

    # do the comparisons array-wise so that betweenness can be aggregated
    d_n = len(distances)
    betw = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    betw_wt = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    dens = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    far_short_dist = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    far_simpl_dist = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    harmonic_cl = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    grav = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    cyc = np.full((d_n, primal_graph.number_of_nodes()), 0.0)

    for src_idx in range(len(primal_graph)):
        # get shortest path maps
        tree_map, tree_edges = centrality.shortest_path_tree(edge_data,
                                                             node_edge_map,
                                                             src_idx,
                                                             max(distances),
                                                             angular=False)
        tree_nodes = np.where(tree_map[:, 0])[0]
        tree_preds = tree_map[:, 1]
        tree_short_dist = tree_map[:, 2]
        tree_simpl_dist = tree_map[:, 3]
        tree_cycles = tree_map[:, 4]
        for to_idx in tree_nodes:
            # skip self nodes
            if to_idx == src_idx:
                continue
            # get shortest / simplest distances
            to_short_dist = tree_short_dist[to_idx]
            to_simpl_dist = tree_simpl_dist[to_idx]
            cycles = tree_cycles[to_idx]
            # continue if exceeds max
            if np.isinf(to_short_dist):
                continue
            for d_idx in range(len(distances)):
                dist_cutoff = distances[d_idx]
                beta = betas[d_idx]
                if to_short_dist <= dist_cutoff:
                    # within the distance threshold: aggregate values
                    dens[d_idx][src_idx] += 1
                    far_short_dist[d_idx][src_idx] += to_short_dist
                    far_simpl_dist[d_idx][src_idx] += to_simpl_dist
                    harmonic_cl[d_idx][src_idx] += 1 / to_short_dist
                    grav[d_idx][src_idx] += np.exp(-beta * to_short_dist)
                    # cycles
                    cyc[d_idx][src_idx] += cycles
                    # only process betweenness in one direction
                    if to_idx < src_idx:
                        continue
                    # betweenness - only counting truly between vertices, not starting and ending verts
                    inter_idx = tree_preds[to_idx]
                    # isolated nodes will have no predecessors
                    if np.isnan(inter_idx):
                        continue
                    inter_idx = int(inter_idx)  # np.int is removed in modern NumPy
                    while True:
                        # break out of while loop if the intermediary has reached the source node
                        if inter_idx == src_idx:
                            break
                        betw[d_idx][inter_idx] += 1
                        betw_wt[d_idx][inter_idx] += np.exp(-beta *
                                                            to_short_dist)
                        # follow
                        inter_idx = int(tree_preds[inter_idx])
    improved_cl = dens / far_short_dist / dens

    assert np.allclose(node_density, dens, atol=0.001, rtol=0)
    assert np.allclose(node_farness, far_short_dist, atol=0.01,
                       rtol=0)  # relax precision
    assert np.allclose(node_cycles, cyc, atol=0.001, rtol=0)
    assert np.allclose(node_harmonic, harmonic_cl, atol=0.001, rtol=0)
    assert np.allclose(node_beta, grav, atol=0.001, rtol=0)
    assert np.allclose(improved_closeness,
                       improved_cl,
                       equal_nan=True,
                       atol=0.001,
                       rtol=0)
    assert np.allclose(node_betweenness, betw, atol=0.001, rtol=0)
    assert np.allclose(node_betweenness_beta, betw_wt, atol=0.001, rtol=0)

    # catch typos
    with pytest.raises(ValueError):
        centrality.local_node_centrality(node_data, edge_data, node_edge_map,
                                         distances, betas, ('typo_key', ))
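
# Note (sketch): the chained division used for improved closeness reduces to
# 1 / farness, with NaN wherever density == 0 (isolated nodes); an explicit
# equivalent for clarity:
def _improved_closeness(density, farness):
    out = np.full(farness.shape, np.nan)
    np.divide(1.0, farness, out=out, where=density > 0)
    return out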