Example #1
def test_check_distances_and_betas():
    betas = np.array([-0.02, -0.01, -0.005, -0.0025, -0.0])
    distances = np.array(networks.distance_from_beta(betas))

    # zero length arrays
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(np.array([]), betas)
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(distances, np.array([]))
    # mismatching array lengths
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(np.array(distances[:-1]), betas)
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(distances, betas[:-1])
    # check that duplicates are caught
    dup_betas = np.array([-0.02, -0.02])
    dup_distances = np.array(networks.distance_from_beta(dup_betas))
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(dup_distances, dup_betas)
    # positive values of beta
    betas_pos = betas.copy()
    betas_pos[0] = 4
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(distances, betas_pos)
    # negative values of distance
    distances_neg = distances.copy()
    distances_neg[0] = -100
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(distances_neg, betas)
    # inconsistent distances <-> betas
    betas[1] = -0.03
    with pytest.raises(ValueError):
        checks.check_distances_and_betas(distances, betas)
Example #2
def network_generator():
    for betas in [[-0.008], [-0.008, -0.002, -0.0]]:
        distances = networks.distance_from_beta(betas)
        for angular in [False, True]:
            G = mock.mock_graph()
            G = graphs.nX_simple_geoms(G)
            yield G, distances, betas, angular
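
A generator like this is consumed by iterating it inside a test body; a minimal, hypothetical usage sketch (the test name and assertion are illustrative only, not part of the library):

def test_generated_networks():
    # iterate the mock graphs together with their matching distance / beta pairs
    for G, distances, betas, angular in network_generator():
        # each distance threshold should correspond to exactly one beta
        assert len(distances) == len(betas)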
Example #3
def test_beta_from_distance():
    # some basic checks
    for d, b in zip([100, 1600, np.inf], [-0.04, -0.0025, -0.0]):
        # simple straight check against corresponding distance
        assert networks.beta_from_distance(d) == np.array([b])
        # circular check
        assert networks.distance_from_beta(networks.beta_from_distance(d)) == d
        # array form check
        assert networks.beta_from_distance(np.array([d])) == np.array([b])
    # check that custom min_threshold_wt works
    arr = networks.beta_from_distance(172.69388197455342,
                                      min_threshold_wt=0.001)
    assert np.allclose(arr, np.array([-0.04]), atol=0.001, rtol=0)
    # check on array form
    arr = networks.beta_from_distance([100, 1600, np.inf])
    assert np.allclose(arr,
                       np.array([-0.04, -0.0025, -0.0]),
                       atol=0.001,
                       rtol=0)
    # check for type error
    with pytest.raises(TypeError):
        networks.beta_from_distance('boo')
    # check that invalid distance values raise an error
    for d in [-100, 0]:
        with pytest.raises(ValueError):
            networks.beta_from_distance(d)
Example #4
def test_Network_Layer_From_nX(primal_graph):
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    x_arr = node_data[:, 0]
    y_arr = node_data[:, 1]
    betas = np.array([0.04, 0.02])
    distances = networks.distance_from_beta(betas)

    # test the NetworkLayerFromNX class
    for d, b in zip([distances, None], [None, betas]):
        for angular in [True, False]:
            N = networks.NetworkLayerFromNX(primal_graph, distances=d, betas=b)
            assert np.allclose(N.uids, node_uids, atol=0.001, rtol=0)
            assert np.allclose(N._node_data, node_data, atol=0.001, rtol=0)
            assert np.allclose(N._edge_data, edge_data, atol=0.001, rtol=0)
            assert np.allclose(N.distances, distances, atol=0.001,
                               rtol=0)  # inferred automatically when only betas provided
            assert np.allclose(N.betas, betas, atol=0.001,
                               rtol=0)  # inferred automatically when only distances provided
            assert N._min_threshold_wt == checks.def_min_thresh_wt
            assert np.allclose(N.node_x_arr, x_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_y_arr, y_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_live_arr, node_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_lengths_arr, edge_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_angles_arr, edge_data[:, 3], atol=0.001, rtol=0)
            assert np.allclose(N.edge_impedance_factors_arr, edge_data[:, 4], atol=0.001, rtol=0)
            assert np.allclose(N.edge_in_bearings_arr, edge_data[:, 5], atol=0.001, rtol=0)
            assert np.allclose(N.edge_out_bearings_arr, edge_data[:, 6], atol=0.001, rtol=0)

    # check alternate min_threshold_wt gets passed through successfully
    alt_min = 0.02
    alt_distances = networks.distance_from_beta(betas, min_threshold_wt=alt_min)
    N = networks.NetworkLayerFromNX(primal_graph, betas=betas, min_threshold_wt=alt_min)
    assert np.allclose(N.distances, alt_distances, atol=0.001, rtol=0)

    # check for malformed signatures
    with pytest.raises(TypeError):
        networks.NetworkLayerFromNX('boo', distances=distances)
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph)  # no betas or distances
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph, distances=None, betas=None)
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph, distances=[])
    with pytest.raises(ValueError):
        networks.NetworkLayerFromNX(primal_graph, betas=[])
Example #5
def test_decomposed_local_centrality():
    # centralities on the original nodes within the decomposed network should equal those from the non-decomposed workflow
    betas = np.array([-0.02, -0.01, -0.005, -0.0008, -0.0])
    distances = networks.distance_from_beta(betas)
    measure_keys = ('node_density',
                    'node_farness',
                    'node_cycles',
                    'node_harmonic',
                    'node_beta',
                    'segment_density',
                    'segment_harmonic',
                    'segment_beta',
                    'node_betweenness',
                    'node_betweenness_beta',
                    'segment_betweenness')
    # test a decomposed graph
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)  # generate node and edge maps
    measures_data = centrality.local_centrality(node_data,
                                                edge_data,
                                                node_edge_map,
                                                distances,
                                                betas,
                                                measure_keys,
                                                angular=False)
    G_decomposed = graphs.nX_decompose(G, 20)
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G_decomposed)
    checks.check_network_maps(node_data, edge_data, node_edge_map)
    measures_data_decomposed = centrality.local_centrality(node_data,
                                                           edge_data,
                                                           node_edge_map,
                                                           distances,
                                                           betas,
                                                           measure_keys,
                                                           angular=False)
    # test harmonic closeness on original nodes for non-decomposed vs decomposed
    d_range = len(distances)
    m_range = len(measure_keys)
    assert measures_data.shape == (m_range, d_range, len(G))
    assert measures_data_decomposed.shape == (m_range, d_range, len(G_decomposed))
    original_node_idx = np.where(node_data[:, 3] == 0)
    # with increasing decomposition:
    # - node-based measures will not match
    # - node-based segment measures will match, since these measure to the cut endpoints per distance threshold
    # - segment betweenness won't match, since it doesn't measure to the cut endpoints
    for m_idx in range(m_range):
        print(m_idx)
        for d_idx in range(d_range):
            match = np.allclose(measures_data[m_idx][d_idx], measures_data_decomposed[m_idx][d_idx][original_node_idx],
                                atol=0.1, rtol=0)  # relax precision
            if not match:
                print('key', measure_keys[m_idx], 'dist:', distances[d_idx], 'match:', match)
            if m_idx in [5, 6, 7]:  # segment_density, segment_harmonic, segment_beta
                assert match
Example #6
def avg_distances(beta):
    # looking for the average weight from 0 to d_max based on an impedance curve to d_max
    d = networks.distance_from_beta(beta)
    # area under the curve from 0 to d_max
    a = ((np.exp(-beta * d) - 1) / -beta)
    # divide by base (distance) for height, which gives weight
    w = a / d
    # then solve for the avg_d
    avg_d = -np.log(w) / beta
    # return the average distance (scalar or array form, matching the input)
    return avg_d
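
The derivation above can be sanity-checked numerically: the mean weight over [0, d_max] is w = (1 - exp(-beta * d_max)) / (beta * d_max), and inverting the decay curve gives avg_d = -ln(w) / beta. A minimal standalone sketch, assuming only numpy and the positive-beta convention used above (d_max is passed explicitly here rather than derived via distance_from_beta):

import numpy as np

def avg_distance_check(beta, d_max):
    # mean decay weight over [0, d_max]: integrate exp(-beta * x), then divide by d_max
    w = (1 - np.exp(-beta * d_max)) / (beta * d_max)
    # invert the decay curve to find the distance corresponding to that weight
    return -np.log(w) / beta

print(avg_distance_check(0.01, 400.0))  # approx. 140.5, i.e. well under half of d_max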
Example #7
def test_distance_from_beta():
    # some basic checks using float form
    for b, d in zip([0.04, 0.0025, 0.0], [100, 1600, np.inf]):
        # simple straight check against corresponding distance
        assert networks.distance_from_beta(b) == np.array([d])
        # circular check
        assert networks.beta_from_distance(networks.distance_from_beta(b)) == b
        # array form check
        assert networks.distance_from_beta(np.array([b])) == np.array([d])
    # check that custom min_threshold_wt works
    arr = networks.distance_from_beta(0.04, min_threshold_wt=0.001)
    assert np.allclose(arr, np.array([172.69388197455342]), atol=0.001, rtol=0)
    # check on array form
    arr = networks.distance_from_beta([0.04, 0.0025, 0.0])
    assert np.allclose(arr, np.array([100, 1600, np.inf]), atol=0.001, rtol=0)
    # check for type error
    with pytest.raises(TypeError):
        networks.distance_from_beta('boo')
    # check that invalid beta values raise an error
    # zero and negative betas should raise, in both int and float form
    for b in [-0.04, 0, -0, -0.0]:
        with pytest.raises(ValueError):
            networks.distance_from_beta(b)
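
The circular checks above follow from the threshold relationship between beta and distance: the decay curve exp(-beta * d) falls to the minimum weight threshold w_min at d_max = -ln(w_min) / beta. A minimal sketch, assuming the default threshold is exp(-4), which is consistent with the 0.04 <-> 100 and 0.0025 <-> 1600 pairings in the test:

import numpy as np

def d_max_sketch(beta, min_threshold_wt=np.exp(-4)):
    # solve exp(-beta * d) == min_threshold_wt for d
    return -np.log(min_threshold_wt) / beta

print(d_max_sketch(0.04))  # -> 100.0
print(d_max_sketch(0.04, min_threshold_wt=0.001))  # -> 172.69388197455342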
Example #8
def test_local_centrality():
    '''
    Also tested indirectly via test_networks.test_compute_centrality.

    Test centrality methods against NetworkX where possible - i.e. harmonic closeness and betweenness.
    Note that NetworkX's improved closeness is not the same as the derivation used in this package.
    NetworkX doesn't have a maximum distance cutoff, so run on the whole graph (low beta / high distance).
    '''
    # load the test graph
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)  # generate node and edge maps
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data, node_edge_map)
    # plots for debugging

    # needs a beta close enough to zero (i.e. a large enough distance threshold) so that cutoffs aren't encountered
    betas = np.array([-0.02, -0.01, -0.005, -0.0008, -0.0])
    distances = networks.distance_from_beta(betas)
    # set the keys - add shuffling to be sure various orders work
    measure_keys = [
        'node_density',
        'node_farness',
        'node_cycles',
        'node_harmonic',
        'node_beta',
        'segment_density',
        'segment_harmonic',
        'segment_beta',
        'node_betweenness',
        'node_betweenness_beta',
        'segment_betweenness'
    ]
    np.random.shuffle(measure_keys)  # in place
    measure_keys = tuple(measure_keys)
    # generate the measures
    measures_data = centrality.local_centrality(node_data,
                                                edge_data,
                                                node_edge_map,
                                                distances,
                                                betas,
                                                measure_keys,
                                                angular=False)
    node_density = measures_data[measure_keys.index('node_density')]
    node_farness = measures_data[measure_keys.index('node_farness')]
    node_cycles = measures_data[measure_keys.index('node_cycles')]
    node_harmonic = measures_data[measure_keys.index('node_harmonic')]
    node_beta = measures_data[measure_keys.index('node_beta')]
    segment_density = measures_data[measure_keys.index('segment_density')]
    segment_harmonic = measures_data[measure_keys.index('segment_harmonic')]
    segment_beta = measures_data[measure_keys.index('segment_beta')]
    node_betweenness = measures_data[measure_keys.index('node_betweenness')]
    node_betweenness_beta = measures_data[measure_keys.index('node_betweenness_beta')]
    segment_betweenness = measures_data[measure_keys.index('segment_betweenness')]
    # post-compute improved closeness
    improved_closeness = node_density / node_farness / node_density
    # angular keys
    measure_keys_angular = [
        'node_harmonic_angular',
        'segment_harmonic_hybrid',
        'node_betweenness_angular',
        'segment_betweeness_hybrid'
    ]
    np.random.shuffle(measure_keys_angular)  # in place
    measure_keys_angular = tuple(measure_keys_angular)
    # generate the angular measures
    measures_data_angular = centrality.local_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys_angular,
                                                        angular=True)
    node_harmonic_angular = measures_data_angular[measure_keys_angular.index('node_harmonic_angular')]
    segment_harmonic_hybrid = measures_data_angular[measure_keys_angular.index('segment_harmonic_hybrid')]
    node_betweenness_angular = measures_data_angular[measure_keys_angular.index('node_betweenness_angular')]
    segment_betweeness_hybrid = measures_data_angular[measure_keys_angular.index('segment_betweeness_hybrid')]

    # test node density
    # node density count doesn't include self-node
    # connected component == 48 == len(G) - 4
    # isolated looping component == 3
    # isolated edge == 1
    # isolated node == 0
    for n in node_density[4]:  # infinite distance threshold, so no cutoff effects
        assert n in [48, 3, 1, 0]
    # test harmonic closeness vs NetworkX
    nx_harm_cl = nx.harmonic_centrality(G_round_trip, distance='length')
    nx_harm_cl = np.array([v for v in nx_harm_cl.values()])
    assert np.allclose(nx_harm_cl, node_harmonic[4], atol=0.001, rtol=0)

    # test betweenness vs NetworkX
    # set endpoint counting to false and do not normalise
    nx_betw = nx.betweenness_centrality(G_round_trip, weight='length', endpoints=False, normalized=False)
    nx_betw = np.array([v for v in nx_betw.values()])
    # nx betweenness gives 0.5 instead of 1 for nodes on the disconnected looping component
    # nx presumably splits the count between two equidistant routes, so only the fraction is aggregated
    assert np.allclose(nx_betw[:52], node_betweenness[4][:52], atol=0.001, rtol=0)

    # test against various distances
    for d_idx in range(len(distances)):
        dist_cutoff = distances[d_idx]
        beta = betas[d_idx]

        # do the comparisons array-wise so that betweenness can be aggregated
        betw = np.full(G.number_of_nodes(), 0.0)
        betw_wt = np.full(G.number_of_nodes(), 0.0)
        dens = np.full(G.number_of_nodes(), 0.0)
        far_imp = np.full(G.number_of_nodes(), 0.0)
        far_dist = np.full(G.number_of_nodes(), 0.0)
        harmonic_cl = np.full(G.number_of_nodes(), 0.0)
        grav = np.full(G.number_of_nodes(), 0.0)
        cyc = np.full(G.number_of_nodes(), 0.0)

        for src_idx in range(len(G)):
            # get shortest path maps
            tree_map, tree_edges = centrality.shortest_path_tree(edge_data,
                                                                 node_edge_map,
                                                                 src_idx,
                                                                 dist_cutoff,
                                                                 angular=False)
            tree_preds = tree_map[:, 1]
            tree_dists = tree_map[:, 2]
            tree_imps = tree_map[:, 3]
            tree_cycles = tree_map[:, 4]
            for n_idx in G.nodes():
                # skip self nodes
                if n_idx == src_idx:
                    continue
                # get distance and impedance
                dist = tree_dists[n_idx]
                imp = tree_imps[n_idx]
                # continue if exceeds max
                if np.isinf(dist) or dist > dist_cutoff:
                    continue
                # aggregate values
                dens[src_idx] += 1
                far_imp[src_idx] += imp
                far_dist[src_idx] += dist
                harmonic_cl[src_idx] += 1 / imp
                grav[src_idx] += np.exp(beta * dist)
                # cycles
                if tree_cycles[n_idx]:
                    cyc[src_idx] += 1
                # BETWEENNESS
                # only process betweenness in one direction
                if n_idx < src_idx:
                    continue
                # betweenness - only counting truly between vertices, not starting and ending verts
                inter_idx = tree_preds[n_idx]
                # isolated nodes will have no predecessors
                if np.isnan(inter_idx):
                    continue
                inter_idx = int(inter_idx)  # np.int is deprecated
                while True:
                    # break out of while loop if the intermediary has reached the source node
                    if inter_idx == src_idx:
                        break
                    betw[inter_idx] += 1
                    betw_wt[inter_idx] += np.exp(beta * dist)
                    # follow
                    inter_idx = int(tree_preds[inter_idx])
        improved_cl = dens / far_dist / dens

        assert np.allclose(node_density[d_idx], dens, atol=0.001, rtol=0)
        assert np.allclose(node_farness[d_idx], far_dist, atol=0.01, rtol=0)  # relax precision
        assert np.allclose(node_cycles[d_idx], cyc, atol=0.001, rtol=0)
        assert np.allclose(node_harmonic[d_idx], harmonic_cl, atol=0.001, rtol=0)
        assert np.allclose(node_beta[d_idx], grav, atol=0.001, rtol=0)
        assert np.allclose(improved_closeness[d_idx], improved_cl, equal_nan=True, atol=0.001, rtol=0)
        assert np.allclose(node_betweenness[d_idx], betw, atol=0.001, rtol=0)
        assert np.allclose(node_betweenness_beta[d_idx], betw_wt, atol=0.001, rtol=0)

        # TODO: are there possibly ways to test segment_density, harmonic_segment, segment_beta, segment_betweenness
        # for infinite distance, the segment density should match the sum of reachable segments
        length_sum = 0
        for s, e, d in G_round_trip.edges(data=True):
            length_sum += d['length']
        reachable_length_sum = length_sum - \
                               (G_round_trip[50][51]['length'] +
                                G_round_trip[52][53]['length'] +
                                G_round_trip[53][54]['length'] +
                                G_round_trip[54][55]['length'] +
                                G_round_trip[52][55]['length'])
        assert np.allclose(segment_density[-1][:49], reachable_length_sum, atol=0.01, rtol=0)  # relax precision

    # check that problematic keys are caught
    for angular, k in zip([False, True], ['node_harmonic', 'node_harmonic_angular']):
        # catch typos
        with pytest.raises(ValueError):
            centrality.local_centrality(node_data,
                                        edge_data,
                                        node_edge_map,
                                        distances,
                                        betas,
                                        ('typo_key',),
                                        angular=angular)
        # catch duplicates
        with pytest.raises(ValueError):
            centrality.local_centrality(node_data,
                                        edge_data,
                                        node_edge_map,
                                        distances,
                                        betas,
                                        (k, k),
                                        angular=angular)
        # catch mixed angular and non-angular keys
        with pytest.raises(ValueError):
            centrality.local_centrality(node_data,
                                        edge_data,
                                        node_edge_map,
                                        distances,
                                        betas,
                                        ('node_density', 'node_harmonic_angular'),
                                        angular=False)
Example #9
def test_compute_centrality(primal_graph):
    """
    Underlying methods also tested via test_networks.test_network_centralities
    """
    betas = np.array([0.01, 0.005])
    distances = networks.distance_from_beta(betas)
    # generate data structures
    N = networks.NetworkLayerFromNX(primal_graph, distances=distances)
    node_data = N._node_data
    edge_data = N._edge_data
    node_edge_map = N._node_edge_map

    # CHECK NODE BASED
    node_measures = ['node_density',
                     'node_farness',
                     'node_cycles',
                     'node_harmonic',
                     'node_beta',
                     'node_betweenness',
                     'node_betweenness_beta']
    node_measures_ang = ['node_harmonic_angular',
                         'node_betweenness_angular']

    # check measures against underlying method
    N = networks.NetworkLayerFromNX(primal_graph, distances=distances)
    N.node_centrality(measures=['node_density'])
    # test against underlying method
    measures_data = centrality.local_node_centrality(node_data,
                                                     edge_data,
                                                     node_edge_map,
                                                     distances,
                                                     betas,
                                                     measure_keys=('node_density',))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['centrality']['node_density'][d_key], measures_data[0][d_idx])
    # also check the number of returned types for a few assortments of metrics
    np.random.shuffle(node_measures)  # in place
    # not necessary to do all labels, first few should do
    for min_idx in range(3):
        measure_keys = np.array(node_measures[min_idx:])
        N = networks.NetworkLayerFromNX(primal_graph, distances=distances)
        N.node_centrality(measures=node_measures)
        # test against underlying method
        measures_data = centrality.local_node_centrality(node_data,
                                                         edge_data,
                                                         node_edge_map,
                                                         distances,
                                                         betas,
                                                         measure_keys=tuple(measure_keys))
        for m_idx, measure_name in enumerate(measure_keys):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(N.metrics['centrality'][measure_name][d_key],
                                   measures_data[m_idx][d_idx], atol=0.001, rtol=0)
    # check that angular gets passed through
    N_ang = networks.NetworkLayerFromNX(primal_graph, distances=[2000])
    N_ang.node_centrality(measures=['node_harmonic_angular'],
                          angular=True)
    N = networks.NetworkLayerFromNX(primal_graph, distances=[2000])
    N.node_centrality(measures=['node_harmonic'],
                      angular=False)
    assert not np.allclose(N_ang.metrics['centrality']['node_harmonic_angular'][2000],
                           N.metrics['centrality']['node_harmonic'][2000], atol=0.001, rtol=0)
    # check that typos, duplicates, and mixed angular / non-angular are caught
    with pytest.raises(ValueError):
        N.node_centrality(measures=['spelling_typo'])
    with pytest.raises(ValueError):
        N.node_centrality(measures=['node_density', 'node_density'])
    with pytest.raises(ValueError):
        N.node_centrality(measures=['node_density', 'node_harmonic_angular'])

    # CHECK SEGMENTISED
    segment_measures = ['segment_density',
                        'segment_harmonic',
                        'segment_beta',
                        'segment_betweenness']
    segment_measures_ang = ['segment_harmonic_hybrid',
                            'segment_betweeness_hybrid']

    # check measures against underlying method
    N = networks.NetworkLayerFromNX(primal_graph, distances=distances)
    N.segment_centrality(measures=['segment_density'])
    # test against underlying method
    measures_data = centrality.local_segment_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys=('segment_density',))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['centrality']['segment_density'][d_key], measures_data[0][d_idx])
    # also check the number of returned types for a few assortments of metrics
    np.random.shuffle(segment_measures)  # in place
    # not necessary to do all labels, first few should do
    for min_idx in range(3):
        measure_keys = np.array(segment_measures[min_idx:])
        N = networks.NetworkLayerFromNX(primal_graph,
                                        distances=distances)
        N.segment_centrality(measures=segment_measures)
        # test against underlying method
        measures_data = centrality.local_segment_centrality(node_data,
                                                            edge_data,
                                                            node_edge_map,
                                                            distances,
                                                            betas,
                                                            measure_keys=tuple(measure_keys))
        for m_idx, measure_name in enumerate(measure_keys):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(N.metrics['centrality'][measure_name][d_key],
                                   measures_data[m_idx][d_idx], atol=0.001, rtol=0)
    # check that angular gets passed through
    N_ang = networks.NetworkLayerFromNX(primal_graph, distances=[2000])
    N_ang.segment_centrality(measures=['segment_harmonic_hybrid'],
                             angular=True)
    N = networks.NetworkLayerFromNX(primal_graph, distances=[2000])
    N.segment_centrality(measures=['segment_harmonic'],
                         angular=False)
    assert not np.allclose(N_ang.metrics['centrality']['segment_harmonic_hybrid'][2000],
                           N.metrics['centrality']['segment_harmonic'][2000], atol=0.001, rtol=0)
    # check that typos, duplicates, and mixed angular / non-angular are caught
    with pytest.raises(ValueError):
        N.segment_centrality(measures=['spelling_typo'])
    with pytest.raises(ValueError):
        N.segment_centrality(measures=['segment_density', 'segment_density'])
    with pytest.raises(ValueError):
        N.segment_centrality(measures=['segment_density', 'segment_harmonic_hybrid'])

    # check that the deprecated method raises:
    with pytest.raises(DeprecationWarning):
        N.compute_centrality()
Example #10
def test_Network_Layer(primal_graph):
    # manual graph maps for comparison
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    x_arr = node_data[:, 0]
    y_arr = node_data[:, 1]
    betas = [0.02, 0.005]
    distances = networks.distance_from_beta(betas)

    # test NetworkLayer's class
    for d, b in zip([distances, None], [None, betas]):
        for angular in [True, False]:
            N = networks.NetworkLayer(node_uids,
                                      node_data,
                                      edge_data,
                                      node_edge_map,
                                      distances=d,
                                      betas=b)
            assert np.allclose(N.uids, node_uids, atol=0.001, rtol=0)
            assert np.allclose(N._node_data, node_data, atol=0.001, rtol=0)
            assert np.allclose(N._edge_data, edge_data, atol=0.001, rtol=0)
            assert np.allclose(N.distances, distances, atol=0.001,
                               rtol=0)  # inferred automatically when only betas provided
            assert np.allclose(N.betas, betas, atol=0.001,
                               rtol=0)  # inferred automatically when only distances provided
            assert N._min_threshold_wt == checks.def_min_thresh_wt
            assert np.allclose(N.node_x_arr, x_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_y_arr, y_arr, atol=0.001, rtol=0)
            assert np.allclose(N.node_live_arr, node_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_lengths_arr, edge_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_angles_arr, edge_data[:, 3], atol=0.001, rtol=0)
            assert np.allclose(N.edge_impedance_factors_arr, edge_data[:, 4], atol=0.001, rtol=0)
            assert np.allclose(N.edge_in_bearings_arr, edge_data[:, 5], atol=0.001, rtol=0)
            assert np.allclose(N.edge_out_bearings_arr, edge_data[:, 6], atol=0.001, rtol=0)

    # test round-trip graph to and from NetworkLayer
    N = networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances=distances)
    G_round_trip = N.to_networkX()
    # graph_maps_from_nX generates implicit live (all True) and weight (all 1) attributes if missing
    # i.e. can't simply check that all nodes equal, so check properties manually
    for n, d in primal_graph.nodes(data=True):
        assert n in G_round_trip
        assert G_round_trip.nodes[n]['x'] == d['x']
        assert G_round_trip.nodes[n]['y'] == d['y']
    # edges can be checked en masse
    assert G_round_trip.edges == primal_graph.edges
    # check alternate min_threshold_wt gets passed through successfully
    alt_min = 0.02
    alt_distances = networks.distance_from_beta(betas, min_threshold_wt=alt_min)
    N = networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              betas=betas,
                              min_threshold_wt=alt_min)
    assert np.allclose(N.distances, alt_distances, atol=0.001, rtol=0)
    # check for malformed signatures
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids[:-1],
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data[:, :-1],
                              edge_data,
                              node_edge_map,
                              distances)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data[:, :-1],
                              node_edge_map,
                              distances)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map)  # no betas or distances
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances=None,
                              betas=None)
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              distances=[])
    with pytest.raises(ValueError):
        networks.NetworkLayer(node_uids,
                              node_data,
                              edge_data,
                              node_edge_map,
                              betas=[])
Example #11
def test_compute_stats(primal_graph):
    """
    Test stats component
    """
    betas = np.array([0.01, 0.005])
    distances = networks.distance_from_beta(betas)
    # network layer
    N_single = networks.NetworkLayerFromNX(primal_graph, distances=distances)
    N_multi = networks.NetworkLayerFromNX(primal_graph, distances=distances)
    node_map = N_multi._node_data
    edge_map = N_multi._edge_data
    node_edge_map = N_multi._node_edge_map
    # data layer
    data_dict = mock.mock_data_dict(primal_graph)
    D_single = layers.DataLayerFromDict(data_dict)
    D_multi = layers.DataLayerFromDict(data_dict)
    # check single metrics independently against underlying for some use-cases, e.g. hill, non-hill, accessibility...
    D_single.assign_to_network(N_single, max_dist=500)
    D_multi.assign_to_network(N_multi, max_dist=500)
    # generate some mock landuse data
    mock_numeric = mock.mock_numerical_data(len(data_dict), num_arrs=2)
    # generate stats
    D_single.compute_stats(stats_keys='boo', stats_data_arrs=mock_numeric[0])
    D_single.compute_stats(stats_keys='baa', stats_data_arrs=mock_numeric[1])
    D_multi.compute_stats(stats_keys=['boo', 'baa'],
                          stats_data_arrs=mock_numeric)
    # test against underlying method
    data_map = D_single._data
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.aggregate_stats(node_map,
                             edge_map,
                             node_edge_map,
                             data_map,
                             distances,
                             betas,
                             numerical_arrays=mock_numeric)
    stats_keys = [
        'max', 'min', 'sum', 'sum_weighted', 'mean', 'mean_weighted',
        'variance', 'variance_weighted'
    ]
    stats_data = [
        stats_max, stats_min, stats_sum, stats_sum_wt, stats_mean,
        stats_mean_wt, stats_variance, stats_variance_wt
    ]
    for num_idx, num_label in enumerate(['boo', 'baa']):
        for s_key, stats in zip(stats_keys, stats_data):
            for d_idx, d_key in enumerate(distances):
                # check one-at-a-time computed vs multiply computed
                assert np.allclose(
                    N_single.metrics['stats'][num_label][s_key][d_key],
                    N_multi.metrics['stats'][num_label][s_key][d_key],
                    atol=0.001,
                    rtol=0,
                    equal_nan=True)
                # check one-at-a-time against manual
                assert np.allclose(
                    N_single.metrics['stats'][num_label][s_key][d_key],
                    stats[num_idx][d_idx],
                    atol=0.001,
                    rtol=0,
                    equal_nan=True)
                # check multiply computed against manual
                assert np.allclose(
                    N_multi.metrics['stats'][num_label][s_key][d_key],
                    stats[num_idx][d_idx],
                    atol=0.001,
                    rtol=0,
                    equal_nan=True)
    # check that problematic keys and data arrays are caught
    for labels, arrs, err in (
        (['a'], mock_numeric, ValueError),  # mismatching lengths
        (['a', 'b'], None, TypeError),  # missing arrays
        (['a', 'b'], [], ValueError),  # missing arrays
        (None, mock_numeric, TypeError),  # missing labels
        ([], mock_numeric, ValueError)):  # missing labels
        with pytest.raises(err):
            D_multi.compute_stats(stats_keys=labels, stats_data_arrs=arrs)
Example #12
def network_generator():
    for betas in [[0.008], [0.008, 0.002]]:
        distances = networks.distance_from_beta(betas)
        yield distances, betas
Example #13
def test_aggregate_landuses_categorical_components(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # set parameters
    betas = np.array([0.02, 0.01, 0.005, 0.0025])
    distances = networks.distance_from_beta(betas)
    qs = np.array([0, 1, 2])
    mock_categorical = mock.mock_categorical_data(len(data_map))
    landuse_classes, landuse_encodings = layers.encode_categorical(mock_categorical)
    mock_matrix = np.full((len(landuse_classes), len(landuse_classes)), 1)
    # set the keys - add shuffling to be sure various orders work
    hill_keys = np.arange(4)
    np.random.shuffle(hill_keys)
    non_hill_keys = np.arange(3)
    np.random.shuffle(non_hill_keys)
    ac_keys = np.array([1, 2, 5])
    np.random.shuffle(ac_keys)
    # generate
    mu_data_hill, mu_data_other, ac_data, ac_data_wt = data.aggregate_landuses(node_data,
                                                                               edge_data,
                                                                               node_edge_map,
                                                                               data_map,
                                                                               distances,
                                                                               betas,
                                                                               landuse_encodings=landuse_encodings,
                                                                               qs=qs,
                                                                               mixed_use_hill_keys=hill_keys,
                                                                               mixed_use_other_keys=non_hill_keys,
                                                                               accessibility_keys=ac_keys,
                                                                               cl_disparity_wt_matrix=mock_matrix,
                                                                               angular=False)
    # hill
    hill = mu_data_hill[np.where(hill_keys == 0)][0]
    hill_branch_wt = mu_data_hill[np.where(hill_keys == 1)][0]
    hill_pw_wt = mu_data_hill[np.where(hill_keys == 2)][0]
    hill_disp_wt = mu_data_hill[np.where(hill_keys == 3)][0]
    # non hill
    shannon = mu_data_other[np.where(non_hill_keys == 0)][0]
    gini = mu_data_other[np.where(non_hill_keys == 1)][0]
    raos = mu_data_other[np.where(non_hill_keys == 2)][0]
    # access non-weighted
    ac_1_nw = ac_data[np.where(ac_keys == 1)][0]
    ac_2_nw = ac_data[np.where(ac_keys == 2)][0]
    ac_5_nw = ac_data[np.where(ac_keys == 5)][0]
    # access weighted
    ac_1_w = ac_data_wt[np.where(ac_keys == 1)][0]
    ac_2_w = ac_data_wt[np.where(ac_keys == 2)][0]
    ac_5_w = ac_data_wt[np.where(ac_keys == 5)][0]
    # test manual metrics against all nodes
    mu_max_unique = len(landuse_classes)
    # test against various distances
    for d_idx in range(len(distances)):
        dist_cutoff = distances[d_idx]
        beta = betas[d_idx]
        for src_idx in range(len(primal_graph)):
            reachable_data, reachable_data_dist, tree_preds = data.aggregate_to_src_idx(src_idx,
                                                                                        node_data,
                                                                                        edge_data,
                                                                                        node_edge_map,
                                                                                        data_map,
                                                                                        dist_cutoff)
            # counts of each class type (array sized to the max number of unique classes - not just those within max distance)
            cl_counts = np.full(mu_max_unique, 0)
            # nearest of each class type (likewise)
            cl_nearest = np.full(mu_max_unique, np.inf)
            # aggregate
            a_1_nw = 0
            a_2_nw = 0
            a_5_nw = 0
            a_1_w = 0
            a_2_w = 0
            a_5_w = 0
            # iterate reachable
            for data_idx, (reachable, data_dist) in enumerate(zip(reachable_data, reachable_data_dist)):
                if not reachable:
                    continue
                cl = landuse_encodings[data_idx]
                # double check distance is within threshold
                assert data_dist <= dist_cutoff
                # update the class counts
                cl_counts[cl] += 1
                # if distance is nearer, update the nearest distance array too
                if data_dist < cl_nearest[cl]:
                    cl_nearest[cl] = data_dist
                # aggregate accessibility codes
                if cl == 1:
                    a_1_nw += 1
                    a_1_w += np.exp(-beta * data_dist)
                elif cl == 2:
                    a_2_nw += 1
                    a_2_w += np.exp(-beta * data_dist)
                elif cl == 5:
                    a_5_nw += 1
                    a_5_w += np.exp(-beta * data_dist)
            # assertions
            assert ac_1_nw[d_idx, src_idx] == a_1_nw
            assert ac_2_nw[d_idx, src_idx] == a_2_nw
            assert ac_5_nw[d_idx, src_idx] == a_5_nw

            assert ac_1_w[d_idx, src_idx] == a_1_w
            assert ac_2_w[d_idx, src_idx] == a_2_w
            assert ac_5_w[d_idx, src_idx] == a_5_w

            assert hill[0, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 0)
            assert hill[1, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 1)
            assert hill[2, d_idx, src_idx] == diversity.hill_diversity(cl_counts, 2)

            assert hill_branch_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 0, beta)
            assert hill_branch_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 1, beta)
            assert hill_branch_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_branch_distance_wt(cl_counts, cl_nearest, 2, beta)

            assert hill_pw_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 0, beta)
            assert hill_pw_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 1, beta)
            assert hill_pw_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_distance_wt(cl_counts, cl_nearest, 2, beta)

            assert hill_disp_wt[0, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 0)
            assert hill_disp_wt[1, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 1)
            assert hill_disp_wt[2, d_idx, src_idx] == \
                   diversity.hill_diversity_pairwise_matrix_wt(cl_counts, mock_matrix, 2)

            assert shannon[d_idx, src_idx] == diversity.shannon_diversity(cl_counts)
            assert gini[d_idx, src_idx] == diversity.gini_simpson_diversity(cl_counts)
            assert raos[d_idx, src_idx] == diversity.raos_quadratic_diversity(cl_counts, mock_matrix)

    # check that angular is passed-through
    # actual angular tests happen in test_shortest_path_tree()
    # here the emphasis is simply on checking that the angular instruction gets chained through

    # setup dual data
    G_dual = graphs.nX_to_dual(primal_graph)
    node_labels_dual, node_data_dual, edge_data_dual, node_edge_map_dual = graphs.graph_maps_from_nX(G_dual)
    data_dict_dual = mock.mock_data_dict(G_dual, random_seed=13)
    data_uids_dual, data_map_dual = layers.data_map_from_dict(data_dict_dual)
    data_map_dual = data.assign_to_network(data_map_dual, node_data_dual, edge_data_dual, node_edge_map_dual, 500)
    mock_categorical = mock.mock_categorical_data(len(data_map_dual))
    landuse_classes_dual, landuse_encodings_dual = layers.encode_categorical(mock_categorical)
    mock_matrix = np.full((len(landuse_classes_dual), len(landuse_classes_dual)), 1)

    mu_hill_dual, mu_other_dual, ac_dual, ac_wt_dual = data.aggregate_landuses(node_data_dual,
                                                                               edge_data_dual,
                                                                               node_edge_map_dual,
                                                                               data_map_dual,
                                                                               distances,
                                                                               betas,
                                                                               landuse_encodings_dual,
                                                                               qs=qs,
                                                                               mixed_use_hill_keys=hill_keys,
                                                                               mixed_use_other_keys=non_hill_keys,
                                                                               accessibility_keys=ac_keys,
                                                                               cl_disparity_wt_matrix=mock_matrix,
                                                                               angular=True)

    mu_hill_dual_sidestep, mu_other_dual_sidestep, ac_dual_sidestep, ac_wt_dual_sidestep = \
        data.aggregate_landuses(node_data_dual,
                                edge_data_dual,
                                node_edge_map_dual,
                                data_map_dual,
                                distances,
                                betas,
                                landuse_encodings_dual,
                                qs=qs,
                                mixed_use_hill_keys=hill_keys,
                                mixed_use_other_keys=non_hill_keys,
                                accessibility_keys=ac_keys,
                                cl_disparity_wt_matrix=mock_matrix,
                                angular=False)

    assert not np.allclose(mu_hill_dual, mu_hill_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(mu_other_dual, mu_other_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(ac_dual, ac_dual_sidestep, atol=0.001, rtol=0)
    assert not np.allclose(ac_wt_dual, ac_wt_dual_sidestep, atol=0.001, rtol=0)
Example #14
def test_local_aggregator_numerical_components(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # for debugging
    # from cityseer.tools import plot
    # plot.plot_graph_maps(node_uids, node_data, edge_data, data_map)
    # set parameters - use a large enough distance such that simple non-weighted checks can be run for max, mean, variance
    betas = np.array([0.00125])
    distances = networks.distance_from_beta(betas)
    mock_numerical = mock.mock_numerical_data(len(data_dict), num_arrs=2, random_seed=0)
    # compute
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.aggregate_stats(node_data,
                             edge_data,
                             node_edge_map,
                             data_map,
                             distances,
                             betas,
                             numerical_arrays=mock_numerical,
                             angular=False)
    # non connected portions of the graph will have different stats
    # used manual data plots from test_assign_to_network() to see which nodes the data points are assigned to
    # connected graph is from 0 to 48 -> assigned data points are all except 5, 8, 9, 17, 18, 29, 33, 38, 48
    connected_nodes_idx = list(range(49))
    # and the respective data assigned to connected portion of the graph
    connected_data_idx = [i for i in range(len(data_dict)) if i not in [5, 8, 9, 17, 18, 29, 33, 38, 48]]
    # isolated node = 49 -> assigned no data points
    # isolated nodes = 50 & 51 -> assigned data points = 17, 33
    # isolated loop = 52, 53, 54, 55 -> assigned data points = 5, 8, 9, 18, 29, 38, 48
    isolated_nodes_idx = [52, 53, 54, 55]
    isolated_data_idx = [5, 8, 9, 18, 29, 38, 48]
    for stats_idx in range(len(mock_numerical)):
        for d_idx in range(len(distances)):
            # max
            assert np.isnan(stats_max[stats_idx, d_idx, 49])
            assert np.allclose(stats_max[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].max(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_max[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].max(), atol=0.001, rtol=0)
            assert np.allclose(stats_max[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].max(), atol=0.001, rtol=0)
            # min
            assert np.isnan(stats_min[stats_idx, d_idx, 49])
            assert np.allclose(stats_min[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].min(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_min[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].min(), atol=0.001, rtol=0)
            assert np.allclose(stats_min[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].min(), atol=0.001, rtol=0)
            # sum
            assert stats_sum[stats_idx, d_idx, 49] == 0
            assert np.allclose(stats_sum[stats_idx, d_idx, [50, 51]],
                               mock_numerical[stats_idx, [17, 33]].sum(), atol=0.001, rtol=0)
            assert np.allclose(stats_sum[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].sum(), atol=0.001, rtol=0)
            assert np.allclose(stats_sum[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].sum(), atol=0.001, rtol=0)
            # mean
            assert np.isnan(stats_mean[stats_idx, d_idx, 49])
            assert np.allclose(stats_mean[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].mean(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_mean[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].mean(), atol=0.001, rtol=0)
            assert np.allclose(stats_mean[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].mean(), atol=0.001, rtol=0)
            # variance
            assert np.isnan(stats_variance[stats_idx, d_idx, 49])
            assert np.allclose(stats_variance[stats_idx, d_idx, [50, 51]], mock_numerical[stats_idx, [17, 33]].var(),
                               atol=0.001, rtol=0)
            assert np.allclose(stats_variance[stats_idx, d_idx, isolated_nodes_idx],
                               mock_numerical[stats_idx, isolated_data_idx].var(), atol=0.001, rtol=0)
            assert np.allclose(stats_variance[stats_idx, d_idx, connected_nodes_idx],
                               mock_numerical[stats_idx, connected_data_idx].var(), atol=0.001, rtol=0)
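
The hard-coded connected vs isolated node groupings above can be cross-checked against the mock graph directly with networkx; a small sketch, assuming the mock helper lives at cityseer.tools.mock:

import networkx as nx
from cityseer.tools import mock  # assumed import path for the mock helper used above

G = mock.mock_graph()
# print the node indices per connected component to verify the groupings asserted above:
# the main component (0-48), the isolated node (49), the isolated edge (50-51)
# and the isolated loop (52-55)
for component in nx.connected_components(G):
    print(sorted(component))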
Example #15
def test_compute_aggregated_A():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # network layer
    N = networks.Network_Layer_From_nX(G, distances)
    node_map = N._node_data
    edge_map = N._edge_data
    node_edge_map = N._node_edge_map
    # data layer
    data_dict = mock.mock_data_dict(G)
    qs = np.array([0, 1, 2])
    D = layers.Data_Layer_From_Dict(data_dict)
    # check single metrics independently against underlying for some use-cases, e.g. hill, non-hill, accessibility...
    D.assign_to_network(N, max_dist=500)
    # generate some mock landuse data
    landuse_labels = mock.mock_categorical_data(len(data_dict))
    landuse_classes, landuse_encodings = layers.encode_categorical(
        landuse_labels)
    # compute hill mixed uses
    D.compute_aggregated(landuse_labels,
                         mixed_use_keys=['hill_branch_wt'],
                         qs=qs)
    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              landuse_encodings,
                              qs=qs,
                              mixed_use_hill_keys=np.array([1]))
    for q_idx, q_key in enumerate(qs):
        for d_idx, d_key in enumerate(distances):
            assert np.allclose(
                N.metrics['mixed_uses']['hill_branch_wt'][q_key][d_key],
                mu_data_hill[0][q_idx][d_idx],
                atol=0.001,
                rtol=0)
    # gini simpson
    D.compute_aggregated(landuse_labels, mixed_use_keys=['gini_simpson'])
    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              landuse_encodings,
                              mixed_use_other_keys=np.array([1]))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['mixed_uses']['gini_simpson'][d_key],
                           mu_data_other[0][d_idx],
                           atol=0.001,
                           rtol=0)
    # accessibilities
    D.compute_aggregated(landuse_labels, accessibility_keys=['c'])
    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              landuse_encodings,
                              accessibility_keys=np.array([landuse_classes.index('c')]))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(
            N.metrics['accessibility']['non_weighted']['c'][d_key],
            ac_data[0][d_idx],
            atol=0.001,
            rtol=0)
        assert np.allclose(N.metrics['accessibility']['weighted']['c'][d_key],
                           ac_data_wt[0][d_idx],
                           atol=0.001,
                           rtol=0)
    # also check the number of returned types for a few assortments of metrics
    mixed_uses_hill_types = np.array([
        'hill', 'hill_branch_wt', 'hill_pairwise_wt', 'hill_pairwise_disparity'
    ])
    mixed_use_other_types = np.array(
        ['shannon', 'gini_simpson', 'raos_pairwise_disparity'])
    ac_codes = np.array(landuse_classes)

    mu_hill_random = np.arange(len(mixed_uses_hill_types))
    np.random.shuffle(mu_hill_random)

    mu_other_random = np.arange(len(mixed_use_other_types))
    np.random.shuffle(mu_other_random)

    ac_random = np.arange(len(landuse_classes))
    np.random.shuffle(ac_random)

    # mock disparity matrix
    mock_disparity_wt_matrix = np.full(
        (len(landuse_classes), len(landuse_classes)), 1)

    # not necessary to test all combinations; the first few suffice
    for mu_h_min in range(3):
        mu_h_keys = np.array(mu_hill_random[mu_h_min:])

        for mu_o_min in range(3):
            mu_o_keys = np.array(mu_other_random[mu_o_min:])

            for ac_min in range(3):
                ac_keys = np.array(ac_random[ac_min:])

                # in the final case, set accessibility to a single code otherwise an error would be raised
                if len(mu_h_keys) == 0 and len(mu_o_keys) == 0 and len(
                        ac_keys) == 0:
                    ac_keys = np.array([0])

                # randomise order of keys and metrics
                mu_h_metrics = mixed_uses_hill_types[mu_h_keys]
                mu_o_metrics = mixed_use_other_types[mu_o_keys]
                ac_metrics = ac_codes[ac_keys]

                N_temp = networks.Network_Layer_From_nX(G, distances)
                D_temp = layers.Data_Layer_From_Dict(data_dict)
                D_temp.assign_to_network(N_temp, max_dist=500)
                D_temp.compute_aggregated(
                    landuse_labels,
                    mixed_use_keys=list(mu_h_metrics) + list(mu_o_metrics),
                    accessibility_keys=ac_metrics,
                    cl_disparity_wt_matrix=mock_disparity_wt_matrix,
                    qs=qs)

                # test against underlying method
                mu_data_hill, mu_data_other, ac_data, ac_data_wt, stats_sum, stats_sum_wt, \
                stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
                    data.local_aggregator(node_map,
                                          edge_map,
                                          node_edge_map,
                                          data_map,
                                          distances,
                                          betas,
                                          landuse_encodings,
                                          qs=qs,
                                          mixed_use_hill_keys=mu_h_keys,
                                          mixed_use_other_keys=mu_o_keys,
                                          accessibility_keys=ac_keys,
                                          cl_disparity_wt_matrix=mock_disparity_wt_matrix)

                for mu_h_idx, mu_h_met in enumerate(mu_h_metrics):
                    for q_idx, q_key in enumerate(qs):
                        for d_idx, d_key in enumerate(distances):
                            assert np.allclose(
                                N_temp.metrics['mixed_uses'][mu_h_met][q_key]
                                [d_key],
                                mu_data_hill[mu_h_idx][q_idx][d_idx],
                                atol=0.001,
                                rtol=0)

                for mu_o_idx, mu_o_met in enumerate(mu_o_metrics):
                    for d_idx, d_key in enumerate(distances):
                        assert np.allclose(
                            N_temp.metrics['mixed_uses'][mu_o_met][d_key],
                            mu_data_other[mu_o_idx][d_idx],
                            atol=0.001,
                            rtol=0)

                for ac_idx, ac_met in enumerate(ac_metrics):
                    for d_idx, d_key in enumerate(distances):
                        assert np.allclose(N_temp.metrics['accessibility']
                                           ['non_weighted'][ac_met][d_key],
                                           ac_data[ac_idx][d_idx],
                                           atol=0.001,
                                           rtol=0)
                        assert np.allclose(N_temp.metrics['accessibility']
                                           ['weighted'][ac_met][d_key],
                                           ac_data_wt[ac_idx][d_idx],
                                           atol=0.001,
                                           rtol=0)

    # most integrity checks happen in underlying method, though check here for mismatching labels length and typos
    with pytest.raises(ValueError):
        D.compute_aggregated(landuse_labels[:-1], mixed_use_keys=['shannon'])
    with pytest.raises(ValueError):
        D.compute_aggregated(landuse_labels, mixed_use_keys=['spelling_typo'])
    # don't check accessibility_labels for typos - because only warning is triggered (not all labels will be in all data)
    # check that unassigned data layer flags
    with pytest.raises(ValueError):
        D_new = layers.Data_Layer_From_Dict(data_dict)
        D_new.compute_aggregated(landuse_labels, mixed_use_keys=['shannon'])
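# The mixed-use keys exercised above correspond to standard diversity
# indices; a minimal NumPy sketch of the underlying formulas (an
# illustration of the mathematics only, not the package's compiled
# implementation):
import numpy as np

def diversity_sketch(class_counts, q=0):
    probs = class_counts / class_counts.sum()  # assumes non-zero counts
    shannon = -np.sum(probs * np.log(probs))
    gini_simpson = 1 - np.sum(probs ** 2)
    if q == 1:
        hill = np.exp(shannon)  # Hill number converges to exp(Shannon) at q=1
    else:
        hill = np.sum(probs ** q) ** (1 / (1 - q))
    return hill, shannon, gini_simpson

hill, shannon, gini = diversity_sketch(np.array([2.0, 3.0, 5.0]), q=0)
assert hill == 3  # at q=0 the Hill number reduces to class richness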
Example #16
def test_aggregate_landuses_signatures(primal_graph):
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    # setup data
    data_dict = mock.mock_data_dict(primal_graph, random_seed=13)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    data_map = data.assign_to_network(data_map, node_data, edge_data, node_edge_map, 500)
    # set parameters
    betas = np.array([0.02, 0.01, 0.005, 0.0025])
    distances = networks.distance_from_beta(betas)
    qs = np.array([0, 1, 2])
    mock_categorical = mock.mock_categorical_data(len(data_map))
    landuse_classes, landuse_encodings = layers.encode_categorical(mock_categorical)
    # check that empty land_use encodings are caught
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                mixed_use_hill_keys=np.array([0]))
    # check that unequal land_use encodings vs data map lengths are caught
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                landuse_encodings=landuse_encodings[:-1],
                                mixed_use_other_keys=np.array([0]))
    # check that no provided metrics flags
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                landuse_encodings=landuse_encodings)
    # check that missing qs flags
    with pytest.raises(ValueError):
        data.aggregate_landuses(node_data,
                                edge_data,
                                node_edge_map,
                                data_map,
                                distances,
                                betas,
                                mixed_use_hill_keys=np.array([0]),
                                landuse_encodings=landuse_encodings)
    # check that problematic mixed use and accessibility keys are caught
    for mu_h_key, mu_o_key, ac_key in [
        # negatives
        ([-1], [1], [1]),
        ([1], [-1], [1]),
        ([1], [1], [-1]),
        # out of range
        ([4], [1], [1]),
        ([1], [3], [1]),
        ([1], [1], [max(landuse_encodings) + 1]),
        # duplicates
        ([1, 1], [1], [1]),
        ([1], [1, 1], [1]),
        ([1], [1], [1, 1])]:
        with pytest.raises(ValueError):
            data.aggregate_landuses(node_data,
                                    edge_data,
                                    node_edge_map,
                                    data_map,
                                    distances,
                                    betas,
                                    landuse_encodings,
                                    qs=qs,
                                    mixed_use_hill_keys=np.array(mu_h_key),
                                    mixed_use_other_keys=np.array(mu_o_key),
                                    accessibility_keys=np.array(ac_key))
    for h_key, o_key in (([3], []), ([], [2])):
        # check that missing matrix is caught for disparity weighted indices
        with pytest.raises(ValueError):
            data.aggregate_landuses(node_data,
                                    edge_data,
                                    node_edge_map,
                                    data_map,
                                    distances,
                                    betas,
                                    landuse_encodings=landuse_encodings,
                                    qs=qs,
                                    mixed_use_hill_keys=np.array(h_key),
                                    mixed_use_other_keys=np.array(o_key))
        # check that non-square disparity matrix is caught
        mock_matrix = np.full((len(landuse_classes), len(landuse_classes)), 1)
        with pytest.raises(ValueError):
            data.aggregate_landuses(node_data,
                                    edge_data,
                                    node_edge_map,
                                    data_map,
                                    distances,
                                    betas,
                                    landuse_encodings=landuse_encodings,
                                    qs=qs,
                                    mixed_use_hill_keys=np.array(h_key),
                                    mixed_use_other_keys=np.array(o_key),
                                    cl_disparity_wt_matrix=mock_matrix[:-1])
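# The signature tests above assume key arrays are screened for negatives,
# out-of-range indices, and duplicates before aggregation runs; a
# simplified guard along those lines (hypothetical helper, not the
# package's own validation code):
import numpy as np

def check_keys(keys, max_idx):
    keys = np.asarray(keys)
    if np.any(keys < 0) or np.any(keys > max_idx):
        raise ValueError('Keys must fall within the available index range.')
    if len(np.unique(keys)) != len(keys):
        raise ValueError('Duplicate keys are not permitted.')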
Example #17
def test_compute_centrality():
    '''
    Underlying method also tested via test_networks.test_network_centralities
    '''
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # generate data structures
    N = networks.Network_Layer_From_nX(G, distances)
    node_data = N._node_data
    edge_data = N._edge_data
    node_edge_map = N._node_edge_map
    # check measures against underlying method
    N = networks.Network_Layer_From_nX(G, distances)
    N.compute_centrality(measures=['node_density'])
    # test against underlying method
    measures_data = centrality.local_centrality(
        node_data,
        edge_data,
        node_edge_map,
        distances,
        betas,
        measure_keys=('node_density', ))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['centrality']['node_density'][d_key],
                           measures_data[0][d_idx])
    # also check the number of returned types for a few assortments of metrics
    measures = [
        'node_density', 'node_farness', 'node_cycles', 'node_harmonic',
        'segment_density', 'node_betweenness', 'segment_betweenness'
    ]
    np.random.shuffle(measures)  # in place
    # not necessary to test all combinations; the first few suffice
    for min_idx in range(3):
        measure_keys = np.array(measures[min_idx:])
        N = networks.Network_Layer_From_nX(G, distances)
        N.compute_centrality(measures=list(measure_keys))
        # test against underlying method
        measures_data = centrality.local_centrality(
            node_data,
            edge_data,
            node_edge_map,
            distances,
            betas,
            measure_keys=tuple(measure_keys))
        for m_idx, measure_name in enumerate(measure_keys):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(
                    N.metrics['centrality'][measure_name][d_key],
                    measures_data[m_idx][d_idx],
                    atol=0.001,
                    rtol=0)
    # check that angular gets passed through
    N_ang = networks.Network_Layer_From_nX(G, distances=[2000])
    N_ang.compute_centrality(measures=['node_harmonic_angular'], angular=True)
    N = networks.Network_Layer_From_nX(G, distances=[2000])
    N.compute_centrality(measures=['node_harmonic'], angular=False)
    assert not np.allclose(
        N_ang.metrics['centrality']['node_harmonic_angular'][2000],
        N.metrics['centrality']['node_harmonic'][2000],
        atol=0.001,
        rtol=0)
    # check that typos, duplicates, and mixed angular / non-angular are caught
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['spelling_typo'])
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['node_density', 'node_density'])
    with pytest.raises(ValueError):
        N.compute_centrality(
            measures=['node_harmonic', 'node_harmonic_angular'])
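# The raises above imply that measure names are checked for membership,
# uniqueness, and a consistent angular flag; a schematic of such a guard
# (hypothetical helper, not the package's own code):
def check_measures(measures, valid_measures, angular):
    if len(set(measures)) != len(measures):
        raise ValueError('Duplicate measures are not permitted.')
    for m in measures:
        if m not in valid_measures:
            raise ValueError(f'Unrecognised measure: {m}')
        if m.endswith('_angular') != angular:
            raise ValueError('Mixed angular and non-angular measures.')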
Example #18
def test_compute_aggregated_B():
    '''
    Test stats component
    '''
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # network layer
    N = networks.Network_Layer_From_nX(G, distances)
    node_map = N._node_data
    edge_map = N._edge_data
    node_edge_map = N._node_edge_map
    # data layer
    data_dict = mock.mock_data_dict(G)
    qs = np.array([0, 1, 2])
    D = layers.Data_Layer_From_Dict(data_dict)
    # check single metrics independently against underlying for some use-cases, e.g. hill, non-hill, accessibility...
    D.assign_to_network(N, max_dist=500)

    # generate some mock landuse data
    mock_numeric = mock.mock_numerical_data(len(data_dict), num_arrs=2)

    # generate stats
    D.compute_aggregated(stats_keys=['boo', 'baa'],
                         stats_data_arrs=mock_numeric)

    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              numerical_arrays=mock_numeric)

    stats_keys = [
        'max', 'min', 'sum', 'sum_weighted', 'mean', 'mean_weighted',
        'variance', 'variance_weighted'
    ]
    stats_data = [
        stats_max, stats_min, stats_sum, stats_sum_wt, stats_mean,
        stats_mean_wt, stats_variance, stats_variance_wt
    ]

    for num_idx, num_label in enumerate(['boo', 'baa']):
        for s_key, stats in zip(stats_keys, stats_data):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(N.metrics['stats'][num_label][s_key][d_key],
                                   stats[num_idx][d_idx],
                                   atol=0.001,
                                   rtol=0)

    # check that mismatching label and array lengths are caught
    for labels, arrs in (
        (['a'], mock_numeric),  # mismatching lengths
        (['a', 'b'], None),  # missing arrays
        (None, mock_numeric)):  # missing labels
        with pytest.raises(ValueError):
            D.compute_aggregated(stats_keys=labels, stats_data_arrs=arrs)
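# The '*_weighted' statistics compared above use distance-decayed weights;
# a minimal sketch of the presumed weighting, with beta expressed as a
# positive decay rate (formulas only, not the compiled aggregator):
import numpy as np

def weighted_stats_sketch(vals, dists, beta):
    w = np.exp(-beta * dists)  # negative exponential distance decay
    sum_wt = np.sum(vals * w)
    mean_wt = sum_wt / np.sum(w)
    var_wt = np.sum(w * (vals - mean_wt) ** 2) / np.sum(w)
    return sum_wt, mean_wt, var_wt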
Example #19
def test_decomposed_local_centrality(primal_graph):
    # centralities on the original nodes of a decomposed network should match those from the non-decomposed workflow
    betas = np.array([0.02, 0.01, 0.005, 0.0008, 0.0])
    distances = networks.distance_from_beta(betas)
    node_measure_keys = ('node_density', 'node_farness', 'node_cycles',
                         'node_harmonic', 'node_beta', 'node_betweenness',
                         'node_betweenness_beta')
    segment_measure_keys = ('segment_density', 'segment_harmonic',
                            'segment_beta', 'segment_betweenness')
    # test a decomposed graph
    G_decomposed = graphs.nX_decompose(primal_graph, 20)
    # graph maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        primal_graph)  # generate node and edge maps
    node_uids_decomp, node_data_decomp, edge_data_decomp, node_edge_map_decomp = graphs.graph_maps_from_nX(
        G_decomposed)
    # non-decomposed case
    node_measures_data = centrality.local_node_centrality(node_data,
                                                          edge_data,
                                                          node_edge_map,
                                                          distances,
                                                          betas,
                                                          node_measure_keys,
                                                          angular=False)
    # decomposed case
    node_measures_data_decomposed = centrality.local_node_centrality(
        node_data_decomp,
        edge_data_decomp,
        node_edge_map_decomp,
        distances,
        betas,
        node_measure_keys,
        angular=False)
    # node
    d_range = len(distances)
    m_range = len(node_measure_keys)
    assert node_measures_data.shape == (m_range, d_range, len(primal_graph))
    assert node_measures_data_decomposed.shape == (m_range, d_range,
                                                   len(G_decomposed))
    # with increasing decomposition:
    # - node based measures will not match
    # - closeness segment measures will match - these measure to the cut endpoints per the distance thresholds
    # - betweenness segment measures won't match - they don't measure to the cut endpoints
    # segment versions
    segment_measures_data = centrality.local_segment_centrality(
        node_data,
        edge_data,
        node_edge_map,
        distances,
        betas,
        segment_measure_keys,
        angular=False)
    segment_measures_data_decomposed = centrality.local_segment_centrality(
        node_data_decomp,
        edge_data_decomp,
        node_edge_map_decomp,
        distances,
        betas,
        segment_measure_keys,
        angular=False)
    m_range = len(segment_measure_keys)
    assert segment_measures_data.shape == (m_range, d_range, len(primal_graph))
    assert segment_measures_data_decomposed.shape == (m_range, d_range,
                                                      len(G_decomposed))
    for m_idx in range(m_range):
        for d_idx in range(d_range):
            match = np.allclose(
                segment_measures_data[m_idx][d_idx],
                # compare against the original 57 elements (those present prior to decomposition)
                segment_measures_data_decomposed[m_idx][d_idx][:57],
                atol=0.1,
                rtol=0)  # relax precision
            if m_idx in (0, 1, 2):  # closeness-type segment measures should match
                assert match
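# Decomposition subdivides each edge into equal steps no longer than the
# given maximum while preserving total edge length, which is presumably
# why the closeness-type segment measures above remain comparable; a quick
# arithmetic sketch of that property:
import numpy as np

edge_len, max_step = 100.0, 20.0
n_segments = int(np.ceil(edge_len / max_step))
assert np.isclose(n_segments * (edge_len / n_segments), edge_len)  # total length preserved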
Example #20
def test_local_node_centrality(primal_graph):
    """
    Also tested indirectly via test_networks.test_compute_centrality

    Test centrality methods where possible against NetworkX - i.e. harmonic closeness and betweenness
    Note that NetworkX's improved closeness is not the same as the derivation used in this package
    NetworkX doesn't have a maximum distance cutoff, so run on the whole graph (low beta / high distance)
    """
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        primal_graph)
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data,
                                             node_edge_map)
    # needs a small enough beta (i.e. a large enough distance threshold) so that cutoffs aren't encountered
    betas = np.array([0.02, 0.01, 0.005, 0.0008, 0.0])
    distances = networks.distance_from_beta(betas)
    # set the keys - add shuffling to be sure various orders work
    measure_keys = [
        'node_density', 'node_farness', 'node_cycles', 'node_harmonic',
        'node_beta', 'node_betweenness', 'node_betweenness_beta'
    ]
    np.random.shuffle(measure_keys)  # in place
    measure_keys = tuple(measure_keys)
    # generate the measures
    measures_data = centrality.local_node_centrality(node_data, edge_data,
                                                     node_edge_map, distances,
                                                     betas, measure_keys)
    node_density = measures_data[measure_keys.index('node_density')]
    node_farness = measures_data[measure_keys.index('node_farness')]
    node_cycles = measures_data[measure_keys.index('node_cycles')]
    node_harmonic = measures_data[measure_keys.index('node_harmonic')]
    node_beta = measures_data[measure_keys.index('node_beta')]
    node_betweenness = measures_data[measure_keys.index('node_betweenness')]
    node_betweenness_beta = measures_data[measure_keys.index(
        'node_betweenness_beta')]
    # improved closeness is derived after the fact
    improved_closeness = node_density / node_farness / node_density

    # test node density
    # node density count doesn't include self-node
    # connected component == 49 (a 50-node component, excluding the self-node)
    # isolated looping component == 3
    # isolated edge == 1
    # isolated node == 0
    for n in node_density[4]:  # infinite distance, so no threshold cutoffs apply
        assert n in [49, 3, 1, 0]

    # test harmonic closeness vs NetworkX
    nx_harm_cl = nx.harmonic_centrality(G_round_trip, distance='length')
    nx_harm_cl = np.array([v for v in nx_harm_cl.values()])
    assert np.allclose(nx_harm_cl, node_harmonic[4], atol=0.001, rtol=0)

    # test betweenness vs NetworkX
    # set endpoint counting to false and do not normalise
    # nx node centrality NOT implemented for MultiGraph
    G_non_multi = nx.Graph()  # don't change to MultiGraph!!!
    G_non_multi.add_nodes_from(G_round_trip.nodes())
    for s, e, k, d in G_round_trip.edges(keys=True, data=True):
        assert k == 0
        G_non_multi.add_edge(s, e, **d)
    nx_betw = nx.betweenness_centrality(G_non_multi,
                                        weight='length',
                                        endpoints=False,
                                        normalized=False)
    nx_betw = np.array([v for v in nx_betw.values()])
    # nx betweenness gives 0.5 instead of 1 for all disconnected looping component nodes
    # nx presumably takes equidistant routes into account, in which case only the fraction is aggregated
    assert np.allclose(nx_betw[:52],
                       node_betweenness[4][:52],
                       atol=0.001,
                       rtol=0)

    # do the comparisons array-wise so that betweenness can be aggregated
    d_n = len(distances)
    betw = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    betw_wt = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    dens = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    far_short_dist = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    far_simpl_dist = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    harmonic_cl = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    grav = np.full((d_n, primal_graph.number_of_nodes()), 0.0)
    cyc = np.full((d_n, primal_graph.number_of_nodes()), 0.0)

    for src_idx in range(len(primal_graph)):
        # get shortest path maps
        tree_map, tree_edges = centrality.shortest_path_tree(edge_data,
                                                             node_edge_map,
                                                             src_idx,
                                                             max(distances),
                                                             angular=False)
        tree_nodes = np.where(tree_map[:, 0])[0]
        tree_preds = tree_map[:, 1]
        tree_short_dist = tree_map[:, 2]
        tree_simpl_dist = tree_map[:, 3]
        tree_cycles = tree_map[:, 4]
        for to_idx in tree_nodes:
            # skip self nodes
            if to_idx == src_idx:
                continue
            # get shortest / simplest distances
            to_short_dist = tree_short_dist[to_idx]
            to_simpl_dist = tree_simpl_dist[to_idx]
            cycles = tree_cycles[to_idx]
            # continue if exceeds max
            if np.isinf(to_short_dist):
                continue
            for d_idx in range(len(distances)):
                dist_cutoff = distances[d_idx]
                beta = betas[d_idx]
                if to_short_dist <= dist_cutoff:
                    # don't exceed threshold
                    # aggregate values
                    dens[d_idx][src_idx] += 1
                    far_short_dist[d_idx][src_idx] += to_short_dist
                    far_simpl_dist[d_idx][src_idx] += to_simpl_dist
                    harmonic_cl[d_idx][src_idx] += 1 / to_short_dist
                    grav[d_idx][src_idx] += np.exp(-beta * to_short_dist)
                    # cycles
                    cyc[d_idx][src_idx] += cycles
                    # only process betweenness in one direction
                    if to_idx < src_idx:
                        continue
                    # betweenness - only counting truly between vertices, not starting and ending verts
                    inter_idx = tree_preds[to_idx]
                    # isolated nodes will have no predecessors
                    if np.isnan(inter_idx):
                        continue
                    inter_idx = int(inter_idx)  # avoid np.int (removed in newer NumPy)
                    while True:
                        # break out of while loop if the intermediary has reached the source node
                        if inter_idx == src_idx:
                            break
                        betw[d_idx][inter_idx] += 1
                        betw_wt[d_idx][inter_idx] += np.exp(-beta *
                                                            to_short_dist)
                        # follow
                        inter_idx = int(tree_preds[inter_idx])
    improved_cl = dens / far_short_dist / dens

    assert np.allclose(node_density, dens, atol=0.001, rtol=0)
    assert np.allclose(node_farness, far_short_dist, atol=0.01,
                       rtol=0)  # relax precision
    assert np.allclose(node_cycles, cyc, atol=0.001, rtol=0)
    assert np.allclose(node_harmonic, harmonic_cl, atol=0.001, rtol=0)
    assert np.allclose(node_beta, grav, atol=0.001, rtol=0)
    assert np.allclose(improved_closeness,
                       improved_cl,
                       equal_nan=True,
                       atol=0.001,
                       rtol=0)
    assert np.allclose(node_betweenness, betw, atol=0.001, rtol=0)
    assert np.allclose(node_betweenness_beta, betw_wt, atol=0.001, rtol=0)

    # catch typos
    with pytest.raises(ValueError):
        centrality.local_node_centrality(node_data, edge_data, node_edge_map,
                                         distances, betas, ('typo_key', ))
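# As a sanity anchor for the harmonic comparison above: harmonic closeness
# sums reciprocal shortest-path distances, which is easy to verify on a
# trivial graph using NetworkX alone:
import networkx as nx

G_tiny = nx.path_graph(3)  # 0 - 1 - 2 with unit-length edges
harm = nx.harmonic_centrality(G_tiny)
assert harm[0] == 1 / 1 + 1 / 2  # node 0 reaches nodes at distances 1 and 2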
Example #21
def network_generator():
    for betas in [[-0.008], [-0.008, -0.002]]:
        distances = networks.distance_from_beta(betas)
        G = mock.mock_graph()
        G = graphs.nX_simple_geoms(G)
        yield G, distances, betas
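# For orientation: the beta <-> distance conversions used throughout these
# examples follow from a negative exponential decay w = exp(beta * d),
# where the distance threshold is the point at which the weight falls to a
# minimum threshold weight (which appears to default to exp(-4) ~= 0.0183).
# A minimal sketch of that relationship:
import numpy as np

def distance_from_beta_sketch(beta, min_threshold_wt=np.exp(-4)):
    # solve exp(beta * d) = min_threshold_wt for d (beta is negative here)
    return np.log(min_threshold_wt) / beta

assert np.isclose(distance_from_beta_sketch(-0.008), 500)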