def test_decomposed_local_centrality():
    # centralities on the original nodes within the decomposed network should equal non-decomposed workflow
    betas = np.array([-0.02, -0.01, -0.005, -0.0008, -0.0])
    distances = networks.distance_from_beta(betas)
    measure_keys = ('node_density',
                    'node_farness',
                    'node_cycles',
                    'node_harmonic',
                    'node_beta',
                    'segment_density',
                    'segment_harmonic',
                    'segment_beta',
                    'node_betweenness',
                    'node_betweenness_beta',
                    'segment_betweenness')
    # test a decomposed graph
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
    measures_data = centrality.local_centrality(node_data,
                                                edge_data,
                                                node_edge_map,
                                                distances,
                                                betas,
                                                measure_keys,
                                                angular=False)
    G_decomposed = graphs.nX_decompose(G, 20)
    # generate node and edge maps for the decomposed graph
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G_decomposed)
    checks.check_network_maps(node_data, edge_data, node_edge_map)
    measures_data_decomposed = centrality.local_centrality(node_data,
                                                           edge_data,
                                                           node_edge_map,
                                                           distances,
                                                           betas,
                                                           measure_keys,
                                                           angular=False)
    # test harmonic closeness on original nodes for non-decomposed vs decomposed
    d_range = len(distances)
    m_range = len(measure_keys)
    assert measures_data.shape == (m_range, d_range, len(G))
    assert measures_data_decomposed.shape == (m_range, d_range, len(G_decomposed))
    original_node_idx = np.where(node_data[:, 3] == 0)
    # with increasing decomposition:
    # - node based measures will not match
    # - node based segment measures will match - these measure to the cut endpoints per thresholds
    # - betweenness based segment measures won't match - these don't measure to the cut endpoints
    for m_idx in range(m_range):
        print(m_idx)
        for d_idx in range(d_range):
            match = np.allclose(measures_data[m_idx][d_idx],
                                measures_data_decomposed[m_idx][d_idx][original_node_idx],
                                atol=0.1,
                                rtol=0)  # relax precision
            if not match:
                print('key', measure_keys[m_idx], 'dist:', distances[d_idx], 'match:', match)
            if m_idx in [5, 6, 7]:
                assert match

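# A minimal sketch of the decomposition round trip exercised above, assuming the same
# cityseer mock / graphs helpers imported by this suite; the 20m decomposition value
# mirrors the test setup, and the node-count comparison is illustrative only.
def _decomposition_sketch():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    G_decomposed = graphs.nX_decompose(G, 20)  # cut edges into segments of <= 20m
    # decomposition only interpolates new nodes, so the original nodes remain a subset
    assert len(G_decomposed) >= len(G)
    return G_decomposed
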
def compute_centrality(self, measures: Union[list, tuple] = None, angular: bool = False):
    # see centrality.local_centrality for integrity checks on closeness and betweenness keys
    # typos are caught below
    if not angular:
        heuristic = 'shortest (non-angular)'
        options = ('node_density',
                   'node_farness',
                   'node_cycles',
                   'node_harmonic',
                   'node_beta',
                   'segment_density',
                   'segment_harmonic',
                   'segment_beta',
                   'node_betweenness',
                   'node_betweenness_beta',
                   'segment_betweenness')
    else:
        heuristic = 'simplest (angular)'
        options = ('node_harmonic_angular',
                   'segment_harmonic_hybrid',
                   'node_betweenness_angular',
                   'segment_betweeness_hybrid')
    if measures is None:
        raise ValueError('Please select at least one measure to compute.')
    measure_keys = []
    for measure in measures:
        if measure not in options:
            raise ValueError(f'Invalid network measure: {measure}. '
                             f'Must be one of {", ".join(options)} when using {heuristic} path heuristic.')
        if measure in measure_keys:
            raise ValueError(f'Please remove duplicate measure: {measure}.')
        measure_keys.append(measure)
    measure_keys = tuple(measure_keys)
    if not checks.quiet_mode:
        logger.info(f'Computing {", ".join(measure_keys)} centrality measures using {heuristic} path heuristic.')
    measures_data = centrality.local_centrality(self._node_data,
                                                self._edge_data,
                                                self._node_edge_map,
                                                np.array(self._distances),
                                                np.array(self._betas),
                                                measure_keys,
                                                angular,
                                                suppress_progress=checks.quiet_mode)
    # write the results
    # writing a metric to the dictionary checks for a pre-existing key,
    # but writing the per-distance arrays overwrites any prior values
    for measure_idx, measure_name in enumerate(measure_keys):
        if measure_name not in self.metrics['centrality']:
            self.metrics['centrality'][measure_name] = {}
        for d_idx, d_key in enumerate(self._distances):
            self.metrics['centrality'][measure_name][d_key] = measures_data[measure_idx][d_idx]

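# A minimal usage sketch for compute_centrality, assuming the Network_Layer_From_nX
# constructor and mock graph helpers used elsewhere in this suite; the 800m distance
# threshold is an illustrative choice, not a value taken from the tests.
def _compute_centrality_sketch():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    N = networks.Network_Layer_From_nX(G, distances=[800])
    N.compute_centrality(measures=['node_harmonic', 'node_betweenness'])
    # results are keyed by measure name, then by distance threshold
    return N.metrics['centrality']['node_harmonic'][800]
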
def wrapper_func():
    '''
    node density invokes the aggregative workflow
    node betweenness invokes the betweenness workflow
    segment density invokes the segments workflow
    '''
    return centrality.local_centrality(node_data,
                                       edge_data,
                                       node_edge_map,
                                       distances,
                                       betas,
                                       ('node_density',  # 7.16s
                                        'node_betweenness',  # 8.08s - adds around 1s
                                        'segment_density',  # 11.2s - adds around 3s
                                        'segment_betweenness'),
                                       angular=False,
                                       suppress_progress=True)

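# A sketch of how the per-measure timings noted above might be gathered, assuming
# wrapper_func and its enclosing data structures (node_data, edge_data, etc.) are in
# scope; timeit is from the standard library and number=1 runs a single timed pass.
import timeit

def _time_wrapper_func():
    elapsed = timeit.timeit(wrapper_func, number=1)
    print(f'local_centrality: {elapsed:.2f}s')
    return elapsed
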
def test_compute_centrality():
    '''
    Underlying method also tested via test_networks.test_network_centralities
    '''
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # generate data structures
    N = networks.Network_Layer_From_nX(G, distances)
    node_data = N._node_data
    edge_data = N._edge_data
    node_edge_map = N._node_edge_map
    # check measures against the underlying method
    N = networks.Network_Layer_From_nX(G, distances)
    N.compute_centrality(measures=['node_density'])
    # test against the underlying method
    measures_data = centrality.local_centrality(node_data,
                                                edge_data,
                                                node_edge_map,
                                                distances,
                                                betas,
                                                measure_keys=('node_density',))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['centrality']['node_density'][d_key], measures_data[0][d_idx])
    # also check the number of returned types for a few assortments of metrics
    measures = ['node_density',
                'node_farness',
                'node_cycles',
                'node_harmonic',
                'segment_density',
                'node_betweenness',
                'segment_betweenness']
    np.random.shuffle(measures)  # in place
    # not necessary to do all labels, first few should do
    for min_idx in range(3):
        measure_keys = np.array(measures[min_idx:])
        N = networks.Network_Layer_From_nX(G, distances)
        N.compute_centrality(measures=measures)
        # test against the underlying method
        measures_data = centrality.local_centrality(node_data,
                                                    edge_data,
                                                    node_edge_map,
                                                    distances,
                                                    betas,
                                                    measure_keys=tuple(measure_keys))
        for m_idx, measure_name in enumerate(measure_keys):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(N.metrics['centrality'][measure_name][d_key],
                                   measures_data[m_idx][d_idx],
                                   atol=0.001,
                                   rtol=0)
    # check that angular gets passed through
    N_ang = networks.Network_Layer_From_nX(G, distances=[2000])
    N_ang.compute_centrality(measures=['node_harmonic_angular'], angular=True)
    N = networks.Network_Layer_From_nX(G, distances=[2000])
    N.compute_centrality(measures=['node_harmonic'], angular=False)
    assert not np.allclose(N_ang.metrics['centrality']['node_harmonic_angular'][2000],
                           N.metrics['centrality']['node_harmonic'][2000],
                           atol=0.001,
                           rtol=0)
    # check that typos, duplicates, and mixed angular / non-angular measures are caught
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['spelling_typo'])
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['node_density', 'node_density'])
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['harmonic_angle', 'node_harmonic_angular'])

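# A hedged sketch of the return-shape convention assumed throughout these tests:
# local_centrality returns an array indexed [measure][distance][node] (see the shape
# assertions in test_decomposed_local_centrality above), so each measure / distance
# pair yields one value per node. The helper name here is illustrative only.
def _unpack_measure(measures_data, measure_keys, key, d_idx):
    return measures_data[measure_keys.index(key)][d_idx]
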
def test_local_centrality():
    '''
    Also tested indirectly via test_networks.test_compute_centrality
    Test centrality methods where possible against NetworkX - i.e. harmonic closeness and betweenness
    Note that NetworkX improved closeness is not the same as the derivation used in this package
    NetworkX doesn't have a maximum distance cutoff, so run on the whole graph (low beta / high distance)
    '''
    # load the test graph
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    # generate node and edge maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data, node_edge_map)  # plots for debugging
    # needs a large enough beta so that distance thresholds aren't encountered
    betas = np.array([-0.02, -0.01, -0.005, -0.0008, -0.0])
    distances = networks.distance_from_beta(betas)
    # set the keys - add shuffling to be sure various orders work
    measure_keys = ['node_density',
                    'node_farness',
                    'node_cycles',
                    'node_harmonic',
                    'node_beta',
                    'segment_density',
                    'segment_harmonic',
                    'segment_beta',
                    'node_betweenness',
                    'node_betweenness_beta',
                    'segment_betweenness']
    np.random.shuffle(measure_keys)  # in place
    measure_keys = tuple(measure_keys)
    # generate the measures
    measures_data = centrality.local_centrality(node_data,
                                                edge_data,
                                                node_edge_map,
                                                distances,
                                                betas,
                                                measure_keys,
                                                angular=False)
    node_density = measures_data[measure_keys.index('node_density')]
    node_farness = measures_data[measure_keys.index('node_farness')]
    node_cycles = measures_data[measure_keys.index('node_cycles')]
    node_harmonic = measures_data[measure_keys.index('node_harmonic')]
    node_beta = measures_data[measure_keys.index('node_beta')]
    segment_density = measures_data[measure_keys.index('segment_density')]
    segment_harmonic = measures_data[measure_keys.index('segment_harmonic')]
    segment_beta = measures_data[measure_keys.index('segment_beta')]
    node_betweenness = measures_data[measure_keys.index('node_betweenness')]
    node_betweenness_beta = measures_data[measure_keys.index('node_betweenness_beta')]
    segment_betweenness = measures_data[measure_keys.index('segment_betweenness')]
    # post-compute improved closeness
    improved_closeness = node_density / node_farness / node_density
    # angular keys
    measure_keys_angular = ['node_harmonic_angular',
                            'segment_harmonic_hybrid',
                            'node_betweenness_angular',
                            'segment_betweeness_hybrid']
    np.random.shuffle(measure_keys_angular)  # in place
    measure_keys_angular = tuple(measure_keys_angular)
    # generate the angular measures
    measures_data_angular = centrality.local_centrality(node_data,
                                                        edge_data,
                                                        node_edge_map,
                                                        distances,
                                                        betas,
                                                        measure_keys_angular,
                                                        angular=True)
    node_harmonic_angular = measures_data_angular[measure_keys_angular.index('node_harmonic_angular')]
    segment_harmonic_hybrid = measures_data_angular[measure_keys_angular.index('segment_harmonic_hybrid')]
    node_betweenness_angular = measures_data_angular[measure_keys_angular.index('node_betweenness_angular')]
    segment_betweeness_hybrid = measures_data_angular[measure_keys_angular.index('segment_betweeness_hybrid')]
    # test node density
    # node density count doesn't include the self-node
    # connected component == 48 == len(G) - 4
    # isolated looping component == 3
    # isolated edge == 1
    # isolated node == 0
    for n in node_density[4]:  # infinite distance band - no cutoff clashes
        assert n in [48, 3, 1, 0]
    # test harmonic closeness vs NetworkX
    nx_harm_cl = nx.harmonic_centrality(G_round_trip, distance='length')
    nx_harm_cl = np.array([v for v in nx_harm_cl.values()])
    assert np.allclose(nx_harm_cl, node_harmonic[4], atol=0.001, rtol=0)
    # test betweenness vs NetworkX
    # set endpoint counting to false and do not normalise
    nx_betw = nx.betweenness_centrality(G_round_trip, weight='length', endpoints=False, normalized=False)
    nx_betw = np.array([v for v in nx_betw.values()])
    # nx betweenness gives 0.5 instead of 1 for nodes on the disconnected looping component (should be 1)
    # nx presumably takes the two equidistant routes into account, aggregating only the fraction,
    # so only compare the nodes on the connected component
    assert np.allclose(nx_betw[:52], node_betweenness[4][:52], atol=0.001, rtol=0)
    # test against various distances
    for d_idx in range(len(distances)):
        dist_cutoff = distances[d_idx]
        beta = betas[d_idx]
        # do the comparisons array-wise so that betweenness can be aggregated
        betw = np.full(G.number_of_nodes(), 0.0)
        betw_wt = np.full(G.number_of_nodes(), 0.0)
        dens = np.full(G.number_of_nodes(), 0.0)
        far_imp = np.full(G.number_of_nodes(), 0.0)
        far_dist = np.full(G.number_of_nodes(), 0.0)
        harmonic_cl = np.full(G.number_of_nodes(), 0.0)
        grav = np.full(G.number_of_nodes(), 0.0)
        cyc = np.full(G.number_of_nodes(), 0.0)
        for src_idx in range(len(G)):
            # get shortest path maps
            tree_map, tree_edges = centrality.shortest_path_tree(edge_data,
                                                                 node_edge_map,
                                                                 src_idx,
                                                                 dist_cutoff,
                                                                 angular=False)
            tree_preds = tree_map[:, 1]
            tree_dists = tree_map[:, 2]
            tree_imps = tree_map[:, 3]
            tree_cycles = tree_map[:, 4]
            for n_idx in G.nodes():
                # skip self nodes
                if n_idx == src_idx:
                    continue
                # get distance and impedance
                dist = tree_dists[n_idx]
                imp = tree_imps[n_idx]
                # continue if the distance exceeds the cutoff
                if np.isinf(dist) or dist > dist_cutoff:
                    continue
                # aggregate values
                dens[src_idx] += 1
                far_imp[src_idx] += imp
                far_dist[src_idx] += dist
                harmonic_cl[src_idx] += 1 / imp
                grav[src_idx] += np.exp(beta * dist)
                # cycles
                if tree_cycles[n_idx]:
                    cyc[src_idx] += 1
                # BETWEENNESS
                # only process betweenness in one direction
                if n_idx < src_idx:
                    continue
                # betweenness - only counting truly between vertices, not starting and ending verts
                inter_idx = tree_preds[n_idx]
                # isolated nodes will have no predecessors
                if np.isnan(inter_idx):
                    continue
                inter_idx = int(inter_idx)
                while True:
                    # break out of while loop if the intermediary has reached the source node
                    if inter_idx == src_idx:
                        break
                    betw[inter_idx] += 1
                    betw_wt[inter_idx] += np.exp(beta * dist)
                    # follow the chain of predecessors
                    inter_idx = int(tree_preds[inter_idx])
        improved_cl = dens / far_dist / dens
        assert np.allclose(node_density[d_idx], dens, atol=0.001, rtol=0)
        assert np.allclose(node_farness[d_idx], far_dist, atol=0.01, rtol=0)  # relax precision
        assert np.allclose(node_cycles[d_idx], cyc, atol=0.001, rtol=0)
        assert np.allclose(node_harmonic[d_idx], harmonic_cl, atol=0.001, rtol=0)
        assert np.allclose(node_beta[d_idx], grav, atol=0.001, rtol=0)
        assert np.allclose(improved_closeness[d_idx], improved_cl, equal_nan=True, atol=0.001, rtol=0)
        assert np.allclose(node_betweenness[d_idx], betw, atol=0.001, rtol=0)
        assert np.allclose(node_betweenness_beta[d_idx], betw_wt, atol=0.001, rtol=0)
    # TODO: are there ways to test segment_density, segment_harmonic, segment_beta, and segment_betweenness?
    # for infinite distance, the segment density should match the sum of reachable segment lengths
    length_sum = 0
    for s, e, d in G_round_trip.edges(data=True):
        length_sum += d['length']
    reachable_length_sum = length_sum - \
                           (G_round_trip[50][51]['length'] +
                            G_round_trip[52][53]['length'] +
                            G_round_trip[53][54]['length'] +
                            G_round_trip[54][55]['length'] +
                            G_round_trip[52][55]['length'])
    assert np.allclose(segment_density[-1][:49], reachable_length_sum, atol=0.01, rtol=0)  # relax precision
    # check that problematic keys are caught
    for angular, k in zip([False, True], ['node_harmonic', 'node_harmonic_angular']):
        # catch typos
        with pytest.raises(ValueError):
            centrality.local_centrality(node_data,
                                        edge_data,
                                        node_edge_map,
                                        distances,
                                        betas,
                                        ('typo_key',),
                                        angular=angular)
        # catch duplicates
        with pytest.raises(ValueError):
            centrality.local_centrality(node_data,
                                        edge_data,
                                        node_edge_map,
                                        distances,
                                        betas,
                                        (k, k),
                                        angular=angular)
        # catch mixed angular and non-angular keys
        with pytest.raises(ValueError):
            centrality.local_centrality(node_data,
                                        edge_data,
                                        node_edge_map,
                                        distances,
                                        betas,
                                        ('node_density', 'node_harmonic_angular'),
                                        angular=angular)

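# For context, a hedged sketch of the beta-to-distance relation relied on by the tests
# above: distance_from_beta derives the cutoff from beta as d_max = ln(w_min) / beta,
# where w_min is the minimum weight threshold (assumed here to default to exp(-4));
# verify against networks.distance_from_beta before relying on this. Uses the
# module-level numpy import.
def _distance_from_beta_sketch(beta: float, min_threshold_wt: float = 0.01831563888873418) -> float:
    return np.log(min_threshold_wt) / beta

# e.g. a beta of -0.01 corresponds to a cutoff of roughly 400m under these assumptions
assert np.isclose(_distance_from_beta_sketch(-0.01), 400.0)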