def fit(self, X, y): ## build graph of the citation network ids1 = X[:, 0] ids2 = X[:, 1] vertices = list(set(ids1.astype(str)).union(ids2.astype(str))) edges = [ tuple([str(row[0]), str(row[1])]) for row, link in zip(X, y) if link == 1 ] self.graph = igraph.Graph() self.graph.add_vertices(vertices) self.graph.add_edges(edges) self.di_network_graph = nx.DiGraph() self.di_network_graph.add_nodes_from(vertices) self.di_network_graph.add_edges_from(edges) self.un_network_graph = nx.Graph() self.un_network_graph.add_nodes_from(vertices) self.un_network_graph.add_edges_from(edges) vs = zip(vertices, range(len(vertices))) self.hash_vs = {a: b for a, b in vs} #print 'calculating shortest paths...' #self.dmatrix = np.array(self.graph.shortest_paths()) #uncomment # WARNING: cutoff should not be set in our final submission, which is equivalent to set it to infinity print 'calculating betweenness centrality' self.di_b_centrality = self.graph.betweenness(directed=True) self.un_b_centrality = self.graph.betweenness(directed=False) print 'calculating local edge connectivity' H = build_auxiliary_edge_connectivity(self.di_network_graph) R = build_residual_network(H, 'capacity') self.di_connectivity = dict.fromkeys(self.di_network_graph, dict()) for u, v in itertools.combinations(self.di_network_graph, 2): k = local_edge_connectivity(self.di_network_graph, u, v, auxiliary=H, residual=R) self.di_connectivity[u][v] = k H = build_auxiliary_edge_connectivity(self.un_network_graph) R = build_residual_network(H, 'capacity') self.un_connectivity = dict.fromkeys(self.un_network_graph, dict()) for u, v in itertools.combinations(self.un_network_graph, 2): k = local_edge_connectivity(self.un_network_graph, u, v, auxiliary=H, residual=R) self.un_connectivity[u][v] = k
def test_directed_edge_connectivity():
    """Directed cycles: connectivity 1 one-way, 2 with reciprocal edges."""
    G = nx.cycle_graph(10, create_using=nx.DiGraph())  # only one direction
    D = nx.cycle_graph(10).to_directed()  # 2 reciprocal edges
    for flow_func in flow_funcs:
        errmsg = f"Assertion failed in function: {flow_func.__name__}"
        # Global, pairwise-local, and pairwise-global forms must agree.
        for graph, expected in ((G, 1), (D, 2)):
            assert nx.edge_connectivity(graph, flow_func=flow_func) == expected, errmsg
            assert local_edge_connectivity(graph, 1, 4, flow_func=flow_func) == expected, errmsg
            assert nx.edge_connectivity(graph, 1, 4, flow_func=flow_func) == expected, errmsg
def test_directed_edge_connectivity():
    """Directed cycles: connectivity 1 one-way, 2 with reciprocal edges."""
    G = nx.cycle_graph(10, create_using=nx.DiGraph())  # only one direction
    D = nx.cycle_graph(10).to_directed()  # 2 reciprocal edges
    for flow_func in flow_funcs:
        # Format the failure message once per flow function.
        label = msg.format(flow_func.__name__)
        for graph, expected in ((G, 1), (D, 2)):
            assert_equal(expected,
                         nx.edge_connectivity(graph, flow_func=flow_func),
                         msg=label)
            assert_equal(expected,
                         local_edge_connectivity(graph, 1, 4, flow_func=flow_func),
                         msg=label)
            assert_equal(expected,
                         nx.edge_connectivity(graph, 1, 4, flow_func=flow_func),
                         msg=label)
def test_directed_edge_connectivity(): G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges for flow_func in flow_funcs: assert_equal(1, nx.edge_connectivity(G, flow_func=flow_func), msg=msg.format(flow_func.__name__)) assert_equal(1, local_edge_connectivity(G, 1, 4, flow_func=flow_func), msg=msg.format(flow_func.__name__)) assert_equal(1, nx.edge_connectivity(G, 1, 4, flow_func=flow_func), msg=msg.format(flow_func.__name__)) assert_equal(2, nx.edge_connectivity(D, flow_func=flow_func), msg=msg.format(flow_func.__name__)) assert_equal(2, local_edge_connectivity(D, 1, 4, flow_func=flow_func), msg=msg.format(flow_func.__name__)) assert_equal(2, nx.edge_connectivity(D, 1, 4, flow_func=flow_func), msg=msg.format(flow_func.__name__))
def test_brandes_erlebach():
    # Figure 1 chapter 7: Connectivity
    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
    G = nx.Graph()
    G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4),
                      (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8),
                      (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)])
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        label = msg.format(flow_func.__name__)
        # pairwise connectivity between nodes 1 and 11
        assert_equal(3, local_edge_connectivity(G, 1, 11, **kwargs), msg=label)
        assert_equal(3, nx.edge_connectivity(G, 1, 11, **kwargs), msg=label)
        assert_equal(2, local_node_connectivity(G, 1, 11, **kwargs), msg=label)
        assert_equal(2, nx.node_connectivity(G, 1, 11, **kwargs), msg=label)
        # global connectivity: node 5 has degree 2
        assert_equal(2, nx.edge_connectivity(G, **kwargs), msg=label)
        assert_equal(2, nx.node_connectivity(G, **kwargs), msg=label)
def connections(self):
    # References taken from NetworkX documentation
    # NOTE(review): reconstructed from collapsed source — the nested helper
    # appears to hold the entire body; confirm against the original layout.
    def check_connections(self):
        """
        Check that every pair of nodes in the navigation graph is connected.

        :return: a True/ False value depending on connections
        """
        count = 0
        # Load the navigation digraph from the CSV inputs.
        DG = Navigation.read_from_csv_get_attribute('edges.csv', 'nodes.csv', handicap_mode_flag=bool)
        H = build_auxiliary_edge_connectivity(DG)
        # And the function for building the residual network from the
        # flow package
        # Note that the auxiliary digraph has an edge attribute named capacity
        R = build_residual_network(H, 'capacity')
        # NOTE(review): dict.fromkeys(DG, dict()) gives every key the SAME
        # inner dict; harmless here because each result[u][v] is read
        # immediately after being written, but the dict itself is unreliable.
        result = dict.fromkeys(DG, dict())
        # Reuse the auxiliary digraph and the residual network by passing them
        # as parameters
        #print(local_edge_connectivity(DG, 'Indiana1', 'DisneyLandMonoRail'))
        for u, v in itertools.combinations(DG, 2):
            k = local_edge_connectivity(DG, u, v, auxiliary=H, residual=R)
            result[u][v] = k
            # print(u,v)
            # print(result[u][v])
            # Any zero-connectivity pair means the graph is not fully connected.
            if result[u][v] == 0:
                count = 1
                #print(u,v)
        if count == 1:
            return False
        else:
            return True
def calc_path_redundancy(graph, node, distances):
    """Determines the path redundancy (number of node/edge disjoint paths)
    from one specific node to all other nodes"""
    # NOTE: we calculate the minimum number of node independent paths as an
    # approximation (and not the maximum)
    n_nodes = graph.number_of_nodes()
    record_dtype = [('distance', 'float'),
                    ('count_node_disjoint_paths', 'uint'),
                    ('count_edge_disjoint_paths', 'uint')]
    path_redundancy = np.zeros(n_nodes - 1, dtype=record_dtype)
    # One record per node other than the source node.
    other_nodes = (v for v in graph.nodes() if v != node)
    for row, other in enumerate(other_nodes):
        record = path_redundancy[row]  # structured-array view; writes stick
        idx_cond = utils.square_to_condensed(node, other, n_nodes)
        record['distance'] = distances[idx_cond]
        record['count_node_disjoint_paths'] = \
            nx_con_approx.local_node_connectivity(graph, source=node, target=other)
        record['count_edge_disjoint_paths'] = \
            nx_con.local_edge_connectivity(graph, node, other)
    return path_redundancy
def connected_all(st_lst, H):
    """Return True iff every (u, v) pair in st_lst has edge connectivity > 0 in H."""
    #print H.edges()
    return all(local_edge_connectivity(H, u, v) > 0 for (u, v) in st_lst)
def avg_edge_connectivity(G, I, s):
    """Return the average local edge connectivity from s to the other nodes of I.

    Parameters:
        G: graph accepted by local_edge_connectivity.
        I: iterable of nodes, normally containing s.
        s: source node.

    Returns 0.0 when I holds no node besides s (the previous version raised
    ZeroDivisionError in that case).
    """
    denom = len(I) - 1
    if denom <= 0:
        # guard: no other endpoints to average over
        return 0.0
    total_connectivity = sum(local_edge_connectivity(G, s, t)
                             for t in I if t != s)
    avg_connectivity = total_connectivity / denom
    # TODO: normalize
    return avg_connectivity
def __select_removal_candidate(graph):
    """
    :type graph: networkx.classes.graph.Graph
    :rtype: tuple
    """
    # Yield edges whose endpoints remain connected after removal
    # (local edge connectivity > 1); return the first one, or None.
    redundant_edges = ((u, v) for u, v in graph.edges_iter()
                       if local_edge_connectivity(graph, u, v) > 1)
    return next(redundant_edges, None)
def connected_all(st_lst, H):
    """True when each source/target pair in st_lst has at least one path in H."""
    for pair in st_lst:
        src = pair[0]
        dst = pair[1]
        # Zero edge connectivity means the pair is disconnected.
        if not local_edge_connectivity(H, src, dst) > 0:
            return False
    return True
def test_brandes_erlebach():
    """Connectivity values for the Brandes/Erlebach example graph, checked
    against each configured max-flow function."""
    # Figure 1 chapter 7: Connectivity
    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
    G = nx.Graph()
    G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4),
                      (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8),
                      (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)])
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # pairwise edge/node connectivity between nodes 1 and 11
        assert_equal(3, local_edge_connectivity(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        assert_equal(3, nx.edge_connectivity(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        assert_equal(2, local_node_connectivity(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        assert_equal(2, nx.node_connectivity(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        # global connectivity
        assert_equal(2, nx.edge_connectivity(G, **kwargs),  # node 5 has degree 2
                     msg=msg.format(flow_func.__name__))
        assert_equal(2, nx.node_connectivity(G, **kwargs),
                     msg=msg.format(flow_func.__name__))
def test_brandes_erlebach():
    # Figure 1 chapter 7: Connectivity
    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
    edge_list = [
        (1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4),
        (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8),
        (7, 10), (8, 11), (9, 10), (9, 11), (10, 11),
    ]
    G = nx.Graph()
    G.add_edges_from(edge_list)
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        errmsg = f"Assertion failed in function: {flow_func.__name__}"
        # pairwise connectivity between nodes 1 and 11
        assert local_edge_connectivity(G, 1, 11, **kwargs) == 3, errmsg
        assert nx.edge_connectivity(G, 1, 11, **kwargs) == 3, errmsg
        assert local_node_connectivity(G, 1, 11, **kwargs) == 2, errmsg
        assert nx.node_connectivity(G, 1, 11, **kwargs) == 2, errmsg
        # global connectivity
        assert nx.edge_connectivity(G, **kwargs) == 2, errmsg
        assert nx.node_connectivity(G, **kwargs) == 2, errmsg
        # preflow_push ignores the cutoff and returns the exact value
        expected_with_cutoff = 3 if flow_func is flow.preflow_push else 2
        assert nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs) == expected_with_cutoff, errmsg
def state_features(self, G, K, T, profile):
    """Build the flat feature vector for one state of the ranked-pairs search.

    Concatenates, gated by params.use_* flags: degree-based features of G
    relative to the profile's initial edge set E_0, voting-rule scores,
    edge-weight / WMG / position-matrix features, tier and connectivity
    matrices, a masked adjacency matrix, and a 0/1 membership vector for K.

    :param G: current networkx digraph over candidates self.I.
    :param K: collection of "kept" candidates (membership encoded at the end).
    :param T: current tier of edges; T[0] is used for the edge-weight feature.
    :param profile: key into the per-profile caches (E_0, adjacency, scores).
    :return: torch Variable holding the float feature vector.
    """
    f = []
    E_0 = self.profile_to_E0[profile]
    adjacency_0 = self.profile_to_adjacency0[profile]
    # In/out degree of each candidate, normalized by its degree in E_0.
    if params.use_in_out_matrix:
        out_degree = G.out_degree(self.I)
        for (i, j) in out_degree:
            f.extend(RP_utils.polynomialize(
                RP_utils.safe_div(j, E_0.out_degree(i)),
                params.num_polynomial))
        in_degree = G.in_degree(self.I)
        for (i, j) in in_degree:
            f.extend(RP_utils.polynomialize(
                RP_utils.safe_div(j, E_0.in_degree(i)),
                params.num_polynomial))
    # Total degree (in + out) relative to E_0's total degree.
    if params.use_total_degree_matrix:
        for i in self.I:
            i_total = G.out_degree(i) + G.in_degree(i)
            i_e0_total = E_0.out_degree(i) + E_0.in_degree(i)
            f.extend(RP_utils.polynomialize(
                RP_utils.safe_div(i_total, i_e0_total),
                params.num_polynomial))
    # Binary (+1/-1) indicators for nonzero in/out degree.
    if params.use_in_out_binary_matrix:
        out_degree = G.out_degree(self.I)
        for (i, j) in out_degree:
            f.append(2 * int(j > 0) - 1)
        in_degree = G.in_degree(self.I)
        for (i, j) in in_degree:
            f.append(2 * int(j > 0) - 1)
    # Precomputed per-profile voting-rule scores.
    if params.use_voting_rules_matrix:
        for i in self.profile_to_plurality[profile]:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
        for i in self.profile_to_borda[profile]:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
        for i in self.profile_to_copeland[profile]:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
        for i in self.profile_to_maximin[profile]:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
    # Weight of the first tier edge, normalized by the profile's max weight.
    if params.use_edge_weight:
        f.extend(RP_utils.polynomialize(
            E_0[T[0][0]][T[0][1]]['weight'] /
            self.profile_to_max_edge_weight[profile],
            params.num_polynomial))
    if params.use_vectorized_wmg:
        f.extend(self.profile_to_vectorized_wmg[profile])
    if params.use_posmat:
        f.extend(self.profile_to_posmat[profile])
    # m x m 0/1 matrix marking the edges in the current tier T.
    if params.use_tier_adjacency_matrix:
        T_matrix = np.zeros((int(params.m), int(params.m)))
        for (c1, c2) in T:
            T_matrix[c1, c2] = 1
        T_vec = list(T_matrix.flatten())
        f.extend(T_vec)
    if params.use_connectivity_matrix:
        for i in self.I:
            for j in self.I:
                if i != j:
                    f.extend(RP_utils.polynomialize(
                        local_edge_connectivity(G, i, j) / (params.m - 2),
                        params.num_polynomial)
                    )  # normalized by m-2 since max edges needed to disconnect i and j is all edges but i -> i and i -> j
        all_pairs_node_connectivity = nx.all_pairs_node_connectivity(G)
        for i in self.I:
            for j in self.I:
                if i != j:
                    f.extend(RP_utils.polynomialize(
                        all_pairs_node_connectivity[i][j] / (params.m - 2),
                        params.num_polynomial)
                    )  # normalized by m-2 since max nodes needed to disconnect i and j is all nodes but i and j
    # adjacency matrix, masked by the profile's initial adjacency
    if params.use_adjacency_matrix:
        adjacency = nx.adjacency_matrix(G, nodelist=self.I).todense()
        adjacency = np.multiply(adjacency, adjacency_0)
        adjacency_normalized = np.divide(adjacency, params.n)
        f.extend(adjacency_normalized.flatten().tolist()[0])
    # K representation: one 0/1 entry per candidate for membership in K
    if params.use_K_representation:
        K_list = []
        for i in self.I:
            if i in K:
                K_list.append(1)
            else:
                K_list.append(0)
        f.extend(K_list)
    return Variable(torch.from_numpy(np.array(f)).float())
def state_features(self):
    """Build the flat feature vector for the current environment state.

    Like the profile-parameterized variant, but reads the graph (self.G),
    initial edges (self.E_0 / self.E_0_really), cached scores, and K from
    instance state, and uses self.get_legal_actions() as the current tier.
    Each section is gated by a params.use_* flag.

    :return: torch Variable holding the float feature vector.
    """
    f = []
    legal_actions = self.get_legal_actions()
    # Raw in/out degree of each candidate, normalized by params.m.
    if params.use_in_out_matrix:
        out_degree = self.G.out_degree(self.I)
        for (i, j) in out_degree:
            f.extend(RP_utils.polynomialize(j / params.m,
                                            params.num_polynomial))
        in_degree = self.G.in_degree(self.I)
        for (i, j) in in_degree:
            f.extend(RP_utils.polynomialize(j / params.m,
                                            params.num_polynomial))
    # In/out degree relative to the initial edge set E_0.
    if params.use_in_out_relative_matrix:
        out_degree = self.G.out_degree(self.I)
        for (i, j) in out_degree:
            f.extend(RP_utils.polynomialize(
                RP_utils.safe_div(j, self.E_0.out_degree(i)),
                params.num_polynomial))
        in_degree = self.G.in_degree(self.I)
        for (i, j) in in_degree:
            f.extend(RP_utils.polynomialize(
                RP_utils.safe_div(j, self.E_0.in_degree(i)),
                params.num_polynomial))
    # Total degree (in + out) relative to E_0's total degree.
    if params.use_total_degree_matrix:
        for i in self.I:
            i_total = self.G.out_degree(i) + self.G.in_degree(i)
            i_e0_total = self.E_0.out_degree(i) + self.E_0.in_degree(i)
            f.extend(RP_utils.polynomialize(
                RP_utils.safe_div(i_total, i_e0_total),
                params.num_polynomial))
    # Binary (+1/-1) indicators for nonzero in/out degree.
    if params.use_in_out_binary_matrix:
        out_degree = self.G.out_degree(self.I)
        for (i, j) in out_degree:
            f.append(2 * int(j > 0) - 1)
        in_degree = self.G.in_degree(self.I)
        for (i, j) in in_degree:
            f.append(2 * int(j > 0) - 1)
    # Cached voting-rule scores.
    if params.use_voting_rules_matrix:
        for i in self.plurality_scores:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
        for i in self.borda_scores:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
        for i in self.copeland_scores:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
        for i in self.maximin_scores:
            f.extend(RP_utils.polynomialize(i, params.num_polynomial))
    # Weight of the first legal action's edge, normalized by the max weight.
    if params.use_edge_weight:
        f.extend(RP_utils.polynomialize(
            self.E_0_really[legal_actions[0][0]][legal_actions[0][1]]
            ['weight'] / self.max_edge_weight,
            params.num_polynomial))
    if params.use_vectorized_wmg:
        f.extend(self.vectorized_wmg)
    if params.use_posmat:
        f.extend(self.posmat)
    # m x m 0/1 matrix marking the currently legal action edges.
    if params.use_tier_adjacency_matrix:
        T_matrix = np.zeros((int(params.m), int(params.m)))
        for (c1, c2) in legal_actions:
            T_matrix[c1, c2] = 1
        T_vec = list(T_matrix.flatten())
        f.extend(T_vec)
    if params.use_connectivity_matrix:
        for i in self.I:
            for j in self.I:
                if i != j:
                    f.extend(RP_utils.polynomialize(
                        local_edge_connectivity(self.G, i, j) / (params.m - 2),
                        params.num_polynomial)
                    )  # normalized by m-2 since max edges needed to disconnect i and j is all edges but i -> i and i -> j
        all_pairs_node_connectivity = nx.all_pairs_node_connectivity(self.G)
        for i in self.I:
            for j in self.I:
                if i != j:
                    f.extend(RP_utils.polynomialize(
                        all_pairs_node_connectivity[i][j] / (params.m - 2),
                        params.num_polynomial)
                    )  # normalized by m-2 since max nodes needed to disconnect i and j is all nodes but i and j
    # adjacency matrix of current state, masked by the initial adjacency
    if params.use_adjacency_matrix:
        adjacency = nx.adjacency_matrix(self.G, nodelist=self.I).todense()
        adjacency = np.multiply(adjacency, self.adjacency_0)
        adjacency_normalized = np.divide(adjacency, params.n)
        f.extend(adjacency_normalized.flatten().tolist()[0])
    # K representation: one 0/1 entry per candidate for membership in self.K
    if params.use_K_representation:
        K_list = []
        for i in self.I:
            if i in self.K:
                K_list.append(1)
            else:
                K_list.append(0)
        f.extend(K_list)
    # node2vec every time
    # G_with_weights = nx.DiGraph()
    # G_with_weights.add_nodes_from(self.I)
    # for (cand1, cand2) in self.G.edges():
    #     G_with_weights.add_edge(cand1, cand2, weight=self.E_0_really[cand1][cand2]['weight'])
    # node2vec_G = node2vec.Graph(G_with_weights, True, self.node2vec_args.p, self.node2vec_args.q)
    # node2vec_G.preprocess_transition_probs()
    # walks = node2vec_G.simulate_walks(self.node2vec_args.num_walks, self.node2vec_args.walk_length)
    # self.node2vec_model = node2vecmain.learn_embeddings(walks, self.node2vec_args)
    # node2vec features
    # node2vec_u = self.node2vec_model.wv[str(u)]
    # node2vec_v = self.node2vec_model.wv[str(v)]
    # node2vec_uv = np.append(node2vec_u, node2vec_v)
    # node2vec_f = np.append(node2vec_uv, np.array(f))
    if params.debug_mode >= 3:
        print("features", f)
    return Variable(torch.from_numpy(np.array(f)).float())