def test_project_weighted_shared(self):
    """Shared-neighbor weighted projection of self.G and self.N."""
    shared = [('A', 'B', 2), ('A', 'C', 1), ('B', 'C', 1),
              ('B', 'D', 1), ('B', 'E', 2), ('E', 'F', 1)]
    expected = nx.Graph()
    expected.add_weighted_edges_from(shared)
    P = bipartite.weighted_projected_graph(self.G, 'ABCDEF')
    assert_edges_equal(list(P.edges()), expected.edges())
    for u, v in list(P.edges()):
        assert P[u][v]['weight'] == expected[u][v]['weight']

    shared = [('A', 'B', 3), ('A', 'E', 1), ('A', 'C', 1), ('A', 'D', 1),
              ('B', 'E', 1), ('B', 'C', 1), ('B', 'D', 1), ('C', 'D', 1)]
    expected = nx.Graph()
    expected.add_weighted_edges_from(shared)
    P = bipartite.weighted_projected_graph(self.N, 'ABCDE')
    assert_edges_equal(list(P.edges()), expected.edges())
    for u, v in list(P.edges()):
        assert P[u][v]['weight'] == expected[u][v]['weight']
def weighted_projected_graph(self):
    """Project self.B onto one bipartite node set and print/plot the
    resulting edge weights, once with ratio=True and once with ratio=False."""
    node_set = bipartite.sets(self.B)[0]
    print('EEEEEEEEEEEEEEEEEEE')
    print(node_set)

    # ratio=True projection
    P = bipartite.weighted_projected_graph(self.B, node_set, ratio=True)
    self.plot_graph_2(P, 'weighted_projected')
    print('weighted_projected:number of edges:', P.number_of_edges())
    print(P.edges())
    print(list(P.edges(data=True)))
    weights = [attrs['weight'] for _, _, attrs in P.edges(data=True)]
    print(weights)

    # ratio=False projection
    P = bipartite.weighted_projected_graph(self.B, node_set, ratio=False)
    self.plot_graph_2(P, 'weighted_projected_not_ratio')
    print('RRRRRRRRRRRRRRRRRRRRRRRR')
    print(P.edges())
    weights = [attrs['weight'] for _, _, attrs in P.edges(data=True)]
    print(weights)
def test_project_weighted_shared(self):
    """Shared-neighbor weighted projection (nose-style asserts)."""
    cases = (
        (self.G, 'ABCDEF',
         [('A', 'B', 2), ('A', 'C', 1), ('B', 'C', 1),
          ('B', 'D', 1), ('B', 'E', 2), ('E', 'F', 1)]),
        (self.N, 'ABCDE',
         [('A', 'B', 3), ('A', 'E', 1), ('A', 'C', 1), ('A', 'D', 1),
          ('B', 'E', 1), ('B', 'C', 1), ('B', 'D', 1), ('C', 'D', 1)]),
    )
    for graph, nodes, weighted_edges in cases:
        expected = nx.Graph()
        expected.add_weighted_edges_from(weighted_edges)
        P = bipartite.weighted_projected_graph(graph, nodes)
        assert_equal(P.edges(), expected.edges())
        for u, v in P.edges():
            assert_equal(P[u][v]['weight'], expected[u][v]['weight'])
def test_directed_projection(self):
    """Directed projection: weight counts directed u->x->v two-paths only."""
    # First graph: only one directed 2-path A->1->B (B->2 points the wrong way).
    G = nx.DiGraph()
    G.add_edges_from([('A', 1), (1, 'B'), ('A', 2), ('B', 2)])
    P = bipartite.projected_graph(G, 'AB')
    assert_edges_equal(list(P.edges()), [('A', 'B')])
    P = bipartite.weighted_projected_graph(G, 'AB')
    assert_edges_equal(list(P.edges()), [('A', 'B')])
    assert_equal(P['A']['B']['weight'], 1)
    P = bipartite.projected_graph(G, 'AB', multigraph=True)
    assert_edges_equal(list(P.edges()), [('A', 'B')])
    # Second graph: two directed 2-paths A->1->B and A->2->B give weight 2.
    G = nx.DiGraph()
    G.add_edges_from([('A', 1), (1, 'B'), ('A', 2), (2, 'B')])
    P = bipartite.projected_graph(G, 'AB')
    assert_edges_equal(list(P.edges()), [('A', 'B')])
    P = bipartite.weighted_projected_graph(G, 'AB')
    assert_edges_equal(list(P.edges()), [('A', 'B')])
    assert_equal(P['A']['B']['weight'], 2)
    P = bipartite.projected_graph(G, 'AB', multigraph=True)
    assert_edges_equal(list(P.edges()), [('A', 'B'), ('A', 'B')])
def test_directed_projection(self):
    """Directed projection counts only directed u->x->v paths."""
    def build(pairs):
        # Small helper: directed graph from an edge list.
        D = nx.DiGraph()
        D.add_edges_from(pairs)
        return D

    G = build([("A", 1), (1, "B"), ("A", 2), ("B", 2)])
    P = bipartite.projected_graph(G, "AB")
    assert_edges_equal(list(P.edges()), [("A", "B")])
    P = bipartite.weighted_projected_graph(G, "AB")
    assert_edges_equal(list(P.edges()), [("A", "B")])
    assert P["A"]["B"]["weight"] == 1
    P = bipartite.projected_graph(G, "AB", multigraph=True)
    assert_edges_equal(list(P.edges()), [("A", "B")])

    G = build([("A", 1), (1, "B"), ("A", 2), (2, "B")])
    P = bipartite.projected_graph(G, "AB")
    assert_edges_equal(list(P.edges()), [("A", "B")])
    P = bipartite.weighted_projected_graph(G, "AB")
    assert_edges_equal(list(P.edges()), [("A", "B")])
    assert P["A"]["B"]["weight"] == 2
    P = bipartite.projected_graph(G, "AB", multigraph=True)
    assert_edges_equal(list(P.edges()), [("A", "B"), ("A", "B")])
def test_project_weighted_ratio(self):
    """ratio=True scales shared-neighbor counts by the opposite-set size."""
    cases = (
        (self.G, 'ABCDEF',
         [('A', 'B', 2 / 6.0), ('A', 'C', 1 / 6.0), ('B', 'C', 1 / 6.0),
          ('B', 'D', 1 / 6.0), ('B', 'E', 2 / 6.0), ('E', 'F', 1 / 6.0)]),
        (self.N, 'ABCDE',
         [('A', 'B', 3 / 3.0), ('A', 'E', 1 / 3.0), ('A', 'C', 1 / 3.0),
          ('A', 'D', 1 / 3.0), ('B', 'E', 1 / 3.0), ('B', 'C', 1 / 3.0),
          ('B', 'D', 1 / 3.0), ('C', 'D', 1 / 3.0)]),
    )
    for graph, nodes, weighted_edges in cases:
        expected = nx.Graph()
        expected.add_weighted_edges_from(weighted_edges)
        P = bipartite.weighted_projected_graph(graph, nodes, ratio=True)
        assert_edges_equal(list(P.edges()), expected.edges())
        for u, v in list(P.edges()):
            assert_equal(P[u][v]['weight'], expected[u][v]['weight'])
def network_output(edgelist, nodelist, removed_subs):
    """Build the moderator-subreddit bipartite graph and both weighted
    one-mode projections.

    Returns a dict with keys 'B' (bipartite graph), 'mods', 'subs'
    (projections restricted to their non-isolated edges) and 'type_dict'.
    """
    B = nx.Graph()
    B.add_nodes_from(set(edgelist['name']), bipartite=0)
    B.add_nodes_from(set(edgelist['sub']), bipartite=1)
    B.add_edges_from(list(zip(edgelist['name'], edgelist['sub'])))

    # Node type variable: 0 sub, 1 FN, 2 FT, 3 CN, 4 CT (per upstream coding).
    type_dict = dict(zip(nodelist.name, nodelist.mod_type))
    nx.set_node_attributes(B, type_dict, name='type')

    # Drop problem subreddits before projecting.
    B.remove_nodes_from(removed_subs)

    # Unipartite subreddit graph (edge_subgraph drops isolated nodes).
    subs = bipartite.weighted_projected_graph(B, set(edgelist['sub']) - removed_subs)
    subs = subs.edge_subgraph(subs.edges())

    # Unipartite moderator graph.
    mods = bipartite.weighted_projected_graph(B, set(edgelist['name']))
    mods = mods.edge_subgraph(mods.edges())

    return {'B': B, 'mods': mods, 'subs': subs, 'type_dict': type_dict}
def test_project_weighted_shared(self):
    """Shared-neighbor weighted projection (double-quoted, nose asserts)."""
    for graph, nodes, weighted_edges in (
        (self.G, "ABCDEF",
         [("A", "B", 2), ("A", "C", 1), ("B", "C", 1),
          ("B", "D", 1), ("B", "E", 2), ("E", "F", 1)]),
        (self.N, "ABCDE",
         [("A", "B", 3), ("A", "E", 1), ("A", "C", 1), ("A", "D", 1),
          ("B", "E", 1), ("B", "C", 1), ("B", "D", 1), ("C", "D", 1)]),
    ):
        expected = nx.Graph()
        expected.add_weighted_edges_from(weighted_edges)
        P = bipartite.weighted_projected_graph(graph, nodes)
        assert_edges_equal(P.edges(), expected.edges())
        for u, v in P.edges():
            assert_equal(P[u][v]["weight"], expected[u][v]["weight"])
def test_directed_projection(self):
    """Weighted projection of a directed bipartite graph (nose asserts)."""
    # Case 1: one directed 2-path -> weight 1; case 2: two -> weight 2.
    for pairs, weight in (
        ([('A', 1), (1, 'B'), ('A', 2), ('B', 2)], 1),
        ([('A', 1), (1, 'B'), ('A', 2), (2, 'B')], 2),
    ):
        G = nx.DiGraph()
        G.add_edges_from(pairs)
        P = bipartite.projected_graph(G, 'AB')
        assert_edges_equal(list(P.edges()), [('A', 'B')])
        P = bipartite.weighted_projected_graph(G, 'AB')
        assert_edges_equal(list(P.edges()), [('A', 'B')])
        assert_equal(P['A']['B']['weight'], weight)
        P = bipartite.projected_graph(G, 'AB', multigraph=True)
        assert_edges_equal(list(P.edges()), [('A', 'B')] * weight)
def test_project_weighted_ratio(self):
    """ratio=True projection: weights are shared counts over opposite-set size."""
    for graph, nodes, weighted_edges in (
        (self.G, "ABCDEF",
         [("A", "B", 2 / 6.0), ("A", "C", 1 / 6.0), ("B", "C", 1 / 6.0),
          ("B", "D", 1 / 6.0), ("B", "E", 2 / 6.0), ("E", "F", 1 / 6.0)]),
        (self.N, "ABCDE",
         [("A", "B", 3 / 3.0), ("A", "E", 1 / 3.0), ("A", "C", 1 / 3.0),
          ("A", "D", 1 / 3.0), ("B", "E", 1 / 3.0), ("B", "C", 1 / 3.0),
          ("B", "D", 1 / 3.0), ("C", "D", 1 / 3.0)]),
    ):
        expected = nx.Graph()
        expected.add_weighted_edges_from(weighted_edges)
        P = bipartite.weighted_projected_graph(graph, nodes, ratio=True)
        assert_edges_equal(list(P.edges()), expected.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == expected[u][v]["weight"]
def test_project_weighted_ratio(self):
    """Ratio-weighted projection (nose-style, edge views compared directly)."""
    first = nx.Graph()
    first.add_weighted_edges_from(
        [('A', 'B', 2 / 6.0), ('A', 'C', 1 / 6.0), ('B', 'C', 1 / 6.0),
         ('B', 'D', 1 / 6.0), ('B', 'E', 2 / 6.0), ('E', 'F', 1 / 6.0)])
    P = bipartite.weighted_projected_graph(self.G, 'ABCDEF', ratio=True)
    assert_edges_equal(P.edges(), first.edges())
    for u, v in P.edges():
        assert_equal(P[u][v]['weight'], first[u][v]['weight'])

    second = nx.Graph()
    second.add_weighted_edges_from(
        [('A', 'B', 3 / 3.0), ('A', 'E', 1 / 3.0), ('A', 'C', 1 / 3.0),
         ('A', 'D', 1 / 3.0), ('B', 'E', 1 / 3.0), ('B', 'C', 1 / 3.0),
         ('B', 'D', 1 / 3.0), ('C', 'D', 1 / 3.0)])
    P = bipartite.weighted_projected_graph(self.N, 'ABCDE', ratio=True)
    assert_edges_equal(P.edges(), second.edges())
    for u, v in P.edges():
        assert_equal(P[u][v]['weight'], second[u][v]['weight'])
def test_directed_projection(self):
    """Directed projection, comparing sorted edge lists (nose asserts)."""
    for pairs, weight in (
        ([("A", 1), (1, "B"), ("A", 2), ("B", 2)], 1),
        ([("A", 1), (1, "B"), ("A", 2), (2, "B")], 2),
    ):
        G = nx.DiGraph()
        G.add_edges_from(pairs)
        P = bipartite.projected_graph(G, "AB")
        assert_equal(sorted(P.edges()), [("A", "B")])
        P = bipartite.weighted_projected_graph(G, "AB")
        assert_equal(sorted(P.edges()), [("A", "B")])
        assert_equal(P["A"]["B"]["weight"], weight)
        P = bipartite.projected_graph(G, "AB", multigraph=True)
        assert_equal(sorted(P.edges()), [("A", "B")] * weight)
def project(bipartite):
    """Project the bipartite graph on both sides.

    Returns
    -------
    graph_papers : nx graph
        Graph where two papers are connected if they share an author.
    graph_authors : nx graph
        Graph where two authors are connected if they wrote a paper together.
    """
    # Papers carry bipartite=0; everything else is an author.
    papers = {n for n, d in bipartite.nodes(data=True) if d['bipartite'] == 0}
    authors = set(bipartite) - papers
    graph_papers = nxb.weighted_projected_graph(bipartite, papers)
    graph_authors = nxb.weighted_projected_graph(bipartite, authors)
    print(f'projection: {graph_papers.number_of_nodes():,} papers and '
          f'{graph_papers.number_of_edges():,} edges')
    print(f'projection: {graph_authors.number_of_nodes():,} authors and '
          f'{graph_authors.number_of_edges():,} edges')
    return graph_papers, graph_authors
def test_project_weighted_ratio(self):
    """Ratio-weighted projection (edge views compared without list())."""
    def check(graph, nodes, weighted_edges):
        # Build the expected graph and compare edges and weights.
        expected = nx.Graph()
        expected.add_weighted_edges_from(weighted_edges)
        P = bipartite.weighted_projected_graph(graph, nodes, ratio=True)
        assert_equal(P.edges(), expected.edges())
        for u, v in P.edges():
            assert_equal(P[u][v]["weight"], expected[u][v]["weight"])

    check(self.G, "ABCDEF",
          [("A", "B", 2 / 6.0), ("A", "C", 1 / 6.0), ("B", "C", 1 / 6.0),
           ("B", "D", 1 / 6.0), ("B", "E", 2 / 6.0), ("E", "F", 1 / 6.0)])
    check(self.N, "ABCDE",
          [("A", "B", 3 / 3.0), ("A", "E", 1 / 3.0), ("A", "C", 1 / 3.0),
           ("A", "D", 1 / 3.0), ("B", "E", 1 / 3.0), ("B", "C", 1 / 3.0),
           ("B", "D", 1 / 3.0), ("C", "D", 1 / 3.0)])
def test_path_weighted_projected_graph(self):
    """P4 projected onto either alternating node pair yields a single edge."""
    G = nx.path_graph(4)
    for keep, (u, v) in (([1, 3], (1, 3)), ([0, 2], (0, 2))):
        P = bipartite.weighted_projected_graph(G, keep)
        assert_nodes_equal(list(P), keep)
        assert_edges_equal(list(P.edges()), [(u, v)])
        P[u][v]['weight'] = 1
def test_path_weighted_projected_graph(self):
    """Projecting P4 onto odd and even nodes each gives one weighted edge."""
    G = nx.path_graph(4)
    odd = bipartite.weighted_projected_graph(G, [1, 3])
    assert_nodes_equal(list(odd), [1, 3])
    assert_edges_equal(list(odd.edges()), [(1, 3)])
    odd[1][3]['weight'] = 1
    even = bipartite.weighted_projected_graph(G, [0, 2])
    assert_nodes_equal(list(even), [0, 2])
    assert_edges_equal(list(even.edges()), [(0, 2)])
    even[0][2]['weight'] = 1
def test_path_weighted_projected_graph(self):
    """P4 projection checked via sorted node/edge lists (nose asserts)."""
    G = nx.path_graph(4)
    for keep, (u, v) in (([1, 3], (1, 3)), ([0, 2], (0, 2))):
        P = bipartite.weighted_projected_graph(G, keep)
        assert_equal(sorted(P.nodes()), keep)
        assert_equal(sorted(P.edges()), [(u, v)])
        P[u][v]["weight"] = 1
def create_complete_graph(CProject):
    """
    Creates a multipartite graph consisting of papers on the one hand,
    and all facts of available plugin-results on the other hand.

    Args:
        CProject

    Returns:
        (bipartite_graph, monopartite_fact_graph, monopartite_paper_graph,
         paper_nodes, fact_nodes)
    """
    # NOTE(review): partition_mapping is built but never used below — TODO confirm intent.
    partition_mapping = {"papers":0, "binomial":1, "genus":2, "genussp":3, "carb3":4, "prot3":5, "dna":6, "prot":7, "human":8}
    # Fact types requested per plugin.
    gene = ["human"]
    species = ["binomial"]
    sequence = ["dna", "prot"]
    plugins = {"gene":gene, "species": species, "sequence":sequence}
    M = nx.Graph()
    labels = {}
    for ctree in CProject.get_ctrees():
        for plugin, types in plugins.items():
            for ptype in types:
                try:
                    results = ctree.show_results(plugin).get(ptype, [])
                except AttributeError:
                    # show_results returned None for this plugin; skip it.
                    continue
                if len(results) > 0:
                    # Normalized paper title is the paper-node key.
                    source = " ".join(ctree.get_title().split())
                    if not source in M.nodes():
                        # add paper node to one side of the bipartite network
                        M.add_node(source, bipartite=0)
                        labels[str(source)] = str(source)
                    for result in results:
                        target = result.get("exact")
                        # add fact node to other side of the bipartite network
                        if not target in M.nodes():
                            M.add_node(target, bipartite=1, ptype=ptype)
                            # encode/decode round-trip; presumably a py2/py3
                            # unicode normalization leftover — TODO confirm.
                            labels[target] = target.encode("utf-8").decode("utf-8")
                        # add a link between a paper and author
                        M.add_edge(source, target)
    # Split node sets by the bipartite attribute and project each side.
    paper_nodes = set(n for n,d in M.nodes(data=True) if d.get('bipartite')==0)
    fact_nodes = set(M) - paper_nodes
    fact_graph = bipartite.weighted_projected_graph(M, fact_nodes)
    paper_graph = bipartite.weighted_projected_graph(M, paper_nodes)
    # NOTE(review): docstring says (..., paper_nodes, fact_nodes) but the
    # return order is fact_nodes, paper_nodes — confirm against callers.
    return M, fact_graph, paper_graph, fact_nodes, paper_nodes
def test_path_weighted_projected_directed_graph(self):
    """Directed P4 projected onto alternating node pairs."""
    G = nx.DiGraph()
    G.add_path(list(range(4)))  # networkx 1.x graph-method API
    for keep, (u, v) in (([1, 3], (1, 3)), ([0, 2], (0, 2))):
        P = bipartite.weighted_projected_graph(G, keep)
        assert_equal(sorted(P.nodes()), keep)
        assert_equal(sorted(P.edges()), [(u, v)])
        P[u][v]["weight"] = 1
def test_path_weighted_projected_directed_graph(self):
    """Directed P4 projection (modern nx.add_path / plain asserts)."""
    G = nx.DiGraph()
    nx.add_path(G, range(4))
    for keep, (u, v) in (([1, 3], (1, 3)), ([0, 2], (0, 2))):
        P = bipartite.weighted_projected_graph(G, keep)
        assert nodes_equal(list(P), keep)
        assert edges_equal(list(P.edges()), [(u, v)])
        P[u][v]["weight"] = 1
def test_path_weighted_projected_directed_graph(self):
    """Directed P4 projection, edge views passed straight to the helpers."""
    G = nx.DiGraph()
    G.add_path(list(range(4)))  # networkx 1.x graph-method API
    odd = bipartite.weighted_projected_graph(G, [1, 3])
    assert_nodes_equal(odd.nodes(), [1, 3])
    assert_edges_equal(odd.edges(), [(1, 3)])
    odd[1][3]['weight'] = 1
    even = bipartite.weighted_projected_graph(G, [0, 2])
    assert_nodes_equal(even.nodes(), [0, 2])
    assert_edges_equal(even.edges(), [(0, 2)])
    even[0][2]['weight'] = 1
def create_network(CProject, plugin, query):
    """
    Creates the network between papers and plugin results.

    Plugin may be any of ["regex", "gene", "sequence", "species"]
    Query corresponds to the fact types found by a plugin,
    for "species" it is one of ["binomial", "genus", "genussp"]
    for "sequences" it is one of ["carb3", "prot3", "dna", "prot"]
    for "gene" it is "human"
    for "regex" it is "regexname".

    Args:
        CProject object
        plugin = "string"
        query = "string"
    Returns:
        (bipartite_graph, monopartite_graph, fact_nodes, paper_nodes)

    >>> bipartiteGraph, factGraph, paperGraph, fact_nodes, paper_nodes = create_network(CProject, "species", "binomial")
    """
    B = nx.Graph()
    labels = {}
    for ctree in CProject.get_ctrees():
        try:
            results = ctree.show_results(plugin).get(query, [])
        except AttributeError:
            # show_results returned None for this plugin; skip this ctree.
            continue
        if len(results) > 0:
            # add paper node to one side of the bipartite network
            # (title whitespace is normalized to form a stable node key)
            source = " ".join(ctree.get_title().split())
            B.add_node(source, bipartite=0)
            labels[str(source)] = str(source)
            for result in results:
                exact = result.get("exact")
                # add fact node to other side of the bipartite network
                B.add_node(exact, bipartite=1)
                # encode/decode round-trip; presumably a py2/py3 unicode
                # normalization leftover — TODO confirm.
                labels[exact] = exact.encode("utf-8").decode("utf-8")
                # add a link between a paper and author
                B.add_edge(source, exact)
    # Partition by the bipartite attribute and project each side.
    paper_nodes = set(n for n,d in B.nodes(data=True) if d['bipartite']==0)
    fact_nodes = set(B) - paper_nodes
    fact_graph = bipartite.weighted_projected_graph(B, fact_nodes)
    paper_graph = bipartite.weighted_projected_graph(B, paper_nodes)
    return B, fact_graph, paper_graph, fact_nodes, paper_nodes
def answer_three():
    """Weighted projection of the employee-movie bipartite graph onto employees."""
    movie_graph = answer_two()
    return bipartite.weighted_projected_graph(movie_graph, employees)
def make_network(raw_file_list):
    # Python 2 module (print statement below).
    # Builds a source/name bipartite graph from a flat line list, projects it
    # onto the name side, and writes a Pajek file.
    # handles the individual nodes
    collect_nodes_by_source = []   # NOTE(review): never used — TODO confirm
    list_of_source_names = []      # NOTE(review): never used — TODO confirm
    node_list = []                 # NOTE(review): never used — TODO confirm
    GB = nx.Graph()
    # for all the nodes...
    for i in range(len(raw_file_list)):
        # check whether they are name, source or else (not returned).
        # "i" works as an index to identify the respective node when it comes back
        checker, a = my_containsAny(raw_file_list[i], i)
        # raw data starts with a source, all following non-source lines refer
        # to names or tags. So all returned nodes should be linked to each other
        if checker == "source":
            GB.add_node(raw_file_list[a], bipartite = 0)
            source = raw_file_list[a]
            # NOTE(review): neither `a` nor `checker` changes inside this loop,
            # so if entered it never terminates, and the `checker == "node"`
            # branch is unreachable (checker is "source" here) — this block
            # appears broken as written; confirm intended control flow.
            while source == raw_file_list[a]:
                if checker == "node":
                    GB.add_node(raw_file_list[a], bipartite = 1)
                    GB.add_edge(raw_file_list[a], raw_file_list[a+1])
    # NOTE(review): GB.nodes(["bipartite"]==1) evaluates its argument to False
    # (i.e. nodes(data=False)), so ALL nodes are passed, not the bipartite=1
    # side — presumably the intent was to select the name partition; confirm.
    G = bnx.weighted_projected_graph(GB, GB.nodes(["bipartite"]==1))
    #nx.write_graphml(GB, "abolitionists_bipartite.graphml")
    nx.write_pajek(G, "abolitionists.net")
    print "done!"
def get_bip_proj(df, timechunk):
    """Project the bipartite graph built from *df* onto its first node set
    and save a spring-layout plot to images/<timechunk>_projection.png.

    The layout is loaded from the 'projection_pos.sav' pickle cache when
    possible and recomputed otherwise.
    """
    n1, n2, edgelist = create_edgelist(df)
    B = nx.Graph()
    B.add_nodes_from(set(n1), bipartite=0)
    B.add_nodes_from(set(n2), bipartite=1)
    B.add_weighted_edges_from(edgelist)
    proj = bipartite.weighted_projected_graph(B, n1)
    # Fix: the original used a bare `except:` (hiding real errors such as
    # KeyboardInterrupt) and never closed the pickle file handle.  Catch only
    # a missing/unreadable cache and recompute the layout in that case.
    try:
        with open('projection_pos.sav', 'rb') as fh:
            pos = pickle.load(fh)
    except (OSError, pickle.PickleError, EOFError):
        pos = nx.spring_layout(proj, k=0.17)
    plt.axis('off')
    nx.draw_networkx(proj, node_size=2, with_labels=False, node_color='red',
                     edge_color='black', width=0.3, pos=pos)
    plt.savefig('images/' + timechunk + '_projection.png')
    plt.close()
def project_notwork(bnetwork, list_of_items):
    """Return the weighted projection of *bnetwork* onto *list_of_items*.

    Returns None when *bnetwork* is not bipartite.
    """
    # Bug fix: the original body referenced an undefined name
    # `bipartite_network` instead of the `bnetwork` parameter, so every call
    # raised NameError.
    if nx.is_bipartite(bnetwork):
        return bipartite.weighted_projected_graph(bnetwork, list_of_items)
    return None
def create_projected_graph(batchnerID, minweight, entityType):
    '''Creates a projected graph and saves the edges as a dataframe.

    Parameters:
        batchnerID: dataframe with 'doc', 'id', 'label' and 'entityType'
                    columns (one row per document/entity edge).
        minweight:  co-occurrence weight threshold; 0 keeps all edges.
        entityType: kept for interface compatibility (not used here).

    Returns the entity-entity projection with 'label' and 'entityType'
    node attributes copied from the dataframe.
    '''
    # Build the doc-entity graph straight from the edge list.  (The previous
    # version first created an empty nx.MultiGraph() that was immediately
    # overwritten — from_pandas_edgelist returns a fresh graph.)
    G = nx.from_pandas_edgelist(batchnerID, source='doc', target='id',
                                edge_attr=True)
    # Project onto entities, removing documents from the graph.
    projected_graph = bipartite.weighted_projected_graph(G, batchnerID.id)
    # Optionally prune weak co-occurrence edges.
    if minweight != 0:
        projected_graph = nx.Graph([
            (u, v, d)
            for u, v, d in projected_graph.edges(data=True)
            if d['weight'] > minweight
        ])
    # Attach label/entityType attributes keyed by entity id.
    for col in ('label', 'entityType'):
        node_atts = dict(zip(batchnerID.id, batchnerID[col]))
        nx.set_node_attributes(projected_graph, node_atts, col)
    return projected_graph
def get_bip_proj(df, starttime, endtime, n1, n2, edgelist, out):
    """Build the bipartite graph for one time chunk and return its weighted
    projection onto the n1 node set.

    (The bipartite plotting code that used `timechunk`/`out` was disabled
    upstream; `timechunk` is kept for parity with that code path.)
    """
    timechunk = starttime if starttime != endtime else starttime + '-' + endtime
    B = nx.Graph()
    B.add_nodes_from(set(n1), bipartite=0)
    B.add_nodes_from(set(n2), bipartite=1)
    B.add_edges_from(edgelist)
    return bipartite.weighted_projected_graph(B, n1)
def answer_three():
    """Project the bipartite employee/movie network onto the employee nodes."""
    bip = answer_two()
    return bipartite.weighted_projected_graph(B=bip, nodes=employees)
def incremental_reconstruction(data):
    # Python 2 module (print statements below).
    # Incremental SfM pipeline: bootstrap from image pairs, grow each
    # reconstruction, then save all partial reconstructions sorted by size.
    data.invent_reference_lla()
    graph = data.load_tracks_graph()
    tracks, images = tracks_and_images(graph)
    remaining_images = set(images)
    print 'images', len(images)
    print 'nonfisheye images', len(remaining_images)
    # Co-visibility graph between images (weight = shared track count).
    image_graph = bipartite.weighted_projected_graph(graph, images)
    reconstructions = []
    pairs = compute_image_pairs(graph, image_graph, data.config)
    for im1, im2 in pairs:
        # Only start a new reconstruction from images not yet consumed.
        if im1 in remaining_images and im2 in remaining_images:
            reconstruction = bootstrap_reconstruction(data, graph, im1, im2)
            if reconstruction:
                remaining_images.remove(im1)
                remaining_images.remove(im2)
                reconstruction = grow_reconstruction(data, graph, reconstruction, remaining_images)
                reconstructions.append(reconstruction)
    # Largest reconstruction first.
    reconstructions = sorted(reconstructions, key=lambda x: -len(x.shots))
    data.save_reconstruction(reconstructions)
    for k, r in enumerate(reconstructions):
        print 'Reconstruction', k, ':', len(r.shots), 'images', ',', len(r.points),'points'
    print len(reconstructions), 'partial reconstructions in total.'
def answer_four():
    """Correlate employee relationship scores with shared-movie counts and
    print the Pearson correlation matrix."""
    movie_graph = answer_two()
    plot_graph(movie_graph, type=2)
    projected = bipartite.weighted_projected_graph(movie_graph, employees)
    plot_graph(projected, type=1)
    rel_df = pd.read_csv('graphs/employee_relationships.txt',
                         delim_whitespace=True, header=None,
                         names=["E1", "E2", "Relationship"])
    print(rel_df)
    rel_graph = nx.from_pandas_edgelist(rel_df, "E1", "E2",
                                        edge_attr="Relationship")
    # Number of shared movies per employee pair (0 where no projection edge).
    shared = {(u, v): w for u, v, w in projected.edges(data="weight")}
    nx.set_edge_attributes(rel_graph, 0, "Shared_Movies")
    nx.set_edge_attributes(rel_graph, shared, "Shared_Movies")
    pairs = [(u, v) for u, v, _ in rel_graph.edges(data=True)]
    relationship = [d for _, _, d in rel_graph.edges(data="Relationship")]
    movies = [d for _, _, d in rel_graph.edges(data="Shared_Movies")]
    corr_df = pd.DataFrame(
        {"Relationship": relationship, "Shared_Movies": movies}, index=pairs)
    print(corr_df)
    print(corr_df.corr("pearson"))
def ProjectionIngredients(M, X):
    """Project the country-ingredient bipartite graph M onto the ingredient
    set X and dump ranked edge/degree lists to two text files.

    Returns the projection graph Q.
    """
    Q = bipartite.weighted_projected_graph(M, X)  # ingredient projection

    # Edges ranked by co-occurrence weight, heaviest first.
    ranked_edges = sorted(Q.edges(data='weight'), key=lambda e: e[2],
                          reverse=True)
    with open("ingred_paises_top10.txt", "w") as f:
        for u, v, w in ranked_edges:
            f.write(u + ";" + v + ";" + str(w) + "\n")

    # Ingredients ranked by how many others they combine with (degree).
    ranked_nodes = sorted(Q.nodes(), key=lambda node: Q.degree(node),
                          reverse=True)
    with open("ingred_mais_combinaveis.txt", "w") as f:
        for node in ranked_nodes:
            f.write(node + " ;" + str(Q.degree(node)) + "\n")
    return Q
def answer_four():
    """Pearson correlation between relationship score and number of movies
    two employees have in common."""
    # Relationship scores as a (n1, n2, score) dataframe.
    rel_graph = nx.read_edgelist(path='Employee_Relationships.txt',
                                 data=[('relationship', int)], delimiter='\t')
    rel_df = pd.DataFrame(rel_graph.edges(data=True),
                          columns=['n1', 'n2', 'relationship'])
    rel_df['relationship'] = rel_df['relationship'].map(
        lambda d: d['relationship'])
    # Shared-movie counts via the weighted projection onto employees.
    movie_graph = nx.read_edgelist('Employee_Movie_Choices.txt',
                                   delimiter='\t')
    movie_graph = bipartite.weighted_projected_graph(movie_graph, employees)
    common_df = pd.DataFrame(movie_graph.edges(data=True),
                             columns=['n1', 'n2', 'common'])
    common_df['common'] = common_df['common'].map(lambda d: d['weight'])
    # Outer merge so pairs without shared movies count as 0.
    merged = pd.merge(left=rel_df, right=common_df, how='outer',
                      on=['n1', 'n2']).fillna(0)
    return merged.corr()['relationship']['common']
def incremental_reconstruction(data):
    """Run the entire incremental reconstruction pipeline."""
    data.invent_reference_lla()
    graph = data.load_tracks_graph()
    tracks, images = tracks_and_images(graph)
    remaining_images = set(images)
    # Optional ground control points constrain grow_reconstruction.
    gcp = None
    if data.ground_control_points_exist():
        gcp = data.load_ground_control_points()
    # Co-visibility graph between images (weight = shared track count).
    image_graph = bipartite.weighted_projected_graph(graph, images)
    reconstructions = []
    pairs = compute_image_pairs(graph, image_graph, data.config)
    for im1, im2 in pairs:
        # Only bootstrap from images not yet consumed by a reconstruction.
        if im1 in remaining_images and im2 in remaining_images:
            reconstruction = bootstrap_reconstruction(data, graph, im1, im2)
            if reconstruction:
                remaining_images.remove(im1)
                remaining_images.remove(im2)
                reconstruction = grow_reconstruction(
                    data, graph, reconstruction, remaining_images, gcp)
                reconstructions.append(reconstruction)
    # Largest reconstruction first.
    reconstructions = sorted(reconstructions, key=lambda x: -len(x.shots))
    data.save_reconstruction(reconstructions)
    for k, r in enumerate(reconstructions):
        logger.info("Reconstruction {}: {} images, {} points".format(
            k, len(r.shots), len(r.points)))
    logger.info("{} partial reconstructions in total.".format(
        len(reconstructions)))
def write_report(self, data, graph, features_time, matches_time, tracks_time):
    """Save a JSON tracking report: wall times, image/track counts and the
    pairwise co-visibility ('viewing graph') weight matrix."""
    tracks, images = matching.tracks_and_images(graph)
    image_graph = bipartite.weighted_projected_graph(graph, images)
    # Dense matrix of shared-track weights; 0 where images share no tracks.
    matrix = [
        [image_graph[im1][im2]['weight']
         if im1 in image_graph and im2 in image_graph[im1] else 0
         for im2 in data.images()]
        for im1 in data.images()
    ]
    report = {
        "wall_times": {
            "load_features": features_time,
            "load_matches": matches_time,
            "compute_tracks": tracks_time,
        },
        "wall_time": features_time + matches_time + tracks_time,
        "num_images": len(images),
        "num_tracks": len(tracks),
        "viewing_graph": matrix,
    }
    data.save_report(io.json_dumps(report), 'tracks.json')
def incremental_reconstruction(data):
    # Python 2 module (print statements below).
    # Variant of the incremental SfM pipeline that skips fisheye cameras and
    # passes the co-visibility graph into grow_reconstruction.
    data.invent_reference_lla()
    graph = data.load_tracks_graph()
    tracks, images = tracks_and_images(graph)
    remaining_images = set(nonfisheye_cameras(data, images))
    print 'images', len(images)
    print 'nonfisheye images', len(remaining_images)
    # Co-visibility graph between images (weight = shared track count).
    image_graph = bipartite.weighted_projected_graph(graph, images)
    reconstructions = []
    pairs = compute_image_pairs(graph, image_graph, data.config)
    for im1, im2 in pairs:
        # Only bootstrap from images not yet consumed by a reconstruction.
        if im1 in remaining_images and im2 in remaining_images:
            reconstruction = bootstrap_reconstruction(data, graph, im1, im2)
            if reconstruction:
                remaining_images.remove(im1)
                remaining_images.remove(im2)
                reconstruction = grow_reconstruction(data, graph, reconstruction, remaining_images, image_graph)
                reconstructions.append(reconstruction)
    # Largest reconstruction first.  Reconstructions here are dicts
    # (r['shots'], r['points']) rather than objects.
    reconstructions = sorted(reconstructions, key=lambda x: -len(x['shots']))
    data.save_reconstruction(reconstructions)
    for k, r in enumerate(reconstructions):
        print 'Reconstruction', k, ':', len(r['shots']), 'images', ',', len(
            r['points']), 'points'
    print len(reconstructions), 'partial reconstructions in total.'
def answer_three():
    """Project the bipartite employee-movie graph onto the employee set."""
    movie_graph = answer_two()
    return bipartite.weighted_projected_graph(movie_graph, set(employees))
def nets(df):
    """Build the author-post bipartite network plus both one-mode projections,
    sanity-checking node/edge counts along the way."""
    pairs = df[['author', 'post_id']]
    two_mode = nx.Graph()
    two_mode.add_nodes_from(set(pairs['author']), bipartite=0)
    two_mode.add_nodes_from(set(pairs['post_id']), bipartite=1)
    two_mode.add_edges_from(list(zip(pairs['author'], pairs['post_id'])))
    assert len(two_mode.nodes()) == len(df['author'].unique()) + len(df['post_id'].unique()), 'number of nodes is off'
    assert len(two_mode.edges()) == len(pairs.drop_duplicates()), 'number of edges is off'
    print(f'the two-mode network has {len(two_mode.nodes())} nodes'
          f' and density of {nx.density(two_mode)}')

    # One-mode projections: unweighted for authors, weighted for posts.
    author_net = bipartite.projected_graph(two_mode, set(pairs['author']))
    assert len(author_net.nodes()) == len(df['author'].unique()), 'number of authors is off'
    print(f'the author network has {len(author_net.nodes())} nodes'
          f' and density of {nx.density(author_net)}')

    post_net = bipartite.weighted_projected_graph(two_mode, set(pairs['post_id']))
    assert len(post_net.nodes()) == len(df['post_id'].unique()), 'number of posts is off'
    print(f'the post network has {len(post_net.nodes())} nodes'
          f' and density of {nx.density(post_net)}')
    return two_mode, author_net, post_net
def predictAuc(self):
    """Estimate AUC of preferential-attachment link prediction over several
    edge-sampling fractions (indentation reconstructed from collapsed
    source — confirm nesting against the original file)."""
    # alpha = [0.2, 0.4, 0.6, 0.8, 0.95]
    # Fractions of observed edges kept in the sampled training graph.
    alpha = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95]
    edgeLength = len(self.graph.edges())
    # NOTE(review): indexing `edges[i]` below assumes edges() returns a list
    # (networkx 1.x); in nx 2.x this is a view and would fail — confirm.
    edges = self.graph.edges()
    graph = nx.Graph()
    iterations = 1
    aucDict = {}
    allAUC = []
    for a in alpha:
        auc = []
        for iter in range(iterations):
            sampledLen = int(edgeLength * a)
            graph.add_nodes_from(self.top_nodes, bipartite='Customers')
            graph.add_nodes_from(self.bottom_nodes, bipartite='Products')
            # Copy node metadata from the full graph (graph.node is the
            # networkx 1.x attribute-dict API).
            for node in set(n for n, d in graph.nodes(data=True)
                            if d['bipartite'] == 'Customers'):
                graph.node[node]['CustCategory'] = self.graph.node[node][
                    'CustCategory']
            for node in set(n for n, d in graph.nodes(data=True)
                            if d['bipartite'] == 'Products'):
                graph.node[node]['Category'] = self.graph.node[node][
                    'Category']
            # Random subsample of the observed edges as the training set.
            sampledIndex = list(np.random.choice(edgeLength, sampledLen))
            sampledEdges = []
            for i in sampledIndex:
                sampledEdges.append(edges[i])
            graph.add_edges_from(sampledEdges)
            # Greedily add the highest-scoring missing edge, one at a time.
            for i in range(edgeLength - sampledLen):
                # NOTE(review): `max` shadows the builtin within this scope.
                max = (-1) * np.inf
                max_c = 0
                max_p = 0
                for c in self.top_nodes:
                    for p in self.bottom_nodes:
                        if (c, p) in sampledEdges:
                            continue
                        else:
                            # Tentatively insert the edge, score it, undo.
                            graph.add_edge(c, p)
                            oneModeProjections = bipartite.weighted_projected_graph(
                                graph, self.top_nodes, ratio=True)
                            prob = len(oneModeProjections[c]) * len(
                                graph[p])
                            if (prob > max):
                                max = prob
                                max_c, max_p = (c, p)
                            graph.remove_edge(c, p)
                print(a)
                graph.add_edge(max_c, max_p)
            aucScore = self.CalcAUCScore(graph)
            auc.append(aucScore)
            allAUC.append(auc)
            graph.clear()
        aucDict[a] = sum(auc) / len(auc)
    print(allAUC)
    print(a)
    print(allAUC)
    print("prf attach" + str(aucDict))
    return aucDict
def targettarget(self):
    """Build the target-target projection of the drug-target graph, record
    its properties, and persist both the graph and its edge list."""
    print("Building Target Projection ...")
    target_nodes = [t for d in self.drugs for t in d.targets]
    projection = bipartite.weighted_projected_graph(self.__drugtarget,
                                                    target_nodes)
    self.graph_properties(projection)
    self.__targettarget = projection
    edge_df = nx.to_pandas_edgelist(projection)
    self.save_graph(self.added_new_drugs, edge_df, projection,
                    "target_projection")
    self.save()
def __init_subgraphs(self): sg_largest = max(nx.connected_component_subgraphs(self.__g), key=len) # wrap in error code? # split into bipartite on author and Journal Author, Journal = bi.sets(sg_largest) # largest connected component subgraphs self.__j_lg_cc_subgraph = bi.weighted_projected_graph( sg_largest, Journal) self.__a_lg_cc_subgraph = bi.weighted_projected_graph( sg_largest, Author) # trim and merge connected components to form islands sub graph a_sg_island = self.__trim(self.__j_lg_cc_subgraph) j_sg_island = self.__trim(self.__a_lg_cc_subgraph) self.__islands_graph = self.__merge_graph(a_sg_island, j_sg_island) # <---- add centrality measures here as node attributes ---> #author weighted proj self.add_degree_centrality( self.__a_lg_cc_subgraph) ## need to double for each... self.add_pagerank(self.__a_lg_cc_subgraph) self.add_betweenness_centrality(self.__a_lg_cc_subgraph) #journal weighted proj self.add_degree_centrality( self.__j_lg_cc_subgraph) ## need to double for each... self.add_pagerank(self.__j_lg_cc_subgraph) self.add_betweenness_centrality(self.__j_lg_cc_subgraph) #islands self.add_degree_centrality(self.__islands_graph) self.add_pagerank(self.__islands_graph) self.add_betweenness_centrality(self.__islands_graph) # convert these to pandas dfs self.__a_lg_cc_nodes, self.__a_lg_cc_edges = self.__to_pandas_df( self.__a_lg_cc_subgraph) self.__j_lg_cc_nodes, self.__j_lg_cc_edges = self.__to_pandas_df( self.__j_lg_cc_subgraph) self.__islands_nodes, self.__islands_edges = self.__to_pandas_df( self.__islands_graph)
def projection_slow(df):
    """Project the bipartite network onto its subreddit nodes and return a
    DataFrame of per-subreddit degree statistics (degree, degree centrality
    and core number)."""
    bipartite_net = getBipartiteNetwork(df)
    subreddits = df.subreddit.unique()
    projected = bipartite.weighted_projected_graph(bipartite_net, subreddits)
    stats = {
        'degree': dict(nx.degree(projected)),
        'degree_centrality': nx.degree_centrality(projected),
        'core_number': nx.core_number(projected),
    }
    return pd.DataFrame(stats)
def answer_three():
    """Return the weighted projection of the movie-choice bipartite graph
    onto the employee nodes (edge weights count shared movie choices)."""
    movie_graph = answer_two()
    choices = pd.read_csv('Employee_Movie_Choices.txt', sep='\t', header=0)
    employee_nodes = set(choices['#Employee'])
    return bipartite.weighted_projected_graph(movie_graph, employee_nodes)
def test_project_multigraph(self):
    # Two bottom nodes (1, 2), each connected to both "a" and "b".
    B = nx.Graph()
    B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)])
    # Simple projection collapses the two shared neighbours into one edge.
    P = bipartite.projected_graph(B, "ab")
    assert_edges_equal(P.edges(), [("a", "b")])
    # Weighted projection also yields a single (weighted) edge.
    P = bipartite.weighted_projected_graph(B, "ab")
    assert_edges_equal(P.edges(), [("a", "b")])
    # Multigraph projection keeps one parallel edge per shared neighbour.
    P = bipartite.projected_graph(B, "ab", multigraph=True)
    assert_edges_equal(P.edges(), [("a", "b"), ("a", "b")])
def test_project_multigraph(self):
    # Both 'a' and 'b' attach to the two bottom nodes 1 and 2.
    bigraph = nx.Graph()
    bigraph.add_edges_from([('a', 1), ('b', 1), ('a', 2), ('b', 2)])
    # Plain projection: one edge despite two shared neighbours.
    projected = bipartite.projected_graph(bigraph, 'ab')
    assert_edges_equal(list(projected.edges()), [('a', 'b')])
    # Weighted projection: still a single edge.
    projected = bipartite.weighted_projected_graph(bigraph, 'ab')
    assert_edges_equal(list(projected.edges()), [('a', 'b')])
    # Multigraph projection: one parallel edge per shared neighbour.
    projected = bipartite.projected_graph(bigraph, 'ab', multigraph=True)
    assert_edges_equal(list(projected.edges()), [('a', 'b'), ('a', 'b')])
def test_star_projected_graph(self):
    # Projecting the star S3 onto its leaves yields a triangle: every leaf
    # pair shares the hub as a common neighbour.
    star = nx.star_graph(3)
    leaves = [1, 2, 3]
    projected = bipartite.projected_graph(star, leaves)
    assert_equal(sorted(projected.nodes()), [1, 2, 3])
    assert_equal(sorted(projected.edges()), [(1, 2), (1, 3), (2, 3)])
    projected = bipartite.weighted_projected_graph(star, leaves)
    assert_equal(sorted(projected.nodes()), [1, 2, 3])
    assert_equal(sorted(projected.edges()), [(1, 2), (1, 3), (2, 3)])
    # Projecting onto the hub alone gives one isolated node, no edges.
    projected = bipartite.projected_graph(star, [0])
    assert_equal(sorted(projected.nodes()), [0])
    assert_equal(sorted(projected.edges()), [])
def main():
    """Load the sampled user/track edge list, project the bipartite network
    onto the user side, print the weight distribution and dump the weighted
    user-user edge list to disk."""
    data_dir = os.getcwd() + "/Sampled_Data/second_guys/"
    edge_list, users, tracks = data_import(data_dir + "merged2.txt")
    bipartite_graph = Bipartite(users, tracks, edge_list)
    user_projection = bipartite.weighted_projected_graph(bipartite_graph, users)
    print(weight_dist(user_projection))
    # One "u v weight" line per projected edge.
    with open("edge_list2_users.txt", "w") as out:
        for u, v in user_projection.edges_iter():
            weight = user_projection[u][v]["weight"]
            out.write(str(u) + " " + str(v) + " " + str(weight) + "\n")
def create_network(CProject, plugin, query):
    """
    Creates the network between papers and plugin results.

    Plugin may be any of ["regex", "gene", "sequence", "species"]
    Query corresponds to the fact types found by a plugin,
    for "species" it is one of ["binomial", "genus", "genussp"]
    for "sequences" it is one of ["carb3", "prot3", "dna", "prot"]
    for "gene" it is "human"
    for "regex" it is "regexname".

    Args:
        CProject object
        plugin = "string"
        query = "string"
    Returns:
        (bipartite_graph, monopartite_graph, paper_nodes, fact_nodes)

    >>> bipartiteGraph, factGraph, paperNodes, factNodes = create_network(CProject, "species", "binomial")
    """
    B = nx.Graph()
    labels = {}
    for ct in CProject.get_ctrees():
        # next(iter(...)) works on both Python 2 and 3; the original
        # ct.items()[0] raises TypeError on Python 3, where items() is a
        # view rather than a list.
        ctree_ID, ctree = next(iter(ct.items()))
        results = ctree.show_results(plugin).get(query, [])
        if len(results) > 0:
            B.add_node(ctree_ID, bipartite=0)
            labels[str(ctree_ID)] = str(ctree_ID)
            for result in results:
                B.add_node(result, bipartite=1)
                labels[result] = result.encode("utf-8").decode("utf-8")
                # add a link between a paper and a fact
                B.add_edge(ctree_ID, result)
    paper_nodes = set(n for n, d in B.nodes(data=True) if d['bipartite'] == 0)
    fact_nodes = set(B) - paper_nodes
    # Project onto the fact side: facts are linked when they co-occur in a
    # paper, weighted by the number of shared papers.
    G = bipartite.weighted_projected_graph(B, fact_nodes)
    return B, G, paper_nodes, fact_nodes
def write_report(self, data, graph, features_time, matches_time, tracks_time):
    """Summarise track creation — image/track counts, stage timings and the
    pairwise image view graph — and save it as the 'tracks.json' report."""
    tracks, images = matching.tracks_and_images(graph)
    image_graph = bipartite.weighted_projected_graph(graph, images)

    # Edge list of the image-image projection: (im1, im2, weight).
    view_graph = [
        (im1, im2, image_graph[im1][im2]['weight'])
        for im1 in data.images()
        for im2 in data.images()
        if im1 in image_graph and im2 in image_graph[im1]
    ]

    total_time = features_time + matches_time + tracks_time
    report = {
        "wall_times": {
            "load_features": features_time,
            "load_matches": matches_time,
            "compute_tracks": tracks_time,
        },
        "wall_time": total_time,
        "num_images": len(images),
        "num_tracks": len(tracks),
        "view_graph": view_graph,
    }
    data.save_report(io.json_dumps(report), 'tracks.json')
def create_network(CProject, plugin, query):
    """
    Creates the network between papers and plugin results.

    Args:
        CProject object
        plugin = "string"
        query = "string"
    Returns:
        (bipartite_graph, monopartite_graph, paper_nodes, fact_nodes)
    """
    B = nx.Graph()
    labels = {}
    for ct in CProject.get_ctrees():
        # next(iter(...)) is Python 2/3 compatible; the original
        # ct.items()[0] fails on Python 3 where items() returns a view.
        ctree_ID, ctree = next(iter(ct.items()))
        results = ctree.show_results(plugin).get(query, [])
        if len(results) > 0:
            B.add_node(ctree_ID, bipartite=0)
            labels[str(ctree_ID)] = str(ctree_ID)
            for result in results:
                B.add_node(result, bipartite=1)
                labels[result] = result.encode("utf-8").decode("utf-8")
                # add a link between a paper and a fact
                B.add_edge(ctree_ID, result)
    paper_nodes = set(n for n, d in B.nodes(data=True) if d['bipartite'] == 0)
    fact_nodes = set(B) - paper_nodes
    # Facts are linked when they co-occur in a paper.
    G = bipartite.weighted_projected_graph(B, fact_nodes)
    return B, G, paper_nodes, fact_nodes
cid=line[52:61] #amt=int(line[36:43]) g.add_edge(pid,cid) if cid not in can_list: can_list.append(cid) if pid not in pac_list: pac_list.append(pid) if pid in pacs: g.node[pid]=pacs[pid] else: pacs[pid]={'type':'unknown'} if cid in candidates: g.node[cid]=candidates[cid] else: candidates[cid]={'type':'unknown'} cannet=bi.weighted_projected_graph(g, can_list, ratio=False) def trim_edges(g, weight=1): g2=net.Graph() for f, to, edata in g.edges(data=True): if edata['weight'] > weight: g2.add_edge(f,to,edata) g2.node[f]=g.node[f] g2.node[to]=g.node[to] return g2 import multimode as mm cancore=trim_edges(cannet, weight=50) mm.plot_multimode(cancore, type_string='party')
These data were collected by Davis et al. in the 1930s.
They represent observed attendance at 14 social events by 18 Southern women.
The graph is bipartite (clubs, women).
"""
import networkx as nx
import networkx.algorithms.bipartite as bipartite

# Built-in bipartite benchmark graph; node sets are stored in the graph dict.
G = nx.davis_southern_women_graph()
women = G.graph['top']
clubs = G.graph['bottom']

print("Biadjacency matrix")
print(bipartite.biadjacency_matrix(G,women,clubs))

# project bipartite graph onto women nodes
W = bipartite.projected_graph(G, women)
print('')
print("#Friends, Member")
for w in women:
    print('%d %s' % (W.degree(w),w))

# project bipartite graph onto women nodes keeping number of co-occurence
# the degree computed is weighted and counts the total number of shared contacts
W = bipartite.weighted_projected_graph(G, women)
print('')
print("#Friend meetings, Member")
for w in women:
    print('%d %s' % (W.degree(w,weight='weight'),w))
g = net.Graph()
## we need to keep track separately of nodes of all types
pacs = []
candidates = []

## Construct a directed graph from edges in the CSV file
# row[0] = PAC id, row[12] = candidate id, row[10] = contribution amount
for row in r:
    if row[0] not in pacs:
        pacs.append(row[0])
    if row[12] not in candidates:
        candidates.append(row[12])
    g.add_edge(row[0], row[12], weight=int(row[10]))

## compute the projected graph (PACs linked via shared candidates), then
## keep its largest connected component
pacnet = bi.weighted_projected_graph(g, pacs, ratio=False)
pacnet = net.connected_component_subgraphs(pacnet)[0]
weights = [math.log(edata['weight'])
           for f, t, edata in pacnet.edges(data=True)]
# BUG FIX: the original called net.draw_networkx(p, ...) where ``p`` is
# undefined (NameError); the graph just computed is ``pacnet``.
net.draw_networkx(pacnet, width=weights, edge_color=weights)

## Compute the candidate network (candidates linked via shared PAC donors)
cannet = bi.weighted_projected_graph(g, candidates, ratio=False)
cannet = net.connected_component_subgraphs(cannet)[0]
weights = [math.log(edata['weight'])
           for f, t, edata in cannet.edges(data=True)]
plot.figure(2)  ## switch to a fresh canvas
net.draw_networkx(cannet, width=weights, edge_color=weights)
def plot_sample_activation_pattern(point, show_edges=False):
    """Render one spatially correlated MF stimulation pattern in 3D (Mayavi).

    Active mossy fibres are drawn in red, inactive ones in grey, with a
    gaussian-splatter volume around the active set. When *show_edges* is
    True, the heaviest edges of the MF-MF weighted projection of
    ``point.network_graph`` are drawn as tubes, coloured by the activation
    state of their endpoints. Blocks on ``mlab.show()`` until the window is
    closed; returns nothing.
    """
    mlab.figure(1, bgcolor=(1, 1, 1))
    mlab.clf()
    # colour preferences
    color_active = (0.8,0,0)
    color_inactive = (0.8,0.8,0.8)
    # load mf positions
    # assumes an (n_mf, 3) array of x/y/z coordinates — TODO confirm
    mf_positions = point.get_cell_positions()['MFs']
    x = mf_positions[:,0]
    y = mf_positions[:,1]
    z = mf_positions[:,2]
    # generate stimulation pattern in an appropriate format
    pattern_generator = patterns.SpatiallyCorrelatedStimulationPatternGenerator(point)
    pattern = pattern_generator.generate()
    not_pattern = [m for m in range(point.n_mf) if m not in pattern]
    # 0/1 mask over all MFs: 1 where the fibre belongs to the pattern
    binary_pattern = np.zeros(mf_positions.shape[0])
    binary_pattern[pattern] = 1
    # create mayavi plots for mfs
    pts_active = mlab.points3d(x[pattern], y[pattern], z[pattern], scale_factor=3, resolution=16, opacity=1, scale_mode='none', color=color_active)
    pts_inactive = mlab.points3d(x[not_pattern], y[not_pattern], z[not_pattern], scale_factor=3, resolution=16, opacity=1, scale_mode='none', color=color_inactive)
    mlab.pipeline.volume(mlab.pipeline.gaussian_splatter(pts_active), color=color_active)
    # if requested, plot the 'heaviest' edges in the projected network
    if show_edges:
        projected_graph = bipartite.weighted_projected_graph(point.network_graph, point.graph_mf_nodes)
        weights = np.array([data['weight'] for u,v,data in projected_graph.edges(data=True)])
        # only edges above the 70th weight percentile are drawn
        threshold_weight = np.percentile(weights, 70)
        max_weight = weights.max()
        for u,v,data in projected_graph.edges(data=True):
            if data['weight'] > threshold_weight:
                u_idx = point.nC_cell_index_from_graph_node(u)[0]
                v_idx = point.nC_cell_index_from_graph_node(v)[0]
                # endpoints of the tube in 3D (reuses x/y/z names)
                x = [mf_positions[u_idx,0], mf_positions[v_idx,0]]
                y = [mf_positions[u_idx,1], mf_positions[v_idx,1]]
                z = [mf_positions[u_idx,2], mf_positions[v_idx,2]]
                # colour by endpoint activation: black = both inactive,
                # red = both active, green = mixed
                if binary_pattern[u_idx]==binary_pattern[v_idx]==0:
                    color = (0,0,0)
                elif binary_pattern[u_idx]==binary_pattern[v_idx]==1:
                    color = (1,0,0)
                else:
                    color = (0,1,0)
                # tube thickness scales with edge weight
                edge = mlab.plot3d(x, y, z, color=color, opacity=0.5, tube_radius=data['weight']/10.)
    # show figure
    mlab.show()
# One-mode projections of the moderator/subreddit bipartite graph.
# NOTE(review): Python 2 code (print statements, degree_iter). G, top_nodes,
# bottom_nodes, export and mymean are defined earlier in the file —
# indentation of the ``if export`` bodies reconstructed; confirm.
pg1 = bipartite.projected_graph(G, bottom_nodes)
print "Unweighted moderator to moderator projection made"
print "Average unweighted degree: ",mymean(pg1.degree_iter())
if export:
    nx.write_gexf(pg1,"C:\\Users\\Theseus\\Documents\\moderatorproject\\bottoms.gexf")
    print "gexf exported"
pg2 = bipartite.projected_graph(G, top_nodes)
print "Unweighted subreddit to subreddit projection made"
print "Average unweighted degree: ",mymean(pg2.degree_iter())
if export:
    nx.write_gexf(pg2,"C:\\Users\\Theseus\\Documents\\moderatorproject\\tops.gexf")
    print "gexf exported"
# Weighted projections: edge weights count shared neighbours.
wpg1 = bipartite.weighted_projected_graph(G, bottom_nodes)
print "Weighted bottom node projection made"
print "Average weighted moderator-moderator degree: ",mymean(wpg1.degree_iter(weight='weight'))
if export:
    nx.write_gexf(wpg1,"C:\\Users\\Theseus\\Documents\\moderatorproject\\weightedbottoms.gexf")
    print "gexf exported"
wpg2 = bipartite.weighted_projected_graph(G, top_nodes)
print "Weighted top node projection made"
print "Average weighted subreddit-subreddit degree: ",mymean(wpg2.degree_iter(weight='weight'))
if export:
    nx.write_gexf(wpg2,"C:\\Users\\Theseus\\Documents\\moderatorproject\\weightedtops.gexf")
    print "gexf exported"
print "done"
# remember that node ids go from 1 to n_grc or n_mf if node <= n_mf: pos_index = node - 1 g.node[node]['bipartite'] = 0 g.node[node]['x'] = float(mf_pos[pos_index, 0]) g.node[node]['y'] = float(mf_pos[pos_index, 1]) g.node[node]['z'] = float(mf_pos[pos_index, 2]) else: pos_index = node - 1 - n_mf g.node[node]['bipartite'] = 1 g.node[node]['x'] = float(grc_pos[pos_index, 0]) g.node[node]['y'] = float(grc_pos[pos_index, 1]) g.node[node]['z'] = float(grc_pos[pos_index, 2]) # project bipartite graph onto GrCs grc_nodes = set(n for n,d in g.nodes(data=True) if d['bipartite']==1) projected_gr = bipartite.weighted_projected_graph(g, grc_nodes) # project bipartite graph onto MFs mf_nodes = set(n for n,d in g.nodes(data=True) if d['bipartite']==0) projected_mf = bipartite.weighted_projected_graph(g, mf_nodes) # invert weight to give a correct 'heatmap' plot in Gephi for u, v, data in projected_gr.edges(data=True): data['weight'] = n_grc_dend - data['weight'] for u, v, data in projected_mf.edges(data=True): data['weight'] = float(len(projected_mf.neighbors(u)) + len(projected_mf.neighbors(v)))/2 - data['weight'] # create randomised version of the same graph r = g.copy() r.remove_edges_from(g.edges()) mf_nodes = list(mf_nodes) for n in grc_nodes: r.add_edges_from([(random.choice(mf_nodes), n) for each in range(n_grc_dend)]) # project randomised graph onto GrCs and invert weights