def draw_graph(self, H, u, v, flow1, F1, flow2, F2):
    if not HAVE_PLT:
        return
    pos = nx.spring_layout(self.G)

    # Left panel: flow on the original graph.
    plt.subplot(1, 2, 1)
    plt.axis('off')
    nx.draw_networkx_nodes(self.G, pos)
    nx.draw_networkx_edges(self.G, pos)
    nx.draw_networkx_labels(self.G, pos)
    nx.draw_networkx_edge_labels(
        self.G, pos,
        edge_labels={(u, v): '{}/{}'.format(
            F1[u][v], self.G[u][v]['capacity'])
            for (u, v, data) in nx.to_edgelist(self.G)})
    plt.title('before: flow={}'.format(flow1))

    # Right panel: flow after the change, with edge (u, v) highlighted.
    plt.subplot(1, 2, 2)
    plt.axis('off')
    nx.draw_networkx_nodes(self.G, pos)
    nx.draw_networkx_edges(self.G, pos)
    nx.draw_networkx_edges(
        self.G, pos, edgelist=[(u, v)], width=3.0, edge_color='b')
    nx.draw_networkx_labels(self.G, pos)
    nx.draw_networkx_edge_labels(
        self.G, pos,
        edge_labels={(u, v): '{}/{}'.format(
            F2[u][v], H[u][v]['capacity'])
            for (u, v, data) in nx.to_edgelist(self.G)})
    plt.title('after: flow={}'.format(flow2))
def top_edges(g1, max_edges=100):
    # Keep only the max_edges highest-weight edges of g1.
    g2 = type(g1)()
    g2.add_nodes_from(g1.nodes())
    ewts = [x[2]['weight'] for x in nx.to_edgelist(g1)]
    keepwts = set(argsort(ewts)[::-1][:max_edges])
    g2.add_weighted_edges_from([
        (e[0], e[1], e[2]['weight'])
        for i, e in enumerate(nx.to_edgelist(g1))
        if i in keepwts])
    return g2
def viz():
    exp = request.args.get('e')
    basename = request.args.get('f')
    policy = request.args.get('p')
    exp_filepath = os.path.join(os.path.dirname(
        __file__), 'res', exp, basename + '.txt')
    names_filepath = os.path.join(os.path.dirname(
        __file__), 'res', exp, basename + '_names.csv')

    # Wordtree visualization.
    tree = load_tree(f_exp=exp_filepath, f_names=names_filepath,
                     policy=policy)
    print 'done loading tree'
    tree = nx.convert_node_labels_to_integers(tree, first_label=-1,
                                              ordering='sorted')
    type_color = dict(observation='black', action='red')
    edges = []
    for n1, n2, _ in nx.to_edgelist(tree):
        edges.append(
            [n2, tree.node[n2]['label'], n1, tree.node[n2]['count'],
             type_color[tree.node[n2]['type_']]])
    print 'done loading wordtree edgelist'

    # Treemap visualization.
    edges_treemap = []
    edges_treemap_other = []
    for n1, n2, _ in nx.to_edgelist(tree):
        edges_treemap.append(
            [{'v': str(n2), 'f': tree.node[n2]['label']},
             None if n1 == -1 else str(n1),
             tree.node[n2]['count'],
             np.mean(tree.node[n2]['cum_reward'])])
        edges_treemap_other.append(
            [tree.node[n2]['belief'], tree.node[n2]['t'],
             np.mean(tree.node[n2]['reward'])])
    print 'done loading treemap edgelist'

    # Other plots.
    #png_actions = os.path.join(
    #    exp, basename, 'a_e{}_p-{}.png'.format(episode, policy))
    #png_rewards = os.path.join(
    #    exp, basename, 'r_t_e{}.png'.format(episode))

    return render_template(
        'test.html', exp=exp, basename=basename, policy=policy,
        edges=edges, edges_treemap=edges_treemap,
        edges_treemap_other=edges_treemap_other)
def bliss(graph1, graph2):
    # Convert each networkx graph to an igraph Graph and compare with BLISS.
    edgelist1 = [tuple(list(elem)[:2]) for elem in list(nx.to_edgelist(graph1))]
    gr1 = ig.Graph(len(graph1), edgelist1)
    edgelist2 = [tuple(list(elem)[:2]) for elem in list(nx.to_edgelist(graph2))]
    gr2 = ig.Graph(len(graph2), edgelist2)
    if gr2.isomorphic_bliss(gr1):
        print('Graphs are isomorphic')
    else:
        print('Graphs are not isomorphic')
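A minimal, hypothetical usage sketch for bliss() above, assuming networkx is imported as nx and python-igraph as ig: two relabelled copies of the same cycle should be reported as isomorphic.

import networkx as nx

g_a = nx.cycle_graph(4)                    # 4-cycle on nodes 0..3
g_b = nx.relabel_nodes(g_a, {0: 3, 3: 0})  # same cycle with two labels swapped
bliss(g_a, g_b)                            # expected output: "Graphs are isomorphic"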
def matchRoadEdge(graphPicklePath): roadGraph = nx.read_gpickle(graphPicklePath) nodeList = roadGraph.nodes(data=True) _nodeList = roadGraph.nodes(data=False) nNode = len(nodeList) pos = [] for i in xrange(nNode): pos.append(nodeList[i][0]) pass shpLayout = dict(zip(roadGraph, pos)) _node1 = random.choice(_nodeList) _node2 = random.choice(_nodeList) _path = nx.dijkstra_path(roadGraph, _node1, _node2, None) pathGraph = nx.subgraph(roadGraph, _path) termnodelist = [_node1, _node2] edgeList = nx.to_edgelist(roadGraph, nodelist=None) pathEdgeList = nx.to_edgelist(pathGraph, nodelist=None) contourAttribute = nx.get_edge_attributes(roadGraph, 'contour') pathContourAttribute = nx.get_edge_attributes(pathGraph, 'contour') # --------------------------------------------------------------------------- for i, edge in enumerate(pathEdgeList): # print i, (edge[0],edge[1]), pathContourAttribute[(edge[0],edge[1])] pass print termnodelist # --------------------------------------------------------------------------- for i, pedge in enumerate(pathEdgeList): pathEdgeFeat = pathContourAttribute[(pedge[0], pedge[1])] _matchScores = [] for j, gedge in enumerate(edgeList): graphEdgeFeat = contourAttribute[(gedge[0], gedge[1])] seq = difflib.SequenceMatcher(None, pathEdgeFeat, graphEdgeFeat) _match = seq.ratio() _matchScores.append(_match) # print i, j, _match pass hist = np.histogram(_matchScores, bins=10) print i, hist[0] pass pass
def __init__(self, G):
    self.G = G
    self.edgelist = list(nx.to_edgelist(G))
    self.edge_to_idx = {}
    self.max_calls = 0
    # Map each (u, v) pair to its index in the edge list.
    for i, edge in enumerate(self.edgelist):
        self.edge_to_idx[edge[0], edge[1]] = i
def ShowGraph():
    def RandomLetter():
        import random
        letter = random.choice(string.ascii_lowercase)
        return letter

    # Erdos-Renyi graph with n=20 nodes and edge probability p=0.2.
    ER = nx.random_graphs.erdos_renyi_graph(20, 0.2)
    PW = nx.powerlaw_cluster_graph(20, 3, 0)
    # the shell layout
    pos = nx.shell_layout(ER)
    # Attach a random lowercase letter to every edge.
    NER = []
    for e in nx.to_edgelist(ER):
        T = (e[0], e[1], RandomLetter())
        NER.append(T)
    print(NER)
    nx.draw(ER, pos, with_labels=False, node_size=30)
    plt.show()
    query = "a.b.c.d"
    querylist = query.split("+", query.count(".", 0, len(query)))
    print(querylist)
    query1 = "a[2,3].h.(b+c+d.(e+f+g+t)+u).a[2,3].(b+c+d+e.(f+g))).y.(i+9)"
def neighorCount(urlShpFile):
    roadGraph = nx.read_shp(urlShpFile)
    nodeList = roadGraph.nodes(data=False)
    nNode = len(nodeList)
    print nNode
    edgesLst = nx.to_edgelist(roadGraph)
    print len(edgesLst)

    nodeNeighorhood = dict()
    neighborhoodSize = dict()
    nsize = [x for x in xrange(15)]
    for i in nsize:
        neighborhoodSize[i] = 0
    singleNeighborNodeList = []

    # Count each node's neighbours and histogram the neighbourhood sizes.
    for _node in nodeList:
        _neighbors = [x for x in nx.all_neighbors(roadGraph, _node)]
        if not _node in nodeNeighorhood:
            nNeighbor = len(_neighbors)
            nodeNeighorhood[_node] = nNeighbor
            neighborhoodSize[nNeighbor] += 1

    for key in neighborhoodSize.keys():
        print key, '\t:', neighborhoodSize[key]
def updateContourEdgeInfo(urlShpFile, graphPicklePath): roadGraphd = nx.read_shp(urlShpFile) roadGraph = roadGraphd.to_undirected() _edgeList = nx.to_edgelist(roadGraph) contourAttribute = dict() # spline parameters s=0.0 # smoothness parameter k=4 # spline order for _edge in _edgeList: _edgePts = getEdgePoints(_edge) _r = [item[0] for item in _edgePts] _c = [item[1] for item in _edgePts] # _r2,_c2 = getUniformSampledPoints(_r,_c) _r2,_c2 = getMeasuredSamplePoints(_r,_c) x = _r2 y = _c2 # M = 2*( len(x) + k) M = len(x) if M <= k: M = 2*k t = np.linspace(0, len(x), M) x = np.interp(t, np.arange(len(x)), x) y = np.interp(t, np.arange(len(y)), y) z = t # find the knot points tckp,u = splprep([x,y,z],s=s,k=k,nest=-1) # evaluate spline, including interpolated points xnew,ynew,znew = splev(linspace(0,1,M),tckp) dx,dy,dz = splev(linspace(0,1,M), tckp, der=1) slp = [] cnv = 180/ np.pi for i in xrange(len(dx)): slp.append(np.arctan((dy[i]/dx[i]))*cnv ) pass # quantize the slope and assign to edge descriptor list roadLetFeat = [] for elem in slp: feat = genFeat(elem, _alphabetSize) roadLetFeat.append(feat) pass contourAttribute[(_edge[0],_edge[1])] = roadLetFeat pass nx.set_edge_attributes(roadGraph, 'contour', contourAttribute) nx.write_gpickle(roadGraph, graphPicklePath) pass
def networkxTograph(graph):
    # Relabel nodes to consecutive integers, then build an igraph Graph.
    G = graph.copy()
    mapping = dict(zip(G.nodes(), range(G.number_of_nodes())))
    reverse_mapping = dict(zip(range(G.number_of_nodes()), G.nodes()))
    G = nx.relabel_nodes(G, mapping)
    G_ig = ig.Graph(len(G), list(zip(*list(zip(*nx.to_edgelist(G)))[:2])))
    return G_ig, reverse_mapping, mapping
def simple_networkx_graph(directed=True): num_nodes = 10 # node = np.arange(num_nodes).astype(np.int64) edge_index = \ torch.tensor([[0, 0, 1, 1, 2, 2, 3, 3, 3, 4, 4, 5, 6, 6, 7, 7, 9], [1, 2, 2, 3, 3, 8, 4, 5, 6, 5, 6, 7, 8, 9, 8, 9, 8]]).long() x = torch.zeros([num_nodes, 2]) y = torch.tensor([0, 0, 1, 1, 2, 2, 3, 3, 4, 4]).long() for i in range(num_nodes): x[i] = np.random.randint(1, num_nodes) edge_x = torch.zeros([edge_index.shape[1], 2]) edge_y = torch.tensor([0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3]).long() for i in range(edge_index.shape[1]): edge_x[i] = np.random.randint(1, num_nodes) G = nx.DiGraph() G.add_nodes_from(range(num_nodes)) for i, (u, v) in enumerate(edge_index.T.tolist()): G.add_edge(u, v) # if it is undirected, modify the edge attributes if directed is False: G = G.to_undirected() H = G.to_directed() edge_index = np.zeros([2, edge_index.shape[1] * 2]).astype(np.int64) edge_x = np.zeros([edge_x.shape[0] * 2, edge_x.shape[1]]) edge_y = np.zeros(edge_y.shape[0] * 2).astype(np.int64) for i, nx_edge in enumerate(nx.to_edgelist(H)): edge_index[:, i] = np.array([nx_edge[0], nx_edge[1]]).astype(np.int64) edge_x[i] = nx_edge[2]['edge_attr'] edge_y[i] = nx_edge[2]['edge_y'] graph_x = torch.tensor([[0, 1]]) graph_y = torch.tensor([0]) return G, x, y, edge_x, edge_y, edge_index, graph_x, graph_y
def write_npz(problem, out_file):
    '''Write problem to NPZ file. out_file may be a file name or an open
    file descriptor.'''
    p, g, h = problem.pedigree, problem.genotype, problem.haplotype
    if isinstance(out_file, str):
        util.mkdir_if_not_exists(os.path.dirname(out_file))
    # Wrap every non-np-array quantity by a np-array
    np.savez(out_file,
             pedigree_nodes=p.graph.nodes(),
             pedigree_graph=np.array([nx.to_edgelist(p.graph)]),
             pedigree_sample_id=p.sample_id,
             pedigree_sex=p.sex,
             pedigree_phenotype=p.phenotype,
             pedigree_node_type=p.node_type,
             pedigree_sample_index=p.sample_index,
             pedigree_num_genotyped=np.array([p.num_genotyped]),
             genotype_data=g.data,
             genotype_snp=g.snp,
             genotype_map=g.map,
             haplotype_data=h.data,
             haplotype_snp=h.snp,
             haplotype_qc=h.qc,
             haplotype_hap_type=h.hap_type,
             haplotype_poo_phase=h.poo_phase,
             error=problem.error,
             frames=np.array([problem.frames]),  # problem.frames.to_array(),
             info=np.array([problem.info]),
             lam=problem.lam)
def __init__(self, another='not'):
    # Read the graph file ('ego-graph.gexf' when no file name is given).
    if another == 'not':
        a = nx.read_gexf('ego-graph.gexf')
    else:
        a = nx.read_gexf(another)
    self.a = a
    self.adj = nx.to_numpy_matrix(a)          # adjacency matrix
    self.nodes = np.arange(len(nx.nodes(a)))  # list of all vertices
    self.n = len(self.nodes)                  # number of vertices
    self.edges = nx.to_edgelist(a)            # edge list
    self.reversed_adj = self.adj.transpose()
    self.not_oriented = self.adj + self.reversed_adj
    self.not_oriented[np.nonzero(self.not_oriented)] = 1  # undirected adjacency matrix
    self.tranz = self.floyd_warshall()
    self.r = self.radius()
    self.d = self.diametr()
    # self.nodes = np.arange(4)
    # self.adj = np.matrix([[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1], [1, 0, 1, 0]])
    # self.not_oriented = self.adj + self.reversed_adj
    # self.not_oriented[np.nonzero(self.not_oriented)] = 1
    self.edges = []
    for i, row in enumerate(self.not_oriented):
        for j, node in enumerate(row.flat):
            if node != 0:
                self.edges.append((i, j))
def to_edgelist(self):
    """
    Export the current transforms as a list of
    edge tuples, with each tuple having the format:
    (node_a, node_b, {metadata})

    Returns
    ---------
    edgelist : (n,) list
      Of edge tuples
    """
    # save cleaned edges
    export = []
    # loop through (node, node, edge attributes)
    for edge in nx.to_edgelist(self.transforms):
        a, b, attr = edge
        # geometry is a node property but save it to the
        # edge so we don't need two dictionaries
        try:
            b_attr = self.transforms.nodes[b]
        except BaseException:
            # networkx 1.X API
            b_attr = self.transforms.node[b]
        # apply node geometry to edge attributes
        if 'geometry' in b_attr:
            attr['geometry'] = b_attr['geometry']
        # save the matrix as a float list
        attr['matrix'] = np.asanyarray(
            attr['matrix'], dtype=np.float64).tolist()
        export.append((a, b, attr))
    return export
def from_nxGraph(nxGraph, coordinates, properties={}):
    """
    Creates a Karst graph from a Networkx graph.

    Takes a graph in the Networkx format and the coordinates of the nodes
    to generate a karstic network.

    Parameters
    ----------
    nxGraph : networkx graph
        the input graph
    coordinates : dictionary
        the coordinates of the nodes, keys are node names
    properties : dictionary
        optional argument containing properties associated with the nodes

    Returns
    -------
    KGraph
        A KGraph object

    Examples
    --------
    >>> myKGraph = kn.from_nxGraph(G, coord)
    >>> myKGraph = kn.from_nxGraph(G, coord, prop)
    """
    # Initialization of the complete graph
    edges = nx.to_edgelist(nxGraph)
    Kg = KGraph(edges, coordinates, properties)
    return Kg
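A rough sketch of how the inputs might be assembled, assuming the module above is imported as kn (as in the docstring); the graph, coordinate format, and property values below are purely illustrative.

import networkx as nx

# Hypothetical 3-node network with (x, y, z) coordinates per node.
G = nx.Graph([(0, 1), (1, 2)])
coord = {0: (0.0, 0.0, 0.0), 1: (10.0, 0.0, -2.0), 2: (20.0, 5.0, -6.0)}
prop = {0: [1.2], 1: [0.8], 2: [2.4]}

myKGraph = kn.from_nxGraph(G, coord, prop)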
def parseGraph(urlShpFile):
    roadGraphd = nx.read_shp(urlShpFile)
    roadGraph = roadGraphd.to_undirected()
    # roadGraph = nx.read_shp(urlShpFile)
    # roadGraph = list(nx.connected_component_subgraphs(roadGraph.to_undirected()))[0]
    nodeLst = roadGraph.nodes(data=False)
    roadEdgeList = nx.to_edgelist(roadGraph)

    # Give every edge a unit weight.
    weightAttribute = dict()
    for roadEdge in roadEdgeList:
        weightAttribute[(roadEdge[0], roadEdge[1])] = 1
    nx.set_edge_attributes(roadGraph, 'weight', weightAttribute)

    for node in nodeLst:
        _n = roadGraph.neighbors(node)
        print len(_n), node
        if (len(_n) > 4):
            findTree(roadGraph, node)
            # findmst(roadGraph, node)
def process(self):
    # Read data into huge `Data` list.
    dataset = torch.load(
        os.path.join(
            self.root,
            f'{self.k}cycles_n{self.n}_{self.n_samples}samples_{self.s}.pt'))

    data_list = []
    for sample in dataset:
        graph, y, label = sample
        # Build a symmetric edge_index from the (undirected) edge list.
        edge_list = nx.to_edgelist(graph)
        edges = [np.array([edge[0], edge[1]]) for edge in edge_list]
        edges2 = [np.array([edge[1], edge[0]]) for edge in edge_list]
        edge_index = torch.tensor(np.array(edges + edges2).T, dtype=torch.long)
        x = torch.ones(graph.number_of_nodes(), 1, dtype=torch.float)
        y = torch.tensor([1], dtype=torch.long) if label == 'has-kcycle' \
            else torch.tensor([0], dtype=torch.long)
        data_list.append(Data(x=x, edge_index=edge_index, edge_attr=None, y=y))

    # Subsample the data
    if self.train:
        all_data = len(data_list)
        to_select = int(all_data * self.proportion)
        print(to_select, "samples were selected")
        data_list = data_list[:to_select]

    data, slices = self.collate(data_list)
    torch.save((data, slices), self.processed_paths[0])
def roadData1(): roadGraph = nx.read_shp(urlShpFile) roadEdgeList = nx.to_edgelist(roadGraph) _flag = True for roadEdge in roadEdgeList: while _flag: print roadEdge[0] print roadEdge[1] roadEdgeData = roadEdge[2] edgeKeyList = roadEdgeData.keys() for edgeKey in edgeKeyList: print edgeKey print roadEdgeData[edgeKey] pass edgePointStr = roadEdgeData['Wkt'] b1 = edgePointStr.find('(') b2 = edgePointStr.find(')') edgePointsSubStr = edgePointStr[b1 + 1:b2] print edgePointsSubStr edgePointPairStrLst = edgePointsSubStr.split(',') edgePointPairLst = [] for edgePointPairStr in edgePointPairStrLst: edgePointPair = [float(x) for x in edgePointPairStr.split(' ')] edgePointPairLst.append(edgePointPair) pass # coordinates of points in an edge for _item in edgePointPairLst: print _item pass _flag = False pass
def roadData2():
    roadGraph = nx.read_shp(urlShpFile)
    nodeList = roadGraph.nodes(data=False)
    nNode = len(nodeList)

    # Pick a random node that has at least two incident edges.
    _node = random.choice(nodeList)
    _nodeEdgeList = nx.edges(roadGraph, _node)
    count = 0
    while (len(_nodeEdgeList) < 2):
        _node = random.choice(nodeList)
        _nodeEdgeList = nx.edges(roadGraph, _node)
        count += 1
    print _nodeEdgeList

    _nodeEdgeLst = nx.to_edgelist(roadGraph, _node)
    for _edge in _nodeEdgeLst:
        _edgePts = getEdgePoints(_edge)
        print _edgePts

    edgePointsLst = []
    for roadEdge in _nodeEdgeLst:
        edgePointPairLst = getEdgePoints(roadEdge)
        edgePointsLst.append(edgePointPairLst)
    dispNodeEdgeGraph(edgePointsLst)
def roadData(): roadGraph = nx.read_shp(urlShpFile) nodeList = roadGraph.nodes(data=False) nNode = len(nodeList) qnodeList = [] for i in xrange(1): qnodeList.append(random.choice(nodeList)) pass print 'qnodelist', qnodeList roadEdgeList = nx.to_edgelist(roadGraph, qnodeList) _flag = 10 for roadEdge in roadEdgeList: if _flag: edgePointPairLst = getEdgePoints(roadEdge) nAlpha = [float(x) for x in roadEdge[0]] nOmega = [float(x) for x in roadEdge[1]] # print nAlpha # print nOmega # for edgePointPair in edgePointPairLst: # print edgePointPair # pass edgePoints = [] edgePoints.append(nAlpha) for edgePointPair in edgePointPairLst: edgePoints.append(edgePointPair) pass edgePoints.append(nOmega) dispEdgeGraph(edgePoints) _flag -= 1 pass pass
def analyseGraphDist(graphPicklePath): roadGraph = nx.read_gpickle(graphPicklePath) nodeList = roadGraph.nodes(data=True) _nodeList = roadGraph.nodes(data=False) nNode = len(nodeList) pos = [] for i in xrange(nNode): pos.append(nodeList[i][0]) pass shpLayout = dict(zip(roadGraph, pos)) _node1 = random.choice(_nodeList) _node2 = random.choice(_nodeList) _path = nx.dijkstra_path(roadGraph, _node1, _node2, None) pathGraph = nx.subgraph(roadGraph, _path) termnodelist = [_node1, _node2] edgeList = nx.to_edgelist(roadGraph, nodelist=None) pathEdgeList = nx.to_edgelist(pathGraph, nodelist=None) contourAttribute = nx.get_edge_attributes(roadGraph, 'contour') pathContourAttribute = nx.get_edge_attributes(pathGraph, 'contour') distList = [] for pedge in pathEdgeList: pathEdgeFeat = pathContourAttribute[(pedge[0], pedge[1])] # print pathEdgeFeat # print len(pathEdgeFeat) edgePtsList = getEdgePoints(pedge) # print edgePtsList _distList = [] for i in range(len(edgePtsList) - 1): _dist = geopy.distance.vincenty(edgePtsList[i], edgePtsList[i + 1]).meters _distList.append(_dist) pass distList.append(np.sum(_distList) / len(edgePtsList)) print np.average(distList) print np.std(distList)
def featRoadLet(_urlShpFile):
    roadGraph = nx.read_shp(urlShpFile)
    nodeList = roadGraph.nodes(data=False)
    roadEdgeList = nx.to_edgelist(roadGraph)

    # Compute a curvature feature for every edge.
    roadEdgeFeatDict = dict()
    for roadEdge in roadEdgeList:
        roadEdgeFeatDict[(roadEdge[0], roadEdge[1])] = getRoadEdgeFeat(roadEdge)
    nx.set_edge_attributes(roadGraph, 'roadLetCurvature', roadEdgeFeatDict)

    # return the road graph with the edge attribute with new value
    return roadGraph
def initialize_dgl_graph(state):
    nx_graph = state.instance
    nx_complete_graph = nx_graph.copy()
    make_complete_nx_graph(nx_complete_graph)
    dgl_graph = generate_dgl_graph(nx_complete_graph)
    node_snorm, edge_snorm = initialize_dgl_features(
        state, dgl_graph, [], nx.to_edgelist(nx_graph), 'cpu')
    return dgl_graph, node_snorm, edge_snorm
def gammaIso(graph1, graph2): def KFilter(ig1, ig2, K=10, gamma=0.9): #Size-Filter if (len(ig1.vs) > K and len(ig2.vs) > K): #Filter based on gamma isomorphism criterion if (len(ig2.vs) >= gamma * (len(ig1.vs))): return True return False nodes_graph1 = graph1.nodes.keys() edges_graph1 = graph1.edges nodes_graph2 = graph2.nodes.keys() edges_graph2 = graph2.edges G1 = InitGraph(nodes_graph1, edges_graph1) G2 = InitGraph(nodes_graph2, edges_graph2) G1.remove_nodes_from(nx.isolates( G1)) #Drop Isolates (Commenting it out as doing it in igraph) G2.remove_nodes_from(nx.isolates(G2)) #Drop Isolates #Remove self loops self_edge_list = G1.selfloop_edges() for e in self_edge_list: G1.remove_edge(*e) self_edge_list = G2.selfloop_edges() for e in self_edge_list: G2.remove_edge(*e) ig1 = ig.Graph(len(G1), [(int(x), int(y)) for x, y in zip(*zip(*nx.to_edgelist(G1))[:2])], directed=True) ig2 = ig.Graph(len(G2), [(int(x), int(y)) for x, y in zip(*zip(*nx.to_edgelist(G2))[:2])], directed=True) ig1.vs.select(_degree=0).delete() #Drop Isolates ig2.vs.select(_degree=0).delete() #Drop Isolates layout = ig1.layout("kk") ig.plot(ig1, layout=layout) layout = ig2.layout("kk") ig.plot(ig2, layout=layout) '''
def thr_graph(g1, thr=.9):
    # Keep only edges whose weight is at least thr; kept edges get weight 1.
    g2 = type(g1)()
    g2.add_nodes_from(g1.nodes())
    g2.add_weighted_edges_from([
        (e[0], e[1], 1.)
        for e in nx.to_edgelist(g1)
        if e[2]['weight'] >= thr])
    return g2
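A short usage sketch, mirroring how the run() functions further down chain these helpers (there called nfu.thr_graph and nfu.top_edges); the graph and percentile cut-off below are illustrative only.

import numpy as np
import networkx as nx

# Hypothetical weighted graph.
g = nx.DiGraph()
g.add_weighted_edges_from([('a', 'b', 0.2), ('b', 'c', 0.9), ('a', 'c', 0.7)])

# Drop edges below the median weight, then keep at most two of the survivors.
thr = np.percentile([e[2]['weight'] for e in nx.to_edgelist(g)], 50)
g_top = top_edges(thr_graph(g, thr), max_edges=2)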
def graph(self): """ Draws the Graph/Network switches ... """ # --------------------------------------------------------- N = self.G.nodes() d = defaultdict(list) E = self.G.number_of_edges() print "The number of Nodes in this Network:", N print "The number of Edges in this Network:", E fig = plt.figure() fig.canvas.set_window_title("The ERnet Topology View") nx.draw_networkx(self.G) plt.show() g = ig.Graph(len(self.G), zip(*zip(*nx.to_edgelist(self.G))[:2])) self.pt(g) cl = g.community_fastgreedy() # print cl membership = cl.as_clustering().membership print membership self.pt(g, membership) # print g.get_all_shortest_paths (2, 33) membership.pop(0) for q, a in zip(N, membership): print 'The Node {0} --> Belongs to cluster {1}.'.format(q, a) # The following procedure is to get the exact nodes of each cluster for i in range(max(membership)): i += 1 for j in range(len(N)): if membership[j] == i: d[i].append(N[j]) print d.items() # Test the subgraphs correctness, which is the clusters fig = plt.figure() fig.canvas.set_window_title("Sub-Graph/Clique 1 of ERnet") G3 = self.G.subgraph( d[1] ) # each index in dictionary "d" is considered as a one cluster/subgraph of G nx.draw_networkx(G3) plt.show()
def networkx_to_igraph(networkx_graph: DiGraph) -> Graph:
    """
    Convert a networkx DiGraph to an iGraph Graph.

    Code via https://stackoverflow.com/a/39085829
    By https://stackoverflow.com/users/1628638/ulrich-stern and SO contributors
    """
    return Graph(len(networkx_graph),
                 list(zip(*list(zip(*to_edgelist(networkx_graph)))[:2])))
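For readability, the nested zip expression in networkx_to_igraph() simply strips the attribute dictionaries from the (u, v, data) triples returned by to_edgelist; a rough equivalent under the same imports is sketched below.

def networkx_to_igraph_explicit(networkx_graph: DiGraph) -> Graph:
    # Reduce each (u, v, data) triple to a plain (u, v) pair.
    edge_pairs = [(u, v) for u, v, _ in to_edgelist(networkx_graph)]
    return Graph(len(networkx_graph), edge_pairs)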
def print_graph(G):
    zop = []
    NS = str(G.number_of_nodes())
    zop.append(NS + "\n")
    conn = nx.to_edgelist(G)
    # One line per edge: "u v <random weight>".
    for (u, v, _) in conn:
        zop.append(f"{u} {v} {random.randint(1, 99)}\n")
    with open(NS + ".in", "w") as f:
        f.writelines(zop)
def le(nome):
    G = nx.read_weighted_edgelist(nome, nodetype=int, comments='%')
    maiorComponente = max(nx.connected_component_subgraphs(G), key=len)
    adicionar = []
    # Collect (u, v, data) tuples from the largest connected component.
    for i in nx.to_edgelist(maiorComponente):
        adicionar.append(tuple([i[0], i[1], i[2]]))
    return ig.Graph().TupleList(adicionar)
def save_graph(adj, nodelist_mapping, filename):
    """Save a graph in the form of an edgelist from its adjacency matrix
    and node mapping"""
    G_adj = nx.relabel_nodes(nx.Graph(adj), nodelist_mapping)
    edgelists = list(nx.to_edgelist(G_adj))
    f = open(filename, "w")
    for i in range(len(edgelists)):
        f.write(str(edgelists[i][0]) + '\t' + str(edgelists[i][1]) + '\n')
    f.close()
def get_edges_data(net, data_key, default_val=0.):
    # Build a dense matrix of per-edge synapse attributes, indexed by node order.
    index = dict(zip(list(net), range(len(net))))
    data = np.full((len(net), len(net)), default_val)
    edges = nx.to_edgelist(net)
    for (n1, n2, edge) in edges:
        try:
            data[index[n1], index[n2]] = edge["synapse"].__dict__[data_key]
        except:
            data[index[n1], index[n2]] = default_val
    return data
def toKeyValueList(self):
    services.BGXlog.logInfo('Translating DAG to key-value pairs')
    db_list = []
    for node in self.sort():
        data = {'type': 'node', 'value': node.value()}
        value = json.dumps(data)
        db_list.append(tuple((node.key(), value)))
    for edge in nx.to_edgelist(self.graph):
        db_list.append(Transaction.hashFromTuple(edge))
    return db_list
def mfinderFormat(outGraph, outFileName):
    # Convert to a list of edges in the form of a tuple of node number for the
    # originating node, node number of the ending node, and a dictionary of edge
    # attributes. I only care about the from and to nodes.
    edgelist = nx.to_edgelist(outGraph)
    with open(outFileName, 'w') as outFile:
        for edge in edgelist:
            # The 1 at the end is a weight that mfinder expects, but does not use
            outFile.write("{0} {1} 1\n".format(edge[0], edge[1]))
    outFile.close()
    print "Wrote mfinder input file to {0}".format(outFileName)
def test_from_edgelist(self):
    # Pandas DataFrame
    g = nx.cycle_graph(10)
    G = nx.Graph()
    G.add_nodes_from(g)
    G.add_weighted_edges_from((u, v, u) for u, v in g.edges())
    edgelist = nx.to_edgelist(G)
    source = [s for s, t, d in edgelist]
    target = [t for s, t, d in edgelist]
    weight = [d['weight'] for s, t, d in edgelist]
    edges = pd.DataFrame({'source': source, 'target': target,
                          'weight': weight})

    GG = nx.from_pandas_edgelist(edges, edge_attr='weight')
    assert_nodes_equal(G.nodes(), GG.nodes())
    assert_edges_equal(G.edges(), GG.edges())
    GW = nx.to_networkx_graph(edges, create_using=nx.Graph())
    assert_nodes_equal(G.nodes(), GW.nodes())
    assert_edges_equal(G.edges(), GW.edges())
def to_edgelist(self):
    """
    Export the current transforms as a list of edge tuples, with
    each tuple having the format:
    (node_a, node_b, {metadata})

    Returns
    -------
    edgelist: (n,) list of tuples
    """
    # save cleaned edges
    export = []
    # loop through (node, node, edge attributes)
    for edge in nx.to_edgelist(self.transforms):
        a, b, c = edge
        # geometry is a node property but save it to the
        # edge so we don't need two dictionaries
        if 'geometry' in self.transforms.node[b]:
            c['geometry'] = self.transforms.node[b]['geometry']
        # save the matrix as a float list
        c['matrix'] = np.asanyarray(c['matrix'], dtype=np.float64).tolist()
        export.append((a, b, c))
    return export
def export(self):
    export = to_edgelist(self.transforms)
    # Make each transform matrix JSON-serializable.
    for e in export:
        e[2]['matrix'] = np.array(e[2]['matrix']).tolist()
    return export
def main():
    graph = networkx.DiGraph(numpy.loadtxt('../resources/House4.mat'))
    with open('../resources/House4.tsv', 'w+') as data_out:
        for edge in networkx.to_edgelist(graph):
            print(edge[0], edge[1], edge[2]['weight'], sep='\t', file=data_out)
def centrality(): start_time = datetime.now() #TODO add config file read #TODO support cross network calculations (author_node --is--> author_node) ## >Get the REQUIRED parameters req_params = {} for entry in req_param_list: if request.args.get(entry) is not None: req_params[entry] = urllib2.unquote(request.args.get(entry)).replace('\'', '') else: ret_string = {'error': 'Required parameter missing: ' + entry} inf_sup.append_to_log(log_filename, str(ret_string)) return jsonify(ret_string) #TODO Validate start_date, end_date ## >Verify the metric is valid if req_params['metric'] not in metric_list: ret_string = {'error': 'Invalid metric requested'} inf_sup.append_to_log(log_filename, str(ret_string)) return jsonify(ret_string) ## >Verify the start date is before the end date if int(req_params['start_date']) > int(req_params['end_date']): ret_string = {'error': 'End data before start date'} inf_sup.append_to_log(log_filename, str(ret_string)) return jsonify(ret_string) ## >Get the OPTIONAL parameters opt_params = {} for entry in opt_param_list: if request.args.get(entry) is not None: opt_params[entry] = urllib2.unquote(request.args.get(entry)).replace('\'', '') else: opt_params[entry] = None #TODO validate the optional parameters ## >Get the FORMAT parameters for_params = {} for entry in format_param_list: if request.args.get(entry) is not None: for_params[entry] = urllib2.unquote(request.args.get(entry)).replace('\'', '') else: for_params[entry] = None params = dict(req_params.items() + opt_params.items() + for_params.items()) ## >Build the mongo query mongo_query = {} mongo_query['PostDate'] = {'$gte': params['start_date'], '$lte': params['end_date']} mongo_query['Network'] = params['network'] for param, value in opt_params.iteritems(): if value is not None: if param is 'type': mongo_query['Type'] = opt_params['type'] if param is 'twit_collect': mongo_query['Meta.sources'] = {'$in': [opt_params['twit_collect']]} if param is 'matched_project': mongo_query['Matching'] = {'$elemMatch': {'ProjectId': opt_params['matched_project']}} if param is 'matched_topic': #TODO pass if param is 'scored_project': #TODO pass if param is 'scored_topic': #TODO pass ## >Check if there are any matches if author_collection.find(mongo_query).count == 0: ret_string = {'error': 'No connections found matching the criteria'} inf_sup.append_to_log(log_filename, str(ret_string)) return jsonify(ret_string) else: ## >Map/reduce the A-->A connections a2a_map = Code(""" function () { emit({"author": this.Author, "connection": this.Connection}, {"count": 1} ); } """) a2a_reduce = Code(""" function (key, values) { var count = 0; values.forEach(function(v) { count += v['count']; }); return {"count": count}; } """) a2a_result = author_collection.map_reduce(a2a_map, a2a_reduce, "a2a_results", query=mongo_query).find() ## >Build the author list author_list = [] for a2a_count in a2a_result: con_author = a2a_count['_id']['author'].replace('&', '&') con_connect = a2a_count['_id']['connection'].replace('&', '&') if (len(con_author) > 0) and (len(con_connect) > 0): author_list.append((con_author, con_connect, int(a2a_count['value']['count']))) ## >Influence Calculations if len(author_list) > 0: ## >Create a black graph G = nx.DiGraph() ## >Add the endges to the graph G.add_weighted_edges_from(author_list) ## >Run the requested metric, on the graph 'G' try: calc_metric, stats = inf.run_metric(params['metric'], G, 'weight', True) except: try: if params['metric'] is 'pagerank': calc_metric, stats = inf.run_metric('pagerank_norm', G, 
'weight', True) else: return jsonify({'error': 'Error calculating metric'}) except: return jsonify({'error': 'Pagerank did not converge'}) else: ret_string = {'error': 'No connections found matching the criteria'} inf_sup.append_to_log(log_filename, str(ret_string)) return jsonify(ret_string) ## >Build the dictionary to return data_results = {} ## >Append the metric data data_results['metrics'] = calc_metric ## >If graph requested if for_params['return_graph'] is not None: if for_params['return_graph'].lower() == 'true': ## >If format = data if for_params['format'] is None: ## >Append the graph data data_results['graph'] = nx.to_edgelist(G, nodelist=None) ## >If format = graphml elif for_params['format'].lower() == 'graphml': ## >Create the graphml filename graphml_name = inf_sup.create_filename(params) ## >Get the graphml data graphml_data = '\n'.join(nx.generate_graphml(G)) ## >Add the versioning graphml_final = '<?xml version="1.0" encoding="UTF-8"?>' + "\n" h = HTMLParser.HTMLParser() for line in graphml_data.split("\n"): ## >Escape the html content line = h.unescape(line) ## >For each node add appropriate metric data into the graphml if '<node id="' in line: graphml_final += (line.replace('/>', '>') + "\n") node_name = line.partition('"')[-1].rpartition('"')[0] graphml_final += ' <data key="d1">' + str(calc_metric[node_name]) + '</data>' + "\n" graphml_final += ' </node>' + "\n" else: graphml_final += line + "\n" ## >Add the key for the metric attribute if '<key' in line: graphml_final += ' <key attr.name="' + params['metric'] + '" attr.type="float" for="node" id="d1" />' if app.debug is True: ## >Write out the graphml for testing graphml_name = inf_sup.create_filename(params) with open(graphml_name, 'w') as output_file: for line in graphml_final: output_file.write(line.encode('utf-8')) if not output_file.closed: output_file.close() ## >Create the appropriate response to return the graphml response = make_response(graphml_final) response.headers["Content-Type"] = 'text/xml' response.headers["Content-Distribution"] = 'attachment; filename=%s' % (graphml_name,) return response ## >To the log statistics = {} statistics['api_query'] = params statistics['mongo_query'] = mongo_query statistics['influence_metric'] = params['metric'] statistics['metric_runtime'] = stats statistics['full_runtime'] = str(datetime.now() - start_time) statistics['graph_nodes'] = G.order() statistics['graph_edges'] = G.size() inf_sup.append_to_log(log_filename, str(statistics)) if app.debug is True: ### >Write out the influence for testing graphml_name = inf_sup.create_filename(params) influence_file = graphml_name.replace('.graphml', '.txt') with open(influence_file, 'w') as output_file: graph_list = calc_metric.items() for item in graph_list: output_file.write(item[0].encode('utf_8') + "," + str(item[1]) + '\n') if not output_file.closed: output_file.close() return jsonify(result=data_results)
def design_network(topography, diameters, verbose=True): """ builds the design problem and solves it """ # expand the multi-faucets nodes topo = nx.DiGraph(topography) faucets = [n for n,d in topo.nodes_iter(data=True) if d['type']=="faucet"] for f in faucets: if topo.node[f]['nb_faucets'] > 1: for i in range(topo.node[f]['nb_faucets']): g = '%s_%d' % (f,i+1) topo.add_node(g,{'altitude':topo.node[f]['altitude'], 'nb_faucets':1, 'load_factor':1, 'type':'dispatch'}) topo.add_edge(f,g,{'length':1}) # Number of non-zeros elements in A* # This part should be removed later #tank = [n for n,d in topo.nodes_iter(data=True) if d['type']=="tank"][0] #all_paths = [nx.shortest_path_length(topo,tank,n) for n in topo.nodes_iter()] #nb_pipes = topo.number_of_edges() #nb_nodes = topo.number_of_nodes() #nb_diams = len(diameters) #nb_anz = ((sum(all_paths) + nb_pipes) * nb_diams + nb_nodes) - 1 + (nb_diams * nb_pipes) #print nb_anz # Useful values tank = [n for n,d in topo.nodes_iter(data=True) if d['type']=="tank"][0] nb_pipes = topo.number_of_edges() nb_nodes = topo.number_of_nodes() nb_diams = len(diameters) pipes = topo.edges(data=True) if verbose: print("Nb pipes: %d" % nb_pipes) print("Nb nodes: %d" % nb_nodes) print("Nb diameters: %d" % nb_diams) # Diameters temperature-dependant values a = calcul_a(topo.graph['watertemp']) for diam in diameters: if diam['type']==1: # PVC diam['p'] = 2 - 0.219 diam['q'] = 5 - 0.219 diam['beta'] = 0.0826 * 0.235 * (a * 1e6)**(-0.219) #TODO values for IRON pipes (source: DiameterVector) # elif diam['type']==2: # IRON # Double ironA = new Double(0); # Double ironB = new Double(0); # calculIron(rugosite / diameter, ironA, ironB); # p = 2 - ironB.doubleValue(); # q = 5 - ironB.doubleValue(); # beta = 0.0826 * ironA.doubleValue() * Math.pow(a * 1e6, # -ironB.doubleValue()); # Vector c = # nb_pipes * nb_diam nb_pipes # [ diam_costs ... , very_high_cost ] diam_cost = [diam['cost'] for diam in diameters] #TODO check what is a very high cost #very_high_cost = max(diam_cost) very_high_cost = 6.86e+02 c = diam_cost * nb_pipes + [very_high_cost] * nb_pipes # Matrix A # nb_pipes * nb_diam nb_pipes # [ [1,1,1...] 0 0 , ] # nb_pipes [ 0 [1,1,1...] 0 , 0 ] # [ 0 0 [1,1,1...] , ] values = [1] * (nb_pipes * nb_diams) rows = sum([ [i] * nb_diams for i in range(nb_pipes)], []) cols = range(nb_pipes * nb_diams) A = spmatrix(values,rows,cols,size=(nb_pipes,nb_pipes*nb_diams+nb_pipes)) # Vector b # nb_pipes # [ length ...] b = [ d['length'] for n1,n2,d in pipes ] # Matrix G_up # nb_pipes * nb_diam nb_pipes # [ [f(d1),f(d2),...] 0 0 , ] # nb_pipes [ [f(d1),f(d2),...] [f(d1),f(d2),...] 0 , -I ] # [ [f(d1),f(d2),...] 0 [f(d1),f(d2),...] , ] # 1 [ [c(d1),c(d2),...] [c(d1),c(d2),...] [c(d1),c(d2),...] 
, 0 ] values = [] rows = [] cols = [] row = 0 for n1,n2,d in pipes: # left part of matrix print(tank) print(n2) print(nx.shortest_path(topo,tank,n2)) print(nx.to_edgelist(topo,nx.shortest_path(topo,tank,n2))) path = nx.shortest_path(topo,tank,n2) for idx in range(len(path))[1:]: for diam in diameters: value = ( topo.graph['targetflow'] * topo.node[path[idx]]['load_factor'] / 1000 ) ** diam['p'] value /= diam['diam']**diam['q'] value *= diam['beta'] values.append(value) rows.extend( [row] * nb_diams ) col = i_pipe(path[idx-1],path[idx],pipes) * nb_diams cols.extend( [i + col for i in range(nb_diams)] ) # right part values.append(-1.0) rows.append(row) cols.append(nb_pipes * nb_diams + row) row += 1 #TODO check if ok to add length constraint # length constraint #values.extend( [1.0] * nb_diams * nb_pipes ) #rows.extend( [ nb_pipes ] * nb_diams * nb_pipes ) #cols.extend( range(nb_diams * nb_pipes)) # max cost constraint values.extend( [diam["cost"] for diam in diameters] * nb_pipes ) rows.extend( [ nb_pipes ] * nb_diams * nb_pipes ) cols.extend( range(nb_diams * nb_pipes)) G_up = spmatrix(values,rows,cols,size=(nb_pipes+1,nb_pipes*nb_diams+nb_pipes)) return (c, A, b, G_up)
def centrality(): start_time = datetime.now() # TODO add config file read # TODO support cross network calculations (author_node --is--> author_node) ## >Get the REQUIRED parameters req_params = {} for entry in req_param_list: if request.args.get(entry) is not None: req_params[entry] = urllib2.unquote(request.args.get(entry)).replace("'", "") else: ret_string = "Required parameter missing: " + entry return jsonify(result=ret_string) # TODO Validate start_date, end_date ## >Verify the metric is valid if req_params["metric"] not in metric_list: return jsonify(result="Invalid metric requested") ## >Verify the start date is before the end date if int(req_params["start_date"]) > int(req_params["end_date"]): return jsonify(result="End data before start date") ## >Get the OPTIONAL parameters opt_params = {} for entry in opt_param_list: if request.args.get(entry) is not None: opt_params[entry] = urllib2.unquote(request.args.get(entry)).replace("'", "") else: opt_params[entry] = None # TODO validate the optional parameters ## >Get the FORMAT parameters for_params = {} for entry in format_param_list: if request.args.get(entry) is not None: for_params[entry] = urllib2.unquote(request.args.get(entry)).replace("'", "") else: for_params[entry] = None params = dict(req_params.items() + opt_params.items() + for_params.items()) ## >Build the mongo query mongo_query = {} mongo_query["PostDate"] = {"$gte": params["start_date"], "$lte": params["end_date"]} mongo_query["Network"] = params["network"] for param, value in opt_params.iteritems(): if value is not None: if param is "type": mongo_query["Type"] = opt_params["type"] if param is "twit_collect": mongo_query["Meta.sources"] = {"$in": [opt_params["twit_collect"]]} if param is "matched_project": mongo_query["Matching"] = {"$elemMatch": {"ProjectId": opt_params["matched_project"]}} if param is "matched_topic": # TODO pass if param is "scored_project": # TODO pass if param is "scored_topic": # TODO pass ## >Check if there are any matches if author_collection.find(mongo_query).count == 0: return "No connections found matching the criteria" else: ## >Map/reduce the A-->A connections a2a_map = Code( """ function () { emit({"author": this.Author, "connection": this.Connection}, {"count": 1} ); } """ ) a2a_reduce = Code( """ function (key, values) { var count = 0; values.forEach(function(v) { count += v['count']; }); return {"count": count}; } """ ) a2a_result = author_collection.map_reduce(a2a_map, a2a_reduce, "a2a_results", query=mongo_query).find() ## >Build the author list author_list = [] for a2a_count in a2a_result: author_list.append( ( a2a_count["_id"]["author"].replace("&", "/x26"), a2a_count["_id"]["connection"].replace("&", "/x26"), int(a2a_count["value"]["count"]), ) ) ## >Influence Calculations if len(author_list) > 0: ## >Create a black graph G = nx.DiGraph() ## >Add the endges to the graph G.add_weighted_edges_from(author_list) ## >Run the requested metric, on the graph 'G' calc_metric, stats = inf.run_metric(params["metric"], G, "weight", True) else: return jsonify(result="Parameters produced no graph/metrics") ## >Build the dictionary to return data_results = {} ## >Append the metric data data_results["metrics"] = calc_metric ## >If graph requested if for_params["return_graph"] is not None: if for_params["return_graph"].lower() == "true": ## >If format = data if for_params["format"] is None: ## >Append the graph data data_results["graph"] = nx.to_edgelist(G, nodelist=None) ## >If format = graphml elif for_params["format"].lower() == "graphml": ## 
>Create the graphml filename graphml_name = inf_sup.create_filename(params) ## >Get the graphml data graphml_data = "\n".join(nx.generate_graphml(G)) ## >Add the versioning graphml_final = '<?xml version="1.0" encoding="UTF-8"?>' + "\n" h = HTMLParser.HTMLParser() for line in graphml_data.split("\n"): ## >Escape the html content line = h.unescape(line) ## >For each node add appropriate metric data into the graphml if '<node id="' in line: graphml_final += line.replace("/>", ">") + "\n" node_name = line.partition('"')[-1].rpartition('"')[0] graphml_final += ' <data key="d1">' + str(calc_metric[node_name]) + "</data>" + "\n" graphml_final += " </node>" + "\n" else: graphml_final += line + "\n" ## >Add the key for the metric attribute if "<key" in line: graphml_final += ( ' <key attr.name="' + params["metric"] + '" attr.type="float" for="node" id="d1" />' ) if app.debug is True: ## >Write out the graphml for testing with open(graphml_name, "w") as output_file: for line in graphml_final: output_file.write(line.encode("utf-8")) if not output_file.closed: output_file.close() ## >Create the appropriate response to return the graphml response = make_response(graphml_final) response.headers["Content-Type"] = "text/xml" response.headers["Content-Distribution"] = "attachment; filename=%s" % (graphml_name,) return response if app.debug is True: ## >If debug mode add the query parameters data_results["query"] = params ## >And add statistics about the process statistics = {} statistics["runtime"] = str(datetime.now() - start_time) data_results["stats"] = statistics ## >Add the mongo query used data_results["query"] = mongo_query return jsonify(result=data_results)
def test_to_edgelist(self):
    G = nx.Graph([(1, 1)])
    elist = nx.to_edgelist(G, nodelist=list(G))
    assert_edges_equal(G.edges(data=True), elist)
from construct_network import BuildGraph import networkx as nx import time import sys import cPickle from itertools import combinations G = BuildGraph() el = nx.to_edgelist(G) names = {} numbers = {} i = 0 edgelist = [] for e in el: for j in [0,1]: if not numbers.has_key(e[j]): numbers[e[j]] = i names[i] = e[j] i += 1 edgelist.append((numbers[e[0]], numbers[e[1]])) el = [] cPickle.dump(names, open("names.mat","w")) cPickle.dump(numbers, open("numbers.mat","w")) #Construct A and A' start = time.time() A = {}; Aprime = {}; nnz = [0]*len(numbers)
def write_edge_list(filename, network):
    with open(filename, "w+") as fh:
        [fh.write("%d, %d\n" % (source, target))
         for source, target, weight in nx.to_edgelist(network)]
def trim(graph, threshold=0.2):
    # Keep only about threshold * size edges (those with the smallest weights,
    # in ascending order); remove the rest in place.
    thr = threshold * graph.size()
    for i, a in enumerate(sorted(nx.to_edgelist(graph),
                                 key=(lambda x: x[2]['weight']))):
        if i > thr:
            graph.remove_edge(a[0], a[1])
def run( reset = False, base_net = 'kn', comp_net = 'fn', demand_bdtnp = False): tgs,tfs = nio.getNet() ktgs,ktfs = nio.getKNet() bd = nio.getBDTNP() #btgs,btfs = nio.getBDTNP() sush = nio.getSush(on_fail = 'compute') tfset = set(ktfs.keys()) tgset = set(ktgs.keys()) tg_int = set(tgs.keys()).intersection(ktgs.keys()) tf_int = set(tfs.keys()).intersection(ktfs.keys()) if demand_bdtnp: tg_int = tg_int.intersection(bd.keys()) tf_int = tf_int.intersection(bd.keys()) sfRN = [(tf, tg, float(wt)) for tg, elt in tgs.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] kRN = [(tf, tg, float(wt)) for tg, elt in ktgs.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] #Sushmita network with signed edges suRN = [(tf, tg, float(wt)) for tg, elt in sush.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] #Sushmita network with unsigned edges suaRN = [(tf, tg, abs(float(wt))) for tg, elt in sush.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] edges = [ kRN, sfRN, suRN, suaRN] ng = 4 fg, kg, sug, suag = [nx.DiGraph() for i in range(4)] nodes = array(list(tf_int.union(tg_int))) graphs = {'kg':kg,'fg':fg,'sug':sug,'suag':suag} for g, edges in zip(graphs.values(), edges): g.add_nodes_from(nodes) g.add_weighted_edges_from(edges) for gname in ['fg','suag']: for prc in [10,50,75,85,90,95,98,99]: thr = percentile([e[2]['weight'] for e in nx.to_edgelist(graphs[gname])], prc) graphs.update([('{0}_thr{1:2.2}'.format(gname,thr), nfu.thr_graph(graphs[gname],thr))]) v0 = graphs.values() k0 = graphs.keys() tot_edges = len(nx.to_edgelist(graphs['fg'])) for k, v in zip(k0,v0): for n_c in [2,4,8 ,12]: for max_edges in array([.5,1.,2.,5.]) * tot_edges : if not 'thr' in k: continue gfilt = nfu.filter_graph(v, n_c = n_c) gfilt = nfu.top_edges(gfilt, max_edges = max_edges) gthr = nfu.thr_graph(gfilt, 1e-8) graphs.update([('{0}_flt{1}'.format(k,n_c),gfilt)]) graphs.update([('{0}_flt{1}_thr0'.format(k,n_c),gthr)]) #nfplots.show(kg,pos,node_color = 'none') #nfplots.show(fg,pos,node_color = 'white', alpha = .2, with_labels = False) return graphs
def build_edgelist():
    for item in network_collection:
        temp_edgelist = nx.to_edgelist(item)
        edgelist_collection.append(temp_edgelist)
def run2( reset = False, base_net = 'kn', comp_net = 'fn', demand_bdtnp = False): bd = nio.getBDTNP() ktgs,ktfs = nio.getKNet() tgs,tfs = nio.getNet() sush = nio.getSush(on_fail = 'compute') tfset = set(ktfs.keys()) tgset = set(ktgs.keys()) tg_int = set(tgs.keys()).intersection(ktgs.keys()) tf_int = set(tfs.keys()).intersection(ktfs.keys()) if demand_bdtnp: tg_int = tg_int.intersection(bd.keys()) tf_int = tf_int.intersection(bd.keys()) if base_net =='kn': b_edges = [(tf, tg, float(wt)) for tg, elt in ktgs.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] if comp_net == 'fn': c_edges = [(tf, tg, float(wt)) for tg, elt in tgs.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] elif comp_net == 'sn': #Sushmita network with signed edges c_edges = [(tf, tg, float(wt)) for tg, elt in sush.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] elif comp_net == 'sna': #Sushmita network with unsigned edges c_edges = [(tf, tg, abs(float(wt))) for tg, elt in sush.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] elif comp_net == 'kn': c_edges = [(tf, tg, float(wt)) for tg, elt in ktgs.iteritems() if tg in tg_int for tf, wt in zip(elt['tfs'], elt['weights']) if tf in tf_int] ng = 4 nodes = array(list(tf_int.union(tg_int))) bg = nx.DiGraph() bg.add_nodes_from(nodes) bg.add_weighted_edges_from(b_edges) cg = nx.DiGraph() cg.add_nodes_from(nodes) cg.add_weighted_edges_from(c_edges) cgraphs = {comp_net:cg} v0 = cgraphs.values() k0 = cgraphs.keys() for k,g in zip(k0,v0): for prc in [10]: thr = percentile([e[2]['weight'] for e in nx.to_edgelist(g)], prc) cgraphs.update([('{0}_thr{1:2.2}'.format(k,thr), nfu.thr_graph(g,thr))]) gt = nfu.thr_graph(g,thr) v0 = cgraphs.values() k0 = cgraphs.keys() for k, v in zip(k0,v0): tot_edges = len(nx.to_edgelist(v)) for n_c in [1,2,4]: for max_edges in array([.2,.5,1.]) * tot_edges : if not 'thr' in k: continue gfilt = nfu.filter_graph(v, n_c = n_c) gfilt = nfu.top_edges(gfilt, max_edges = max_edges) gthr = nfu.thr_graph(gfilt, 1e-8) cgraphs.update([('{0}_flt{1}_nedge{2}'.format(k,n_c,max_edges),gfilt)]) cgraphs.update([('{0}_flt{1}_nedge{2}_thr0'.format(k,n_c,max_edges),gthr)]) ''' When you don't have hidden variables, networks can be modelled mby information criterion. In what settings can you incur causality from datasets. You need a prior to limit the number of arrowsin your graph: The idea: come up with an idea from computational learning theory and come up with a model for interventions. Spatial, genetic, time data to penalize network edges... Granger causality uses time varying data to ''' #nfplots.show(kg,pos,node_color = 'none') #nfplots.show(fg,pos,node_color = 'white', alpha = .2, with_labels = False) return bg, cgraphs
def run_sig(genes, show_disc = False, weighted = True): genes2 = [] for g in genes: genes2.append((g[0], [[gelt[0], gelt[1]] for gelt in g[1]], g[2])) genes = genes2 modules = [m[0] for m in genes] if len(modules[0]) == 2: module_type = 'doubles' else: module_type = 'triples' counts = [m[1] for m in genes] tgs,tfs = nio.getNet() bd = nio.getBDTNP() nodes_allowed = set(bd.keys()) cnodes = list(nodes_allowed) dnodes = [] dedges = [] cedges = [] cnodes = [] for m in genes: for tginfo in m[1]: tg = tginfo[0] tg_mcount = tginfo[1] dtgnode = '{0}_{1}_mod{2}'.format(tg,tg,m[0]) ctgnode = '{0}'.format(tg) dnodes.append(dtgnode) cnodes.append(ctgnode) for tf in m[0]: dtfnode = '{0}_{1}_mod{2}'.format(tf,tg,m[0]) ctfnode = '{0}'.format(tf) dnodes.append(dtfnode) cnodes.append(ctfnode) dedges.append((dtfnode, dtgnode,tg_mcount)) cedges.append((ctfnode, ctgnode,tg_mcount)) nodes_allowed = list(set(cnodes)) if show_disc: dgraph, cgraph = [nx.Graph() for i in range(2)] dgraph.add_nodes_from(list(set(dnodes))) dgraph.add_weighted_edges_from(list(set(dedges))) f = myplots.fignum(4, (8,8)) ax = f.add_subplot(111) pos=nx.graphviz_layout(dgraph,prog="neato") # color nodes the same in each connected subgraph C=nx.connected_component_subgraphs(dgraph) for g in C: c=[random.random()]*nx.number_of_nodes(g) # random color... nx.draw(g, pos, node_size=40, node_color=c, vmin=0.0, vmax=1.0, with_labels=False ) figtitle = 'mcmc_disc' f.savefig(figtemplate.format(figtitle)) return cgraph = nx.DiGraph() cgraph.add_nodes_from(cnodes) cedgegrps = [(k,list(g)) for k, g in it.groupby(\ sorted(cedges, key = lambda x: (x[0],x[1])), key = lambda x: (x[0],x[1]))] cedges = [ (k[0],k[1], sum([gelt[2] for gelt in g])) for k,g in cedgegrps] if weighted == False: for ce in cedges: ce[2] = 1 cgraph.add_weighted_edges_from(list(set(cedges))) sfRN = [(tf, tg, float(wt)) for tg, elt in tgs.iteritems() if tg in nodes_allowed for tf, wt in zip(elt['tfs'], elt['weights']) if tf in nodes_allowed] fg = nx.DiGraph() fg.add_nodes_from(cnodes) fg.add_weighted_edges_from(sfRN) colors = mycolors.getct(len(cnodes)) f = myplots.fignum(5, (8,8)) ax =f.add_subplot(111) pos=nx.graphviz_layout(fg,prog="neato") # color nodes the same in each connected subgraph nx.draw(cgraph, pos, node_size=100, node_color=colors, vmin=0.0, vmax=1.0, with_labels=False, alpha = 1. 
) ax.set_title('connectivity of MCMC for network {0}'.format(module_type)) figtitle = 'mcmc_network_{0}{1}'.\ format(module_type,'' if weighted else 'unweighted') f.savefig(figtemplate.format(figtitle)) f = myplots.fignum(5, (8,8)) ax =f.add_subplot(111) #pos=nx.graphviz_layout(fg,prog="neato") # color nodes the same in each connected subgraph nx.draw(fg, pos, node_size=100, node_color=colors, vmin=0.0, vmax=1.0, with_labels=False ) ax.set_title('connectivity of reference for network {0}'.format(module_type)) figtitle = 'mcmc_ref_network_{0}{1}'.\ format(module_type,'' if weighted else 'unweighted') f.savefig(figtemplate.format(figtitle)) graphs = {'mcmc':cgraph,'network':fg} v0 = graphs.values() k0 = graphs.keys() for k,g in zip(k0,v0): for prc in [1,50,95]: thr = percentile([e[2]['weight'] for e in nx.to_edgelist(g)], prc) graphs.update([('{0}_thr{1}%'.format(k,prc), nfu.thr_graph(g,thr))]) v0 = graphs.values() k0 = graphs.keys() for k, v in zip(k0,v0): tot_edges = len(nx.to_edgelist(fg)) for n_c in [2,4,6,8,12,20]: for max_edges in array([.5,1.,2.]) * tot_edges : gfilt = nfu.filter_graph(v, n_c = n_c) gfilt = nfu.top_edges(gfilt, max_edges = max_edges) gthr = nfu.thr_graph(gfilt, 1e-8) graphs.update([('{0}_flt{1}'.format(k,n_c),gfilt)]) graphs.update([('{0}_flt{1}_thr0'.format(k,n_c),gthr)]) return graphs