def test_graph1(self):
    G = nx.OrderedGraph([
        (3, 10),
        (2, 13),
        (1, 13),
        (7, 11),
        (0, 8),
        (8, 13),
        (0, 2),
        (0, 7),
        (0, 10),
        (1, 7),
    ])
    self.check_graph(G, is_planar=True)
def load_data(self): savefile = os.path.join(self.datastore,"graphs", self.graph_name + ".adjlist.gz") if os.path.isfile(savefile): print(" loading from cache file" + savefile) self.nx_graph = nx.read_adjlist(savefile) else: self.nx_graph = nx.OrderedGraph( nx.readwrite.gpickle.read_gpickle(at.get(self.at_hash, datastore=self.datastore))) print(" writing graph") nx.write_adjlist(self.nx_graph, savefile)
def find_cycles(sub_network, weight='x_pu'):
    """
    Find all cycles in the sub_network and record them in sub_network.C.

    networkx collects the cycles with more than 2 edges; then the 2-edge cycles
    from the MultiGraph must be collected separately (for cases where there
    are multiple lines between the same pairs of buses).

    Cycles with infinite impedance are skipped.
    """
    branches_bus0 = sub_network.branches()["bus0"]
    branches_i = branches_bus0.index

    # reduce to a non-multi-graph for cycles with > 2 edges
    mgraph = sub_network.graph(weight=weight, inf_weight=False)
    graph = nx.OrderedGraph(mgraph)

    cycles = nx.cycle_basis(graph)

    # number of 2-edge cycles
    num_multi = len(mgraph.edges()) - len(graph.edges())

    sub_network.C = dok_matrix((len(branches_bus0), len(cycles) + num_multi))

    for j, cycle in enumerate(cycles):
        for i in range(len(cycle)):
            branch = next(iterkeys(mgraph[cycle[i]][cycle[(i + 1) % len(cycle)]]))
            branch_i = branches_i.get_loc(branch)
            sign = +1 if branches_bus0.iat[branch_i] == cycle[i] else -1
            sub_network.C[branch_i, j] += sign

    # counter for multis
    c = len(cycles)

    # add multi-graph 2-edge cycles for multiple branches between same pairs of buses
    for u, v in graph.edges():
        bs = list(mgraph[u][v].keys())
        if len(bs) > 1:
            first = bs[0]
            first_i = branches_i.get_loc(first)
            for b in bs[1:]:
                b_i = branches_i.get_loc(b)
                sign = -1 if branches_bus0.iat[b_i] == branches_bus0.iat[first_i] else +1
                sub_network.C[first_i, c] = 1
                sub_network.C[b_i, c] = sign
                c += 1
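# A minimal sketch (not part of the original code) of why the 2-edge cycles have
# to be collected separately: nx.cycle_basis only sees the collapsed simple graph,
# so parallel branches between the same pair of buses disappear from the basis and
# must be added back by hand, which is what the loop over graph.edges() above does.
import networkx as nx

mgraph = nx.MultiGraph()
mgraph.add_edge("bus0", "bus1", key="line1")
mgraph.add_edge("bus0", "bus1", key="line2")   # parallel branch
graph = nx.Graph(mgraph)                       # parallel edges collapse into one

assert nx.cycle_basis(graph) == []             # the 2-edge cycle is invisible here
num_multi = len(mgraph.edges()) - len(graph.edges())
assert num_multi == 1                          # hence the extra columns in sub_network.C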
def load_data(self):
    self.benchmark = self.datastore + "/graphs/HumanNet-XN.tsv"
    edgelist = pd.read_csv(self.benchmark, header=None, sep="\t",
                           skiprows=1).values[:, :2].tolist()
    self.nx_graph = nx.OrderedGraph(edgelist)
    # Map nodes from ncbi to hugo names
    self.nx_graph = nx.relabel.relabel_nodes(
        self.nx_graph,
        ncbi_to_hugo_map(self.nx_graph.nodes, datastore=self.datastore))
    # Remove nodes which are not covered by the map
    for node in list(self.nx_graph.nodes):
        if isinstance(node, float):
            self.nx_graph.remove_node(node)
def bfs_sample_neighbors(self, gene, num_neighbors, include_self=True):
    neighbors = nx.OrderedGraph()
    if include_self:
        neighbors.add_node(gene)
    bfs = nx.bfs_edges(self.nx_graph, gene)
    for u, v in bfs:
        if neighbors.number_of_nodes() == num_neighbors:
            break
        neighbors.add_node(v)
    for node in neighbors.nodes():
        for u, v, d in self.nx_graph.edges(node, data="weight"):
            if neighbors.has_node(u) and neighbors.has_node(v):
                neighbors.add_weighted_edges_from([(u, v, d)])
    return neighbors
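# Quick illustration (assumes only a plain networkx path graph, nothing from the
# surrounding class) of the nx.bfs_edges call used above: it yields tree edges
# outward from the source, so collecting targets until num_neighbors is reached
# picks up the closest neighbours first.
import networkx as nx

path = nx.path_graph(5)              # 0-1-2-3-4
edges = list(nx.bfs_edges(path, 2))  # breadth-first search rooted at node 2
assert edges == [(2, 1), (2, 3), (1, 0), (3, 4)]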
def test_precompute_obj(self):
    G = nx.OrderedGraph()
    elist = [[0, 1], [1, 2], [1, 3], [2, 3]]
    G.add_edges_from(elist)
    N = G.number_of_nodes()
    w = nx.adjacency_matrix(G, nodelist=range(N))
    obj = partial(maxcut_obj, w=w)
    qc = QuantumCircuit(N, N)
    qc.x([0])
    backend = Aer.get_backend('statevector_simulator')
    sv = execute(qc, backend=backend).result().get_statevector()
    precomputed = precompute_obj(obj, N)
    self.assertEqual(len(precomputed[np.where(sv)]), 1)
    self.assertEqual(obj_from_statevector(sv, obj), precomputed[np.where(sv)][0])
def load_data(self):
    # previously human
    # savefile = self.datastore + "/graphs/stringdb_graph_" + self.graph_type + "_edges.adjlist"
    # currently mouse
    if self.graph_type == 'all':
        savefile = "../data/graphs/stringdb_all_mouse_graph_edges.adjlist"
    else:
        savefile = "../data/graphs/stringdb_coex_mouse_graph_" + self.graph_type + "_edges.adjlist"
    if os.path.isfile(savefile):
        print(" loading from cache file " + savefile)
        self.nx_graph = nx.read_adjlist(savefile)
    else:
        print("Building StringDB Graph. It can take a while the first time...")
        # previously human
        # self.proteinlinks = self.datastore + "/graphs/9606.protein.links.detailed.v11.0.txt"
        # currently mouse
        self.proteinlinks = "../data/graphs/10090.protein.links.detailed.v11.0.txt.gz"
        print(" ensp_to_ensg_map")
        # fixed ensp_to_hugo_map to ensp_to_ensg for mouse
        ensmap = ensp_to_hugo_map()
        print(" reading self.proteinlinks")
        # previously not gzipped
        # currently unzipping
        edges = pd.read_csv(self.proteinlinks, sep=' ', compression='gzip')
        selected_edges = edges[self.name_to_edge[self.graph_type]] != 0
        edgelist = edges[selected_edges][["protein1", "protein2"]].values.tolist()
        # previously human
        # human ID has 4 numbers (9606), hence [5:] to remove that ID from protein ID
        # edgelist = [[ensmap[edge[0][5:]], ensmap[edge[1][5:]]] for edge in edgelist
        #             if edge[0][5:] in ensmap.keys() and edge[1][5:] in ensmap.keys()]
        # currently mouse (5 number ID 10090)
        edgelist = [
            [ensmap[edge[0][6:]], ensmap[edge[1][6:]]] for edge in edgelist
            if edge[0][6:] in ensmap.keys() and edge[1][6:] in ensmap.keys()
            and edge[0][6:] != edge[1][6:]
        ]  # remove self edges
        print(" creating OrderedGraph")
        self.nx_graph = nx.OrderedGraph(edgelist)
        print(" writing graph")
        nx.write_adjlist(self.nx_graph, savefile)
        print("Graph built !")
def load_data(self): savefile = os.path.join(self.datastore,"graphs", 'hetio_{}'.format(self.graph_type) + ".adjlist.gz") if os.path.isfile(savefile): print(" loading from cache file" + savefile) self.nx_graph = nx.read_adjlist(savefile) else: pkl_file = os.path.join(self.datastore,"graphs", 'hetio_{}_graph'.format(self.graph_type) + ".pkl") if not os.path.isfile(pkl_file): self._process_and_pickle(save_name=pkl_file) self.nx_graph = nx.OrderedGraph(nx.read_gpickle(pkl_file)) print(" writing graph") nx.write_adjlist(self.nx_graph, savefile)
def find_fragments(self, coordinates: np.ndarray, labels: list,
                   graph=False, **kwargs) -> Union[list, nx.OrderedGraph]:
    """Reduces a set of labeled coordinates to a graph with edges drawn between
    species which are closer than a distance cutoff (calculated from covalent
    radii), then extracts connected fragments from that graph.

    Args:
        coordinates: np.array, required, cartesian coordinate array
        labels: list[str], required, list of particle labels
        graph: bool, (optional), return the fragment graph instead of the fragment list
        **kwargs: additional keyword arguments passed to add_edge

    Returns:
        list of Molecule objects or an nx.OrderedGraph object
    """
    if len(coordinates) != len(labels):  # TODO: test the assertion in find_fragments
        raise AssertionError(
            "Coordinates and labels of different lengths! {} & {}".format(
                coordinates.shape, len(labels)))
    fragment_graph = nx.OrderedGraph()
    for label, coordinate in zip(labels, coordinates):
        graph_node = Particle(label, coordinate)
        fragment_graph.add_node(graph_node)
    all_r = get_all_magnitudes(coordinates)
    for (particle1, particle2), r in zip(it.combinations(fragment_graph.nodes, 2), all_r):
        if r <= self.get_cutoff_distance(particle1.label, particle2.label):
            fragment_graph.add_edge(particle1, particle2, weight=r, **kwargs)
    if graph:
        return fragment_graph
    else:
        return self.graph_to_fragments(fragment_graph)
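# Hedged, self-contained illustration of the cutoff rule described in the docstring
# (Particle, get_all_magnitudes and get_cutoff_distance belong to the surrounding
# project and are not redefined here; the cutoff value below is hypothetical):
# edges are drawn only between points closer than the cutoff, and the connected
# components of the resulting graph are the fragments.
import itertools as it
import networkx as nx
import numpy as np

coords = np.array([[0.0, 0.0, 0.0], [0.9, 0.0, 0.0], [5.0, 0.0, 0.0]])
cutoff = 1.6  # hypothetical, e.g. roughly the sum of two covalent radii

g = nx.Graph()
g.add_nodes_from(range(len(coords)))
for i, j in it.combinations(range(len(coords)), 2):
    r = np.linalg.norm(coords[i] - coords[j])
    if r <= cutoff:
        g.add_edge(i, j, weight=r)

# two fragments: {0, 1} are "bonded", {2} is isolated
assert sorted(map(sorted, nx.connected_components(g))) == [[0, 1], [2]]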
def Cycles(n, branches_i=None):
    """
    Light-weight function for finding all cycles in a given network.
    """
    branches = pd.concat({c: n.df(c)[['bus0', 'bus1']]
                          for c in sorted(n.branch_components)}) \
                 .rename(columns={'bus0': 'source', 'bus1': 'target'})
    if branches_i is None:
        branches_i = branches.index.rename(['component', 'branch_i'])
    else:
        branches = branches.reindex(branches_i)
    branches = branches.assign(index=branches_i.to_numpy())
    branches_bus0 = branches['source']
    mgraph = nx.from_pandas_edgelist(branches, edge_attr=True,
                                     create_using=nx.MultiGraph)
    graph = nx.OrderedGraph(mgraph)
    cycles = nx.cycle_basis(graph)
    # number of 2-edge cycles
    num_multi = len(mgraph.edges()) - len(graph.edges())
    C = scipy.sparse.dok_matrix((len(branches_bus0), len(cycles) + num_multi))

    for j, cycle in enumerate(cycles):
        for i, start in enumerate(cycle):
            end = cycle[(i + 1) % len(cycle)]
            branch = branches_i.get_loc(graph[start][end]['index'])
            sign = +1 if branches_bus0.iat[branch] == cycle[i] else -1
            C[branch, j] += sign

    # counter for multis
    c = len(cycles)
    # add multi-graph 2-edge cycles for multiple branches between same pairs
    # of buses
    for u, v in graph.edges():
        bs = list(mgraph[u][v].values())
        if len(bs) > 1:
            first = branches_i.get_loc(bs[0]['index'])
            for b in bs[1:]:
                other = branches_i.get_loc(b['index'])
                sign = -1 if branches_bus0.iat[other] == branches_bus0.iat[first] else +1
                C[first, c] = 1
                C[other, c] = sign
                c += 1

    return DataArray(C.todense(),
                     {'branch': branches_i, 'cycle': range(C.shape[1])},
                     ('branch', 'cycle'))
def build_networkx_graph(nodes, edges):
    # Build networkx datastructure
    ascii_graph = networkx.OrderedGraph()
    ascii_graph.add_nodes_from(
        (node, {"position": tuple(pos)}) for pos, node in nodes.items())
    ascii_graph.add_edges_from(
        (edge['nodes'][0], edge['nodes'][1], {
            "length": len(edge["points"]),
            "points": [tuple(el) for el in edge["points"]]
        }) for edge in edges)
    networkx.set_edge_attributes(
        ascii_graph,
        name="label",
        values={edge["nodes"]: edge["label"][1:-1]
                for edge in edges if "label" in edge})
    return ascii_graph
def make_coupling_graph(block_interaction_data, blau_coord_combinations,
                        on_attribute='coupling'):
    """
    Make a networkx graph from block interaction data.

    Assumes pre-computed blau coord combinations.

    Parameters
    -------------
    block_interaction_data : pandas dataframe
        Dataframe containing numbers of edges and interaction strengths
        between different blocks within a larger graph.
    blau_coord_combinations : list
        List of tuples containing the different region name / age range pairs.
    on_attribute : str (opt)
        String specifying which variable to take as the edge weight.
    """
    # coupling_graph = nx.Graph()
    coupling_graph = nx.OrderedGraph()
    for pair_1 in tqdm(blau_coord_combinations):
        for pair_2 in blau_coord_combinations:
            try:
                coupling = list(block_interaction_data.loc[
                    (block_interaction_data['cat_1'] == pair_1) &
                    (block_interaction_data['cat_2'] == pair_2)]
                    [on_attribute])[0]
            except:
                pdb.set_trace()
            coupling_graph.add_edge(pair_1, pair_2, weight=coupling)
    return coupling_graph
def test_simulate_qiskit_amps():
    elist = [[0, 1], [1, 2], [2, 3], [3, 4], [4, 0], [0, 5], [1, 6], [2, 7],
             [3, 8], [4, 9], [5, 7], [5, 8], [6, 8], [6, 9], [7, 9]]
    G = nx.OrderedGraph()
    G.add_edges_from(elist)
    parameters = np.array([
        5.192253984583296, 5.144373231492732, 5.9438949617723775,
        5.807748946652058, 3.533458907810596, 6.006206583282401,
        6.122313961527631, 6.218468942101044, 6.227704753217614,
        0.3895570099244132, -0.1809282325810937, 0.8844522327007089,
        0.7916086532373585, 0.21294534589417236, 0.4328896243354414,
        0.8327451563500539, 0.7694639329585451, 0.4727893829336214
    ])
    beta = parameters[:9]
    gamma = -parameters[9:]
    result = simulate_qiskit_amps(G, gamma, beta)
    assert abs(abs(result) - 12) < 1e-2
def Grid(dimx=9, dimy=9, deltax=100, deltay=100, energy_list=[], **kwargs):
    "Create a grid network with a 4-neighborhood"
    if len(energy_list) == 0:
        print("no energy list supplied for grid creation, aborting")
        return None
    x = 0
    y = 0
    G = nx.OrderedGraph()
    for i in range(dimx):
        for j in range(dimy):
            # we want to increase x with j and y with i (fill rows first)
            node = GraphNode(x + j * deltax, y + i * deltay, energy_list[j + i * dimx])
            G.add_node(node, pos=node.pos, energy=node.energy)
    for node1, node2 in combinations(G.nodes(), 2):
        dist = la.norm(node1.pos - node2.pos)
        if dist <= 100:
            G.add_edge(node1, node2)
    return G
def load_data(self):
    print("Building StringDB Graph. It can take a while the first time...")
    savefile = "data/graphs/stringdb_graph_" + self.graph_type + "_edges.adjlist"
    if os.path.isfile(savefile):
        self.nx_graph = nx.read_adjlist(savefile)
    else:
        ensmap = ensp_to_hugo_map()
        edges = pd.read_csv(self.proteinlinks, sep=' ')
        selected_edges = edges[self.name_to_edge[self.graph_type]] != 0
        edgelist = edges[selected_edges][["protein1", "protein2"]].values.tolist()
        edgelist = [[ensmap[edge[0][5:]], ensmap[edge[1][5:]]]
                    for edge in edgelist
                    if edge[0][5:] in ensmap.keys() and edge[1][5:] in ensmap.keys()]
        self.nx_graph = nx.OrderedGraph(edgelist)
        nx.write_adjlist(self.nx_graph, savefile)
    print("Graph built !")
def test_graph3(self):
    G = nx.OrderedGraph([
        (0, 7),
        (3, 11),
        (3, 4),
        (8, 9),
        (4, 11),
        (1, 7),
        (1, 13),
        (1, 11),
        (3, 5),
        (5, 7),
        (1, 3),
        (0, 4),
        (5, 11),
        (5, 13),
    ])
    self.check_graph(G, is_planar=False)
def gfa_to_G(gfa, kmer_size):
    # G = nx.DiGraph(k=kmer_size, name='gfa')
    G = nx.OrderedGraph(k=kmer_size, name='gfa')
    with open(gfa, 'r') as fin:
        for line in fin:
            record_type = line[0]
            if record_type in ['#', 'H', 'C', 'P']:
                continue
            elif record_type == 'S':
                name, attr = line_to_node(line)
                G.add_node(name + '+',
                           seq=attr['seq'],
                           cov=attr['KC'] * 1.0 / len(attr['seq']),
                           len=len(attr['seq']),
                           A=attr['seq'].count('A') * 1.0 / len(attr['seq']),
                           C=attr['seq'].count('C') * 1.0 / len(attr['seq']),
                           G=attr['seq'].count('G') * 1.0 / len(attr['seq']),
                           T=attr['seq'].count('T') * 1.0 / len(attr['seq']))
                G.add_node(name + '-',
                           seq=reverse_complement(attr['seq']),
                           cov=attr['KC'] * 1.0 / len(attr['seq']),
                           len=len(attr['seq']),
                           A=attr['seq'].count('T') * 1.0 / len(attr['seq']),
                           C=attr['seq'].count('G') * 1.0 / len(attr['seq']),
                           G=attr['seq'].count('C') * 1.0 / len(attr['seq']),
                           T=attr['seq'].count('A') * 1.0 / len(attr['seq']))
            elif record_type == 'L':
                cov = nx.get_node_attributes(G, 'cov')
                u, v, attr = line_to_edge(line)
                G.add_edge(u, v, **attr)
                nx.set_edge_attributes(
                    G, {(u, v): graphs.get_weight_attr(cov[u], cov[v], 0.05, 0.05)})
                u, v, attr = line_to_rc_edge(line)
                G.add_edge(u, v, **attr)
                nx.set_edge_attributes(
                    G, {(u, v): graphs.get_weight_attr(cov[u], cov[v], 0.05, 0.05)})
    graphs.write_G_statistics(G)
    return G
def countryStability(countryName):
    countryCrops = [
        x for x in list(
            set(production.loc[(production['Area'] == countryName) &
                               (production['Y' + str(year)] > 0), 'Item'].tolist()))
        if x in list(servingSizes.keys())
    ]
    nutrientList = [x for x in list(foodNutrients)[4:-1] if x != 'Sodium']
    stabilities = []
    for thresh in threshes:
        bnk = nx.OrderedGraph()
        bnk.add_nodes_from(countryCrops, bipartite=0)
        bnk.add_nodes_from(nutrientList, bipartite=1)  # nutrients form the second node set
        edges = []
        weights = []
        for crop in countryCrops:
            newEdges = []
            for nutrient in nutrientList:
                weight = np.mean(
                    foodNutrients.loc[foodNutrients['FAO_name'] == crop, nutrient])
                weight = weight * 10**6 * 10**(-2) * (1 / servingSizes[crop]) \
                    / population[population['Country Name'] == countryName][year].iloc[0]
                weight = weight * np.mean(
                    production.loc[(production['Area'] == countryName) &
                                   (production['Item'] == crop)]['Y' + str(year)])
                # note: the 0.1 cutoff is fixed; thresh from the outer loop is not used here
                if weight > 0.1:
                    newEdges.append((crop, nutrient, weight))
                    weights.append(weight)
            edges.extend(newEdges)
            # newEdges holds (crop, nutrient, weight) triples
            bnk.add_weighted_edges_from(newEdges)
            if bnk.degree[crop] == 0:
                bnk.remove_node(crop)
                countryCrops = [x for x in countryCrops if x != crop]
        curve = multicurve_unweighted(bnk, len(countryCrops), 1000)
        stabilities.append(sum([curve[i] / len(curve) for i in range(len(curve))]))
    return stabilities
def place_rooms(self, rooms):
    rooms = [rooms]
    nodes = list(self.grid)
    self.rng.shuffle(nodes)
    for start in nodes:
        graph = nx.OrderedGraph()
        room = rooms[0][0]
        graph.add_node(start, id="r_{}".format(len(graph)), name=room, start=True)
        for group in rooms:
            self.nb_attempts = 0
            graph = self._walk(graph, start, set(group) - {room})
            if not graph:
                break
        if graph:
            return graph
    return None
def place_rooms(self, rooms):
    nodes = list(self.grid)
    self.rng.shuffle(nodes)
    for start in nodes:
        G = nx.OrderedGraph()
        room = rooms[0][0]
        G.add_node(start, id="r_{}".format(len(G)), name=room, start=True)
        for group in rooms:
            self.nb_attempts = 0
            G = self._walk(G, start, set(group) - {room})
            if not G:
                break
        if G:
            return G
    return None
def input(self, file):
    tabfile = open(file, 'r')
    nodes = ()  # set()
    edges = ()  # set()
    for line in tabfile:
        elements = line.split("\t")
        node1 = elements[0].strip()
        node2 = elements[1].strip()
        if (node1 not in nodes):
            nodes += (node1, )
        if (node2 not in nodes):
            nodes += (node2, )
        edge = (node1, node2)
        if (edge not in edges):
            edges += (edge, )
    self.G = networkx.OrderedGraph()
    self.G.add_nodes_from(nodes)
    self.G.add_edges_from(edges)
def _make_graph(self):
    import networkx as nx
    # Keep graph nodes in order so we can reproduce the same layout.
    from collections import OrderedDict

    class OrderedGraph(nx.Graph):
        node_dict_factory = OrderedDict
        adjlist_dict_factory = OrderedDict

    G = nx.OrderedGraph()
    G.add_nodes_from(self.nodes)
    edges = self.edges
    if edges:
        max_weight = float(max(e.weight for e in edges))
        for e in edges:
            G.add_edge(e.nodes[0], e.nodes[1],
                       weight=e.weight / max_weight,
                       edge_object=e)
    return G
def extract_network_scopes(
    G: Union[nx.Graph, nx.OrderedGraph]
) -> Dict[str, Union[nx.Graph, nx.OrderedGraph]]:
    scopes = {
        "all": filter_graph(G, min_component_size=3),
    }
    # get world scopes
    node_attr_world = lambda t: t[1].get("world")
    sorted_nodes = sorted((tup for tup in G.nodes(data=True)), key=node_attr_world)
    for world, grp in groupby(sorted_nodes, key=node_attr_world):
        sg = nx.OrderedGraph()
        sg.add_nodes_from(grp)
        sg.add_edges_from((src, dest) for src, dest in G.edges()
                          if src in sg and dest in sg)
        if sg.size():
            scopes[world] = filter_graph(sg, min_component_size=3)
    return scopes
def test_qasm_sv_obj_from_elist(self):
    elist = [[3, 1], [3, 2], [0, 1], [0, 2], [1, 2]]
    G = nx.OrderedGraph()
    G.add_edges_from(elist)

    def obj_f_cut(x):
        cut = 0
        for i, j in G.edges():
            if x[i] != x[j]:
                # the edge is cut
                cut -= 1
        return cut

    w = nx.adjacency_matrix(G, nodelist=range(4)).toarray()
    obj = partial(maxcut_obj, w=w)
    C, _ = get_maxcut_operator(w)
    obj_sv = ObjectiveWrapper(
        obj,
        varform_description={'name': 'QAOA', 'p': 10, 'num_qubits': 4, 'cost_operator': C},
        backend_description={'package': 'qiskit', 'provider': 'Aer', 'name': 'statevector_simulator'},
        execute_parameters={}).get_obj()
    obj_qasm = ObjectiveWrapper(
        obj,
        varform_description={'name': 'QAOA', 'p': 10, 'num_qubits': 4, 'cost_operator': C},
        backend_description={'package': 'qiskit', 'provider': 'Aer', 'name': 'qasm_simulator'},
        execute_parameters={'shots': 10000}).get_obj()
    obj_sv_custom = ObjectiveWrapper(
        obj_f_cut,
        varform_description={'name': 'QAOA', 'p': 10, 'num_qubits': 4, 'cost_operator': C},
        backend_description={'package': 'qiskit', 'provider': 'Aer', 'name': 'statevector_simulator'},
        execute_parameters={}).get_obj()
    obj_qasm_custom = ObjectiveWrapper(
        obj_f_cut,
        varform_description={'name': 'QAOA', 'p': 10, 'num_qubits': 4, 'cost_operator': C},
        backend_description={'package': 'qiskit', 'provider': 'Aer', 'name': 'qasm_simulator'},
        execute_parameters={'shots': 10000}).get_obj()
    parameters = np.array([
        5.97337687, 2.58355601, 1.40698116, 1.41929411, -0.78430107,
        -4.46418963, -0.61290647, -0.59975086, 0.48811492, 4.20269641,
        -2.71558857, 2.82117292, 2.93922949, 2.06076731, 2.19543793,
        2.42960372, -1.0079554, 2.22741002, -1.06316475, 0.53106839
    ])
    sv_imported = obj_sv(parameters)
    qasm_imported = obj_qasm(parameters)
    sv_custom = obj_sv_custom(parameters)
    qasm_custom = obj_qasm_custom(parameters)
    self.assertTrue(np.isclose(sv_imported, sv_custom))
    self.assertTrue(np.isclose(sv_imported, qasm_imported, rtol=0.01))
    self.assertTrue(np.isclose(sv_custom, qasm_custom, rtol=0.01))
def random(num_nodes: int, num_edges: int, weight: int = 10000) -> nx.OrderedDiGraph:
    """
    Generates a random undirected networkx DiGraph (as in always same edge both ways)

    Args:
        num_nodes: number of nodes
        num_edges: number of edges
        weight: weight to be assigned to all edges

    Returns:
        A DiGraph generated as described
    """
    graph = nx.OrderedGraph()
    graph.add_nodes_from(range(num_nodes))
    for _ in range(num_edges):
        src = np.random.randint(0, num_nodes)
        dst = np.random.randint(0, num_nodes)
        graph.add_edge(src, dst, weight=weight)
    return graph.to_directed()
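# Usage note (illustrative only; relies on the random() helper above and the
# numpy/networkx imports of its module): to_directed() materialises each
# undirected edge as a pair of opposite directed edges carrying the same weight,
# which is what the docstring means by "always same edge both ways".
g = random(num_nodes=4, num_edges=3, weight=10000)
for u, v in g.edges():
    assert g.has_edge(v, u)
    assert g[u][v]["weight"] == g[v][u]["weight"] == 10000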
def test_tuplelabels(self):
    # https://github.com/networkx/networkx/pull/1048
    # Writing tuple labels to GML failed.
    G = nx.OrderedGraph()
    G.add_edge((0, 1), (1, 0))
    data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
    answer = """graph [
  node [
    id 0
    label "(0,1)"
  ]
  node [
    id 1
    label "(1,0)"
  ]
  edge [
    source 0
    target 1
  ]
]"""
    assert data == answer
def test_graph2(self):
    G = nx.OrderedGraph([
        (1, 2),
        (4, 13),
        (0, 13),
        (4, 5),
        (7, 10),
        (1, 7),
        (0, 3),
        (2, 6),
        (5, 6),
        (7, 13),
        (4, 8),
        (0, 8),
        (0, 9),
        (2, 13),
        (6, 7),
        (3, 6),
        (2, 8),
    ])
    self.check_graph(G, is_planar=False)
def generate_and_save_graph(results, path):
    # Cleanup
    remove_old_static_files()

    # Generate network graph
    G = nx.OrderedGraph()
    G.add_nodes_from(results['nodes'])
    G.add_edges_from(results['edges'])
    degrees = dict(G.degree)
    nx.draw(
        G,
        pos=nx.spring_layout(G),
        edge_color='gray',
        node_color='y',
        node_size=[v * 50 for v in degrees.values()],
        with_labels=True,
    )

    # Save graph file
    plt.savefig(path)
    plt.close()
def ManHattan(dimx=9, dimy=9, deltax=100, deltay=100, energy_list=[],
              posList=[], mobileNodeCount=0, **kwargs):
    G = nx.OrderedGraph()
    if len(posList) == 0:
        #print("no pos list supplied for grid creation, aborting")
        #return None
        if len(energy_list) == 0:
            print("no energy list supplied for grid creation, aborting")
            return None
        x = 50
        y = 50
        # create fixed nodes:
        for i in range(dimx):
            for j in range(dimy):
                # we want to increase x with j and y with i (fill rows first)
                node = GraphNode(x + j * deltax, y + i * deltay, energy_list[j + i * dimx])
                G.add_node(node, pos=node.pos, energy=node.energy)
        x = 0
        y = 0
        intersections_x = []
        intersections_y = []
        for i in range(dimx):
            intersections_x.append(x + i * deltax)
            intersections_y.append(y + i * deltax)
        np.random.seed(kwargs['seed'])
        static_nodes = dimx * dimy
        for i in range(mobileNodeCount):
            # build the node once so its sampled position and attributes stay consistent
            node = GraphNode(np.random.choice(intersections_x),
                             np.random.choice(intersections_y),
                             energy_list[static_nodes + i])
            G.add_node(node, pos=node.pos, energy=node.energy)
    else:
        for pos, energy in zip(posList, energy_list):
            node = GraphNode(pos[0], pos[1], energy)
            G.add_node(node, pos=node.pos, energy=node.energy)
    for node1, node2 in combinations(G.nodes(), 2):
        dist = la.norm(node1.pos - node2.pos)
        if dist <= 100:
            G.add_edge(node1, node2)
    return G
def make_coupling_graph(self, block_interaction_data, on_attribute='coupling'):
    """
    Assumes pre-computed blau coord combinations.
    """
    # coupling_graph = nx.Graph()
    coupling_graph = nx.OrderedGraph()
    for pair_1 in tqdm(self.blau_coord_combinations):
        for pair_2 in self.blau_coord_combinations:
            coupling = list(block_interaction_data.loc[
                (block_interaction_data['cat_1'] == pair_1) &
                (block_interaction_data['cat_2'] == pair_2)]
                [on_attribute])[0]
            coupling_graph.add_edge(pair_1, pair_2, weight=coupling)
    return coupling_graph