def build_graph(self, data, score):
    # Biopython is needed for the pairwise alignment
    # http://biopython.org/DIST/docs/api/Bio.pairwise2-module.html
    from Bio import pairwise2
    # networkx is needed to build and export GEXF graphs
    # https://networkx.github.io/documentation/networkx-1.10/reference/generated/networkx.readwrite.gexf.write_gexf.html#networkx.readwrite.gexf.write_gexf
    import networkx as nx

    # Similarity graph
    self.graph = nx.Graph()

    # Pairwise alignment with Biopython
    k = 0
    for key in data:
        # These loops are arranged so that each pair is aligned only once
        k += 1
        for i in range(k, len(data.keys())):
            key_2 = list(data.keys())
            if key != key_2[i]:  # Check that we don't align two identical sequences
                print(key_2[i])
                seq1 = data.get(key)
                seq2 = data.get(key_2[i])
                pairwise_result = pairwise2.align.globalxx(seq1, seq2)
                # Add the edge if the score is greater than the minimal score.
                # Only the first ten characters of each identifier are kept;
                # this may need to be adjusted to the identifier length.
                if pairwise_result[0][2] > score:
                    self.graph.add_edge(key[0:10], key_2[i][0:10],
                                        weight=int(pairwise_result[0][2]))
                    # Display the alignment score
                    print("score = ", pairwise_result[0][2])
    return self.graph
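# A minimal usage sketch for build_graph(), assuming Biopython is installed.
# The sequence identifiers and sequences below are made up, and SimpleNamespace
# merely stands in for whatever object normally carries the `graph` attribute.
from types import SimpleNamespace
import networkx as nx

sequences = {
    "sample_A_000001": "ACTGGTCA",
    "sample_B_000002": "ACTGGTGA",
    "sample_C_000003": "TTTTCCCC",
}
holder = SimpleNamespace()                 # build_graph() assigns holder.graph itself
g = build_graph(holder, sequences, score=6)
nx.write_gexf(g, "similarity.gexf")        # export for Gephi, as the original comments suggest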
def draw_small_graph(graph):
    """Draw the graph showing edge weight

    :param graph: graph object to draw
    """
    graph_nx = nx.Graph()
    graph_nx.add_weighted_edges_from(graph.weighted_edges)
    labels = nx.get_edge_attributes(graph_nx, 'weight')
    pos = nx.spring_layout(graph_nx)
    nx.draw_networkx_edge_labels(graph_nx, pos=pos, edge_labels=labels)
    nx.draw(graph_nx, pos=pos, with_labels=True, node_size=10,
            node_color="skyblue", node_shape="o", alpha=0.5, linewidths=30)
    plt.title(graph.name)
    plt.show()
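# Usage sketch: draw_small_graph() expects an object exposing `weighted_edges`
# and `name`; SimpleNamespace stands in for that assumed interface, and the
# edges below are made up.  The imports also provide the nx/plt globals the
# function relies on.
from types import SimpleNamespace
import networkx as nx
import matplotlib.pyplot as plt

toy = SimpleNamespace(name="toy graph",
                      weighted_edges=[("a", "b", 2.0), ("b", "c", 1.5)])
draw_small_graph(toy)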
def betweenness_centrality_labeling(self, graph, approx=None):
    result = {}
    labeled_graph = nx.Graph(graph)
    if approx is None:
        centrality = list(nx.betweenness_centrality(graph).items())
    else:
        centrality = list(nx.betweenness_centrality(graph, k=approx).items())
    sorted_centrality = sorted(centrality, key=lambda n: n[1], reverse=True)
    dict_ = {}
    label = 0
    for t in sorted_centrality:
        dict_[t[0]] = label
        label += 1
    nx.set_node_attributes(labeled_graph, dict_, 'labeling')
    ordered_nodes = list(zip(*sorted_centrality))[0]

    result['labeled_graph'] = labeled_graph
    result['sorted_centrality'] = sorted_centrality
    result['ordered_nodes'] = ordered_nodes
    return result
def receptive_field_padding(self, normalized_graph):
    """
    Pads a receptive field with dummy nodes so that every receptive field
    reaches exactly rf_size nodes, even when width or rf_size is large.

    :param normalized_graph: rf_transformed graph to which we add dummy nodes
    :return: padded graph of uniform size
    """
    graph = nx.Graph(normalized_graph)
    keys = [key for key, v in dict(normalized_graph.nodes()).items()]
    labels = [value for key, value in
              dict(nx.get_node_attributes(normalized_graph, 'labeling')).items()]

    # Add extra dummy nodes as long as rf_size is not reached
    counter = 1
    while len(graph.nodes()) < self.rf_size:
        graph.add_node(max(keys) + counter,
                       attr_name=self.dummy_value,
                       labeling=max(labels) + counter)
        counter += 1
    return graph
def compute_graph_ranking(graph: nx.Graph, vertex: int, original_node_order: dict):
    """
    Method that relabels a graph w.r.t. node distances to a given root

    :param graph: subgraph to rank
    :param vertex: landmark vertex for the ranking
    :param original_node_order: original ranking
    :return: graph labeled by the new ranking
    """
    labeled_graph = nx.Graph(graph)
    ordered_graph = compute_ranking_distance(graph, vertex)
    labels = nx.get_node_attributes(ordered_graph, 'labeling')
    new_order = relabel_graph(graph=ordered_graph,
                              original_labeling=labels,
                              new_labeling=original_node_order)
    nx.set_node_attributes(labeled_graph, new_order, 'labeling')
    return labeled_graph
def nauty_graph_automorphism(graph: nx.Graph):
    """
    Graph canonicalization function, meant to break the ties left by the
    non-injective ranking function

    :param graph: subgraph to be canonicalized
    :return: canonicalized subgraph
    """
    # Convert labels to integers to give nauty the node partitions it requires
    graph_int_labeled = convert_node_labels_to_integers(graph)
    canonicalized_graph = nx.Graph(graph_int_labeled)

    # Get the canonical labeling using nauty
    nauty = Graph(len(graph_int_labeled.nodes()), directed=False)
    nauty.set_adjacency_dict({node: list(nbr)
                              for node, nbr in graph_int_labeled.adjacency()})
    labels_dict = nx.get_node_attributes(graph_int_labeled, 'labeling')
    nauty_labeling = canonical_labeling(nauty)  # computed once, then indexed
    canonical_labeling_order = {k: nauty_labeling[k]
                                for k in range(len(graph_int_labeled.nodes()))}
    canonical_order = relabel_graph(graph_int_labeled, labels_dict,
                                    canonical_labeling_order)
    nx.set_node_attributes(canonicalized_graph, canonical_order, 'labeling')
    return canonicalized_graph
def canonicalizes(self, subgraph):
    st = time.time()
    # wl_subgraph_normalized = self.wl_normalization(subgraph)['labeled_graph']
    # g_relabel = convert_node_labels_to_integers(wl_subgraph_normalized)
    g_relabel = convert_node_labels_to_integers(subgraph)
    labeled_graph = nx.Graph(g_relabel)

    # Build the nauty graph from the networkx adjacency structure
    nauty_graph = Graph(len(g_relabel.nodes()), directed=False)
    nauty_graph.set_adjacency_dict({n: list(nbrdict)
                                    for n, nbrdict in g_relabel.adjacency()})

    labels_dict = nx.get_node_attributes(g_relabel, 'labeling')
    # Compute the canonical labeling once and use it to re-rank the node labels
    canonical = canonical_labeling(nauty_graph)
    canonical_labeling_dict = {k: canonical[k]
                               for k in range(len(g_relabel.nodes()))}
    new_ordered_dict = self.rank_label_wrt_dict(g_relabel, labels_dict,
                                                canonical_labeling_dict)
    nx.set_node_attributes(labeled_graph, new_ordered_dict, 'labeling')

    ed = time.time()
    self.all_times['canonicalizes'].append(ed - st)
    return labeled_graph
""" Created on Sun Aug 2 22:41:48 2020 @author: eric """ #--------------------------------------------------------------------- # Code qui permet de tracer un graphe pour simuler un réeau social # On ajoute des sommets (node) # On dessine les arêtes (edge) entre les sommets voulus #--------------------------------------------------------------------- from networkx import nx, diameter, radius, center import matplotlib.pyplot as plt reseau_social = nx.Graph() reseau_social.add_node('laurent') reseau_social.add_node('pierre') reseau_social.add_node('lucie') reseau_social.add_node('sophie') reseau_social.add_node('martin') reseau_social.add_node('jacques') reseau_social.add_edge('laurent', 'pierre') reseau_social.add_edge('lucie', 'pierre') #reseau_social.add_edge('laurent','lucie') reseau_social.add_edge('sophie', 'lucie') reseau_social.add_edge('sophie', 'pierre') reseau_social.add_edge('sophie', 'martin') reseau_social.add_edge('martin', 'laurent')
def test_cycle_undirected_unweighted(self):
    G = nx.Graph()
    G.add_edge(1, 2)
    assert_equal(nx.global_reaching_centrality(G), 0)
def test_undirected_weighted_star(self):
    G = nx.Graph()
    G.add_edge(1, 2, weight=1)
    G.add_edge(1, 3, weight=2)
    assert_equal(nx.global_reaching_centrality(G, normalized=False), 0.25)
def test_negatively_weighted(self):
    G = nx.Graph()
    G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)])
    nx.global_reaching_centrality(G, weight='weight')
import networkx as nx

mapa = nx.Graph()


# ------------------------------------------ EDGE CLASS
class Aresta:

    def __init__(self, origem, destino, custo):
        self._origem = origem
        self._destino = destino
        self._custo = custo
        self._feromonio = 0.1

    def getOrigem(self):
        return self._origem

    def getDestino(self):
        return self._destino

    def getCusto(self):
        return self._custo

    def getFeromonio(self):
        return self._feromonio

    def setFeromonio(self, feromonio):
        self._feromonio = feromonio


# ------------------------------------------ GRAPH CLASS
class Grafo:
10: "ber", 11: "mun", 12: "mil", 13: "pra", 14: "vie", 15: "zag", 16: "rom" } switch_link_matrix = [(1, 2), (1, 4), (2, 3), (2, 5), (3, 4), (3, 6), (4, 7), (4, 9), (5, 6), (5, 10), (6, 7), (6, 11), (7, 8), (8, 9), (8, 12), (10, 11), (10, 13), (11, 12), (11, 14), (12, 16), (13, 14), (14, 15), (15, 16)] host_count_per_switch = 1 topology = nx.Graph() nodes = list(switch_names.keys()) topology.add_nodes_from(nodes) topology.add_edges_from(switch_link_matrix) result = minimum_spanning_tree(topology) no_flood_links = list(set(switch_link_matrix) - set(result.edges)) # ---------- initialize network ----------------------------- #dpid = DPID_BASE OpenFlow14Switch = partial(OVSKernelSwitch, protocols=OPENFLOW_PROTOCOL) #STPEnabledSwitch = partial(OVSKernelSwitch, protocols=OPENFLOW_PROTOCOL, failMode="standalone", stp=True) net = Containernet(ipBase=IP_BASE) net.addController("c0", controller=RemoteController,
def __init__(self, g_dict=None):
    if g_dict is None:
        g_dict = []
    self._g_dict = g_dict
    self.g_networkx = nx.Graph(g_dict)
def test_cycle_undirected_weighted(self):
    G = nx.Graph()
    G.add_edge(1, 2, weight=1)
    grc = nx.global_reaching_centrality
    assert grc(G, normalized=False) == 0
def test_undirected_weighted_star(self):
    G = nx.Graph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
    grc = nx.global_reaching_centrality
    assert grc(G, normalized=False, weight='weight') == 0.375
def test_convert_to_integers_raise():
    G = nx.Graph()
    with pytest.raises(nx.NetworkXError) as excinfo:
        H = convert_node_labels_to_integers(G, ordering="increasing age")
def labeling_to_root(self, graph, vertex):
    labeled_graph = nx.Graph(graph)
    source_path_lengths = nx.single_source_dijkstra_path_length(graph, vertex)
    nx.set_node_attributes(labeled_graph, source_path_lengths, 'labeling')
    return labeled_graph
def gnp_random_graph_Renana(n, rand_num, p_1, p_2, p_12, SH_per, seed=None,
                            directed=False):
    """Returns a random graph with two populations

    Parameters
    ----------
    n : int
        The number of nodes.
    rand_num : array-like of float
        One location per node in the parameter space.
    p_1 : float
        Probability for edge creation in the SH group; more precisely, the
        maximal distance between close friends.
    p_2 : float
        Probability for edge creation in the NORMAL group.
    p_12 : float
        Probability for edge creation between the NORMAL and the SH groups.
    SH_per : float
        The fraction of SH nodes in the network.
    seed : int, optional
        Seed for the random number generator (default=None).
    directed : bool, optional (default=False)
        If ``True``, this function returns a directed graph.

    Notes
    -----
    This algorithm runs in `O(n^2)` time.

    References
    ----------
    .. [1] P. Erdős and A. Rényi, On Random Graphs, Publ. Math. 6, 290 (1959).
    .. [2] E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959).
    .. [3] R. Peres, The impact of network characteristics on the diffusion of
       innovations, Physica A, (2014)
    """
    if directed:
        G = nx.DiGraph()
    else:
        G = nx.Graph()
    G.name = "gnp_random_graph_Renana(%s,%s,%s,%s)" % (n, p_1, p_2, p_12)

    SH_size = int(SH_per * n)
    G.add_nodes_from(range(SH_size), group='SH')
    G.add_nodes_from(range(SH_size, n), group='Normal')
    color_map = {'SH': '#9B0029', 'Normal': '#003366'}
    colors = [color_map[G.nodes[node]['group']] for node in G]

    if seed is not None:
        np.random.seed(seed)

    if G.is_directed():
        edges = itertools.permutations(range(n), 2)
    else:
        edges = itertools.combinations(range(n), 2)

    for e in edges:
        if (e[0] < SH_size) and (e[1] < SH_size):
            if np.abs(rand_num[e[0]] - rand_num[e[1]]) < p_1:
                G.add_edge(*e)
        if (e[0] >= SH_size) and (e[1] >= SH_size):
            if np.abs(rand_num[e[0]] - rand_num[e[1]]) < p_2:
                G.add_edge(*e)
        if ((e[0] < SH_size) and (e[1] >= SH_size)) or \
                ((e[0] >= SH_size) and (e[1] < SH_size)):
            if np.random.uniform() < p_12:
                G.add_edge(*e)
    return G, colors
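# Hypothetical usage sketch for gnp_random_graph_Renana().  rand_num is assumed
# to be one random coordinate per node, since the function compares
# |rand_num[u] - rand_num[v]| against p_1 / p_2; all parameter values below are
# made up.  The imports also provide the np / itertools / nx globals the
# function relies on.
import itertools
import numpy as np
import networkx as nx

n = 200
rand_num = np.random.uniform(0, 1, n)
G, colors = gnp_random_graph_Renana(n, rand_num, p_1=0.05, p_2=0.02, p_12=0.01,
                                    SH_per=0.2, seed=42)
print(G.number_of_nodes(), G.number_of_edges())
# nx.draw(G, node_color=colors, node_size=30)  # optional visualization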
            x = x ^ l[i*16 + j]
        denseHash.append(x)
    s = ""
    for c in denseHash:
        s += "{0:02x}".format(c)
    return s

grid = []
for i in xrange(128):
    kh = knotHash("%s-%d" % (inpt, i))
    gridline = []
    for c in kh:
        gridline.extend([int(c) for c in "{0:04b}".format(int(c, 16))])
    grid.append(gridline)

graph = nx.Graph()
for y in xrange(128):
    for x in xrange(128):
        if grid[y][x]:
            graph.add_node((y, x))
for y in xrange(128):
    for x in xrange(128):
        if y > 0:
            if grid[y][x] and grid[y-1][x]:
                graph.add_edge((y, x), (y-1, x))
        if x > 0:
            if grid[y][x] and grid[y][x-1]:
                graph.add_edge((y, x), (y, x-1))

# part 1
print sum(sum(gridline) for gridline in grid)
def main():
    df = pd.read_csv('metrosp_stations.csv')
    df = df.drop(columns=['Unnamed: 0'])
    df.neigh = df.neigh.str[1:-1].str.split(',').tolist()
    df = df.neigh.apply(pd.Series) \
        .merge(df, left_index=True, right_index=True) \
        .drop(["neigh"], axis=1) \
        .melt(id_vars=['name', 'station', 'lat', 'lon', 'line'],
              value_name="onlyNeigh") \
        .drop("variable", axis=1) \
        .dropna()

    df.to_json('metroSP.json')

    with open('metroSPNotEdited.json') as json_file:
        dataNotEdited = json.load(json_file)

    with open('metroSP.json') as json_file:
        data = json.load(json_file)

    listaDeAdjacencia = {}
    listaEstacoes = []

    # Build the adjacency list
    for (key, estacao) in data['station'].items():
        if not listaDeAdjacencia.get(estacao):
            listaDeAdjacencia[estacao] = []
            listaEstacoes.append(estacao)
        listaDeAdjacencia[estacao].append(data['onlyNeigh'][key])

    # Save the adjacency list
    with open('listaAdjacencia.json', 'w') as json_file:
        json.dump(listaDeAdjacencia, json_file)

    while True:
        os.system("clear")
        opcao = menuPrincipal()

        if opcao == '1':
            origem = input("Origin station: ")
            destino = input("Destination station: ")

            menor_caminho = BFS(listaDeAdjacencia, origem, destino)

            print('This is the shortest path to your destination:')
            for item in menor_caminho:
                print(recupera_nome(dataNotEdited, item) + ' - ' +
                      recupera_linha(dataNotEdited, item))

            G = nx.Graph()
            labels = {}
            mapaDeCores = []
            for estacao, arestas in listaDeAdjacencia.items():
                corNo = recupera_linha(dataNotEdited, estacao)
                labels[estacao] = recupera_nome(dataNotEdited, estacao)
                G.add_node(estacao)
                # corNo = nx.get_node_attributes(G, 'color')
                # corNo = recupera_linha(dataNotEdited, estacao)
                if corNo == '[lilas]':
                    mapaDeCores.append('#8B008B')
                elif corNo == '[verde]':
                    mapaDeCores.append('#006400')
                elif corNo == '[azul]':
                    mapaDeCores.append('#000080')
                elif corNo == '[vermelha]':
                    mapaDeCores.append('#FF0000')
                elif corNo == '[amarela]':
                    mapaDeCores.append('#FF8C00')
                elif corNo == '[prata]':
                    mapaDeCores.append('#1C1C1C')
                else:
                    mapaDeCores.append('#8B4513')
                for aresta in arestas:
                    G.add_edge(estacao, aresta)

            listaNos = G.nodes()
            listaNos = sorted((set(listaNos)))
            posicoes = get_posicoes()

            fig1 = plt.figure('Main graph')
            nx.draw(G, pos=posicoes, nodelist=listaNos, with_labels=True,
                    labels=labels, node_color=mapaDeCores, font_size=6,
                    font_color='white', edge_color='#A0522D')
            fig1.set_facecolor("#00000F")

            fig2 = plt.figure('Subgraph')
            fig2.set_facecolor("#00000F")
            ax = plt.gca()
            ax.set_facecolor('#00000F')
            subgraph = G.subgraph(menor_caminho)
            nx.draw_networkx(subgraph, pos=posicoes, font_size=8,
                             edge_color='#00CED1', node_color='#00CED1',
                             font_color='white')

            plt.show()
        else:
            os.system("clear")
            print('Exiting the program!')
            break
[i, data["Address Receiver"][k], data["Transaction_value"][k]]) connections[i] = {value for value in temp} for k in temp: temp2 = [] for p in range(len(data["Address Sender"])): if data["Address Sender"][p] == k: temp2.append(data["Address Receiver"][p]) num.append([ k, data["Address Receiver"][p], data["Transaction_value"][p] ]) connections[k] = {value for value in temp2} print("Building Succeeds!") G = nx.Graph(connections) pos = nx.spring_layout(G) colormap = [] for node in G: for k in num: if k[1] == i: if k[2] < 1: colormap.append('blue') elif k[2] > 20: colormap.append('red') else: colormap.append('green') nx.draw(G, node_color=colormap, with_labels=True) plt.show()
"""Graph partioning program from slide 89""" from dimod import DiscreteQuadraticModel from dwave.system import LeapHybridDQMSampler from networkx import nx lagrange = 10 num_colors = 4 colors = range(num_colors) dqm = DiscreteQuadraticModel() G = nx.Graph() G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (0, 6)]) n_edges = len(G.edges) for p in G.nodes: dqm.add_variable(num_colors, label=p) for p in G.nodes: dqm.set_linear(p, colors) for p0, p1 in G.edges: dqm.set_quadratic(p0, p1, {(c, c): lagrange for c in colors}) for p0, p1 in G.edges: dqm.set_quadratic(p0, p1, {(c, c): lagrange for c in colors}) sampler = LeapHybridDQMSampler() sampleset = sampler.sample_dqm(dqm) sample = sampleset.first.sample energy = sampleset.first.energy valid = True for edge in G.edges: i, j = edge if sample[i] == sample[j]: valid = False
def wl_normalization(self, graph):
    result = {}
    labeled_graph = nx.Graph(graph)

    relabel_dict_ = {}
    graph_node_list = list(graph.nodes())
    for i in range(len(graph_node_list)):
        relabel_dict_[graph_node_list[i]] = i
    inv_relabel_dict_ = {v: k for k, v in relabel_dict_.items()}
    graph_relabel = nx.relabel_nodes(graph, relabel_dict_)

    label_lookup = {}
    label_counter = 0

    l_aux = list(nx.get_node_attributes(graph_relabel, 'attr_name').values())
    labels = np.zeros(len(l_aux), dtype=np.int32)
    # old-style adjacency list, as in networkx 1.0
    adjency_list = list([list(x[1].keys()) for x in graph_relabel.adjacency()])

    for j in range(len(l_aux)):
        if not (l_aux[j] in label_lookup):
            label_lookup[l_aux[j]] = label_counter
            labels[j] = label_counter
            label_counter += 1
        else:
            labels[j] = label_lookup[l_aux[j]]

    # labels are associated to a natural number
    # starting with 0.
    new_labels = copy.deepcopy(labels)

    # create an empty lookup table
    label_lookup = {}
    label_counter = 0

    for v in range(len(adjency_list)):
        # form a multiset label of the node v of the i'th graph
        # and convert it to a string
        long_label = np.concatenate(
            (np.array([labels[v]]), np.sort(labels[adjency_list[v]])))
        long_label_string = str(long_label)
        # if the multiset label has not yet occurred, add it to the
        # lookup table and assign a number to it
        if not (long_label_string in label_lookup):
            label_lookup[long_label_string] = label_counter
            new_labels[v] = label_counter
            label_counter += 1
        else:
            new_labels[v] = label_lookup[long_label_string]

    # fill the column for the i'th graph in phi
    labels = copy.deepcopy(new_labels)

    dict_ = {inv_relabel_dict_[i]: labels[i] for i in range(len(labels))}
    nx.set_node_attributes(labeled_graph, dict_, 'labeling')

    result['labeled_graph'] = labeled_graph
    result['ordered_nodes'] = [x[0] for x in
                               sorted(dict_.items(), key=lambda x: x[1])]
    return result
def test_undirected_weighted_star(self):
    G = nx.Graph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
    centrality = nx.local_reaching_centrality(G, 1, normalized=False,
                                              weight='weight')
    assert centrality == 1.5
def __init__(self, directed=True):
    self.graph = nx.DiGraph()
    # self.sc_authors = sc_authors
    # self.rc_authors = rc_authors
    if not directed:
        self.graph = nx.Graph()
def test_cycle_undirected_unweighted(self):
    G = nx.Graph()
    G.add_edge(1, 2)
    assert nx.global_reaching_centrality(G, weight=None) == 0
def convert_network(network):
    """Convert a {user: [friends]} mapping into an undirected networkx graph."""
    G = nx.Graph()
    for user, friends in network.items():
        for friend in friends:
            G.add_edge(user, friend)
    return G
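# Usage sketch with a made-up friendship dict (networkx is assumed to be
# imported as nx, as in the snippets above).
import networkx as nx

friends = {
    "alice": ["bob", "carol"],
    "bob": ["alice"],
    "carol": ["alice"],
}
G = convert_network(friends)
print(G.number_of_nodes(), G.number_of_edges())  # 3 nodes, 2 edges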
def test_negatively_weighted(self):
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)])
        nx.local_reaching_centrality(G, 0, weight='weight')
def buildGraphGexf(root, title, data, flt=[]):
    """Convert supplied raw data into GEXF format (e.g. for Gephi)

    GEXF produced by PyGEXF doesn't work with SigmaJS because SJS needs
    coordinates for each node.

    flt is a list of event types to include; if not set, everything is
    included.

    Args:
        root (str): TBD
        title (str): unused
        data (list): scan result as list
        flt (list): TBD

    Returns:
        str: TBD
    """
    mapping = SpiderFootHelpers.buildGraphData(data, flt)
    graph = nx.Graph()

    nodelist = dict()
    ncounter = 0
    for pair in mapping:
        (dst, src) = pair
        col = ["0", "0", "0"]

        # Leave out this special case
        if dst == "ROOT" or src == "ROOT":
            continue

        if dst not in nodelist:
            ncounter = ncounter + 1
            if dst in root:
                col = ["255", "0", "0"]
            graph.add_node(dst)  # the node must exist before its 'viz' attribute can be set
            graph.node[dst]['viz'] = {
                'color': {
                    'r': col[0],
                    'g': col[1],
                    'b': col[2]
                }
            }
            nodelist[dst] = ncounter

        if src not in nodelist:
            ncounter = ncounter + 1
            if src in root:
                col = ["255", "0", "0"]
            graph.add_node(src)
            graph.node[src]['viz'] = {
                'color': {
                    'r': col[0],
                    'g': col[1],
                    'b': col[2]
                }
            }
            nodelist[src] = ncounter

        graph.add_edge(src, dst)

    gexf = GEXFWriter(graph=graph)
    return str(gexf).encode('utf-8')