Example #1
    def scoring(self):
        if self.players_loc[0][1] == self.size - 1:
            if self.index == 0:
                return 1000
            else:
                return -1000

        if self.players_loc[1][1] == 0:
            if self.index == 0:
                return -1000
            else:
                return 1000

        con1 = nx.node_connected_component(self.graph, tuple(self.players_loc[0]))
        con1 = filter(lambda x: x[1] == self.size - 1, con1)
        d1 = min(nx.shortest_path_length(self.graph, tuple(self.players_loc[0]), node) for node in con1)
        con2 = nx.node_connected_component(self.graph, tuple(self.players_loc[1]))
        con2 = filter(lambda x: x[1] == 0, con2)
        d2 = min(nx.shortest_path_length(self.graph, tuple(self.players_loc[1]), node) for node in con2)
        wall_score = WALL_SCORE * (self.wall_counts[1] - self.wall_counts[0])
        distance_score = DISTANCE_SCORE * (d2 - d1)

        if self.score_func0 is not None and self.score_func1 is not None:
            v0 = self.score_func0 * self.wall_counts[0]
            v1 = self.score_func1 * self.wall_counts[1]
            if self.index == 0:
                return distance_score - (v1 - v0)
            else:
                return -distance_score + (v1 - v0)


        if self.index == 0:
            return distance_score - wall_score
        else:
            return -distance_score + wall_score
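Example #1 scores a Quoridor-style position by each player's shortest distance to the goal row, restricted to the player's connected component. A minimal standalone sketch of that distance pattern, assuming a hypothetical 5x5 grid with goal row y == size - 1:

import networkx as nx

size = 5
board = nx.grid_2d_graph(size, size)            # nodes are (x, y) tuples
player = (2, 0)
component = nx.node_connected_component(board, player)
goal_nodes = [node for node in component if node[1] == size - 1]
d = min(nx.shortest_path_length(board, player, node) for node in goal_nodes)
print(d)   # 4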
Example #2
def get_path_two_residues(hbfile, sourceRes, targetRes, pathCutoff, edgeCutoff, middleRes="ARGA0082"):
    '''Get all the pathways between two residues.

    The weight of each edge must be greater than or equal to edgeCutoff,
    and the length of each pathway must be at most pathCutoff.
    '''
    g = nx.Graph()
    for eachLine in open(hbfile):
        fields = eachLine.split()
        if len(fields) == 3 and float(fields[2]) < edgeCutoff:
            continue
        g.add_edge(fields[0], fields[1])
    if (sourceRes in g.nodes()) and (targetRes in g.nodes()) and (nx.has_path(g, sourceRes, targetRes)):
        for eachPath in sorted(nx.all_simple_paths(g, sourceRes, targetRes, pathCutoff), key=lambda p: len(p)):
            print(eachPath)
    else:
        print("No path")

        if (sourceRes in g.nodes()) and (targetRes in g.nodes()):
            print("Both %s and %s in the network but are not connected" % (sourceRes, targetRes))

        if sourceRes in g.nodes():
            print("%s in: " % sourceRes, nx.node_connected_component(g, sourceRes))
            if middleRes in nx.node_connected_component(g, sourceRes):
                print("%s connecting with %s" % (middleRes, sourceRes))

        if targetRes in g.nodes():
            print("%s in: " % targetRes, nx.node_connected_component(g, targetRes))
            if middleRes in nx.node_connected_component(g, targetRes):
                print("%s connecting with %s" % (middleRes, targetRes))

        if middleRes in g.nodes():
            print("%s in: " % middleRes, nx.node_connected_component(g, middleRes))
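A minimal standalone sketch of the same filter-edges-then-enumerate-paths pattern; the node names and cutoffs below are made up for illustration:

import networkx as nx

g = nx.Graph()
# keep only edges whose weight passes the cutoff, as in get_path_two_residues
for u, v, w in [("A", "B", 0.9), ("B", "C", 0.8), ("A", "C", 0.1), ("C", "D", 0.7)]:
    if w >= 0.5:
        g.add_edge(u, v)

if "A" in g and "D" in g and nx.has_path(g, "A", "D"):
    for path in sorted(nx.all_simple_paths(g, "A", "D", cutoff=4), key=len):
        print(path)   # ['A', 'B', 'C', 'D']
else:
    print("No path")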
Example #3
def convertir_en_conexo(grafo):
    nodos = sorted(grafo.nodes())
    nodos_conectados = sorted(nx.node_connected_component(grafo, 0))

    conectar = []
    while nodos != nodos_conectados:
        nodo_en_esta_componente = -1
        nodo_en_otra_componente = -1
        # pick one node from this component and one from another component
        for v in nodos:
            if v not in nodos_conectados:
                nodo_en_otra_componente = v
            else:
                nodo_en_esta_componente = v
            if nodo_en_esta_componente != -1 and nodo_en_otra_componente != -1:
                break

        # remember which pair to connect
        conectar.append((nodo_en_otra_componente, nodo_en_esta_componente))

        # update nodos and nodos_conectados
        for v in nodos_conectados:
            nodos.remove(v)
        if len(nodos) > 0:
            nodos_conectados = sorted(nx.node_connected_component(grafo, nodo_en_otra_componente))

    # connect the connected components
    for u, v in conectar:
        grafo.add_edge(u, v)
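For comparison, a short sketch of the same effect built directly on nx.connected_components, bridging one representative node per component (toy graph):

import networkx as nx

g = nx.Graph([(0, 1), (2, 3), (4, 5)])
reps = [next(iter(c)) for c in nx.connected_components(g)]
g.add_edges_from(zip(reps, reps[1:]))
assert nx.is_connected(g)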
Example #4
def get_junction_tree(G):
    print("triangulate it")
    G_triangulated = triangulate_graph(G)
    print("get cliques")
    cliques = nx.find_cliques(G_triangulated)
    print("make the junction tree")
    junction_tree = nx.Graph()
    junction_tree.add_nodes_from([tuple(clique) for clique in cliques])
    intersection_size = {
        combo: len(combo[0] + combo[1]) - len(set(combo[0] + combo[1]))
        for combo in combinations(junction_tree.nodes(), 2)
    }
    intersection_size = {
        key: val
        for key, val in intersection_size.items() if val != 0
    }
    # use a list so the sort order is kept (a set would discard it)
    ordered_by_weights = [
        pair[0]
        for pair in sorted(intersection_size.items(), key=lambda kv: -kv[1])
    ]
    for pair in ordered_by_weights:
        cluster_1 = pair[0]
        cluster_2 = pair[1]
        if not nx.node_connected_component(
                junction_tree, cluster_1) == nx.node_connected_component(
                    junction_tree, cluster_2):
            junction_tree.add_edge(cluster_1, cluster_2)
    return junction_tree
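The component comparison above is a Kruskal-style cycle check: an edge between two cliques is accepted only if they are not already connected in the growing tree. A small sketch of that check with hypothetical clique labels:

import networkx as nx

tree = nx.Graph()
tree.add_nodes_from(["AB", "BC", "CD"])
for a, b in [("AB", "BC"), ("BC", "CD"), ("AB", "CD")]:
    # only join cliques that are not yet in the same component (no cycles)
    if nx.node_connected_component(tree, a) != nx.node_connected_component(tree, b):
        tree.add_edge(a, b)
assert nx.is_tree(tree)   # ("AB", "CD") was skipped because it would close a cycle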
Example #5
def find_nearest_neighbors2(db, a, nmax=10, rates=None, Peq=None):
    if rates is None:
        edges = [ (ts.minimum1, ts.minimum2, ts.energy) for ts in 
                 db.transition_states(order_energy=True)]
    else:
        edges = [((u, v), k * Peq[u]) for (u, v), k in
                 rates.items() if u < v]
        edges.sort(key=lambda uvk: -uvk[1])
        assert edges[0][1] > edges[1][1]
        edges = [(u, v, k) for (u, v), k in edges]  # keep the rate so the (u, v, e) unpacking below works

    subtrees = nx.utils.UnionFind()
    subtrees[a]
    graph = nx.Graph()
    graph.add_node(a)
    for u,v, e in edges:
        uroot = subtrees[u]
        vroot = subtrees[v]
        if uroot != vroot:
            subtrees.union(u,v)
            graph.add_edge(u,v)
#            graph.add_edge(u,v)
            if subtrees[u] == subtrees[a]:
                print("energy", e, u._id, v._id)
                cc = nx.node_connected_component(graph, a)
                if len(cc) >= nmax:
                    print("found a tree of sufficient size")
                    return cc
    print("finishing without having found a tree of sufficient size")
    cc = nx.node_connected_component(graph, a)
    return cc
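A minimal sketch of the union-find bookkeeping used above (nx.utils.UnionFind), with made-up edges:

import networkx as nx
from networkx.utils import UnionFind

uf = UnionFind()
graph = nx.Graph()
for u, v in [(1, 2), (3, 4), (2, 3)]:
    if uf[u] != uf[v]:           # the endpoints are still in different trees
        uf.union(u, v)
        graph.add_edge(u, v)
print(nx.node_connected_component(graph, 1))   # {1, 2, 3, 4}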
Example #6
def segment(segment_graph, sorted_edges):
  for u,v,d in sorted_edges:
    connected1 = nx.node_connected_component(segment_graph, u)
    connected2 = nx.node_connected_component(segment_graph, v)
    if d['weight'] <= min_internal_diff(connected1, connected2, segment_graph):
      segment_graph.add_edge(u,v,weight=d['weight'])
  return segment_graph
Example #7
 def check_connected(self):
     # Make sure there are still ways to reach the end
     self.graph.remove_edges_from(self.special_edges)
     con1 = nx.node_connected_component(self.graph, tuple(self.players_loc[0]))
     con2 = nx.node_connected_component(self.graph, tuple(self.players_loc[1]))
     connected = any(y == self.size - 1 for x, y in con1) and any(y == 0 for x, y in con2)
     self.graph.add_edges_from(self.special_edges)
     return connected
Example #8
def min_cut_volume(recorder, graph, labels, params):
    assert params.n_shot == 1 and params.n_way == 2
    cut = nx.minimum_edge_cut(graph, 0, 1)
    graph.remove_edges_from(cut)
    part_a = nx.node_connected_component(graph, 0)
    part_b = nx.node_connected_component(graph, 1)
    label_0, label_1 = labels[0].item(), labels[1].item()
    return connect_parts_labels(recorder, graph, part_a, part_b, labels,
                                label_0, label_1)
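A standalone sketch of the split-by-minimum-cut pattern from Example #8, on a toy barbell graph:

import networkx as nx

g = nx.barbell_graph(4, 0)              # two 4-cliques joined by a single edge
cut = nx.minimum_edge_cut(g, 0, 7)
g.remove_edges_from(cut)
part_a = nx.node_connected_component(g, 0)
part_b = nx.node_connected_component(g, 7)
print(part_a, part_b)                   # {0, 1, 2, 3} {4, 5, 6, 7}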
Example #9
def test_inversion_option(c1,c2,join_options,ograph,linked):
    a=c1
    b=linked[c1]
    c=c2
    d=linked[c2]

    #k=linked[internal_node]
    ograph.remove_edge(a,b)
    ograph.remove_edge(c,d)
                                                    ##                                                                        ---b a-------c d----
    if a in nx.node_connected_component(ograph,c):  ## exchange labels of a,b if necessary, so the nodes are in this config:  ---a b-------c d------
        x=b
        b=a
        a=x
     
                                                    ##                                                                        ---b a-------d c----
    if a in nx.node_connected_component(ograph,d):  ## exchange labels of a,b and of c,d if necessary, so the nodes are in this config:  ---a b-------c d------
        x=b
        b=a
        a=x

        x=d
        d=c
        c=x

                                                    ##                                                                        ---a b-------d c----
    if b in nx.node_connected_component(ograph,d):  ## exchange labels of c,d if necessary, so the nodes are in this config:  ---a b-------c d------
        x=d
        d=c
        c=x

    n_scaffold = old_div(len(nx.node_connected_component(ograph,b)),2)
    print("inversion n nodes",n_scaffold)


    total_i_len = sum( ograph[i][j]['length'] for i,j in nx.bfs_edges(ograph,b) )
    print("inv len",total_i_len)
    if total_i_len < 10000.0 or n_scaffold<2:
        print("inversion length",total_i_len,n_scaffold,"too short")
        join_options.append( (0.0,(),() ) )                                                                                              
        ograph.add_edge(a,b,length=default_gapsize,contig=False)
        ograph.add_edge(c,d,length=default_gapsize,contig=False)

        return

    interc_score0 = intercalation_score_raw(a,b,d,ograph) 
    interc_score1 = intercalation_score_raw(a,c,d,ograph) 
    print("inversion0", interc_score0)
    print("inversion", interc_score1)
    join_options.append( (interc_score0,(),() ) )
    join_options.append( (interc_score1,((a,c),(b,d)),((a,b),(c,d)) ) )
    ograph.add_edge(a,b,length=default_gapsize,contig=False)
    ograph.add_edge(c,d,length=default_gapsize,contig=False)

    return
Example #10
def sparsity_heuristic(graph, i, j):
    #TODO: consider 8X8 window around diagonal only
    cc_1 = nx.node_connected_component(graph, (i, j))
    cc_2 = nx.node_connected_component(graph, (i + 1, j))
    score = min(abs(len(cc_1) - len(cc_2)), 64)
    if len(cc_1) < len(cc_2):
        graph[(i, j)][(i + 1, j + 1)][HEURISTICS_SCORE] = graph[
            (i, j)][(i + 1, j + 1)][HEURISTICS_SCORE] + score
    elif len(cc_1) > len(cc_2):
        graph[(i + 1, j)][(i, j + 1)][HEURISTICS_SCORE] = graph[
            (i + 1, j)][(i, j + 1)][HEURISTICS_SCORE] + score
Example #11
def prune_loops(g, s, t):
    ps = list(nx.articulation_points(g))
    for p in ps:
        if p in g and p not in [s, t]:
            g1 = nx.Graph(g)
            g1.remove_node(p)
            cs = nx.node_connected_component(g1, s)
            ct = nx.node_connected_component(g1, t)
            c = cs.union(ct)
            c.add(p)
            g.remove_nodes_from([n for n in g if n not in c])
Example #12
 def largest_cluster(self, distribution=False):
     sizes = []
     for node in self.__latt.nodes():
         if len(nx.node_connected_component(self.__latt, node)) != 1:
             sizes.append(
                 len(nx.node_connected_component(self.__latt, node)))
     mx = max(sizes) if len(sizes) > 0 else 0
     if not distribution:
         del sizes
         return mx
     else:
         return sizes
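A sketch of a cheaper way to get the same maximum, calling nx.connected_components once instead of nx.node_connected_component per node; it yields one size per cluster rather than per node, so the distribution differs, but the maximum is identical (hypothetical lattice graph):

import networkx as nx

latt = nx.Graph([(0, 1), (1, 2), (3, 4), (5, 6)])   # hypothetical lattice
latt.add_node(7)                                    # an isolated site
sizes = [len(c) for c in nx.connected_components(latt) if len(c) > 1]
largest = max(sizes) if sizes else 0
print(largest, sizes)   # 3 [3, 2, 2]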
Example #13
 def _calculate_groups_size_function(self, T):
     G = T.copy()
     for edge in T.edges():
         u, v = edge
         G.remove_edge(u, v)
         Nu = nx.node_connected_component(G, u)
         Nu = len(Nu)
         Nv = nx.node_connected_component(G, v)
         Nv = len(Nv)
         T[u][v]["group_size"] = T[u][v]["weight"] * min(Nu, Nv)
         G.add_edge(u, v, **T[u][v])  # re-add the edge with its attributes
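A small sketch of what the group-size term measures: deleting a tree edge splits the tree in two, and min(|Nu|, |Nv|) is the size of the smaller side (toy path graph):

import networkx as nx

T = nx.path_graph(5)                  # 0-1-2-3-4
T_minus = T.copy()
T_minus.remove_edge(1, 2)
Nu = len(nx.node_connected_component(T_minus, 1))   # 2 -> {0, 1}
Nv = len(nx.node_connected_component(T_minus, 2))   # 3 -> {2, 3, 4}
print(min(Nu, Nv))                                  # 2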
Example #14
 def _calculate_groups_size_function(self, T):
     G = T.copy()
     for edge in T.edges():
         u, v = edge
         G.remove_edge(u, v)
         Nu = nx.node_connected_component(G, u)
         Nu = len(Nu)
         Nv = nx.node_connected_component(G, v)
         Nv = len(Nv)
         T[u][v]["group_size"] = T[u][v]["weight"] * min(Nu, Nv)
         G.add_edge(u, v, **T[u][v])  # re-add the edge with its attributes
Example #15
def stoer_wagner_volume(recorder, graph, labels, params):
    assert params.n_way == 2
    try:
        _, (part_a, part_b) = nx.stoer_wagner(graph)
    except nx.exception.NetworkXError:
        assert params.n_shot == 1
        part_a = nx.node_connected_component(graph, 0)
        part_b = nx.node_connected_component(graph, 1)
    label_0, label_1 = torch.min(labels).item(), torch.max(labels).item()
    assert (labels == label_0).sum().item() * 2 == len(graph)
    return connect_parts_labels(recorder, graph, part_a, part_b, labels,
                                label_0, label_1)
Example #16
def reduce_graph(g):
    # show_graph(g,3)
    nx.set_edge_attributes(g, 'capacity', 1)
    g = nx.convert_node_labels_to_integers(g, 0)
    g1 = nx.Graph()
    for edge in g.edges():
        if edge in g1.edges():
            g1.edge[edge[0]][edge[1]]['capacity'] += 1
        else:
            g1.add_edge(edge[0], edge[1], capacity=1)
    construction(g1, 0, set(g1.nodes()))
    # show_graph(A, 3)
    capacities=nx.get_edge_attributes(A,'capacity')
    if max(capacities.values())<=1:
        return g,None,1
    if max(capacities.values())==2:
        return g,None,2
    remove_edges = list()
    biconnected = list()
    for e in A.edges(data=True):
        if e[2]['capacity'] <= 2:
            remove_edges.append(e)
        if e[2]['capacity'] == 2:
            biconnected.append(e)
    A.remove_edges_from(remove_edges)
    for e in biconnected:
        cut_edges = list(nx.minimum_edge_cut(g1, e[0], e[1]))
        if cut_edges[1][0] in nx.node_connected_component(A, cut_edges[0][0]):
            g.add_edge(cut_edges[0][0], cut_edges[1][0])
            if cut_edges[1][1] in nx.node_connected_component(A, cut_edges[0][1]):
                g.add_edge(cut_edges[0][1], cut_edges[1][1])
            else:
                raise Exception("Biconnected edge does not match")
        elif cut_edges[1][0] in nx.node_connected_component(A, cut_edges[0][1]):
            g.add_edge(cut_edges[0][0], cut_edges[1][1])
            if cut_edges[1][1] in nx.node_connected_component(A, cut_edges[0][0]):
                g.add_edge(cut_edges[0][1], cut_edges[1][0])
            else:
                raise Exception("Biconnected edge does not match")
        else:
            raise Exception("Biconnected edge does not match")
        # print e[0],g1[e[0]]
        # print e[1],g1[e[1]]
    # show_graph(A, 1)
    # show_graph(g,1)
    G = list(nx.connected_components(A))
    connected_components=list(nx.connected_components(A))
    for i in connected_components:
        if len(i)<3:
            G.remove(i)
    if G==[]:
        return g,None,2
    return g,G,3
Example #17
    def distance(self, game):
        con1 = nx.node_connected_component(game.graph, tuple(game.players_loc[0]))
        con1 = filter(lambda x: x[1] == game.size - 1, con1)
        d1 = min(nx.shortest_path_length(game.graph, tuple(game.players_loc[0]), node) for node in con1)
        con2 = nx.node_connected_component(game.graph, tuple(game.players_loc[1]))
        con2 = filter(lambda x: x[1] == 0, con2)
        d2 = min(nx.shortest_path_length(game.graph, tuple(game.players_loc[1]), node) for node in con2)
        distance_score =  d2 - d1

        if game.index == 0:
            return distance_score
        else:
            return -distance_score
Example #18
def calc_closeness(G, normalized=True):
    # Calculates the closeness centrality
    closeness = {node: nx.closeness_centrality(G, node) for node in G}
    if not normalized:
        g_n = len(G)
        # guard against isolated nodes: a component of size 1 would divide by zero
        closeness = {
            node: value * (G.number_of_nodes() - 1) /
            (len(nx.node_connected_component(G, node)) - 1)
            if 1 < len(nx.node_connected_component(G, node)) else 0
            for node, value in closeness.items()
        }
    #end if
    return closeness
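NetworkX 2.x exposes the per-component (non-Wasserman-Faust) variant directly through the wf_improved flag, which is essentially what the rescaling above reconstructs; a small sketch, assuming NetworkX 2.x:

import networkx as nx

G = nx.Graph([(0, 1), (1, 2), (3, 4)])           # two components
raw = nx.closeness_centrality(G, wf_improved=False)
scaled = nx.closeness_centrality(G)              # default Wasserman-Faust scaling
print(raw[0], scaled[0])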
Example #19
 def get_molecules_change(self, bond_change):
     # Compute the change in molecules.
     atoms_involved = np.unique(
         bond_change[:, :2]
     )  # Atoms indices that are involved in one of the reaction of bond_change
     molecule_subset_old = set()
     molecule_subset_new = set()
     # Get the molecules involved in the reactions of bond_change.
     for atom in atoms_involved:
         molecule_subset_old = molecule_subset_old.union(
             nx.node_connected_component(self.molecule_graph, atom))
     # Get the molecules before the reaction
     [
         old_molecules, old_molecules_count, old_molecules_frame,
         old_mol_atoms_involved
     ] = self.get_molecules_frame(
         self.molecule_graph.subgraph(molecule_subset_old), atoms_involved)
     old_molecule_bond_change = self.get_molecule_bond_change(
         old_mol_atoms_involved, atoms_involved, bond_change)
     # Update the graph of the system with the reactions in bond_change
     self.update_molecule_graph(bond_change)
     for atom in atoms_involved:
         molecule_subset_new = molecule_subset_new.union(
             nx.node_connected_component(self.molecule_graph, atom))
     # Get the molecules after the reactions.
     [
         new_molecules, new_molecules_count, new_molecules_frame,
         new_mol_atoms_involved
     ] = self.get_molecules_frame(
         self.molecule_graph.subgraph(molecule_subset_new), atoms_involved)
     new_molecule_bond_change = self.get_molecule_bond_change(
         new_mol_atoms_involved, atoms_involved, bond_change)
     molecules_frame_change = new_molecules.copy()
     molecules_counts_change = new_molecules_count.copy()
     # Calculate the molecules changes
     for mol in range(old_molecules.shape[0]):
         molecule_idx = np.where(
             (molecules_frame_change == old_molecules[mol, :]).all(axis=1))
         if molecule_idx[0].shape[0] == 0:
             molecules_frame_change = np.vstack(
                 [molecules_frame_change, old_molecules[mol, :]])
             molecules_counts_change = np.append(molecules_counts_change,
                                                 -old_molecules_count[mol])
         elif molecule_idx[0].shape[0] == 1:
             molecules_counts_change[
                 molecule_idx[0]] -= old_molecules_count[mol]
     return [
         molecules_frame_change, molecules_counts_change,
         old_molecules_frame, old_molecule_bond_change, new_molecules_frame,
         new_molecule_bond_change
     ]
Example #20
    def connected_paths(self, path_id, include_self=False):
        """
        Given an index of self.paths find other paths which
        overlap with that path.

        Parameters
        -----------
        path_id : int
          Index of self.paths
        include_self : bool
          Should the result include path_id or not

        Returns
        -----------
        path_ids :  (n, ) int
          Indexes of self.paths that overlap input path_id
        """
        if len(self.root) == 1:
            path_ids = np.arange(len(self.polygons_closed))
        else:
            path_ids = list(nx.node_connected_component(
                self.enclosure,
                path_id))
        if include_self:
            return np.array(path_ids)
        return np.setdiff1d(path_ids, [path_id])
Example #21
 def multi_collapse(self):
     """
     performs collapsing across multiple ProphetExperiment
     retains only core complexes seen in multiple experiments
     i.e exp 1 A-B-C
     exp 2 A-B-D
     exp3 A-B-C
     keep ABC as most frequent combination of subunits
     """
     allhypo = pd.concat([exp.get_hypo() for exp in self.allexps])
     # this is only for later splitting to make sure there is no other $
     names = list(allhypo.index + "$" + allhypo["CREP"])
     annot_gr = self.simil_graph_weight(allhypo, names)
     allhypo["nm"] = names
     # now we need to uniform the name across all annotation
     tosub = []
     count = 1
     for test in names:
         try:
             torename = nx.node_connected_component(annot_gr, test)
             annot_gr.remove_nodes_from(torename)
             # select only hypo in torename and rename using cmplx + count
             tmp = allhypo[allhypo["nm"].isin(torename)]
             tmp["ID"] = "cmplx__" + str(count)
             tosub.append(tmp)
         except KeyError:
             # remove inplace faster to catch than test has_node
             pass
         finally:
             count += 1
     if tosub:
         self.all_hypo = pd.concat(tosub, axis=0)
     else:
         self.allhypo = pd.DataFrame()
Example #22
def testFun(filePath):
    import networkx as nx
    cmd.delete("all")
    
    cmd.fetch("1C3W")
    cmd.hide("lines", "all")
    cmd.show("cartoon", "1C3W")
    cmd.color("green", "all")
    
    #------------------------------ cmd.color("yellow", "resi 194 and resn GLU")
    #------------------------------- cmd.show("sticks", "resi 194 and resn GLU")
    highlightRes("GLUA0194", color="yellow")
    highlightRes("GLUA0204", color="yellow")
    
        
    g = loadHbTxt(filePath)
    allNodes = nx.node_connected_component(g, "GLUA0194")
    #===========================================================================
    # print allNodes
    #===========================================================================
    
    accRes = {}
    for line in open("/Users/xzhu/sibyl/BR/1C3W/hydro/def/raw/acc.res"):
        fields = line.split()
        resString = fields[1] + fields[2]
        acc = float(fields[4])
        accRes[resString] = acc
    
    colorThreshold = 0.02
    for eachResidue in accRes.keys():
        if accRes[eachResidue] > colorThreshold:
            if eachResidue in allNodes:
                print(eachResidue)
                highlightRes(eachResidue)
Example #23
def find_all_subgraphs(di_graph_object, list_of_nodes_to_exclude=None):
    assert isinstance(di_graph_object, nx.DiGraph)
    di_graph_object_copy = di_graph_object.copy()
    tmp_di_graph_object = remove_list_of_nodes(di_graph_object_copy,
                                               list_of_nodes_to_exclude)
    tmp_list_of_bottleneck_nodes = look_for_bottleneck_nodes(
        tmp_di_graph_object)

    tmp_list_of_subgraph_node_pairs = []

    tmp_subgraph_dict = {}
    tmp_subgraph_dict['graph'] = nx.DiGraph()
    tmp_subgraph_dict[
        'condition'] = qmlReader.new_questionnaire_classes.ConditionObject(
            condition_string=True)
    subgraph_dict = defaultdict(tmp_subgraph_dict)

    for i in range(len(tmp_list_of_bottleneck_nodes) - 1):
        tmp_list_of_subgraph_node_pairs.append(
            (tmp_list_of_bottleneck_nodes[i],
             tmp_list_of_bottleneck_nodes[i + 1]))
        subgraph_dict.append()
    tmp_subgraph_nodes_dict = {}
    for node in tmp_list_of_bottleneck_nodes:
        di_graph_object_copy = remove_all_edges_connecting_to_node(
            di_graph_object_copy, node=node)
    for node_pair in tmp_list_of_subgraph_node_pairs:
        tmp_di_graph_object_copy = di_graph_object_copy.copy()
        # tmp_subgraph_nodes_dict[node_pair] = find_all_nodes_inbetween(di_graph_object=tmp_di_graph_object, source=node_pair[0], target=node_pair[1])
        tmp_subgraph_nodes_dict[node_pair] = nx.node_connected_component(
            nx.to_undirected(tmp_di_graph_object_copy), node_pair[0])
    return tmp_subgraph_nodes_dict
Example #24
def get_connected_components_jaccard_similarity(documents,
                                                jaccard_threshold=.2,
                                                field_type="text"):
    """
        Find the connected components of documents sharing the same n-gram based on a threshold for Jaccard similarity.
    """
    document_text = {}
    for k, v in documents.items():
        try:
            document_text[k] = v[field_type]
        except:
            pass
    G = nx.Graph()
    similarity = {}
    ads = list(document_text)
    G.add_nodes_from(ads)

    for i in range(0, len(ads) - 1):
        a = []
        for j in range(i + 1, len(ads)):
            similarity[(ads[i], ads[j])] = round(
                distance.jaccard(document_text[ads[i]], document_text[ads[j]]),
                3)

    for k, v in similarity.items():
        if v <= jaccard_threshold:
            G.add_edge(k[0], k[1])

    connected_components = set()

    for i in G.nodes():
        connected_components.add(str(sorted(nx.node_connected_component(G,
                                                                        i))))

    return connected_components
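The str(sorted(...)) bookkeeping above can also be done with frozensets, or by enumerating components once; a short sketch on a toy graph:

import networkx as nx

G = nx.Graph([("a", "b"), ("b", "c"), ("d", "e")])
components = {frozenset(c) for c in nx.connected_components(G)}
print(components)   # two components: {'a', 'b', 'c'} and {'d', 'e'}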
Example #25
def far_end(g, n):
    if not g.degree(n) == 1:
        print("wtf: this should be a leaf")
        exit(0)
    for m in nx.node_connected_component(g, n):
        if (not m == n) and g.degree(m) == 1:
            return m
Example #26
def test_endInversion_option(free_end, internal_node, join_options, ograph,
                             linked):
    if free_end in linked:
        x = free_end
        free_end = internal_node
        internal_node = x
    print("end inversion", free_end, linked.get(free_end), internal_node,
          linked[internal_node])

    k = linked[internal_node]
    ograph.remove_edge(internal_node, k)

    if free_end in nx.node_connected_component(ograph, internal_node):
        x = k
        k = internal_node
        internal_node = x

    sc = link_test(ograph, k, internal_node)
    join_options.append((sc, (), ()))
    print("end inversion existing:", sc)

    sc = link_test(ograph, free_end, internal_node)
    join_options.append(
        (sc, ((free_end, internal_node), ), ((internal_node, k), )))
    print("end inversion:", sc)

    ograph.add_edge(internal_node, k, length=default_gapsize, contig=False)
    return
Example #27
def get_connected_components_jaccard_similarity(documents, jaccard_threshold=.2, field_type="text"):
    """
        Find the connected components of documents sharing the same n-gram based on a threshold for Jaccard similarity.
    """
    document_text = {}
    for k,v in documents.items():
        try:
            document_text[k] = v[field_type]
        except:
            pass
    G = nx.Graph()
    similarity = {}
    ads = list(document_text)
    G.add_nodes_from(ads)

    for i in range(0,len(ads)-1):
        a = []
        for j in range(i+1,len(ads)):
            similarity[(ads[i],ads[j])] =  round(distance.jaccard(document_text[ads[i]], document_text[ads[j]]),3)

    for k, v in similarity.items():
        if v <= jaccard_threshold:
            G.add_edge(k[0],k[1])

    connected_components = set()

    for i in G.nodes():
        connected_components.add(str(sorted(nx.node_connected_component(G, i))))

    return connected_components
Example #28
def sdg_min_cut(G, u, v):
    """
    Computes minimum u, v cut using Ford-Fulkerson Algorithm, from
    "Computing the minimum cut and maximum flow of undirected graphs" by
    Schroeder, Jonatan and Guedes, ALP and Duarte Jr, Elias P.
    :param G: graph
    :param u: source vertex
    :param v: sink vertex
    :return: partition of vertices of G, S1 and S2, as well as the corresponding max flow.
    """
    D = G.to_directed()
    max_flow = 0
    path_queue, flow = breadth_first_search_path(D, u, v)
    while flow != -1:
        max_flow += flow
        predecessor = path_queue.popleft()
        while len(path_queue) != 0:
            successor = path_queue.popleft()
            if D[predecessor][successor]['weight'] == flow:
                D.remove_edge(predecessor, successor)
                D.remove_edge(successor, predecessor)
            else:
                D[predecessor][successor]['weight'] -= flow
                D[successor][predecessor]['weight'] += flow
            predecessor = successor
        path_queue, flow = breadth_first_search_path(D, u, v)
    S1 = nx.node_connected_component(D.to_undirected(), u)
    S2 = {w for w in G.nodes() if w not in S1}
    return S1, S2, max_flow
Example #29
    def toggleComponentSuspicious(self, state):
        if not self.selectedDomain:
            return

        graphComponent=nx.node_connected_component(self.suspiciousGraph,
                self.selectedDomain)
        for domain in graphComponent:
            node=self.suspiciousGraph.nodes[domain]

            if state==Qt.Checked:
                node['isMalicious']=True
                self.componentsTableModel.setComponentMalicious(domain, True)
            elif state==Qt.Unchecked:
                node['isMalicious']=False
                self.componentsTableModel.setComponentMalicious(domain, False)

        """
        update table data
        """
        if self.toggleHideNotMaliciousDomains.isChecked():
            self.toggleHideNonMalicious(Qt.Checked)
        else:
            self.toggleHideNonMalicious(Qt.Unchecked)

        self.updateStatusBar()
Example #30
    def _reduce_graph(self, graph, min0list):
        """determine how much of the graph to include in the disconnectivity graph
        """
        used_nodes = []
        # make sure we include the subgraph containing min0
        if len(min0list) > 0:
            for min0 in min0list:
                nodes = nx.node_connected_component(graph, min0)
                if len(nodes) > 2:
                    used_nodes += nodes
                else:
                    print("dgraph: too few nodes connected to", min0)  
        if len(used_nodes) == 0: 
            # use the biggest connected cluster
            cc = sorted(nx.connected_components(graph), key=len, reverse=True)
            used_nodes += cc[0]  # list is ordered by size of cluster

        if self.subgraph_size is not None:
            node_lists = nx.connected_components(graph)
            for nodes in node_lists:
                if len(nodes) >= self.subgraph_size:
                    used_nodes += nodes

        newgraph = graph.subgraph(used_nodes).copy()
        return newgraph
Example #31
def test1(n, p):
    """tests DC and BFS-based connectivity alg on a G(n,p)"""
    #seed(69)

    G = nx.gnp_random_graph(n, p)
    print("G has {} connected components".format(
        nx.number_connected_components(G)))
    DC = DynamicCon(G)
    N = 100
    allTrues1 = [False] * N
    allTrues2 = [False] * N
    for i in range(N):
        node1, node2 = getRandomConnectedNodes(G)
        allTrues1[i] = areConnected(G, node1, node2)
        allTrues2[i] = DC.connected(node1, node2)
    print("The BFS-based alg works correctly = " + str(False not in allTrues1))
    print("The DC-based alg works correctly = " + str(False not in allTrues2))

    allFalses1 = [True] * N
    allFalses2 = [True] * N
    for i in range(N):
        node1, node2 = getRandomNotConnectedNodes(G)
        allFalses1[i] = areConnected(G, node1, node2)
        allFalses2[i] = DC.connected(node1, node2)
        if allFalses2[i]:
            print("DC reports that {} and {} are connected!".format(
                node1, node2))
            print("Node1: {}, node2: {}. CC of node1: {}".format(
                node1, node2, nx.node_connected_component(G, node1)))

    print("The BFS-based alg works correctly = " + str(True not in allFalses1))
    print("The DC-based alg works correctly = " + str(True not in allFalses2))
Example #32
    def recommend(self, i, items):
        kk = np.zeros(len(items))
        for c in self.G:
            H = nx.node_connected_component(self.G[c], i)
            d = np.shape(self.S[0])[0]
            Sc = np.zeros((d, d))
            bc = np.zeros(d)
            Tc = 0
            for j in H:
                Sc += self.S[j]
                bc += self.b[j]
                Tc += self.T[j]

            if invertible(Sc):
                Sinv = np.linalg.inv(Sc)
                theta_est = np.dot(Sinv, bc)
                for l in self.i_clusters[c]:
                    kk[l] = np.dot(items[l, :],
                                   theta_est) + self.beta * np.dot(
                                       items[l, :], np.dot(Sinv, items[l, :]))
            else:
                for l in self.i_clusters[c]:
                    kk[l] = np.random.uniform(0, 1, 1)

        return np.argmax(kk)
Example #33
def create_dataset_from_parquet(pages, links, views):
    mapping, edges, ts = sample_random(pages, links, views)

    # get subgraph and compute pagerank
    g = nx.subgraph(
        nx.from_pandas_edgelist(edges,
                                source="src",
                                target="dst",
                                create_using=nx.DiGraph),
        ts.id,
    )
    print(f"sampled graph has {g.number_of_nodes()} nodes")
    pr = nx.pagerank(g)
    ordered = sorted(pr.keys(), key=pr.get, reverse=True)

    # only keep the largest connected component; the top pagerank node is
    # most likely to be part of it
    g = nx.Graph(g).subgraph(
        nx.node_connected_component(nx.Graph(g), ordered[0]))
    print(f"largest component has {g.number_of_nodes()} nodes")

    # create a list sorted by pagerank
    connected = pd.DataFrame({
        "id": g.nodes(),
        "pagerank": [pr[x] for x in g.nodes()]
    }).sort_values("pagerank", ascending=False)
    return connected.merge(mapping), edges, connected.merge(ts).drop(
        "pagerank", axis=1)
Example #34
def far_end(g,n):
    if not g.degree(n)==1:
        print("wtf: this should be a leaf")
        exit(0)
    for m in nx.node_connected_component(g,n):
        if (not m==n) and g.degree(m)==1:
            return m
Example #35
def build_beatles_cluster():
    nodes = nx.node_connected_component(G, "303")
    print('beatles nodes', len(nodes))
    p = r.pipeline()
    for node in nodes:
        p.sadd('BEATLES-SET', node)
    p.execute()
Example #36
def test_endInversion_option(free_end,internal_node,join_options,ograph,linked):
    if free_end in linked:
        x=free_end
        free_end = internal_node
        internal_node = x
    print("end inversion",free_end,linked.get(free_end),internal_node,linked[internal_node])

    k=linked[internal_node]
    ograph.remove_edge(internal_node,k)

    if free_end in nx.node_connected_component(ograph,internal_node): 
        x=k
        k=internal_node
        internal_node=x

    sc = link_test(ograph,k,internal_node)
    join_options.append( (sc,(),()))
    print("end inversion existing:",sc)

    sc = link_test(ograph,free_end,internal_node)
    join_options.append( (sc ,((free_end,internal_node),),((internal_node,k),) ) ) 
    print("end inversion:",sc)

    ograph.add_edge(internal_node,k,length=default_gapsize, contig=False) 
    return
Example #37
def generic_product_rule(g, op):
    sel1 = random.sample(list(g.nodes()), 2)
    if nx.has_path(g, *sel1):
        g.add_edge(*sel1)
        return sel1
    sel2 = random.sample(list(g.nodes()), 2)
    if nx.has_path(g, *sel2):
        g.add_edge(*sel2)
        return sel2
    elif op( len(nx.node_connected_component(g, sel2[0])) * len(nx.node_connected_component(g, sel2[1])), \
             len(nx.node_connected_component(g, sel1[0])) * len(nx.node_connected_component(g, sel1[1])) ):
        g.add_edge(*sel2)
        return sel2
    else:
        g.add_edge(*sel1)
        return sel1
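A hypothetical driver for generic_product_rule; operator.lt gives the usual "smaller component product wins" Achlioptas rule, and the seed and sizes below are made up:

import operator
import random
import networkx as nx

random.seed(0)
g = nx.empty_graph(50)
for _ in range(200):
    generic_product_rule(g, operator.lt)   # assumes the function above is in scope
print(nx.number_connected_components(g), g.number_of_edges())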
Example #38
    def connected_paths(self, path_id, include_self=False):
        """
        Given an index of self.paths find other paths which
        overlap with that path.

        Parameters
        -----------
        path_id : int
          Index of self.paths
        include_self : bool
          Should the result include path_id or not

        Returns
        -----------
        path_ids :  (n, ) int
          Indexes of self.paths that overlap input path_id
        """
        if len(self.root) == 1:
            path_ids = np.arange(len(self.polygons_closed))
        else:
            path_ids = list(
                nx.node_connected_component(self.enclosure, path_id))
        if include_self:
            return np.array(path_ids)
        return np.setdiff1d(path_ids, [path_id])
Example #39
    def min_ab_sep(G, C_i, C_j):
        #copy G for edge deletion
        G_copy = G.copy()
        #find neighbors of C_i
        A_C_i = []
        for node in C_i:
            u = [n for n in G.neighbors(node) if n not in C_i]
            A_C_i = A_C_i + u
        #remove edges from Ci to ACi and between ACi
        for i in C_i:
            for l in A_C_i:
                if G_copy.has_edge(i, l):
                    G_copy.remove_edge(i, l)
                elif G_copy.has_edge(l, i):
                    G_copy.remove_edge(l, i)

        for i in A_C_i:
            for l in A_C_i:
                if G_copy.has_edge(i, l):
                    G_copy.remove_edge(i, l)
        #select node from Cj and find all connected nodes
        j = C_j[0]
        R_j = nx.node_connected_component(G_copy, j)

        #determine minimal a,b-separator
        intersection = list(set(A_C_i) & set(R_j))

        return intersection
Example #40
    def __init__(self, points, sensing_radius, boundary):
        alpha_complex = AlphaComplex(points)
        simplex_tree = alpha_complex.create_simplex_tree(
            max_alpha_square=sensing_radius**2)

        self._simplices = [[], [], []]
        self._simplices[0] = [
            simplex[0] for simplex, _ in simplex_tree.get_skeleton(0)
        ]
        self._simplices[1] = [
            tuple(simplex) for simplex, _ in simplex_tree.get_skeleton(1)
            if len(simplex) == 2
        ]
        self._simplices[2] = [
            tuple(simplex) for simplex, _ in simplex_tree.get_skeleton(2)
            if len(simplex) == 3
        ]

        graph = nx.Graph()
        graph.add_nodes_from(self._simplices[0])
        graph.add_edges_from(self._simplices[1])

        self._boundary_cycles = CMap(graph, points).get_boundary_cycles()
        self._boundary_cycles.remove(boundary.alpha_cycle)

        self._connected_nodes = nx.node_connected_component(graph, 0)
Example #41
    def lay_down_nodes(self):
        """
        Given a directed graph, finds the connected component which includes
        the root node and then determines positions in the circular view.

        :return: Positions of the nodes from the perspective of the root node
        !!! returned dict does not contain positions of unconnected nodes!
        """
        # Find undirected graph (needed for connected component discovery)
        gr_undirected = self.graph.to_undirected()

        # Remove disconnected nodes from the graph
        component_nodes = nx.node_connected_component(gr_undirected, self.root_node)
        for node in list(gr_undirected.nodes()):
            if node not in component_nodes:
                gr_undirected.remove_node(node)

        # Find bfs tree of the connected components
        bfs_tree = nx.bfs_tree(gr_undirected, self.root_node)
        self.bfs_tree[self.root_node] = bfs_tree

        # Position the nodes in a circular fashion according to the bfs tree
        pos = gpos.hierarchy_pos(bfs_tree, self.root_node,
                                 width=2 * math.pi, xcenter=0.5)
        new_pos = {u: (r * math.cos(theta), r * math.sin(theta))
                   for u, (theta, r) in pos.items()}

        # Set positions to the networkx object
        nx.set_node_attributes(self.graph, name='pos', values=new_pos)

        # Also, return the positions
        return new_pos
Example #42
    def determine_2Dness(
            self, delta):  #obtain dimension and direction for the delta value
        G = self.define_bonds(delta)
        ranks = []
        directions = []

        for loc in np.arange(0, len(self.structure),
                             8):  #all atoms in a unit cell
            if self.check_3Dmetal(self.structure.species_and_occu[loc]
                                  ):  #check if it is a magnetic metal
                rank_nodes = np.array([], dtype=np.int16)
                accepted_nodes = np.arange(8) + loc
                try:
                    for node in nx.node_connected_component(G, loc):
                        if (node in accepted_nodes):
                            rank_nodes = np.append(rank_nodes, node)
                except KeyError:
                    rank_nodes = [0]
                rank_matrix = np.array([np.round(self.frac_coords[node],decimals=3) \
                                                               for node in rank_nodes])
                rank_matrix = rank_matrix - rank_matrix[0]
                rank = np.linalg.matrix_rank(rank_matrix)
                ranks.append(rank)

                #find direction
                direction = "0"
                if rank == 1:
                    direction = str(rank_matrix[1])
                elif rank == 2:
                    direction = str(
                        np.cross(rank_matrix[1], rank_matrix[-1]) * 4)
                directions.append(direction)

        return [np.amax(ranks), directions[np.argmax(ranks)]]
Example #43
def reduce_rates(rates, B, A=None):
    B = set(B)
    if A is not None:
        A = set(A)
        if A.intersection(B):
            raise Exception("A and B share", len(A.intersection(B)), "nodes")
    graph = nx.Graph()
    graph.add_edges_from(rates.keys())

    # remove nodes not connected to B
    # TODO: this only works if B is fully connected
    connected_nodes = nx.node_connected_component(graph, next(iter(B)))
    connected_nodes = set(connected_nodes)
    all_nodes = set(graph.nodes())
    if len(connected_nodes) != len(all_nodes):
        print("removing", len(all_nodes) - len(connected_nodes), "nodes that are not connected to B")

        rates = dict((uv, rate) for uv, rate in rates.items() if uv[0] in connected_nodes)

        if B - connected_nodes:
            raise Exception("the nodes in B are not all connected")

        if A is not None:
            if A - connected_nodes:
                raise Exception("the A nodes are not all connected to the B nodes")

    return rates
Example #44
def reduce_rates(rates, B, A=None):
    B = set(B)
    if A is not None:
        A = set(A)
        if A.intersection(B):
            raise Exception("A and B share", len(A.intersection(B)), "nodes")
    graph = nx.Graph()
    graph.add_edges_from(rates.keys())

    # remove nodes not connected to B
    # TODO: this only works if B is fully connected
    connected_nodes = nx.node_connected_component(graph, next(iter(B)))
    connected_nodes = set(connected_nodes)
    all_nodes = set(graph.nodes())
    if len(connected_nodes) != len(all_nodes):
        print("removing", len(all_nodes) - len(connected_nodes),
              "nodes that are not connected to B")

        rates = dict((uv, rate) for uv, rate in rates.items()
                     if uv[0] in connected_nodes)

        if B - connected_nodes:
            raise Exception("the nodes in B are not all connected")

        if A is not None:
            if A - connected_nodes:
                raise Exception(
                    "the A nodes are not all connected to the B nodes")

    return rates
Example #45
    def from_hfp_to_connectedcomps(self, index):
        """
        Computes connected components upto a death of a 0-cycle.
        Parameters:
        -----------
                hfp : Homology peristenece class from dionysus2
                filtration : filtration from dionysus2
                num_vertices : number of 0-dimensional simplices in the data (typically number of data points)
                index : filtration index of death of 0-cycle

        Returns:
        --------
        List of connected components as a set
        """
        total_iters = np.arange(self.num_vertices, index + 1)
        adj = np.eye(self.num_vertices)
        for i, doi in enumerate(total_iters):
            current_vertices = list(self.filtration[doi])
            if doi == index:
                repvertex = current_vertices[1]
            adj[current_vertices[0], current_vertices[1]] = 1
        adj = np.maximum(adj, adj.T)
        graph = csr_matrix(adj)
        n_components, labels = connected_components(csgraph=graph,
                                                    directed=False,
                                                    return_labels=True)
        G = nx.from_scipy_sparse_matrix(graph)
        concomps = nx.node_connected_component(G, repvertex)
        # print('Vertex %d is connected to'%(repvertex),concomps)
        return concomps
Example #46
    def _reduce_graph(self, graph, min0list):
        """determine how much of the graph to include in the disconnectivity graph
        """
        used_nodes = []
        # make sure we include the subgraph containing min0
        if len(min0list) > 0:
            for min0 in min0list:
                nodes = nx.node_connected_component(graph, min0)
                if len(nodes) > 2:
                    used_nodes += nodes
                else:
                    print("dgraph: too few nodes connected to", min0)
        if len(used_nodes) == 0:
            # use the biggest connected cluster
            cc = sorted(nx.connected_components(graph), key=len, reverse=True)
            used_nodes += cc[0]  # list is ordered by size of cluster

        if self.subgraph_size is not None:
            node_lists = nx.connected_components(graph)
            for nodes in node_lists:
                if len(nodes) >= self.subgraph_size:
                    used_nodes += nodes

        newgraph = graph.subgraph(used_nodes).copy()
        return newgraph
Example #47
    def __init__(self, num_users, d, num_rounds, L, random_init=True):
        super(COFIBA, self).__init__(num_users, d, num_rounds)

        self.L = L

        if random_init:
            self.G = {
                0: nx.gnp_random_graph(num_users, edge_probability(num_users))
            }

            self.GI = nx.gnp_random_graph(L, edge_probability(num_users))
            c = 0
            self.i_ind = np.zeros(L)
            self.i_clusters = {}
            C = set(range(L))
            while len(C) != 0:
                l = next(iter(C))
                C0 = set(nx.node_connected_component(self.GI, l))
                self.i_clusters[c] = list(C0)
                for l1 in C0:
                    self.i_ind[l1] = c
                c += 1
                C = C - C0
        else:
            self.G = {0: nx.complete_graph(num_users)}

            self.i_ind = np.zeros(L)
            self.i_clusters = {0: [i for i in range(L)]}
            self.GI = nx.complete_graph(L)

        self.alpha = 4 * np.sqrt(d)
        self.num_clusters = np.ones(num_rounds)
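The while-loop that carves item clusters out of GI is equivalent to enumerating its connected components; a minimal sketch with made-up sizes:

import networkx as nx

GI = nx.gnp_random_graph(10, 0.2, seed=1)
i_clusters = {c: sorted(comp) for c, comp in enumerate(nx.connected_components(GI))}
print(i_clusters)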
Example #48
def find_attractor(decStateTransMap):

    '''
        Arguments:
            -- 1. decStateTransMap
        Return:
            -- attractor
    '''
    attractor_list = nx.simple_cycles(decStateTransMap) #in case of deterministic system, any cycle without considering edge direction will be directed cycle.
    attractors = {}
    #attractors['fixed'] = []
    #attractors['cycle'] = []

    undirectedMap = nx.DiGraph.to_undirected(decStateTransMap)

    for u in attractor_list:
        attractors[u[0]] = {}
        if len(u) == 1:
            attractors[u[0]]['type'] = 'fixed'
        else:
            attractors[u[0]]['type'] = 'cycle'

    for v in attractors:
        basin = nx.node_connected_component(undirectedMap, v)
        attractors[v]['basin'] = basin
        attractors[v]['basin-size'] = len(basin)

    sorted_attractors = OrderedDict(sorted(attractors.items(), key=lambda kv: kv[1]['basin-size'], reverse=True))
    return sorted_attractors
Example #49
 def connected_paths(self, path_id, include_self = False):
     if len(self.root) == 1:
         path_ids = np.arange(len(self.polygons_closed))
     else:
         path_ids = list(nx.node_connected_component(self.enclosure, path_id))
     if include_self: 
         return np.array(path_ids)
     return np.setdiff1d(path_ids, [path_id])
Example #50
def wordrank(node):
    response = my_localRtr.get_Rel_one(node,"Fw", len(nx.node_connected_component(my_localRtr.G, node)) )
    nodesandpaths=[]
    for n, p in response.items():
        path=p[1]
        nodesandpaths.append([n,path])
    response=json.dumps(nodesandpaths)
    return make_response(response)
Example #51
def connected_open(graph):
    broken = set()
    for node, degree in graph.degree():
        if degree == 2:    continue
        if node in broken: continue
        broken.update(nx.node_connected_component(graph, node))
    okay = set(graph.nodes()).difference(broken)
    return broken, okay
Example #52
def test_interc_option( gap_edge1, gap_edge2, free_end1, join_options, ograph ):
    if not ograph.has_edge(gap_edge1,gap_edge2):
        print("expected nodes to be connected: {} {}".format(gap_edge1, gap_edge2))
        raise Exception('not connected')
    ograph.remove_edge( gap_edge1, gap_edge2 )
    if gap_edge1 in nx.node_connected_component(ograph,gap_edge2): 
        print("problem: these should be disconnected now:",gap_edge1,gap_edge2)
        raise Exception('too connected i')
    if gap_edge2 in nx.node_connected_component(ograph,free_end1): 
        print("problem: these should have been disconnected all along",free_end1,gap_edge2)
        raise Exception('too connected j')
    if gap_edge1 in nx.node_connected_component(ograph,free_end1): 
        print("problem: these should have been disconnected all along",free_end1,gap_edge1) 
        raise Exception('too connected k')
    interc_score = intercalation_score(gap_edge1,free_end1,gap_edge2,ograph)
    ograph.add_edge(gap_edge1, gap_edge2, length=default_gapsize,contig=False)
    d = far_end(ograph,free_end1)
    join_options.append( (interc_score,((gap_edge1,free_end1),(d,gap_edge2)),((gap_edge1, gap_edge2),) ) )
Example #53
    def get_Rel_one(self,ipt,tp,N,cut=6.0):
        if N>= len( nx.node_connected_component(self.G, ipt) ):
            N=len( nx.node_connected_component(self.G, ipt) )

        T2L,T2P=nx.single_source_dijkstra(self.G,ipt,cutoff=cut,weight=tp)
        count=len(T2L.keys())

        while count<N:
            cut=cut+1.0
            T2L,T2P=nx.single_source_dijkstra(self.G,ipt,cutoff=cut,weight=tp)
            count=len(T2L.keys())

        sorted_T=sorted(T2L.keys(),key=T2L.get)[:N]
        Rel=[]
        for t in sorted_T:
            Rel.append((t,[T2L[t],T2P[t]]))
        Rel=collections.OrderedDict(Rel)

        return Rel
Example #54
def edges_for_group(edges, group_id):
    import pandas as pd
    import networkx as nx
    g = nx.Graph()
    g.add_edges_from(edges)
    
    group_nodes = nx.node_connected_component(g, group_id) # (Used in query below)

    edge_df = pd.DataFrame(edges, columns=['u', 'v'])
    group_edges = edge_df.query('u in @group_nodes or v in @group_nodes')
    return group_edges.values
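A hypothetical usage of edges_for_group, assuming the function above is in scope:

edges = [(1, 2), (2, 3), (4, 5)]
print(edges_for_group(edges, 2))
# [[1 2]
#  [2 3]]   -- only edges touching the component that contains node 2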
Example #55
def connected_edges(G, nodes):
    '''
    Given graph G and list of nodes, return the list of edges that 
    are connected to nodes
    '''
    nodes_in_G = deque()
    for node in nodes:
        if not G.has_node(node): continue
        nodes_in_G.extend(nx.node_connected_component(G, node))
    edges = G.subgraph(nodes_in_G).edges()
    return edges
Example #56
def main_algorithm(G,exploration_strategy):
    K = set()
    p = position(entiers=True)

    while len(K) < len(nx.node_connected_component(G,p)):
        p = position(entiers=True)
        if p not in K:
            if effaceLesAretesBloquees(G,K):
                draw_graph(G)
            K.add( p )
        exploration_strategy(G,K)
Example #57
def test_clustering(G, n, s, t, delim):
    """
    Given a node, find the connected component n is in.
    """
    sub_g = G.subgraph(nx.node_connected_component(G,n ))
    print ("Nodes:")
    pprint(sub_g.nodes())
    print("\nEdges")
    pprint(sub_g.edges())
    print("\nCluster")
    pprint(get_merged_cluster(tumor_g, s, t, delim))
Example #58
def link_test(og,a,b,gapsize=default_gapsize,max_stretch=200000):
    coords = {}
    facing={}
    traverse_and_layout(a,coords,facing,0,-1,og,maxD=200000)
    traverse_and_layout(b,coords,facing,gapsize,+1,og,maxD=200000)
#    print "#x:setup done"
    sys.stdout.flush()
    
    score=0.0
    for n1 in nx.node_connected_component(og,a):
        if abs(coords.get(n1,-1e10))>max_stretch: continue
        for n2 in nx.node_connected_component(og,b):
            if abs(coords.get(n2,1e10))>max_stretch: continue
            if strand_check(n1,n2,coords,facing):
                distance = coords[n2] - coords[n1]
                if distance < max_stretch:
                    x=get_score(n1,n2,distance)
#                    print "#x:partial",n1,n2,x
                    sys.stdout.flush()
                    score += x
    return score
Example #59
def intercalation_score_raw(a,b,c,og):
#    print "#intercalation test:",a,b,c
    if a in nx.node_connected_component(og,b): print("a should not be connected to b",a,b)
    if b in nx.node_connected_component(og,c): print("b should not be connected to c",b,c)
    if a in nx.node_connected_component(og,c): print("a should not be connected to c",a,c)
#    coordinates1={}
    coordinates2={}
    facing={}
    traverse_and_layout(a,coordinates2,facing,0,-1,og)
#    coordinates2=dict(coordinates1)
#    traverse_and_layout(c,coordinates1,facing,1000,+1,og)
    traverse_and_layout(b,coordinates2,facing,default_gapsize,+1,og)
    traverse_and_layout(c,coordinates2,facing,default_gapsize+max(coordinates2.values()),+1,og)

#    score_0=0.0
#    for n1 in nx.node_connected_component(og,a):
#        for n2 in nx.node_connected_component(og,c):
#            if strand_check(n1,n2,coordinates1,facing):
#                distance = coordinates1[n2] - coordinates1[n1]
#                if distance < 200000:
#                    if distance<0: print "wtf1?"
#                    score_0 += get_score(n1,n2,distance)

    score_1=0.0
    for n1 in nx.node_connected_component(og,a):
        for n2 in nx.node_connected_component(og,c):
            if strand_check(n1,n2,coordinates2,facing):
                distance = coordinates2[n2] - coordinates2[n1]
                if distance < 200000:
                    if distance<0: print("wtf2?")
                    score_1 += get_score(n1,n2,distance)

        for n2 in nx.node_connected_component(og,b):
            if strand_check(n1,n2,coordinates2,facing):
                distance = coordinates2[n2] - coordinates2[n1]
                if distance < 200000:
                    if distance<0: print("wtf3?")
                    score_1 += get_score(n1,n2,distance)

    for n1 in nx.node_connected_component(og,b):
        for n2 in nx.node_connected_component(og,c):
            if strand_check(n1,n2,coordinates2,facing):
                distance = coordinates2[n2] - coordinates2[n1]
                if distance < 200000:
                    if distance<0: print("wtf4?")
                    score_1 += get_score(n1,n2,distance)

#    print "#intercalation scores:",score_1,score_0
    return (score_1)
Example #60
	def nodeInConnectedComponent(self, node):
		"""
		Place a node in a connected component of the graph. 

		Arguments:
			node - the node whose connected component is desired. 

		Returns:
			A connected subgraph of the original graph which contains the node.	
		"""	
		gr = self.g
		neighhboringNodes = nx.node_connected_component(gr, node)
		subgraph = nx.subgraph(gr, neighhboringNodes)
		return subgraph
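The component-then-subgraph pattern of Example #60 in minimal standalone form:

import networkx as nx

g = nx.Graph([(1, 2), (2, 3), (4, 5)])
component = nx.node_connected_component(g, 1)      # {1, 2, 3}
sub = g.subgraph(component)
print(sorted(sub.nodes()), sorted(sub.edges()))    # [1, 2, 3] [(1, 2), (2, 3)]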