Example #1
    def setUp(self):
        G1 = cnlti(nx.grid_2d_graph(2, 2), first_label=0, ordering="sorted")
        G2 = cnlti(nx.lollipop_graph(3, 3), first_label=4, ordering="sorted")
        G3 = cnlti(nx.house_graph(), first_label=10, ordering="sorted")
        self.G = nx.union(G1, G2)
        self.G = nx.union(self.G, G3)
        self.DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
        self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1)

        self.gc = []
        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (2, 8), (3, 4), (3, 7), (4, 5),
                          (5, 3), (5, 6), (7, 4), (7, 6), (8, 1), (8, 7)])
        C = [[3, 4, 5, 7], [1, 2, 8], [6]]
        self.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
        C = [[2, 3, 4],[1]]
        self.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
        C = [[1, 2, 3]]
        self.gc.append((G,C))

        # Eppstein's tests
        G = nx.DiGraph({0:[1], 1:[2, 3], 2:[4, 5], 3:[4, 5], 4:[6], 5:[], 6:[]})
        C = [[0], [1], [2], [3], [4], [5], [6]]
        self.gc.append((G,C))

        G = nx.DiGraph({0:[1], 1:[2, 3, 4], 2:[0, 3], 3:[4], 4:[3]})
        C = [[0, 1, 2], [3, 4]]
        self.gc.append((G, C))
Example #2
 def setUp(self):
     G1=cnlti(nx.grid_2d_graph(2,2),first_label=0,ordering="sorted")
     G2=cnlti(nx.lollipop_graph(3,3),first_label=4,ordering="sorted")
     G3=cnlti(nx.house_graph(),first_label=10,ordering="sorted")
     self.G=nx.union(G1,G2)
     self.G=nx.union(self.G,G3)
     self.DG=nx.DiGraph([(1,2),(1,3),(2,3)])
     self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1)
Example #3
def spingraph_from_graph(graph):
    # even_graph = nx.relabel_nodes(graph, lambda x:x*2)
    # odd_graph = nx.relabel_nodes(graph, lambda x:2*x+1)
    # union_graph  = nx.union(even_graph, odd_graph)
    # on the fly union saves about 20% memory, ugly but more efficient
    union_graph  = nx.union(nx.relabel_nodes(graph, lambda x:x*2),nx.relabel_nodes(graph, lambda x:2*x+1))
    # from pudb import set_trace; set_trace()
    for spin_down_node in xrange(1,union_graph.order(),2):
        spin_up_node = spin_down_node -1
        for spin_down_node_neighbour in union_graph[spin_down_node].keys():
            if spin_down_node_neighbour % 2 ==0:
                continue
            if spin_down_node_neighbour < spin_down_node:             # is either top or left neighbour
                if spin_down_node_neighbour == spin_down_node-2:      # is left neighbour
                    union_graph.add_edge(spin_up_node,spin_down_node_neighbour,weight=-p.tso)
                    union_graph.add_edge(spin_down_node_neighbour,spin_up_node,weight=-p.tso)
                else:
                    union_graph.add_edge(spin_up_node,spin_down_node_neighbour,weight=+1j*p.tso)
                    union_graph.add_edge(spin_down_node_neighbour,spin_up_node,weight=-1j*p.tso)
            if spin_down_node_neighbour > spin_down_node:             # is either right or bottom neighbour
                if spin_down_node_neighbour == spin_down_node+2:      # is right neighbour
                    union_graph.add_edge(spin_up_node,spin_down_node_neighbour,weight=p.tso)
                    union_graph.add_edge(spin_down_node_neighbour,spin_up_node,weight=p.tso)
                else:
                    union_graph.add_edge(spin_up_node,spin_down_node_neighbour,weight=-1j*p.tso)
                    union_graph.add_edge(spin_down_node_neighbour,spin_up_node,weight=+1j*p.tso)
    return union_graph
Example #4
def patternsets2(MotifG1, MotifG2):
    # enumerate all possible permutations of node labels,
    # from sharing a single edge up to the smaller edge count of the two motifs (complexity 2^edgenum_max)
    # return a set of possibly isomorphic collapses

    patternset = set()
    edgenum_max = min(MotifG1.number_of_edges(), MotifG2.number_of_edges())

    # select L (1..edgenum_max) edges to overlap
    for L in range(1, edgenum_max + 1):
        print L
        L_subsets = list(itertools.combinations(MotifG1.edges(),L))
        L_subsets2 = list(itertools.combinations(MotifG2.edges(),L))
        for subset1 in L_subsets:
            for subset2 in L_subsets2:
                print "already chose these" +str(L)+" edges in Motif2"
                print subset2
                permutations = list(itertools.permutations(subset1))
                i = 0
                for permutation in permutations:
                    print "this permutation is"
                    print permutation
                    print "in this particular order" + str(i)
                    if MotifG1 == MotifG2:
                        print "waring!!!same motif non-relabled"
                        G = nx.disjoint_union(MotifG1, MotifG2)
                    else:
                        G = nx.union(MotifG1, MotifG2)

                    if len(G) != 0:
                        G2 = nx.Graph()
                        G22 = nx.Graph()
                        Motif2merged_nodes = set()
                        for j in range(0, len(permutation)):
                            edge_1 = permutation[j]
                            edge_2 = subset2[j]
                            print "edge 1"
                            print edge_1
                            print "edge 2"
                            print edge_2

                            if edge_2[0] not in Motif2merged_nodes:
                                G1 = merge_nodes(G, edge_1[0], edge_2[0])
                                Motif2merged_nodes.add(edge_2[0])
                            if edge_2[1] not in Motif2merged_nodes:
                                G2 = merge_nodes(G1, edge_1[1], edge_2[1])
                                Motif2merged_nodes.add(edge_2[1])

                            if edge_2[0] not in Motif2merged_nodes:
                                G11 = merge_nodes(G, edge_1[1], edge_2[0])
                            if edge_2[1] not in Motif2merged_nodes:
                                G22 = merge_nodes(G11, edge_1[0], edge_2[1])

                        patternset.add(G2)
                        patternset.add(G22)
                        print G2.nodes()
                    i += 1


    return patternset
Example #5
    def __call__(self, code, charge_type):
        errors = {}
        for type in code.types:
            errors[type] = code.Syndrome(type, charge_type)

        shrunk_errs, shrunk_exts, matches = {}, {}, {}
        loops_graph = nx.Graph()
        for t1 in code.types:
            [t2, t3] = code.complementaryTypes(t1)
            shrunk_errs[t1] = nx.union(errors[t2], errors[t3])
            shrunk_exts[t1] = code.External[t2] + code.External[t3]
            alt_ext = code.External[t1][0]
            matches[t1] = DSP_Matching(shrunk_errs[t1], shrunk_exts[t1], 2, alt_ext)

            for start in matches[t1]:
                end = matches[t1][start]
                chain = DSP_Path(code.Dual[t1], start, end)
                links = len(chain) -1

                for i in range(links):
                    node1, node2 = chain[i], chain[i+1]
                    edge = (node1, node2)
                    if edge in loops_graph.edges():
                        loops_graph.remove_edge(*edge)
                    else:
                        loops_graph.add_edge(*edge)
        Exts = code.External['red']+code.External['blue']+code.External['green']

        code, loops_graph = correctLoops(code, loops_graph, charge_type)
        while hasConnectedBoundaries(code, loops_graph, Exts):
            ext1, ext2 = connectedBoundaries(loops_graph, Exts)
            code, loops_graph = makeBoundLoop(code, loops_graph, ext1, ext2)
            code, loops_graph = correctLoops(code, loops_graph, charge_type)
        return code
Example #6
def union_all(graphs, rename=(None,) , name=None):
    graphs_names = zip_longest(graphs,rename)
    U, gname = next(graphs_names)
    for H,hname in graphs_names:
        U = nx.union(U, H, (gname,hname),name=name)
        gname = None
    return U
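A minimal usage sketch of union_all (my own, not from the source project), assuming zip_longest is in scope for the definition above and that the input graphs have disjoint node sets as nx.union requires:

import networkx as nx

G1 = nx.path_graph(3)              # nodes 0-2
G2 = nx.path_graph(range(3, 6))    # nodes 3-5
U = union_all([G1, G2])
sorted(U.nodes())                  # [0, 1, 2, 3, 4, 5]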
Example #7
 def _and_gate(num, bp1, idx1, bp2, idx2):
     #
     # AND gates are constructed as follows:
     #
     # Given BP_1 and BP_2, we merge the acc node of BP_1 with the src
     # node of BP_2 and the rej node of BP_1 with the rej node of BP_2.
     #
     t1 = bp1.nlayers
     t2 = bp2.nlayers
     relabel_layers(bp2.graph, t1)
     oldlayer = bp2.graph.node[('src', idx2)]['layer']
     newnode = ('node-%d' % num, num)
     g = nx.union(bp1.graph, bp2.graph)
     g = contract(g, ('acc', idx1), ('src', idx2), newnode)
     g = contract(g, ('rej', idx1), ('rej', idx2), ('rej', num))
     g = relabel(g, num)
     g.node[newnode]['layer'] = oldlayer
     def eval(inp):
         if inp <= t1 - 1:
             return bp1.inp(inp)
         elif inp <= t1 + t2 - 1:
             return bp2.inp(inp - t1)
         else:
             raise Exception("andgate eval failed on %s!" % inp)
     return _Graph(eval, g, t1 + t2, num)
Example #8
File: tsp.py Project: davimba/tsp
    def crearSubGrafo(self,key,key2,key3):

        temp = nx.Graph()

        for i in range(0,6):

            if i == 0:
                temp.add_node(key+str(i)+key3,extremo=True)
                temp.add_node(key2+str(i)+key3,extremo=True)
            elif i == 5:
                temp.add_node(key+str(i)+key3,extremo=True)
                temp.add_node(key2+str(i)+key3,extremo=True)
            else:
                temp.add_node(key+str(i)+key3)
                temp.add_node(key2+str(i)+key3)

            if (i != 0):

                temp.add_edge(key + str(i-1) +key3 , key+str(i)+key3)
                temp.add_edge(key2 + str(i-1)+key3 , key2+str(i)+key3)

            if (i == 2):

                temp.add_edge(key+'2'+key3 , key2+'0'+key3)
                temp.add_edge(key+'0'+key3 , key2+'2'+key3)

            if (i == 5):

                temp.add_edge(key+'3'+key3 , key2+'5'+key3)
                temp.add_edge(key+'5' +key3, key2+'3'+key3)


        self.grafo =  nx.union(self.grafo,temp)
Example #9
    def combine_graphs(self, true_class_bias=1,
                       multi_class_bias=0, multi_class_threshold=0,
                       class_graph=None, instance_graph=None):
        """Combine graphs."""
        probs = np.array([instance_graph.node[v]['prob']
                          for v in instance_graph.nodes()])

        id_offset = max(instance_graph.nodes()) + 1
        offset_pred_graph = \
            nx.relabel_nodes(class_graph, lambda x: x + id_offset)
        union_graph = nx.union(instance_graph, offset_pred_graph)

        mcb = multi_class_bias
        if mcb != 0:
            # add links from each instance to the class nodes
            # with a length inversely proportional to the prob
            for u in instance_graph.nodes():
                # find k-th largest prob value (k=multi_class_threshold)
                # and instantiate only the k most probable edges
                th = sorted(probs[u], reverse=True)[multi_class_threshold]
                for group, prob in enumerate(probs[u]):
                    if prob >= th:
                        group_id = group + id_offset
                        length = (1 - mcb) * self._prob_to_len(prob)
                        union_graph.add_edge(u, group_id, len=length)

        if true_class_bias != 0:
            # add links from each instance to its assigned class
            for u in instance_graph.nodes():
                group_id = instance_graph.node[u]['group'] + id_offset
                union_graph.add_edge(u, group_id,
                                     len=1 - true_class_bias)

        return union_graph
Example #10
    def setUp(self):
        # G is the example graph in Figure 1 from Batagelj and
        # Zaversnik's paper titled An O(m) Algorithm for Cores
        # Decomposition of Networks, 2003,
        # http://arXiv.org/abs/cs/0310049.  With nodes labeled as
        # shown, the 3-core is given by nodes 1-8, the 2-core by nodes
        # 9-16, the 1-core by nodes 17-20 and node 21 is in the
        # 0-core.
        t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1)
        t2 = nx.convert_node_labels_to_integers(t1, 5)
        G = nx.union(t1, t2)
        G.add_edges_from([(3, 7), (2, 11), (11, 5), (11, 12), (5, 12),
                          (12, 19), (12, 18), (3, 9), (7, 9), (7, 10),
                          (9, 10), (9, 20), (17, 13), (13, 14), (14, 15),
                          (15, 16), (16, 13)])
        G.add_node(21)
        self.G = G

        # Create the graph H resulting from the degree sequence
        # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm.

        degseq = [0, 1, 2, 2, 2, 2, 3]
        H = nx.havel_hakimi_graph(degseq)
        mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5}
        self.H = nx.relabel_nodes(H, mapping)
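A quick way to verify the core structure described in the comment above (my own sketch, assuming it runs after this setUp):

cores = nx.core_number(self.G)
# per the comment: 3 for nodes 1-8, 2 for nodes 9-16, 1 for nodes 17-20, 0 for node 21
assert cores[1] == 3 and cores[9] == 2 and cores[17] == 1 and cores[21] == 0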
Example #11
    def copy_and_offset_with_mirror(self, original, offset_val, reflect=False):
        """Add a copy of the graph, offsetting all nodes by a given
        vector. For nodes with the "rung" attribute, add an edge
        between existing node and its offset copy."""
        # make an unchanged copy and an offset/mirrored copy
        orig_copy = original.copy()
        offset_copy = original.copy()
        for nodeid in offset_copy.node:
            # perform an offset
            xyz = offset_copy.node[nodeid]["xyz"]
            xyz = pt_plus_pt(xyz, offset_val)
            if reflect:
                ## also perform a mirror in the y axis
                xyz = [xyz[0], - xyz[1], xyz[2]]
            offset_copy.node[nodeid]["xyz"] = xyz

        # make a union of the original and copy, renaming nodes
        # note that this requires nx to be updated to svn 1520 or above
        # which fixes a bug where union discards node attributes
        new_graph = nx.union(orig_copy, offset_copy, rename=("G-", "H-"))
        # make edges between nodes in original and copy depending on label
        for nodeid in new_graph.node:
            if nodeid.startswith("G-"):
                h_node_id = nodeid.replace("G", "H")
                #connect nodes labelled walkway or join
                if  new_graph.node[nodeid]['label'] == 'walkway':
                    new_graph.node[h_node_id]['label'] = 'walkway'
                    new_graph.add_edge(nodeid, h_node_id, label='walkway')
                if  new_graph.node[nodeid]['label'] == 'join':
                    new_graph.node[h_node_id]['label'] = 'join'
                    new_graph.add_edge(nodeid, h_node_id, label='join')
        new_graph.frame_count = original.frame_count
        return new_graph
Example #12
    def combine_graphs(self, true_class_bias=1,
                       multi_class_bias=0, multi_class_threshold=0,
                       class_graph=None, instance_graph=None):
        """Combine graphs."""
        probs = np.array([instance_graph.node[v]['prob']
                          for v in instance_graph.nodes()])

        id_offset = max(instance_graph.nodes()) + 1
        offset_pred_graph = \
            nx.relabel_nodes(class_graph, lambda x: x + id_offset)
        union_graph = nx.union(instance_graph, offset_pred_graph)

        if multi_class_bias != 0:
            for u in instance_graph.nodes():
                for group, prob in enumerate(probs[u]):
                    if prob >= multi_class_threshold:
                        group_id = group + id_offset
                        weight = prob * multi_class_bias
                        union_graph.add_edge(
                            u, group_id,
                            weight=weight,
                            len=self._weigth_to_len(weight))

        if true_class_bias != 0:
            for u in instance_graph.nodes():
                group_id = instance_graph.node[u]['group'] + id_offset
                union_graph.add_edge(u, group_id,
                                     weight=true_class_bias,
                                     len=self._weigth_to_len(true_class_bias))

        return union_graph
Example #13
    def computeDocumentGraph(self, verbose=False):
        """Create a single document graph from the union of the graphs created
           for each sentence in the archive. Note that the algorithm in NetworkX
           is different based on whether the Python version is greater than or
           equal to 2.6"""
        # Note that this as written does not include the currentGraph in the DocumentGraph
        # Maybe this should be changed
        self.__documentGraph = ConTextMarkup()
        if verbose:
            print "Document markup has %d edges" % self.__document.number_of_edges()
        markups = [e[1] for e in self.__document.edges(data=True) if e[2].get("category") == "markup"]
        if verbose:
            print "Document markup has %d conTextMarkup objects" % len(markups)
        ic = 0
        for i in range(len(markups)):
            # for m in markups:
            m = markups[i]
            if verbose:
                print "markup %d has %d total items including %d targets" % (
                    i,
                    m.number_of_nodes(),
                    m.getNumMarkedTargets(),
                )

            self.__documentGraph = nx.union(m, self.__documentGraph)
            if verbose:
                print "documentGraph now has %d nodes" % self.__documentGraph.number_of_nodes()
Example #14
def _get_best_graph_cost_pair(semantic_forest, head_key, semantic_weight):
    assert isinstance(semantic_forest, SemanticForest)
    assert isinstance(semantic_weight, SemanticWeight)
    basic_ontology = semantic_forest.basic_ontology
    obj = semantic_forest.graph_nodes[head_key]

    if isinstance(obj, GroundedToken):
        function = obj.function
    else:
        raise Exception

    graph = nx.MultiDiGraph()
    graph.add_node(head_key)
    if function.valence == 0:
        cost = get_semantic_tree_graph_cost(semantic_forest, graph, semantic_weight)
        return GraphCostPair(graph, cost)

    else:
        all_pairs = [[] for _ in range(function.valence)]
        for u, v, edge_key, data in semantic_forest.forest_graph.edges(keys=True, data=True):
            v_graph, v_cost = _get_best_graph_cost_pair(semantic_forest, v, semantic_weight)
            arg_idx = data['arg_idx']
            pair = GraphHeadKeyCostPair(v_graph, v, edge_key, v_cost)
            all_pairs[arg_idx].append(pair)

        for arg_idx, pairs in enumerate(all_pairs):
            best_pair = min(pairs, key=lambda p: _get_cost(semantic_forest, head_key, p, semantic_weight))
            graph = nx.union(graph, best_pair.graph)
            graph.add_edge(head_key, best_pair.head, arg_idx=arg_idx, key=best_pair.key)

        cost = get_semantic_tree_graph_cost(semantic_forest, graph, semantic_weight)
        return GraphCostPair(graph, cost)
Example #15
def draw_graph(label_flag=True, remove_isolated=True, different_size=True, iso_level=10, node_size=40):
    G=build_graph(fb.get_friends_network())
    betweenness=nx.betweenness_centrality(G)
    degree=nx.degree_centrality(G)
    degree_num=[ degree[v] for v in G]
    maxdegree=max(degree_num);mindegree=min(degree_num);
    print maxdegree,mindegree
    clustering=nx.clustering(G)
    print nx.transitivity(G)
    # Decide whether to remove isolated components from the graph
    if remove_isolated is True:
        H = nx.empty_graph()
        for SG in nx.connected_component_subgraphs(G):
            if SG.number_of_nodes() > iso_level:
                H = nx.union(SG, H)
        G = H
    # Adjust graph for better presentation
    if different_size is True:
        L = nx.degree(G)
        G.dot_size = {}
        for k, v in L.items():
            G.dot_size[k] = v
        #node_size = [betweenness[v] *1000 for v in G]
        node_size = [G.dot_size[v] * 10 for v in G]
        node_color= [((degree[v]-mindegree))/(maxdegree-mindegree) for v in G]
        #edge_width = [getcommonfriends(u,v) for u,v in G.edges()]
    pos = nx.spring_layout(G, iterations=15)
    nx.draw_networkx_edges(G, pos, alpha=0.05)
    nx.draw_networkx_nodes(G, pos, node_size=node_size, node_color=node_color, vmin=0.0,vmax=1.0, alpha=0.3)
    # Decide whether to show labels
    if label_flag is True:
        nx.draw_networkx_labels(G, pos, font_size=6,alpha=0.1)
    #nx.draw_graphviz(G)
    plt.show()
    return G
Example #16
def core_substitution(graph, orig_cip, new_cip):
    """
    graph is the whole graph.
    subgraph is the interface region into which we will transplant
    new_cip_graph, which is the interface plus the new core.
    """

    # preprocess
    graph = _edge_to_vertex(graph)
    assert (
    set(orig_cip.graph.nodes()) - set(graph.nodes()) == set([])), 'lsgg_compose_util orig_cip_graph not in graph'

    # get isomorphism
    iso = find_all_isomorphisms(orig_cip.interface_graph, new_cip.interface_graph).next()
    if len(iso) != len(orig_cip.interface_graph):
        logger.log(5, "lsgg_compose_util grammar hash collision, discovered in 'core_substution' ")
        return None

    # make graph union (the old graph and the new cip are now floating side by side)
    graph = nx.union(graph, new_cip.graph, rename=('', '-'))

    graph.remove_nodes_from(map(str, orig_cip.core_nodes))

    # merge interface nodes
    for k, v in iso.iteritems():
        merge(graph, str(k), '-' + str(v))

    graph = eg._revert_edge_to_vertex_transform(graph)
    re = nx.convert_node_labels_to_integers(graph)
    return re
Example #17
def draw_graph(username, password, filename='graph.txt', label_flag=True, remove_isolated=True, different_size=True, iso_level=10, node_size=40):
    """Reading data from file and draw the graph.If not exists, create the file and re-scratch data from net"""
    print "Generating graph..."
    try:
        with open(filename, 'r') as f:
            G = p.load(f)
    except:
        G = getgraph(username, password)
        with open(filename, 'w') as f:
            p.dump(G, f)
    #nx.draw(G)
    # Decide whether to remove isolated components from the graph
    if remove_isolated is True:
        H = nx.empty_graph()
        for SG in nx.connected_component_subgraphs(G):
            if SG.number_of_nodes() > iso_level:
                H = nx.union(SG, H)
        G = H
    # Adjust graph for better presentation
    if different_size is True:
        L = nx.degree(G)
        G.dot_size = {}
        for k, v in L.items():
            G.dot_size[k] = v
        node_size = [G.dot_size[v] * 10 for v in G]
    pos = nx.spring_layout(G, iterations=50)
    nx.draw_networkx_edges(G, pos, alpha=0.2)
    nx.draw_networkx_nodes(G, pos, node_size=node_size, node_color='r', alpha=0.3)
    # Decide whether to show labels
    if label_flag is True:
        nx.draw_networkx_labels(G, pos, alpha=0.5)
    #nx.draw_graphviz(G)
    plt.show()

    return G
Example #18
 def mergeNFA(self, nfa1, nfa2):
     nfa1.graph = nx.union(nfa1.graph, nfa2.graph)
     nfa1.graph.add_edge(nfa1.first, nfa2.first, label="epsilon")
     # nfa1.graph.add_edge(nfa2.last, nfa1.last, label='epsilon')
     nfa1.lastArr[nfa2.last] = nfa2.property
     nfa1.refresh()
     return nfa1
Example #19
def load_graph(data_dir):
    import pickle
    aminer = pickle.load(open("D:\\Users\\chenwei\\experiment\\aminer_two_3.pickle"))
    linkedin = pickle.load(open("D:\\Users\\chenwei\\experiment\\linkedin_two_filter_3.pickle"))
    id = 0
    for n in aminer.nodes():
        n
    merge = nx.union(linkedin, aminer)
    return merge   
Example #20
    def __call__(self, code, charge_type):
        l,d = code.depth, code.dimension
        s = {}
        for type in code.types:
            s[type] = code.Syndrome(type, charge_type)

        uc = nx.union(s['green'], nx.union(s['red'], s['blue']))
        for edge in code.Dual['red'].edges():
            break
        scale = common.euclidean_dist(edge[0], edge[1])+.1

        i = 1

        while uc.nodes() != []:
            clusters = GCC_Partition(uc, i*scale)
            for cluster in clusters:
                code, uc = GCC_Annihilate(cluster, code, uc, charge_type, i*scale)
            i += 1
        return code
Example #21
 def _xor_gate(num, bp1, idx1, bp2, idx2):
     #
     # XOR gates are constructed as follows:
     #
     # Given BP_1 and BP_2 (where BP_2 is the "smaller" of the two BPs),
     # produce NOT BP_2, merge the acc node of BP_1 with the src node of
     # NOT BP_2, and merge the rej node of BP_1 with the src node of
     # BP_2.
     #
     assert num > idx1 and num > idx2
     if len(bp1.graph) < len(bp2.graph):
         bp1, bp2 = bp2, bp1
     # choose a temporary idx outside the range of possible indices
     tmpidx = len(bp1.graph) + len(bp2.graph)
     t1 = bp1.nlayers
     t2 = bp2.nlayers
     relabel_layers(bp2.graph, t1)
     oldlayer = bp2.graph.node[('src', idx2)]['layer']
     # construct (G_2, not(G_2))
     bp2not = _not_gate(tmpidx, bp2, idx2)
     # Need to relabel the internal wires as bp2not so we don't end up
     # with duplicate node names when we merge the graphs
     bp2not.graph = relabel_internal(bp2not.graph, tmpidx)
     g = nx.union(bp2.graph, bp2not.graph)
     g = contract(g, ('acc', tmpidx), ('acc', idx2), ('acc', num))
     g = contract(g, ('rej', tmpidx), ('rej', idx2), ('rej', num))
     # construct XOR(G_1, G_2)
     g = nx.union(bp1.graph, g)
     accnode = ('acc-%d' % num, num)
     rejnode = ('rej-%d' % num, num)
     g = contract(g, ('acc', idx1), ('src', tmpidx), accnode)
     g = contract(g, ('rej', idx1), ('src', idx2), rejnode)
     g = relabel(g, num)
     g.node[accnode]['layer'] = oldlayer
     g.node[rejnode]['layer'] = oldlayer
     def eval(inp):
         if inp <= t1 - 1:
             return bp1.inp(inp)
         elif inp <= t1 + t2 - 1:
             return bp2.inp(inp - t1)
         else:
             raise Exception("xorgate eval failed on %s!" % inp)
     return _Graph(eval, g, t1 + t2, num)
Example #22
def test_union_multigraph():
    G = nx.MultiGraph()
    G.add_edge(1, 2, key=0)
    G.add_edge(1, 2, key=1)
    H = nx.MultiGraph()
    H.add_edge(3, 4, key=0)
    H.add_edge(3, 4, key=1)
    GH = nx.union(G, H)
    assert_equal(set(GH), set(G) | set(H))
    assert_equal(set(GH.edges(keys=True)), set(G.edges(keys=True)) | set(H.edges(keys=True)))
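For context, a minimal sketch of my own (not part of the test suite): nx.union requires the two node sets to be disjoint and raises nx.NetworkXError otherwise; passing rename prefixes avoids the clash.

import networkx as nx

G = nx.path_graph(3)
H = nx.path_graph(3)
try:
    nx.union(G, H)                            # labels 0-2 overlap, so this raises
except nx.NetworkXError:
    GH = nx.union(G, H, rename=("G-", "H-"))  # nodes 'G-0'..'G-2', 'H-0'..'H-2'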
Example #23
def create_topology(self, PoP, k, h):
    topology = fnss.Topology()
    for core in range(PoP):
        tmp = fnss.k_ary_tree_topology(k, h)
        for node in tmp.node:
            if tmp.node[node]['type'] != 'root':
                tmp.node[node]['server']=core*(k**(h+1)-1)
        tmp_tree = nx.relabel_nodes(tmp, {node:node+core*(k**(h+1)-1) for node in tmp.node})
        topology = nx.union(topology, tmp_tree)
        # Full mesh in the core of network
        for i in range(core):
            topology.edge[i*(k**(h+1)-1)][core*(k**(h+1)-1)] = {}
            topology.edge[core*(k**(h+1)-1)][i*(k**(h+1)-1)] = {}
            
    return topology


#class Topology(fnss.Topology):
#    def __init__(self, core, k, h, cache_budget):
#        cache_size = cache_budget/float(core*(k**h-1))
#        self.topology = self._create_topology(core, k, h)
#        self.clients = {node:self.topology.node[node] for node in self.topology.node \
#                        if self.topology.node[node]['type']=='leaf'}
#        self.pops = {node:self.topology.node[node] for node in self.topology.node \
#                        if self.topology.node[node]['type']=='root'}
#        self.routers = {node:self.topology.node[node] for node in self.topology.node \
#                        if self.topology.node[node]['type'] in ['leaf','intermediate']}
##        props = open('properties', 'r').readlines()
#        
#
#        self.content_store = {node:cache(cache_size) for node in self.topology.nodes_iter()}
#        self.informations = {node:{} for node in self.topology.nodes_iter()}
#        
#        
#    def _create_topology(self, PoP, k, h):
#        topology = fnss.Topology()
#        for core in range(PoP):
#            tmp = fnss.k_ary_tree_topology(k, h)
#            for node in tmp.node:
#                if tmp.node[node]['type']<>'leaf':
#                    tmp.node[node]['server']=core*(k**(h+1)-1)
#            tmp_tree = nx.relabel_nodes(tmp, {node:node+core*(k**(h+1)-1) for node in tmp.node})
#            topology = nx.union(topology, tmp_tree)
#            # Full mesh in the core of network
#            for i in range(core):
#                topology.edge[i*(k**(h+1)-1)][core*(k**(h+1)-1)] = {}
#                topology.edge[core*(k**(h+1)-1)][i*(k**(h+1)-1)] = {}
#                
#        return topology
#
#        
    
        
    
Example #24
 def test_wf_improved(self):
     G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
     c = nx.closeness_centrality(G)
     cwf = nx.closeness_centrality(G, wf_improved=False)
     res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25,
            4: 0.222, 5: 0.333, 6: 0.222}
     wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5,
               4: 0.667, 5: 1.0, 6: 0.667}
     for n in G:
         assert_almost_equal(c[n], res[n], places=3)
         assert_almost_equal(cwf[n], wf_res[n], places=3)
Example #25
def compute_documentGraph(cd):
    """Create a single document graph from the union of the graphs created
        for each sentence in the archive. Note that the algorithm in NetworkX
        is different based on whether the Python version is greater than or
        equal to 2.6"""
    # Note that this as written does not include the currentGraph in the DocumentGraph
    documentGraph = nx.DiGraph()
    markups = [e[1] for e in cd.edges(data=True) if e[2].get('category') == 'markup']
    for i in range(len(markups)):
        m = markups[i]
        documentGraph = nx.union(m,documentGraph)

    return documentGraph
Example #26
    def copy_and_rotate_around_xy_plane(self, original, tangent):
        #tangent = int(360 / sectors)
        orig_copy = original.copy()
        offset_copy = original.copy()

        for nodeid in offset_copy.node:
            xyz = offset_copy.node[nodeid]["xyz"]
            xyz = xy_rotate(xyz, tangent)
            xyz = [int(round(xyz[0], 0)), int(round(xyz[1], 0)),
                   int(round(xyz[2], 0))]
            offset_copy.node[nodeid]["xyz"] = xyz
        new_graph = nx.union(orig_copy, offset_copy, rename=("G-", "H-"))
        new_graph.frame_count = original.frame_count
        return new_graph
Example #27
 def build_fault_tree_for_app(self):
     recordCloudList = []
     nonOverlapping = 1
     for cloudItem in self.cloudList:    
         if self.faultTree.number_of_nodes() == 0:
             self.faultTree = cloudItem.topology
         else:
             for i in range(len(cloudItem.dataCenterID)):
                 if cloudItem.dataCenterID[i] in recordCloudList:
                     nonOverlapping = 0
                     break                
             if nonOverlapping == 1:
                 self.faultTree \
                         = nx.union(self.faultTree, cloudItem.topology)
         for i in range(len(cloudItem.dataCenterID)):
             recordCloudList.append(cloudItem.dataCenterID[i])
     
     for appItem in self.appList:
         self.faultTree = nx.union(self.faultTree, appItem.topology)
         for dcItem in self.faultTree.node:
             if self.faultTree.node[dcItem].keys()[0] == 'DATACENTER'\
                     and dcItem in appItem.dcList:
                 self.faultTree.add_edge(dcItem, appItem.jobID)
Example #28
def core_substitution(graph, orig_cip_graph, new_cip_graph):
    """
    graph is the whole graph.
    subgraph is the interface region into which we will transplant
    new_cip_graph, which is the interface plus the new core.
    """
    assert( set(orig_cip_graph.nodes()) - set(graph.nodes()) == set([]) ), 'orig_cip_graph not in graph'

    # select only the interfaces of the cips
    new_graph_interface_nodes = [n for n, d in new_cip_graph.nodes(data=True) if 'core' not in d]
    new_cip_interface_graph = nx.subgraph(new_cip_graph, new_graph_interface_nodes)

    original_graph_interface_nodes = [n for n, d in orig_cip_graph.nodes(data=True) if 'core' not in d]
    original_interface_graph = nx.subgraph(orig_cip_graph, original_graph_interface_nodes)
    # get isomorphism between interfaces, if none is found we return an empty graph

    iso = get_good_isomorphism(graph,
                               orig_cip_graph,
                               new_cip_graph,
                               original_interface_graph,
                               new_cip_interface_graph)

    if len(iso) != len(original_interface_graph):
        # print iso
        # draw.display(orig_cip_graph)
        # draw.display(new_cip_graph)
        #draw.graphlearn([orig_cip_graph, new_cip_graph],size=10)
        logger.log(5,"grammar hash collision, discovered in 'core_substution' ")
        return nx.Graph()

    # ok we got an isomorphism so lets do the merging
    graph = nx.union(graph, new_cip_graph, rename=('', '-'))

    # removing old core
    # original_graph_core_nodes = [n for n, d in orig_cip_graph.nodes(data=True) if 'core' in d]
    original_graph_core_nodes = [n for n, d in orig_cip_graph.nodes(data=True) if 'core' in d]

    for n in original_graph_core_nodes:
        graph.remove_node(str(n))

    # merge interfaces
    for k, v in iso.iteritems():
        graph.node[str(k)][
            'interface'] = True  # i am marking the interface only for the backflow probability calculation in graphlearn, this is probably deleteable because we also do this in merge, also this line is superlong Ooo
        merge(graph, str(k), '-' + str(v))
    # unionizing killed my labels so we need to relabel


    return nx.convert_node_labels_to_integers(graph)
Example #29
 def test_disconnected_graph_root_node(self):
     """Test for a single component of a disconnected graph."""
     G = nx.barbell_graph(3, 0)
     H = nx.barbell_graph(3, 0)
     mapping = dict(zip(range(6), 'abcdef'))
     nx.relabel_nodes(H, mapping, copy=False)
     G = nx.union(G, H)
     chains = list(nx.chain_decomposition(G, root='a'))
     expected = [
         [('a', 'b'), ('b', 'c'), ('c', 'a')],
         [('d', 'e'), ('e', 'f'), ('f', 'd')],
     ]
     self.assertEqual(len(chains), len(expected))
     for chain in chains:
         self.assertContainsChain(chain, expected)
Example #30
    def _create_topology(self, PoP, k, h):
        topology = fnss.Topology()
        for core in range(PoP):
            tmp = fnss.k_ary_tree_topology(k, h)
            for node in tmp.node:
                if tmp.node[node]['type'] != 'root':
                    tmp.node[node]['server'] = core * (k ** (h + 1) - 1)
            tmp_tree = nx.relabel_nodes(tmp, {node: node + core * (k ** (h + 1) - 1) for node in tmp.node})
            topology = nx.union(topology, tmp_tree)
            # Full mesh in the core of network
            for i in range(core):
                topology.edge[i * (k ** (h + 1) - 1)][core * (k ** (h + 1) - 1)] = {}
                topology.edge[core * (k ** (h + 1) - 1)][i * (k ** (h + 1) - 1)] = {}

        return topology
Example #31
 def test_disconnected_graph(self):
     """Test for a graph with multiple connected components."""
     G = nx.barbell_graph(3, 0)
     H = nx.barbell_graph(3, 0)
     mapping = dict(zip(range(6), 'abcdef'))
     nx.relabel_nodes(H, mapping, copy=False)
     G = nx.union(G, H)
     chains = list(nx.chain_decomposition(G))
     expected = [
         [(0, 1), (1, 2), (2, 0)],
         [(3, 4), (4, 5), (5, 3)],
         [('a', 'b'), ('b', 'c'), ('c', 'a')],
         [('d', 'e'), ('e', 'f'), ('f', 'd')],
     ]
     self.assertEqual(len(chains), len(expected))
     for chain in chains:
         self.assertContainsChain(chain, expected)
Example #32
 def test_wf_improved(self):
     G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
     c = nx.closeness_centrality(G)
     cwf = nx.closeness_centrality(G, wf_improved=False)
     res = {
         0: 0.25,
         1: 0.375,
         2: 0.375,
         3: 0.25,
         4: 0.222,
         5: 0.333,
         6: 0.222
     }
     wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667}
     for n in G:
         assert_almost_equal(c[n], res[n], places=3)
         assert_almost_equal(cwf[n], wf_res[n], places=3)
Example #33
 def test_disjoint_clique(self):
     """"
     A group of num_clique of size size_clique disjoint, should maximize
     the modularity and have a modularity of 1 - 1/ num_clique
     """
     for _ in range(self.number_of_tests):
         size_clique = random.randint(5, 20)
         num_clique = random.randint(5, 20)
         graph = nx.Graph()
         for i in range(num_clique):
             clique_i = nx.complete_graph(size_clique)
             graph = nx.union(graph, clique_i, rename=("", str(i) + "_"))
         part = dict([])
         for node in graph:
             part[node] = node.split("_")[0].strip()
         mod = co.modularity(part, graph)
         self.assertAlmostEqual(mod, 1. - 1. / float(num_clique))
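As a quick sanity check of the expected value (my own arithmetic, not part of the test): with num_clique = 5 disjoint cliques and each clique taken as its own community, every community holds 1/5 of the edges and 1/5 of the edge endpoints, so Q = 5 * (1/5 - (1/5)^2) = 1 - 1/5 = 0.8, which matches the 1 - 1/num_clique asserted above.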
Example #34
def generateFatTree(k):
    ''' A fat tree is composed of k PODs. Each POD is connected to all the core switches by
        means of the aggregate switches: each one of those is connected to k/2 core switches '''
        
    fatTree = nx.Graph()
    # a k-ary fat tree has (k/2)**2 = k**2/4 core switches
    cores = map(lambda z: ("c"+str(z), {"type": "core"}), range(0, k**2 // 4))
    fatTree.add_nodes_from(cores)
    pods = [generatePod(k, z) for z in range(0,k)]
    for pod in pods:
        fatTree = nx.union(fatTree, pod)
        for node in pod.nodes(data=True):
            if node[1]["type"] == "aggregate1":
                fatTree.add_edges_from([(node[0],core[0]) for core in cores[0:k/2]])
            elif node[1]["type"] == "aggregate2":
                fatTree.add_edges_from([(node[0],core[0]) for core in cores[k/2:k]])
                
    return fatTree
Example #35
    def disabling(disabling_term, right_operand, gt_edges=None):

        if disabling_term is None or right_operand is None:
            return None

        disabling_term.graph = networkx.union(disabling_term.graph,
                                              right_operand.graph)

        l_end = HiddenMarkovModelTopology.__find_end_states(disabling_term)
        r_start = HiddenMarkovModelTopology.__find_start_states(right_operand)
        r_end = HiddenMarkovModelTopology.__find_end_states(right_operand)

        seq_edges = []
        if gt_edges is not None:
            seq_edges[:] = gt_edges[:]

            # add a transition to the disabling term from all the ground terms in the left operand
            for edge in gt_edges:
                print('edge ({}, {})'.format(edge[0].name, edge[1].name))
                print(disabling_term.graph[edge[0]][edge[1]])
                prob = numpy.exp(
                    disabling_term.graph[edge[0]][edge[1]]['probability'])
                #print('edge ({} {}) prob: {}, updated: {}'.format(edge[0].name, edge[1].name, prob, prob / (len(r_start) + 1)))
                prob = prob / (len(r_start) + 1)
                disabling_term.graph[edge[0]][
                    edge[1]]['probability'] = numpy.log(prob)
                for i in range(0, len(r_start)):
                    disabling_term.add_transition(edge[0], r_start[i], prob)
                    seq_edges.append((edge[0], r_start[i]))

        # add a transition to the disabling term for all terms that end the left operand
        for i in range(0, len(l_end)):
            prob = numpy.exp(disabling_term.graph[l_end[i]][disabling_term.end]
                             ['probability']) / len(r_start)
            for j in range(0, len(r_start)):
                disabling_term.add_transition(l_end[i], r_start[j], prob)
                seq_edges.append((l_end[i], r_start[j]))
            disabling_term.graph.remove_edge(l_end[i], disabling_term.end)
        # add a transition from all final states in the right operand to the end state
        for i in range(0, len(r_end)):
            prob = numpy.exp(disabling_term.graph[r_end[i]][right_operand.end]
                             ['probability'])
            disabling_term.add_transition(r_end[i], disabling_term.end, prob)
        disabling_term.bake()
        return disabling_term, seq_edges
Example #36
    def merge_all_subgraphs(self):
        """Generates a single networkx graph object from the subgraphs that have
        been processed

        Returns
        -------
        finalGraph : NetworkX graph obj
            the final graph produced by merging all the subgraphs. The
            produced graph may have disconnected parts

        """

        finalGraph = nx.Graph()

        for subgraph in self.workingSubgraphsList:
            finalGraph = nx.union(finalGraph, subgraph)

        return finalGraph
Example #37
    def _read_graph(self):
        """
        Reads or initializes a graph.

        :return:
        """
        args = self._args
        if self.train_graph is None or self.test_graph is None:
            helper.log(f'Reading graph from {args.input}')
            self._reader = nx.read_adjlist if args.fmt == 'adjlist' else nx.read_edgelist
            self._creator = nx.DiGraph if args.directed else nx.Graph
            self.graph = self._reader(path=args.input,
                                      create_using=self._creator,
                                      nodetype=int)
            self.num_nodes = self.graph.number_of_nodes()
            self.num_edges = self.graph.number_of_edges()
        else:
            self.graph = nx.union(self.train_graph, self.test_graph)
Example #38
def musketeer_on_subgraphs(original, params=None):
    components = nx.connected_component_subgraphs(original)
    merged_G = nx.Graph()

    component_is_edited = params.get('component_is_edited',
                                     [True] * len(components))

    for G_num, G in enumerate(components):
        if component_is_edited[G_num]:
            replica = generate_graph(original=G, params=params)
        else:
            replica = G

        merged_G = nx.union(merged_G, replica)

    merged_G.name = getattr(original, 'name',
                            'graph') + '_replica_' + timeNow()

    return merged_G
Example #39
    def computeDocumentGraph(self):
        """Create a single document graph from the union of the graphs created
           for each sentence in the archive. Note that the algorithm in NetworkX 
           is different based on whether the Python version is greater than or
           equal to 2.6"""
        self.__documentGraph = nx.DiGraph()
        for key in self.__archive.keys():
            if (platform.python_version() <= '2.6'):
                g = self.__archive[key]["graph"]
                self.__documentGraph = nx.union(g, self.__documentGraph)
                # this should work but doesn't preserve the node data attributes
            else:
                nds = g.nodes(
                    data=True)  # for python < 2.6 need to use code below
                for n in nds:
                    self.__documentGraph.add_node(n[0],
                                                  category=n[1]['category'])

                self.__documentGraph.add_edges_from(g.edges())
Example #40
    def _add_application_seperate_method(self, app, tc) -> DiGraph:
        # Separate method: all applications are separate

        appDAG = DiGraph()
        # Add all tasks as nodes to the DAG
        for t in app.verticies.values():
            tgn_id = t.id
            tgn = TaskGraphNode_Task(tgn_id, t)
            appDAG.add_node(tgn_id)
            self.nodes[tgn_id] = tgn

        # For each stream
        for s_id in app.edges.keys():
            s = tc.F[s_id]

            if s.is_self_stream():
                # Simply add edges for self-streams
                appDAG.add_edge(s.sender_task_id, list(s.receiver_task_ids)[0])
            else:
                t_sender_id = s.sender_task_id
                tgn_sender = self.nodes[t_sender_id]

                # Add a node for the stream
                tgn_id = f"{s_id}"
                tgn = TaskGraphNode_Stream(tgn_id, s)
                appDAG.add_node(tgn_id)
                self.nodes[tgn_id] = tgn

                # Connect it to sender task
                appDAG.add_edge(tgn_sender.id, tgn.id)

                # Connect stream node with receiver task nodes
                for reciever_t_id in s.receiver_task_ids:
                    tgn_receiver = self.nodes[reciever_t_id]
                    appDAG.add_edge(tgn.id, tgn_receiver.id)

        self.DAG = nx.union(self.DAG, appDAG)
        if app.id in tc.A_sec:
            self.keyDAGs.append((appDAG, app.id))
        elif app.id in tc.A_app:
            self.normalDAGs.append((appDAG, app.id))
        else:
            raise ValueError
Example #41
    def setup_class(cls):
        # G is the example graph in Figure 1 from Batagelj and
        # Zaversnik's paper titled An O(m) Algorithm for Cores
        # Decomposition of Networks, 2003,
        # http://arXiv.org/abs/cs/0310049.  With nodes labeled as
        # shown, the 3-core is given by nodes 1-8, the 2-core by nodes
        # 9-16, the 1-core by nodes 17-20 and node 21 is in the
        # 0-core.
        t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1)
        t2 = nx.convert_node_labels_to_integers(t1, 5)
        G = nx.union(t1, t2)
        G.add_edges_from(
            [
                (3, 7),
                (2, 11),
                (11, 5),
                (11, 12),
                (5, 12),
                (12, 19),
                (12, 18),
                (3, 9),
                (7, 9),
                (7, 10),
                (9, 10),
                (9, 20),
                (17, 13),
                (13, 14),
                (14, 15),
                (15, 16),
                (16, 13),
            ]
        )
        G.add_node(21)
        cls.G = G

        # Create the graph H resulting from the degree sequence
        # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm.

        degseq = [0, 1, 2, 2, 2, 2, 3]
        H = nx.havel_hakimi_graph(degseq)
        mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5}
        cls.H = nx.relabel_nodes(H, mapping)
Example #42
def decompose_all_sccs(g, 
	score_function=score_subgraph,
	min_loop_size=2,
	max_loop_size=12,
	modes=("pos", "neg"),
	undirected=False):
	'''
	run decomposition on each SCC in g
	'''
	h = nx.DiGraph()
	roots = []

	if undirected:
		component_iter = nx.connected_components(g.to_undirected())
	else:
		component_iter = nx.strongly_connected_components(g)

	for cc in component_iter:
		print ("processing CC", cc)
		cc = g.subgraph(cc)
		cc_tree = bottom_up_partition(cc, 
			score_function=score_function,
			min_loop_size=min_loop_size,
			max_loop_size=max_loop_size,
			modes=modes,
			undirected=undirected)
		
		degrees = dict(cc_tree.out_degree())
		root = min(degrees, key=degrees.get)
		roots.append(root)
		h = nx.union(h, cc_tree)
		print ()

	print (roots)

	if len(roots) > 1:
		# add final root to represent whole network
		all_nodes = frozenset(roots)
		for root in roots:
			h.add_edge(root, all_nodes)

	return h
Example #43
def union_all(graphs, rename=(None, ), name=None):
    """Return the union of all graphs.

    The graphs must be disjoint, otherwise an exception is raised.

    Parameters
    ----------
    graphs : list of graphs
       List of NetworkX graphs

    rename : tuple, default=(None, None)
       Node names of G and H can be changed by specifying the tuple
       rename=('G-','H-') (for example).  Node "u" in G is then renamed
       "G-u" and "v" in H is renamed "H-v".

    name : string
       Specify the name for the union graph.

    Returns
    -------
    U : a graph with the same type as the first graph in list

    Notes
    -----
    To force a disjoint union with node relabeling, use
    disjoint_union_all(G,H) or convert_node_labels_to_integers().

    Graph, edge, and node attributes are propagated to the union graph.
    If a graph attribute is present in multiple graphs, then the value
    from the last graph in the list with that attribute is used.

    See Also
    --------
    union
    disjoint_union_all
    """
    graphs_names = zip_longest(graphs, rename)
    U, gname = next(graphs_names)
    for H, hname in graphs_names:
        U = nx.union(U, H, (gname, hname), name=name)
        gname = None
    return U
Example #44
    def copy_and_offset_with_mirror(self, offset, mirror=False):
        # make an unchanged copy and an offset/mirrored copy
        orig_copy = self.copy()
        offset_copy = self.copy()
        for nodeid in offset_copy.node:
            # perform an offset
            xyz = offset_copy.node[nodeid]["xyz"]
            xyz = pt_plus_pt(xyz, offset)
            if mirror:
                ## also perform a mirror in the y axis
                xyz = [xyz[0], -xyz[1], xyz[2]]
            offset_copy.node[nodeid]["xyz"] = xyz

        # make a union of the original and copy, renaming nodes
        # note that this requires nx to be updated to svn 1520 or above
        # which fixes a bug where union discards node attributes
        new_graph = nx.union(orig_copy, offset_copy, rename=("G-", "H-"))

        # make edges between corresponding nodes in original and copy where needed
        for nodeid in new_graph.node:
            if nodeid.startswith("G-"):
                # print "looking at", nodeid
                if "rung" in new_graph.node[nodeid]:
                    h_node_id = nodeid.replace("G", "H")
                    h_node = new_graph.node[h_node_id]
                    new_graph.add_edge(nodeid, h_node_id)
                    # print("making edge between " + 
                    #       str(new_graph.node[nodeid]["xyz"]) + 
                    #       " : " + 
                    #       str(new_graph.node[h_node_id]["xyz"]))


        # rename nodes back to integers: FIXME doesn't work because bug in nx
        # strips node attributes.
        # new_graph = nx.convert_node_labels_to_integers(new_graph)

        # clear self and add edges from new graph
        self.clear()
        for edge in new_graph.edges_iter():
            self.add_node(edge[0], xyz=new_graph.node[edge[0]]["xyz"])
            self.add_node(edge[1], xyz=new_graph.node[edge[1]]["xyz"])
            self.add_edge(edge[0], edge[1])
Example #45
def gen_hierarchical_net(n, level):
    """Generate hiearchical graph using method proposed of:
    Ravasz E, Barabasi AL, Hierarchical organization in complex networks, PRE, 2003.

    Parameters
    ----------
    n : int
        Number of nodes in the lowest level.
    level : int
        Number of hierarchical levels to create

    Returns
    -------
    networkx graph
        The hierarchically-structured network

    """

    if level == 0:
        return nx.complete_graph(n)
    else:
        fullG = nx.Graph()

        #get lower-order graphs
        for i in range(n):
            fullG = nx.union(gen_hierarchical_net(n, level - 1),
                             fullG,
                             rename=(str(i) + '.', ''))

        edges = []
        suffix = ''
        for l in range(level - 1):
            suffix += '.0'

        #connect outer nodes to the center
        center = '0.0' + suffix
        for node in fullG.nodes():
            if not '0' in node:
                edges.append((node, center))

        fullG.add_edges_from(edges)
        return fullG
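A small usage sketch (my own, with assumed parameter values): each level replicates the previous level n times, so gen_hierarchical_net(n, level) should produce n**(level + 1) nodes.

G = gen_hierarchical_net(5, 2)   # n=5, two hierarchical levels (hypothetical values)
print(G.number_of_nodes())       # expect 5**3 = 125 under this construction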
Example #46
def test_union_attributes():
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.union(g, h, rename=("g", "h"))
    assert set(gh.nodes()) == {"h0", "h1", "g0", "g1"}
    for n in gh:
        graph, node = n
        assert gh.nodes[n] == eval(graph).nodes[int(node)]

    assert gh.graph["attr"] == "attr"
    assert gh.graph["name"] == "h"  # h graph attributes take precendent
Example #47
    def union(self, other, rename=False):
        '''
        Union/add two topologies together to form a larger topology.

        If rename is False, the method assumes that node names 
        don't clash (i.e., you've called addNodeLabelPrefix or 
        you've explicitly chosen names to avoid clashes).  
        If rename is True, nodes/links are relabeled such that the
        new "prefix" for each node is the graph name (i.e., for graph
        name A, node h1 is renamed A_h1).
        
        This method returns a new Topology object and does not modify
        either topology used for unioning.
        '''
        if rename:
            self.nxgraph = Topology.__relabel_graph(self.__nxgraph, self.name)
            other.nxgraph = Topology.__relabel_graph(other.__nxgraph, other.name)
        nxgraph = nx.union(self.nxgraph, other.nxgraph, name="{}_{}".format(self.name, other.name))
        newtopo = Topology(nxgraph=nxgraph, name="{}_{}".format(self.name, other.name))
        return newtopo
Example #48
 def test_ring_clique(self) :
     """"
     then, a group of num_clique of size size_clique connected with only two links to other in a ring
     have a modularity of 1 - 1/ num_clique - num_clique / num_links
     """
     for num_test in range(self.numtest) :
         size_clique = random.randint(5, 20)
         num_clique = random.randint(5, 20)
         g = nx.Graph()
         for i in range(num_clique) :
             clique_i = nx.complete_graph(size_clique)
             g = nx.union(g, clique_i, rename=("",str(i)+"_"))
             if i > 0 :
                 g.add_edge(str(i)+"_0", str(i-1)+"_1")
         g.add_edge("0_0", str(num_clique-1)+"_1")
         part = dict([])
         for node in g :
             part[node] = node.split("_")[0].strip()
         mod = co.modularity(part, g)
         self.assertAlmostEqual(mod, 1. - 1./float(num_clique) - float(num_clique) / float(g.number_of_edges()), msg = "Num clique: " + str(num_clique) + " size_clique: " + str(size_clique) )
Example #49
def merge_graph(g1, g2, merge_type):  # merge two graphs
    """
    :param g1: the first graph object
    :param g2: the second graph object
    :param merge_type: how to merge ("subgraph","union","dis_union", "cartesian", "compose")
    :return:
    """
    if merge_type == "subgraph":
        new_g = nx.subgraph(g1, g2)  # here g2 is a list of nodes
    elif merge_type == "union":  # disjoint concatenation (node sets must not overlap)
        new_g = nx.union(g1, g2)
    elif merge_type == "dis_union":  # disjoint union, all nodes relabeled to be distinct
        new_g = nx.disjoint_union(g1, g2)
    elif merge_type == "cartesian":  # Cartesian product graph
        new_g = nx.cartesian_product(g1, g2)
    elif merge_type == "compose":
        new_g = nx.compose(g1, g2)  # a new graph built on top of g1
    else:
        raise ValueError("error merge_type")
    return new_g
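A toy usage sketch (my own) contrasting a few of the merge types handled above, assuming networkx is imported as nx:

g1 = nx.path_graph([0, 1, 2])
g2 = nx.path_graph([3, 4])
merge_graph(g1, g2, "union").nodes()                     # disjoint labels kept: 0-4
merge_graph(g1, nx.path_graph(3), "dis_union").nodes()   # relabeled to 0-5
merge_graph(g1, nx.path_graph(3), "compose").nodes()     # overlapping labels merged: 0-2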
Example #50
def test_union_attributes():
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph['name'] = 'g'

    h = g.copy()
    h.graph['name'] = 'h'
    h.graph['attr'] = 'attr'
    h.nodes[0]['x'] = 7

    gh = nx.union(g, h, rename=('g', 'h'))
    assert_equal(set(gh.nodes()), set(['h0', 'h1', 'g0', 'g1']))
    for n in gh:
        graph, node = n
        assert_equal(gh.nodes[n], eval(graph).nodes[int(node)])

    assert_equal(gh.graph['attr'], 'attr')
    assert_equal(gh.graph['name'], 'h')  # h graph attributes take precedence
Example #51
    def test_ring(self):
        """
        Test that community found are good using a ring of cliques
        """
        for num_test in range(self.numtest):
            size_clique = random.randint(5, 20)
            num_clique = random.randint(5, 20)
            g = nx.Graph()
            for i in range(num_clique):
                clique_i = nx.complete_graph(size_clique)
                g = nx.union(g, clique_i, rename=("", str(i) + "_"))
                if i > 0:
                    g.add_edge(str(i) + "_0", str(i - 1) + "_1")
            g.add_edge("0_0", str(num_clique - 1) + "_1")
            part = co.best_partition(g)

            for clique in range(num_clique):
                p = part[str(clique) + "_0"]
                for node in range(size_clique):
                    self.assertEqual(p, part[str(clique) + "_" + str(node)])
Example #52
def _get_best_graph_cost_pair(semantic_forest, head_key, semantic_weight):
    assert isinstance(semantic_forest, SemanticForest)
    assert isinstance(semantic_weight, SemanticWeight)
    basic_ontology = semantic_forest.basic_ontology
    obj = semantic_forest.graph_nodes[head_key]

    if isinstance(obj, GroundedToken):
        function = obj.function
    else:
        raise Exception

    graph = nx.MultiDiGraph()
    graph.add_node(head_key)
    if function.valence == 0:
        cost = get_semantic_tree_graph_cost(semantic_forest, graph,
                                            semantic_weight)
        return GraphCostPair(graph, cost)

    else:
        all_pairs = [[] for _ in range(function.valence)]
        for u, v, edge_key, data in semantic_forest.forest_graph.edges(
                keys=True, data=True):
            v_graph, v_cost = _get_best_graph_cost_pair(
                semantic_forest, v, semantic_weight)
            arg_idx = data['arg_idx']
            pair = GraphHeadKeyCostPair(v_graph, v, edge_key, v_cost)
            all_pairs[arg_idx].append(pair)

        for arg_idx, pairs in enumerate(all_pairs):
            best_pair = min(pairs,
                            key=lambda p: _get_cost(semantic_forest, head_key,
                                                    p, semantic_weight))
            graph = nx.union(graph, best_pair.graph)
            graph.add_edge(head_key,
                           best_pair.head,
                           arg_idx=arg_idx,
                           key=best_pair.key)

        cost = get_semantic_tree_graph_cost(semantic_forest, graph,
                                            semantic_weight)
        return GraphCostPair(graph, cost)
Example #53
0
def two_reg_edge_rewiring(n1, k1, n2, k2, rat):
    G1 = nx.random_regular_graph(k1, n1)
    G2 = nx.random_regular_graph(k2, n2)

    H = nx.union(G1, G2, rename=['G1-', 'G2-'])

    mix_mat = nx.degree_mixing_matrix(H)
    mix_mat[k1, k2] = rat
    mix_mat[k2, k1] = mix_mat[k1, k2]
    mix_mat[k1, k1] -= rat
    mix_mat[k2, k2] -= rat
    print mix_mat

    for i in range((n1 * k1 + n2 * k2)):
        selected_H1_edge = H.edges()[rd.randint(0,
                                                (n1 * k1 + n2 * k2) / 2 - 1)]
        H1_edge_ele_1 = selected_H1_edge[0]
        H1_edge_ele_2 = selected_H1_edge[1]

        while 1:
            selected_H2_edge = H.edges()[rd.randint(
                0, (n1 * k1 + n2 * k2) / 2 - 1)]
            H2_edge_ele_1 = selected_H2_edge[0]
            H2_edge_ele_2 = selected_H2_edge[1]
            if (H1_edge_ele_1 != H2_edge_ele_1
                    and H1_edge_ele_2 != H2_edge_ele_2
                    and H1_edge_ele_1 != H2_edge_ele_2
                    and H1_edge_ele_2 != H2_edge_ele_1
                    and H1_edge_ele_1 not in H[H2_edge_ele_1]
                    and H1_edge_ele_2 not in H[H2_edge_ele_2]):
                break

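        # accept the swap with probability equal to the ratio of target
        # mixing-matrix weights for the proposed degree pairs over the
        # weights for the current pairs (a Metropolis-style acceptance step)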
        proposed = (mix_mat[len(H[H1_edge_ele_1]), len(H[H2_edge_ele_1])] *
                    mix_mat[len(H[H1_edge_ele_2]), len(H[H2_edge_ele_2])])
        current = (mix_mat[len(H[H1_edge_ele_1]), len(H[H1_edge_ele_2])] *
                   mix_mat[len(H[H2_edge_ele_1]), len(H[H2_edge_ele_2])])
        if proposed / current > rd.random():
            H.add_edge(H1_edge_ele_1, H2_edge_ele_1)
            H.add_edge(H1_edge_ele_2, H2_edge_ele_2)
            H.remove_edge(H1_edge_ele_1, H1_edge_ele_2)
            H.remove_edge(H2_edge_ele_1, H2_edge_ele_2)

    return H
Example #54
0
def merge_graphs(g, h, attributes=list(), merge_parents=False):
    """

    :param g: a networkx graph
    :param h: a networkx graph (preferably the smaller graph being added to g)
    :param attributes: A list of attributes to match on.  If present, all other attributes will be Don't Care's
    :param merge_parents: If True, nodes that have children will still be merged.  Default is 'False'
    :return: returns the union of the graphs per the CAGS schema
    """
    # TODO: ADD the ability to pass in a set of edges which will also be created manually. (link graph roots, etc)
    #       Alternately, users could be expected to create edges by retrieving nodes by attribute

    # create  a union graph
    G = nx.union(g, h, rename=('g-', 'h-'))

    # Look through graph for duplicates
    for n1 in h.nodes(data=True):
        # The None:None node is likely to be a root node in a nested dictionary so don't merge it
        if not ("" in n1[1] and n1[1][""] == "") and \
                (merge_parents == True or len(g.successors(n1[0])) == 0):  # Was 'None' instead of "" but that doesn't work in graphs
            for n2 in g.nodes(data=True):
                # Match nodes
                match = True
                if len(attributes) == 0:
                    if n1[1] != n2[1]:
                        match = False
                else:
                    # for each attribute to match...
                    for attribute in attributes:
                        # check if node 1 has the attribute
                        if attribute in n1[1]:
                            # if it does, match is false if n2 doesn't have the attribute or has a different value
                            if attribute not in n2[
                                    1] or n1[1][attribute] != n2[1][attribute]:
                                match = False
                if match:
                    # if there is a duplicate based on attributes, merge the nodes
                    G = nx.relabel_nodes(G, {"h-" + n1[0]: "g-" + n2[0]})
                    print "matched, {0} and {1}".format(n1[0], n2[0])

    return G
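
# A compact standalone sketch of the same merge-by-attribute idea (toy graphs,
# illustrative only): union the graphs with prefixes, then collapse node pairs
# whose chosen attribute matches; nx.contracted_nodes does the merge in one step.
import networkx as nx

g = nx.Graph()
g.add_node('a', ip='10.0.0.1')
h = nx.Graph()
h.add_node('x', ip='10.0.0.1')   # same attribute value -> should be merged
h.add_node('y', ip='10.0.0.2')

G = nx.union(g, h, rename=('g-', 'h-'))
for hn, hdata in h.nodes(data=True):
    for gn, gdata in g.nodes(data=True):
        if hdata.get('ip') == gdata.get('ip'):
            G = nx.contracted_nodes(G, 'g-' + gn, 'h-' + hn, self_loops=False)

print(sorted(G.nodes()))  # ['g-a', 'h-y'] -- the duplicate node was collapsed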
Example #55
0
    def test_ring(self):
        """
        Test that the communities found are good using a ring of cliques
        """
        for _ in range(self.number_of_tests):
            size_clique = random.randint(5, 20)
            num_clique = random.randint(5, 20)
            graph = nx.Graph()
            for i in range(num_clique):
                clique_i = nx.complete_graph(size_clique)
                graph = nx.union(graph, clique_i, rename=("", str(i) + "_"))
                if i > 0:
                    graph.add_edge(str(i) + "_0", str(i - 1) + "_1")
            graph.add_edge("0_0", str(num_clique - 1) + "_1")
            part = co.best_partition(graph)

            for clique in range(num_clique):
                part_name = part[str(clique) + "_0"]
                for node in range(size_clique):
                    expected = part[str(clique) + "_" + str(node)]
                    self.assertEqual(part_name, expected)
Example #56
0
def FourClusterGraph(n1, n2, n3, n4):
    G = ThreeClusterGraph(n1, n2, n3)
    H = nx.complete_graph(n4)
    mapping = {}
    for i in range(n4):
        mapping[i] = i + n1 + n2 + n3
    H = nx.relabel_nodes(H, mapping=mapping)

    I = nx.union(G, H)
    I.add_edge(n1 + n2 + n3 - 1, n1 + n2 + n3)
    I.weighted = False
    #set weight to 1
    for e in I.edges_iter():
        I.add_edge(e[0], e[1], weight=1)

    print(I.number_of_edges())
    print(I.number_of_nodes())

    print(I.edges())
    #Draw(I);
    return I
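
# ThreeClusterGraph is not shown in this snippet; a hypothetical stand-in (an
# assumption, not the original implementation) that chains three complete graphs
# of sizes n1, n2, n3 with single bridging edges could look like:
import networkx as nx

def ThreeClusterGraph(n1, n2, n3):
    G = nx.complete_graph(n1)
    H = nx.relabel_nodes(nx.complete_graph(n2), {i: i + n1 for i in range(n2)})
    K = nx.relabel_nodes(nx.complete_graph(n3), {i: i + n1 + n2 for i in range(n3)})
    G = nx.union(nx.union(G, H), K)
    G.add_edge(n1 - 1, n1)            # bridge cluster 1 -> cluster 2
    G.add_edge(n1 + n2 - 1, n1 + n2)  # bridge cluster 2 -> cluster 3
    return G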
Example #57
0
    def __generate_cliques(self, min_size, max_size, k):

        cliques = {}

        for i in range(k):

            cliques[i] = []

            clique = self.__generate_clique(random.randint(min_size, max_size))

            for node in nx.nodes(clique):
                clique.nodes[node]['clique'] = i

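            # note: nx.union re-prefixes the accumulated graph with 'A-' on each
            # iteration, so labels nest ('A-A-B-0', ...); the call to
            # convert_node_labels_to_integers below flattens them again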
            self.__G = nx.union(self.__G, clique, rename=('A-', 'B-'))

        self.__G = nx.convert_node_labels_to_integers(self.__G)

        for node in nx.nodes(self.__G):
            cliques[self.__G.nodes[node]['clique']].append(node)

        return cliques
Example #58
0
def generate_nbad_either(num_nodes: int):
    n = int(np.round((np.sqrt(4 * num_nodes + 1) - 3) / 2))

    nnodes_U = n**2 - n + 3
    U = generate_nbad_unioning_fast(nnodes_U)

    nnodes_M = 4 * (n - 1) + 5
    M = generate_nbad_neighborbl(nnodes_M)

    M.remove_node('t')
    U.remove_node('s')

    G = nx.union(M, U, rename=('M-', 'U-'))

    #merge the appropriate nodes
    for i in range(n):
        #connect 'M-t_{i}' with 'U-({i},0)'
        G.add_edge('M-t_{}'.format(i), 'U-({}, 0)'.format(i))

    G = nx.relabel_nodes(G, {'M-s': 's', 'U-t': 't'})
    return G
Example #59
0
def compute_node_equivalence(g1, g2):
    helper_graph = nx.union(g1, g2, rename=(
        'g1-',
        'g2-'))  #nodes start with `rename` prefix. Used to check for cycles
    equivalence_g1 = {}  # maps nodes from g1 to g2
    equivalence_g2 = {}  # maps nodes from g2 to g1
    len_g1 = len(g1.nodes)
    len_g2 = len(g2.nodes)
    similarity_matrix = compute_node_similarity_matrix(g1, g2)
    try:
        similarity_matrix = similarity_flooding(similarity_matrix,
                                                g1,
                                                g2,
                                                alpha=0.1,
                                                n_iter=50)
    except Exception:
        print("Similarity flooding failed. PCG graph has no nodes or edges.")
    edit_cost_matrix = compute_edit_cost_matrix(similarity_matrix, 0.4, 0.4)
    rows, cols = linear_sum_assignment(edit_cost_matrix)
    nodes_g1 = list(g1.nodes)
    nodes_g2 = list(g2.nodes)
    for pairs in zip(rows, cols):
        if pairs[0] < len_g1 and pairs[1] < len_g2:
            # nodes are equivalent
            eq_g1 = nodes_g1[pairs[0]]
            eq_g2 = nodes_g2[pairs[1]]
            merged_helper = nx.algorithms.minors.contracted_nodes(
                helper_graph, 'g1-' + eq_g1, 'g2-' + eq_g2, self_loops=False)
            try:
                if 'g1-inputs.0' in merged_helper:
                    cycles = nx.algorithms.cycles.find_cycle(
                        merged_helper, source='g1-inputs.0')
                if 'g2-inputs.0' in merged_helper:
                    cycles = nx.algorithms.cycles.find_cycle(
                        merged_helper, source='g2-inputs.0')
            except nx.NetworkXNoCycle:
                equivalence_g2[eq_g2] = eq_g1
                equivalence_g1[eq_g1] = eq_g2
                helper_graph = merged_helper
    return equivalence_g1, equivalence_g2
Example #60
0
def kcore_decompose_graph(graph=None, max_deg=None):
    g = graph.copy()
    high_degree_nodes = []
    low_degree_nodes = []
    m = min([e[1] for e in g.degree()])
    t = max(max_deg, m)

    for n in g.nodes():
        if len(list(g.neighbors(n))) > t:
            high_degree_nodes.append(n)
        else:
            low_degree_nodes.append(n)
    g_high_degree0 = g.subgraph(high_degree_nodes)
    n_nodes = len(high_degree_nodes)

    g_low_degree0 = g.subgraph(low_degree_nodes)

    g_union = g_low_degree0.copy()

    while n_nodes > 0:
        high_degree_nodes = []
        low_degree_nodes = []
        m = min([e[1] for e in g_high_degree0.degree()])
        t = max(max_deg, m)

        for n in g_high_degree0.nodes():
            if len(list(g_high_degree0.neighbors(n))) > t:
                high_degree_nodes.append(n)
            else:
                low_degree_nodes.append(n)
        g_high_degree = g_high_degree0.subgraph(high_degree_nodes)
        n_nodes = len(high_degree_nodes)

        g_low_degree = g_high_degree0.subgraph(low_degree_nodes)

        g_union = nx.union(g_union, g_low_degree)

        g_high_degree0 = g_high_degree

    return g_union
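
# A minimal usage sketch (illustrative input): nodes whose degree is <= max_deg
# are kept immediately; higher-degree nodes are re-examined on the residual
# subgraph until none remain, and the peeled layers are unioned back together.
import networkx as nx

G = nx.barabasi_albert_graph(200, 3, seed=0)
decomposed = kcore_decompose_graph(graph=G, max_deg=5)
print(decomposed.number_of_nodes(), decomposed.number_of_edges())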