Example No. 1
def test_networkx(n):
    """return the number of seconds required to run the algorithm
    """
    g = make_graph(n, 'networkx')

    # time only the arborescence computation, not the graph construction
    s = time.time()
    nx.maximum_spanning_arborescence(g, attr='weight', default=1)
    return time.time() - s
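For reference, a self-contained variant of this benchmark might look like the sketch below; the random weighted DiGraph and the edges added from node 0 are only stand-ins for whatever the snippet's make_graph helper builds, which is not shown here.

import random
import time

import networkx as nx


def time_arborescence(n, seed=0):
    """Time nx.maximum_spanning_arborescence on a random weighted digraph."""
    rng = random.Random(seed)
    g = nx.gnp_random_graph(n, 0.3, seed=seed, directed=True)
    # make every node reachable from node 0 so a spanning arborescence exists
    g.add_edges_from((0, v) for v in range(1, n))
    for u, v in g.edges():
        g[u][v]['weight'] = rng.random()
    start = time.time()
    nx.maximum_spanning_arborescence(g, attr='weight', default=1)
    return time.time() - start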
Example No. 2
    def UpdateTreeFromDiGraph(self, root, forest=False):

        try:
            self.DiGraph
        except AttributeError:
            print('Run UpdateDiGraph!')
            return

        if not forest:
            # spanning arborescence: a tree rooted at a single node
            self.Tree = nx.maximum_spanning_arborescence(self.DiGraph.copy())
            self.TreeRoot = root
        else:
            # maximum branching: a forest, so there is no single root
            self.Tree = nx.maximum_branching(self.DiGraph.copy())
            self.TreeRoot = None

        self.TransferAttributes(self.Tree, self.DiGraph)
        self.UpdateReducedTree()

        # mark the root node (Graph.node was removed in newer NetworkX releases; use .nodes)
        self.Tree.nodes[root]['root'] = '1'
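The two NetworkX calls used here differ in what they guarantee: maximum_spanning_arborescence must return a tree that is rooted at a single node and covers every node (it raises nx.NetworkXException when no such tree exists), while maximum_branching returns a maximum-weight forest in which every node has at most one incoming edge. A small sketch of the contrast, on a toy graph of my own:

import networkx as nx

G = nx.DiGraph()
G.add_weighted_edges_from([
    ('a', 'b', 3.0),
    ('b', 'c', 2.0),
    ('d', 'e', 5.0),  # second component: no single root can reach every node
])

# a maximum branching always exists: a max-weight forest with in-degree <= 1
forest = nx.maximum_branching(G)
print(sorted(forest.edges()))

# a spanning arborescence does not exist for this disconnected graph
try:
    nx.maximum_spanning_arborescence(G)
except nx.NetworkXException as err:
    print('no spanning arborescence:', err)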
Example No. 3
def VERTEX_SEARCH(x, M0):
    Gx = nx.DiGraph()

    for i in range(M0):
        if x[i] == 1: Gx.add_edge(E[i][0], E[i][1], weight=0.0)

    # Vx1 contains all vertices that are guaranteed reachable from s
    Vx1 = list(nx.descendants(Gx, V[0])) if V[0] in Gx else []

    for i in range(M0, M):
        Gx.add_edge(E[i][0], E[i][1], weight=logp[i])
    assert (V[0] in Gx.nodes())

    # Vx2 contains all vertices that may be reachable from s
    Vx2 = list(nx.descendants(Gx, V[0]))
    assert (all(Vi in Vx2 for Vi in Vx1))

    # Remove nodes in Gx that are not in Vx2; iterate over a copy of the node
    # list, since removing nodes from the live view would raise a RuntimeError
    for Vi in list(Gx.nodes()):
        if Vi != V[0] and Vi not in Vx2:
            Gx.remove_node(Vi)

    # Initial setup for vertex search
    Vx = [Vi for Vi in Vx2 if Vi not in Vx1]
    Nx = len(Vx)
    SearchTree = [([1] * Nx, 0)]
    px = 0.0
    t = 0

    while t < len(SearchTree):
        y, j = SearchTree[t]

        if j < Nx:
            SearchTree.append((y, j + 1))
            z = y[:j] + [0] + [1] * (Nx - j - 1)
            Vz = [V[0]] + Vx1 + [Vx[k] for k in range(Nx) if z[k] == 1]

            if sum(L[n] for n in range(N) if V[n] in Vz) > L_THRES:
                SearchTree.append((z, j + 1))

        elif j == Nx:
            Vy = [V[0]] + Vx1 + [Vx[k] for k in range(Nx) if y[k] == 1]
            Gy = Gx.copy()

            # Remove nodes not in Vy (again iterating over a copy of the node list)
            for Vi in list(Gy.nodes()):
                if Vi not in Vy:
                    Gy.remove_node(Vi)

            if len(nx.descendants(Gy, V[0])) + 1 == len(Vy):
                Ar = nx.maximum_spanning_arborescence(Gy, attr="weight")
                logpy = sum([
                    Ar.get_edge_data(Vi, Vj)["weight"]
                    for Vi, Vj in Ar.edges()
                ])
                px = max(px, exp(logpy))

        t += 1

    return px
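The feasibility test len(nx.descendants(Gy, V[0])) + 1 == len(Vy) simply asks whether every retained vertex is reachable from the source, i.e. whether a spanning arborescence rooted there can exist at all. A standalone sketch of that check (the graph and the helper name below are my own, not from the snippet):

import networkx as nx


def spans_from(G, source):
    """True when every node of G is reachable from `source`, so a spanning
    arborescence rooted at `source` can exist."""
    return len(nx.descendants(G, source)) + 1 == G.number_of_nodes()


G = nx.DiGraph([(0, 1), (1, 2)])
assert spans_from(G, 0)
G.add_node(3)  # an isolated node is unreachable from 0
assert not spans_from(G, 0)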
Example No. 4
def smst(G, x):
    g = []
    #Finding a maximum spanning arborescence of the directed, weighted graph
    M = nx.maximum_spanning_arborescence(G)
    total = 0
    for (u, v) in M.edges():
        w = M[u][v]['weight']
        u = int(u)
        v = int(v)
        total = total + w
        g.append((u, v, w))
    #'graph' is a module-level list collecting (x, edge list, total weight) tuples
    graph.append((x, g, total))
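The manual weight accumulation in smst can be cross-checked with Graph.size, which sums an edge attribute directly; a small sketch on a toy graph of my own:

import networkx as nx

G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 0.5), (0, 2, 1.5), (1, 2, 2.0)])
M = nx.maximum_spanning_arborescence(G)

# size(weight=...) sums the given edge attribute over all edges
total = M.size(weight='weight')
manual = sum(M[u][v]['weight'] for u, v in M.edges())
assert abs(total - manual) < 1e-12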
Example No. 5
    def build_tree(self, data):
        graph = self.build_graph(data)
        # note: from_numpy_matrix was removed in NetworkX 3.0 (from_numpy_array replaces it)
        return nx.maximum_spanning_arborescence(
            nx.from_numpy_matrix(graph).to_directed())
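Note that nx.from_numpy_matrix was removed in NetworkX 3.0; on current releases the same tree can be built from a dense weight matrix with from_numpy_array. A minimal sketch, with a made-up matrix standing in for whatever build_graph returns:

import numpy as np
import networkx as nx

# symmetric toy weight matrix; zeros mean "no edge"
weights = np.array([
    [0.0, 2.0, 1.0],
    [2.0, 0.0, 3.0],
    [1.0, 3.0, 0.0],
])

g = nx.from_numpy_array(weights)  # undirected graph with a 'weight' edge attribute
tree = nx.maximum_spanning_arborescence(g.to_directed())
print(sorted(tree.edges(data='weight')))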
Example No. 6
def submax(G, paths):
    node_list = list(G.nodes())
    len_d = len(node_list)
    #Extracting the attributes from the graphml file
    dy = nx.get_node_attributes(G, 'chunk_no')
    ds = nx.get_node_attributes(G, 'position')
    dt = nx.get_node_attributes(G, 'length_word')
    #calculating the final position index of every node
    df = {key: (ds[key] + dt[key] - 1) for key in ds}
    #Extracting the weight of every edge
    w = nx.get_edge_attributes(G, 'weight')
    arcs = []
    for (u, v) in G.edges():
        y = G[u][v]['weight']
        u = int(u)
        v = int(v)
        arcs.append(
            Arc(v, y, u)
        )  #the list arcs stores the destination, edge weight and source of every arc as a named tuple
    bestInEdge = {}  #dictionary mapping every node to its maximum weighted incoming arc
    z = {}  #dictionary mapping every node to the list of its incoming arcs
    #looking for the maximum weighted incoming arc of every node
    for i in range(len_d):
        n = int(node_list[i])
        #build a fresh list per node; reusing one shared list would leave every
        #entry of z pointing at the same object
        incoming = [arc for arc in arcs if arc.tail == n]
        z[n] = incoming
        if incoming:
            bestInEdge[n] = max(incoming)  #the maximum weighted incoming arc of the node
    #sorting the nodes by the weight of their maximum weighted incoming arc
    d = sorted(bestInEdge.values(), key=operator.itemgetter(1))
    contradict = {}  #dictionary mapping every node to a list of conflicting nodes
    for n in node_list:
        c = dy[str(n)]
        s = ds[str(n)]
        f = df[str(n)]
        for node in node_list:
            c1 = dy[str(node)]
            s1 = ds[str(node)]
            f1 = df[str(node)]
            if node != n and c == c1:  #identifying the conflicting nodes
                #A conflicting node shares this node's chunk number and its starting
                #or ending position index falls inside this node's span.
                if s <= s1 <= f or s <= f1 <= f:
                    contradict.setdefault(int(n), []).append(int(node))
    for k in node_list:
        if int(k) not in contradict:
            contradict[int(k)] = []
    key_list = []
    #list storing the nodes in the descending order of their maximum weighted incoming arc
    for arc in d:
        key_list.append(arc.tail)
    key_list.reverse()
    #removing the conflicting nodes from the contradict dictionary: for each node,
    #taken in descending order of best incoming weight, drop every node that conflicts with it
    for i in key_list:
        if i in contradict:
            for x in contradict[i]:
                if x in contradict:
                    del contradict[x]
    spanned = list(contradict.keys())  #all of the non-conflicting nodes
    rem = []
    for i in node_list:
        if int(i) not in spanned:
            rem.append(int(i))  #storing a list of conflicting nodes
    #Creating the subgraph of non-conflicting nodes by pruning the original graph
    #in place (X is simply another name for G here, not a copy)
    X = G
    for i in rem:
        #removing the conflicting nodes and their incident edges to form the subgraph
        X.remove_node(str(i))

    try:
        H = nx.maximum_spanning_arborescence(
            X)  #forming a maximum spanning arborescence from the subgraph
        s = 0
        p = []
        #calculating the weight of the maximum spanning subtree
        for (u, v) in H.edges():
            w = H[u][v]['weight']
            s = s + w
            p.append((u, v, w))
        subgraph.append(
            (paths, spanned, p, s)
        )  #storing the path, edge sequence and total weight of the maximum spanning subtree
    except nx.NetworkXException:
        #X has no spanning arborescence (typically because it is disconnected),
        #so fall back to its largest connected component
        D = X.to_undirected()
        #Storing the connected components of the disconnected graph formed by the non-conflicting nodes
        components = sorted(nx.connected_components(D), key=len, reverse=True)
        #Selecting the connected component with the greatest number of nodes
        k = list(components[0])
        if k:
            for i in spanned:
                if str(i) not in k:
                    #removing the nodes of the smaller, disconnected components
                    X.remove_node(str(i))
        H = nx.maximum_spanning_arborescence(
            X)  #forming a maximum spanning arborescence from the subgraph
        s = 0
        p = []
        #calculating the weight of the maximum spanning subtree
        for (u, v) in H.edges():
            w = H[u][v]['weight']
            s = s + w
            p.append((u, v, w))
        subgraph.append(
            (paths, k, p, s)
        )  #storing the path, edge sequence and total weight of the maximum spanning subtree
        return
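The fallback branch above keeps only the largest connected component of the undirected copy before retrying. The same idea can be written directly on the digraph with weakly_connected_components, since the weak components of a digraph are exactly the connected components of its undirected copy. A compact sketch with toy data of my own:

import networkx as nx


def largest_component_arborescence(G):
    """Restrict G to its largest weakly connected component and return the
    maximum spanning arborescence of that subgraph (which may itself still
    fail if the component has no spanning arborescence)."""
    biggest = max(nx.weakly_connected_components(G), key=len)
    return nx.maximum_spanning_arborescence(G.subgraph(biggest).copy())


G = nx.DiGraph()
G.add_weighted_edges_from([('a', 'b', 1.0), ('a', 'c', 2.0)])
G.add_node('z')  # isolated node makes the full graph unspannable
print(sorted(largest_component_arborescence(G).edges()))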
Example No. 7
def branching_rest():
    graph, devices, vulns = generate_graph(5, 2, 3, chance=75)
    # exercise both variants: a maximum branching (forest) and a maximum spanning arborescence
    branching = maximum_branching(graph)
    arborescence = maximum_spanning_arborescence(graph)
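NetworkX also ships recognition predicates that are useful in a test like this: is_branching checks for a forest with in-degree at most one, and is_arborescence additionally requires a single root that spans the graph. A short sketch; the hard-coded graph merely stands in for whatever generate_graph produces:

import networkx as nx
from networkx.algorithms.tree.branchings import (
    maximum_branching, maximum_spanning_arborescence)

g = nx.DiGraph()
g.add_weighted_edges_from([(0, 1, 4.0), (0, 2, 1.0), (2, 1, 3.0), (1, 3, 2.0)])

branching = maximum_branching(g)
arborescence = maximum_spanning_arborescence(g)

# every arborescence is a branching, but not every branching spans from one root
assert nx.is_branching(branching)
assert nx.is_arborescence(arborescence)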
Example No. 8
def generate_hydra_fifo_product_digraphs(x_slots, y_slots, root, ext, avoid=None, seed=12345, peturbation_delta=1e-6):
    '''
    Generates a simple hydra io network through the following algorithm:

        1. create a network with all connections between neighbors

        2. starting from the root node, remove each incoming connection that has a matching outgoing connection

        3. create a maximum spanning arborescence using fifo_load(head) * fifo_load(tail) as the edge weight

        4. perturb the solution N times by

            1. randomly adding back 25% of the connections pruned in step (3)

            2. repruning

            3. keeping the most performant network

        5. continue to perturb until the fractional improvement drops below ``peturbation_delta``

    There is no unique solution, so a random seed is used to generate the
    network.

    :param x_slots: maximum number of x positions for nodes

    :param y_slots: maximum number of y positions for nodes

    :param root: ``(x,y)`` position for root node, ``0 <= x < x_slots``, ``0 <= y < y_slots``

    :param ext: position of the node representing the external system

    :param avoid: ``list`` of ``(x,y)`` nodes to exclude from network

    :param seed: random seed for configuration (ensures repeatability)

    :param peturbation_delta: keep trying perturbations while the fractional change in the network score is at least this large (if None, skip the perturbation check)

    :returns: three networkx ``DiGraph`` objects: the upstream miso network, the downstream miso network, and the mosi network

    '''

    random.seed(seed)

    g = generate_2d_grid_digraph(x_slots, y_slots, root, ext)
    #    g = nx.grid_2d_graph(x_slots, y_slots, create_using=nx.DiGraph) # base graph for upstream

    if avoid:
        g.remove_nodes_from(avoid)

    # make the root a source: remove all of its incoming edges
    g.remove_edges_from(list(g.in_edges(root)))

    # remove in-edges from each successor, walking outward from the root
    nodes = list(g.successors(root))
    while nodes:
        random.shuffle(nodes)
        edges_to_remove = [edge for edge in g.in_edges(nodes) \
            if edge[::-1] in g.out_edges(nodes) \
            and g.in_degree(edge[1]) > 1]
        g.remove_edges_from(edges_to_remove)
        nodes = list(set([edge[1] for edge in g.out_edges(nodes)]))
    # remember the full pre-arborescence edge list so pruned edges can be re-added later
    shortest_paths = deepcopy(list(g.edges()))

    # calculate fifo product
    network_fifo_load_score(g)
    for edge in g.edges():
        g.edges[edge]['weight'] = g.nodes[edge[0]]['load'] * g.nodes[edge[1]]['load']

    g = nx.maximum_spanning_arborescence(g)

    if peturbation_delta is not None:
        # perturbation batch size: 25% of the pre-arborescence edge count (at least 1)
        size = int(max(len(shortest_paths) / 4, 1))
        n = len(shortest_paths)
        g = check_peturbations(g, shortest_paths, size, n, peturbation_delta=peturbation_delta, pruning_algorithm=random_prune, scoring_algorithm=network_fifo_load_score)

    return generate_digraphs_from_upstream(g, root, ext)
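Step 3 of the docstring's algorithm, taken in isolation, reduces to weighting each edge by the product of its endpoint loads and keeping the maximum spanning arborescence. A sketch on a tiny 2x2 grid with invented per-node loads, standing in for whatever network_fifo_load_score assigns:

import networkx as nx

# tiny 2x2 grid with invented per-node fifo loads
g = nx.DiGraph()
loads = {(0, 0): 1.0, (0, 1): 2.0, (1, 0): 3.0, (1, 1): 4.0}
for node, load in loads.items():
    g.add_node(node, load=load)
g.add_edges_from([
    ((0, 0), (0, 1)), ((0, 0), (1, 0)),
    ((0, 1), (1, 1)), ((1, 0), (1, 1)),
])

# weight every edge by the product of its endpoint loads, then keep only the
# maximum spanning arborescence (rooted at (0, 0) here)
for u, v in g.edges():
    g.edges[u, v]['weight'] = g.nodes[u]['load'] * g.nodes[v]['load']
tree = nx.maximum_spanning_arborescence(g)
print(sorted(tree.edges()))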